commit a934caa7d3192ea1e3d8623de17d50dd15812094 Author: Prad Nukala Date: Thu Oct 9 15:10:39 2025 -0400 No commit suggestions generated diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..b47083e --- /dev/null +++ b/Makefile @@ -0,0 +1,283 @@ +# Crypto Module Makefile +# Provides targets for testing all cryptographic packages individually + +.PHONY: all test test-verbose clean help +.PHONY: test-accumulator test-bulletproof test-core test-daed test-dkg test-ecies +.PHONY: test-internal test-keys test-mpc test-ot test-paillier test-sharing +.PHONY: test-signatures test-subtle test-tecdsa test-ted25519 test-zkp +.PHONY: test-core-curves test-core-native test-dkg-frost test-dkg-gennaro test-dkg-gennaro2p +.PHONY: test-ot-base test-ot-extension test-sharing-v1 test-signatures-bbs test-signatures-bls +.PHONY: test-signatures-common test-signatures-schnorr test-tecdsa-dklsv1 test-ted25519-frost +.PHONY: test-ted25519-ted25519 test-zkp-schnorr +.PHONY: bench bench-verbose coverage coverage-html + +# Default target +all: test + +tidy: + @go mod tidy + @go mod download + +# Test all packages +test: tidy + @gum log --level info "Running tests for all crypto packages..." + @go test ./... -v + +# Test all packages with verbose output +test-verbose: tidy + @gum log --level info "Running verbose tests for all crypto packages..." + @go test ./... -v -count=1 + +# Help target +help: + @gum log --level info "Crypto Module Makefile" + @gum log --level info "" + @gum log --level info "Available targets:" + @gum log --level info " all - Run all tests (default)" + @gum log --level info " test - Run all tests" + @gum log --level info " test-verbose - Run all tests with verbose output" + @gum log --level info "" + @gum log --level info "Individual package tests:" + @gum log --level info " test-accumulator - Test accumulator package" + @gum log --level info " test-bulletproof - Test bulletproof package" + @gum log --level info " test-core - Test core package" + @gum log --level info " test-daed - Test daed package" + @gum log --level info " test-dkg - Test dkg package" + @gum log --level info " test-ecies - Test ecies package" + @gum log --level info " test-internal - Test internal package" + @gum log --level info " test-keys - Test keys package" + @gum log --level info " test-mpc - Test mpc package" + @gum log --level info " test-ot - Test ot package" + @gum log --level info " test-paillier - Test paillier package" + @gum log --level info " test-sharing - Test sharing package" + @gum log --level info " test-signatures - Test signatures package" + @gum log --level info " test-subtle - Test subtle package" + @gum log --level info " test-tecdsa - Test tecdsa package" + @gum log --level info " test-ted25519 - Test ted25519 package" + @gum log --level info " test-zkp - Test zkp package" + @gum log --level info "" + @gum log --level info "Subpackage tests:" + @gum log --level info " test-core-curves - Test core/curves package" + @gum log --level info " test-core-native - Test core/curves/native package" + @gum log --level info " test-dkg-frost - Test dkg/frost package" + @gum log --level info " test-dkg-gennaro - Test dkg/gennaro package" + @gum log --level info " test-dkg-gennaro2p - Test dkg/gennaro2p package" + @gum log --level info " test-ot-base - Test ot/base package" + @gum log --level info " test-ot-extension - Test ot/extension package" + @gum log --level info " test-sharing-v1 - Test sharing/v1 package" + @gum log --level info " test-signatures-bbs - Test signatures/bbs package" + @gum log 
--level info " test-signatures-bls - Test signatures/bls package" + @gum log --level info " test-signatures-common - Test signatures/common package" + @gum log --level info " test-signatures-schnorr - Test signatures/schnorr package" + @gum log --level info " test-tecdsa-dklsv1 - Test tecdsa/dklsv1 package" + @gum log --level info " test-ted25519-frost - Test ted25519/frost package" + @gum log --level info " test-ted25519-ted25519 - Test ted25519/ted25519 package" + @gum log --level info " test-zkp-schnorr - Test zkp/schnorr package" + @gum log --level info "" + @gum log --level info "Performance and coverage:" + @gum log --level info " bench - Run benchmarks" + @gum log --level info " bench-verbose - Run benchmarks with verbose output" + @gum log --level info " coverage - Generate test coverage report" + @gum log --level info " coverage-html - Generate HTML test coverage report" + +# Top-level package tests +test-accumulator: + @gum log --level info "Testing accumulator package..." + @go test ./accumulator -v + +test-bulletproof: + @gum log --level info "Testing bulletproof package..." + @go test ./bulletproof -v + +test-core: + @gum log --level info "Testing core package..." + @go test ./core -v + +test-daed: + @gum log --level info "Testing daed package..." + @go test ./daed -v + +test-dkg: + @gum log --level info "Testing dkg package..." + @go test ./dkg/... -v + +test-ecies: + @gum log --level info "Testing ecies package..." + @go test ./ecies -v + +test-internal: + @gum log --level info "Testing internal package..." + @go test ./internal -v + +test-keys: + @gum log --level info "Testing keys package..." + @go test ./keys -v + +test-mpc: + @gum log --level info "Testing mpc package..." + @go test ./mpc -v + +test-ot: + @gum log --level info "Testing ot package..." + @go test ./ot/... -v + +test-paillier: + @gum log --level info "Testing paillier package..." + @go test ./paillier -v + +test-sharing: + @gum log --level info "Testing sharing package..." + @go test ./sharing/... -v + +test-signatures: + @gum log --level info "Testing signatures package..." + @go test ./signatures/... -v + +test-subtle: + @gum log --level info "Testing subtle package..." + @go test ./subtle -v + +test-tecdsa: + @gum log --level info "Testing tecdsa package..." + @go test ./tecdsa/... -v + +test-ted25519: + @gum log --level info "Testing ted25519 package..." + @go test ./ted25519/... -v + + +test-zkp: + @gum log --level info "Testing zkp package..." + @go test ./zkp/... -v + +# Subpackage tests +test-core-curves: + @gum log --level info "Testing core/curves package..." + @go test ./core/curves -v + +test-core-native: + @gum log --level info "Testing core/curves/native package..." + @go test ./core/curves/native/... -v + +test-dkg-frost: + @gum log --level info "Testing dkg/frost package..." + @go test ./dkg/frost -v + +test-dkg-gennaro: + @gum log --level info "Testing dkg/gennaro package..." + @go test ./dkg/gennaro -v + +test-dkg-gennaro2p: + @gum log --level info "Testing dkg/gennaro2p package..." + @go test ./dkg/gennaro2p -v + +test-ot-base: + @gum log --level info "Testing ot/base package..." + @go test ./ot/base/... -v + +test-ot-extension: + @gum log --level info "Testing ot/extension package..." + @go test ./ot/extension/... -v + +test-sharing-v1: + @gum log --level info "Testing sharing/v1 package..." + @go test ./sharing/v1 -v + +test-signatures-bbs: + @gum log --level info "Testing signatures/bbs package..." 
+ @go test ./signatures/bbs -v + +test-signatures-bls: + @gum log --level info "Testing signatures/bls package..." + @go test ./signatures/bls/... -v + +test-signatures-common: + @gum log --level info "Testing signatures/common package..." + @go test ./signatures/common -v + +test-signatures-schnorr: + @gum log --level info "Testing signatures/schnorr package..." + @go test ./signatures/schnorr/... -v + +test-tecdsa-dklsv1: + @gum log --level info "Testing tecdsa/dklsv1 package..." + @go test ./tecdsa/dklsv1/... -v + +test-ted25519-frost: + @gum log --level info "Testing ted25519/frost package..." + @go test ./ted25519/frost -v + +test-ted25519-ted25519: + @gum log --level info "Testing ted25519/ted25519 package..." + @go test ./ted25519/ted25519 -v + +test-zkp-schnorr: + @gum log --level info "Testing zkp/schnorr package..." + @go test ./zkp/schnorr -v + +# Performance testing +bench: + @gum log --level info "Running benchmarks for all packages..." + @go test ./... -bench=. -run=^$ + +bench-verbose: + @gum log --level info "Running verbose benchmarks for all packages..." + @go test ./... -bench=. -benchmem -run=^$ -v + +# Coverage testing +coverage: + @gum log --level info "Generating test coverage report..." + @go test ./... -coverprofile=coverage.out + @go tool cover -func=coverage.out + +coverage-html: + @gum log --level info "Generating HTML test coverage report..." + @go test ./... -coverprofile=coverage.out + @go tool cover -html=coverage.out -o coverage.html + @gum log --level info "Coverage report generated: coverage.html" + +# Build verification +build: + @gum log --level info "Building all packages to verify compilation..." + @go build ./... + +# Module maintenance +mod-tidy: + @gum log --level info "Tidying go.mod..." + @go mod tidy + +mod-verify: + @gum log --level info "Verifying go.mod..." + @go mod verify + +mod-download: + @gum log --level info "Downloading dependencies..." + @go mod download + +# Security scanning (if staticcheck is available) +lint: + @gum log --level info "Running static analysis..." + @if command -v staticcheck >/dev/null 2>&1; then \ + staticcheck ./...; \ + else \ + gum log --level warn "staticcheck not installed. Install with: go install honnef.co/go/tools/cmd/staticcheck@latest"; \ + go vet ./...; \ + fi + +# Format code +fmt: + @gum log --level info "Formatting code..." + @go fmt ./... + +# Check for formatting issues +fmt-check: + @gum log --level info "Checking code formatting..." + @test -z "$$(go fmt ./...)" + +# Run all quality checks +check: fmt-check lint test + @gum log --level info "✅ All quality checks passed!" + +# Clean generated coverage artifacts +clean: + @gum log --level info "Cleaning generated files..." + @rm -f coverage.out coverage.html + +# Development workflow +dev: clean mod-tidy fmt lint test + @gum log --level info "✅ Development workflow completed successfully!" diff --git a/README.md b/README.md new file mode 100644 index 0000000..d38a9e6 --- /dev/null +++ b/README.md @@ -0,0 +1,27 @@ +# Sonr Crypto + +Sonr Crypto is a collection of cryptographic primitives that are used by Sonr. + +## Packages + +- [Accumulator](./accumulator): Accumulator is a cryptographic accumulator that supports compact membership proofs for large sets of elements. +- [Bulletproof](./bulletproof): Bulletproof implements Bulletproofs, short non-interactive zero-knowledge proofs, primarily used as range proofs. +- [Core](./core): Core is a collection of low-level cryptographic primitives, including elliptic curves and native field arithmetic, used by the other packages. +- [Daed](./daed): Daed is a distributed key generation protocol used by Sonr.
+- [Dkg](./dkg): Dkg implements distributed key generation protocols, including FROST and Gennaro. +- [Ecies](./ecies): Ecies implements the Elliptic Curve Integrated Encryption Scheme, a hybrid public-key encryption scheme. +- [Keys](./keys): Keys is a collection of key types and key-handling primitives used by Sonr. +- [Mpc](./mpc): Mpc provides building blocks for secure multi-party computation used by Sonr. +- [Ot](./ot): Ot implements oblivious transfer protocols, including base OT and OT extension. +- [Paillier](./paillier): Paillier implements the Paillier additively homomorphic encryption scheme. +- [Sharing](./sharing): Sharing implements secret sharing schemes used by Sonr. +- [Signatures](./signatures): Signatures implements signature schemes, including BBS, BLS, and Schnorr. +- [Subtle](./subtle): Subtle is a collection of low-level cryptographic helper primitives used by Sonr. +- [Tecdsa](./tecdsa): Tecdsa implements threshold ECDSA signing, including the DKLs protocol (dklsv1). +- [Ted25519](./ted25519): Ted25519 implements threshold Ed25519 signing, including FROST. +- [Ucan](./ucan): Ucan provides primitives for User Controlled Authorization Network (UCAN) tokens used by Sonr. +- [Zkp](./zkp): Zkp implements zero-knowledge proofs, including Schnorr proofs of knowledge. + +## License + +This project is licensed under the [Apache 2.0 License](./LICENSE). diff --git a/accumulator/accumulator.go b/accumulator/accumulator.go new file mode 100644 index 0000000..4043d69 --- /dev/null +++ b/accumulator/accumulator.go @@ -0,0 +1,179 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +// Package accumulator implements the cryptographic accumulator as described in https://eprint.iacr.org/2020/777.pdf +// It also implements the zero knowledge proof of knowledge protocol +// described in section 7 of the paper. +// Note: the paper only describes the non-membership witness case, but we don't +// use non-membership witnesses. We only implement the membership witness case. +package accumulator + +import ( + "fmt" + + "git.sr.ht/~sircmpwn/go-bare" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +type structMarshal struct { + Curve string `bare:"curve"` + Value []byte `bare:"value"` +} + +// Element is an element accumulated into the set, represented as a curve scalar. +type Element curves.Scalar + +// Coefficient is a point returned by batch updates and consumed by witness updates. +type Coefficient curves.Point + +// Accumulator holds the accumulator value, a point in G1. +type Accumulator struct { + value curves.Point +} + +// New creates a new accumulator. +func (acc *Accumulator) New(curve *curves.PairingCurve) (*Accumulator, error) { + // If we need to support non-membership witness, we need to implement Accumulator Initialization + // as described in section 6 of + // for now we don't need non-membership witness + + // i.e., it computes V0 = prod(y + α) * P, y ∈ Y_V0, P is a generator of G1. Since we do not use non-membership witnesses + // we just set the initial accumulator to a G1 generator.
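	// With only membership witnesses supported, the initial element set Y_V0 is empty,
	// so the product prod(y + α) over Y_V0 is the empty product and V0 reduces to the
	// generator P itself. Each later Add/AddElements call folds another (y + α) factor
	// into the value, so after accumulating a set Y the value is V = prod_{y in Y}(y + α) * P.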
+ acc.value = curve.Scalar.Point().Generator() + return acc, nil +} + +// WithElements initializes a new accumulator prefilled with entries +// Each member is assumed to be hashed +// V = prod(y + α) * V0, for all y∈ Y_V +func (acc *Accumulator) WithElements( + curve *curves.PairingCurve, + key *SecretKey, + m []Element, +) (*Accumulator, error) { + _, err := acc.New(curve) + if err != nil { + return nil, err + } + y, err := key.BatchAdditions(m) + if err != nil { + return nil, err + } + acc.value = acc.value.Mul(y) + return acc, nil +} + +// AddElements accumulates a set of elements into the accumulator. +func (acc *Accumulator) AddElements(key *SecretKey, m []Element) (*Accumulator, error) { + if acc.value == nil || key.value == nil { + return nil, fmt.Errorf("accumulator and secret key should not be nil") + } + y, err := key.BatchAdditions(m) + if err != nil { + return nil, err + } + acc.value = acc.value.Mul(y) + return acc, nil +} + +// Add accumulates a single element into the accumulator +// V' = (y + alpha) * V +func (acc *Accumulator) Add(key *SecretKey, e Element) (*Accumulator, error) { + if acc.value == nil || acc.value.IsIdentity() || key.value == nil || e == nil { + return nil, fmt.Errorf("accumulator, secret key and element should not be nil") + } + y := e.Add(key.value) // y + alpha + acc.value = acc.value.Mul(y) + return acc, nil +} + +// Remove removes a single element from accumulator if it exists +// V' = 1/(y+alpha) * V +func (acc *Accumulator) Remove(key *SecretKey, e Element) (*Accumulator, error) { + if acc.value == nil || acc.value.IsIdentity() || key.value == nil || e == nil { + return nil, fmt.Errorf("accumulator, secret key and element should not be nil") + } + y := e.Add(key.value) // y + alpha + y, err := y.Invert() // 1/(y+alpha) + if err != nil { + return nil, err + } + acc.value = acc.value.Mul(y) + return acc, nil +} + +// Update performs a batch addition and deletion as described on page 7, section 3 in +// https://eprint.iacr.org/2020/777.pdf +func (acc *Accumulator) Update( + key *SecretKey, + additions []Element, + deletions []Element, +) (*Accumulator, []Coefficient, error) { + if acc.value == nil || acc.value.IsIdentity() || key.value == nil { + return nil, nil, fmt.Errorf("accumulator and secret key should not be nil") + } + + // Compute dA(-alpha) = prod(y + alpha), y in the set of A ⊆ ACC-Y_V + a, err := key.BatchAdditions(additions) + if err != nil { + return nil, nil, err + } + + // Compute dD(-alpha) = 1/prod(y + alpha), y in the set of D ⊆ Y_V + d, err := key.BatchDeletions(deletions) + if err != nil { + return nil, nil, err + } + + // dA(-alpha)/dD(-alpha) + div := a.Mul(d) + newAcc := acc.value.Mul(div) + + // build an array of coefficients + elements, err := key.CreateCoefficients(additions, deletions) + if err != nil { + return nil, nil, err + } + + coefficients := make([]Coefficient, len(elements)) + for i := range elements { + coefficients[i] = acc.value.Mul(elements[i]) + } + acc.value = newAcc + return acc, coefficients, nil +} + +// MarshalBinary converts Accumulator to bytes +func (acc Accumulator) MarshalBinary() ([]byte, error) { + if acc.value == nil { + return nil, fmt.Errorf("accumulator cannot be nil") + } + tv := &structMarshal{ + Value: acc.value.ToAffineCompressed(), + Curve: acc.value.CurveName(), + } + return bare.Marshal(tv) +} + +// UnmarshalBinary sets Accumulator from bytes +func (acc *Accumulator) UnmarshalBinary(data []byte) error { + tv := new(structMarshal) + err := bare.Unmarshal(data, tv) + if err != nil { + 
return err + } + curve := curves.GetCurveByName(tv.Curve) + if curve == nil { + return fmt.Errorf("invalid curve") + } + + value, err := curve.NewIdentityPoint().FromAffineCompressed(tv.Value) + if err != nil { + return err + } + acc.value = value + return nil +} diff --git a/accumulator/accumulator_test.go b/accumulator/accumulator_test.go new file mode 100644 index 0000000..1b326c0 --- /dev/null +++ b/accumulator/accumulator_test.go @@ -0,0 +1,221 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package accumulator + +import ( + "encoding/hex" + "fmt" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +func TestNewAccumulator100(t *testing.T) { + curve := curves.BLS12381(&curves.PointBls12381G1{}) + var seed [32]byte + key, err := new(SecretKey).New(curve, seed[:]) + require.NoError(t, err) + require.NotNil(t, key) + acc, err := new(Accumulator).New(curve) + require.NoError(t, err) + accBz, err := acc.MarshalBinary() + require.NoError(t, err) + fmt.Println(accBz) + fmt.Println(len(accBz)) + fmt.Println(hex.EncodeToString(accBz)) + fmt.Println(len(hex.EncodeToString(accBz))) + require.Equal(t, 60, len(accBz), "Marshalled accumulator should be 60 bytes") + require.Equal( + t, + 120, + len(hex.EncodeToString(accBz)), + "Hex-encoded accumulator should be 120 characters", + ) + require.NotNil(t, acc) + require.Equal(t, acc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed()) +} + +func TestNewAccumulator10K(t *testing.T) { + curve := curves.BLS12381(&curves.PointBls12381G1{}) + var seed [32]byte + key, err := new(SecretKey).New(curve, seed[:]) + require.NoError(t, err) + require.NotNil(t, key) + acc, err := new(Accumulator).New(curve) + require.NoError(t, err) + require.NotNil(t, acc) + require.Equal(t, acc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed()) +} + +func TestNewAccumulator10M(t *testing.T) { + // Initiating 10M values takes time + if testing.Short() { + t.Skip("skipping test in short mode.") + } + curve := curves.BLS12381(&curves.PointBls12381G1{}) + var seed [32]byte + key, err := new(SecretKey).New(curve, seed[:]) + require.NoError(t, err) + require.NotNil(t, key) + acc, err := new(Accumulator).New(curve) + require.NoError(t, err) + require.NotNil(t, acc) + require.Equal(t, acc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed()) +} + +func TestWithElements(t *testing.T) { + curve := curves.BLS12381(&curves.PointBls12381G1{}) + var seed [32]byte + key, _ := new(SecretKey).New(curve, seed[:]) + element1 := curve.Scalar.Hash([]byte("value1")) + element2 := curve.Scalar.Hash([]byte("value2")) + elements := []Element{element1, element2} + newAcc, err := new(Accumulator).WithElements(curve, key, elements) + require.NoError(t, err) + require.NotNil(t, newAcc) + require.NotEqual( + t, + newAcc.value.ToAffineCompressed(), + curve.PointG1.Identity().ToAffineCompressed(), + ) + require.NotEqual( + t, + newAcc.value.ToAffineCompressed(), + curve.PointG1.Generator().ToAffineCompressed(), + ) + + _, _ = newAcc.Remove(key, element1) + _, _ = newAcc.Remove(key, element2) + require.Equal( + t, + newAcc.value.ToAffineCompressed(), + curve.PointG1.Generator().ToAffineCompressed(), + ) +} + +func TestAdd(t *testing.T) { + curve := curves.BLS12381(&curves.PointBls12381G1{}) + var seed [32]byte + key, err := new(SecretKey).New(curve, seed[:]) + require.NoError(t, err) + require.NotNil(t, key) + acc := 
&Accumulator{curve.PointG1.Generator()} + _, _ = acc.New(curve) + require.NoError(t, err) + require.NotNil(t, acc) + + element := curve.Scalar.Hash([]byte("value1")) + require.NoError(t, err) + require.NotNil(t, element) + _, _ = acc.Add(key, element) + require.NotEqual( + t, + acc.value.ToAffineCompressed(), + curve.PointG1.Generator().ToAffineCompressed(), + ) +} + +func TestRemove(t *testing.T) { + curve := curves.BLS12381(&curves.PointBls12381G1{}) + var seed [32]byte + key, err := new(SecretKey).New(curve, seed[:]) + require.NoError(t, err) + require.NotNil(t, key) + acc, err := new(Accumulator).New(curve) + require.NoError(t, err) + require.NotNil(t, acc) + require.Equal(t, acc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed()) + + element := curve.Scalar.Hash([]byte("value1")) + require.NoError(t, err) + require.NotNil(t, element) + + // add element + _, _ = acc.Add(key, element) + require.NotEqual( + t, + acc.value.ToAffineCompressed(), + curve.PointG1.Generator().ToAffineCompressed(), + ) + + // remove element + acc, err = acc.Remove(key, element) + require.NoError(t, err) + require.Equal(t, acc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed()) +} + +func TestAddElements(t *testing.T) { + curve := curves.BLS12381(&curves.PointBls12381G1{}) + var seed [32]byte + key, err := new(SecretKey).New(curve, seed[:]) + require.NoError(t, err) + require.NotNil(t, key) + acc := &Accumulator{curve.PointG1.Generator()} + _, _ = acc.New(curve) + require.NoError(t, err) + require.NotNil(t, acc) + require.Equal(t, acc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed()) + + element1 := curve.Scalar.Hash([]byte("value1")) + element2 := curve.Scalar.Hash([]byte("value2")) + element3 := curve.Scalar.Hash([]byte("value3")) + elements := []Element{element1, element2, element3} + + acc, err = acc.AddElements(key, elements) + require.NoError(t, err) + require.NotEqual( + t, + acc.value.ToAffineCompressed(), + curve.PointG1.Generator().ToAffineCompressed(), + ) +} + +func TestAccumulatorMarshal(t *testing.T) { + curve := curves.BLS12381(&curves.PointBls12381G1{}) + point := curve.PointG1.Generator().Mul(curve.Scalar.New(2)) + data, err := Accumulator{point}.MarshalBinary() + require.NoError(t, err) + require.NotNil(t, data) + // element cannot be empty + _, err = Accumulator{}.MarshalBinary() + require.Error(t, err) + + e := &Accumulator{curve.PointG1.Generator()} + _ = e.UnmarshalBinary(data) + require.True(t, e.value.Equal(point)) +} + +func TestUpdate(t *testing.T) { + curve := curves.BLS12381(&curves.PointBls12381G1{}) + var seed [32]byte + key, err := new(SecretKey).New(curve, seed[:]) + require.NoError(t, err) + require.NotNil(t, key) + acc, err := new(Accumulator).New(curve) + require.NoError(t, err) + require.NotNil(t, acc) + require.Equal(t, acc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed()) + + element1 := curve.Scalar.Hash([]byte("value1")) + element2 := curve.Scalar.Hash([]byte("value2")) + element3 := curve.Scalar.Hash([]byte("value3")) + elements := []Element{element1, element2, element3} + + acc, _, err = acc.Update(key, elements, nil) + require.NoError(t, err) + require.NotEqual( + t, + acc.value.ToAffineCompressed(), + curve.PointG1.Generator().ToAffineCompressed(), + ) + + acc, _, err = acc.Update(key, nil, elements) + require.NoError(t, err) + require.Equal(t, acc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed()) +} diff --git a/accumulator/key.go 
b/accumulator/key.go new file mode 100644 index 0000000..67a49e0 --- /dev/null +++ b/accumulator/key.go @@ -0,0 +1,247 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package accumulator + +import ( + "errors" + "fmt" + + "git.sr.ht/~sircmpwn/go-bare" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +// SecretKey is the secret alpha only held by the accumulator manager. +type SecretKey struct { + value curves.Scalar +} + +// New creates a new secret key from the seed. +func (sk *SecretKey) New(curve *curves.PairingCurve, seed []byte) (*SecretKey, error) { + sk.value = curve.Scalar.Hash(seed) + return sk, nil +} + +// GetPublicKey creates a public key from SecretKey sk +func (sk SecretKey) GetPublicKey(curve *curves.PairingCurve) (*PublicKey, error) { + if sk.value == nil || curve == nil { + return nil, fmt.Errorf("curve and sk value cannot be nil") + } + value := curve.Scalar.Point().(curves.PairingPoint).OtherGroup().Generator().Mul(sk.value) + return &PublicKey{value.(curves.PairingPoint)}, nil +} + +// MarshalBinary converts SecretKey to bytes +func (sk SecretKey) MarshalBinary() ([]byte, error) { + if sk.value == nil { + return nil, fmt.Errorf("sk cannot be empty") + } + tv := &structMarshal{ + Value: sk.value.Bytes(), + Curve: sk.value.Point().CurveName(), + } + return bare.Marshal(tv) +} + +// UnmarshalBinary sets SecretKey from bytes +func (sk *SecretKey) UnmarshalBinary(data []byte) error { + tv := new(structMarshal) + err := bare.Unmarshal(data, tv) + if err != nil { + return err + } + curve := curves.GetCurveByName(tv.Curve) + if curve == nil { + return fmt.Errorf("invalid curve") + } + + value, err := curve.NewScalar().SetBytes(tv.Value) + if err != nil { + return err + } + sk.value = value + return nil +} + +// BatchAdditions computes product(y + sk) for y in additions and output the product +func (sk SecretKey) BatchAdditions(additions []Element) (Element, error) { + if sk.value == nil { + return nil, fmt.Errorf("secret key cannot be empty") + } + mul := sk.value.One() + for i := 0; i < len(additions); i++ { + if additions[i] == nil { + return nil, fmt.Errorf("some element in additions is nil") + } + // y + alpha + temp := additions[i].Add(sk.value) + // prod(y + alpha) + mul = mul.Mul(temp) + } + return mul, nil +} + +// BatchDeletions computes 1/product(y + sk) for y in deletions and output it +func (sk SecretKey) BatchDeletions(deletions []Element) (Element, error) { + v, err := sk.BatchAdditions(deletions) + if err != nil { + return nil, err + } + y, err := v.Invert() + if err != nil { + return nil, err + } + return y, nil +} + +// CreateCoefficients creates the Batch Polynomial coefficients +// See page 7 of https://eprint.iacr.org/2020/777.pdf +func (sk SecretKey) CreateCoefficients( + additions []Element, + deletions []Element, +) ([]Element, error) { + if sk.value == nil { + return nil, fmt.Errorf("secret key should not be nil") + } + + // vD(x) = ∑^{m}_{s=1}{ ∏ 1..s {yD_i + alpha}^-1 ∏ 1 ..s-1 {yD_j - x} + one := sk.value.One() + m1 := one.Neg() // m1 is -1 + vD := make(polynomial, 0, len(deletions)) + for s := 0; s < len(deletions); s++ { + // ∏ 1..s (yD_i + alpha)^-1 + c, err := sk.BatchDeletions(deletions[0 : s+1]) + if err != nil { + return nil, fmt.Errorf("error in sk batchDeletions") + } + poly := make(polynomial, 1, s+2) + poly[0] = one + + // ∏ 1..(s-1) (yD_j - x) + for j := 0; j < s; j++ { + t := make(polynomial, 2) + // yD_j + t[0] = deletions[j] + // -x + t[1] = m1 + + // polynomial 
multiplication (yD_1-x) * (yD_2 - x) ... + poly, err = poly.Mul(t) + if err != nil { + return nil, err + } + } + poly, err = poly.MulScalar(c) + if err != nil { + return nil, err + } + vD, err = vD.Add(poly) + if err != nil { + return nil, err + } + } + + // vD(x) * ∏ 1..n (yA_i + alpha) + bAdd, err := sk.BatchAdditions(additions) + if err != nil { + return nil, fmt.Errorf("error in sk batchAdditions") + } + vD, err = vD.MulScalar(bAdd) + if err != nil { + return nil, err + } + + // vA(x) = ∑^n_{s=1}{ ∏ 1..s-1 {yA_i + alpha} ∏ s+1..n {yA_j - x} } + vA := make(polynomial, 0, len(additions)) + for s := range additions { + // ∏ 1..s-1 {yA_i + alpha} + var c Element + if s == 0 { + c = one + } else { + c, err = sk.BatchAdditions(additions[0:s]) + if err != nil { + return nil, err + } + } + poly := make(polynomial, 1, s+2) + poly[0] = one + + // ∏ s+1..n {yA_j - x} + for j := s + 1; j < len(additions); j++ { + t := make(polynomial, 2) + t[0] = additions[j] + t[1] = m1 + + // polynomial multiplication (yA_1-x) * (yA_2 - x) ... + poly, err = poly.Mul(t) + if err != nil { + return nil, err + } + } + poly, err = poly.MulScalar(c) + if err != nil { + return nil, err + } + vA, err = vA.Add(poly) + if err != nil { + return nil, err + } + } + + // vA - vD + vA, err = vA.Sub(vD) + if err != nil { + return nil, err + } + result := make([]Element, len(vA)) + for i := 0; i < len(vA); i++ { + result[i] = vA[i] + } + return result, nil +} + +// PublicKey is the public key of accumulator, it should be sk * generator of G2 +type PublicKey struct { + value curves.PairingPoint +} + +// MarshalBinary converts PublicKey to bytes +func (pk PublicKey) MarshalBinary() ([]byte, error) { + if pk.value == nil { + return nil, fmt.Errorf("public key cannot be nil") + } + tv := &structMarshal{ + Value: pk.value.ToAffineCompressed(), + Curve: pk.value.CurveName(), + } + return bare.Marshal(tv) +} + +// UnmarshalBinary sets PublicKey from bytes +func (pk *PublicKey) UnmarshalBinary(data []byte) error { + tv := new(structMarshal) + err := bare.Unmarshal(data, tv) + if err != nil { + return err + } + curve := curves.GetPairingCurveByName(tv.Curve) + if curve == nil { + return fmt.Errorf("invalid curve") + } + + value, err := curve.NewScalar().Point().FromAffineCompressed(tv.Value) + if err != nil { + return err + } + var ok bool + pk.value, ok = value.(curves.PairingPoint) + if !ok { + return errors.New("can't convert to PairingPoint") + } + return nil +} diff --git a/accumulator/key_test.go b/accumulator/key_test.go new file mode 100644 index 0000000..07f1efc --- /dev/null +++ b/accumulator/key_test.go @@ -0,0 +1,88 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package accumulator + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +func TestSecretKeyMarshal(t *testing.T) { + curve := curves.BLS12381(&curves.PointBls12381G1{}) + data, err := SecretKey{curve.Scalar.One()}.MarshalBinary() + require.NoError(t, err) + require.NotNil(t, data) + e := &SecretKey{curve.Scalar.New(2)} + err = e.UnmarshalBinary(data) + require.NoError(t, err) + require.Equal(t, e.value.Bytes(), curve.Scalar.One().Bytes()) + + // element cannot be empty + _, err = SecretKey{}.MarshalBinary() + require.Error(t, err) +} + +func TestPublicKeyMarshal(t *testing.T) { + // Actually test both toBytes() and from() + curve := curves.BLS12381(&curves.PointBls12381G1{}) + sk := &SecretKey{curve.Scalar.New(3)} + pk, _ := sk.GetPublicKey(curve) + pkBytes, err := pk.MarshalBinary() + require.NoError(t, err) + require.NotNil(t, pkBytes) + + pk2 := &PublicKey{} + err = pk2.UnmarshalBinary(pkBytes) + require.NoError(t, err) + require.True(t, pk.value.Equal(pk2.value)) +} + +func TestBatch(t *testing.T) { + curve := curves.BLS12381(&curves.PointBls12381G1{}) + var seed [32]byte + sk, _ := new(SecretKey).New(curve, seed[:]) + element1 := curve.Scalar.Hash([]byte("value1")) + element2 := curve.Scalar.Hash([]byte("value2")) + elements := []Element{element1, element2} + + add, err := sk.BatchAdditions(elements) + require.NoError(t, err) + require.NotNil(t, add) + + del, err := sk.BatchDeletions(elements) + require.NoError(t, err) + require.NotNil(t, del) + + result := add.Mul(del) + require.Equal(t, result, curve.Scalar.One()) + + g1 := curve.PointG1.Generator() + acc := g1.Mul(add) + require.NotEqual(t, acc, g1) + acc = acc.Mul(del) + require.Equal(t, acc.ToAffineCompressed(), g1.ToAffineCompressed()) + + acc2 := g1.Mul(result) + require.True(t, acc2.Equal(g1)) +} + +func TestCoefficient(t *testing.T) { + curve := curves.BLS12381(&curves.PointBls12381G1{}) + sk, _ := new(SecretKey).New(curve, []byte("1234567890")) + element1 := curve.Scalar.Hash([]byte("value1")) + element2 := curve.Scalar.Hash([]byte("value2")) + element3 := curve.Scalar.Hash([]byte("value3")) + element4 := curve.Scalar.Hash([]byte("value4")) + element5 := curve.Scalar.Hash([]byte("value5")) + elements := []Element{element1, element2, element3, element4, element5} + coefficients, err := sk.CreateCoefficients(elements[0:2], elements[2:5]) + require.NoError(t, err) + require.Equal(t, len(coefficients), 3) +} diff --git a/accumulator/lib.go b/accumulator/lib.go new file mode 100644 index 0000000..1c54380 --- /dev/null +++ b/accumulator/lib.go @@ -0,0 +1,204 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package accumulator + +import ( + "fmt" + "math" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +// dad constructs two polynomials - dA(x) and dD(x) +// dA(y) = prod(y_A,t - y), t = 1...n +// dD(y) = prod(y_D,t - y), t = 1...n +func dad(values []Element, y Element) (Element, error) { + if values == nil || y == nil { + return nil, fmt.Errorf("curve, values or y should not be nil") + } + + for _, value := range values { + if value == nil { + return nil, fmt.Errorf("some element is nil") + } + } + + result := y.One() + if len(values) == 1 { + a := values[0] + result = a.Sub(y) + } else { + for i := range values { + temp := values[i].Sub(y) + result = result.Mul(temp) + } + } + return result, nil +} + +type polynomialPoint []curves.Point + +// evaluate evaluates a PolynomialG1 on input x. +func (p polynomialPoint) evaluate(x curves.Scalar) (curves.Point, error) { + if p == nil { + return nil, fmt.Errorf("p cannot be empty") + } + for i := 0; i < len(p); i++ { + if p[i] == nil { + return nil, fmt.Errorf("some coefficient in p is nil") + } + } + + pp := x + res := p[0] + for i := 1; i < len(p); i++ { + r := p[i].Mul(pp) + res = res.Add(r) + pp = pp.Mul(x) + } + return res, nil +} + +// Add adds two PolynomialG1 +func (p polynomialPoint) Add(rhs polynomialPoint) (polynomialPoint, error) { + maxLen := int(math.Max(float64(len(p)), float64(len(rhs)))) + + result := make(polynomialPoint, maxLen) + + for i, c := range p { + if c == nil { + return nil, fmt.Errorf("invalid coefficient at %d", i) + } + result[i] = c.Add(c.Identity()) + } + + for i, c := range rhs { + if c == nil { + return nil, fmt.Errorf("invalid coefficient at %d", i) + } + if result[i] == nil { + result[i] = c.Add(c.Identity()) + } else { + result[i] = result[i].Add(c) + } + } + return result, nil +} + +// Mul for PolynomialG1 computes rhs * p, p is a polynomial, rhs is a value +func (p polynomialPoint) Mul(rhs curves.Scalar) (polynomialPoint, error) { + result := make(polynomialPoint, len(p)) + + for i, c := range p { + if c == nil { + return nil, fmt.Errorf("invalid coefficient at %d", i) + } + result[i] = c.Mul(rhs) + } + + return result, nil +} + +type polynomial []curves.Scalar + +// Add adds two polynomials +func (p polynomial) Add(rhs polynomial) (polynomial, error) { + maxLen := int(math.Max(float64(len(p)), float64(len(rhs)))) + result := make([]curves.Scalar, maxLen) + + for i, c := range p { + if c == nil { + return nil, fmt.Errorf("invalid coefficient at %d", i) + } + result[i] = c.Clone() + } + + for i, c := range rhs { + if c == nil { + return nil, fmt.Errorf("invalid coefficient at %d", i) + } + if result[i] == nil { + result[i] = c.Clone() + } else { + result[i] = result[i].Add(c) + } + } + + return result, nil +} + +// Sub computes p-rhs and returns +func (p polynomial) Sub(rhs polynomial) (polynomial, error) { + maxLen := int(math.Max(float64(len(p)), float64(len(rhs)))) + result := make([]curves.Scalar, maxLen) + + for i, c := range p { + if c == nil { + return nil, fmt.Errorf("invalid coefficient at %d", i) + } + result[i] = c.Clone() + } + + for i, c := range rhs { + if c == nil { + return nil, fmt.Errorf("invalid coefficient at %d", i) + } + if result[i] == nil { + result[i] = c.Neg() + } else { + result[i] = result[i].Sub(c) + } + } + + return result, nil +} + +// Mul multiplies two polynomials - p * rhs +func (p polynomial) Mul(rhs polynomial) (polynomial, error) { + // Check for each coefficient that should not be nil + for i, c := range p { + if c == 
nil { + return nil, fmt.Errorf("coefficient in p at %d is nil", i) + } + } + + for i, c := range rhs { + if c == nil { + return nil, fmt.Errorf("coefficient in rhs at %d is nil", i) + } + } + + m := len(p) + n := len(rhs) + + // Initialize the product polynomial + prod := make(polynomial, m+n-1) + for i := 0; i < len(prod); i++ { + prod[i] = p[0].Zero() + } + + // Multiply two polynomials term by term + for i, cp := range p { + for j, cr := range rhs { + temp := cp.Mul(cr) + prod[i+j] = prod[i+j].Add(temp) + } + } + return prod, nil +} + +// MulScalar computes p * rhs, where rhs is a scalar value +func (p polynomial) MulScalar(rhs curves.Scalar) (polynomial, error) { + result := make(polynomial, len(p)) + for i, c := range p { + if c == nil { + return nil, fmt.Errorf("coefficient at %d is nil", i) + } + result[i] = c.Mul(rhs) + } + return result, nil +} diff --git a/accumulator/lib_test.go b/accumulator/lib_test.go new file mode 100644 index 0000000..173b53a --- /dev/null +++ b/accumulator/lib_test.go @@ -0,0 +1,404 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package accumulator + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +func TestEvaluatePolyG1(t *testing.T) { + curve := curves.BLS12381(&curves.PointBls12381G1{}) + poly := polynomialPoint{ + curve.PointG1.Generator().Mul(curve.Scalar.New(3)), + curve.PointG1.Generator().Mul(curve.Scalar.New(2)), + curve.PointG1.Generator().Mul(curve.Scalar.New(1)), + } + output1, err := poly.evaluate(curve.Scalar.New(1)) + require.NoError(t, err) + require.NotNil(t, output1) + result1 := curve.PointG1.Generator().Mul(curve.Scalar.New(6)) + require.Equal(t, output1.ToAffineCompressed(), result1.ToAffineCompressed()) + + output2, err := poly.evaluate(curve.Scalar.New(2)) + require.NoError(t, err) + require.NotNil(t, output2) + result2 := curve.PointG1.Generator().Mul(curve.Scalar.New(11)) + require.Equal(t, output2.ToAffineCompressed(), result2.ToAffineCompressed()) +} + +func TestEvaluatePolyG1Error(t *testing.T) { + curve := curves.BLS12381(&curves.PointBls12381G1{}) + poly := polynomialPoint{ + nil, + curve.PointG1.Generator().Mul(curve.Scalar.New(2)), + curve.PointG1.Generator().Mul(curve.Scalar.New(1)), + } + _, err := poly.evaluate(curve.Scalar.New(1)) + require.Error(t, err) +} + +func TestAddAssignPolyG1(t *testing.T) { + curve := curves.BLS12381(&curves.PointBls12381G1{}) + // Test polynomial with equal length + poly1 := polynomialPoint{ + curve.PointG1.Generator().Mul(curve.Scalar.New(3)), + curve.PointG1.Generator().Mul(curve.Scalar.New(2)), + curve.PointG1.Generator().Mul(curve.Scalar.New(1)), + } + poly2 := polynomialPoint{ + curve.PointG1.Generator().Mul(curve.Scalar.New(1)), + curve.PointG1.Generator().Mul(curve.Scalar.New(2)), + curve.PointG1.Generator().Mul(curve.Scalar.New(3)), + } + + output, err := poly1.Add(poly2) + require.NoError(t, err) + require.NotNil(t, output) + result := polynomialPoint{ + curve.PointG1.Generator().Mul(curve.Scalar.New(4)), + curve.PointG1.Generator().Mul(curve.Scalar.New(4)), + curve.PointG1.Generator().Mul(curve.Scalar.New(4)), + } + for i := 0; i < len(output); i++ { + require.Equal(t, output[i].ToAffineCompressed(), result[i].ToAffineCompressed()) + } + + // Test polynomials with unequal length + poly3 := polynomialPoint{ + curve.PointG1.Generator().Mul(curve.Scalar.New(1)), + curve.PointG1.Generator().Mul(curve.Scalar.New(2)), + } + output2, err := poly1.Add(poly3) + 
require.NoError(t, err) + require.NotNil(t, output2) + result2 := polynomialPoint{ + curve.PointG1.Generator().Mul(curve.Scalar.New(4)), + curve.PointG1.Generator().Mul(curve.Scalar.New(4)), + curve.PointG1.Generator().Mul(curve.Scalar.New(1)), + } + require.Equal(t, len(output2), len(result2)) + for i := 0; i < len(output2); i++ { + require.Equal(t, output2[i].ToAffineCompressed(), result2[i].ToAffineCompressed()) + } + + // Test polynomial with Capacity + poly4 := make(polynomialPoint, 0, 3) + poly5, err := poly4.Add(poly1) + require.NoError(t, err) + require.Equal(t, len(poly5), len(poly1)) + for i := 0; i < len(poly5); i++ { + require.Equal(t, poly5[i].ToAffineCompressed(), poly1[i].ToAffineCompressed()) + } +} + +func TestAddAssignPolyG1Error(t *testing.T) { + curve := curves.BLS12381(&curves.PointBls12381G1{}) + poly1 := polynomialPoint{ + nil, + curve.PointG1.Generator().Mul(curve.Scalar.New(2)), + curve.PointG1.Generator().Mul(curve.Scalar.New(1)), + } + poly2 := polynomialPoint{ + curve.PointG1.Generator().Mul(curve.Scalar.New(1)), + curve.PointG1.Generator().Mul(curve.Scalar.New(2)), + curve.PointG1.Generator().Mul(curve.Scalar.New(3)), + } + output, err := poly1.Add(poly2) + require.Error(t, err) + require.Nil(t, output) +} + +func TestMulAssignPolyG1(t *testing.T) { + curve := curves.BLS12381(&curves.PointBls12381G1{}) + poly := polynomialPoint{ + curve.PointG1.Generator().Mul(curve.Scalar.New(3)), + curve.PointG1.Generator().Mul(curve.Scalar.New(2)), + curve.PointG1.Generator().Mul(curve.Scalar.New(1)), + } + rhs := curve.Scalar.New(3) + output, err := poly.Mul(rhs) + require.NoError(t, err) + require.NotNil(t, output) + poly2 := polynomialPoint{ + curve.PointG1.Generator().Mul(curve.Scalar.New(9)), + curve.PointG1.Generator().Mul(curve.Scalar.New(6)), + curve.PointG1.Generator().Mul(curve.Scalar.New(3)), + } + for i := 0; i < len(poly2); i++ { + require.Equal(t, output[i].ToAffineCompressed(), poly2[i].ToAffineCompressed()) + } +} + +func TestMulAssignPolyG1Error(t *testing.T) { + curve := curves.BLS12381(&curves.PointBls12381G1{}) + poly := polynomialPoint{ + nil, + curve.PointG1.Generator().Mul(curve.Scalar.New(2)), + curve.PointG1.Generator().Mul(curve.Scalar.New(1)), + } + rhs := curve.Scalar.New(3) + output, err := poly.Mul(rhs) + require.Error(t, err) + require.Nil(t, output) +} + +func TestPushPoly(t *testing.T) { + curve := curves.BLS12381(&curves.PointBls12381G1{}) + poly := polynomial{ + curve.Scalar.New(3), + curve.Scalar.New(2), + curve.Scalar.New(1), + } + scalar := curve.Scalar.New(4) + result := append(poly, scalar) + require.Equal(t, result[3], scalar) + + // Push one more + scalar2 := curve.Scalar.New(5) + result2 := append(result, scalar2) + require.Equal(t, result2[4], scalar2) + + // Push to a new polynomial + newPoly := polynomial{} + newPoly = append(newPoly, scalar) + require.Equal(t, newPoly[0], scalar) + newPoly = append(newPoly, scalar2) + require.Equal(t, newPoly[1], scalar2) +} + +func TestAddAssignPoly(t *testing.T) { + curve := curves.BLS12381(&curves.PointBls12381G1{}) + // Test polynomial with equal length + poly1 := polynomial{ + curve.Scalar.New(3), + curve.Scalar.New(2), + curve.Scalar.New(1), + } + poly2 := polynomial{ + curve.Scalar.New(1), + curve.Scalar.New(2), + curve.Scalar.New(3), + } + + output, err := poly1.Add(poly2) + require.NoError(t, err) + require.NotNil(t, output) + result := []curves.Scalar{ + curve.Scalar.New(4), + curve.Scalar.New(4), + curve.Scalar.New(4), + } + for i := 0; i < len(output); i++ { + require.Equal(t, 
output[i], result[i]) + } + + // Test polynomials with unequal length + poly3 := polynomial{ + curve.Scalar.New(1), + curve.Scalar.New(2), + } + output2, err := poly1.Add(poly3) + require.NoError(t, err) + require.NotNil(t, output2) + result2 := []curves.Scalar{ + curve.Scalar.New(4), + curve.Scalar.New(4), + curve.Scalar.New(1), + } + require.Equal(t, len(output2), len(result2)) + for i := 0; i < len(output2); i++ { + require.Equal(t, output2[i], result2[i]) + } +} + +func TestAddAssignPolyError(t *testing.T) { + curve := curves.BLS12381(&curves.PointBls12381G1{}) + // Test polynomial with equal length + poly1 := polynomial{ + nil, + curve.Scalar.New(2), + curve.Scalar.New(1), + } + poly2 := polynomial{ + curve.Scalar.New(1), + curve.Scalar.New(2), + curve.Scalar.New(3), + } + + output, err := poly1.Add(poly2) + require.Error(t, err) + require.Nil(t, output) +} + +func TestSubAssignPoly(t *testing.T) { + curve := curves.BLS12381(&curves.PointBls12381G1{}) + // Test polynomial with equal length + poly1 := polynomial{ + curve.Scalar.New(3), + curve.Scalar.New(2), + curve.Scalar.New(1), + } + poly2 := polynomial{ + curve.Scalar.New(1), + curve.Scalar.New(2), + curve.Scalar.New(3), + } + + output, err := poly1.Sub(poly2) + require.NoError(t, err) + require.NotNil(t, output) + result := []curves.Scalar{ + curve.Scalar.New(2), + curve.Scalar.New(0), + curve.Scalar.New(-2), + } + for i := 0; i < len(output); i++ { + require.Equal(t, output[i].Bytes(), result[i].Bytes()) + } + + // Test polynomials with unequal length + poly3 := polynomial{ + curve.Scalar.New(1), + curve.Scalar.New(2), + curve.Scalar.New(3), + curve.Scalar.New(4), + } + output2, err := poly1.Sub(poly3) + require.NoError(t, err) + require.NotNil(t, output2) + result2 := []curves.Scalar{ + curve.Scalar.New(2), + curve.Scalar.New(0), + curve.Scalar.New(-2), + curve.Scalar.New(-4), + } + require.Equal(t, len(output2), len(result2)) + for i := 0; i < len(output2); i++ { + require.Equal(t, output2[i].Bytes(), result2[i].Bytes()) + } +} + +func TestSubAssignPolyError(t *testing.T) { + curve := curves.BLS12381(&curves.PointBls12381G1{}) + poly1 := polynomial{ + nil, + curve.Scalar.New(2), + curve.Scalar.New(1), + } + poly2 := polynomial{ + curve.Scalar.New(1), + curve.Scalar.New(2), + curve.Scalar.New(3), + } + + output, err := poly1.Sub(poly2) + require.Error(t, err) + require.Nil(t, output) +} + +func TestMulAssignPoly(t *testing.T) { + curve := curves.BLS12381(&curves.PointBls12381G1{}) + // Test polynomial with equal length + poly1 := polynomial{ + curve.Scalar.New(3), + curve.Scalar.New(2), + curve.Scalar.New(1), + } + poly2 := polynomial{ + curve.Scalar.New(1), + curve.Scalar.New(2), + curve.Scalar.New(3), + } + + output, err := poly1.Mul(poly2) + require.NoError(t, err) + require.NotNil(t, output) + result := []curves.Scalar{ + curve.Scalar.New(3), + curve.Scalar.New(8), + curve.Scalar.New(14), + curve.Scalar.New(8), + curve.Scalar.New(3), + } + for i := 0; i < len(result); i++ { + require.Equal(t, output[i].Bytes(), result[i].Bytes()) + } + + // Test polynomials with unequal length + poly3 := polynomial{ + curve.Scalar.New(1), + curve.Scalar.New(2), + } + output2, err := poly1.Mul(poly3) + require.NoError(t, err) + require.NotNil(t, output2) + result2 := []curves.Scalar{ + curve.Scalar.New(3), + curve.Scalar.New(8), + curve.Scalar.New(5), + curve.Scalar.New(2), + } + require.Equal(t, len(output2), 4) + for i := 0; i < len(output2); i++ { + require.Equal(t, output2[i].Bytes(), result2[i].Bytes()) + } +} + +func 
TestMulAssignPolyError(t *testing.T) { + curve := curves.BLS12381(&curves.PointBls12381G1{}) + poly1 := polynomial{ + nil, + curve.Scalar.New(2), + curve.Scalar.New(1), + } + poly2 := polynomial{ + curve.Scalar.New(1), + curve.Scalar.New(2), + curve.Scalar.New(3), + } + output, err := poly1.Mul(poly2) + require.Error(t, err) + require.Nil(t, output) +} + +func TestMulValueAssignPoly(t *testing.T) { + curve := curves.BLS12381(&curves.PointBls12381G1{}) + poly := polynomial{ + curve.Scalar.New(3), + curve.Scalar.New(2), + curve.Scalar.New(1), + } + rhs := curve.Scalar.New(3) + output, err := poly.MulScalar(rhs) + require.NoError(t, err) + require.NotNil(t, output) + coefficients2 := []curves.Scalar{ + curve.Scalar.New(9), + curve.Scalar.New(6), + curve.Scalar.New(3), + } + for i := 0; i < len(coefficients2); i++ { + require.Equal(t, output[i].Bytes(), coefficients2[i].Bytes()) + } +} + +func TestMulValueAssignPolyError(t *testing.T) { + curve := curves.BLS12381(&curves.PointBls12381G1{}) + poly := polynomial{ + nil, + curve.Scalar.New(2), + curve.Scalar.New(1), + } + rhs := curve.Scalar.New(3) + output, err := poly.MulScalar(rhs) + require.Error(t, err) + require.Nil(t, output) +} diff --git a/accumulator/proof.go b/accumulator/proof.go new file mode 100644 index 0000000..6854fb1 --- /dev/null +++ b/accumulator/proof.go @@ -0,0 +1,527 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package accumulator + +import ( + "bytes" + crand "crypto/rand" + "errors" + "fmt" + + "git.sr.ht/~sircmpwn/go-bare" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +type proofParamsMarshal struct { + X []byte `bare:"x"` + Y []byte `bare:"y"` + Z []byte `bare:"z"` + Curve string `bare:"curve"` +} + +// ProofParams contains four distinct public generators of G1 - X, Y, Z +type ProofParams struct { + x, y, z curves.Point +} + +// New samples X, Y, Z, K +func (p *ProofParams) New( + curve *curves.PairingCurve, + pk *PublicKey, + entropy []byte, +) (*ProofParams, error) { + pkBytes, err := pk.MarshalBinary() + if err != nil { + return nil, err + } + prefix := bytes.Repeat([]byte{0xFF}, 32) + data := append(prefix, entropy...) + data = append(data, pkBytes...) 
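	// All three generators are hashed from the same transcript (a 32-byte 0xFF prefix,
	// the caller-supplied entropy, and the serialized public key); only the leading byte
	// differs per generator (0xFF for Z, 0xFE for Y, 0xFD for X, set just below). This
	// domain separation yields three independent points whose discrete logarithms are
	// unknown to both prover and verifier.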
+ p.z = curve.Scalar.Point().Hash(data) + + data[0] = 0xFE + p.y = curve.Scalar.Point().Hash(data) + + data[0] = 0xFD + p.x = curve.Scalar.Point().Hash(data) + + return p, nil +} + +// MarshalBinary converts ProofParams to bytes +func (p *ProofParams) MarshalBinary() ([]byte, error) { + if p.x == nil || p.y == nil || p.z == nil { + return nil, fmt.Errorf("some value x, y, or z is nil") + } + tv := &proofParamsMarshal{ + X: p.x.ToAffineCompressed(), + Y: p.y.ToAffineCompressed(), + Z: p.z.ToAffineCompressed(), + Curve: p.x.CurveName(), + } + return bare.Marshal(tv) +} + +// UnmarshalBinary converts bytes to ProofParams +func (p *ProofParams) UnmarshalBinary(data []byte) error { + if data == nil { + return fmt.Errorf("expected non-zero byte sequence") + } + tv := new(proofParamsMarshal) + err := bare.Unmarshal(data, tv) + if err != nil { + return err + } + curve := curves.GetCurveByName(tv.Curve) + if curve == nil { + return fmt.Errorf("invalid curve") + } + x, err := curve.NewIdentityPoint().FromAffineCompressed(tv.X) + if err != nil { + return err + } + y, err := curve.NewIdentityPoint().FromAffineCompressed(tv.Y) + if err != nil { + return err + } + z, err := curve.NewIdentityPoint().FromAffineCompressed(tv.Z) + if err != nil { + return err + } + p.x = x + p.y = y + p.z = z + return nil +} + +// MembershipProofCommitting contains value computed in Proof of knowledge and +// Blinding phases as described in section 7 of https://eprint.iacr.org/2020/777.pdf +type MembershipProofCommitting struct { + eC curves.Point + tSigma curves.Point + tRho curves.Point + deltaSigma curves.Scalar + deltaRho curves.Scalar + blindingFactor curves.Scalar + rSigma curves.Scalar + rRho curves.Scalar + rDeltaSigma curves.Scalar + rDeltaRho curves.Scalar + sigma curves.Scalar + rho curves.Scalar + capRSigma curves.Point + capRRho curves.Point + capRDeltaSigma curves.Point + capRDeltaRho curves.Point + capRE curves.Scalar + accumulator curves.Point + witnessValue curves.Scalar + xG1 curves.Point + yG1 curves.Point + zG1 curves.Point +} + +// New initiates values of MembershipProofCommitting +func (mpc *MembershipProofCommitting) New( + witness *MembershipWitness, + acc *Accumulator, + pp *ProofParams, + pk *PublicKey, +) (*MembershipProofCommitting, error) { + // Randomly select σ, ρ + sigma := witness.y.Random(crand.Reader) + rho := witness.y.Random(crand.Reader) + + // E_C = C + (σ + ρ)Z + t := sigma + t = t.Add(rho) + eC := pp.z + eC = eC.Mul(t) + eC = eC.Add(witness.c) + + // T_σ = σX + tSigma := pp.x + tSigma = tSigma.Mul(sigma) + + // T_ρ = ρY + tRho := pp.y + tRho = tRho.Mul(rho) + + // δ_σ = yσ + deltaSigma := witness.y + deltaSigma = deltaSigma.Mul(sigma) + + // δ_ρ = yρ + deltaRho := witness.y + deltaRho = deltaRho.Mul(rho) + + // Randomly pick r_σ,r_ρ,r_δσ,r_δρ + rY := witness.y.Random(crand.Reader) + rSigma := witness.y.Random(crand.Reader) + rRho := witness.y.Random(crand.Reader) + rDeltaSigma := witness.y.Random(crand.Reader) + rDeltaRho := witness.y.Random(crand.Reader) + + // R_σ = r_σ X + capRSigma := pp.x + capRSigma = capRSigma.Mul(rSigma) + + // R_ρ = ρY + capRRho := pp.y + capRRho = capRRho.Mul(rRho) + + // R_δσ = r_y T_σ - r_δσ X + negX := pp.x + negX = negX.Neg() + capRDeltaSigma := tSigma.Mul(rY) + capRDeltaSigma = capRDeltaSigma.Add(negX.Mul(rDeltaSigma)) + + // R_δρ = r_y T_ρ - r_δρ Y + negY := pp.y + negY = negY.Neg() + capRDeltaRho := tRho.Mul(rY) + capRDeltaRho = capRDeltaRho.Add(negY.Mul(rDeltaRho)) + + // P~ + g2 := pk.value.Generator() + + // -r_δσ - r_δρ + exp := rDeltaSigma + exp 
= exp.Add(rDeltaRho) + exp = exp.Neg() + + // -r_σ - r_ρ + exp2 := rSigma + exp2 = exp2.Add(rRho) + exp2 = exp2.Neg() + + // rY * eC + rYeC := eC.Mul(rY) + + // (-r_δσ - r_δρ)*Z + expZ := pp.z.Mul(exp) + + // (-r_σ - r_ρ)*Z + exp2Z := pp.z.Mul(exp2) + + // Prepare + rYeCPrep, ok := rYeC.(curves.PairingPoint) + if !ok { + return nil, errors.New("incorrect type conversion") + } + g2Prep, ok := g2.(curves.PairingPoint) + if !ok { + return nil, errors.New("incorrect type conversion") + } + expZPrep, ok := expZ.(curves.PairingPoint) + if !ok { + return nil, errors.New("incorrect type conversion") + } + exp2ZPrep, ok := exp2Z.(curves.PairingPoint) + if !ok { + return nil, errors.New("incorrect type conversion") + } + pkPrep := pk.value + + // Pairing + capRE := g2Prep.MultiPairing(rYeCPrep, g2Prep, expZPrep, g2Prep, exp2ZPrep, pkPrep) + + return &MembershipProofCommitting{ + eC, + tSigma, + tRho, + deltaSigma, + deltaRho, + rY, + rSigma, + rRho, + rDeltaSigma, + rDeltaRho, + sigma, + rho, + capRSigma, + capRRho, + capRDeltaSigma, + capRDeltaRho, + capRE, + acc.value, + witness.y, + pp.x, + pp.y, + pp.z, + }, nil +} + +// GetChallengeBytes returns bytes that need to be hashed for generating challenge. +// V || Ec || T_sigma || T_rho || R_E || R_sigma || R_rho || R_delta_sigma || R_delta_rho +func (mpc MembershipProofCommitting) GetChallengeBytes() []byte { + res := mpc.accumulator.ToAffineCompressed() + res = append(res, mpc.eC.ToAffineCompressed()...) + res = append(res, mpc.tSigma.ToAffineCompressed()...) + res = append(res, mpc.tRho.ToAffineCompressed()...) + res = append(res, mpc.capRE.Bytes()...) + res = append(res, mpc.capRSigma.ToAffineCompressed()...) + res = append(res, mpc.capRRho.ToAffineCompressed()...) + res = append(res, mpc.capRDeltaSigma.ToAffineCompressed()...) + res = append(res, mpc.capRDeltaRho.ToAffineCompressed()...) + return res +} + +// GenProof computes the s values for Fiat-Shamir and return the actual +// proof to be sent to the verifier given the challenge c. +func (mpc *MembershipProofCommitting) GenProof(c curves.Scalar) *MembershipProof { + // s_y = r_y + c*y + sY := schnorr(mpc.blindingFactor, mpc.witnessValue, c) + // s_σ = r_σ + c*σ + sSigma := schnorr(mpc.rSigma, mpc.sigma, c) + // s_ρ = r_ρ + c*ρ + sRho := schnorr(mpc.rRho, mpc.rho, c) + // s_δσ = rδσ + c*δ_σ + sDeltaSigma := schnorr(mpc.rDeltaSigma, mpc.deltaSigma, c) + // s_δρ = rδρ + c*δ_ρ + sDeltaRho := schnorr(mpc.rDeltaRho, mpc.deltaRho, c) + + return &MembershipProof{ + mpc.eC, + mpc.tSigma, + mpc.tRho, + sSigma, + sRho, + sDeltaSigma, + sDeltaRho, + sY, + } +} + +func schnorr(r, v, challenge curves.Scalar) curves.Scalar { + res := v + res = res.Mul(challenge) + res = res.Add(r) + return res +} + +type membershipProofMarshal struct { + EC []byte `bare:"e_c"` + TSigma []byte `bare:"t_sigma"` + TRho []byte `bare:"t_rho"` + SSigma []byte `bare:"s_sigma"` + SRho []byte `bare:"s_rho"` + SDeltaSigma []byte `bare:"s_delta_sigma"` + SDeltaRho []byte `bare:"s_delta_rho"` + SY []byte `bare:"s_y"` + Curve string `bare:"curve"` +} + +// MembershipProof contains values in the proof to be verified +type MembershipProof struct { + eC curves.Point + tSigma curves.Point + tRho curves.Point + sSigma curves.Scalar + sRho curves.Scalar + sDeltaSigma curves.Scalar + sDeltaRho curves.Scalar + sY curves.Scalar +} + +// Finalize computes values in the proof to be verified. 
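// A typical verifier-side flow (a sketch, following the usage in proof_test.go) is to
// finalize the received proof under the prover's challenge and then check that the
// recomputed Fiat-Shamir challenge matches:
//
//	finalProof, err := proof.Finalize(acc, params, pk, challenge)
//	if err != nil {
//		// reject
//	}
//	if !bytes.Equal(finalProof.GetChallenge(curve).Bytes(), challenge.Bytes()) {
//		// reject
//	}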
+func (mp *MembershipProof) Finalize( + acc *Accumulator, + pp *ProofParams, + pk *PublicKey, + challenge curves.Scalar, +) (*MembershipProofFinal, error) { + // R_σ = s_δ X + c T_σ + negTSigma := mp.tSigma + negTSigma = negTSigma.Neg() + capRSigma := pp.x.Mul(mp.sSigma) + capRSigma = capRSigma.Add(negTSigma.Mul(challenge)) + + // R_ρ = s_ρ Y + c T_ρ + negTRho := mp.tRho + negTRho = negTRho.Neg() + capRRho := pp.y.Mul(mp.sRho) + capRRho = capRRho.Add(negTRho.Mul(challenge)) + + // R_δσ = s_y T_σ - s_δσ X + negX := pp.x + negX = negX.Neg() + capRDeltaSigma := mp.tSigma.Mul(mp.sY) + capRDeltaSigma = capRDeltaSigma.Add(negX.Mul(mp.sDeltaSigma)) + + // R_δρ = s_y T_ρ - s_δρ Y + negY := pp.y + negY = negY.Neg() + capRDeltaRho := mp.tRho.Mul(mp.sY) + capRDeltaRho = capRDeltaRho.Add(negY.Mul(mp.sDeltaRho)) + + // tildeP + g2 := pk.value.Generator() + + // Compute capRE, the pairing + // E_c * s_y + eCsY := mp.eC.Mul(mp.sY) + + // (-s_delta_sigma - s_delta_rho) * Z + exp := mp.sDeltaSigma + exp = exp.Add(mp.sDeltaRho) + exp = exp.Neg() + expZ := pp.z.Mul(exp) + + // (-c) * V + exp = challenge.Neg() + expV := acc.value.Mul(exp) + + // E_c * s_y + (-s_delta_sigma - s_delta_rho) * Z + (-c) * V + lhs := eCsY.Add(expZ).Add(expV) + + // (-s_sigma - s_rho) * Z + exp = mp.sSigma + exp = exp.Add(mp.sRho) + exp = exp.Neg() + expZ2 := pp.z.Mul(exp) + + // E_c * c + cEc := mp.eC.Mul(challenge) + + // (-s_sigma - s_rho) * Z + E_c * c + rhs := cEc.Add(expZ2) + + // Prepare + lhsPrep, ok := lhs.(curves.PairingPoint) + if !ok { + return nil, errors.New("incorrect type conversion") + } + g2Prep, ok := g2.(curves.PairingPoint) + if !ok { + return nil, errors.New("incorrect type conversion") + } + rhsPrep, ok := rhs.(curves.PairingPoint) + if !ok { + return nil, errors.New("incorrect type conversion") + } + pkPrep := pk.value + + // capRE + capRE := g2Prep.MultiPairing(lhsPrep, g2Prep, rhsPrep, pkPrep) + + return &MembershipProofFinal{ + acc.value, + mp.eC, + mp.tSigma, + mp.tRho, + capRE, + capRSigma, + capRRho, + capRDeltaSigma, + capRDeltaRho, + }, nil +} + +// MarshalBinary converts MembershipProof to bytes +func (mp MembershipProof) MarshalBinary() ([]byte, error) { + tv := &membershipProofMarshal{ + EC: mp.eC.ToAffineCompressed(), + TSigma: mp.tSigma.ToAffineCompressed(), + TRho: mp.tRho.ToAffineCompressed(), + SSigma: mp.sSigma.Bytes(), + SRho: mp.sRho.Bytes(), + SDeltaSigma: mp.sDeltaSigma.Bytes(), + SDeltaRho: mp.sDeltaRho.Bytes(), + SY: mp.sY.Bytes(), + Curve: mp.eC.CurveName(), + } + return bare.Marshal(tv) +} + +// UnmarshalBinary converts bytes to MembershipProof +func (mp *MembershipProof) UnmarshalBinary(data []byte) error { + if data == nil { + return fmt.Errorf("expected non-zero byte sequence") + } + tv := new(membershipProofMarshal) + err := bare.Unmarshal(data, tv) + if err != nil { + return err + } + curve := curves.GetCurveByName(tv.Curve) + if curve == nil { + return fmt.Errorf("invalid curve") + } + eC, err := curve.NewIdentityPoint().FromAffineCompressed(tv.EC) + if err != nil { + return err + } + tSigma, err := curve.NewIdentityPoint().FromAffineCompressed(tv.TSigma) + if err != nil { + return err + } + tRho, err := curve.NewIdentityPoint().FromAffineCompressed(tv.TRho) + if err != nil { + return err + } + sSigma, err := curve.NewScalar().SetBytes(tv.SSigma) + if err != nil { + return err + } + sRho, err := curve.NewScalar().SetBytes(tv.SRho) + if err != nil { + return err + } + sDeltaSigma, err := curve.NewScalar().SetBytes(tv.SDeltaSigma) + if err != nil { + return err + } + sDeltaRho, 
err := curve.NewScalar().SetBytes(tv.SDeltaRho) + if err != nil { + return err + } + sY, err := curve.NewScalar().SetBytes(tv.SY) + if err != nil { + return err + } + + mp.eC = eC + mp.tSigma = tSigma + mp.tRho = tRho + mp.sSigma = sSigma + mp.sRho = sRho + mp.sDeltaSigma = sDeltaSigma + mp.sDeltaRho = sDeltaRho + mp.sY = sY + + return nil +} + +// MembershipProofFinal contains values that are input to Fiat-Shamir Heuristic +type MembershipProofFinal struct { + accumulator curves.Point + eC curves.Point + tSigma curves.Point + tRho curves.Point + capRE curves.Scalar + capRSigma curves.Point + capRRho curves.Point + capRDeltaSigma curves.Point + capRDeltaRho curves.Point +} + +// GetChallenge computes Fiat-Shamir Heuristic taking input values of MembershipProofFinal +func (m MembershipProofFinal) GetChallenge(curve *curves.PairingCurve) curves.Scalar { + res := m.accumulator.ToAffineCompressed() + res = append(res, m.eC.ToAffineCompressed()...) + res = append(res, m.tSigma.ToAffineCompressed()...) + res = append(res, m.tRho.ToAffineCompressed()...) + res = append(res, m.capRE.Bytes()...) + res = append(res, m.capRSigma.ToAffineCompressed()...) + res = append(res, m.capRRho.ToAffineCompressed()...) + res = append(res, m.capRDeltaSigma.ToAffineCompressed()...) + res = append(res, m.capRDeltaRho.ToAffineCompressed()...) + challenge := curve.Scalar.Hash(res) + return challenge +} diff --git a/accumulator/proof_test.go b/accumulator/proof_test.go new file mode 100644 index 0000000..b2228d7 --- /dev/null +++ b/accumulator/proof_test.go @@ -0,0 +1,182 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package accumulator + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +func TestProofParamsMarshal(t *testing.T) { + curve := curves.BLS12381(&curves.PointBls12381G1{}) + sk, _ := new(SecretKey).New(curve, []byte("1234567890")) + pk, _ := sk.GetPublicKey(curve) + + params, err := new(ProofParams).New(curve, pk, []byte("entropy")) + require.NoError(t, err) + require.NotNil(t, params.x) + require.NotNil(t, params.y) + require.NotNil(t, params.z) + + bytes, err := params.MarshalBinary() + require.NoError(t, err) + require.NotNil(t, bytes) + + params2 := &ProofParams{ + curve.PointG1.Generator(), + curve.PointG1.Generator(), + curve.PointG1.Generator(), + } + err = params2.UnmarshalBinary(bytes) + require.NoError(t, err) + require.True(t, params.x.Equal(params2.x)) + require.True(t, params.y.Equal(params2.y)) + require.True(t, params.z.Equal(params2.z)) +} + +func TestMembershipProof(t *testing.T) { + curve := curves.BLS12381(&curves.PointBls12381G1{}) + sk, _ := new(SecretKey).New(curve, []byte("1234567890")) + pk, _ := sk.GetPublicKey(curve) + + element1 := curve.Scalar.Hash([]byte("3")) + element2 := curve.Scalar.Hash([]byte("4")) + element3 := curve.Scalar.Hash([]byte("5")) + element4 := curve.Scalar.Hash([]byte("6")) + element5 := curve.Scalar.Hash([]byte("7")) + element6 := curve.Scalar.Hash([]byte("8")) + element7 := curve.Scalar.Hash([]byte("9")) + elements := []Element{element1, element2, element3, element4, element5, element6, element7} + + // Initiate a new accumulator + acc, err := new(Accumulator).WithElements(curve, sk, elements) + require.NoError(t, err) + require.NotNil(t, acc.value) + + // Initiate a new membership witness for value elements[3] + wit, err := new(MembershipWitness).New(elements[3], acc, sk) + require.NoError(t, err) + require.Equal(t, wit.y, 
elements[3]) + + // Create proof parameters, which contains randomly sampled G1 points X, Y, Z, K + params, err := new(ProofParams).New(curve, pk, []byte("entropy")) + require.NoError(t, err) + require.NotNil(t, params.x) + require.NotNil(t, params.y) + require.NotNil(t, params.z) + + mpc, err := new(MembershipProofCommitting).New(wit, acc, params, pk) + require.NoError(t, err) + testMPC(t, mpc) + + challenge := curve.Scalar.Hash(mpc.GetChallengeBytes()) + require.NotNil(t, challenge) + + proof := mpc.GenProof(challenge) + require.NotNil(t, proof) + testProof(t, proof) + + finalProof, err := proof.Finalize(acc, params, pk, challenge) + require.NoError(t, err) + require.NotNil(t, finalProof) + testFinalProof(t, finalProof) + + challenge2 := finalProof.GetChallenge(curve) + require.Equal(t, challenge, challenge2) + + // Check we can still have a valid proof even if accumulator and witness are updated + data1 := curve.Scalar.Hash([]byte("1")) + data2 := curve.Scalar.Hash([]byte("2")) + data3 := curve.Scalar.Hash([]byte("3")) + data4 := curve.Scalar.Hash([]byte("4")) + data5 := curve.Scalar.Hash([]byte("5")) + data := []Element{data1, data2, data3, data4, data5} + additions := data[0:2] + deletions := data[2:5] + _, coefficients, err := acc.Update(sk, additions, deletions) + require.NoError(t, err) + require.NotNil(t, coefficients) + + _, err = wit.BatchUpdate(additions, deletions, coefficients) + require.NoError(t, err) + + newParams, err := new(ProofParams).New(curve, pk, []byte("entropy")) + require.NoError(t, err) + require.NotNil(t, newParams.x) + require.NotNil(t, newParams.y) + require.NotNil(t, newParams.z) + + newMPC, err := new(MembershipProofCommitting).New(wit, acc, newParams, pk) + require.NoError(t, err) + testMPC(t, newMPC) + + challenge3 := curve.Scalar.Hash(newMPC.GetChallengeBytes()) + require.NotNil(t, challenge3) + + newProof := newMPC.GenProof(challenge3) + require.NotNil(t, newProof) + testProof(t, newProof) + + newFinalProof, err := newProof.Finalize(acc, newParams, pk, challenge3) + require.NoError(t, err) + require.NotNil(t, newFinalProof) + testFinalProof(t, newFinalProof) + + challenge4 := newFinalProof.GetChallenge(curve) + require.Equal(t, challenge3, challenge4) +} + +func testMPC(t *testing.T, mpc *MembershipProofCommitting) { + require.NotNil(t, mpc.eC) + require.NotNil(t, mpc.tSigma) + require.NotNil(t, mpc.tRho) + require.NotNil(t, mpc.deltaSigma) + require.NotNil(t, mpc.deltaRho) + require.NotNil(t, mpc.blindingFactor) + require.NotNil(t, mpc.rSigma) + require.NotNil(t, mpc.rRho) + require.NotNil(t, mpc.rDeltaSigma) + require.NotNil(t, mpc.rDeltaRho) + require.NotNil(t, mpc.sigma) + require.NotNil(t, mpc.rho) + require.NotNil(t, mpc.capRSigma) + require.NotNil(t, mpc.capRRho) + require.NotNil(t, mpc.capRDeltaSigma) + require.NotNil(t, mpc.capRDeltaRho) + require.NotNil(t, mpc.capRE) + require.NotNil(t, mpc.accumulator) + require.NotNil(t, mpc.witnessValue) + require.NotNil(t, mpc.xG1) + require.NotNil(t, mpc.yG1) + require.NotNil(t, mpc.zG1) +} + +func testProof(t *testing.T, proof *MembershipProof) { + require.NotNil(t, proof.eC) + require.NotNil(t, proof.tSigma) + require.NotNil(t, proof.tRho) + require.NotNil(t, proof.sSigma) + require.NotNil(t, proof.sRho) + require.NotNil(t, proof.sDeltaSigma) + require.NotNil(t, proof.sDeltaRho) + require.NotNil(t, proof.sY) +} + +func testFinalProof(t *testing.T, finalProof *MembershipProofFinal) { + require.NotNil(t, finalProof.accumulator) + require.NotNil(t, finalProof.eC) + require.NotNil(t, finalProof.tSigma) + 
require.NotNil(t, finalProof.tRho) + require.NotNil(t, finalProof.capRE) + require.NotNil(t, finalProof.capRSigma) + require.NotNil(t, finalProof.capRRho) + require.NotNil(t, finalProof.capRDeltaSigma) + require.NotNil(t, finalProof.capRDeltaRho) +} diff --git a/accumulator/witness.go b/accumulator/witness.go new file mode 100644 index 0000000..8d74183 --- /dev/null +++ b/accumulator/witness.go @@ -0,0 +1,392 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package accumulator + +import ( + "errors" + "fmt" + + "git.sr.ht/~sircmpwn/go-bare" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +// MembershipWitness contains the witness c and the value y respect to the accumulator state. +type MembershipWitness struct { + c curves.Point + y curves.Scalar +} + +// New creates a new membership witness +func (mw *MembershipWitness) New( + y Element, + acc *Accumulator, + sk *SecretKey, +) (*MembershipWitness, error) { + if acc.value == nil || acc.value.IsIdentity() { + return nil, fmt.Errorf("value of accumulator should not be nil") + } + if sk.value == nil || sk.value.IsZero() { + return nil, fmt.Errorf("secret key should not be nil") + } + if y == nil || y.IsZero() { + return nil, fmt.Errorf("y should not be nil") + } + newAcc := &Accumulator{acc.value} + _, err := newAcc.Remove(sk, y) + if err != nil { + return nil, err + } + mw.c = newAcc.value + mw.y = y.Add(y.Zero()) + return mw, nil +} + +// Verify the MembershipWitness mw is a valid witness as per section 4 in +// +func (mw MembershipWitness) Verify(pk *PublicKey, acc *Accumulator) error { + if mw.c == nil || mw.y == nil || mw.c.IsIdentity() || mw.y.IsZero() { + return fmt.Errorf("c and y should not be nil") + } + + if pk.value == nil || pk.value.IsIdentity() { + return fmt.Errorf("invalid public key") + } + if acc.value == nil || acc.value.IsIdentity() { + return fmt.Errorf("accumulator value should not be nil") + } + + // Set -tildeP + g2, ok := pk.value.Generator().(curves.PairingPoint) + if !ok { + return errors.New("incorrect type conversion") + } + + // y*tildeP + tildeQ, tildeP is a G2 generator. 
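// A valid witness satisfies C = 1/(y+α) · V, where pk.value = α·tildeP, so
// e(C, y·tildeP + α·tildeP) = e(V, tildeP). The multi-pairing against -V below
// therefore evaluates to the identity exactly when the witness matches the accumulator.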
+ p, ok := g2.Mul(mw.y).Add(pk.value).(curves.PairingPoint) + if !ok { + return errors.New("incorrect type conversion") + } + + // Prepare + witness, ok := mw.c.(curves.PairingPoint) + if !ok { + return errors.New("incorrect type conversion") + } + v, ok := acc.value.Neg().(curves.PairingPoint) + if !ok { + return errors.New("incorrect type conversion") + } + + // Check e(witness, y*tildeP + tildeQ) * e(-acc, tildeP) == Identity + result := p.MultiPairing(witness, p, v, g2) + if !result.IsOne() { + return fmt.Errorf("invalid result") + } + + return nil +} + +// ApplyDelta returns C' = dA(y)/dD(y)*C + 1/dD(y) * +// according to the witness update protocol described in section 4 of +// https://eprint.iacr.org/2020/777.pdf +func (mw *MembershipWitness) ApplyDelta(delta *Delta) (*MembershipWitness, error) { + if mw.c == nil || mw.y == nil || delta == nil { + return nil, fmt.Errorf("y, c or delta should not be nil") + } + + // C' = dA(y)/dD(y)*C + 1/dD(y) * + mw.c = mw.c.Mul(delta.d).Add(delta.p) + return mw, nil +} + +// BatchUpdate performs batch update as described in section 4 +func (mw *MembershipWitness) BatchUpdate( + additions []Element, + deletions []Element, + coefficients []Coefficient, +) (*MembershipWitness, error) { + delta, err := evaluateDelta(mw.y, additions, deletions, coefficients) + if err != nil { + return nil, err + } + mw, err = mw.ApplyDelta(delta) + if err != nil { + return nil, fmt.Errorf("applyDelta fails") + } + return mw, nil +} + +// MultiBatchUpdate performs multi-batch update using epoch as described in section 4.2 +func (mw *MembershipWitness) MultiBatchUpdate( + A [][]Element, + D [][]Element, + C [][]Coefficient, +) (*MembershipWitness, error) { + delta, err := evaluateDeltas(mw.y, A, D, C) + if err != nil { + return nil, fmt.Errorf("evaluateDeltas fails") + } + mw, err = mw.ApplyDelta(delta) + if err != nil { + return nil, err + } + return mw, nil +} + +// MarshalBinary converts a membership witness to bytes +func (mw MembershipWitness) MarshalBinary() ([]byte, error) { + if mw.c == nil || mw.y == nil { + return nil, fmt.Errorf("c and y value should not be nil") + } + + result := append(mw.c.ToAffineCompressed(), mw.y.Bytes()...) 
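// Layout: compressed point C followed by scalar y; UnmarshalBinary splits the
// value back apart at the curve's compressed-point length.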
+ tv := &structMarshal{ + Value: result, + Curve: mw.c.CurveName(), + } + return bare.Marshal(tv) +} + +// UnmarshalBinary converts bytes into MembershipWitness +func (mw *MembershipWitness) UnmarshalBinary(data []byte) error { + if data == nil { + return fmt.Errorf("input data should not be nil") + } + tv := new(structMarshal) + err := bare.Unmarshal(data, tv) + if err != nil { + return err + } + curve := curves.GetCurveByName(tv.Curve) + if curve == nil { + return fmt.Errorf("invalid curve") + } + + ptLength := len(curve.Point.ToAffineCompressed()) + scLength := len(curve.Scalar.Bytes()) + expectedLength := ptLength + scLength + if len(tv.Value) != expectedLength { + return fmt.Errorf("invalid byte sequence") + } + cValue, err := curve.Point.FromAffineCompressed(tv.Value[:ptLength]) + if err != nil { + return err + } + yValue, err := curve.Scalar.SetBytes(tv.Value[ptLength:]) + if err != nil { + return err + } + mw.c = cValue + mw.y = yValue + return nil +} + +// Delta contains values d and p, where d should be the division dA(y)/dD(y) on some value y +// p should be equal to 1/dD * +type Delta struct { + d curves.Scalar + p curves.Point +} + +// MarshalBinary converts Delta into bytes +func (d *Delta) MarshalBinary() ([]byte, error) { + if d.d == nil || d.p == nil { + return nil, fmt.Errorf("d and p should not be nil") + } + var result []byte + result = append(result, d.p.ToAffineCompressed()...) + result = append(result, d.d.Bytes()...) + tv := &structMarshal{ + Value: result, + Curve: d.p.CurveName(), + } + return bare.Marshal(tv) +} + +// UnmarshalBinary converts data into Delta +func (d *Delta) UnmarshalBinary(data []byte) error { + if data == nil { + return fmt.Errorf("expected non-zero byte sequence") + } + + tv := new(structMarshal) + err := bare.Unmarshal(data, tv) + if err != nil { + return err + } + curve := curves.GetCurveByName(tv.Curve) + if curve == nil { + return fmt.Errorf("invalid curve") + } + + ptLength := len(curve.Point.ToAffineCompressed()) + scLength := len(curve.Scalar.Bytes()) + expectedLength := ptLength + scLength + if len(tv.Value) != expectedLength { + return fmt.Errorf("invalid byte sequence") + } + pValue, err := curve.NewIdentityPoint().FromAffineCompressed(tv.Value[:ptLength]) + if err != nil { + return err + } + dValue, err := curve.NewScalar().SetBytes(tv.Value[ptLength:]) + if err != nil { + return err + } + d.d = dValue + d.p = pValue + return nil +} + +// evaluateDeltas compute values used for membership witness batch update with epoch +// as described in section 4.2, page 11 of https://eprint.iacr.org/2020/777.pdf +func evaluateDeltas(y Element, A [][]Element, D [][]Element, C [][]Coefficient) (*Delta, error) { + if len(A) != len(D) || len(A) != len(C) { + return nil, fmt.Errorf("a, d, c should have same length") + } + + one := y.One() + size := len(A) + + // dA(x) = ∏ 1..n (yA_i - x) + aa := make([]curves.Scalar, 0) + // dD(x) = ∏ 1..m (yD_i - x) + dd := make([]curves.Scalar, 0) + + a := one + d := one + + // dA_{a->b}(y) = ∏ a..b dAs(y) + // dD_{a->b}(y) = ∏ a..b dDs(y) + for i := range size { + adds := A[i] + dels := D[i] + + // ta = dAs(y) + ta, err := dad(adds, y) + if err != nil { + return nil, fmt.Errorf("dad on additions fails") + } + // td = dDs(y) + td, err := dad(dels, y) + if err != nil { + return nil, fmt.Errorf("dad on deletions fails") + } + // ∏ a..b dAs(y) + a = a.Mul(ta) + // ∏ a..b dDs(y) + d = d.Mul(td) + + aa = append(aa, ta) + dd = append(dd, td) + } + + // If this fails, then this value was removed. 
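// dD(y) = ∏ (yD_i - y) vanishes exactly when y appears among the deletions, so a
// failed inversion means the witness's own element was removed and no valid update exists.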
+ d, err := d.Invert() + if err != nil { + return nil, fmt.Errorf("no inverse exists") + } + + // + p := make(polynomialPoint, 0, size) + + // Ωi->j+1 = ∑ 1..t (dAt * dDt-1) · Ω + for i := 0; i < size; i++ { + // t = i+1 + // ∏^(t-1)_(h=i+1) + ddh := one + + // dDi→t−1 (y) + for h := 0; h < i; h++ { + ddh = ddh.Mul(dd[h]) + } + + // ∏^(j+1)_(k=t+1) + dak := one + // dAt->j(y) + for k := i + 1; k < size; k++ { + dak = dak.Mul(aa[k]) + } + + // dDi->t-1(y) * dAt->j(y) + dak = dak.Mul(ddh) + pp := make(polynomialPoint, len(C[i])) + for j := 0; j < len(pp); j++ { + pp[j] = C[i][j] + } + + // dDi->t-1(y) * dAt->j(y) · Ω + pp, err := pp.Mul(dak) + if err != nil { + return nil, fmt.Errorf("pp.Mul fails") + } + + p, err = p.Add(pp) + if err != nil { + return nil, fmt.Errorf("pp.Add fails") + } + } + // dAi->j(y)/dDi->j(y) + a = a.Mul(d) + + // Ωi->j(y) + v, err := p.evaluate(y) + if err != nil { + return nil, fmt.Errorf("p.evaluate fails") + } + + // (1/dDi->j(y)) * Ωi->j(y) + v = v.Mul(d) + + // return + return &Delta{d: a, p: v}, nil +} + +// evaluateDelta computes values used for membership witness batch update +// as described in section 4.1 of https://eprint.iacr.org/2020/777.pdf +func evaluateDelta( + y Element, + additions []Element, + deletions []Element, + coefficients []Coefficient, +) (*Delta, error) { + // dD(y) = ∏ 1..m (yD_i - y), d = 1/dD(y) + var err error + d, err := dad(deletions, y) + if err != nil { + return nil, fmt.Errorf("dad fails on deletions") + } + d, err = d.Invert() + if err != nil { + return nil, fmt.Errorf("no inverse exists") + } + + // dA(y) = ∏ 1..n (yA_i - y) + a, err := dad(additions, y) + if err != nil { + return nil, fmt.Errorf("dad fails on additions") + } + // dA(y)/dD(y) + a = a.Mul(d) + + // Create a PolynomialG1 from coefficients + p := make(polynomialPoint, len(coefficients)) + for i := range coefficients { + p[i] = coefficients[i] + } + + // + v, err := p.evaluate(y) + if err != nil { + return nil, fmt.Errorf("p.evaluate fails") + } + // 1/dD * + v = v.Mul(d) + + return &Delta{d: a, p: v}, nil +} diff --git a/accumulator/witness_test.go b/accumulator/witness_test.go new file mode 100644 index 0000000..e21a51a --- /dev/null +++ b/accumulator/witness_test.go @@ -0,0 +1,229 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package accumulator + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +func Test_Membership_Witness_New(t *testing.T) { + curve := curves.BLS12381(&curves.PointBls12381G1{}) + var seed [32]byte + key, _ := new(SecretKey).New(curve, seed[:]) + acc, _ := new(Accumulator).New(curve) + e := curve.Scalar.New(2) + mw, err := new(MembershipWitness).New(e, acc, key) + require.NoError(t, err) + require.NotNil(t, mw.c) + require.NotNil(t, mw.y) +} + +func Test_Membership_Witness_Marshal(t *testing.T) { + curve := curves.BLS12381(&curves.PointBls12381G1{}) + mw := &MembershipWitness{ + curve.PointG1.Generator().Mul(curve.Scalar.New(10)), + curve.Scalar.New(15), + } + data, err := mw.MarshalBinary() + require.NoError(t, err) + require.NotNil(t, data) + newMW := &MembershipWitness{} + err = newMW.UnmarshalBinary(data) + require.NoError(t, err) + require.True(t, mw.c.Equal(newMW.c)) + require.Equal(t, 0, mw.y.Cmp(newMW.y)) +} + +func Test_Membership(t *testing.T) { + curve := curves.BLS12381(&curves.PointBls12381G1{}) + sk, _ := new(SecretKey).New(curve, []byte("1234567890")) + pk, _ := sk.GetPublicKey(curve) + + element1 := curve.Scalar.Hash([]byte("3")) + element2 := curve.Scalar.Hash([]byte("4")) + element3 := curve.Scalar.Hash([]byte("5")) + element4 := curve.Scalar.Hash([]byte("6")) + element5 := curve.Scalar.Hash([]byte("7")) + element6 := curve.Scalar.Hash([]byte("8")) + element7 := curve.Scalar.Hash([]byte("9")) + elements := []Element{element1, element2, element3, element4, element5, element6, element7} + + // nm_witness_max works as well if set to value larger than 0 for this test.x + acc, err := new(Accumulator).WithElements(curve, sk, elements) + require.NoError(t, err) + require.NotNil(t, acc.value) + require.False(t, acc.value.IsIdentity()) + require.True(t, acc.value.IsOnCurve()) + require.NotEqual(t, acc.value, curve.NewG1GeneratorPoint()) + + wit, err := new(MembershipWitness).New(elements[3], acc, sk) + require.NoError(t, err) + require.Equal(t, wit.y, elements[3]) + + err = wit.Verify(pk, acc) + require.NoError(t, err) + + // Test wrong cases, forge a wrong witness + wrongWit := MembershipWitness{ + curve.PointG1.Identity(), + curve.Scalar.One(), + } + err = wrongWit.Verify(pk, acc) + require.Error(t, err) + + // Test wrong cases, forge a wrong accumulator + wrongAcc := &Accumulator{ + curve.PointG1.Generator(), + } + err = wit.Verify(pk, wrongAcc) + require.Error(t, err) +} + +func Test_Membership_Batch_Update(t *testing.T) { + curve := curves.BLS12381(&curves.PointBls12381G1{}) + sk, _ := new(SecretKey).New(curve, []byte("1234567890")) + pk, _ := sk.GetPublicKey(curve) + + element1 := curve.Scalar.Hash([]byte("3")) + element2 := curve.Scalar.Hash([]byte("4")) + element3 := curve.Scalar.Hash([]byte("5")) + element4 := curve.Scalar.Hash([]byte("6")) + element5 := curve.Scalar.Hash([]byte("7")) + element6 := curve.Scalar.Hash([]byte("8")) + element7 := curve.Scalar.Hash([]byte("9")) + elements := []Element{element1, element2, element3, element4, element5, element6, element7} + + // nm_witness_max works as well if set to value larger than 0 for this test. 
+ acc, err := new(Accumulator).WithElements(curve, sk, elements) + require.NoError(t, err) + require.NotNil(t, acc.value) + + wit, err := new(MembershipWitness).New(elements[3], acc, sk) + require.NoError(t, err) + require.Equal(t, wit.y, elements[3]) + + err = wit.Verify(pk, acc) + require.Nil(t, err) + + data1 := curve.Scalar.Hash([]byte("1")) + data2 := curve.Scalar.Hash([]byte("2")) + data3 := curve.Scalar.Hash([]byte("3")) + data4 := curve.Scalar.Hash([]byte("4")) + data5 := curve.Scalar.Hash([]byte("5")) + data := []Element{data1, data2, data3, data4, data5} + additions := data[0:2] + deletions := data[2:5] + _, coefficients, err := acc.Update(sk, additions, deletions) + require.NoError(t, err) + require.NotNil(t, coefficients) + + _, err = wit.BatchUpdate(additions, deletions, coefficients) + require.NoError(t, err) + err = wit.Verify(pk, acc) + require.Nil(t, err) +} + +func Test_Membership_Multi_Batch_Update(t *testing.T) { + curve := curves.BLS12381(&curves.PointBls12381G1{}) + sk, _ := new(SecretKey).New(curve, []byte("1234567890")) + pk, _ := sk.GetPublicKey(curve) + + element1 := curve.Scalar.Hash([]byte("3")) + element2 := curve.Scalar.Hash([]byte("4")) + element3 := curve.Scalar.Hash([]byte("5")) + element4 := curve.Scalar.Hash([]byte("6")) + element5 := curve.Scalar.Hash([]byte("7")) + element6 := curve.Scalar.Hash([]byte("8")) + element7 := curve.Scalar.Hash([]byte("9")) + element8 := curve.Scalar.Hash([]byte("10")) + element9 := curve.Scalar.Hash([]byte("11")) + element10 := curve.Scalar.Hash([]byte("12")) + element11 := curve.Scalar.Hash([]byte("13")) + element12 := curve.Scalar.Hash([]byte("14")) + element13 := curve.Scalar.Hash([]byte("15")) + element14 := curve.Scalar.Hash([]byte("16")) + element15 := curve.Scalar.Hash([]byte("17")) + element16 := curve.Scalar.Hash([]byte("18")) + element17 := curve.Scalar.Hash([]byte("19")) + element18 := curve.Scalar.Hash([]byte("20")) + elements := []Element{ + element1, + element2, + element3, + element4, + element5, + element6, + element7, + element8, + element9, + element10, + element11, + element12, + element13, + element14, + element15, + element16, + element17, + element18, + } + acc, err := new(Accumulator).WithElements(curve, sk, elements) + require.NoError(t, err) + require.NotNil(t, acc.value) + + wit, err := new(MembershipWitness).New(elements[3], acc, sk) + require.NoError(t, err) + + err = wit.Verify(pk, acc) + require.Nil(t, err) + + data1 := curve.Scalar.Hash([]byte("1")) + data2 := curve.Scalar.Hash([]byte("2")) + data3 := curve.Scalar.Hash([]byte("3")) + data4 := curve.Scalar.Hash([]byte("4")) + data5 := curve.Scalar.Hash([]byte("5")) + data := []Element{data1, data2, data3, data4, data5} + adds1 := data[0:2] + dels1 := data[2:5] + _, coeffs1, err := acc.Update(sk, adds1, dels1) + require.NoError(t, err) + require.NotNil(t, coeffs1) + + dels2 := elements[8:10] + _, coeffs2, err := acc.Update(sk, []Element{}, dels2) + require.NoError(t, err) + require.NotNil(t, coeffs2) + + dels3 := elements[11:14] + _, coeffs3, err := acc.Update(sk, []Element{}, dels3) + require.NoError(t, err) + require.NotNil(t, coeffs3) + + a := make([][]Element, 3) + a[0] = adds1 + a[1] = []Element{} + a[2] = []Element{} + + d := make([][]Element, 3) + d[0] = dels1 + d[1] = dels2 + d[2] = dels3 + + c := make([][]Coefficient, 3) + c[0] = coeffs1 + c[1] = coeffs2 + c[2] = coeffs3 + + _, err = wit.MultiBatchUpdate(a, d, c) + require.NoError(t, err) + + err = wit.Verify(pk, acc) + require.Nil(t, err) +} diff --git a/aead/aes_gcm.go 
b/aead/aes_gcm.go new file mode 100644 index 0000000..cf51191 --- /dev/null +++ b/aead/aes_gcm.go @@ -0,0 +1,121 @@ +// Package aead provides authenticated encryption with associated data (AEAD) implementations +// following NIST SP 800-38D standards for secure data encryption and integrity verification. +package aead + +import ( + "crypto/aes" + "crypto/cipher" + "crypto/rand" + "fmt" + "io" +) + +const ( + // NonceSize defines the standard 96-bit nonce size for optimal GCM performance + NonceSize = 12 + // TagSize defines the 128-bit authentication tag size for GCM + TagSize = 16 + // KeySize defines the AES-256 key size + KeySize = 32 +) + +// AESGCMCipher wraps AES-GCM operations with secure defaults +type AESGCMCipher struct { + gcm cipher.AEAD +} + +// NewAESGCM creates a new AES-GCM cipher with the provided 256-bit key +func NewAESGCM(key []byte) (*AESGCMCipher, error) { + if len(key) != KeySize { + return nil, fmt.Errorf("invalid key size: expected %d bytes, got %d", KeySize, len(key)) + } + + block, err := aes.NewCipher(key) + if err != nil { + return nil, fmt.Errorf("failed to create AES cipher: %w", err) + } + + gcm, err := cipher.NewGCM(block) + if err != nil { + return nil, fmt.Errorf("failed to create GCM mode: %w", err) + } + + return &AESGCMCipher{gcm: gcm}, nil +} + +// Encrypt encrypts plaintext with additional authenticated data (AAD) using AES-GCM +// Returns nonce + ciphertext + tag concatenated for easy storage +func (a *AESGCMCipher) Encrypt(plaintext, aad []byte) ([]byte, error) { + // Generate cryptographically secure random nonce + nonce, err := generateNonce() + if err != nil { + return nil, fmt.Errorf("failed to generate nonce: %w", err) + } + + // Encrypt with GCM (includes authentication tag) + ciphertext := a.gcm.Seal(nil, nonce, plaintext, aad) + + // Prepend nonce to ciphertext for transport + result := make([]byte, NonceSize+len(ciphertext)) + copy(result[:NonceSize], nonce) + copy(result[NonceSize:], ciphertext) + + return result, nil +} + +// Decrypt decrypts and authenticates ciphertext with AAD using AES-GCM +// Expects input format: nonce + ciphertext + tag +func (a *AESGCMCipher) Decrypt(data, aad []byte) ([]byte, error) { + if len(data) < NonceSize+TagSize { + return nil, fmt.Errorf("invalid ciphertext length: minimum %d bytes required", NonceSize+TagSize) + } + + // Extract nonce and ciphertext + nonce := data[:NonceSize] + ciphertext := data[NonceSize:] + + // Decrypt and verify authentication tag + plaintext, err := a.gcm.Open(nil, nonce, ciphertext, aad) + if err != nil { + return nil, fmt.Errorf("decryption and authentication failed: %w", err) + } + + return plaintext, nil +} + +// EncryptWithNonce encrypts plaintext using a provided nonce (for testing purposes) +// WARNING: Nonce reuse can compromise security. Use only for testing. 
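Before this test-only variant, a minimal sketch of the Encrypt/Decrypt round trip defined above; the key source and the "header-v1" AAD are illustrative and errors are elided:

key := make([]byte, KeySize)
_, _ = rand.Read(key)                          // crypto/rand, already imported by this file
aad := []byte("header-v1")                     // illustrative associated data
gcm, _ := NewAESGCM(key)
sealed, _ := gcm.Encrypt([]byte("hello"), aad) // nonce || ciphertext || tag
opened, _ := gcm.Decrypt(sealed, aad)          // fails if nonce, ciphertext, tag, or AAD was altered
_ = opened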
+func (a *AESGCMCipher) EncryptWithNonce(plaintext, aad, nonce []byte) ([]byte, error) { + if len(nonce) != NonceSize { + return nil, fmt.Errorf("invalid nonce size: expected %d bytes, got %d", NonceSize, len(nonce)) + } + + // Encrypt with provided nonce + ciphertext := a.gcm.Seal(nil, nonce, plaintext, aad) + + // Prepend nonce to ciphertext + result := make([]byte, NonceSize+len(ciphertext)) + copy(result[:NonceSize], nonce) + copy(result[NonceSize:], ciphertext) + + return result, nil +} + +// generateNonce creates a cryptographically secure 96-bit nonce +func generateNonce() ([]byte, error) { + nonce := make([]byte, NonceSize) + if _, err := io.ReadFull(rand.Reader, nonce); err != nil { + return nil, fmt.Errorf("failed to read random bytes: %w", err) + } + return nonce, nil +} + +// GetNonceSize returns the nonce size used by this cipher +func (a *AESGCMCipher) GetNonceSize() int { + return NonceSize +} + +// GetTagSize returns the authentication tag size +func (a *AESGCMCipher) GetTagSize() int { + return TagSize +} diff --git a/aead/aes_gcm_test.go b/aead/aes_gcm_test.go new file mode 100644 index 0000000..a80e6e1 --- /dev/null +++ b/aead/aes_gcm_test.go @@ -0,0 +1,349 @@ +package aead + +import ( + "crypto/rand" + "fmt" + "testing" +) + +func TestNewAESGCM(t *testing.T) { + tests := []struct { + name string + keySize int + wantErr bool + }{ + {"valid 256-bit key", 32, false}, + {"invalid 128-bit key", 16, true}, + {"invalid 192-bit key", 24, true}, + {"empty key", 0, true}, + {"oversized key", 64, true}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + key := make([]byte, tt.keySize) + _, err := rand.Read(key) + if err != nil { + t.Fatalf("Failed to generate test key: %v", err) + } + + cipher, err := NewAESGCM(key) + if (err != nil) != tt.wantErr { + t.Errorf("NewAESGCM() error = %v, wantErr %v", err, tt.wantErr) + return + } + + if !tt.wantErr && cipher == nil { + t.Error("NewAESGCM() returned nil cipher without error") + } + }) + } +} + +func TestAESGCMEncryptDecrypt(t *testing.T) { + // Generate random 256-bit key + key := make([]byte, KeySize) + _, err := rand.Read(key) + if err != nil { + t.Fatalf("Failed to generate test key: %v", err) + } + + cipher, err := NewAESGCM(key) + if err != nil { + t.Fatalf("Failed to create AES-GCM cipher: %v", err) + } + + tests := []struct { + name string + plaintext []byte + aad []byte + }{ + {"empty plaintext", []byte{}, nil}, + {"small plaintext", []byte("hello"), nil}, + {"large plaintext", make([]byte, 1024), nil}, + {"with AAD", []byte("secret data"), []byte("additional auth data")}, + {"unicode plaintext", []byte("Hello, 世界! 
🔐"), []byte("metadata")}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + // Fill large plaintext with test data + if len(tt.plaintext) == 1024 { + for i := range tt.plaintext { + tt.plaintext[i] = byte(i % 256) + } + } + + // Encrypt + ciphertext, err := cipher.Encrypt(tt.plaintext, tt.aad) + if err != nil { + t.Fatalf("Encrypt() error = %v", err) + } + + // Verify ciphertext structure + expectedLen := NonceSize + len(tt.plaintext) + TagSize + if len(ciphertext) != expectedLen { + t.Errorf("Unexpected ciphertext length: got %d, want %d", len(ciphertext), expectedLen) + } + + // Decrypt + decrypted, err := cipher.Decrypt(ciphertext, tt.aad) + if err != nil { + t.Fatalf("Decrypt() error = %v", err) + } + + // Verify plaintext matches + if string(decrypted) != string(tt.plaintext) { + t.Errorf("Decrypted text doesn't match original: got %q, want %q", string(decrypted), string(tt.plaintext)) + } + }) + } +} + +func TestAESGCMAuthenticationFailure(t *testing.T) { + key := make([]byte, KeySize) + _, err := rand.Read(key) + if err != nil { + t.Fatalf("Failed to generate test key: %v", err) + } + + cipher, err := NewAESGCM(key) + if err != nil { + t.Fatalf("Failed to create AES-GCM cipher: %v", err) + } + + plaintext := []byte("authenticated data") + aad := []byte("additional data") + + ciphertext, err := cipher.Encrypt(plaintext, aad) + if err != nil { + t.Fatalf("Encrypt() error = %v", err) + } + + tests := []struct { + name string + modifyFunc func([]byte) []byte + modifyAAD func([]byte) []byte + expectFailure bool + }{ + { + "tampered ciphertext", + func(data []byte) []byte { + if len(data) > NonceSize+5 { + data[NonceSize+5] ^= 0x01 // Flip one bit in ciphertext + } + return data + }, + nil, + true, + }, + { + "tampered nonce", + func(data []byte) []byte { + if len(data) > 5 { + data[5] ^= 0x01 // Flip one bit in nonce + } + return data + }, + nil, + true, + }, + { + "tampered AAD", + nil, + func(aad []byte) []byte { + modified := make([]byte, len(aad)) + copy(modified, aad) + if len(modified) > 0 { + modified[0] ^= 0x01 + } + return modified + }, + true, + }, + { + "valid decryption", + nil, + nil, + false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + testCiphertext := make([]byte, len(ciphertext)) + copy(testCiphertext, ciphertext) + testAAD := make([]byte, len(aad)) + copy(testAAD, aad) + + if tt.modifyFunc != nil { + testCiphertext = tt.modifyFunc(testCiphertext) + } + if tt.modifyAAD != nil { + testAAD = tt.modifyAAD(testAAD) + } + + _, err := cipher.Decrypt(testCiphertext, testAAD) + if tt.expectFailure && err == nil { + t.Error("Expected decryption to fail due to tampering, but it succeeded") + } + if !tt.expectFailure && err != nil { + t.Errorf("Expected decryption to succeed, but got error: %v", err) + } + }) + } +} + +func TestAESGCMWithNonce(t *testing.T) { + key := make([]byte, KeySize) + _, err := rand.Read(key) + if err != nil { + t.Fatalf("Failed to generate test key: %v", err) + } + + cipher, err := NewAESGCM(key) + if err != nil { + t.Fatalf("Failed to create AES-GCM cipher: %v", err) + } + + plaintext := []byte("test message") + nonce := make([]byte, NonceSize) + _, err = rand.Read(nonce) + if err != nil { + t.Fatalf("Failed to generate nonce: %v", err) + } + + // Test encryption with provided nonce + ciphertext, err := cipher.EncryptWithNonce(plaintext, nil, nonce) + if err != nil { + t.Fatalf("EncryptWithNonce() error = %v", err) + } + + // Verify nonce is properly prepended + if string(ciphertext[:NonceSize]) 
!= string(nonce) { + t.Error("Nonce not properly prepended to ciphertext") + } + + // Test decryption works + decrypted, err := cipher.Decrypt(ciphertext, nil) + if err != nil { + t.Fatalf("Decrypt() error = %v", err) + } + + if string(decrypted) != string(plaintext) { + t.Errorf("Decrypted text doesn't match: got %q, want %q", string(decrypted), string(plaintext)) + } + + // Test invalid nonce size + invalidNonce := make([]byte, 8) + _, err = cipher.EncryptWithNonce(plaintext, nil, invalidNonce) + if err == nil { + t.Error("Expected error for invalid nonce size") + } +} + +func TestAESGCMInvalidCiphertext(t *testing.T) { + key := make([]byte, KeySize) + _, err := rand.Read(key) + if err != nil { + t.Fatalf("Failed to generate test key: %v", err) + } + + cipher, err := NewAESGCM(key) + if err != nil { + t.Fatalf("Failed to create AES-GCM cipher: %v", err) + } + + tests := []struct { + name string + ciphertext []byte + }{ + {"empty ciphertext", []byte{}}, + {"too short ciphertext", make([]byte, NonceSize)}, + {"minimal invalid", make([]byte, NonceSize+TagSize-1)}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + _, err := cipher.Decrypt(tt.ciphertext, nil) + if err == nil { + t.Error("Expected error for invalid ciphertext length") + } + }) + } +} + +func BenchmarkAESGCMEncrypt(b *testing.B) { + key := make([]byte, KeySize) + _, err := rand.Read(key) + if err != nil { + b.Fatalf("Failed to generate test key: %v", err) + } + + cipher, err := NewAESGCM(key) + if err != nil { + b.Fatalf("Failed to create AES-GCM cipher: %v", err) + } + + sizes := []int{64, 512, 1024, 4096} + + for _, size := range sizes { + b.Run(fmt.Sprintf("%dB", size), func(b *testing.B) { + plaintext := make([]byte, size) + _, err := rand.Read(plaintext) + if err != nil { + b.Fatalf("Failed to generate test data: %v", err) + } + + b.ResetTimer() + b.SetBytes(int64(size)) + + for i := 0; i < b.N; i++ { + _, err := cipher.Encrypt(plaintext, nil) + if err != nil { + b.Fatalf("Encrypt error: %v", err) + } + } + }) + } +} + +func BenchmarkAESGCMDecrypt(b *testing.B) { + key := make([]byte, KeySize) + _, err := rand.Read(key) + if err != nil { + b.Fatalf("Failed to generate test key: %v", err) + } + + cipher, err := NewAESGCM(key) + if err != nil { + b.Fatalf("Failed to create AES-GCM cipher: %v", err) + } + + sizes := []int{64, 512, 1024, 4096} + + for _, size := range sizes { + b.Run(fmt.Sprintf("%dB", size), func(b *testing.B) { + plaintext := make([]byte, size) + _, err := rand.Read(plaintext) + if err != nil { + b.Fatalf("Failed to generate test data: %v", err) + } + + ciphertext, err := cipher.Encrypt(plaintext, nil) + if err != nil { + b.Fatalf("Failed to encrypt test data: %v", err) + } + + b.ResetTimer() + b.SetBytes(int64(size)) + + for i := 0; i < b.N; i++ { + _, err := cipher.Decrypt(ciphertext, nil) + if err != nil { + b.Fatalf("Decrypt error: %v", err) + } + } + }) + } +} diff --git a/argon2/kdf.go b/argon2/kdf.go new file mode 100644 index 0000000..1fcf5ad --- /dev/null +++ b/argon2/kdf.go @@ -0,0 +1,213 @@ +// Package argon2 provides secure key derivation using Argon2id +package argon2 + +import ( + "crypto/rand" + "crypto/subtle" + "encoding/base64" + "fmt" + "strings" + + "golang.org/x/crypto/argon2" +) + +// Config defines Argon2id parameters +type Config struct { + Time uint32 // Number of iterations + Memory uint32 // Memory in KB + Parallelism uint8 // Number of threads + SaltLength uint32 // Salt length in bytes + KeyLength uint32 // Output key length in bytes +} + +// 
DefaultConfig returns secure default parameters +func DefaultConfig() *Config { + return &Config{ + Time: 1, + Memory: 64 * 1024, // 64MB + Parallelism: 4, + SaltLength: 32, + KeyLength: 32, + } +} + +// LightConfig returns lighter parameters for testing +func LightConfig() *Config { + return &Config{ + Time: 1, + Memory: 16 * 1024, // 16MB + Parallelism: 2, + SaltLength: 16, + KeyLength: 32, + } +} + +// HighSecurityConfig returns high-security parameters +func HighSecurityConfig() *Config { + return &Config{ + Time: 3, + Memory: 128 * 1024, // 128MB + Parallelism: 4, + SaltLength: 32, + KeyLength: 32, + } +} + +// KDF implements Argon2id key derivation +type KDF struct { + config *Config +} + +// New creates a new Argon2id KDF with the given configuration +func New(config *Config) *KDF { + if config == nil { + config = DefaultConfig() + } + return &KDF{config: config} +} + +// DeriveKey derives a key from password and salt +func (k *KDF) DeriveKey(password []byte, salt []byte) []byte { + return argon2.IDKey( + password, + salt, + k.config.Time, + k.config.Memory, + k.config.Parallelism, + k.config.KeyLength, + ) +} + +// GenerateSalt generates a cryptographically secure salt +func (k *KDF) GenerateSalt() ([]byte, error) { + salt := make([]byte, k.config.SaltLength) + if _, err := rand.Read(salt); err != nil { + return nil, fmt.Errorf("failed to generate salt: %w", err) + } + return salt, nil +} + +// HashPassword generates a hash with embedded salt and parameters +func (k *KDF) HashPassword(password []byte) (string, error) { + salt, err := k.GenerateSalt() + if err != nil { + return "", err + } + + hash := k.DeriveKey(password, salt) + + // Encode in PHC format: $argon2id$v=19$m=65536,t=1,p=4$salt$hash + encodedSalt := base64.RawStdEncoding.EncodeToString(salt) + encodedHash := base64.RawStdEncoding.EncodeToString(hash) + + return fmt.Sprintf("$argon2id$v=%d$m=%d,t=%d,p=%d$%s$%s", + argon2.Version, + k.config.Memory, + k.config.Time, + k.config.Parallelism, + encodedSalt, + encodedHash, + ), nil +} + +// VerifyPassword verifies a password against a PHC-formatted hash +func VerifyPassword(password []byte, encodedHash string) (bool, error) { + params, salt, hash, err := decodeHash(encodedHash) + if err != nil { + return false, err + } + + kdf := &KDF{config: params} + derivedHash := kdf.DeriveKey(password, salt) + + // Constant-time comparison + return subtle.ConstantTimeCompare(hash, derivedHash) == 1, nil +} + +// decodeHash parses PHC-formatted Argon2id hash +func decodeHash(encodedHash string) (*Config, []byte, []byte, error) { + parts := strings.Split(encodedHash, "$") + if len(parts) != 6 { + return nil, nil, nil, fmt.Errorf("invalid hash format") + } + + if parts[1] != "argon2id" { + return nil, nil, nil, fmt.Errorf("unsupported algorithm: %s", parts[1]) + } + + var version int + _, err := fmt.Sscanf(parts[2], "v=%d", &version) + if err != nil { + return nil, nil, nil, fmt.Errorf("failed to parse version: %w", err) + } + + if version != argon2.Version { + return nil, nil, nil, fmt.Errorf("unsupported Argon2 version: %d", version) + } + + var memory, time uint32 + var parallelism uint8 + _, err = fmt.Sscanf(parts[3], "m=%d,t=%d,p=%d", &memory, &time, ¶llelism) + if err != nil { + return nil, nil, nil, fmt.Errorf("failed to parse parameters: %w", err) + } + + salt, err := base64.RawStdEncoding.DecodeString(parts[4]) + if err != nil { + return nil, nil, nil, fmt.Errorf("failed to decode salt: %w", err) + } + + hash, err := base64.RawStdEncoding.DecodeString(parts[5]) + if err != nil { 
+ return nil, nil, nil, fmt.Errorf("failed to decode hash: %w", err) + } + + config := &Config{ + Time: time, + Memory: memory, + Parallelism: parallelism, + SaltLength: uint32(len(salt)), + KeyLength: uint32(len(hash)), + } + + return config, salt, hash, nil +} + +// CompareHashes performs constant-time comparison of two hashes +func CompareHashes(hash1, hash2 []byte) bool { + return subtle.ConstantTimeCompare(hash1, hash2) == 1 +} + +// ValidateConfig validates Argon2id parameters +func ValidateConfig(config *Config) error { + if config.Time < 1 { + return fmt.Errorf("time must be at least 1") + } + if config.Memory < 8*1024 { + return fmt.Errorf("memory must be at least 8MB") + } + if config.Parallelism < 1 { + return fmt.Errorf("parallelism must be at least 1") + } + if config.SaltLength < 8 { + return fmt.Errorf("salt length must be at least 8 bytes") + } + if config.KeyLength < 16 { + return fmt.Errorf("key length must be at least 16 bytes") + } + return nil +} + +// EstimateTime estimates the time required for key derivation +func EstimateTime(config *Config, iterations int) string { + // This is a rough estimate - actual time depends on hardware + baseTime := float64(config.Time) * float64(config.Memory) / (64 * 1024) + totalTime := baseTime * float64(iterations) + + if totalTime < 1 { + return fmt.Sprintf("%.2f ms", totalTime*1000) + } else if totalTime < 60 { + return fmt.Sprintf("%.2f s", totalTime) + } + return fmt.Sprintf("%.2f min", totalTime/60) +} diff --git a/argon2/kdf_test.go b/argon2/kdf_test.go new file mode 100644 index 0000000..8dd41c7 --- /dev/null +++ b/argon2/kdf_test.go @@ -0,0 +1,396 @@ +package argon2 + +import ( + "bytes" + "strings" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestKDF_DeriveKey(t *testing.T) { + kdf := New(DefaultConfig()) + + password := []byte("test-password") + salt := []byte("salt-must-be-at-least-16-bytes!!") + + // Derive key + key := kdf.DeriveKey(password, salt) + assert.Len(t, key, int(kdf.config.KeyLength)) + + // Same inputs should produce same key + key2 := kdf.DeriveKey(password, salt) + assert.Equal(t, key, key2) + + // Different password should produce different key + key3 := kdf.DeriveKey([]byte("different"), salt) + assert.NotEqual(t, key, key3) + + // Different salt should produce different key + salt2 := []byte("different-salt-at-least-16-bytes") + key4 := kdf.DeriveKey(password, salt2) + assert.NotEqual(t, key, key4) +} + +func TestKDF_GenerateSalt(t *testing.T) { + kdf := New(DefaultConfig()) + + salt1, err := kdf.GenerateSalt() + require.NoError(t, err) + assert.Len(t, salt1, int(kdf.config.SaltLength)) + + // Should generate different salt each time + salt2, err := kdf.GenerateSalt() + require.NoError(t, err) + assert.NotEqual(t, salt1, salt2) +} + +func TestKDF_HashPassword(t *testing.T) { + kdf := New(DefaultConfig()) + password := []byte("MySecureP@ssw0rd") + + hash, err := kdf.HashPassword(password) + require.NoError(t, err) + + // Check format + assert.True(t, strings.HasPrefix(hash, "$argon2id$")) + parts := strings.Split(hash, "$") + assert.Len(t, parts, 6) + + // Verify password + valid, err := VerifyPassword(password, hash) + require.NoError(t, err) + assert.True(t, valid) + + // Wrong password should fail + valid, err = VerifyPassword([]byte("wrong"), hash) + require.NoError(t, err) + assert.False(t, valid) +} + +func TestVerifyPassword(t *testing.T) { + testCases := []struct { + name string + password string + hash string + valid bool + 
wantErr bool + }{ + { + name: "valid password", + password: "password123", + hash: "$argon2id$v=19$m=65536,t=1,p=4$c2FsdC1tdXN0LWJlLWF0LWxlYXN0LTE2LWJ5dGVzISE$+4smaTt/N7ivKLrqsPIbTplUxDBRMxTKCYOcXWTJOEI", + valid: true, + }, + { + name: "invalid password", + password: "wrongpassword", + hash: "$argon2id$v=19$m=65536,t=1,p=4$c2FsdC1tdXN0LWJlLWF0LWxlYXN0LTE2LWJ5dGVzISE$+4smaTt/N7ivKLrqsPIbTplUxDBRMxTKCYOcXWTJOEI", + valid: false, + }, + { + name: "invalid format", + password: "password", + hash: "invalid-hash-format", + wantErr: true, + }, + { + name: "wrong algorithm", + password: "password", + hash: "$bcrypt$v=19$m=65536,t=1,p=4$salt$hash", + wantErr: true, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + valid, err := VerifyPassword([]byte(tc.password), tc.hash) + if tc.wantErr { + assert.Error(t, err) + } else { + require.NoError(t, err) + assert.Equal(t, tc.valid, valid) + } + }) + } +} + +func TestConfigurations(t *testing.T) { + configs := map[string]*Config{ + "default": DefaultConfig(), + "light": LightConfig(), + "high": HighSecurityConfig(), + } + + for name, config := range configs { + t.Run(name, func(t *testing.T) { + err := ValidateConfig(config) + assert.NoError(t, err) + + kdf := New(config) + password := []byte("test-password") + + hash, err := kdf.HashPassword(password) + require.NoError(t, err) + + valid, err := VerifyPassword(password, hash) + require.NoError(t, err) + assert.True(t, valid) + }) + } +} + +func TestValidateConfig(t *testing.T) { + testCases := []struct { + name string + config *Config + wantErr bool + errMsg string + }{ + { + name: "valid config", + config: DefaultConfig(), + wantErr: false, + }, + { + name: "time too low", + config: &Config{ + Time: 0, + Memory: 64 * 1024, + Parallelism: 4, + SaltLength: 32, + KeyLength: 32, + }, + wantErr: true, + errMsg: "time must be at least 1", + }, + { + name: "memory too low", + config: &Config{ + Time: 1, + Memory: 4 * 1024, + Parallelism: 4, + SaltLength: 32, + KeyLength: 32, + }, + wantErr: true, + errMsg: "memory must be at least 8MB", + }, + { + name: "parallelism too low", + config: &Config{ + Time: 1, + Memory: 64 * 1024, + Parallelism: 0, + SaltLength: 32, + KeyLength: 32, + }, + wantErr: true, + errMsg: "parallelism must be at least 1", + }, + { + name: "salt too short", + config: &Config{ + Time: 1, + Memory: 64 * 1024, + Parallelism: 4, + SaltLength: 4, + KeyLength: 32, + }, + wantErr: true, + errMsg: "salt length must be at least 8 bytes", + }, + { + name: "key too short", + config: &Config{ + Time: 1, + Memory: 64 * 1024, + Parallelism: 4, + SaltLength: 32, + KeyLength: 8, + }, + wantErr: true, + errMsg: "key length must be at least 16 bytes", + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + err := ValidateConfig(tc.config) + if tc.wantErr { + assert.Error(t, err) + assert.Contains(t, err.Error(), tc.errMsg) + } else { + assert.NoError(t, err) + } + }) + } +} + +func TestCompareHashes(t *testing.T) { + hash1 := []byte("hash1") + hash2 := []byte("hash1") + hash3 := []byte("hash2") + + assert.True(t, CompareHashes(hash1, hash2)) + assert.False(t, CompareHashes(hash1, hash3)) + assert.False(t, CompareHashes([]byte("short"), []byte("longer"))) +} + +func TestEstimateTime(t *testing.T) { + config := DefaultConfig() + + estimate := EstimateTime(config, 1) + assert.NotEmpty(t, estimate) + assert.True(t, strings.HasSuffix(estimate, "ms") || + strings.HasSuffix(estimate, "s") || + strings.HasSuffix(estimate, "min")) + + // Test different 
scales + estimate = EstimateTime(config, 100) + assert.NotEmpty(t, estimate) + + // High security config should take longer + highConfig := HighSecurityConfig() + highEstimate := EstimateTime(highConfig, 1) + assert.NotEmpty(t, highEstimate) +} + +func TestDecodeHash(t *testing.T) { + validHash := "$argon2id$v=19$m=65536,t=1,p=4$c2FsdA$aGFzaA" + + config, salt, hash, err := decodeHash(validHash) + require.NoError(t, err) + + assert.Equal(t, uint32(65536), config.Memory) + assert.Equal(t, uint32(1), config.Time) + assert.Equal(t, uint8(4), config.Parallelism) + assert.Equal(t, []byte("salt"), salt) + assert.Equal(t, []byte("hash"), hash) + + // Test invalid formats + invalidHashes := []string{ + "invalid", + "$bcrypt$v=19$m=65536,t=1,p=4$salt$hash", + "$argon2id$v=18$m=65536,t=1,p=4$salt$hash", // wrong version + "$argon2id$v=19$invalid$salt$hash", + "$argon2id$v=19$m=65536,t=1,p=4$!invalid!$hash", + } + + for _, h := range invalidHashes { + _, _, _, err := decodeHash(h) + assert.Error(t, err) + } +} + +func BenchmarkDeriveKey(b *testing.B) { + configs := map[string]*Config{ + "light": LightConfig(), + "default": DefaultConfig(), + "high": HighSecurityConfig(), + } + + password := []byte("benchmark-password") + salt := []byte("benchmark-salt-at-least-16-bytes") + + for name, config := range configs { + b.Run(name, func(b *testing.B) { + kdf := New(config) + b.ResetTimer() + for i := 0; i < b.N; i++ { + _ = kdf.DeriveKey(password, salt) + } + }) + } +} + +func BenchmarkHashPassword(b *testing.B) { + kdf := New(LightConfig()) // Use light config for benchmarks + password := []byte("benchmark-password") + + b.ResetTimer() + for i := 0; i < b.N; i++ { + _, _ = kdf.HashPassword(password) + } +} + +func BenchmarkVerifyPassword(b *testing.B) { + kdf := New(LightConfig()) + password := []byte("benchmark-password") + hash, _ := kdf.HashPassword(password) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + _, _ = VerifyPassword(password, hash) + } +} + +func TestConcurrentDerivation(t *testing.T) { + kdf := New(DefaultConfig()) + password := []byte("concurrent-test") + + // Generate multiple salts + salts := make([][]byte, 10) + for i := range salts { + salt, err := kdf.GenerateSalt() + require.NoError(t, err) + salts[i] = salt + } + + // Derive keys concurrently + results := make([][]byte, len(salts)) + done := make(chan int, len(salts)) + + for i, salt := range salts { + go func(idx int, s []byte) { + results[idx] = kdf.DeriveKey(password, s) + done <- idx + }(i, salt) + } + + // Wait for all goroutines + for i := 0; i < len(salts); i++ { + <-done + } + + // Verify all keys are different (different salts) + for i := 0; i < len(results)-1; i++ { + for j := i + 1; j < len(results); j++ { + assert.False(t, bytes.Equal(results[i], results[j])) + } + } +} + +func TestPerformanceBenchmark(t *testing.T) { + if testing.Short() { + t.Skip("Skipping performance benchmark in short mode") + } + + configs := []struct { + name string + config *Config + maxMs int64 + }{ + {"light", LightConfig(), 100}, + {"default", DefaultConfig(), 500}, + } + + password := []byte("perf-test") + + for _, tc := range configs { + t.Run(tc.name, func(t *testing.T) { + kdf := New(tc.config) + salt, _ := kdf.GenerateSalt() + + start := time.Now() + _ = kdf.DeriveKey(password, salt) + elapsed := time.Since(start).Milliseconds() + + t.Logf("%s config took %dms", tc.name, elapsed) + assert.Less(t, elapsed, tc.maxMs, + "derivation took too long: %dms > %dms", elapsed, tc.maxMs) + }) + } +} diff --git a/bulletproof/generators.go 
b/bulletproof/generators.go new file mode 100644 index 0000000..5246753 --- /dev/null +++ b/bulletproof/generators.go @@ -0,0 +1,57 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package bulletproof + +import ( + "github.com/pkg/errors" + "golang.org/x/crypto/sha3" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +// generators contains a list of points to be used as generators for bulletproofs. +type generators []curves.Point + +// ippGenerators holds generators necessary for an Inner Product Proof +// It includes a single u generator, and a list of generators divided in half to G and H +// See lines 10 on pg 16 of https://eprint.iacr.org/2017/1066.pdf +type ippGenerators struct { + G generators + H generators +} + +// getGeneratorPoints generates generators using HashToCurve with Shake256(domain) as input +// lenVector is the length of the scalars used for the Inner Product Proof +// getGeneratorPoints will return 2*lenVector + 1 total points, split between a single u generator +// and G and H lists of vectors per the IPP specification +// See lines 10 on pg 16 of https://eprint.iacr.org/2017/1066.pdf +func getGeneratorPoints(lenVector int, domain []byte, curve curves.Curve) (*ippGenerators, error) { + shake := sha3.NewShake256() + _, err := shake.Write(domain) + if err != nil { + return nil, errors.Wrap(err, "getGeneratorPoints shake.Write") + } + numPoints := lenVector * 2 + points := make([]curves.Point, numPoints) + for i := 0; i < numPoints; i++ { + bytes := [64]byte{} + _, err := shake.Read(bytes[:]) + if err != nil { + return nil, errors.Wrap(err, "getGeneratorPoints shake.Read") + } + nextPoint := curve.Point.Hash(bytes[:]) + points[i] = nextPoint + } + // Get G and H by splitting points in half + G, H, err := splitPointVector(points) + if err != nil { + return nil, errors.Wrap(err, "getGeneratorPoints splitPointVector") + } + out := ippGenerators{G: G, H: H} + + return &out, nil +} diff --git a/bulletproof/generators_test.go b/bulletproof/generators_test.go new file mode 100644 index 0000000..8495c3e --- /dev/null +++ b/bulletproof/generators_test.go @@ -0,0 +1,61 @@ +package bulletproof + +import ( + "testing" + + "github.com/stretchr/testify/require" + "golang.org/x/crypto/sha3" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +func TestGeneratorsHappyPath(t *testing.T) { + curve := curves.ED25519() + gs, err := getGeneratorPoints(10, []byte("test"), *curve) + gsConcatenated := concatIPPGenerators(*gs) + require.NoError(t, err) + require.Len(t, gs.G, 10) + require.Len(t, gs.H, 10) + require.True(t, noDuplicates(gsConcatenated)) +} + +func TestGeneratorsUniquePerDomain(t *testing.T) { + curve := curves.ED25519() + gs1, err := getGeneratorPoints(10, []byte("test"), *curve) + gs1Concatenated := concatIPPGenerators(*gs1) + require.NoError(t, err) + gs2, err := getGeneratorPoints(10, []byte("test2"), *curve) + gs2Concatenated := concatIPPGenerators(*gs2) + require.NoError(t, err) + require.True(t, areDisjoint(gs1Concatenated, gs2Concatenated)) +} + +func noDuplicates(gs generators) bool { + seen := map[[32]byte]bool{} + for _, G := range gs { + value := sha3.Sum256(G.ToAffineCompressed()) + if seen[value] { + return false + } + seen[value] = true + } + return true +} + +func areDisjoint(gs1, gs2 generators) bool { + for _, g1 := range gs1 { + for _, g2 := range gs2 { + if g1.Equal(g2) { + return false + } + } + } + return true +} + +func concatIPPGenerators(ippGens ippGenerators) generators { + var out 
generators + out = append(out, ippGens.G...) + out = append(out, ippGens.H...) + return out +} diff --git a/bulletproof/helpers.go b/bulletproof/helpers.go new file mode 100644 index 0000000..323da4f --- /dev/null +++ b/bulletproof/helpers.go @@ -0,0 +1,181 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package bulletproof + +import ( + "github.com/pkg/errors" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +// innerProduct takes two lists of scalars (a, b) and performs the dot product returning a single scalar. +func innerProduct(a, b []curves.Scalar) (curves.Scalar, error) { + if len(a) != len(b) { + return nil, errors.New("length of scalar vectors must be the same") + } + if len(a) < 1 { + return nil, errors.New("length of vectors must be at least one") + } + // Get a new scalar of value zero of the same curve as input arguments + innerProduct := a[0].Zero() + for i, aElem := range a { + bElem := b[i] + // innerProduct = aElem*bElem + innerProduct + innerProduct = aElem.MulAdd(bElem, innerProduct) + } + + return innerProduct, nil +} + +// splitPointVector takes a vector of points, splits it in half returning each half. +func splitPointVector(points []curves.Point) ([]curves.Point, []curves.Point, error) { + if len(points) < 1 { + return nil, nil, errors.New("length of points must be at least one") + } + if len(points)&0x01 != 0 { + return nil, nil, errors.New("length of points must be even") + } + nPrime := len(points) >> 1 + firstHalf := points[:nPrime] + secondHalf := points[nPrime:] + return firstHalf, secondHalf, nil +} + +// splitScalarVector takes a vector of scalars, splits it in half returning each half. +func splitScalarVector(scalars []curves.Scalar) ([]curves.Scalar, []curves.Scalar, error) { + if len(scalars) < 1 { + return nil, nil, errors.New("length of scalars must be at least one") + } + if len(scalars)&0x01 != 0 { + return nil, nil, errors.New("length of scalars must be even") + } + nPrime := len(scalars) >> 1 + firstHalf := scalars[:nPrime] + secondHalf := scalars[nPrime:] + return firstHalf, secondHalf, nil +} + +// multiplyScalarToPointVector takes a single scalar and a list of points, multiplies each point by scalar. +func multiplyScalarToPointVector(x curves.Scalar, g []curves.Point) []curves.Point { + products := make([]curves.Point, len(g)) + for i, gElem := range g { + product := gElem.Mul(x) + products[i] = product + } + + return products +} + +// multiplyScalarToScalarVector takes a single scalar (x) and a list of scalars (a), multiplies each scalar in the vector by the scalar. +func multiplyScalarToScalarVector(x curves.Scalar, a []curves.Scalar) []curves.Scalar { + products := make([]curves.Scalar, len(a)) + for i, aElem := range a { + product := aElem.Mul(x) + products[i] = product + } + + return products +} + +// multiplyPairwisePointVectors takes two lists of points (g, h) and performs a pairwise multiplication returning a list of points. +func multiplyPairwisePointVectors(g, h []curves.Point) ([]curves.Point, error) { + if len(g) != len(h) { + return nil, errors.New("length of point vectors must be the same") + } + product := make([]curves.Point, len(g)) + for i, gElem := range g { + product[i] = gElem.Add(h[i]) + } + + return product, nil +} + +// multiplyPairwiseScalarVectors takes two lists of points (a, b) and performs a pairwise multiplication returning a list of scalars. 
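// Both a and b are scalar vectors; the result is the Hadamard (element-wise) product c_i = a_i * b_i.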
+func multiplyPairwiseScalarVectors(a, b []curves.Scalar) ([]curves.Scalar, error) { + if len(a) != len(b) { + return nil, errors.New("length of point vectors must be the same") + } + product := make([]curves.Scalar, len(a)) + for i, aElem := range a { + product[i] = aElem.Mul(b[i]) + } + + return product, nil +} + +// addPairwiseScalarVectors takes two lists of scalars (a, b) and performs a pairwise addition returning a list of scalars. +func addPairwiseScalarVectors(a, b []curves.Scalar) ([]curves.Scalar, error) { + if len(a) != len(b) { + return nil, errors.New("length of scalar vectors must be the same") + } + sum := make([]curves.Scalar, len(a)) + for i, aElem := range a { + sum[i] = aElem.Add(b[i]) + } + + return sum, nil +} + +// subtractPairwiseScalarVectors takes two lists of scalars (a, b) and performs a pairwise subtraction returning a list of scalars. +func subtractPairwiseScalarVectors(a, b []curves.Scalar) ([]curves.Scalar, error) { + if len(a) != len(b) { + return nil, errors.New("length of scalar vectors must be the same") + } + diff := make([]curves.Scalar, len(a)) + for i, aElem := range a { + diff[i] = aElem.Sub(b[i]) + } + return diff, nil +} + +// invertScalars takes a list of scalars then returns a list with each element inverted. +func invertScalars(xs []curves.Scalar) ([]curves.Scalar, error) { + xinvs := make([]curves.Scalar, len(xs)) + for i, x := range xs { + xinv, err := x.Invert() + if err != nil { + return nil, errors.Wrap(err, "bulletproof helpers invertx") + } + xinvs[i] = xinv + } + + return xinvs, nil +} + +// isPowerOfTwo returns whether a number i is a power of two or not. +func isPowerOfTwo(i int) bool { + return i&(i-1) == 0 +} + +// get2nVector returns a scalar vector 2^n such that [1, 2, 4, ... 2^(n-1)] +// See k^n and 2^n definitions on pg 12 of https://eprint.iacr.org/2017/1066.pdf +func get2nVector(length int, curve curves.Curve) []curves.Scalar { + vector2n := make([]curves.Scalar, length) + vector2n[0] = curve.Scalar.One() + for i := 1; i < length; i++ { + vector2n[i] = vector2n[i-1].Double() + } + return vector2n +} + +func get1nVector(length int, curve curves.Curve) []curves.Scalar { + vector1n := make([]curves.Scalar, length) + for i := 0; i < length; i++ { + vector1n[i] = curve.Scalar.One() + } + return vector1n +} + +func getknVector(k curves.Scalar, length int, curve curves.Curve) []curves.Scalar { + vectorkn := make([]curves.Scalar, length) + vectorkn[0] = curve.Scalar.One() + vectorkn[1] = k + for i := 2; i < length; i++ { + vectorkn[i] = vectorkn[i-1].Mul(k) + } + return vectorkn +} diff --git a/bulletproof/helpers_test.go b/bulletproof/helpers_test.go new file mode 100644 index 0000000..8ae037e --- /dev/null +++ b/bulletproof/helpers_test.go @@ -0,0 +1,85 @@ +package bulletproof + +import ( + crand "crypto/rand" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +func TestInnerProductHappyPath(t *testing.T) { + curve := curves.ED25519() + a := randScalarVec(3, *curve) + b := randScalarVec(3, *curve) + _, err := innerProduct(a, b) + require.NoError(t, err) +} + +func TestInnerProductMismatchedLengths(t *testing.T) { + curve := curves.ED25519() + a := randScalarVec(3, *curve) + b := randScalarVec(4, *curve) + _, err := innerProduct(a, b) + require.Error(t, err) +} + +func TestInnerProductEmptyVector(t *testing.T) { + curve := curves.ED25519() + a := randScalarVec(0, *curve) + b := randScalarVec(0, *curve) + _, err := innerProduct(a, b) + require.Error(t, err) +} + +func 
TestInnerProductOut(t *testing.T) { + curve := curves.ED25519() + a := randScalarVec(2, *curve) + b := randScalarVec(2, *curve) + c, err := innerProduct(a, b) + require.NoError(t, err) + + // Calculate manually a0*b0 + a1*b1 + cPrime := a[0].Mul(b[0]).Add(a[1].Mul(b[1])) + require.Equal(t, c, cPrime) +} + +func TestSplitListofPointsHappyPath(t *testing.T) { + curve := curves.ED25519() + points := randPointVec(10, *curve) + firstHalf, secondHalf, err := splitPointVector(points) + require.NoError(t, err) + require.Len(t, firstHalf, 5) + require.Len(t, secondHalf, 5) +} + +func TestSplitListofPointsOddLength(t *testing.T) { + curve := curves.ED25519() + points := randPointVec(11, *curve) + _, _, err := splitPointVector(points) + require.Error(t, err) +} + +func TestSplitListofPointsZeroLength(t *testing.T) { + curve := curves.ED25519() + points := randPointVec(0, *curve) + _, _, err := splitPointVector(points) + require.Error(t, err) +} + +func randScalarVec(length int, curve curves.Curve) []curves.Scalar { + out := make([]curves.Scalar, length) + for i := 0; i < length; i++ { + out[i] = curve.Scalar.Random(crand.Reader) + } + return out +} + +func randPointVec(length int, curve curves.Curve) []curves.Point { + out := make([]curves.Point, length) + for i := 0; i < length; i++ { + out[i] = curve.Point.Random(crand.Reader) + } + return out +} diff --git a/bulletproof/ipp_prover.go b/bulletproof/ipp_prover.go new file mode 100644 index 0000000..3633d27 --- /dev/null +++ b/bulletproof/ipp_prover.go @@ -0,0 +1,415 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +// Package bulletproof implements the zero knowledge protocol bulletproofs as defined in https://eprint.iacr.org/2017/1066.pdf +package bulletproof + +import ( + "github.com/gtank/merlin" + "github.com/pkg/errors" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +// InnerProductProver is the struct used to create InnerProductProofs +// It specifies which curve to use and holds precomputed generators +// See NewInnerProductProver() for prover initialization. +type InnerProductProver struct { + curve curves.Curve + generators ippGenerators +} + +// InnerProductProof contains necessary output for the inner product proof +// a and b are the final input vectors of scalars, they should be of length 1 +// Ls and Rs are calculated per recursion of the IPP and are necessary for verification +// See section 3.1 on pg 15 of https://eprint.iacr.org/2017/1066.pdf +type InnerProductProof struct { + a, b curves.Scalar + capLs, capRs []curves.Point + curve *curves.Curve +} + +// ippRecursion is the same as IPP but tracks recursive a', b', g', h' and Ls and Rs +// It should only be used internally by InnerProductProver.Prove() +// See L35 on pg 16 of https://eprint.iacr.org/2017/1066.pdf +type ippRecursion struct { + a, b []curves.Scalar + c curves.Scalar + capLs, capRs []curves.Point + g, h []curves.Point + u, capP curves.Point + transcript *merlin.Transcript +} + +// NewInnerProductProver initializes a new prover +// It uses the specified domain to generate generators for vectors of at most maxVectorLength +// A prover can be used to construct inner product proofs for vectors of length less than or equal to maxVectorLength +// A prover is defined by an explicit curve. 
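+// For illustration (values mirror the tests in ipp_prover_test.go), a prover for vectors of
+// length at most 8 over ED25519 could be constructed as:
+//
+//	curve := curves.ED25519()
+//	prover, err := NewInnerProductProver(8, []byte("example-domain"), *curve)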
+func NewInnerProductProver( + maxVectorLength int, + domain []byte, + curve curves.Curve, +) (*InnerProductProver, error) { + generators, err := getGeneratorPoints(maxVectorLength, domain, curve) + if err != nil { + return nil, errors.Wrap(err, "ipp getGenerators") + } + return &InnerProductProver{curve: curve, generators: *generators}, nil +} + +// NewInnerProductProof initializes a new InnerProductProof for a specified curve +// This should be used in tandem with UnmarshalBinary() to convert a marshaled proof into the struct. +func NewInnerProductProof(curve *curves.Curve) *InnerProductProof { + var capLs, capRs []curves.Point + newProof := InnerProductProof{ + a: curve.NewScalar(), + b: curve.NewScalar(), + capLs: capLs, + capRs: capRs, + curve: curve, + } + return &newProof +} + +// rangeToIPP takes the output of a range proof and converts it into an inner product proof +// See section 4.2 on pg 20 +// The conversion specifies generators to use (g and hPrime), as well as the two vectors l, r of which the inner product is tHat +// Additionally, note that the P used for the IPP is in fact P*h^-mu from the range proof. +func (prover *InnerProductProver) rangeToIPP( + proofG, proofH []curves.Point, + l, r []curves.Scalar, + tHat curves.Scalar, + capPhmuinv, u curves.Point, + transcript *merlin.Transcript, +) (*InnerProductProof, error) { + // Note that P as a witness is only g^l * h^r + // P needs to be in the form of g^l * h^r * u^ + // Calculate the final P including the u^ term + utHat := u.Mul(tHat) + capP := capPhmuinv.Add(utHat) + + // Use params to prove inner product + recursionParams := &ippRecursion{ + a: l, + b: r, + capLs: []curves.Point{}, + capRs: []curves.Point{}, + c: tHat, + g: proofG, + h: proofH, + capP: capP, + u: u, + transcript: transcript, + } + + return prover.proveRecursive(recursionParams) +} + +// getP returns the initial P value given two scalars a,b and point u +// This method should only be used for testing +// See (3) on page 13 of https://eprint.iacr.org/2017/1066.pdf +func (prover *InnerProductProver) getP(a, b []curves.Scalar, u curves.Point) (curves.Point, error) { + // Vectors must have length power of two + if !isPowerOfTwo(len(a)) { + return nil, errors.New("ipp vector length must be power of two") + } + // Generator vectors must be same length + if len(prover.generators.G) != len(prover.generators.H) { + return nil, errors.New("ipp generator lengths of g and h must be equal") + } + // Inner product requires len(a) == len(b) else error is returned + c, err := innerProduct(a, b) + if err != nil { + return nil, errors.Wrap(err, "ipp getInnerProduct") + } + + // In case where len(a) is less than number of generators precomputed by prover, trim to length + proofG := prover.generators.G[0:len(a)] + proofH := prover.generators.H[0:len(b)] + + // initial P = g^a * h^b * u^(a dot b) (See (3) on page 13 of https://eprint.iacr.org/2017/1066.pdf) + ga := prover.curve.NewGeneratorPoint().SumOfProducts(proofG, a) + hb := prover.curve.NewGeneratorPoint().SumOfProducts(proofH, b) + uadotb := u.Mul(c) + capP := ga.Add(hb).Add(uadotb) + + return capP, nil +} + +// Prove executes the prover protocol on pg 16 of https://eprint.iacr.org/2017/1066.pdf +// It generates an inner product proof for vectors a and b, using u to blind the inner product in P +// A transcript is used for the Fiat Shamir heuristic. 
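+// For illustration (mirroring ipp_prover_test.go, where randScalarVec is a test helper), a proof
+// over two random length-8 vectors could be produced as:
+//
+//	a := randScalarVec(8, *curve)
+//	b := randScalarVec(8, *curve)
+//	u := curve.Point.Random(crand.Reader)
+//	proof, err := prover.Prove(a, b, u, merlin.NewTranscript("example"))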
+func (prover *InnerProductProver) Prove( + a, b []curves.Scalar, + u curves.Point, + transcript *merlin.Transcript, +) (*InnerProductProof, error) { + // Vectors must have length power of two + if !isPowerOfTwo(len(a)) { + return nil, errors.New("ipp vector length must be power of two") + } + // Generator vectors must be same length + if len(prover.generators.G) != len(prover.generators.H) { + return nil, errors.New("ipp generator lengths of g and h must be equal") + } + // Inner product requires len(a) == len(b) else error is returned + c, err := innerProduct(a, b) + if err != nil { + return nil, errors.Wrap(err, "ipp getInnerProduct") + } + + // Length of vectors must be less than the number of generators generated + if len(a) > len(prover.generators.G) { + return nil, errors.New("ipp vector length must be less than maxVectorLength") + } + // In case where len(a) is less than number of generators precomputed by prover, trim to length + proofG := prover.generators.G[0:len(a)] + proofH := prover.generators.H[0:len(b)] + + // initial P = g^a * h^b * u^(a dot b) (See (3) on page 13 of https://eprint.iacr.org/2017/1066.pdf) + ga := prover.curve.NewGeneratorPoint().SumOfProducts(proofG, a) + hb := prover.curve.NewGeneratorPoint().SumOfProducts(proofH, b) + uadotb := u.Mul(c) + capP := ga.Add(hb).Add(uadotb) + + recursionParams := &ippRecursion{ + a: a, + b: b, + capLs: []curves.Point{}, + capRs: []curves.Point{}, + c: c, + g: proofG, + h: proofH, + capP: capP, + u: u, + transcript: transcript, + } + return prover.proveRecursive(recursionParams) +} + +// proveRecursive executes the recursion on pg 16 of https://eprint.iacr.org/2017/1066.pdf +func (prover *InnerProductProver) proveRecursive( + recursionParams *ippRecursion, +) (*InnerProductProof, error) { + // length checks + if len(recursionParams.a) != len(recursionParams.b) { + return nil, errors.New("ipp proveRecursive a and b different lengths") + } + if len(recursionParams.g) != len(recursionParams.h) { + return nil, errors.New("ipp proveRecursive g and h different lengths") + } + if len(recursionParams.a) != len(recursionParams.g) { + return nil, errors.New("ipp proveRecursive scalar and point vectors different lengths") + } + // Base case (L14, pg16 of https://eprint.iacr.org/2017/1066.pdf) + if len(recursionParams.a) == 1 { + proof := &InnerProductProof{ + a: recursionParams.a[0], + b: recursionParams.b[0], + capLs: recursionParams.capLs, + capRs: recursionParams.capRs, + curve: &prover.curve, + } + return proof, nil + } + + // Split current state into low (first half) vs high (second half) vectors + aLo, aHi, err := splitScalarVector(recursionParams.a) + if err != nil { + return nil, errors.Wrap(err, "recursionParams splitScalarVector") + } + bLo, bHi, err := splitScalarVector(recursionParams.b) + if err != nil { + return nil, errors.Wrap(err, "recursionParams splitScalarVector") + } + gLo, gHi, err := splitPointVector(recursionParams.g) + if err != nil { + return nil, errors.Wrap(err, "recursionParams splitPointVector") + } + hLo, hHi, err := splitPointVector(recursionParams.h) + if err != nil { + return nil, errors.Wrap(err, "recursionParams splitPointVector") + } + + // c_l, c_r (L21,22, pg16 of https://eprint.iacr.org/2017/1066.pdf) + cL, err := innerProduct(aLo, bHi) + if err != nil { + return nil, errors.Wrap(err, "recursionParams innerProduct") + } + + cR, err := innerProduct(aHi, bLo) + if err != nil { + return nil, errors.Wrap(err, "recursionParams innerProduct") + } + + // L, R (L23,24, pg16 of 
https://eprint.iacr.org/2017/1066.pdf) + lga := prover.curve.Point.SumOfProducts(gHi, aLo) + lhb := prover.curve.Point.SumOfProducts(hLo, bHi) + ucL := recursionParams.u.Mul(cL) + capL := lga.Add(lhb).Add(ucL) + + rga := prover.curve.Point.SumOfProducts(gLo, aHi) + rhb := prover.curve.Point.SumOfProducts(hHi, bLo) + ucR := recursionParams.u.Mul(cR) + capR := rga.Add(rhb).Add(ucR) + + // Add L,R for verifier to use to calculate final g, h + newL := recursionParams.capLs + newL = append(newL, capL) + newR := recursionParams.capRs + newR = append(newR, capR) + + // Get x from L, R for non-interactive (See section 4.4 pg22 of https://eprint.iacr.org/2017/1066.pdf) + // Note this replaces the interactive model, i.e. L36-28 of pg16 of https://eprint.iacr.org/2017/1066.pdf + x, err := prover.calcx(capL, capR, recursionParams.transcript) + if err != nil { + return nil, errors.Wrap(err, "recursionParams calcx") + } + + // Calculate recursive inputs + xInv, err := x.Invert() + if err != nil { + return nil, errors.Wrap(err, "recursionParams x.Invert") + } + + // g', h' (L29,30, pg16 of https://eprint.iacr.org/2017/1066.pdf) + gLoxInverse := multiplyScalarToPointVector(xInv, gLo) + gHix := multiplyScalarToPointVector(x, gHi) + gPrime, err := multiplyPairwisePointVectors(gLoxInverse, gHix) + if err != nil { + return nil, errors.Wrap(err, "recursionParams multiplyPairwisePointVectors") + } + + hLox := multiplyScalarToPointVector(x, hLo) + hHixInv := multiplyScalarToPointVector(xInv, hHi) + hPrime, err := multiplyPairwisePointVectors(hLox, hHixInv) + if err != nil { + return nil, errors.Wrap(err, "recursionParams multiplyPairwisePointVectors") + } + + // P' (L31, pg16 of https://eprint.iacr.org/2017/1066.pdf) + xSquare := x.Square() + xInvSquare := xInv.Square() + LxSquare := capL.Mul(xSquare) + RxInvSquare := capR.Mul(xInvSquare) + PPrime := LxSquare.Add(recursionParams.capP).Add(RxInvSquare) + + // a', b' (L33, 34, pg16 of https://eprint.iacr.org/2017/1066.pdf) + aLox := multiplyScalarToScalarVector(x, aLo) + aHixIn := multiplyScalarToScalarVector(xInv, aHi) + aPrime, err := addPairwiseScalarVectors(aLox, aHixIn) + if err != nil { + return nil, errors.Wrap(err, "recursionParams addPairwiseScalarVectors") + } + + bLoxInv := multiplyScalarToScalarVector(xInv, bLo) + bHix := multiplyScalarToScalarVector(x, bHi) + bPrime, err := addPairwiseScalarVectors(bLoxInv, bHix) + if err != nil { + return nil, errors.Wrap(err, "recursionParams addPairwiseScalarVectors") + } + + // c' + cPrime, err := innerProduct(aPrime, bPrime) + if err != nil { + return nil, errors.Wrap(err, "recursionParams innerProduct") + } + + // Make recursive call (L35, pg16 of https://eprint.iacr.org/2017/1066.pdf) + recursiveIPP := &ippRecursion{ + a: aPrime, + b: bPrime, + capLs: newL, + capRs: newR, + c: cPrime, + g: gPrime, + h: hPrime, + capP: PPrime, + u: recursionParams.u, + transcript: recursionParams.transcript, + } + + out, err := prover.proveRecursive(recursiveIPP) + if err != nil { + return nil, errors.Wrap(err, "recursionParams proveRecursive") + } + return out, nil +} + +// calcx uses a merlin transcript for Fiat Shamir +// For each recursion, it takes the current state of the transcript and appends the newly calculated L and R values +// A new scalar is then read from the transcript +// See section 4.4 pg22 of https://eprint.iacr.org/2017/1066.pdf +func (prover *InnerProductProver) calcx( + capL, capR curves.Point, + transcript *merlin.Transcript, +) (curves.Scalar, error) { + // Add the newest capL and capR values to 
transcript + transcript.AppendMessage([]byte("addRecursiveL"), capL.ToAffineUncompressed()) + transcript.AppendMessage([]byte("addRecursiveR"), capR.ToAffineUncompressed()) + // Read 64 bytes from, set to scalar + outBytes := transcript.ExtractBytes([]byte("getx"), 64) + x, err := prover.curve.NewScalar().SetBytesWide(outBytes) + if err != nil { + return nil, errors.Wrap(err, "calcx NewScalar SetBytesWide") + } + + return x, nil +} + +// MarshalBinary takes an inner product proof and marshals into bytes. +func (proof *InnerProductProof) MarshalBinary() []byte { + var out []byte + out = append(out, proof.a.Bytes()...) + out = append(out, proof.b.Bytes()...) + for i, capLElem := range proof.capLs { + capRElem := proof.capRs[i] + out = append(out, capLElem.ToAffineCompressed()...) + out = append(out, capRElem.ToAffineCompressed()...) + } + return out +} + +// UnmarshalBinary takes bytes of a marshaled proof and writes them into an inner product proof +// The inner product proof used should be from the output of NewInnerProductProof(). +func (proof *InnerProductProof) UnmarshalBinary(data []byte) error { + scalarLen := len(proof.curve.NewScalar().Bytes()) + pointLen := len(proof.curve.NewGeneratorPoint().ToAffineCompressed()) + ptr := 0 + // Get scalars + a, err := proof.curve.NewScalar().SetBytes(data[ptr : ptr+scalarLen]) + if err != nil { + return errors.New("innerProductProof UnmarshalBinary SetBytes") + } + proof.a = a + ptr += scalarLen + b, err := proof.curve.NewScalar().SetBytes(data[ptr : ptr+scalarLen]) + if err != nil { + return errors.New("innerProductProof UnmarshalBinary SetBytes") + } + proof.b = b + ptr += scalarLen + // Get points + var capLs, capRs []curves.Point //nolint:prealloc // pointer arithmetic makes it too unreadable. + for ptr < len(data) { + capLElem, err := proof.curve.Point.FromAffineCompressed(data[ptr : ptr+pointLen]) + if err != nil { + return errors.New("innerProductProof UnmarshalBinary FromAffineCompressed") + } + capLs = append(capLs, capLElem) + ptr += pointLen + capRElem, err := proof.curve.Point.FromAffineCompressed(data[ptr : ptr+pointLen]) + if err != nil { + return errors.New("innerProductProof UnmarshalBinary FromAffineCompressed") + } + capRs = append(capRs, capRElem) + ptr += pointLen + } + proof.capLs = capLs + proof.capRs = capRs + + return nil +} diff --git a/bulletproof/ipp_prover_test.go b/bulletproof/ipp_prover_test.go new file mode 100644 index 0000000..3a9762b --- /dev/null +++ b/bulletproof/ipp_prover_test.go @@ -0,0 +1,99 @@ +package bulletproof + +import ( + crand "crypto/rand" + "testing" + + "github.com/gtank/merlin" + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +func TestIPPHappyPath(t *testing.T) { + curve := curves.ED25519() + prover, err := NewInnerProductProver(8, []byte("test"), *curve) + require.NoError(t, err) + a := randScalarVec(8, *curve) + b := randScalarVec(8, *curve) + u := curve.Point.Random(crand.Reader) + transcript := merlin.NewTranscript("test") + proof, err := prover.Prove(a, b, u, transcript) + require.NoError(t, err) + require.Equal(t, 3, len(proof.capLs)) + require.Equal(t, 3, len(proof.capRs)) +} + +func TestIPPMismatchedVectors(t *testing.T) { + curve := curves.ED25519() + prover, err := NewInnerProductProver(8, []byte("test"), *curve) + require.NoError(t, err) + a := randScalarVec(4, *curve) + b := randScalarVec(8, *curve) + u := curve.Point.Random(crand.Reader) + transcript := merlin.NewTranscript("test") + _, err = prover.Prove(a, b, u, transcript) + 
require.Error(t, err) +} + +func TestIPPNonPowerOfTwoLengthVectors(t *testing.T) { + curve := curves.ED25519() + prover, err := NewInnerProductProver(8, []byte("test"), *curve) + require.NoError(t, err) + a := randScalarVec(3, *curve) + b := randScalarVec(3, *curve) + u := curve.Point.Random(crand.Reader) + transcript := merlin.NewTranscript("test") + _, err = prover.Prove(a, b, u, transcript) + require.Error(t, err) +} + +func TestIPPZeroLengthVectors(t *testing.T) { + curve := curves.ED25519() + prover, err := NewInnerProductProver(8, []byte("test"), *curve) + require.NoError(t, err) + a := randScalarVec(0, *curve) + b := randScalarVec(0, *curve) + u := curve.Point.Random(crand.Reader) + transcript := merlin.NewTranscript("test") + _, err = prover.Prove(a, b, u, transcript) + require.Error(t, err) +} + +func TestIPPGreaterThanMaxLengthVectors(t *testing.T) { + curve := curves.ED25519() + prover, err := NewInnerProductProver(8, []byte("test"), *curve) + require.NoError(t, err) + a := randScalarVec(16, *curve) + b := randScalarVec(16, *curve) + u := curve.Point.Random(crand.Reader) + transcript := merlin.NewTranscript("test") + _, err = prover.Prove(a, b, u, transcript) + require.Error(t, err) +} + +func TestIPPMarshal(t *testing.T) { + curve := curves.ED25519() + prover, err := NewInnerProductProver(8, []byte("test"), *curve) + require.NoError(t, err) + a := randScalarVec(8, *curve) + b := randScalarVec(8, *curve) + u := curve.Point.Random(crand.Reader) + transcript := merlin.NewTranscript("test") + proof, err := prover.Prove(a, b, u, transcript) + require.NoError(t, err) + + proofMarshaled := proof.MarshalBinary() + proofPrime := NewInnerProductProof(curve) + err = proofPrime.UnmarshalBinary(proofMarshaled) + require.NoError(t, err) + require.Zero(t, proof.a.Cmp(proofPrime.a)) + require.Zero(t, proof.b.Cmp(proofPrime.b)) + for i, proofCapLElem := range proof.capLs { + proofPrimeCapLElem := proofPrime.capLs[i] + require.True(t, proofCapLElem.Equal(proofPrimeCapLElem)) + proofCapRElem := proof.capRs[i] + proofPrimeCapRElem := proofPrime.capRs[i] + require.True(t, proofCapRElem.Equal(proofPrimeCapRElem)) + } +} diff --git a/bulletproof/ipp_verifier.go b/bulletproof/ipp_verifier.go new file mode 100644 index 0000000..046c91e --- /dev/null +++ b/bulletproof/ipp_verifier.go @@ -0,0 +1,236 @@ +package bulletproof + +import ( + "github.com/gtank/merlin" + "github.com/pkg/errors" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +// InnerProductVerifier is the struct used to verify inner product proofs +// It specifies which curve to use and holds precomputed generators +// See NewInnerProductProver() for prover initialization. +type InnerProductVerifier struct { + curve curves.Curve + generators ippGenerators +} + +// NewInnerProductVerifier initializes a new verifier +// It uses the specified domain to generate generators for vectors of at most maxVectorLength +// A verifier can be used to verify inner product proofs for vectors of length less than or equal to maxVectorLength +// A verifier is defined by an explicit curve. 
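+// For illustration, a verifier should be constructed with the same maxVectorLength, domain, and
+// curve as the prover so that both sides derive identical generators, e.g.:
+//
+//	verifier, err := NewInnerProductVerifier(8, []byte("example-domain"), *curve)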
+func NewInnerProductVerifier( + maxVectorLength int, + domain []byte, + curve curves.Curve, +) (*InnerProductVerifier, error) { + generators, err := getGeneratorPoints(maxVectorLength, domain, curve) + if err != nil { + return nil, errors.Wrap(err, "ipp getGenerators") + } + return &InnerProductVerifier{curve: curve, generators: *generators}, nil +} + +// Verify verifies the given proof inputs +// It implements the final comparison of section 3.1 on pg17 of https://eprint.iacr.org/2017/1066.pdf +func (verifier *InnerProductVerifier) Verify( + capP, u curves.Point, + proof *InnerProductProof, + transcript *merlin.Transcript, +) (bool, error) { + if len(proof.capLs) != len(proof.capRs) { + return false, errors.New("ipp capLs and capRs must be same length") + } + // Generator vectors must be same length + if len(verifier.generators.G) != len(verifier.generators.H) { + return false, errors.New("ipp generator lengths of g and h must be equal") + } + + // Get generators for each elem in a, b and one more for u + // len(Ls) = log n, therefore can just exponentiate + n := 1 << len(proof.capLs) + + // Length of vectors must be less than the number of generators generated + if n > len(verifier.generators.G) { + return false, errors.New("ipp vector length must be less than maxVectorLength") + } + // In case where len(a) is less than number of generators precomputed by prover, trim to length + proofG := verifier.generators.G[0:n] + proofH := verifier.generators.H[0:n] + + xs, err := getxs(transcript, proof.capLs, proof.capRs, verifier.curve) + if err != nil { + return false, errors.Wrap(err, "verifier getxs") + } + s, err := verifier.getsNew(xs, n) + if err != nil { + return false, errors.Wrap(err, "verifier getss") + } + lhs, err := verifier.getLHS(u, proof, proofG, proofH, s) + if err != nil { + return false, errors.Wrap(err, "verify getLHS") + } + rhs, err := verifier.getRHS(capP, proof, xs) + if err != nil { + return false, errors.Wrap(err, "verify getRHS") + } + return lhs.Equal(rhs), nil +} + +// Verify verifies the given proof inputs +// It implements the final comparison of section 3.1 on pg17 of https://eprint.iacr.org/2017/1066.pdf +func (verifier *InnerProductVerifier) VerifyFromRangeProof( + proofG, proofH []curves.Point, + capPhmuinv, u curves.Point, + tHat curves.Scalar, + proof *InnerProductProof, + transcript *merlin.Transcript, +) (bool, error) { + // Get generators for each elem in a, b and one more for u + // len(Ls) = log n, therefore can just exponentiate + n := 1 << len(proof.capLs) + + xs, err := getxs(transcript, proof.capLs, proof.capRs, verifier.curve) + if err != nil { + return false, errors.Wrap(err, "verifier getxs") + } + s, err := verifier.gets(xs, n) + if err != nil { + return false, errors.Wrap(err, "verifier getss") + } + lhs, err := verifier.getLHS(u, proof, proofG, proofH, s) + if err != nil { + return false, errors.Wrap(err, "verify getLHS") + } + utHat := u.Mul(tHat) + capP := capPhmuinv.Add(utHat) + rhs, err := verifier.getRHS(capP, proof, xs) + if err != nil { + return false, errors.Wrap(err, "verify getRHS") + } + return lhs.Equal(rhs), nil +} + +// getRHS gets the right hand side of the final comparison of section 3.1 on pg17. 
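+// In multiplicative notation this is P * Π_j ( L_j^(x_j^2) * R_j^(x_j^-2) ), where the x_j are the
+// Fiat-Shamir challenges recovered from the transcript.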
+func (*InnerProductVerifier) getRHS( + capP curves.Point, + proof *InnerProductProof, + xs []curves.Scalar, +) (curves.Point, error) { + product := capP + for j, Lj := range proof.capLs { + Rj := proof.capRs[j] + xj := xs[j] + xjSquare := xj.Square() + xjSquareInv, err := xjSquare.Invert() + if err != nil { + return nil, errors.Wrap(err, "verify invert") + } + LjxjSquare := Lj.Mul(xjSquare) + RjxjSquareInv := Rj.Mul(xjSquareInv) + product = product.Add(LjxjSquare).Add(RjxjSquareInv) + } + return product, nil +} + +// getLHS gets the left hand side of the final comparison of section 3.1 on pg17. +func (verifier *InnerProductVerifier) getLHS( + u curves.Point, + proof *InnerProductProof, + g, h []curves.Point, + s []curves.Scalar, +) (curves.Point, error) { + sInv, err := invertScalars(s) + if err != nil { + return nil, errors.Wrap(err, "verify invertScalars") + } + // g^(a*s) + as := multiplyScalarToScalarVector(proof.a, s) + gas := verifier.curve.Point.SumOfProducts(g, as) + // h^(b*s^-1) + bsInv := multiplyScalarToScalarVector(proof.b, sInv) + hbsInv := verifier.curve.Point.SumOfProducts(h, bsInv) + // u^a*b + ab := proof.a.Mul(proof.b) + uab := u.Mul(ab) + // g^(a*s) * h^(b*s^-1) * u^a*b + out := gas.Add(hbsInv).Add(uab) + + return out, nil +} + +// getxs calculates the x values from Ls and Rs +// Note that each x is read from the transcript, then the L and R at a certain index are written to the transcript +// This mirrors the reading of xs and writing of Ls and Rs in the prover. +func getxs( + transcript *merlin.Transcript, + capLs, capRs []curves.Point, + curve curves.Curve, +) ([]curves.Scalar, error) { + xs := make([]curves.Scalar, len(capLs)) + for i, capLi := range capLs { + capRi := capRs[i] + // Add the newest L and R values to transcript + transcript.AppendMessage([]byte("addRecursiveL"), capLi.ToAffineUncompressed()) + transcript.AppendMessage([]byte("addRecursiveR"), capRi.ToAffineUncompressed()) + // Read 64 bytes from, set to scalar + outBytes := transcript.ExtractBytes([]byte("getx"), 64) + x, err := curve.NewScalar().SetBytesWide(outBytes) + if err != nil { + return nil, errors.Wrap(err, "calcx NewScalar SetBytesWide") + } + xs[i] = x + } + + return xs, nil +} + +// gets calculates the vector s of values used for verification +// See the second expression of section 3.1 on pg15 +// nolint +func (verifier *InnerProductVerifier) gets(xs []curves.Scalar, n int) ([]curves.Scalar, error) { + ss := make([]curves.Scalar, n) + for i := 0; i < n; i++ { + si := verifier.curve.Scalar.One() + for j, xj := range xs { + if i>>(len(xs)-j-1)&0x01 == 1 { + si = si.Mul(xj) + } else { + xjInverse, err := xj.Invert() + if err != nil { + return nil, errors.Wrap(err, "getss invert") + } + si = si.Mul(xjInverse) + } + } + ss[i] = si + } + + return ss, nil +} + +// getsNew calculates the vector s of values used for verification +// It provides analogous functionality as gets(), but uses a O(n) algorithm vs O(nlogn) +// The algorithm inverts all xs, then begins multiplying the inversion by the square of x elements to +// calculate all s values thus minimizing necessary inversions/ computation. 
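+// Concretely, s_i = Π_j x_j^(±1), with exponent +1 when bit j of i (counting from the most
+// significant bit) is set and -1 otherwise; so s_0 = (Π_j x_j)^(-1) and every other s_i is an
+// earlier entry multiplied by a single x_j^2.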
+func (verifier *InnerProductVerifier) getsNew(xs []curves.Scalar, n int) ([]curves.Scalar, error) { + var err error + ss := make([]curves.Scalar, n) + // First element is all xs inverted mul'd + ss[0] = verifier.curve.Scalar.One() + for _, xj := range xs { + ss[0] = ss[0].Mul(xj) + } + ss[0], err = ss[0].Invert() + if err != nil { + return nil, errors.Wrap(err, "ipp gets inv ss0") + } + for j, xj := range xs { + xjSquared := xj.Square() + for i := 0; i < n; i += 1 << (len(xs) - j) { + ss[i+1<<(len(xs)-j-1)] = ss[i].Mul(xjSquared) + } + } + + return ss, nil +} diff --git a/bulletproof/ipp_verifier_test.go b/bulletproof/ipp_verifier_test.go new file mode 100644 index 0000000..b386a6d --- /dev/null +++ b/bulletproof/ipp_verifier_test.go @@ -0,0 +1,79 @@ +package bulletproof + +import ( + crand "crypto/rand" + "testing" + + "github.com/gtank/merlin" + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +func TestIPPVerifyHappyPath(t *testing.T) { + curve := curves.ED25519() + vecLength := 256 + prover, err := NewInnerProductProver(vecLength, []byte("test"), *curve) + require.NoError(t, err) + a := randScalarVec(vecLength, *curve) + b := randScalarVec(vecLength, *curve) + u := curve.Point.Random(crand.Reader) + transcriptProver := merlin.NewTranscript("test") + proof, err := prover.Prove(a, b, u, transcriptProver) + require.NoError(t, err) + + verifier, err := NewInnerProductVerifier(vecLength, []byte("test"), *curve) + require.NoError(t, err) + capP, err := prover.getP(a, b, u) + require.NoError(t, err) + transcriptVerifier := merlin.NewTranscript("test") + verified, err := verifier.Verify(capP, u, proof, transcriptVerifier) + require.NoError(t, err) + require.True(t, verified) +} + +func BenchmarkIPPVerification(bench *testing.B) { + curve := curves.ED25519() + vecLength := 1024 + prover, _ := NewInnerProductProver(vecLength, []byte("test"), *curve) + a := randScalarVec(vecLength, *curve) + b := randScalarVec(vecLength, *curve) + u := curve.Point.Random(crand.Reader) + transcriptProver := merlin.NewTranscript("test") + proof, _ := prover.Prove(a, b, u, transcriptProver) + + verifier, _ := NewInnerProductVerifier(vecLength, []byte("test"), *curve) + capP, _ := prover.getP(a, b, u) + transcriptVerifier := merlin.NewTranscript("test") + verified, _ := verifier.Verify(capP, u, proof, transcriptVerifier) + require.True(bench, verified) +} + +func TestIPPVerifyInvalidProof(t *testing.T) { + curve := curves.ED25519() + vecLength := 64 + prover, err := NewInnerProductProver(vecLength, []byte("test"), *curve) + require.NoError(t, err) + + a := randScalarVec(vecLength, *curve) + b := randScalarVec(vecLength, *curve) + u := curve.Point.Random(crand.Reader) + + aPrime := randScalarVec(64, *curve) + bPrime := randScalarVec(64, *curve) + uPrime := curve.Point.Random(crand.Reader) + transcriptProver := merlin.NewTranscript("test") + + proofPrime, err := prover.Prove(aPrime, bPrime, uPrime, transcriptProver) + require.NoError(t, err) + + verifier, err := NewInnerProductVerifier(vecLength, []byte("test"), *curve) + require.NoError(t, err) + capP, err := prover.getP(a, b, u) + require.NoError(t, err) + transcriptVerifier := merlin.NewTranscript("test") + // Check for different capP, u from proof + verified, err := verifier.Verify(capP, u, proofPrime, transcriptVerifier) + require.NoError(t, err) + require.False(t, verified) +} diff --git a/bulletproof/range_batch_prover.go b/bulletproof/range_batch_prover.go new file mode 100644 index 0000000..82cd143 --- /dev/null +++ 
b/bulletproof/range_batch_prover.go @@ -0,0 +1,386 @@ +package bulletproof + +import ( + crand "crypto/rand" + + "github.com/gtank/merlin" + "github.com/pkg/errors" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +// BatchProve proves that a list of scalars v are in the range n. +// It implements the aggregating logarithmic proofs defined on pg21. +// Instead of taking a single value and a single blinding factor, BatchProve takes in a list of values and list of +// blinding factors. +func (prover *RangeProver) BatchProve( + v, gamma []curves.Scalar, + n int, + proofGenerators RangeProofGenerators, + transcript *merlin.Transcript, +) (*RangeProof, error) { + // Define nm as the total bits required for secrets, calculated as number of secrets * n + m := len(v) + nm := n * m + // nm must be less than or equal to the number of generators generated + if nm > len(prover.generators.G) { + return nil, errors.New("ipp vector length must be less than or equal to maxVectorLength") + } + + // In case where nm is less than number of generators precomputed by prover, trim to length + proofG := prover.generators.G[0:nm] + proofH := prover.generators.H[0:nm] + + // Check that each elem in v is in range [0, 2^n] + for _, vi := range v { + checkedRange := checkRange(vi, n) + if checkedRange != nil { + return nil, checkedRange + } + } + + // L40 on pg19 + aL, err := getaLBatched(v, n, prover.curve) + if err != nil { + return nil, errors.Wrap(err, "rangeproof prove") + } + onenm := get1nVector(nm, prover.curve) + // L41 on pg19 + aR, err := subtractPairwiseScalarVectors(aL, onenm) + if err != nil { + return nil, errors.Wrap(err, "rangeproof prove") + } + + alpha := prover.curve.Scalar.Random(crand.Reader) + // Calc A (L44, pg19) + halpha := proofGenerators.h.Mul(alpha) + gaL := prover.curve.Point.SumOfProducts(proofG, aL) + haR := prover.curve.Point.SumOfProducts(proofH, aR) + capA := halpha.Add(gaL).Add(haR) + + // L45, 46, pg19 + sL := getBlindingVector(nm, prover.curve) + sR := getBlindingVector(nm, prover.curve) + rho := prover.curve.Scalar.Random(crand.Reader) + + // Calc S (L47, pg19) + hrho := proofGenerators.h.Mul(rho) + gsL := prover.curve.Point.SumOfProducts(proofG, sL) + hsR := prover.curve.Point.SumOfProducts(proofH, sR) + capS := hrho.Add(gsL).Add(hsR) + + // Fiat Shamir for y,z (L49, pg19) + capV := getcapVBatched(v, gamma, proofGenerators.g, proofGenerators.h) + y, z, err := calcyzBatched(capV, capA, capS, transcript, prover.curve) + if err != nil { + return nil, errors.Wrap(err, "rangeproof prove") + } + + // Calc t_1, t_2 + // See the l(X), r(X), equations on pg 21 + // Use l(X)'s and r(X)'s constant and linear terms to derive t_1 and t_2 + // (a_l - z*1^n) + zonenm := multiplyScalarToScalarVector(z, onenm) + constantTerml, err := subtractPairwiseScalarVectors(aL, zonenm) + if err != nil { + return nil, errors.Wrap(err, "rangeproof prove") + } + linearTerml := sL + + // zSum term, see equation 71 on pg21 + zSum := getSumTermrXBatched(z, n, len(v), prover.curve) + // a_r + z*1^nm + aRPluszonenm, err := addPairwiseScalarVectors(aR, zonenm) + if err != nil { + return nil, errors.Wrap(err, "rangeproof prove") + } + ynm := getknVector(y, nm, prover.curve) + hadamard, err := multiplyPairwiseScalarVectors(ynm, aRPluszonenm) + if err != nil { + return nil, errors.Wrap(err, "rangeproof prove") + } + constantTermr, err := addPairwiseScalarVectors(hadamard, zSum) + if err != nil { + return nil, errors.Wrap(err, "rangeproof prove") + } + linearTermr, err := multiplyPairwiseScalarVectors(ynm, sR) + 
if err != nil { + return nil, errors.Wrap(err, "rangeproof prove") + } + + // t_1 (as the linear coefficient) is the sum of the dot products of l(X)'s linear term dot r(X)'s constant term + // and r(X)'s linear term dot l(X)'s constant term + t1FirstTerm, err := innerProduct(linearTerml, constantTermr) + if err != nil { + return nil, errors.Wrap(err, "rangeproof prove") + } + t1SecondTerm, err := innerProduct(linearTermr, constantTerml) + if err != nil { + return nil, errors.Wrap(err, "rangeproof prove") + } + t1 := t1FirstTerm.Add(t1SecondTerm) + + // t_2 (as the quadratic coefficient) is the dot product of l(X)'s and r(X)'s linear terms + t2, err := innerProduct(linearTerml, linearTermr) + if err != nil { + return nil, errors.Wrap(err, "rangeproof prove") + } + + // L52, pg20 + tau1 := prover.curve.Scalar.Random(crand.Reader) + tau2 := prover.curve.Scalar.Random(crand.Reader) + + // T_1, T_2 (L53, pg20) + capT1 := proofGenerators.g.Mul(t1).Add(proofGenerators.h.Mul(tau1)) + capT2 := proofGenerators.g.Mul(t2).Add(proofGenerators.h.Mul(tau2)) + + // Fiat shamir for x (L55, pg20) + x, err := calcx(capT1, capT2, transcript, prover.curve) + if err != nil { + return nil, errors.Wrap(err, "rangeproof prove") + } + + // Calc l + // Instead of using the expression in the line, evaluate l() at x + sLx := multiplyScalarToScalarVector(x, linearTerml) + l, err := addPairwiseScalarVectors(constantTerml, sLx) + if err != nil { + return nil, errors.Wrap(err, "rangeproof prove") + } + + // Calc r + // Instead of using the expression in the line, evaluate r() at x + ynsRx := multiplyScalarToScalarVector(x, linearTermr) + r, err := addPairwiseScalarVectors(constantTermr, ynsRx) + if err != nil { + return nil, errors.Wrap(err, "rangeproof prove") + } + + // Calc t hat + // For efficiency, instead of calculating the dot product, evaluate t() at x + zm := getknVector(z, m, prover.curve) + zsquarezm := multiplyScalarToScalarVector(z.Square(), zm) + sumv := prover.curve.Scalar.Zero() + for i := 0; i < m; i++ { + elem := zsquarezm[i].Mul(v[i]) + sumv = sumv.Add(elem) + } + + deltayzBatched, err := deltayzBatched(y, z, n, m, prover.curve) + if err != nil { + return nil, errors.Wrap(err, "rangeproof prove") + } + t0 := sumv.Add(deltayzBatched) + tLinear := t1.Mul(x) + tQuadratic := t2.Mul(x.Square()) + tHat := t0.Add(tLinear).Add(tQuadratic) + + // Calc tau_x (L61, pg20) + tau2xsquare := tau2.Mul(x.Square()) + tau1x := tau1.Mul(x) + zsum := prover.curve.Scalar.Zero() + zExp := z.Clone() + for j := 1; j < m+1; j++ { + zExp = zExp.Mul(z) + zsum = zsum.Add(zExp.Mul(gamma[j-1])) + } + taux := tau2xsquare.Add(tau1x).Add(zsum) + + // Calc mu (L62, pg20) + mu := alpha.Add(rho.Mul(x)) + + // Calc IPP (See section 4.2) + hPrime, err := gethPrime(proofH, y, prover.curve) + if err != nil { + return nil, errors.Wrap(err, "rangeproof prove") + } + + // P is redefined in batched case, see bottom equation on pg21. 
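+	// capPhmu below is P*h^(-mu): the batched commitment with its blinding term removed. rangeToIPP
+	// later adds (u*w)^tHat to it to form the final P used by the inner product argument.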
+ capPhmu := getPhmuBatched( + proofG, + hPrime, + proofGenerators.h, + capA, + capS, + x, + y, + z, + mu, + n, + m, + prover.curve, + ) + + wBytes := transcript.ExtractBytes([]byte("getw"), 64) + w, err := prover.curve.NewScalar().SetBytesWide(wBytes) + if err != nil { + return nil, errors.Wrap(err, "rangeproof prove") + } + + ipp, err := prover.ippProver.rangeToIPP( + proofG, + hPrime, + l, + r, + tHat, + capPhmu, + proofGenerators.u.Mul(w), + transcript, + ) + if err != nil { + return nil, errors.Wrap(err, "rangeproof prove") + } + + out := &RangeProof{ + capA: capA, + capS: capS, + capT1: capT1, + capT2: capT2, + taux: taux, + mu: mu, + tHat: tHat, + ipp: ipp, + curve: &prover.curve, + } + return out, nil +} + +// See final term of L71 on pg 21 +// Sigma_{j=1}^{m} z^{1+j} * (0^{(j-1)*n} || 2^{n} || 0^{(m-j)*n}). +func getSumTermrXBatched(z curves.Scalar, n, m int, curve curves.Curve) []curves.Scalar { + twoN := get2nVector(n, curve) + var out []curves.Scalar + // The final power should be one more than m + zExp := z.Clone() + for j := 0; j < m; j++ { + zExp = zExp.Mul(z) + elem := multiplyScalarToScalarVector(zExp, twoN) + out = append(out, elem...) + } + + return out +} + +func getcapVBatched(v, gamma []curves.Scalar, g, h curves.Point) []curves.Point { + out := make([]curves.Point, len(v)) + for i, vi := range v { + out[i] = getcapV(vi, gamma[i], g, h) + } + return out +} + +func getaLBatched(v []curves.Scalar, n int, curve curves.Curve) ([]curves.Scalar, error) { + var aL []curves.Scalar + for _, vi := range v { + aLi, err := getaL(vi, n, curve) + if err != nil { + return nil, err + } + aL = append(aL, aLi...) + } + return aL, nil +} + +func calcyzBatched( + capV []curves.Point, + capA, capS curves.Point, + transcript *merlin.Transcript, + curve curves.Curve, +) (curves.Scalar, curves.Scalar, error) { + // Add the A,S values to transcript + for _, capVi := range capV { + transcript.AppendMessage([]byte("addV"), capVi.ToAffineUncompressed()) + } + transcript.AppendMessage([]byte("addcapA"), capA.ToAffineUncompressed()) + transcript.AppendMessage([]byte("addcapS"), capS.ToAffineUncompressed()) + // Read 64 bytes twice from, set to scalar for y and z + yBytes := transcript.ExtractBytes([]byte("gety"), 64) + y, err := curve.NewScalar().SetBytesWide(yBytes) + if err != nil { + return nil, nil, errors.Wrap(err, "calcyz NewScalar SetBytesWide") + } + zBytes := transcript.ExtractBytes([]byte("getz"), 64) + z, err := curve.NewScalar().SetBytesWide(zBytes) + if err != nil { + return nil, nil, errors.Wrap(err, "calcyz NewScalar SetBytesWide") + } + + return y, z, nil +} + +func deltayzBatched(y, z curves.Scalar, n, m int, curve curves.Curve) (curves.Scalar, error) { + // z - z^2 + zMinuszsquare := z.Sub(z.Square()) + // 1^(n*m) + onenm := get1nVector(n*m, curve) + // <1^nm, y^nm> + onenmdotynm, err := innerProduct(onenm, getknVector(y, n*m, curve)) + if err != nil { + return nil, errors.Wrap(err, "deltayz") + } + // (z - z^2)*<1^n, y^n> + termFirst := zMinuszsquare.Mul(onenmdotynm) + + // <1^n, 2^n> + onendottwon, err := innerProduct(get1nVector(n, curve), get2nVector(n, curve)) + if err != nil { + return nil, errors.Wrap(err, "deltayz") + } + + termSecond := curve.Scalar.Zero() + zExp := z.Square() + for j := 1; j < m+1; j++ { + zExp = zExp.Mul(z) + elem := zExp.Mul(onendottwon) + termSecond = termSecond.Add(elem) + } + + // (z - z^2)*<1^n, y^n> - z^3*<1^n, 2^n> + out := termFirst.Sub(termSecond) + + return out, nil +} + +// Bottom equation on pg21. 
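+// It returns P*h^(-mu) for the aggregated proof, i.e.
+//
+//	A * S^x * g^(-z*1^(n*m)) * h'^(z*y^(n*m)) * Π_{j=1..m} (h'_[(j-1)n:jn])^(z^(1+j)*2^n) * h^(-mu)
+//
+// which both the prover and the verifier feed to the inner product argument as its P.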
+func getPhmuBatched( + proofG, proofHPrime []curves.Point, + h, capA, capS curves.Point, + x, y, z, mu curves.Scalar, + n, m int, + curve curves.Curve, +) curves.Point { + twoN := get2nVector(n, curve) + // h'^(z*y^n + z^2*2^n) + lastElem := curve.NewIdentityPoint() + zExp := z.Clone() + for j := 1; j < m+1; j++ { + // Get subvector of h + hSubvector := proofHPrime[(j-1)*n : j*n] + // z^(j+1) + zExp = zExp.Mul(z) + exp := multiplyScalarToScalarVector(zExp, twoN) + // Final elem + elem := curve.Point.SumOfProducts(hSubvector, exp) + lastElem = lastElem.Add(elem) + } + + zynm := multiplyScalarToScalarVector(z, getknVector(y, n*m, curve)) + hPrimezynm := curve.Point.SumOfProducts(proofHPrime, zynm) + lastElem = lastElem.Add(hPrimezynm) + + // S^x + capSx := capS.Mul(x) + + // g^-z --> -z*<1,g> + onenm := get1nVector(n*m, curve) + zNeg := z.Neg() + zinvonen := multiplyScalarToScalarVector(zNeg, onenm) + zgdotonen := curve.Point.SumOfProducts(proofG, zinvonen) + + // L66 on pg20 + P := capA.Add(capSx).Add(zgdotonen).Add(lastElem) + hmu := h.Mul(mu) + Phmu := P.Sub(hmu) + + return Phmu +} diff --git a/bulletproof/range_batch_prover_test.go b/bulletproof/range_batch_prover_test.go new file mode 100644 index 0000000..82ee031 --- /dev/null +++ b/bulletproof/range_batch_prover_test.go @@ -0,0 +1,102 @@ +package bulletproof + +import ( + crand "crypto/rand" + "testing" + + "github.com/gtank/merlin" + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +func TestRangeBatchProverHappyPath(t *testing.T) { + curve := curves.ED25519() + n := 256 + prover, err := NewRangeProver(n*4, []byte("rangeDomain"), []byte("ippDomain"), *curve) + require.NoError(t, err) + v1 := curve.Scalar.Random(crand.Reader) + v2 := curve.Scalar.Random(crand.Reader) + v3 := curve.Scalar.Random(crand.Reader) + v4 := curve.Scalar.Random(crand.Reader) + v := []curves.Scalar{v1, v2, v3, v4} + + gamma1 := curve.Scalar.Random(crand.Reader) + gamma2 := curve.Scalar.Random(crand.Reader) + gamma3 := curve.Scalar.Random(crand.Reader) + gamma4 := curve.Scalar.Random(crand.Reader) + gamma := []curves.Scalar{gamma1, gamma2, gamma3, gamma4} + g := curve.Point.Random(crand.Reader) + h := curve.Point.Random(crand.Reader) + u := curve.Point.Random(crand.Reader) + proofGenerators := RangeProofGenerators{ + g: g, + h: h, + u: u, + } + transcript := merlin.NewTranscript("test") + proof, err := prover.BatchProve(v, gamma, n, proofGenerators, transcript) + require.NoError(t, err) + require.NotNil(t, proof) + require.Equal(t, 10, len(proof.ipp.capLs)) + require.Equal(t, 10, len(proof.ipp.capRs)) +} + +func TestGetaLBatched(t *testing.T) { + curve := curves.ED25519() + v1 := curve.Scalar.Random(crand.Reader) + v2 := curve.Scalar.Random(crand.Reader) + v3 := curve.Scalar.Random(crand.Reader) + v4 := curve.Scalar.Random(crand.Reader) + v := []curves.Scalar{v1, v2, v3, v4} + aL, err := getaLBatched(v, 256, *curve) + require.NoError(t, err) + twoN := get2nVector(256, *curve) + for i := 1; i < len(v)+1; i++ { + vec := aL[(i-1)*256 : i*256] + product, err := innerProduct(vec, twoN) + require.NoError(t, err) + require.Zero(t, product.Cmp(v[i-1])) + } +} + +func TestRangeBatchProverMarshal(t *testing.T) { + curve := curves.ED25519() + n := 256 + prover, err := NewRangeProver(n*4, []byte("rangeDomain"), []byte("ippDomain"), *curve) + require.NoError(t, err) + v1 := curve.Scalar.Random(crand.Reader) + v2 := curve.Scalar.Random(crand.Reader) + v3 := curve.Scalar.Random(crand.Reader) + v4 := curve.Scalar.Random(crand.Reader) 
+ v := []curves.Scalar{v1, v2, v3, v4} + + gamma1 := curve.Scalar.Random(crand.Reader) + gamma2 := curve.Scalar.Random(crand.Reader) + gamma3 := curve.Scalar.Random(crand.Reader) + gamma4 := curve.Scalar.Random(crand.Reader) + gamma := []curves.Scalar{gamma1, gamma2, gamma3, gamma4} + g := curve.Point.Random(crand.Reader) + h := curve.Point.Random(crand.Reader) + u := curve.Point.Random(crand.Reader) + proofGenerators := RangeProofGenerators{ + g: g, + h: h, + u: u, + } + transcript := merlin.NewTranscript("test") + proof, err := prover.BatchProve(v, gamma, n, proofGenerators, transcript) + require.NoError(t, err) + + proofMarshaled := proof.MarshalBinary() + proofPrime := NewRangeProof(curve) + err = proofPrime.UnmarshalBinary(proofMarshaled) + require.NoError(t, err) + require.True(t, proof.capA.Equal(proofPrime.capA)) + require.True(t, proof.capS.Equal(proofPrime.capS)) + require.True(t, proof.capT1.Equal(proofPrime.capT1)) + require.True(t, proof.capT2.Equal(proofPrime.capT2)) + require.Zero(t, proof.taux.Cmp(proofPrime.taux)) + require.Zero(t, proof.mu.Cmp(proofPrime.mu)) + require.Zero(t, proof.tHat.Cmp(proofPrime.tHat)) +} diff --git a/bulletproof/range_batch_verifier.go b/bulletproof/range_batch_verifier.go new file mode 100644 index 0000000..fbd0cee --- /dev/null +++ b/bulletproof/range_batch_verifier.go @@ -0,0 +1,133 @@ +package bulletproof + +import ( + "github.com/gtank/merlin" + "github.com/pkg/errors" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +// VerifyBatched verifies a given batched range proof. +// It takes in a list of commitments to the secret values as capV instead of a single commitment to a single point +// when compared to the unbatched single range proof case. +func (verifier *RangeVerifier) VerifyBatched( + proof *RangeProof, + capV []curves.Point, + proofGenerators RangeProofGenerators, + n int, + transcript *merlin.Transcript, +) (bool, error) { + // Define nm as the total bits required for secrets, calculated as number of secrets * n + m := len(capV) + nm := n * m + // nm must be less than the number of generators generated + if nm > len(verifier.generators.G) { + return false, errors.New("ipp vector length must be less than maxVectorLength") + } + + // In case where len(a) is less than number of generators precomputed by prover, trim to length + proofG := verifier.generators.G[0:nm] + proofH := verifier.generators.H[0:nm] + + // Calc y,z,x from Fiat Shamir heuristic + y, z, err := calcyzBatched(capV, proof.capA, proof.capS, transcript, verifier.curve) + if err != nil { + return false, errors.Wrap(err, "rangeproof verify") + } + + x, err := calcx(proof.capT1, proof.capT2, transcript, verifier.curve) + if err != nil { + return false, errors.Wrap(err, "rangeproof verify") + } + + wBytes := transcript.ExtractBytes([]byte("getw"), 64) + w, err := verifier.curve.NewScalar().SetBytesWide(wBytes) + if err != nil { + return false, errors.Wrap(err, "rangeproof prove") + } + + // Calc delta(y,z), redefined for batched case on pg21 + deltayzBatched, err := deltayzBatched(y, z, n, m, verifier.curve) + if err != nil { + return false, errors.Wrap(err, "rangeproof verify") + } + + // Check tHat: L65, pg20 + // See equation 72 on pg21 + tHatIsValid := verifier.checktHatBatched( + proof, + capV, + proofGenerators.g, + proofGenerators.h, + deltayzBatched, + x, + z, + m, + ) + if !tHatIsValid { + return false, errors.New("rangeproof verify tHat is invalid") + } + + // Verify IPP + hPrime, err := gethPrime(proofH, y, verifier.curve) + if err != nil { + return false, 
errors.Wrap(err, "rangeproof verify") + } + + capPhmu := getPhmuBatched( + proofG, + hPrime, + proofGenerators.h, + proof.capA, + proof.capS, + x, + y, + z, + proof.mu, + n, + m, + verifier.curve, + ) + + ippVerified, err := verifier.ippVerifier.VerifyFromRangeProof( + proofG, + hPrime, + capPhmu, + proofGenerators.u.Mul(w), + proof.tHat, + proof.ipp, + transcript, + ) + if err != nil { + return false, errors.Wrap(err, "rangeproof verify") + } + + return ippVerified, nil +} + +// L65, pg20. +func (verifier *RangeVerifier) checktHatBatched( + proof *RangeProof, + capV []curves.Point, + g, h curves.Point, + deltayz, x, z curves.Scalar, + m int, +) bool { + // g^tHat * h^tau_x + gtHat := g.Mul(proof.tHat) + htaux := h.Mul(proof.taux) + lhs := gtHat.Add(htaux) + + // V^z^2 * g^delta(y,z) * Tau_1^x * Tau_2^x^2 + // g^delta(y,z) * V^(z^2*z^m) * Tau_1^x * Tau_2^x^2 + zm := getknVector(z, m, verifier.curve) + zsquarezm := multiplyScalarToScalarVector(z.Square(), zm) + capVzsquaretwom := verifier.curve.Point.SumOfProducts(capV, zsquarezm) + gdeltayz := g.Mul(deltayz) + capTau1x := proof.capT1.Mul(x) + capTau2xsquare := proof.capT2.Mul(x.Square()) + rhs := capVzsquaretwom.Add(gdeltayz).Add(capTau1x).Add(capTau2xsquare) + + // Compare lhs =? rhs + return lhs.Equal(rhs) +} diff --git a/bulletproof/range_batch_verifier_test.go b/bulletproof/range_batch_verifier_test.go new file mode 100644 index 0000000..3bfb442 --- /dev/null +++ b/bulletproof/range_batch_verifier_test.go @@ -0,0 +1,148 @@ +package bulletproof + +import ( + crand "crypto/rand" + "testing" + + "github.com/gtank/merlin" + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +func TestRangeBatchVerifyHappyPath(t *testing.T) { + curve := curves.ED25519() + n := 256 + prover, err := NewRangeProver(n*4, []byte("rangeDomain"), []byte("ippDomain"), *curve) + require.NoError(t, err) + v1 := curve.Scalar.Random(crand.Reader) + v2 := curve.Scalar.Random(crand.Reader) + v3 := curve.Scalar.Random(crand.Reader) + v4 := curve.Scalar.Random(crand.Reader) + v := []curves.Scalar{v1, v2, v3, v4} + gamma1 := curve.Scalar.Random(crand.Reader) + gamma2 := curve.Scalar.Random(crand.Reader) + gamma3 := curve.Scalar.Random(crand.Reader) + gamma4 := curve.Scalar.Random(crand.Reader) + gamma := []curves.Scalar{gamma1, gamma2, gamma3, gamma4} + g := curve.Point.Random(crand.Reader) + h := curve.Point.Random(crand.Reader) + u := curve.Point.Random(crand.Reader) + proofGenerators := RangeProofGenerators{ + g: g, + h: h, + u: u, + } + transcript := merlin.NewTranscript("test") + proof, err := prover.BatchProve(v, gamma, n, proofGenerators, transcript) + require.NoError(t, err) + + verifier, err := NewRangeVerifier(n*4, []byte("rangeDomain"), []byte("ippDomain"), *curve) + require.NoError(t, err) + transcriptVerifier := merlin.NewTranscript("test") + capV := getcapVBatched(v, gamma, g, h) + verified, err := verifier.VerifyBatched(proof, capV, proofGenerators, n, transcriptVerifier) + require.NoError(t, err) + require.True(t, verified) +} + +func TestRangeBatchVerifyNotInRange(t *testing.T) { + curve := curves.ED25519() + n := 2 + prover, err := NewRangeProver(n*4, []byte("rangeDomain"), []byte("ippDomain"), *curve) + require.NoError(t, err) + v1 := curve.Scalar.One() + v2 := curve.Scalar.Random(crand.Reader) + v3 := curve.Scalar.Random(crand.Reader) + v4 := curve.Scalar.Random(crand.Reader) + v := []curves.Scalar{v1, v2, v3, v4} + gamma1 := curve.Scalar.Random(crand.Reader) + gamma2 := curve.Scalar.Random(crand.Reader) + gamma3 
:= curve.Scalar.Random(crand.Reader) + gamma4 := curve.Scalar.Random(crand.Reader) + gamma := []curves.Scalar{gamma1, gamma2, gamma3, gamma4} + g := curve.Point.Random(crand.Reader) + h := curve.Point.Random(crand.Reader) + u := curve.Point.Random(crand.Reader) + proofGenerators := RangeProofGenerators{ + g: g, + h: h, + u: u, + } + transcript := merlin.NewTranscript("test") + _, err = prover.BatchProve(v, gamma, n, proofGenerators, transcript) + require.Error(t, err) +} + +func TestRangeBatchVerifyNonRandom(t *testing.T) { + curve := curves.ED25519() + n := 2 + prover, err := NewRangeProver(n*4, []byte("rangeDomain"), []byte("ippDomain"), *curve) + require.NoError(t, err) + v1 := curve.Scalar.One() + v2 := curve.Scalar.One() + v3 := curve.Scalar.One() + v4 := curve.Scalar.One() + v := []curves.Scalar{v1, v2, v3, v4} + gamma1 := curve.Scalar.Random(crand.Reader) + gamma2 := curve.Scalar.Random(crand.Reader) + gamma3 := curve.Scalar.Random(crand.Reader) + gamma4 := curve.Scalar.Random(crand.Reader) + gamma := []curves.Scalar{gamma1, gamma2, gamma3, gamma4} + g := curve.Point.Random(crand.Reader) + h := curve.Point.Random(crand.Reader) + u := curve.Point.Random(crand.Reader) + proofGenerators := RangeProofGenerators{ + g: g, + h: h, + u: u, + } + transcript := merlin.NewTranscript("test") + proof, err := prover.BatchProve(v, gamma, n, proofGenerators, transcript) + require.NoError(t, err) + + verifier, err := NewRangeVerifier(n*4, []byte("rangeDomain"), []byte("ippDomain"), *curve) + require.NoError(t, err) + transcriptVerifier := merlin.NewTranscript("test") + capV := getcapVBatched(v, gamma, g, h) + verified, err := verifier.VerifyBatched(proof, capV, proofGenerators, n, transcriptVerifier) + require.NoError(t, err) + require.True(t, verified) +} + +func TestRangeBatchVerifyInvalid(t *testing.T) { + curve := curves.ED25519() + n := 2 + prover, err := NewRangeProver(n*4, []byte("rangeDomain"), []byte("ippDomain"), *curve) + require.NoError(t, err) + v1 := curve.Scalar.One() + v2 := curve.Scalar.One() + v3 := curve.Scalar.One() + v4 := curve.Scalar.One() + v := []curves.Scalar{v1, v2, v3, v4} + gamma1 := curve.Scalar.Random(crand.Reader) + gamma2 := curve.Scalar.Random(crand.Reader) + gamma3 := curve.Scalar.Random(crand.Reader) + gamma4 := curve.Scalar.Random(crand.Reader) + gamma := []curves.Scalar{gamma1, gamma2, gamma3, gamma4} + g := curve.Point.Random(crand.Reader) + h := curve.Point.Random(crand.Reader) + u := curve.Point.Random(crand.Reader) + proofGenerators := RangeProofGenerators{ + g: g, + h: h, + u: u, + } + transcript := merlin.NewTranscript("test") + proof, err := prover.BatchProve(v, gamma, n, proofGenerators, transcript) + require.NoError(t, err) + + verifier, err := NewRangeVerifier(n*4, []byte("rangeDomain"), []byte("ippDomain"), *curve) + require.NoError(t, err) + transcriptVerifier := merlin.NewTranscript("test") + capV := getcapVBatched(v, gamma, g, h) + capV[0] = curve.Point.Random(crand.Reader) + verified, err := verifier.VerifyBatched(proof, capV, proofGenerators, n, transcriptVerifier) + require.Error(t, err) + require.False(t, verified) +} diff --git a/bulletproof/range_prover.go b/bulletproof/range_prover.go new file mode 100644 index 0000000..021b86f --- /dev/null +++ b/bulletproof/range_prover.go @@ -0,0 +1,514 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +// Package bulletproof implements the zero knowledge protocol bulletproofs as defined in https://eprint.iacr.org/2017/1066.pdf +package bulletproof + +import ( + crand "crypto/rand" + "math/big" + + "github.com/gtank/merlin" + "github.com/pkg/errors" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +// RangeProver is the struct used to create RangeProofs +// It specifies which curve to use and holds precomputed generators +// See NewRangeProver() for prover initialization. +type RangeProver struct { + curve curves.Curve + generators *ippGenerators + ippProver *InnerProductProver +} + +// RangeProof is the struct used to hold a range proof +// capA is a commitment to a_L and a_R using randomness alpha +// capS is a commitment to s_L and s_R using randomness rho +// capTau1,2 are commitments to t1,t2 respectively using randomness tau_1,2 +// tHat represents t(X) as defined on page 19 +// taux is the blinding factor for tHat +// ipp is the inner product proof used for compacting the transfer of l,r (See 4.2 on pg20). +type RangeProof struct { + capA, capS, capT1, capT2 curves.Point + taux, mu, tHat curves.Scalar + ipp *InnerProductProof + curve *curves.Curve +} + +type RangeProofGenerators struct { + g, h, u curves.Point +} + +// NewRangeProver initializes a new prover +// It uses the specified domain to generate generators for vectors of at most maxVectorLength +// A prover can be used to construct range proofs for vectors of length less than or equal to maxVectorLength +// A prover is defined by an explicit curve. +func NewRangeProver( + maxVectorLength int, + rangeDomain, ippDomain []byte, + curve curves.Curve, +) (*RangeProver, error) { + generators, err := getGeneratorPoints(maxVectorLength, rangeDomain, curve) + if err != nil { + return nil, errors.Wrap(err, "range NewRangeProver") + } + ippProver, err := NewInnerProductProver(maxVectorLength, ippDomain, curve) + if err != nil { + return nil, errors.Wrap(err, "range NewRangeProver") + } + return &RangeProver{curve: curve, generators: generators, ippProver: ippProver}, nil +} + +// NewRangeProof initializes a new RangeProof for a specified curve +// This should be used in tandem with UnmarshalBinary() to convert a marshaled proof into the struct. +func NewRangeProof(curve *curves.Curve) *RangeProof { + out := RangeProof{ + capA: nil, + capS: nil, + capT1: nil, + capT2: nil, + taux: nil, + mu: nil, + tHat: nil, + ipp: NewInnerProductProof(curve), + curve: curve, + } + + return &out +} + +// Prove uses the range prover to prove that some value v is within the range [0, 2^n] +// It implements the protocol defined on pgs 19,20 in https://eprint.iacr.org/2017/1066.pdf +// v is the value of which to prove the range +// n is the power that specifies the upper bound of the range, ie. 2^n +// gamma is a scalar used for as a blinding factor +// g, h, u are unique points used as generators for the blinding factor +// transcript is a merlin transcript to be used for the fiat shamir heuristic. 
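Before the prover implementation, a round-trip sketch may help orient the reader. It is distilled from TestRangeVerifyHappyPath later in this change and lives in package bulletproof because the RangeProofGenerators fields are unexported; the exampleProveVerify name, the transcript label, and the domain strings are illustrative choices rather than part of this change, but prover and verifier must agree on the same values.

```go
package bulletproof

import (
	crand "crypto/rand"

	"github.com/gtank/merlin"

	"github.com/sonr-io/sonr/crypto/core/curves"
)

// exampleProveVerify proves and verifies that a random ED25519 scalar lies in [0, 2^256].
func exampleProveVerify() (bool, error) {
	curve := curves.ED25519()
	n := 256

	prover, err := NewRangeProver(n, []byte("rangeDomain"), []byte("ippDomain"), *curve)
	if err != nil {
		return false, err
	}

	// Secret value and Pedersen blinding factor.
	v := curve.Scalar.Random(crand.Reader)
	gamma := curve.Scalar.Random(crand.Reader)

	// Independent generators shared by prover and verifier.
	gens := RangeProofGenerators{
		g: curve.Point.Random(crand.Reader),
		h: curve.Point.Random(crand.Reader),
		u: curve.Point.Random(crand.Reader),
	}

	proof, err := prover.Prove(v, gamma, n, gens, merlin.NewTranscript("example"))
	if err != nil {
		return false, err
	}

	verifier, err := NewRangeVerifier(n, []byte("rangeDomain"), []byte("ippDomain"), *curve)
	if err != nil {
		return false, err
	}

	// The tests recompute V = g^v * h^gamma locally because they know v and gamma;
	// in practice the prover transmits V alongside the proof.
	capV := getcapV(v, gamma, gens.g, gens.h)
	return verifier.Verify(proof, capV, gens, n, merlin.NewTranscript("example"))
}
```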
+func (prover *RangeProver) Prove( + v, gamma curves.Scalar, + n int, + proofGenerators RangeProofGenerators, + transcript *merlin.Transcript, +) (*RangeProof, error) { + // n must be less than or equal to the number of generators generated + if n > len(prover.generators.G) { + return nil, errors.New("ipp vector length must be less than or equal to maxVectorLength") + } + // In case where len(a) is less than number of generators precomputed by prover, trim to length + proofG := prover.generators.G[0:n] + proofH := prover.generators.H[0:n] + + // Check that v is in range [0, 2^n] + if bigZero := big.NewInt(0); v.BigInt().Cmp(bigZero) == -1 { + return nil, errors.New("v is less than 0") + } + + bigTwo := big.NewInt(2) + if n < 0 { + return nil, errors.New("n cannot be less than 0") + } + bigN := big.NewInt(int64(n)) + var bigTwoToN big.Int + bigTwoToN.Exp(bigTwo, bigN, nil) + if v.BigInt().Cmp(&bigTwoToN) == 1 { + return nil, errors.New("v is greater than 2^n") + } + + // L40 on pg19 + aL, err := getaL(v, n, prover.curve) + if err != nil { + return nil, errors.Wrap(err, "rangeproof prove") + } + onen := get1nVector(n, prover.curve) + // L41 on pg19 + aR, err := subtractPairwiseScalarVectors(aL, onen) + if err != nil { + return nil, errors.Wrap(err, "rangeproof prove") + } + + alpha := prover.curve.Scalar.Random(crand.Reader) + // Calc A (L44, pg19) + halpha := proofGenerators.h.Mul(alpha) + gaL := prover.curve.Point.SumOfProducts(proofG, aL) + haR := prover.curve.Point.SumOfProducts(proofH, aR) + capA := halpha.Add(gaL).Add(haR) + + // L45, 46, pg19 + sL := getBlindingVector(n, prover.curve) + sR := getBlindingVector(n, prover.curve) + rho := prover.curve.Scalar.Random(crand.Reader) + + // Calc S (L47, pg19) + hrho := proofGenerators.h.Mul(rho) + gsL := prover.curve.Point.SumOfProducts(proofG, sL) + hsR := prover.curve.Point.SumOfProducts(proofH, sR) + capS := hrho.Add(gsL).Add(hsR) + + // Fiat Shamir for y,z (L49, pg19) + capV := getcapV(v, gamma, proofGenerators.g, proofGenerators.h) + y, z, err := calcyz(capV, capA, capS, transcript, prover.curve) + if err != nil { + return nil, errors.Wrap(err, "rangeproof prove") + } + + // Calc t_1, t_2 + // See the l(X), r(X), t(X) equations on pg 19 + // Use l(X)'s and r(X)'s constant and linear terms to derive t_1 and t_2 + // (a_l - z*1^n) + zonen := multiplyScalarToScalarVector(z, onen) + constantTerml, err := subtractPairwiseScalarVectors(aL, zonen) + if err != nil { + return nil, errors.Wrap(err, "rangeproof prove") + } + linearTerml := sL + + // z^2 * 2^N + twoN := get2nVector(n, prover.curve) + zSquareTwon := multiplyScalarToScalarVector(z.Square(), twoN) + // a_r + z*1^n + aRPluszonen, err := addPairwiseScalarVectors(aR, zonen) + if err != nil { + return nil, errors.Wrap(err, "rangeproof prove") + } + yn := getknVector(y, n, prover.curve) + hadamard, err := multiplyPairwiseScalarVectors(yn, aRPluszonen) + if err != nil { + return nil, errors.Wrap(err, "rangeproof prove") + } + constantTermr, err := addPairwiseScalarVectors(hadamard, zSquareTwon) + if err != nil { + return nil, errors.Wrap(err, "rangeproof prove") + } + linearTermr, err := multiplyPairwiseScalarVectors(yn, sR) + if err != nil { + return nil, errors.Wrap(err, "rangeproof prove") + } + + // t_1 (as the linear coefficient) is the sum of the dot products of l(X)'s linear term dot r(X)'s constant term + // and r(X)'s linear term dot l(X)'s constant term + t1FirstTerm, err := innerProduct(linearTerml, constantTermr) + if err != nil { + return nil, errors.Wrap(err, "rangeproof 
prove") + } + t1SecondTerm, err := innerProduct(linearTermr, constantTerml) + if err != nil { + return nil, errors.Wrap(err, "rangeproof prove") + } + t1 := t1FirstTerm.Add(t1SecondTerm) + + // t_2 (as the quadratic coefficient) is the dot product of l(X)'s and r(X)'s linear terms + t2, err := innerProduct(linearTerml, linearTermr) + if err != nil { + return nil, errors.Wrap(err, "rangeproof prove") + } + + // L52, pg20 + tau1 := prover.curve.Scalar.Random(crand.Reader) + tau2 := prover.curve.Scalar.Random(crand.Reader) + + // T_1, T_2 (L53, pg20) + capT1 := proofGenerators.g.Mul(t1).Add(proofGenerators.h.Mul(tau1)) + capT2 := proofGenerators.g.Mul(t2).Add(proofGenerators.h.Mul(tau2)) + + // Fiat shamir for x (L55, pg20) + x, err := calcx(capT1, capT2, transcript, prover.curve) + if err != nil { + return nil, errors.Wrap(err, "rangeproof prove") + } + + // Calc l (L58, pg20) + // Instead of using the expression in the line, evaluate l() at x + sLx := multiplyScalarToScalarVector(x, linearTerml) + l, err := addPairwiseScalarVectors(constantTerml, sLx) + if err != nil { + return nil, errors.Wrap(err, "rangeproof prove") + } + + // Calc r (L59, pg20) + // Instead of using the expression in the line, evaluate r() at x + ynsRx := multiplyScalarToScalarVector(x, linearTermr) + r, err := addPairwiseScalarVectors(constantTermr, ynsRx) + if err != nil { + return nil, errors.Wrap(err, "rangeproof prove") + } + + // Calc t hat (L60, pg20) + // For efficiency, instead of calculating the dot product, evaluate t() at x + deltayz, err := deltayz(y, z, n, prover.curve) + if err != nil { + return nil, errors.Wrap(err, "rangeproof prove") + } + t0 := v.Mul(z.Square()).Add(deltayz) + tLinear := t1.Mul(x) + tQuadratic := t2.Mul(x.Square()) + tHat := t0.Add(tLinear).Add(tQuadratic) + + // Calc tau_x (L61, pg20) + tau2xsquare := tau2.Mul(x.Square()) + tau1x := tau1.Mul(x) + zsquaregamma := z.Square().Mul(gamma) + taux := tau2xsquare.Add(tau1x).Add(zsquaregamma) + + // Calc mu (L62, pg20) + mu := alpha.Add(rho.Mul(x)) + + // Calc IPP (See section 4.2) + hPrime, err := gethPrime(proofH, y, prover.curve) + if err != nil { + return nil, errors.Wrap(err, "rangeproof prove") + } + + capPhmu, err := getPhmu( + proofG, + hPrime, + proofGenerators.h, + capA, + capS, + x, + y, + z, + mu, + n, + prover.curve, + ) + if err != nil { + return nil, errors.Wrap(err, "rangeproof prove") + } + + wBytes := transcript.ExtractBytes([]byte("getw"), 64) + w, err := prover.curve.NewScalar().SetBytesWide(wBytes) + if err != nil { + return nil, errors.Wrap(err, "rangeproof prove") + } + + ipp, err := prover.ippProver.rangeToIPP( + proofG, + hPrime, + l, + r, + tHat, + capPhmu, + proofGenerators.u.Mul(w), + transcript, + ) + if err != nil { + return nil, errors.Wrap(err, "rangeproof prove") + } + + out := &RangeProof{ + capA: capA, + capS: capS, + capT1: capT1, + capT2: capT2, + taux: taux, + mu: mu, + tHat: tHat, + ipp: ipp, + curve: &prover.curve, + } + return out, nil +} + +// MarshalBinary takes a range proof and marshals into bytes. +func (proof *RangeProof) MarshalBinary() []byte { + var out []byte + out = append(out, proof.capA.ToAffineCompressed()...) + out = append(out, proof.capS.ToAffineCompressed()...) + out = append(out, proof.capT1.ToAffineCompressed()...) + out = append(out, proof.capT2.ToAffineCompressed()...) + out = append(out, proof.taux.Bytes()...) + out = append(out, proof.mu.Bytes()...) + out = append(out, proof.tHat.Bytes()...) + out = append(out, proof.ipp.MarshalBinary()...) 
+ + return out +} + +// UnmarshalBinary takes bytes of a marshaled proof and writes them into a range proof +// The range proof used should be from the output of NewRangeProof(). +func (proof *RangeProof) UnmarshalBinary(data []byte) error { + scalarLen := len(proof.curve.NewScalar().Bytes()) + pointLen := len(proof.curve.NewGeneratorPoint().ToAffineCompressed()) + ptr := 0 + // Get points + capA, err := proof.curve.Point.FromAffineCompressed(data[ptr : ptr+pointLen]) + if err != nil { + return errors.New("rangeProof UnmarshalBinary FromAffineCompressed") + } + proof.capA = capA + ptr += pointLen + capS, err := proof.curve.Point.FromAffineCompressed(data[ptr : ptr+pointLen]) + if err != nil { + return errors.New("rangeProof UnmarshalBinary FromAffineCompressed") + } + proof.capS = capS + ptr += pointLen + capT1, err := proof.curve.Point.FromAffineCompressed(data[ptr : ptr+pointLen]) + if err != nil { + return errors.New("rangeProof UnmarshalBinary FromAffineCompressed") + } + proof.capT1 = capT1 + ptr += pointLen + capT2, err := proof.curve.Point.FromAffineCompressed(data[ptr : ptr+pointLen]) + if err != nil { + return errors.New("rangeProof UnmarshalBinary FromAffineCompressed") + } + proof.capT2 = capT2 + ptr += pointLen + + // Get scalars + taux, err := proof.curve.NewScalar().SetBytes(data[ptr : ptr+scalarLen]) + if err != nil { + return errors.New("rangeProof UnmarshalBinary SetBytes") + } + proof.taux = taux + ptr += scalarLen + mu, err := proof.curve.NewScalar().SetBytes(data[ptr : ptr+scalarLen]) + if err != nil { + return errors.New("rangeProof UnmarshalBinary SetBytes") + } + proof.mu = mu + ptr += scalarLen + tHat, err := proof.curve.NewScalar().SetBytes(data[ptr : ptr+scalarLen]) + if err != nil { + return errors.New("rangeProof UnmarshalBinary SetBytes") + } + proof.tHat = tHat + ptr += scalarLen + + // Get IPP + err = proof.ipp.UnmarshalBinary(data[ptr:]) + if err != nil { + return errors.New("rangeProof UnmarshalBinary") + } + + return nil +} + +// checkRange validates whether some scalar v is within the range [0, 2^n - 1] +// It will return an error if v is less than 0 or greater than 2^n - 1 +// Otherwise it will return nil. +func checkRange(v curves.Scalar, n int) error { + bigOne := big.NewInt(1) + if n < 0 { + return errors.New("n cannot be less than 0") + } + var bigTwoToN big.Int + bigTwoToN.Lsh(bigOne, uint(n)) + if v.BigInt().Cmp(&bigTwoToN) == 1 { + return errors.New("v is greater than 2^n") + } + + return nil +} + +// getBlindingVector returns a vector of scalars used as blinding factors for commitments. +func getBlindingVector(length int, curve curves.Curve) []curves.Scalar { + vec := make([]curves.Scalar, length) + for i := 0; i < length; i++ { + vec[i] = curve.Scalar.Random(crand.Reader) + } + return vec +} + +// getcapV returns a commitment to v using blinding factor gamma. 
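+// In the paper's multiplicative notation this is the Pedersen commitment
+// V = g^v * h^gamma; the verifier is handed the same V so that the V^(z^2)
+// term of the tHat check (L65, pg20) matches.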
+func getcapV(v, gamma curves.Scalar, g, h curves.Point) curves.Point { + return h.Mul(gamma).Add(g.Mul(v)) +} + +// getaL obtains the bit vector representation of v +// See the a_L definition towards the bottom of pg 17 of https://eprint.iacr.org/2017/1066.pdf +func getaL(v curves.Scalar, n int, curve curves.Curve) ([]curves.Scalar, error) { + var err error + + vBytes := v.Bytes() + zero := curve.Scalar.Zero() + one := curve.Scalar.One() + aL := make([]curves.Scalar, n) + for j := 0; j < len(aL); j++ { + aL[j] = zero + } + for i := 0; i < n; i++ { + ithBit := vBytes[i>>3] >> (i & 0x07) & 0x01 + aL[i], err = cmoveScalar(zero, one, int(ithBit), curve) + if err != nil { + return nil, errors.Wrap(err, "getaL") + } + } + + return aL, nil +} + +// cmoveScalar provides a constant time operation that returns x if which is 0 and returns y if which is 1. +func cmoveScalar(x, y curves.Scalar, which int, curve curves.Curve) (curves.Scalar, error) { + if which != 0 && which != 1 { + return nil, errors.New("cmoveScalar which must be 0 or 1") + } + mask := -byte(which) + xBytes := x.Bytes() + yBytes := y.Bytes() + for i, xByte := range xBytes { + xBytes[i] ^= (xByte ^ yBytes[i]) & mask + } + out, err := curve.NewScalar().SetBytes(xBytes) + if err != nil { + return nil, errors.Wrap(err, "cmoveScalar SetBytes") + } + + return out, nil +} + +// calcyz uses a merlin transcript for Fiat Shamir +// It takes the current state of the transcript and appends the newly calculated capA and capS values +// Two new scalars are then read from the transcript +// See section 4.4 pg22 of https://eprint.iacr.org/2017/1066.pdf +func calcyz( + capV, capA, capS curves.Point, + transcript *merlin.Transcript, + curve curves.Curve, +) (curves.Scalar, curves.Scalar, error) { + // Add the A,S values to transcript + transcript.AppendMessage([]byte("addV"), capV.ToAffineUncompressed()) + transcript.AppendMessage([]byte("addcapA"), capA.ToAffineUncompressed()) + transcript.AppendMessage([]byte("addcapS"), capS.ToAffineUncompressed()) + // Read 64 bytes twice from, set to scalar for y and z + yBytes := transcript.ExtractBytes([]byte("gety"), 64) + y, err := curve.NewScalar().SetBytesWide(yBytes) + if err != nil { + return nil, nil, errors.Wrap(err, "calcyz NewScalar SetBytesWide") + } + zBytes := transcript.ExtractBytes([]byte("getz"), 64) + z, err := curve.NewScalar().SetBytesWide(zBytes) + if err != nil { + return nil, nil, errors.Wrap(err, "calcyz NewScalar SetBytesWide") + } + + return y, z, nil +} + +// calcx uses a merlin transcript for Fiat Shamir +// It takes the current state of the transcript and appends the newly calculated capT1 and capT2 values +// A new scalar is then read from the transcript +// See section 4.4 pg22 of https://eprint.iacr.org/2017/1066.pdf +func calcx( + capT1, capT2 curves.Point, + transcript *merlin.Transcript, + curve curves.Curve, +) (curves.Scalar, error) { + // Add the Tau1,2 values to transcript + transcript.AppendMessage([]byte("addcapT1"), capT1.ToAffineUncompressed()) + transcript.AppendMessage([]byte("addcapT2"), capT2.ToAffineUncompressed()) + // Read 64 bytes from, set to scalar + outBytes := transcript.ExtractBytes([]byte("getx"), 64) + x, err := curve.NewScalar().SetBytesWide(outBytes) + if err != nil { + return nil, errors.Wrap(err, "calcx NewScalar SetBytesWide") + } + + return x, nil +} diff --git a/bulletproof/range_prover_test.go b/bulletproof/range_prover_test.go new file mode 100644 index 0000000..1ca4b08 --- /dev/null +++ b/bulletproof/range_prover_test.go @@ -0,0 +1,86 @@ 
+package bulletproof + +import ( + crand "crypto/rand" + "testing" + + "github.com/gtank/merlin" + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +func TestRangeProverHappyPath(t *testing.T) { + curve := curves.ED25519() + n := 256 + prover, err := NewRangeProver(n, []byte("rangeDomain"), []byte("ippDomain"), *curve) + require.NoError(t, err) + v := curve.Scalar.Random(crand.Reader) + gamma := curve.Scalar.Random(crand.Reader) + g := curve.Point.Random(crand.Reader) + h := curve.Point.Random(crand.Reader) + u := curve.Point.Random(crand.Reader) + proofGenerators := RangeProofGenerators{ + g: g, + h: h, + u: u, + } + transcript := merlin.NewTranscript("test") + proof, err := prover.Prove(v, gamma, n, proofGenerators, transcript) + require.NoError(t, err) + require.NotNil(t, proof) + require.Equal(t, 8, len(proof.ipp.capLs)) + require.Equal(t, 8, len(proof.ipp.capRs)) +} + +func TestGetaL(t *testing.T) { + curve := curves.ED25519() + v := curve.Scalar.Random(crand.Reader) + aL, err := getaL(v, 256, *curve) + require.NoError(t, err) + twoN := get2nVector(256, *curve) + product, err := innerProduct(aL, twoN) + require.NoError(t, err) + require.Zero(t, product.Cmp(v)) +} + +func TestCmove(t *testing.T) { + curve := curves.ED25519() + two := curve.Scalar.One().Double() + four := two.Double() + out, err := cmoveScalar(two, four, 1, *curve) + require.NoError(t, err) + require.Zero(t, out.Cmp(four)) +} + +func TestRangeProverMarshal(t *testing.T) { + curve := curves.ED25519() + n := 256 + prover, err := NewRangeProver(n, []byte("rangeDomain"), []byte("ippDomain"), *curve) + require.NoError(t, err) + v := curve.Scalar.Random(crand.Reader) + gamma := curve.Scalar.Random(crand.Reader) + g := curve.Point.Random(crand.Reader) + h := curve.Point.Random(crand.Reader) + u := curve.Point.Random(crand.Reader) + proofGenerators := RangeProofGenerators{ + g: g, + h: h, + u: u, + } + transcript := merlin.NewTranscript("test") + proof, err := prover.Prove(v, gamma, n, proofGenerators, transcript) + require.NoError(t, err) + + proofMarshaled := proof.MarshalBinary() + proofPrime := NewRangeProof(curve) + err = proofPrime.UnmarshalBinary(proofMarshaled) + require.NoError(t, err) + require.True(t, proof.capA.Equal(proofPrime.capA)) + require.True(t, proof.capS.Equal(proofPrime.capS)) + require.True(t, proof.capT1.Equal(proofPrime.capT1)) + require.True(t, proof.capT2.Equal(proofPrime.capT2)) + require.Zero(t, proof.taux.Cmp(proofPrime.taux)) + require.Zero(t, proof.mu.Cmp(proofPrime.mu)) + require.Zero(t, proof.tHat.Cmp(proofPrime.tHat)) +} diff --git a/bulletproof/range_verifier.go b/bulletproof/range_verifier.go new file mode 100644 index 0000000..cda97b5 --- /dev/null +++ b/bulletproof/range_verifier.go @@ -0,0 +1,235 @@ +package bulletproof + +import ( + "github.com/gtank/merlin" + "github.com/pkg/errors" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +// RangeVerifier is the struct used to verify RangeProofs +// It specifies which curve to use and holds precomputed generators +// See NewRangeVerifier() for verifier initialization. +type RangeVerifier struct { + curve curves.Curve + generators *ippGenerators + ippVerifier *InnerProductVerifier +} + +// NewRangeVerifier initializes a new verifier +// It uses the specified domain to generate generators for vectors of at most maxVectorLength +// A verifier can be used to verify range proofs for vectors of length less than or equal to maxVectorLength +// A verifier is defined by an explicit curve. 
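+// The maxVectorLength and the rangeDomain/ippDomain labels must match the values
+// used to construct the corresponding RangeProver, otherwise the derived
+// generators and Fiat-Shamir transcripts diverge and verification fails.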
+func NewRangeVerifier( + maxVectorLength int, + rangeDomain, ippDomain []byte, + curve curves.Curve, +) (*RangeVerifier, error) { + generators, err := getGeneratorPoints(maxVectorLength, rangeDomain, curve) + if err != nil { + return nil, errors.Wrap(err, "range NewRangeProver") + } + ippVerifier, err := NewInnerProductVerifier(maxVectorLength, ippDomain, curve) + if err != nil { + return nil, errors.Wrap(err, "range NewRangeProver") + } + return &RangeVerifier{curve: curve, generators: generators, ippVerifier: ippVerifier}, nil +} + +// Verify verifies the given range proof inputs +// It implements the checking of L65 on pg 20 +// It also verifies the dot product of using the inner product proof\ +// capV is a commitment to v using blinding factor gamma +// n is the power that specifies the upper bound of the range, ie. 2^n +// g, h, u are unique points used as generators for the blinding factor +// transcript is a merlin transcript to be used for the fiat shamir heuristic. +func (verifier *RangeVerifier) Verify( + proof *RangeProof, + capV curves.Point, + proofGenerators RangeProofGenerators, + n int, + transcript *merlin.Transcript, +) (bool, error) { + // Length of vectors must be less than the number of generators generated + if n > len(verifier.generators.G) { + return false, errors.New("ipp vector length must be less than maxVectorLength") + } + // In case where len(a) is less than number of generators precomputed by prover, trim to length + proofG := verifier.generators.G[0:n] + proofH := verifier.generators.H[0:n] + + // Calc y,z,x from Fiat Shamir heuristic + y, z, err := calcyz(capV, proof.capA, proof.capS, transcript, verifier.curve) + if err != nil { + return false, errors.Wrap(err, "rangeproof verify") + } + + x, err := calcx(proof.capT1, proof.capT2, transcript, verifier.curve) + if err != nil { + return false, errors.Wrap(err, "rangeproof verify") + } + + wBytes := transcript.ExtractBytes([]byte("getw"), 64) + w, err := verifier.curve.NewScalar().SetBytesWide(wBytes) + if err != nil { + return false, errors.Wrap(err, "rangeproof prove") + } + + // Calc delta(y,z) + deltayz, err := deltayz(y, z, n, verifier.curve) + if err != nil { + return false, errors.Wrap(err, "rangeproof verify") + } + + // Check tHat: L65, pg20 + tHatIsValid := verifier.checktHat( + proof, + capV, + proofGenerators.g, + proofGenerators.h, + deltayz, + x, + z, + ) + if !tHatIsValid { + return false, errors.New("rangeproof verify tHat is invalid") + } + + // Verify IPP + hPrime, err := gethPrime(proofH, y, verifier.curve) + if err != nil { + return false, errors.Wrap(err, "rangeproof verify") + } + + capPhmu, err := getPhmu( + proofG, + hPrime, + proofGenerators.h, + proof.capA, + proof.capS, + x, + y, + z, + proof.mu, + n, + verifier.curve, + ) + if err != nil { + return false, errors.Wrap(err, "rangeproof verify") + } + + ippVerified, err := verifier.ippVerifier.VerifyFromRangeProof( + proofG, + hPrime, + capPhmu, + proofGenerators.u.Mul(w), + proof.tHat, + proof.ipp, + transcript, + ) + if err != nil { + return false, errors.Wrap(err, "rangeproof verify") + } + + return ippVerified, nil +} + +// L65, pg20. 
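+// Checks g^tHat * h^taux == V^(z^2) * g^delta(y,z) * T1^x * T2^(x^2)
+// (multiplicative notation; the code below computes both sides with the curve API's additive calls).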
+func (*RangeVerifier) checktHat( + proof *RangeProof, + capV, g, h curves.Point, + deltayz, x, z curves.Scalar, +) bool { + // g^tHat * h^tau_x + gtHat := g.Mul(proof.tHat) + htaux := h.Mul(proof.taux) + lhs := gtHat.Add(htaux) + + // V^z^2 * g^delta(y,z) * Tau_1^x * Tau_2^x^2 + capVzsquare := capV.Mul(z.Square()) + gdeltayz := g.Mul(deltayz) + capTau1x := proof.capT1.Mul(x) + capTau2xsquare := proof.capT2.Mul(x.Square()) + rhs := capVzsquare.Add(gdeltayz).Add(capTau1x).Add(capTau2xsquare) + + // Compare lhs =? rhs + return lhs.Equal(rhs) +} + +// gethPrime calculates new h prime generators as defined in L64 on pg20. +func gethPrime(h []curves.Point, y curves.Scalar, curve curves.Curve) ([]curves.Point, error) { + hPrime := make([]curves.Point, len(h)) + yInv, err := y.Invert() + yInvn := getknVector(yInv, len(h), curve) + if err != nil { + return nil, errors.Wrap(err, "gethPrime") + } + for i, hElem := range h { + hPrime[i] = hElem.Mul(yInvn[i]) + } + return hPrime, nil +} + +// Obtain P used for IPP verification +// See L67 on pg20 +// Note P on L66 includes blinding factor hmu, this method removes that factor. +func getPhmu( + proofG, proofHPrime []curves.Point, + h, capA, capS curves.Point, + x, y, z, mu curves.Scalar, + n int, + curve curves.Curve, +) (curves.Point, error) { + // h'^(z*y^n + z^2*2^n) + zyn := multiplyScalarToScalarVector(z, getknVector(y, n, curve)) + zsquaretwon := multiplyScalarToScalarVector(z.Square(), get2nVector(n, curve)) + elemLastExponent, err := addPairwiseScalarVectors(zyn, zsquaretwon) + if err != nil { + return nil, errors.Wrap(err, "getPhmu") + } + lastElem := curve.Point.SumOfProducts(proofHPrime, elemLastExponent) + + // S^x + capSx := capS.Mul(x) + + // g^-z --> -z*<1,g> + onen := get1nVector(n, curve) + zNeg := z.Neg() + zinvonen := multiplyScalarToScalarVector(zNeg, onen) + zgdotonen := curve.Point.SumOfProducts(proofG, zinvonen) + + // L66 on pg20 + P := capA.Add(capSx).Add(zgdotonen).Add(lastElem) + hmu := h.Mul(mu) + Phmu := P.Sub(hmu) + + return Phmu, nil +} + +// Delta function for delta(y,z), See (39) on pg18. 
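+// delta(y,z) = (z - z^2) * <1^n, y^n> - z^3 * <1^n, 2^n>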
+func deltayz(y, z curves.Scalar, n int, curve curves.Curve) (curves.Scalar, error) { + // z - z^2 + zMinuszsquare := z.Sub(z.Square()) + // 1^n + onen := get1nVector(n, curve) + // <1^n, y^n> + onendotyn, err := innerProduct(onen, getknVector(y, n, curve)) + if err != nil { + return nil, errors.Wrap(err, "deltayz") + } + // (z - z^2)*<1^n, y^n> + termFirst := zMinuszsquare.Mul(onendotyn) + + // <1^n, 2^n> + onendottwon, err := innerProduct(onen, get2nVector(n, curve)) + if err != nil { + return nil, errors.Wrap(err, "deltayz") + } + // z^3*<1^n, 2^n> + termSecond := z.Cube().Mul(onendottwon) + + // (z - z^2)*<1^n, y^n> - z^3*<1^n, 2^n> + out := termFirst.Sub(termSecond) + + return out, nil +} diff --git a/bulletproof/range_verifier_test.go b/bulletproof/range_verifier_test.go new file mode 100644 index 0000000..8436b42 --- /dev/null +++ b/bulletproof/range_verifier_test.go @@ -0,0 +1,87 @@ +package bulletproof + +import ( + crand "crypto/rand" + "testing" + + "github.com/gtank/merlin" + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +func TestRangeVerifyHappyPath(t *testing.T) { + curve := curves.ED25519() + n := 256 + prover, err := NewRangeProver(n, []byte("rangeDomain"), []byte("ippDomain"), *curve) + require.NoError(t, err) + v := curve.Scalar.Random(crand.Reader) + gamma := curve.Scalar.Random(crand.Reader) + g := curve.Point.Random(crand.Reader) + h := curve.Point.Random(crand.Reader) + u := curve.Point.Random(crand.Reader) + proofGenerators := RangeProofGenerators{ + g: g, + h: h, + u: u, + } + transcript := merlin.NewTranscript("test") + proof, err := prover.Prove(v, gamma, n, proofGenerators, transcript) + require.NoError(t, err) + + verifier, err := NewRangeVerifier(n, []byte("rangeDomain"), []byte("ippDomain"), *curve) + require.NoError(t, err) + transcriptVerifier := merlin.NewTranscript("test") + capV := getcapV(v, gamma, g, h) + verified, err := verifier.Verify(proof, capV, proofGenerators, n, transcriptVerifier) + require.NoError(t, err) + require.True(t, verified) +} + +func TestRangeVerifyNotInRange(t *testing.T) { + curve := curves.ED25519() + n := 2 + prover, err := NewRangeProver(n, []byte("rangeDomain"), []byte("ippDomain"), *curve) + require.NoError(t, err) + v := curve.Scalar.Random(crand.Reader) + gamma := curve.Scalar.Random(crand.Reader) + g := curve.Point.Random(crand.Reader) + h := curve.Point.Random(crand.Reader) + u := curve.Point.Random(crand.Reader) + proofGenerators := RangeProofGenerators{ + g: g, + h: h, + u: u, + } + transcript := merlin.NewTranscript("test") + _, err = prover.Prove(v, gamma, n, proofGenerators, transcript) + require.Error(t, err) +} + +func TestRangeVerifyNonRandom(t *testing.T) { + curve := curves.ED25519() + n := 2 + prover, err := NewRangeProver(n, []byte("rangeDomain"), []byte("ippDomain"), *curve) + require.NoError(t, err) + v := curve.Scalar.One() + gamma := curve.Scalar.Random(crand.Reader) + g := curve.Point.Random(crand.Reader) + h := curve.Point.Random(crand.Reader) + u := curve.Point.Random(crand.Reader) + proofGenerators := RangeProofGenerators{ + g: g, + h: h, + u: u, + } + transcript := merlin.NewTranscript("test") + proof, err := prover.Prove(v, gamma, n, proofGenerators, transcript) + require.NoError(t, err) + + verifier, err := NewRangeVerifier(n, []byte("rangeDomain"), []byte("ippDomain"), *curve) + require.NoError(t, err) + transcriptVerifier := merlin.NewTranscript("test") + capV := getcapV(v, gamma, g, h) + verified, err := verifier.Verify(proof, capV, proofGenerators, 
n, transcriptVerifier) + require.NoError(t, err) + require.True(t, verified) +} diff --git a/core/README.md b/core/README.md new file mode 100755 index 0000000..8cc6599 --- /dev/null +++ b/core/README.md @@ -0,0 +1,14 @@ +--- +aliases: [README] +tags: [] +title: README +linter-yaml-title-alias: README +date created: Wednesday, April 17th 2024, 4:11:40 pm +date modified: Thursday, April 18th 2024, 8:19:25 am +--- + +## Core Package + +The core package contains a set of primitives, including but not limited to various +elliptic curves, hashes, and commitment schemes. These primitives are used internally +and can also be used independently on their own externally. diff --git a/core/commit.go b/core/commit.go new file mode 100755 index 0000000..6725e86 --- /dev/null +++ b/core/commit.go @@ -0,0 +1,123 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package core + +import ( + "crypto/hmac" + crand "crypto/rand" + "crypto/sha256" + "crypto/subtle" + "encoding/json" + "fmt" + "hash" +) + +// Size of random values and hash outputs are determined by our hash function +const Size = sha256.Size + +type ( + // Commitment to a given message which can be later revealed. + // This is sent to and held by a verifier until the corresponding + // witness is provided. + Commitment []byte + + // Witness is sent to and opened by the verifier. This proves that + // committed message hasn't been altered by later information. + Witness struct { + Msg []byte + r [Size]byte + } + + // witnessJSON is used for un/marshaling. + witnessJSON struct { + Msg []byte + R [Size]byte + } +) + +// MarshalJSON encodes Witness in JSON +func (w Witness) MarshalJSON() ([]byte, error) { + return json.Marshal(witnessJSON{w.Msg, w.r}) +} + +// UnmarshalJSON decodes JSON into a Witness struct +func (w *Witness) UnmarshalJSON(data []byte) error { + witness := &witnessJSON{} + err := json.Unmarshal(data, witness) + if err != nil { + return err + } + w.Msg = witness.Msg + w.r = witness.R + return nil +} + +// Commit to a given message. Uses SHA256 as the hash function. +func Commit(msg []byte) (Commitment, *Witness, error) { + // Initialize our decommitment + d := Witness{msg, [Size]byte{}} + + // Generate a random nonce of the required length + n, err := crand.Read(d.r[:]) + // Ensure no errors retrieving nonce + if err != nil { + return nil, nil, err + } + + // Ensure we read all the bytes expected + if n != Size { + return nil, nil, fmt.Errorf( + "failed to read %v bytes from crypto/rand: received %v bytes", + Size, + n, + ) + } + // Compute the commitment: HMAC(Sha2, msg, key) + c, err := ComputeHMAC(sha256.New, msg, d.r[:]) + if err != nil { + return nil, nil, err + } + return c, &d, nil +} + +// Open a commitment and return true if the commitment/decommitment pair are valid. +// reference: spec.§2.4: Commitment Scheme +func Open(c Commitment, d Witness) (bool, error) { + // Ensure commitment is well-formed. + if len(c) != Size { + return false, fmt.Errorf("invalid commitment, wrong length. 
%v != %v", len(c), Size) + } + + // Re-compute the commitment: HMAC(Sha2, msg, key) + cʹ, err := ComputeHMAC(sha256.New, d.Msg, d.r[:]) + if err != nil { + return false, err + } + return subtle.ConstantTimeCompare(cʹ, c) == 1, nil +} + +// ComputeHMAC computes HMAC(hash_fn, msg, key) +// Takes in a hash function to use for HMAC +func ComputeHMAC(f func() hash.Hash, msg []byte, k []byte) ([]byte, error) { + if f == nil { + return nil, fmt.Errorf("hash function cannot be nil") + } + + mac := hmac.New(f, k) + w, err := mac.Write(msg) + + if w != len(msg) { + return nil, fmt.Errorf( + "bytes written to hash doesn't match expected: %v != %v", + w, + len(msg), + ) + } else if err != nil { + return nil, err + } + return mac.Sum(nil), nil +} diff --git a/core/commit_test.go b/core/commit_test.go new file mode 100755 index 0000000..72d3a8a --- /dev/null +++ b/core/commit_test.go @@ -0,0 +1,396 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package core + +import ( + "bytes" + "encoding/json" + "testing" + + "github.com/stretchr/testify/require" +) + +// An entry into our test table +type entry struct { + // Input + msg []byte + + // Result (actual, not expected) + commit Commitment + decommit *Witness + err error +} + +// Test inputs and placeholders for results that will be filled in +// during init() +var testResults = []entry{ + {[]byte("This is a test message"), nil, nil, nil}, + {[]byte("short msg"), nil, nil, nil}, + { + []byte( + "This input field is intentionally longer than the SHA256 block size to ensure that the entire message is processed", + ), + nil, nil, nil, + }, + { + []byte{ + 0xFB, + 0x1A, + 0x18, + 0x47, + 0x39, + 0x3C, + 0x9F, + 0x45, + 0x5F, + 0x29, + 0x4C, + 0x51, + 0x42, + 0x30, + 0xA6, + 0xB9, + }, + nil, nil, nil, + }, + // msg = \epsilon (empty string) + {[]byte{}, nil, nil, nil}, + // msg == nil + {nil, nil, nil, nil}, +} + +// Run our inputs through commit and record the outputs +func init() { + for i := range testResults { + entry := &testResults[i] + entry.commit, entry.decommit, entry.err = Commit(entry.msg) + } +} + +// Computing commitments should never produce errors +func TestCommitWithoutErrors(t *testing.T) { + for _, entry := range testResults { + if entry.err != nil { + t.Errorf("received Commit(%v): %v", entry.msg, entry.err) + } + } +} + +// Commitments should be 256b == 64B in length +func TestCommitmentsAreExpectedLength(t *testing.T) { + const expLen = 256 / 8 + for _, entry := range testResults { + if len(entry.commit) != expLen { + t.Errorf("commitment is not expected length: %v != %v", len(entry.commit), expLen) + } + } +} + +// Decommit cannot be nil +func TestCommmitProducesDecommit(t *testing.T) { + for _, entry := range testResults { + if entry.decommit == nil { + t.Errorf("decommit cannot be nil: Commit(%v)", entry.msg) + } + } +} + +// Decommit value should contain the same message +func TestCommmitProducesDecommitWithSameMessage(t *testing.T) { + for _, entry := range testResults { + if !bytes.Equal(entry.msg, entry.decommit.Msg) { + t.Errorf("decommit.msg != msg: %v != %v", entry.msg, entry.decommit.Msg) + } + } +} + +// Commitments should be unique +func TestCommmitProducesDistinctCommitments(t *testing.T) { + seen := make(map[[Size]byte]bool) + + // Check the pre-computed commitments for uniquness + for _, entry := range testResults { + + // Slices cannot be used as hash keys, so we need to copy into + // an array. Oh, go-lang. 
+ var cee [Size]byte + copy(cee[:], entry.commit) + + // Ensure each commit is unique + if seen[cee] { + t.Errorf("duplicate commit found: %v", cee) + } + seen[cee] = true + } +} + +// Commitments should be unique even for the same message since the nonce is +// randomly selected +func TestCommmitDistinctCommitments(t *testing.T) { + seen := make(map[[Size]byte]bool) + msg := []byte("black lives matter") + const iterations = 1000 + + // Check the pre-computed commitments for uniquness + for i := 0; i < iterations; i++ { + // Compute a commitment + c, _, err := Commit(msg) + if err != nil { + t.Error(err) + } + + // Slices cannot be used as hash keys, so copy into an array + var cee [Size]byte + copy(cee[:], []byte(c)) + + // Ensure each commit is unique + if seen[cee] { + t.Errorf("duplicate commit found: %v", cee) + } + seen[cee] = true + } +} + +// Nonces must be 256b = 64B +func TestCommmitNonceIsExpectedLength(t *testing.T) { + const expLen = 256 / 8 + + // Check the pre-computed nonces + for _, entry := range testResults { + if len(entry.decommit.r) != expLen { + t.Errorf("nonce is not expected length: %v != %v", len(entry.decommit.r), expLen) + } + } +} + +// Randomly selected nonces will be unique with overwhelming probability +func TestCommmitProducesDistinctNonces(t *testing.T) { + seen := make(map[[Size]byte]bool) + msg := []byte("black lives matter") + const iterations = 1000 + + // Check the pre-computed commitments for uniquness + for i := 0; i < iterations; i++ { + // Compute a commitment + _, dee, err := Commit(msg) + if err != nil { + t.Error(err) + } + + // Ensure each nonce is unique + if seen[dee.r] { + t.Errorf("duplicate nonce found: %v", dee.r) + } + seen[dee.r] = true + } +} + +func TestOpenOnValidCommitments(t *testing.T) { + for _, entry := range testResults { + + // Open each commitment + ok, err := Open(entry.commit, *entry.decommit) + // There should be no error + if err != nil { + t.Error(err) + } + + // The commitments should verify + if !ok { + t.Errorf("commitment failed to open: %v", entry.msg) + } + } +} + +func TestOpenOnModifiedNonce(t *testing.T) { + for _, entry := range testResults { + dʹ := copyWitness(entry.decommit) + + // Modify the nonce + dʹ.r[0] ^= 0x40 + + // Open and check for failure + ok, err := Open(entry.commit, *dʹ) + assertFailedOpen(t, ok, err) + } +} + +func TestOpenOnZeroPrefixNonce(t *testing.T) { + for _, entry := range testResults { + dʹ := copyWitness(entry.decommit) + + // Modify the nonce + dʹ.r[0] = 0x00 + dʹ.r[1] = 0x00 + dʹ.r[2] = 0x00 + dʹ.r[3] = 0x00 + dʹ.r[4] = 0x00 + dʹ.r[5] = 0x00 + dʹ.r[6] = 0x00 + dʹ.r[7] = 0x00 + dʹ.r[8] = 0x00 + dʹ.r[9] = 0x00 + dʹ.r[10] = 0x00 + + // Open and check for failure + ok, err := Open(entry.commit, *dʹ) + assertFailedOpen(t, ok, err) + } +} + +// Makes a deep copy of a Witness +func copyWitness(d *Witness) *Witness { + msg := make([]byte, len(d.Msg)) + var r [Size]byte + + copy(msg, d.Msg) + copy(r[:], d.r[:]) + return &Witness{msg, r} +} + +// Asserts that err != nil, and ok == false. 
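The helpers and failure-path tests below all negate the basic Commit/Open round trip from core/commit.go. For orientation, a minimal standalone sketch follows; the import path is inferred from the core/curves imports used elsewhere in this change.

```go
package main

import (
	"fmt"

	"github.com/sonr-io/sonr/crypto/core"
)

func main() {
	msg := []byte("example message")

	// Commit returns the commitment to publish now and the witness to reveal later.
	c, w, err := core.Commit(msg)
	if err != nil {
		panic(err)
	}

	// The verifier later opens the commitment with the revealed witness.
	ok, err := core.Open(c, *w)
	if err != nil {
		panic(err)
	}
	fmt.Println("commitment opened:", ok) // true for an unmodified witness
}
```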
+func assertFailedOpen(t *testing.T, ok bool, err error) { + // There should be no error + if err != nil { + t.Error(err) + } + + // But the commitments should fail + if ok { + t.Error("commitment was verified but was expected to fail") + } +} + +// An unrelated message should fail on open +func TestOpenOnNewMessage(t *testing.T) { + for _, entry := range testResults { + dʹ := copyWitness(entry.decommit) + + // Use a distinct message + dʹ.Msg = []byte("no one expects the spanish inquisition") + + // Open and check for failure + ok, err := Open(entry.commit, *dʹ) + assertFailedOpen(t, ok, err) + } +} + +// An appended message should fail on open +func TestOpenOnAppendedMessage(t *testing.T) { + for _, entry := range testResults { + dʹ := copyWitness(entry.decommit) + + // Modify the message + dʹ.Msg = []byte("no one expects the spanish inquisition") + + // Open and check for failure + ok, err := Open(entry.commit, *dʹ) + assertFailedOpen(t, ok, err) + } +} + +// A modified message should fail on open +func TestOpenOnModifiedMessage(t *testing.T) { + for _, entry := range testResults { + // Skip the empty string message for this test case + if len(entry.msg) == 0 { + continue + } + + // Modify the message _in situ_ + dʹ := copyWitness(entry.decommit) + dʹ.Msg[1] ^= 0x99 + + // Open and check for failure + ok, err := Open(entry.commit, *dʹ) + assertFailedOpen(t, ok, err) + } +} + +// A modified commitment should fail on open +func TestOpenOnModifiedCommitment(t *testing.T) { + for _, entry := range testResults { + // Copy and then modify the commitment + cʹ := make([]byte, Size) + copy(cʹ[:], entry.commit) + cʹ[6] ^= 0x33 + + // Open and check for failure + ok, err := Open(cʹ, *entry.decommit) + assertFailedOpen(t, ok, err) + } +} + +// An empty decommit should fail to open +func TestOpenOnDefaultDecommitObject(t *testing.T) { + for _, entry := range testResults { + // Open and check for failure + ok, err := Open(entry.commit, Witness{}) + assertFailedOpen(t, ok, err) + } +} + +// A nil commit should return an error +func TestOpenOnNilCommitment(t *testing.T) { + _, err := Open(nil, Witness{}) + assertError(t, err) +} + +// Verifies that err != nil +func assertError(t *testing.T, err error) { + if err == nil { + t.Error("expected an error but received nil") + } +} + +// Ill-formed commitment should produce an error +func TestOpenOnLongCommitment(t *testing.T) { + tooLong := make([]byte, Size+1) + _, err := Open(tooLong, Witness{}) + assertError(t, err) +} + +// Ill-formed commitment should produce an error +func TestOpenOnShortCommitment(t *testing.T) { + tooShort := make([]byte, Size-1) + _, err := Open(tooShort, Witness{}) + assertError(t, err) +} + +// Tests that marshal-unmarshal is the identity function +func TestWitnessMarshalRoundTrip(t *testing.T) { + expected := &Witness{ + []byte("I'm the dude. 
So that's what you call me"), + [Size]byte{0xAC}, + } + + // Marhal and test + jsonBytes, err := json.Marshal(expected) + require.NoError(t, err) + require.NotNil(t, jsonBytes) + + // Unmarshal and test + actual := &Witness{} + require.NoError(t, json.Unmarshal(jsonBytes, actual)) + require.Equal(t, expected.Msg, actual.Msg) + require.Equal(t, expected.r, actual.r) +} + +// Tests that marshal-unmarshal is the identity function +func TestCommitmentMarshalRoundTrip(t *testing.T) { + expected := Commitment([]byte("That or uh his-dudeness or duder or el duderino.")) + + // Marhal and test + jsonBytes, err := json.Marshal(expected) + require.NoError(t, err) + require.NotNil(t, jsonBytes) + + // Unmarshal and test + actual := Commitment{} + require.NoError(t, json.Unmarshal(jsonBytes, &actual)) + require.Equal(t, []byte(expected), []byte(actual)) +} diff --git a/core/curves/bls12377_curve.go b/core/curves/bls12377_curve.go new file mode 100644 index 0000000..26e87e5 --- /dev/null +++ b/core/curves/bls12377_curve.go @@ -0,0 +1,1226 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +// NOTE that the bls curves are NOT constant time. There is an open issue to address it: https://github.com/sonr-eco/kryptology/issues/233 + +package curves + +import ( + "crypto/rand" + "crypto/sha256" + "crypto/subtle" + "fmt" + "io" + "math/big" + + bls12377 "github.com/consensys/gnark-crypto/ecc/bls12-377" + "golang.org/x/crypto/sha3" + + "github.com/sonr-io/sonr/crypto/core" +) + +// See 'r' = https://eprint.iacr.org/2018/962.pdf Figure 16 +var ( + bls12377modulus = bhex("12ab655e9a2ca55660b44d1e5c37b00159aa76fed00000010a11800000000001") + g1Inf = bls12377.G1Affine{X: [6]uint64{}, Y: [6]uint64{}} + g2Inf = bls12377.G2Affine{ + X: bls12377.E2{A0: [6]uint64{}, A1: [6]uint64{}}, + Y: bls12377.E2{A0: [6]uint64{}, A1: [6]uint64{}}, + } +) + +type ScalarBls12377 struct { + value *big.Int + point Point +} + +type PointBls12377G1 struct { + value *bls12377.G1Affine +} + +type PointBls12377G2 struct { + value *bls12377.G2Affine +} + +type ScalarBls12377Gt struct { + value *bls12377.GT +} + +func (s *ScalarBls12377) Random(reader io.Reader) Scalar { + if reader == nil { + return nil + } + var seed [64]byte + _, _ = reader.Read(seed[:]) + return s.Hash(seed[:]) +} + +func (s *ScalarBls12377) Hash(bytes []byte) Scalar { + xmd, err := expandMsgXmd(sha256.New(), bytes, []byte("BLS12377_XMD:SHA-256_SSWU_RO_"), 48) + if err != nil { + return nil + } + v := new(big.Int).SetBytes(xmd) + return &ScalarBls12377{ + value: v.Mod(v, bls12377modulus), + point: s.point, + } +} + +func (s *ScalarBls12377) Zero() Scalar { + return &ScalarBls12377{ + value: big.NewInt(0), + point: s.point, + } +} + +func (s *ScalarBls12377) One() Scalar { + return &ScalarBls12377{ + value: big.NewInt(1), + point: s.point, + } +} + +func (s *ScalarBls12377) IsZero() bool { + return subtle.ConstantTimeCompare(s.value.Bytes(), []byte{}) == 1 +} + +func (s *ScalarBls12377) IsOne() bool { + return subtle.ConstantTimeCompare(s.value.Bytes(), []byte{1}) == 1 +} + +func (s *ScalarBls12377) IsOdd() bool { + return s.value.Bit(0) == 1 +} + +func (s *ScalarBls12377) IsEven() bool { + return s.value.Bit(0) == 0 +} + +func (s *ScalarBls12377) New(value int) Scalar { + v := big.NewInt(int64(value)) + if value < 0 { + v.Mod(v, bls12377modulus) + } + return &ScalarBls12377{ + value: v, + point: s.point, + } +} + +func (s *ScalarBls12377) Cmp(rhs Scalar) int { + r, ok := rhs.(*ScalarBls12377) + if ok { + return 
s.value.Cmp(r.value) + } else { + return -2 + } +} + +func (s *ScalarBls12377) Square() Scalar { + return &ScalarBls12377{ + value: new(big.Int).Exp(s.value, big.NewInt(2), bls12377modulus), + point: s.point, + } +} + +func (s *ScalarBls12377) Double() Scalar { + v := new(big.Int).Add(s.value, s.value) + return &ScalarBls12377{ + value: v.Mod(v, bls12377modulus), + point: s.point, + } +} + +func (s *ScalarBls12377) Invert() (Scalar, error) { + return &ScalarBls12377{ + value: new(big.Int).ModInverse(s.value, bls12377modulus), + point: s.point, + }, nil +} + +func (s *ScalarBls12377) Sqrt() (Scalar, error) { + return &ScalarBls12377{ + value: new(big.Int).ModSqrt(s.value, bls12377modulus), + point: s.point, + }, nil +} + +func (s *ScalarBls12377) Cube() Scalar { + return &ScalarBls12377{ + value: new(big.Int).Exp(s.value, big.NewInt(3), bls12377modulus), + point: s.point, + } +} + +func (s *ScalarBls12377) Add(rhs Scalar) Scalar { + r, ok := rhs.(*ScalarBls12377) + if ok { + v := new(big.Int).Add(s.value, r.value) + return &ScalarBls12377{ + value: v.Mod(v, bls12377modulus), + point: s.point, + } + } else { + return nil + } +} + +func (s *ScalarBls12377) Sub(rhs Scalar) Scalar { + r, ok := rhs.(*ScalarBls12377) + if ok { + v := new(big.Int).Sub(s.value, r.value) + return &ScalarBls12377{ + value: v.Mod(v, bls12377modulus), + point: s.point, + } + } else { + return nil + } +} + +func (s *ScalarBls12377) Mul(rhs Scalar) Scalar { + r, ok := rhs.(*ScalarBls12377) + if ok { + v := new(big.Int).Mul(s.value, r.value) + return &ScalarBls12377{ + value: v.Mod(v, bls12377modulus), + point: s.point, + } + } else { + return nil + } +} + +func (s *ScalarBls12377) MulAdd(y, z Scalar) Scalar { + return s.Mul(y).Add(z) +} + +func (s *ScalarBls12377) Div(rhs Scalar) Scalar { + r, ok := rhs.(*ScalarBls12377) + if ok { + v := new(big.Int).ModInverse(r.value, bls12377modulus) + v.Mul(v, s.value) + return &ScalarBls12377{ + value: v.Mod(v, bls12377modulus), + point: s.point, + } + } else { + return nil + } +} + +func (s *ScalarBls12377) Neg() Scalar { + z := new(big.Int).Neg(s.value) + return &ScalarBls12377{ + value: z.Mod(z, bls12377modulus), + point: s.point, + } +} + +func (s *ScalarBls12377) SetBigInt(v *big.Int) (Scalar, error) { + if v == nil { + return nil, fmt.Errorf("invalid value") + } + t := new(big.Int).Mod(v, bls12377modulus) + if t.Cmp(v) != 0 { + return nil, fmt.Errorf("invalid value") + } + return &ScalarBls12377{ + value: t, + point: s.point, + }, nil +} + +func (s *ScalarBls12377) BigInt() *big.Int { + return new(big.Int).Set(s.value) +} + +func (s *ScalarBls12377) Bytes() []byte { + var out [32]byte + return s.value.FillBytes(out[:]) +} + +func (s *ScalarBls12377) SetBytes(bytes []byte) (Scalar, error) { + value := new(big.Int).SetBytes(bytes) + t := new(big.Int).Mod(value, bls12377modulus) + if t.Cmp(value) != 0 { + return nil, fmt.Errorf("invalid byte sequence") + } + return &ScalarBls12377{ + value: t, + point: s.point, + }, nil +} + +func (s *ScalarBls12377) SetBytesWide(bytes []byte) (Scalar, error) { + if len(bytes) < 32 || len(bytes) > 128 { + return nil, fmt.Errorf("invalid byte sequence") + } + value := new(big.Int).SetBytes(bytes) + t := new(big.Int).Mod(value, bls12377modulus) + return &ScalarBls12377{ + value: t, + point: s.point, + }, nil +} + +func (s *ScalarBls12377) Point() Point { + return s.point.Identity() +} + +func (s *ScalarBls12377) Clone() Scalar { + return &ScalarBls12377{ + value: new(big.Int).Set(s.value), + point: s.point, + } +} + +func (s *ScalarBls12377) 
SetPoint(p Point) PairingScalar { + return &ScalarBls12377{ + value: new(big.Int).Set(s.value), + point: p, + } +} + +func (s *ScalarBls12377) Order() *big.Int { + return bls12377modulus +} + +func (s *ScalarBls12377) MarshalBinary() ([]byte, error) { + return scalarMarshalBinary(s) +} + +func (s *ScalarBls12377) UnmarshalBinary(input []byte) error { + sc, err := scalarUnmarshalBinary(input) + if err != nil { + return err + } + ss, ok := sc.(*ScalarBls12377) + if !ok { + return fmt.Errorf("invalid scalar") + } + s.value = ss.value + s.point = ss.point + return nil +} + +func (s *ScalarBls12377) MarshalText() ([]byte, error) { + return scalarMarshalText(s) +} + +func (s *ScalarBls12377) UnmarshalText(input []byte) error { + sc, err := scalarUnmarshalText(input) + if err != nil { + return err + } + ss, ok := sc.(*ScalarBls12377) + if !ok { + return fmt.Errorf("invalid scalar") + } + s.value = ss.value + s.point = ss.point + return nil +} + +func (s *ScalarBls12377) MarshalJSON() ([]byte, error) { + return scalarMarshalJson(s) +} + +func (s *ScalarBls12377) UnmarshalJSON(input []byte) error { + sc, err := scalarUnmarshalJson(input) + if err != nil { + return err + } + S, ok := sc.(*ScalarBls12377) + if !ok { + return fmt.Errorf("invalid type") + } + s.value = S.value + return nil +} + +func (p *PointBls12377G1) Random(reader io.Reader) Point { + var seed [64]byte + _, _ = reader.Read(seed[:]) + return p.Hash(seed[:]) +} + +func (p *PointBls12377G1) Hash(bytes []byte) Point { + domain := []byte("BLS12377G1_XMD:SHA-256_SVDW_RO_") + pt, err := bls12377.HashToG1(bytes, domain) + if err != nil { + return nil + } + return &PointBls12377G1{value: &pt} +} + +func (p *PointBls12377G1) Identity() Point { + t := bls12377.G1Affine{} + return &PointBls12377G1{ + value: t.Set(&g1Inf), + } +} + +func (p *PointBls12377G1) Generator() Point { + t := bls12377.G1Affine{} + _, _, g1Aff, _ := bls12377.Generators() + return &PointBls12377G1{ + value: t.Set(&g1Aff), + } +} + +func (p *PointBls12377G1) IsIdentity() bool { + return p.value.IsInfinity() +} + +func (p *PointBls12377G1) IsNegative() bool { + // According to https://github.com/zcash/librustzcash/blob/6e0364cd42a2b3d2b958a54771ef51a8db79dd29/pairing/src/bls12_381/README.md#serialization + // This bit represents the sign of the `y` coordinate which is what we want + + return (p.value.Bytes()[0]>>5)&1 == 1 +} + +func (p *PointBls12377G1) IsOnCurve() bool { + return p.value.IsOnCurve() +} + +func (p *PointBls12377G1) Double() Point { + t := &bls12377.G1Jac{} + t.FromAffine(p.value) + t.DoubleAssign() + value := bls12377.G1Affine{} + return &PointBls12377G1{value.FromJacobian(t)} +} + +func (p *PointBls12377G1) Scalar() Scalar { + return &ScalarBls12377{ + value: new(big.Int), + point: new(PointBls12377G1), + } +} + +func (p *PointBls12377G1) Neg() Point { + value := &bls12377.G1Affine{} + value.Neg(p.value) + return &PointBls12377G1{value} +} + +func (p *PointBls12377G1) Add(rhs Point) Point { + if rhs == nil { + return nil + } + r, ok := rhs.(*PointBls12377G1) + if ok { + value := &bls12377.G1Affine{} + return &PointBls12377G1{value.Add(p.value, r.value)} + } else { + return nil + } +} + +func (p *PointBls12377G1) Sub(rhs Point) Point { + if rhs == nil { + return nil + } + r, ok := rhs.(*PointBls12377G1) + if ok { + value := &bls12377.G1Affine{} + return &PointBls12377G1{value.Sub(p.value, r.value)} + } else { + return nil + } +} + +func (p *PointBls12377G1) Mul(rhs Scalar) Point { + if rhs == nil { + return nil + } + r, ok := rhs.(*ScalarBls12377) + if ok 
{ + value := &bls12377.G1Affine{} + return &PointBls12377G1{value.ScalarMultiplication(p.value, r.value)} + } else { + return nil + } +} + +func (p *PointBls12377G1) Equal(rhs Point) bool { + r, ok := rhs.(*PointBls12377G1) + if ok { + return p.value.Equal(r.value) + } else { + return false + } +} + +func (p *PointBls12377G1) Set(x, y *big.Int) (Point, error) { + if x.Cmp(core.Zero) == 0 && + y.Cmp(core.Zero) == 0 { + return p.Identity(), nil + } + var data [96]byte + x.FillBytes(data[:48]) + y.FillBytes(data[48:]) + value := &bls12377.G1Affine{} + _, err := value.SetBytes(data[:]) + if err != nil { + return nil, fmt.Errorf("invalid coordinates") + } + return &PointBls12377G1{value}, nil +} + +func (p *PointBls12377G1) ToAffineCompressed() []byte { + v := p.value.Bytes() + return v[:] +} + +func (p *PointBls12377G1) ToAffineUncompressed() []byte { + v := p.value.RawBytes() + return v[:] +} + +func (p *PointBls12377G1) FromAffineCompressed(bytes []byte) (Point, error) { + if len(bytes) != bls12377.SizeOfG1AffineCompressed { + return nil, fmt.Errorf("invalid point") + } + value := &bls12377.G1Affine{} + _, err := value.SetBytes(bytes) + if err != nil { + return nil, err + } + return &PointBls12377G1{value}, nil +} + +func (p *PointBls12377G1) FromAffineUncompressed(bytes []byte) (Point, error) { + if len(bytes) != bls12377.SizeOfG1AffineUncompressed { + return nil, fmt.Errorf("invalid point") + } + value := &bls12377.G1Affine{} + _, err := value.SetBytes(bytes) + if err != nil { + return nil, err + } + return &PointBls12377G1{value}, nil +} + +func (p *PointBls12377G1) CurveName() string { + return "BLS12377G1" +} + +func (p *PointBls12377G1) SumOfProducts(points []Point, scalars []Scalar) Point { + nScalars := make([]*big.Int, len(scalars)) + for i, sc := range scalars { + s, ok := sc.(*ScalarBls12377) + if !ok { + return nil + } + nScalars[i] = s.value + } + return sumOfProductsPippenger(points, nScalars) +} + +func (p *PointBls12377G1) OtherGroup() PairingPoint { + return new(PointBls12377G2).Identity().(PairingPoint) +} + +func (p *PointBls12377G1) Pairing(rhs PairingPoint) Scalar { + pt, ok := rhs.(*PointBls12377G2) + if !ok { + return nil + } + if !p.value.IsInSubGroup() || + !pt.value.IsInSubGroup() { + return nil + } + value := bls12377.GT{} + if p.value.IsInfinity() || pt.value.IsInfinity() { + return &ScalarBls12377Gt{&value} + } + value, err := bls12377.Pair([]bls12377.G1Affine{*p.value}, []bls12377.G2Affine{*pt.value}) + if err != nil { + return nil + } + + return &ScalarBls12377Gt{&value} +} + +func (p *PointBls12377G1) MultiPairing(points ...PairingPoint) Scalar { + return multiPairingBls12377(points...) 
+} + +func (p *PointBls12377G1) X() *big.Int { + b := p.value.RawBytes() + return new(big.Int).SetBytes(b[:48]) +} + +func (p *PointBls12377G1) Y() *big.Int { + b := p.value.RawBytes() + return new(big.Int).SetBytes(b[48:]) +} + +func (p *PointBls12377G1) Modulus() *big.Int { + return bls12377modulus +} + +func (p *PointBls12377G1) MarshalBinary() ([]byte, error) { + return pointMarshalBinary(p) +} + +func (p *PointBls12377G1) UnmarshalBinary(input []byte) error { + pt, err := pointUnmarshalBinary(input) + if err != nil { + return err + } + ppt, ok := pt.(*PointBls12377G1) + if !ok { + return fmt.Errorf("invalid point") + } + p.value = ppt.value + return nil +} + +func (p *PointBls12377G1) MarshalText() ([]byte, error) { + return pointMarshalText(p) +} + +func (p *PointBls12377G1) UnmarshalText(input []byte) error { + pt, err := pointUnmarshalText(input) + if err != nil { + return err + } + ppt, ok := pt.(*PointBls12377G1) + if !ok { + return fmt.Errorf("invalid point") + } + p.value = ppt.value + return nil +} + +func (p *PointBls12377G1) MarshalJSON() ([]byte, error) { + return pointMarshalJSON(p) +} + +func (p *PointBls12377G1) UnmarshalJSON(input []byte) error { + pt, err := pointUnmarshalJSON(input) + if err != nil { + return err + } + P, ok := pt.(*PointBls12377G1) + if !ok { + return fmt.Errorf("invalid type") + } + p.value = P.value + return nil +} + +func (p *PointBls12377G2) Random(reader io.Reader) Point { + var seed [64]byte + _, _ = reader.Read(seed[:]) + return p.Hash(seed[:]) +} + +func (p *PointBls12377G2) Hash(bytes []byte) Point { + domain := []byte("BLS12377G2_XMD:SHA-256_SVDW_RO_") + pt, err := bls12377.HashToG2(bytes, domain) + if err != nil { + return nil + } + return &PointBls12377G2{value: &pt} +} + +func (p *PointBls12377G2) Identity() Point { + t := bls12377.G2Affine{} + return &PointBls12377G2{ + value: t.Set(&g2Inf), + } +} + +func (p *PointBls12377G2) Generator() Point { + t := bls12377.G2Affine{} + _, _, _, g2Aff := bls12377.Generators() + return &PointBls12377G2{ + value: t.Set(&g2Aff), + } +} + +func (p *PointBls12377G2) IsIdentity() bool { + return p.value.IsInfinity() +} + +func (p *PointBls12377G2) IsNegative() bool { + // According to https://github.com/zcash/librustzcash/blob/6e0364cd42a2b3d2b958a54771ef51a8db79dd29/pairing/src/bls12_381/README.md#serialization + // This bit represents the sign of the `y` coordinate which is what we want + return (p.value.Bytes()[0]>>5)&1 == 1 +} + +func (p *PointBls12377G2) IsOnCurve() bool { + return p.value.IsOnCurve() +} + +func (p *PointBls12377G2) Double() Point { + t := &bls12377.G2Jac{} + t.FromAffine(p.value) + t.DoubleAssign() + value := bls12377.G2Affine{} + return &PointBls12377G2{value.FromJacobian(t)} +} + +func (p *PointBls12377G2) Scalar() Scalar { + return &ScalarBls12377{ + value: new(big.Int), + point: new(PointBls12377G2), + } +} + +func (p *PointBls12377G2) Neg() Point { + value := &bls12377.G2Affine{} + value.Neg(p.value) + return &PointBls12377G2{value} +} + +func (p *PointBls12377G2) Add(rhs Point) Point { + if rhs == nil { + return nil + } + r, ok := rhs.(*PointBls12377G2) + if ok { + value := &bls12377.G2Affine{} + return &PointBls12377G2{value.Add(p.value, r.value)} + } else { + return nil + } +} + +func (p *PointBls12377G2) Sub(rhs Point) Point { + if rhs == nil { + return nil + } + r, ok := rhs.(*PointBls12377G2) + if ok { + value := &bls12377.G2Affine{} + return &PointBls12377G2{value.Sub(p.value, r.value)} + } else { + return nil + } +} + +func (p *PointBls12377G2) Mul(rhs Scalar) Point { 
+ if rhs == nil { + return nil + } + r, ok := rhs.(*ScalarBls12377) + if ok { + value := &bls12377.G2Affine{} + return &PointBls12377G2{value.ScalarMultiplication(p.value, r.value)} + } else { + return nil + } +} + +func (p *PointBls12377G2) Equal(rhs Point) bool { + r, ok := rhs.(*PointBls12377G2) + if ok { + return p.value.Equal(r.value) + } else { + return false + } +} + +func (p *PointBls12377G2) Set(x, y *big.Int) (Point, error) { + if x.Cmp(core.Zero) == 0 && + y.Cmp(core.Zero) == 0 { + return p.Identity(), nil + } + var data [192]byte + x.FillBytes(data[:96]) + y.FillBytes(data[96:]) + value := &bls12377.G2Affine{} + _, err := value.SetBytes(data[:]) + if err != nil { + return nil, fmt.Errorf("invalid coordinates") + } + return &PointBls12377G2{value}, nil +} + +func (p *PointBls12377G2) ToAffineCompressed() []byte { + v := p.value.Bytes() + return v[:] +} + +func (p *PointBls12377G2) ToAffineUncompressed() []byte { + v := p.value.RawBytes() + return v[:] +} + +func (p *PointBls12377G2) FromAffineCompressed(bytes []byte) (Point, error) { + if len(bytes) != bls12377.SizeOfG2AffineCompressed { + return nil, fmt.Errorf("invalid point") + } + value := &bls12377.G2Affine{} + _, err := value.SetBytes(bytes) + if err != nil { + return nil, err + } + return &PointBls12377G2{value}, nil +} + +func (p *PointBls12377G2) FromAffineUncompressed(bytes []byte) (Point, error) { + if len(bytes) != bls12377.SizeOfG2AffineUncompressed { + return nil, fmt.Errorf("invalid point") + } + value := &bls12377.G2Affine{} + _, err := value.SetBytes(bytes) + if err != nil { + return nil, err + } + return &PointBls12377G2{value}, nil +} + +func (p *PointBls12377G2) CurveName() string { + return "BLS12377G2" +} + +func (p *PointBls12377G2) SumOfProducts(points []Point, scalars []Scalar) Point { + nScalars := make([]*big.Int, len(scalars)) + for i, sc := range scalars { + s, ok := sc.(*ScalarBls12377) + if !ok { + return nil + } + nScalars[i] = s.value + } + return sumOfProductsPippenger(points, nScalars) +} + +func (p *PointBls12377G2) OtherGroup() PairingPoint { + return new(PointBls12377G1).Identity().(PairingPoint) +} + +func (p *PointBls12377G2) Pairing(rhs PairingPoint) Scalar { + pt, ok := rhs.(*PointBls12377G1) + if !ok { + return nil + } + if !p.value.IsInSubGroup() || + !pt.value.IsInSubGroup() { + return nil + } + value := bls12377.GT{} + if p.value.IsInfinity() || pt.value.IsInfinity() { + return &ScalarBls12377Gt{&value} + } + value, err := bls12377.Pair([]bls12377.G1Affine{*pt.value}, []bls12377.G2Affine{*p.value}) + if err != nil { + return nil + } + + return &ScalarBls12377Gt{&value} +} + +func (p *PointBls12377G2) MultiPairing(points ...PairingPoint) Scalar { + return multiPairingBls12377(points...) 
+} + +func (p *PointBls12377G2) X() *big.Int { + b := p.value.RawBytes() + return new(big.Int).SetBytes(b[:96]) +} + +func (p *PointBls12377G2) Y() *big.Int { + b := p.value.RawBytes() + return new(big.Int).SetBytes(b[96:]) +} + +func (p *PointBls12377G2) Modulus() *big.Int { + return bls12377modulus +} + +func (p *PointBls12377G2) MarshalBinary() ([]byte, error) { + return pointMarshalBinary(p) +} + +func (p *PointBls12377G2) UnmarshalBinary(input []byte) error { + pt, err := pointUnmarshalBinary(input) + if err != nil { + return err + } + ppt, ok := pt.(*PointBls12377G2) + if !ok { + return fmt.Errorf("invalid point") + } + p.value = ppt.value + return nil +} + +func (p *PointBls12377G2) MarshalText() ([]byte, error) { + return pointMarshalText(p) +} + +func (p *PointBls12377G2) UnmarshalText(input []byte) error { + pt, err := pointUnmarshalText(input) + if err != nil { + return err + } + ppt, ok := pt.(*PointBls12377G2) + if !ok { + return fmt.Errorf("invalid point") + } + p.value = ppt.value + return nil +} + +func (p *PointBls12377G2) MarshalJSON() ([]byte, error) { + return pointMarshalJSON(p) +} + +func (p *PointBls12377G2) UnmarshalJSON(input []byte) error { + pt, err := pointUnmarshalJSON(input) + if err != nil { + return err + } + P, ok := pt.(*PointBls12377G2) + if !ok { + return fmt.Errorf("invalid type") + } + p.value = P.value + return nil +} + +func multiPairingBls12377(points ...PairingPoint) Scalar { + if len(points)%2 != 0 { + return nil + } + g1Arr := make([]bls12377.G1Affine, 0, len(points)/2) + g2Arr := make([]bls12377.G2Affine, 0, len(points)/2) + valid := true + for i := 0; i < len(points); i += 2 { + pt1, ok := points[i].(*PointBls12377G1) + valid = valid && ok + pt2, ok := points[i+1].(*PointBls12377G2) + valid = valid && ok + if valid { + valid = valid && pt1.value.IsInSubGroup() + valid = valid && pt2.value.IsInSubGroup() + } + if valid { + g1Arr = append(g1Arr, *pt1.value) + g2Arr = append(g2Arr, *pt2.value) + } + } + if !valid { + return nil + } + + value, err := bls12377.Pair(g1Arr, g2Arr) + if err != nil { + return nil + } + + return &ScalarBls12377Gt{&value} +} + +func (s *ScalarBls12377Gt) Random(reader io.Reader) Scalar { + const width = 48 + offset := 0 + var data [bls12377.SizeOfGT]byte + for i := 0; i < 12; i++ { + tv, err := rand.Int(reader, bls12377modulus) + if err != nil { + return nil + } + tv.FillBytes(data[offset*width : (offset+1)*width]) + offset++ + } + value := bls12377.GT{} + err := value.SetBytes(data[:]) + if err != nil { + return nil + } + return &ScalarBls12377Gt{&value} +} + +func (s *ScalarBls12377Gt) Hash(bytes []byte) Scalar { + reader := sha3.NewShake256() + n, err := reader.Write(bytes) + if err != nil { + return nil + } + if n != len(bytes) { + return nil + } + return s.Random(reader) +} + +func (s *ScalarBls12377Gt) Zero() Scalar { + var t [bls12377.SizeOfGT]byte + value := bls12377.GT{} + err := value.SetBytes(t[:]) + if err != nil { + return nil + } + return &ScalarBls12377Gt{&value} +} + +func (s *ScalarBls12377Gt) One() Scalar { + value := bls12377.GT{} + return &ScalarBls12377Gt{value.SetOne()} +} + +func (s *ScalarBls12377Gt) IsZero() bool { + r := byte(0) + b := s.value.Bytes() + for _, i := range b { + r |= i + } + return r == 0 +} + +func (s *ScalarBls12377Gt) IsOne() bool { + o := bls12377.GT{} + return s.value.Equal(o.SetOne()) +} + +func (s *ScalarBls12377Gt) MarshalBinary() ([]byte, error) { + return scalarMarshalBinary(s) +} + +func (s *ScalarBls12377Gt) UnmarshalBinary(input []byte) error { + sc, err := 
scalarUnmarshalBinary(input) + if err != nil { + return err + } + ss, ok := sc.(*ScalarBls12377Gt) + if !ok { + return fmt.Errorf("invalid scalar") + } + s.value = ss.value + return nil +} + +func (s *ScalarBls12377Gt) MarshalText() ([]byte, error) { + return scalarMarshalText(s) +} + +func (s *ScalarBls12377Gt) UnmarshalText(input []byte) error { + sc, err := scalarUnmarshalText(input) + if err != nil { + return err + } + ss, ok := sc.(*ScalarBls12377Gt) + if !ok { + return fmt.Errorf("invalid scalar") + } + s.value = ss.value + return nil +} + +func (s *ScalarBls12377Gt) MarshalJSON() ([]byte, error) { + return scalarMarshalJson(s) +} + +func (s *ScalarBls12377Gt) UnmarshalJSON(input []byte) error { + sc, err := scalarUnmarshalJson(input) + if err != nil { + return err + } + S, ok := sc.(*ScalarBls12377Gt) + if !ok { + return fmt.Errorf("invalid type") + } + s.value = S.value + return nil +} + +func (s *ScalarBls12377Gt) IsOdd() bool { + data := s.value.Bytes() + return data[len(data)-1]&1 == 1 +} + +func (s *ScalarBls12377Gt) IsEven() bool { + data := s.value.Bytes() + return data[len(data)-1]&1 == 0 +} + +func (s *ScalarBls12377Gt) New(input int) Scalar { + var data [576]byte + data[3] = byte(input >> 24 & 0xFF) + data[2] = byte(input >> 16 & 0xFF) + data[1] = byte(input >> 8 & 0xFF) + data[0] = byte(input & 0xFF) + + value := bls12377.GT{} + err := value.SetBytes(data[:]) + if err != nil { + return nil + } + return &ScalarBls12377Gt{&value} +} + +func (s *ScalarBls12377Gt) Cmp(rhs Scalar) int { + r, ok := rhs.(*ScalarBls12377Gt) + if ok && s.value.Equal(r.value) { + return 0 + } else { + return -2 + } +} + +func (s *ScalarBls12377Gt) Square() Scalar { + value := bls12377.GT{} + return &ScalarBls12377Gt{ + value.Square(s.value), + } +} + +func (s *ScalarBls12377Gt) Double() Scalar { + value := &bls12377.GT{} + return &ScalarBls12377Gt{ + value.Add(s.value, s.value), + } +} + +func (s *ScalarBls12377Gt) Invert() (Scalar, error) { + value := &bls12377.GT{} + return &ScalarBls12377Gt{ + value.Inverse(s.value), + }, nil +} + +func (s *ScalarBls12377Gt) Sqrt() (Scalar, error) { + // Not implemented + return nil, nil +} + +func (s *ScalarBls12377Gt) Cube() Scalar { + value := &bls12377.GT{} + value.Square(s.value) + value.Mul(value, s.value) + return &ScalarBls12377Gt{ + value, + } +} + +func (s *ScalarBls12377Gt) Add(rhs Scalar) Scalar { + r, ok := rhs.(*ScalarBls12377Gt) + if ok { + value := &bls12377.GT{} + return &ScalarBls12377Gt{ + value.Add(s.value, r.value), + } + } else { + return nil + } +} + +func (s *ScalarBls12377Gt) Sub(rhs Scalar) Scalar { + r, ok := rhs.(*ScalarBls12377Gt) + if ok { + value := &bls12377.GT{} + return &ScalarBls12377Gt{ + value.Sub(s.value, r.value), + } + } else { + return nil + } +} + +func (s *ScalarBls12377Gt) Mul(rhs Scalar) Scalar { + r, ok := rhs.(*ScalarBls12377Gt) + if ok { + value := &bls12377.GT{} + return &ScalarBls12377Gt{ + value.Mul(s.value, r.value), + } + } else { + return nil + } +} + +func (s *ScalarBls12377Gt) MulAdd(y, z Scalar) Scalar { + return s.Mul(y).Add(z) +} + +func (s *ScalarBls12377Gt) Div(rhs Scalar) Scalar { + r, ok := rhs.(*ScalarBls12377Gt) + if ok { + value := &bls12377.GT{} + value.Inverse(r.value) + value.Mul(value, s.value) + return &ScalarBls12377Gt{ + value, + } + } else { + return nil + } +} + +func (s *ScalarBls12377Gt) Neg() Scalar { + sValue := &bls12377.GT{} + sValue.SetOne() + value := &bls12377.GT{} + value.SetOne() + value.Sub(value, sValue) + return &ScalarBls12377Gt{ + value.Sub(value, s.value), + } +} + +func 
(s *ScalarBls12377Gt) SetBigInt(v *big.Int) (Scalar, error) { + var bytes [576]byte + v.FillBytes(bytes[:]) + return s.SetBytes(bytes[:]) +} + +func (s *ScalarBls12377Gt) BigInt() *big.Int { + b := s.value.Bytes() + return new(big.Int).SetBytes(b[:]) +} + +func (s *ScalarBls12377Gt) Point() Point { + p := &PointBls12377G1{} + return p.Identity() +} + +func (s *ScalarBls12377Gt) Bytes() []byte { + b := s.value.Bytes() + return b[:] +} + +func (s *ScalarBls12377Gt) SetBytes(bytes []byte) (Scalar, error) { + value := &bls12377.GT{} + err := value.SetBytes(bytes) + if err != nil { + return nil, err + } + return &ScalarBls12377Gt{value}, nil +} + +func (s *ScalarBls12377Gt) SetBytesWide(bytes []byte) (Scalar, error) { + l := len(bytes) + if l != 1152 { + return nil, fmt.Errorf("invalid byte sequence") + } + value := &bls12377.GT{} + err := value.SetBytes(bytes[:l/2]) + if err != nil { + return nil, err + } + value2 := &bls12377.GT{} + err = value2.SetBytes(bytes[l/2:]) + if err != nil { + return nil, err + } + value.Add(value, value2) + return &ScalarBls12377Gt{value}, nil +} + +func (s *ScalarBls12377Gt) Clone() Scalar { + value := &bls12377.GT{} + return &ScalarBls12377Gt{ + value.Set(s.value), + } +} diff --git a/core/curves/bls12381_curve.go b/core/curves/bls12381_curve.go new file mode 100644 index 0000000..bbf8e5b --- /dev/null +++ b/core/curves/bls12381_curve.go @@ -0,0 +1,1131 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package curves + +import ( + "fmt" + "io" + "math/big" + + "golang.org/x/crypto/sha3" + + "github.com/sonr-io/sonr/crypto/core/curves/native" + "github.com/sonr-io/sonr/crypto/core/curves/native/bls12381" + "github.com/sonr-io/sonr/crypto/internal" +) + +var bls12381modulus = bhex( + "1a0111ea397fe69a4b1ba7b6434bacd764774b84f38512bf6730d2a0f6b0f6241eabfffeb153ffffb9feffffffffaaab", +) + +type ScalarBls12381 struct { + Value *native.Field + point Point +} + +type PointBls12381G1 struct { + Value *bls12381.G1 +} + +type PointBls12381G2 struct { + Value *bls12381.G2 +} + +type ScalarBls12381Gt struct { + Value *bls12381.Gt +} + +func (s *ScalarBls12381) Random(reader io.Reader) Scalar { + if reader == nil { + return nil + } + var seed [64]byte + _, _ = reader.Read(seed[:]) + return s.Hash(seed[:]) +} + +func (s *ScalarBls12381) Hash(bytes []byte) Scalar { + dst := []byte("BLS12381_XMD:SHA-256_SSWU_RO_") + xmd := native.ExpandMsgXmd(native.EllipticPointHasherSha256(), bytes, dst, 48) + var t [64]byte + copy(t[:48], internal.ReverseScalarBytes(xmd)) + + return &ScalarBls12381{ + Value: bls12381.Bls12381FqNew().SetBytesWide(&t), + point: s.point, + } +} + +func (s *ScalarBls12381) Zero() Scalar { + return &ScalarBls12381{ + Value: bls12381.Bls12381FqNew().SetZero(), + point: s.point, + } +} + +func (s *ScalarBls12381) One() Scalar { + return &ScalarBls12381{ + Value: bls12381.Bls12381FqNew().SetOne(), + point: s.point, + } +} + +func (s *ScalarBls12381) IsZero() bool { + return s.Value.IsZero() == 1 +} + +func (s *ScalarBls12381) IsOne() bool { + return s.Value.IsOne() == 1 +} + +func (s *ScalarBls12381) IsOdd() bool { + bytes := s.Value.Bytes() + return bytes[0]&1 == 1 +} + +func (s *ScalarBls12381) IsEven() bool { + bytes := s.Value.Bytes() + return bytes[0]&1 == 0 +} + +func (s *ScalarBls12381) New(value int) Scalar { + t := bls12381.Bls12381FqNew() + v := big.NewInt(int64(value)) + if value < 0 { + v.Mod(v, t.Params.BiModulus) + } + return &ScalarBls12381{ + Value: t.SetBigInt(v), + point: s.point, + } +} + 
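The constructors above (`Zero`, `One`, `New`, `Hash`, `Random`) all return values implementing the generic `Scalar` interface declared later in this change in `core/curves/curve.go`. As a minimal usage sketch — not part of the patch, and assuming the import path `github.com/sonr-io/sonr/crypto/core/curves` used elsewhere in this commit — scalar arithmetic and the scalar/point interaction look roughly like this:

```go
package main

import (
	"crypto/rand"
	"fmt"

	"github.com/sonr-io/sonr/crypto/core/curves"
)

func main() {
	curve := curves.BLS12381G1()

	// Derive one scalar deterministically from a message and one at random.
	s := curve.Scalar.Hash([]byte("example message"))
	r := curve.Scalar.Random(rand.Reader)

	// Field arithmetic: s * s^-1 is the multiplicative identity.
	sInv, err := s.Invert()
	if err != nil {
		panic(err)
	}
	fmt.Println("s * s^-1 == 1:", s.Mul(sInv).IsOne())

	// Scalar/point interaction: (s + r)*G == s*G + r*G on G1.
	g := curve.Point.Generator()
	lhs := g.Mul(s.Add(r))
	rhs := g.Mul(s).Add(g.Mul(r))
	fmt.Println("(s+r)*G == s*G + r*G:", lhs.Equal(rhs))
}
```

The method names match the `Scalar` and `Point` interfaces introduced in this diff; only the `main` wrapper and the message strings are illustrative.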
+func (s *ScalarBls12381) Cmp(rhs Scalar) int { + r, ok := rhs.(*ScalarBls12381) + if ok { + return s.Value.Cmp(r.Value) + } else { + return -2 + } +} + +func (s *ScalarBls12381) Square() Scalar { + return &ScalarBls12381{ + Value: bls12381.Bls12381FqNew().Square(s.Value), + point: s.point, + } +} + +func (s *ScalarBls12381) Double() Scalar { + v := bls12381.Bls12381FqNew().Double(s.Value) + return &ScalarBls12381{ + Value: v, + point: s.point, + } +} + +func (s *ScalarBls12381) Invert() (Scalar, error) { + value, wasInverted := bls12381.Bls12381FqNew().Invert(s.Value) + if !wasInverted { + return nil, fmt.Errorf("inverse doesn't exist") + } + return &ScalarBls12381{ + Value: value, + point: s.point, + }, nil +} + +func (s *ScalarBls12381) Sqrt() (Scalar, error) { + value, wasSquare := bls12381.Bls12381FqNew().Sqrt(s.Value) + if !wasSquare { + return nil, fmt.Errorf("not a square") + } + return &ScalarBls12381{ + Value: value, + point: s.point, + }, nil +} + +func (s *ScalarBls12381) Cube() Scalar { + value := bls12381.Bls12381FqNew().Square(s.Value) + value.Mul(value, s.Value) + return &ScalarBls12381{ + Value: value, + point: s.point, + } +} + +func (s *ScalarBls12381) Add(rhs Scalar) Scalar { + r, ok := rhs.(*ScalarBls12381) + if ok { + return &ScalarBls12381{ + Value: bls12381.Bls12381FqNew().Add(s.Value, r.Value), + point: s.point, + } + } else { + return nil + } +} + +func (s *ScalarBls12381) Sub(rhs Scalar) Scalar { + r, ok := rhs.(*ScalarBls12381) + if ok { + return &ScalarBls12381{ + Value: bls12381.Bls12381FqNew().Sub(s.Value, r.Value), + point: s.point, + } + } else { + return nil + } +} + +func (s *ScalarBls12381) Mul(rhs Scalar) Scalar { + r, ok := rhs.(*ScalarBls12381) + if ok { + return &ScalarBls12381{ + Value: bls12381.Bls12381FqNew().Mul(s.Value, r.Value), + point: s.point, + } + } else { + return nil + } +} + +func (s *ScalarBls12381) MulAdd(y, z Scalar) Scalar { + return s.Mul(y).Add(z) +} + +func (s *ScalarBls12381) Div(rhs Scalar) Scalar { + r, ok := rhs.(*ScalarBls12381) + if ok { + v, wasInverted := bls12381.Bls12381FqNew().Invert(r.Value) + if !wasInverted { + return nil + } + v.Mul(v, s.Value) + return &ScalarBls12381{ + Value: v, + point: s.point, + } + } else { + return nil + } +} + +func (s *ScalarBls12381) Neg() Scalar { + return &ScalarBls12381{ + Value: bls12381.Bls12381FqNew().Neg(s.Value), + point: s.point, + } +} + +func (s *ScalarBls12381) SetBigInt(v *big.Int) (Scalar, error) { + if v == nil { + return nil, fmt.Errorf("invalid value") + } + return &ScalarBls12381{ + Value: bls12381.Bls12381FqNew().SetBigInt(v), + point: s.point, + }, nil +} + +func (s *ScalarBls12381) BigInt() *big.Int { + return s.Value.BigInt() +} + +func (s *ScalarBls12381) Bytes() []byte { + t := s.Value.Bytes() + return internal.ReverseScalarBytes(t[:]) +} + +func (s *ScalarBls12381) SetBytes(bytes []byte) (Scalar, error) { + if len(bytes) != 32 { + return nil, fmt.Errorf("invalid length") + } + var seq [32]byte + copy(seq[:], internal.ReverseScalarBytes(bytes)) + value, err := bls12381.Bls12381FqNew().SetBytes(&seq) + if err != nil { + return nil, err + } + return &ScalarBls12381{ + value, s.point, + }, nil +} + +func (s *ScalarBls12381) SetBytesWide(bytes []byte) (Scalar, error) { + if len(bytes) != 64 { + return nil, fmt.Errorf("invalid length") + } + var seq [64]byte + copy(seq[:], bytes) + return &ScalarBls12381{ + bls12381.Bls12381FqNew().SetBytesWide(&seq), s.point, + }, nil +} + +func (s *ScalarBls12381) Point() Point { + return s.point.Identity() +} + +func (s 
*ScalarBls12381) Clone() Scalar { + return &ScalarBls12381{ + Value: bls12381.Bls12381FqNew().Set(s.Value), + point: s.point, + } +} + +func (s *ScalarBls12381) SetPoint(p Point) PairingScalar { + return &ScalarBls12381{ + Value: bls12381.Bls12381FqNew().Set(s.Value), + point: p, + } +} + +func (s *ScalarBls12381) Order() *big.Int { + return s.Value.Params.BiModulus +} + +func (s *ScalarBls12381) MarshalBinary() ([]byte, error) { + return scalarMarshalBinary(s) +} + +func (s *ScalarBls12381) UnmarshalBinary(input []byte) error { + sc, err := scalarUnmarshalBinary(input) + if err != nil { + return err + } + ss, ok := sc.(*ScalarBls12381) + if !ok { + return fmt.Errorf("invalid scalar") + } + s.Value = ss.Value + s.point = ss.point + return nil +} + +func (s *ScalarBls12381) MarshalText() ([]byte, error) { + return scalarMarshalText(s) +} + +func (s *ScalarBls12381) UnmarshalText(input []byte) error { + sc, err := scalarUnmarshalText(input) + if err != nil { + return err + } + ss, ok := sc.(*ScalarBls12381) + if !ok { + return fmt.Errorf("invalid scalar") + } + s.Value = ss.Value + s.point = ss.point + return nil +} + +func (s *ScalarBls12381) MarshalJSON() ([]byte, error) { + return scalarMarshalJson(s) +} + +func (s *ScalarBls12381) UnmarshalJSON(input []byte) error { + sc, err := scalarUnmarshalJson(input) + if err != nil { + return err + } + S, ok := sc.(*ScalarBls12381) + if !ok { + return fmt.Errorf("invalid type") + } + s.Value = S.Value + return nil +} + +func (p *PointBls12381G1) Random(reader io.Reader) Point { + var seed [64]byte + _, _ = reader.Read(seed[:]) + return p.Hash(seed[:]) +} + +func (p *PointBls12381G1) Hash(bytes []byte) Point { + domain := []byte("BLS12381G1_XMD:SHA-256_SSWU_RO_") + pt := new(bls12381.G1).Hash(native.EllipticPointHasherSha256(), bytes, domain) + return &PointBls12381G1{Value: pt} +} + +func (p *PointBls12381G1) Identity() Point { + return &PointBls12381G1{ + Value: new(bls12381.G1).Identity(), + } +} + +func (p *PointBls12381G1) Generator() Point { + return &PointBls12381G1{ + Value: new(bls12381.G1).Generator(), + } +} + +func (p *PointBls12381G1) IsIdentity() bool { + return p.Value.IsIdentity() == 1 +} + +func (p *PointBls12381G1) IsNegative() bool { + // According to https://github.com/zcash/librustzcash/blob/6e0364cd42a2b3d2b958a54771ef51a8db79dd29/pairing/src/bls12_381/README.md#serialization + // This bit represents the sign of the `y` coordinate which is what we want + return (p.Value.ToCompressed()[0]>>5)&1 == 1 +} + +func (p *PointBls12381G1) IsOnCurve() bool { + return p.Value.IsOnCurve() == 1 +} + +func (p *PointBls12381G1) Double() Point { + return &PointBls12381G1{new(bls12381.G1).Double(p.Value)} +} + +func (p *PointBls12381G1) Scalar() Scalar { + return &ScalarBls12381{ + Value: bls12381.Bls12381FqNew(), + point: new(PointBls12381G1), + } +} + +func (p *PointBls12381G1) Neg() Point { + return &PointBls12381G1{new(bls12381.G1).Neg(p.Value)} +} + +func (p *PointBls12381G1) Add(rhs Point) Point { + if rhs == nil { + return nil + } + r, ok := rhs.(*PointBls12381G1) + if ok { + return &PointBls12381G1{new(bls12381.G1).Add(p.Value, r.Value)} + } else { + return nil + } +} + +func (p *PointBls12381G1) Sub(rhs Point) Point { + if rhs == nil { + return nil + } + r, ok := rhs.(*PointBls12381G1) + if ok { + return &PointBls12381G1{new(bls12381.G1).Sub(p.Value, r.Value)} + } else { + return nil + } +} + +func (p *PointBls12381G1) Mul(rhs Scalar) Point { + if rhs == nil { + return nil + } + r, ok := rhs.(*ScalarBls12381) + if ok { + return 
&PointBls12381G1{new(bls12381.G1).Mul(p.Value, r.Value)} + } else { + return nil + } +} + +func (p *PointBls12381G1) Equal(rhs Point) bool { + r, ok := rhs.(*PointBls12381G1) + if ok { + return p.Value.Equal(r.Value) == 1 + } else { + return false + } +} + +func (p *PointBls12381G1) Set(x, y *big.Int) (Point, error) { + value, err := new(bls12381.G1).SetBigInt(x, y) + if err != nil { + return nil, fmt.Errorf("invalid coordinates") + } + return &PointBls12381G1{value}, nil +} + +func (p *PointBls12381G1) ToAffineCompressed() []byte { + out := p.Value.ToCompressed() + return out[:] +} + +func (p *PointBls12381G1) ToAffineUncompressed() []byte { + out := p.Value.ToUncompressed() + return out[:] +} + +func (p *PointBls12381G1) FromAffineCompressed(bytes []byte) (Point, error) { + var b [bls12381.FieldBytes]byte + copy(b[:], bytes) + value, err := new(bls12381.G1).FromCompressed(&b) + if err != nil { + return nil, err + } + return &PointBls12381G1{value}, nil +} + +func (p *PointBls12381G1) FromAffineUncompressed(bytes []byte) (Point, error) { + var b [96]byte + copy(b[:], bytes) + value, err := new(bls12381.G1).FromUncompressed(&b) + if err != nil { + return nil, err + } + return &PointBls12381G1{value}, nil +} + +func (p *PointBls12381G1) CurveName() string { + return "BLS12381G1" +} + +func (p *PointBls12381G1) SumOfProducts(points []Point, scalars []Scalar) Point { + nPoints := make([]*bls12381.G1, len(points)) + nScalars := make([]*native.Field, len(scalars)) + for i, pt := range points { + pp, ok := pt.(*PointBls12381G1) + if !ok { + return nil + } + nPoints[i] = pp.Value + } + for i, sc := range scalars { + s, ok := sc.(*ScalarBls12381) + if !ok { + return nil + } + nScalars[i] = s.Value + } + value, err := new(bls12381.G1).SumOfProducts(nPoints, nScalars) + if err != nil { + return nil + } + return &PointBls12381G1{value} +} + +func (p *PointBls12381G1) OtherGroup() PairingPoint { + return new(PointBls12381G2).Identity().(PairingPoint) +} + +func (p *PointBls12381G1) Pairing(rhs PairingPoint) Scalar { + pt, ok := rhs.(*PointBls12381G2) + if !ok { + return nil + } + e := new(bls12381.Engine) + e.AddPair(p.Value, pt.Value) + + value := e.Result() + + return &ScalarBls12381Gt{value} +} + +func (p *PointBls12381G1) MultiPairing(points ...PairingPoint) Scalar { + return multiPairing(points...) 
+} + +func (p *PointBls12381G1) X() *big.Int { + return p.Value.GetX().BigInt() +} + +func (p *PointBls12381G1) Y() *big.Int { + return p.Value.GetY().BigInt() +} + +func (p *PointBls12381G1) Modulus() *big.Int { + return bls12381modulus +} + +func (p *PointBls12381G1) MarshalBinary() ([]byte, error) { + return pointMarshalBinary(p) +} + +func (p *PointBls12381G1) UnmarshalBinary(input []byte) error { + pt, err := pointUnmarshalBinary(input) + if err != nil { + return err + } + ppt, ok := pt.(*PointBls12381G1) + if !ok { + return fmt.Errorf("invalid point") + } + p.Value = ppt.Value + return nil +} + +func (p *PointBls12381G1) MarshalText() ([]byte, error) { + return pointMarshalText(p) +} + +func (p *PointBls12381G1) UnmarshalText(input []byte) error { + pt, err := pointUnmarshalText(input) + if err != nil { + return err + } + ppt, ok := pt.(*PointBls12381G1) + if !ok { + return fmt.Errorf("invalid point") + } + p.Value = ppt.Value + return nil +} + +func (p *PointBls12381G1) MarshalJSON() ([]byte, error) { + return pointMarshalJSON(p) +} + +func (p *PointBls12381G1) UnmarshalJSON(input []byte) error { + pt, err := pointUnmarshalJSON(input) + if err != nil { + return err + } + P, ok := pt.(*PointBls12381G1) + if !ok { + return fmt.Errorf("invalid type") + } + p.Value = P.Value + return nil +} + +func (p *PointBls12381G2) Random(reader io.Reader) Point { + var seed [64]byte + _, _ = reader.Read(seed[:]) + return p.Hash(seed[:]) +} + +func (p *PointBls12381G2) Hash(bytes []byte) Point { + domain := []byte("BLS12381G2_XMD:SHA-256_SSWU_RO_") + pt := new(bls12381.G2).Hash(native.EllipticPointHasherSha256(), bytes, domain) + return &PointBls12381G2{Value: pt} +} + +func (p *PointBls12381G2) Identity() Point { + return &PointBls12381G2{ + Value: new(bls12381.G2).Identity(), + } +} + +func (p *PointBls12381G2) Generator() Point { + return &PointBls12381G2{ + Value: new(bls12381.G2).Generator(), + } +} + +func (p *PointBls12381G2) IsIdentity() bool { + return p.Value.IsIdentity() == 1 +} + +func (p *PointBls12381G2) IsNegative() bool { + // According to https://github.com/zcash/librustzcash/blob/6e0364cd42a2b3d2b958a54771ef51a8db79dd29/pairing/src/bls12_381/README.md#serialization + // This bit represents the sign of the `y` coordinate which is what we want + return (p.Value.ToCompressed()[0]>>5)&1 == 1 +} + +func (p *PointBls12381G2) IsOnCurve() bool { + return p.Value.IsOnCurve() == 1 +} + +func (p *PointBls12381G2) Double() Point { + return &PointBls12381G2{new(bls12381.G2).Double(p.Value)} +} + +func (p *PointBls12381G2) Scalar() Scalar { + return &ScalarBls12381{ + Value: bls12381.Bls12381FqNew(), + point: new(PointBls12381G2), + } +} + +func (p *PointBls12381G2) Neg() Point { + return &PointBls12381G2{new(bls12381.G2).Neg(p.Value)} +} + +func (p *PointBls12381G2) Add(rhs Point) Point { + if rhs == nil { + return nil + } + r, ok := rhs.(*PointBls12381G2) + if ok { + return &PointBls12381G2{new(bls12381.G2).Add(p.Value, r.Value)} + } else { + return nil + } +} + +func (p *PointBls12381G2) Sub(rhs Point) Point { + if rhs == nil { + return nil + } + r, ok := rhs.(*PointBls12381G2) + if ok { + return &PointBls12381G2{new(bls12381.G2).Sub(p.Value, r.Value)} + } else { + return nil + } +} + +func (p *PointBls12381G2) Mul(rhs Scalar) Point { + if rhs == nil { + return nil + } + r, ok := rhs.(*ScalarBls12381) + if ok { + return &PointBls12381G2{new(bls12381.G2).Mul(p.Value, r.Value)} + } else { + return nil + } +} + +func (p *PointBls12381G2) Equal(rhs Point) bool { + r, ok := rhs.(*PointBls12381G2) 
+ if ok { + return p.Value.Equal(r.Value) == 1 + } else { + return false + } +} + +func (p *PointBls12381G2) Set(x, y *big.Int) (Point, error) { + value, err := new(bls12381.G2).SetBigInt(x, y) + if err != nil { + return nil, fmt.Errorf("invalid coordinates") + } + return &PointBls12381G2{value}, nil +} + +func (p *PointBls12381G2) ToAffineCompressed() []byte { + out := p.Value.ToCompressed() + return out[:] +} + +func (p *PointBls12381G2) ToAffineUncompressed() []byte { + out := p.Value.ToUncompressed() + return out[:] +} + +func (p *PointBls12381G2) FromAffineCompressed(bytes []byte) (Point, error) { + var b [bls12381.WideFieldBytes]byte + copy(b[:], bytes) + value, err := new(bls12381.G2).FromCompressed(&b) + if err != nil { + return nil, err + } + return &PointBls12381G2{value}, nil +} + +func (p *PointBls12381G2) FromAffineUncompressed(bytes []byte) (Point, error) { + var b [bls12381.DoubleWideFieldBytes]byte + copy(b[:], bytes) + value, err := new(bls12381.G2).FromUncompressed(&b) + if err != nil { + return nil, err + } + return &PointBls12381G2{value}, nil +} + +func (p *PointBls12381G2) CurveName() string { + return "BLS12381G2" +} + +func (p *PointBls12381G2) SumOfProducts(points []Point, scalars []Scalar) Point { + nPoints := make([]*bls12381.G2, len(points)) + nScalars := make([]*native.Field, len(scalars)) + for i, pt := range points { + pp, ok := pt.(*PointBls12381G2) + if !ok { + return nil + } + nPoints[i] = pp.Value + } + for i, sc := range scalars { + s, ok := sc.(*ScalarBls12381) + if !ok { + return nil + } + nScalars[i] = s.Value + } + value, err := new(bls12381.G2).SumOfProducts(nPoints, nScalars) + if err != nil { + return nil + } + return &PointBls12381G2{value} +} + +func (p *PointBls12381G2) OtherGroup() PairingPoint { + return new(PointBls12381G1).Identity().(PairingPoint) +} + +func (p *PointBls12381G2) Pairing(rhs PairingPoint) Scalar { + pt, ok := rhs.(*PointBls12381G1) + if !ok { + return nil + } + e := new(bls12381.Engine) + e.AddPair(pt.Value, p.Value) + + value := e.Result() + + return &ScalarBls12381Gt{value} +} + +func (p *PointBls12381G2) MultiPairing(points ...PairingPoint) Scalar { + return multiPairing(points...) 
+} + +func (p *PointBls12381G2) X() *big.Int { + x := p.Value.ToUncompressed() + return new(big.Int).SetBytes(x[:bls12381.WideFieldBytes]) +} + +func (p *PointBls12381G2) Y() *big.Int { + y := p.Value.ToUncompressed() + return new(big.Int).SetBytes(y[bls12381.WideFieldBytes:]) +} + +func (p *PointBls12381G2) Modulus() *big.Int { + return bls12381modulus +} + +func (p *PointBls12381G2) MarshalBinary() ([]byte, error) { + return pointMarshalBinary(p) +} + +func (p *PointBls12381G2) UnmarshalBinary(input []byte) error { + pt, err := pointUnmarshalBinary(input) + if err != nil { + return err + } + ppt, ok := pt.(*PointBls12381G2) + if !ok { + return fmt.Errorf("invalid point") + } + p.Value = ppt.Value + return nil +} + +func (p *PointBls12381G2) MarshalText() ([]byte, error) { + return pointMarshalText(p) +} + +func (p *PointBls12381G2) UnmarshalText(input []byte) error { + pt, err := pointUnmarshalText(input) + if err != nil { + return err + } + ppt, ok := pt.(*PointBls12381G2) + if !ok { + return fmt.Errorf("invalid point") + } + p.Value = ppt.Value + return nil +} + +func (p *PointBls12381G2) MarshalJSON() ([]byte, error) { + return pointMarshalJSON(p) +} + +func (p *PointBls12381G2) UnmarshalJSON(input []byte) error { + pt, err := pointUnmarshalJSON(input) + if err != nil { + return err + } + P, ok := pt.(*PointBls12381G2) + if !ok { + return fmt.Errorf("invalid type") + } + p.Value = P.Value + return nil +} + +func multiPairing(points ...PairingPoint) Scalar { + if len(points)%2 != 0 { + return nil + } + valid := true + eng := new(bls12381.Engine) + for i := 0; i < len(points); i += 2 { + pt1, ok := points[i].(*PointBls12381G1) + valid = valid && ok + pt2, ok := points[i+1].(*PointBls12381G2) + valid = valid && ok + if valid { + eng.AddPair(pt1.Value, pt2.Value) + } + } + if !valid { + return nil + } + + value := eng.Result() + return &ScalarBls12381Gt{value} +} + +func (s *ScalarBls12381Gt) Random(reader io.Reader) Scalar { + value, err := new(bls12381.Gt).Random(reader) + if err != nil { + return nil + } + return &ScalarBls12381Gt{value} +} + +func (s *ScalarBls12381Gt) Hash(bytes []byte) Scalar { + reader := sha3.NewShake256() + n, err := reader.Write(bytes) + if err != nil { + return nil + } + if n != len(bytes) { + return nil + } + return s.Random(reader) +} + +func (s *ScalarBls12381Gt) Zero() Scalar { + return &ScalarBls12381Gt{new(bls12381.Gt)} +} + +func (s *ScalarBls12381Gt) One() Scalar { + return &ScalarBls12381Gt{new(bls12381.Gt).SetOne()} +} + +func (s *ScalarBls12381Gt) IsZero() bool { + return s.Value.IsZero() == 1 +} + +func (s *ScalarBls12381Gt) IsOne() bool { + return s.Value.IsOne() == 1 +} + +func (s *ScalarBls12381Gt) MarshalBinary() ([]byte, error) { + return scalarMarshalBinary(s) +} + +func (s *ScalarBls12381Gt) UnmarshalBinary(input []byte) error { + sc, err := scalarUnmarshalBinary(input) + if err != nil { + return err + } + ss, ok := sc.(*ScalarBls12381Gt) + if !ok { + return fmt.Errorf("invalid scalar") + } + s.Value = ss.Value + return nil +} + +func (s *ScalarBls12381Gt) MarshalText() ([]byte, error) { + return scalarMarshalText(s) +} + +func (s *ScalarBls12381Gt) UnmarshalText(input []byte) error { + sc, err := scalarUnmarshalText(input) + if err != nil { + return err + } + ss, ok := sc.(*ScalarBls12381Gt) + if !ok { + return fmt.Errorf("invalid scalar") + } + s.Value = ss.Value + return nil +} + +func (s *ScalarBls12381Gt) MarshalJSON() ([]byte, error) { + return scalarMarshalJson(s) +} + +func (s *ScalarBls12381Gt) UnmarshalJSON(input []byte) error { + 
sc, err := scalarUnmarshalJson(input) + if err != nil { + return err + } + S, ok := sc.(*ScalarBls12381Gt) + if !ok { + return fmt.Errorf("invalid type") + } + s.Value = S.Value + return nil +} + +func (s *ScalarBls12381Gt) IsOdd() bool { + data := s.Value.Bytes() + return data[0]&1 == 1 +} + +func (s *ScalarBls12381Gt) IsEven() bool { + data := s.Value.Bytes() + return data[0]&1 == 0 +} + +func (s *ScalarBls12381Gt) New(input int) Scalar { + var data [bls12381.GtFieldBytes]byte + data[3] = byte(input >> 24 & 0xFF) + data[2] = byte(input >> 16 & 0xFF) + data[1] = byte(input >> 8 & 0xFF) + data[0] = byte(input & 0xFF) + + value, isCanonical := new(bls12381.Gt).SetBytes(&data) + if isCanonical != 1 { + return nil + } + return &ScalarBls12381Gt{value} +} + +func (s *ScalarBls12381Gt) Cmp(rhs Scalar) int { + r, ok := rhs.(*ScalarBls12381Gt) + if ok && s.Value.Equal(r.Value) == 1 { + return 0 + } else { + return -2 + } +} + +func (s *ScalarBls12381Gt) Square() Scalar { + return &ScalarBls12381Gt{ + new(bls12381.Gt).Square(s.Value), + } +} + +func (s *ScalarBls12381Gt) Double() Scalar { + return &ScalarBls12381Gt{ + new(bls12381.Gt).Double(s.Value), + } +} + +func (s *ScalarBls12381Gt) Invert() (Scalar, error) { + value, wasInverted := new(bls12381.Gt).Invert(s.Value) + if wasInverted != 1 { + return nil, fmt.Errorf("not invertible") + } + return &ScalarBls12381Gt{ + value, + }, nil +} + +func (s *ScalarBls12381Gt) Sqrt() (Scalar, error) { + // Not implemented + return nil, nil +} + +func (s *ScalarBls12381Gt) Cube() Scalar { + value := new(bls12381.Gt).Square(s.Value) + value.Add(value, s.Value) + return &ScalarBls12381Gt{ + value, + } +} + +func (s *ScalarBls12381Gt) Add(rhs Scalar) Scalar { + r, ok := rhs.(*ScalarBls12381Gt) + if ok { + return &ScalarBls12381Gt{ + new(bls12381.Gt).Add(s.Value, r.Value), + } + } else { + return nil + } +} + +func (s *ScalarBls12381Gt) Sub(rhs Scalar) Scalar { + r, ok := rhs.(*ScalarBls12381Gt) + if ok { + return &ScalarBls12381Gt{ + new(bls12381.Gt).Sub(s.Value, r.Value), + } + } else { + return nil + } +} + +func (s *ScalarBls12381Gt) Mul(rhs Scalar) Scalar { + r, ok := rhs.(*ScalarBls12381Gt) + if ok { + return &ScalarBls12381Gt{ + new(bls12381.Gt).Add(s.Value, r.Value), + } + } else { + return nil + } +} + +func (s *ScalarBls12381Gt) MulAdd(y, z Scalar) Scalar { + return s.Mul(y).Add(z) +} + +func (s *ScalarBls12381Gt) Div(rhs Scalar) Scalar { + r, ok := rhs.(*ScalarBls12381Gt) + if ok { + return &ScalarBls12381Gt{ + new(bls12381.Gt).Sub(s.Value, r.Value), + } + } else { + return nil + } +} + +func (s *ScalarBls12381Gt) Neg() Scalar { + return &ScalarBls12381Gt{ + new(bls12381.Gt).Neg(s.Value), + } +} + +func (s *ScalarBls12381Gt) SetBigInt(v *big.Int) (Scalar, error) { + var bytes [bls12381.GtFieldBytes]byte + v.FillBytes(bytes[:]) + return s.SetBytes(bytes[:]) +} + +func (s *ScalarBls12381Gt) BigInt() *big.Int { + bytes := s.Value.Bytes() + return new(big.Int).SetBytes(bytes[:]) +} + +func (s *ScalarBls12381Gt) Point() Point { + return &PointBls12381G1{Value: new(bls12381.G1).Identity()} +} + +func (s *ScalarBls12381Gt) Bytes() []byte { + bytes := s.Value.Bytes() + return bytes[:] +} + +func (s *ScalarBls12381Gt) SetBytes(bytes []byte) (Scalar, error) { + var b [bls12381.GtFieldBytes]byte + copy(b[:], bytes) + ss, isCanonical := new(bls12381.Gt).SetBytes(&b) + if isCanonical == 0 { + return nil, fmt.Errorf("invalid bytes") + } + return &ScalarBls12381Gt{ss}, nil +} + +func (s *ScalarBls12381Gt) SetBytesWide(bytes []byte) (Scalar, error) { + l := 
len(bytes) + if l != bls12381.GtFieldBytes*2 { + return nil, fmt.Errorf("invalid byte sequence") + } + var b [bls12381.GtFieldBytes]byte + copy(b[:], bytes[:bls12381.GtFieldBytes]) + + value, isCanonical := new(bls12381.Gt).SetBytes(&b) + if isCanonical == 0 { + return nil, fmt.Errorf("invalid byte sequence") + } + copy(b[:], bytes[bls12381.GtFieldBytes:]) + value2, isCanonical := new(bls12381.Gt).SetBytes(&b) + if isCanonical == 0 { + return nil, fmt.Errorf("invalid byte sequence") + } + value.Add(value, value2) + return &ScalarBls12381Gt{value}, nil +} + +func (s *ScalarBls12381Gt) Clone() Scalar { + return &ScalarBls12381Gt{ + Value: new(bls12381.Gt).Set(s.Value), + } +} diff --git a/core/curves/curve.go b/core/curves/curve.go new file mode 100644 index 0000000..5770d89 --- /dev/null +++ b/core/curves/curve.go @@ -0,0 +1,863 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package curves + +import ( + "crypto/elliptic" + "encoding/hex" + "encoding/json" + "fmt" + "hash" + "io" + "math/big" + "sync" + + "github.com/sonr-io/sonr/crypto/core/curves/native/bls12381" +) + +var ( + k256Initonce sync.Once + k256 Curve + + bls12381g1Initonce sync.Once + bls12381g1 Curve + + bls12381g2Initonce sync.Once + bls12381g2 Curve + + bls12377g1Initonce sync.Once + bls12377g1 Curve + + bls12377g2Initonce sync.Once + bls12377g2 Curve + + p256Initonce sync.Once + p256 Curve + + ed25519Initonce sync.Once + ed25519 Curve + + pallasInitonce sync.Once + pallas Curve +) + +const ( + K256Name = "secp256k1" + BLS12381G1Name = "BLS12381G1" + BLS12381G2Name = "BLS12381G2" + BLS12831Name = "BLS12831" + P256Name = "P-256" + ED25519Name = "ed25519" + PallasName = "pallas" + BLS12377G1Name = "BLS12377G1" + BLS12377G2Name = "BLS12377G2" + BLS12377Name = "BLS12377" +) + +const scalarBytes = 32 + +// Scalar represents an element of the scalar field \mathbb{F}_q +// of the elliptic curve construction. +type Scalar interface { + // Random returns a random scalar using the provided reader + // to retrieve bytes + Random(reader io.Reader) Scalar + // Hash the specific bytes in a manner to yield a + // uniformly distributed scalar + Hash(bytes []byte) Scalar + // Zero returns the additive identity element + Zero() Scalar + // One returns the multiplicative identity element + One() Scalar + // IsZero returns true if this element is the additive identity element + IsZero() bool + // IsOne returns true if this element is the multiplicative identity element + IsOne() bool + // IsOdd returns true if this element is odd + IsOdd() bool + // IsEven returns true if this element is even + IsEven() bool + // New returns an element with the value equal to `value` + New(value int) Scalar + // Cmp returns + // -2 if this element is in a different field than rhs + // -1 if this element is less than rhs + // 0 if this element is equal to rhs + // 1 if this element is greater than rhs + Cmp(rhs Scalar) int + // Square returns element*element + Square() Scalar + // Double returns element+element + Double() Scalar + // Invert returns element^-1 mod p + Invert() (Scalar, error) + // Sqrt computes the square root of this element if it exists. 
+ Sqrt() (Scalar, error) + // Cube returns element*element*element + Cube() Scalar + // Add returns element+rhs + Add(rhs Scalar) Scalar + // Sub returns element-rhs + Sub(rhs Scalar) Scalar + // Mul returns element*rhs + Mul(rhs Scalar) Scalar + // MulAdd returns element * y + z mod p + MulAdd(y, z Scalar) Scalar + // Div returns element*rhs^-1 mod p + Div(rhs Scalar) Scalar + // Neg returns -element mod p + Neg() Scalar + // SetBigInt returns this element set to the value of v + SetBigInt(v *big.Int) (Scalar, error) + // BigInt returns this element as a big integer + BigInt() *big.Int + // Point returns the associated point for this scalar + Point() Point + // Bytes returns the canonical byte representation of this scalar + Bytes() []byte + // SetBytes creates a scalar from the canonical representation expecting the exact number of bytes needed to represent the scalar + SetBytes(bytes []byte) (Scalar, error) + // SetBytesWide creates a scalar expecting double the exact number of bytes needed to represent the scalar which is reduced by the modulus + SetBytesWide(bytes []byte) (Scalar, error) + // Clone returns a cloned Scalar of this value + Clone() Scalar +} + +type PairingScalar interface { + Scalar + SetPoint(p Point) PairingScalar +} + +func unmarshalScalar(input []byte) (*Curve, []byte, error) { + sep := byte(':') + i := 0 + for ; i < len(input); i++ { + if input[i] == sep { + break + } + } + name := string(input[:i]) + curve := GetCurveByName(name) + if curve == nil { + return nil, nil, fmt.Errorf("unrecognized curve") + } + return curve, input[i+1:], nil +} + +func scalarMarshalBinary(scalar Scalar) ([]byte, error) { + // All scalars are 32 bytes long + // The last 32 bytes are the actual value + // The first remaining bytes are the curve name + // separated by a colon + name := []byte(scalar.Point().CurveName()) + output := make([]byte, len(name)+1+scalarBytes) + copy(output[:len(name)], name) + output[len(name)] = byte(':') + copy(output[len(name)+1:], scalar.Bytes()) + return output, nil +} + +func scalarUnmarshalBinary(input []byte) (Scalar, error) { + // All scalars are 32 bytes long + // The first 32 bytes are the actual value + // The remaining bytes are the curve name + if len(input) < scalarBytes+1+len(P256Name) { + return nil, fmt.Errorf("invalid byte sequence") + } + sc, data, err := unmarshalScalar(input) + if err != nil { + return nil, err + } + return sc.Scalar.SetBytes(data) +} + +func scalarMarshalText(scalar Scalar) ([]byte, error) { + // All scalars are 32 bytes long + // For text encoding we put the curve name first for readability + // separated by a colon, then the hex encoding of the scalar + // which avoids the base64 weakness with strict mode or not + name := []byte(scalar.Point().CurveName()) + output := make([]byte, len(name)+1+scalarBytes*2) + copy(output[:len(name)], name) + output[len(name)] = byte(':') + _ = hex.Encode(output[len(name)+1:], scalar.Bytes()) + return output, nil +} + +func scalarUnmarshalText(input []byte) (Scalar, error) { + if len(input) < scalarBytes*2+len(P256Name)+1 { + return nil, fmt.Errorf("invalid byte sequence") + } + curve, data, err := unmarshalScalar(input) + if err != nil { + return nil, err + } + var t [scalarBytes]byte + _, err = hex.Decode(t[:], data) + if err != nil { + return nil, err + } + return curve.Scalar.SetBytes(t[:]) +} + +func scalarMarshalJson(scalar Scalar) ([]byte, error) { + m := make(map[string]string, 2) + m["type"] = scalar.Point().CurveName() + m["value"] = hex.EncodeToString(scalar.Bytes()) + return 
json.Marshal(m) +} + +func scalarUnmarshalJson(input []byte) (Scalar, error) { + var m map[string]string + + err := json.Unmarshal(input, &m) + if err != nil { + return nil, err + } + curve := GetCurveByName(m["type"]) + if curve == nil { + return nil, fmt.Errorf("invalid type") + } + s, err := hex.DecodeString(m["value"]) + if err != nil { + return nil, err + } + S, err := curve.Scalar.SetBytes(s) + if err != nil { + return nil, err + } + return S, nil +} + +// Point represents an elliptic curve point +type Point interface { + Random(reader io.Reader) Point + Hash(bytes []byte) Point + Identity() Point + Generator() Point + IsIdentity() bool + IsNegative() bool + IsOnCurve() bool + Double() Point + Scalar() Scalar + Neg() Point + Add(rhs Point) Point + Sub(rhs Point) Point + Mul(rhs Scalar) Point + Equal(rhs Point) bool + Set(x, y *big.Int) (Point, error) + ToAffineCompressed() []byte + ToAffineUncompressed() []byte + FromAffineCompressed(bytes []byte) (Point, error) + FromAffineUncompressed(bytes []byte) (Point, error) + CurveName() string + SumOfProducts(points []Point, scalars []Scalar) Point +} + +type PairingPoint interface { + Point + OtherGroup() PairingPoint + Pairing(rhs PairingPoint) Scalar + MultiPairing(...PairingPoint) Scalar +} + +func pointMarshalBinary(point Point) ([]byte, error) { + // Always stores points in compressed form + // The first bytes are the curve name + // separated by a colon followed by the compressed point + // bytes + t := point.ToAffineCompressed() + name := []byte(point.CurveName()) + output := make([]byte, len(name)+1+len(t)) + copy(output[:len(name)], name) + output[len(name)] = byte(':') + copy(output[len(output)-len(t):], t) + return output, nil +} + +func pointUnmarshalBinary(input []byte) (Point, error) { + if len(input) < scalarBytes+1+len(P256Name) { + return nil, fmt.Errorf("invalid byte sequence") + } + sep := byte(':') + i := 0 + for ; i < len(input); i++ { + if input[i] == sep { + break + } + } + name := string(input[:i]) + curve := GetCurveByName(name) + if curve == nil { + return nil, fmt.Errorf("unrecognized curve") + } + return curve.Point.FromAffineCompressed(input[i+1:]) +} + +func pointMarshalText(point Point) ([]byte, error) { + // Always stores points in compressed form + // The first bytes are the curve name + // separated by a colon followed by the compressed point + // bytes + t := point.ToAffineCompressed() + name := []byte(point.CurveName()) + output := make([]byte, len(name)+1+len(t)*2) + copy(output[:len(name)], name) + output[len(name)] = byte(':') + hex.Encode(output[len(output)-len(t)*2:], t) + return output, nil +} + +func pointUnmarshalText(input []byte) (Point, error) { + if len(input) < scalarBytes*2+1+len(P256Name) { + return nil, fmt.Errorf("invalid byte sequence") + } + sep := byte(':') + i := 0 + for ; i < len(input); i++ { + if input[i] == sep { + break + } + } + name := string(input[:i]) + curve := GetCurveByName(name) + if curve == nil { + return nil, fmt.Errorf("unrecognized curve") + } + buffer := make([]byte, (len(input)-i)/2) + _, err := hex.Decode(buffer, input[i+1:]) + if err != nil { + return nil, err + } + return curve.Point.FromAffineCompressed(buffer) +} + +func pointMarshalJSON(point Point) ([]byte, error) { + m := make(map[string]string, 2) + m["type"] = point.CurveName() + m["value"] = hex.EncodeToString(point.ToAffineCompressed()) + return json.Marshal(m) +} + +func pointUnmarshalJSON(input []byte) (Point, error) { + var m map[string]string + + err := json.Unmarshal(input, &m) + if err != nil { + 
return nil, err + } + curve := GetCurveByName(m["type"]) + if curve == nil { + return nil, fmt.Errorf("invalid type") + } + p, err := hex.DecodeString(m["value"]) + if err != nil { + return nil, err + } + P, err := curve.Point.FromAffineCompressed(p) + if err != nil { + return nil, err + } + return P, nil +} + +// Curve represents a named elliptic curve with a scalar field and point group +type Curve struct { + Scalar Scalar + Point Point + Name string +} + +func (c Curve) ScalarBaseMult(sc Scalar) Point { + return c.Point.Generator().Mul(sc) +} + +func (c Curve) NewGeneratorPoint() Point { + return c.Point.Generator() +} + +func (c Curve) NewIdentityPoint() Point { + return c.Point.Identity() +} + +func (c Curve) NewScalar() Scalar { + return c.Scalar.Zero() +} + +// ToEllipticCurve returns the equivalent of this curve as the go interface `elliptic.Curve` +func (c Curve) ToEllipticCurve() (elliptic.Curve, error) { + err := fmt.Errorf("can't convert %s", c.Name) + switch c.Name { + case K256Name: + return K256Curve(), nil + case BLS12381G1Name: + return nil, err + case BLS12381G2Name: + return nil, err + case BLS12831Name: + return nil, err + case P256Name: + return NistP256Curve(), nil + case ED25519Name: + return nil, err + case PallasName: + return nil, err + case BLS12377G1Name: + return nil, err + case BLS12377G2Name: + return nil, err + case BLS12377Name: + return nil, err + default: + return nil, err + } +} + +// PairingCurve represents a named elliptic curve +// that supports pairings +type PairingCurve struct { + Scalar PairingScalar + PointG1 PairingPoint + PointG2 PairingPoint + GT Scalar + Name string +} + +func (c PairingCurve) ScalarG1BaseMult(sc Scalar) PairingPoint { + return c.PointG1.Generator().Mul(sc).(PairingPoint) +} + +func (c PairingCurve) ScalarG2BaseMult(sc Scalar) PairingPoint { + return c.PointG2.Generator().Mul(sc).(PairingPoint) +} + +func (c PairingCurve) NewG1GeneratorPoint() PairingPoint { + return c.PointG1.Generator().(PairingPoint) +} + +func (c PairingCurve) NewG2GeneratorPoint() PairingPoint { + return c.PointG2.Generator().(PairingPoint) +} + +func (c PairingCurve) NewG1IdentityPoint() PairingPoint { + return c.PointG1.Identity().(PairingPoint) +} + +func (c PairingCurve) NewG2IdentityPoint() PairingPoint { + return c.PointG2.Identity().(PairingPoint) +} + +func (c PairingCurve) NewScalar() PairingScalar { + return c.Scalar.Zero().(PairingScalar) +} + +// GetCurveByName returns the correct `Curve` given the name +func GetCurveByName(name string) *Curve { + switch name { + case K256Name: + return K256() + case BLS12381G1Name: + return BLS12381G1() + case BLS12381G2Name: + return BLS12381G2() + case BLS12831Name: + return BLS12381G1() + case P256Name: + return P256() + case ED25519Name: + return ED25519() + case PallasName: + return PALLAS() + case BLS12377G1Name: + return BLS12377G1() + case BLS12377G2Name: + return BLS12377G2() + case BLS12377Name: + return BLS12377G1() + default: + return nil + } +} + +func GetPairingCurveByName(name string) *PairingCurve { + switch name { + case BLS12381G1Name: + return BLS12381(BLS12381G1().NewIdentityPoint()) + case BLS12381G2Name: + return BLS12381(BLS12381G2().NewIdentityPoint()) + case BLS12831Name: + return BLS12381(BLS12381G1().NewIdentityPoint()) + default: + return nil + } +} + +// BLS12381G1 returns the BLS12-381 curve with points in G1 +func BLS12381G1() *Curve { + bls12381g1Initonce.Do(bls12381g1Init) + return &bls12381g1 +} + +func bls12381g1Init() { + bls12381g1 = Curve{ + Scalar: &ScalarBls12381{ + 
Value: bls12381.Bls12381FqNew(), + point: new(PointBls12381G1), + }, + Point: new(PointBls12381G1).Identity(), + Name: BLS12381G1Name, + } +} + +// BLS12381G2 returns the BLS12-381 curve with points in G2 +func BLS12381G2() *Curve { + bls12381g2Initonce.Do(bls12381g2Init) + return &bls12381g2 +} + +func bls12381g2Init() { + bls12381g2 = Curve{ + Scalar: &ScalarBls12381{ + Value: bls12381.Bls12381FqNew(), + point: new(PointBls12381G2), + }, + Point: new(PointBls12381G2).Identity(), + Name: BLS12381G2Name, + } +} + +func BLS12381(preferredPoint Point) *PairingCurve { + return &PairingCurve{ + Scalar: &ScalarBls12381{ + Value: bls12381.Bls12381FqNew(), + point: preferredPoint, + }, + PointG1: &PointBls12381G1{ + Value: new(bls12381.G1).Identity(), + }, + PointG2: &PointBls12381G2{ + Value: new(bls12381.G2).Identity(), + }, + GT: &ScalarBls12381Gt{ + Value: new(bls12381.Gt).SetOne(), + }, + Name: BLS12831Name, + } +} + +// BLS12377G1 returns the BLS12-377 curve with points in G1 +func BLS12377G1() *Curve { + bls12377g1Initonce.Do(bls12377g1Init) + return &bls12377g1 +} + +func bls12377g1Init() { + bls12377g1 = Curve{ + Scalar: &ScalarBls12377{ + value: new(big.Int), + point: new(PointBls12377G1), + }, + Point: new(PointBls12377G1).Identity(), + Name: BLS12377G1Name, + } +} + +// BLS12377G2 returns the BLS12-377 curve with points in G2 +func BLS12377G2() *Curve { + bls12377g2Initonce.Do(bls12377g2Init) + return &bls12377g2 +} + +func bls12377g2Init() { + bls12377g2 = Curve{ + Scalar: &ScalarBls12377{ + value: new(big.Int), + point: new(PointBls12377G2), + }, + Point: new(PointBls12377G2).Identity(), + Name: BLS12377G2Name, + } +} + +// K256 returns the secp256k1 curve +func K256() *Curve { + k256Initonce.Do(k256Init) + return &k256 +} + +func k256Init() { + k256 = Curve{ + Scalar: new(ScalarK256).Zero(), + Point: new(PointK256).Identity(), + Name: K256Name, + } +} + +func P256() *Curve { + p256Initonce.Do(p256Init) + return &p256 +} + +func p256Init() { + p256 = Curve{ + Scalar: new(ScalarP256).Zero(), + Point: new(PointP256).Identity(), + Name: P256Name, + } +} + +func ED25519() *Curve { + ed25519Initonce.Do(ed25519Init) + return &ed25519 +} + +func ed25519Init() { + ed25519 = Curve{ + Scalar: new(ScalarEd25519).Zero(), + Point: new(PointEd25519).Identity(), + Name: ED25519Name, + } +} + +func PALLAS() *Curve { + pallasInitonce.Do(pallasInit) + return &pallas +} + +func pallasInit() { + pallas = Curve{ + Scalar: new(ScalarPallas).Zero(), + Point: new(PointPallas).Identity(), + Name: PallasName, + } +} + +// https://tools.ietf.org/html/draft-irtf-cfrg-hash-to-curve-11#appendix-G.2.1 +func osswu3mod4(u *big.Int, p *sswuParams) (x, y *big.Int) { + params := p.Params + field := NewField(p.Params.P) + + tv1 := field.NewElement(u) + tv1 = tv1.Mul(tv1) // tv1 = u^2 + tv3 := field.NewElement(p.Z).Mul(tv1) // tv3 = Z * tv1 + tv2 := tv3.Mul(tv3) // tv2 = tv3^2 + xd := tv2.Add(tv3) // xd = tv2 + tv3 + x1n := xd.Add(field.One()) // x1n = (xd + 1) + x1n = x1n.Mul(field.NewElement(p.B)) // x1n * B + aNeg := field.NewElement(p.A).Neg() + xd = xd.Mul(aNeg) // xd = -A * xd + + if xd.Value.Cmp(big.NewInt(0)) == 0 { + xd = field.NewElement(p.Z).Mul(field.NewElement(p.A)) // xd = Z * A + } + + tv2 = xd.Mul(xd) // tv2 = xd^2 + gxd := tv2.Mul(xd) // gxd = tv2 * xd + tv2 = tv2.Mul(field.NewElement(p.A)) // tv2 = A * tv2 + + gx1 := x1n.Mul(x1n) // gx1 = x1n^2 + gx1 = gx1.Add(tv2) // gx1 = gx1 + tv2 + gx1 = gx1.Mul(x1n) // gx1 = gx1 * x1n + tv2 = gxd.Mul(field.NewElement(p.B)) // tv2 = B * gxd + gx1 = gx1.Add(tv2) // 
gx1 = gx1 + tv2 + + tv4 := gxd.Mul(gxd) // tv4 = gxd^2 + tv2 = gx1.Mul(gxd) // tv2 = gx1 * gxd + tv4 = tv4.Mul(tv2) // tv4 = tv4 * tv2 + + y1 := tv4.Pow(field.NewElement(p.C1)) + y1 = y1.Mul(tv2) // y1 = y1 * tv2 + x2n := tv3.Mul(x1n) // x2n = tv3 * x1n + + y2 := y1.Mul(field.NewElement(p.C2)) // y2 = y1 * c2 + y2 = y2.Mul(tv1) // y2 = y2 * tv1 + y2 = y2.Mul(field.NewElement(u)) // y2 = y2 * u + + tv2 = y1.Mul(y1) // tv2 = y1^2 + + tv2 = tv2.Mul(gxd) // tv2 = tv2 * gxd + + e2 := tv2.Value.Cmp(gx1.Value) == 0 + + // If e2, x = x1, else x = x2 + if e2 { + x = x1n.Value + } else { + x = x2n.Value + } + // xn / xd + x.Mul(x, new(big.Int).ModInverse(xd.Value, params.P)) + x.Mod(x, params.P) + + // If e2, y = y1, else y = y2 + if e2 { + y = y1.Value + } else { + y = y2.Value + } + + uBytes := u.Bytes() + yBytes := y.Bytes() + + usign := uBytes[len(uBytes)-1] & 1 + ysign := yBytes[len(yBytes)-1] & 1 + + // Fix sign of y + if usign != ysign { + y.Neg(y) + y.Mod(y, params.P) + } + + return x, y +} + +func expandMsgXmd(h hash.Hash, msg, domain []byte, outLen int) ([]byte, error) { + if len(domain) > 255 { + return nil, fmt.Errorf("invalid domain length") + } + domainLen := uint8(len(domain)) + // DST_prime = DST || I2OSP(len(DST), 1) + // b_0 = H(Z_pad || msg || l_i_b_str || I2OSP(0, 1) || DST_prime) + _, _ = h.Write(make([]byte, h.BlockSize())) + _, _ = h.Write(msg) + _, _ = h.Write([]byte{uint8(outLen >> 8), uint8(outLen)}) + _, _ = h.Write([]byte{0}) + _, _ = h.Write(domain) + _, _ = h.Write([]byte{domainLen}) + b0 := h.Sum(nil) + + // b_1 = H(b_0 || I2OSP(1, 1) || DST_prime) + h.Reset() + _, _ = h.Write(b0) + _, _ = h.Write([]byte{1}) + _, _ = h.Write(domain) + _, _ = h.Write([]byte{domainLen}) + b1 := h.Sum(nil) + + // b_i = H(strxor(b_0, b_(i - 1)) || I2OSP(i, 1) || DST_prime) + ell := (outLen + h.Size() - 1) / h.Size() + bi := b1 + out := make([]byte, outLen) + for i := 1; i < ell; i++ { + h.Reset() + // b_i = H(strxor(b_0, b_(i - 1)) || I2OSP(i, 1) || DST_prime) + tmp := make([]byte, h.Size()) + for j := 0; j < h.Size(); j++ { + tmp[j] = b0[j] ^ bi[j] + } + _, _ = h.Write(tmp) + _, _ = h.Write([]byte{1 + uint8(i)}) + _, _ = h.Write(domain) + _, _ = h.Write([]byte{domainLen}) + + // b_1 || ... || b_(ell - 1) + copy(out[(i-1)*h.Size():i*h.Size()], bi[:]) + bi = h.Sum(nil) + } + // b_ell + copy(out[(ell-1)*h.Size():], bi[:]) + return out[:outLen], nil +} + +func bhex(s string) *big.Int { + r, _ := new(big.Int).SetString(s, 16) + return r +} + +type sswuParams struct { + Params *elliptic.CurveParams + C1, C2, A, B, Z *big.Int +} + +// sumOfProductsPippenger implements a version of Pippenger's algorithm. +// +// The algorithm works as follows: +// +// Let `n` be a number of point-scalar pairs. +// Let `w` be a window of bits (6..8, chosen based on `n`, see cost factor). +// +// 1. Prepare `2^(w-1) - 1` buckets with indices `[1..2^(w-1))` initialized with identity points. +// Bucket 0 is not needed as it would contain points multiplied by 0. +// 2. Convert scalars to a radix-`2^w` representation with signed digits in `[-2^w/2, 2^w/2]`. +// Note: only the last digit may equal `2^w/2`. +// 3. Starting with the last window, for each point `i=[0..n)` add it to a a bucket indexed by +// the point's scalar's value in the window. +// 4. Once all points in a window are sorted into buckets, add buckets by multiplying each +// by their index. 
Efficient way of doing it is to start with the last bucket and compute two sums: +// intermediate sum from the last to the first, and the full sum made of all intermediate sums. +// 5. Shift the resulting sum of buckets by `w` bits by using `w` doublings. +// 6. Add to the return value. +// 7. Repeat the loop. +// +// Approximate cost w/o wNAF optimizations (A = addition, D = doubling): +// +// ```ascii +// cost = (n*A + 2*(2^w/2)*A + w*D + A)*256/w +// +// | | | | | +// | | | | looping over 256/w windows +// | | | adding to the result +// sorting points | shifting the sum by w bits (to the next window, starting from last window) +// one by one | +// into buckets adding/subtracting all buckets +// multiplied by their indexes +// using a sum of intermediate sums +// +// ``` +// +// For large `n`, dominant factor is (n*256/w) additions. +// However, if `w` is too big and `n` is not too big, then `(2^w/2)*A` could dominate. +// Therefore, the optimal choice of `w` grows slowly as `n` grows. +// +// # For constant time we use a fixed window of 6 +// +// This algorithm is adapted from section 4 of . +// and https://cacr.uwaterloo.ca/techreports/2010/cacr2010-26.pdf +func sumOfProductsPippenger(points []Point, scalars []*big.Int) Point { + if len(points) != len(scalars) { + return nil + } + + const w = 6 + + bucketSize := (1 << w) - 1 + windows := make([]Point, 255/w+1) + for i := range windows { + windows[i] = points[0].Identity() + } + bucket := make([]Point, bucketSize) + + for j := 0; j < len(windows); j++ { + for i := 0; i < bucketSize; i++ { + bucket[i] = points[0].Identity() + } + + for i := 0; i < len(scalars); i++ { + index := bucketSize & int(new(big.Int).Rsh(scalars[i], uint(w*j)).Int64()) + if index != 0 { + bucket[index-1] = bucket[index-1].Add(points[i]) + } + } + + acc, sum := windows[j].Identity(), windows[j].Identity() + + for i := bucketSize - 1; i >= 0; i-- { + sum = sum.Add(bucket[i]) + acc = acc.Add(sum) + } + windows[j] = acc + } + + acc := windows[0].Identity() + for i := len(windows) - 1; i >= 0; i-- { + for j := 0; j < w; j++ { + acc = acc.Double() + } + acc = acc.Add(windows[i]) + } + return acc +} diff --git a/core/curves/curve_test.go b/core/curves/curve_test.go new file mode 100644 index 0000000..8b8ace3 --- /dev/null +++ b/core/curves/curve_test.go @@ -0,0 +1,93 @@ +// Package curves provides integration tests for elliptic curve support +package curves + +import ( + "crypto/elliptic" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core" +) + +// TestAdditionalCurveSupport tests the new curve support in hash.go +func TestAdditionalCurveSupport(t *testing.T) { + tests := []struct { + name string + curve elliptic.Curve + expectedSecurity int + expectedL int + }{ + { + name: "P-256", + curve: elliptic.P256(), + expectedSecurity: 128, + expectedL: 48, + }, + { + name: "P-384", + curve: elliptic.P384(), + expectedSecurity: 192, + expectedL: 72, + }, + { + name: "P-521", + curve: elliptic.P521(), + expectedSecurity: 256, + expectedL: 98, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + // Test Hash function with the curve + msg := []byte("test message for curve support") + result, err := core.Hash(msg, tt.curve) + + require.NoError(t, err, "Hash should succeed for %s", tt.name) + require.NotNil(t, result, "Hash result should not be nil for %s", tt.name) + + // Verify the result is a valid big integer + require.Greater(t, result.BitLen(), 0, "Hash result should have non-zero bit length") + }) + } +} + 
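A natural companion to the test above — shown here only as a sketch, not as part of this commit — would exercise the pairing API added in `bls12381_curve.go` by checking bilinearity, e(a·G1, G2) = e(G1, a·G2). It assumes it would sit in the same `package curves` test file, so `testing` and `require` come from the imports above; the test name is hypothetical.

```go
// TestPairingBilinearitySketch is a hypothetical test (not part of this
// commit) that checks e(a*G1, G2) == e(G1, a*G2) on BLS12-381 using the
// Curve and PairingPoint API introduced in this change.
func TestPairingBilinearitySketch(t *testing.T) {
	g1 := BLS12381G1().NewGeneratorPoint().(PairingPoint)
	g2 := BLS12381G2().NewGeneratorPoint().(PairingPoint)
	a := BLS12381G1().Scalar.Hash([]byte("bilinearity check"))

	lhs := g1.Mul(a).(PairingPoint).Pairing(g2)
	rhs := g1.Pairing(g2.Mul(a).(PairingPoint))

	require.NotNil(t, lhs)
	require.NotNil(t, rhs)
	require.Zero(t, lhs.Cmp(rhs), "pairing should be bilinear in the scalar")
}
```

`Cmp` on the Gt scalar returns 0 only when both pairing results are equal, which is what the bilinearity property predicts here.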
+// BenchmarkHashWithCurves benchmarks the hash function with different curves +func BenchmarkHashWithCurves(b *testing.B) { + curves := []struct { + name string + curve elliptic.Curve + }{ + {"P256", elliptic.P256()}, + {"P384", elliptic.P384()}, + {"P521", elliptic.P521()}, + } + + msg := []byte("benchmark message for hash function") + + for _, c := range curves { + b.Run(c.name, func(b *testing.B) { + for i := 0; i < b.N; i++ { + _, _ = core.Hash(msg, c.curve) + } + }) + } +} + +// TestBackwardCompatibility ensures old curves still work +func TestBackwardCompatibility(t *testing.T) { + // Test that P-256 still works as before + msg := []byte("backward compatibility test") + + result256, err := core.Hash(msg, elliptic.P256()) + require.NoError(t, err) + require.NotNil(t, result256) + + // Test with same message multiple times for consistency + for i := 0; i < 10; i++ { + result, err := core.Hash(msg, elliptic.P256()) + require.NoError(t, err) + require.Equal(t, result256.Cmp(result), 0, "Hash should be consistent") + } +} diff --git a/core/curves/ec_point.go b/core/curves/ec_point.go new file mode 100644 index 0000000..6ab9cbd --- /dev/null +++ b/core/curves/ec_point.go @@ -0,0 +1,251 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package curves + +import ( + "crypto/elliptic" + "encoding/json" + "fmt" + "math/big" + + "github.com/sonr-io/sonr/crypto/core" + + "github.com/dustinxie/ecc" + "github.com/sonr-io/sonr/crypto/internal" +) + +var curveNameToID = map[string]byte{ + "secp256k1": 0, + "P-224": 1, + "P-256": 2, + "P-384": 3, + "P-521": 4, +} + +var curveIDToName = map[byte]func() elliptic.Curve{ + 0: ecc.P256k1, + 1: elliptic.P224, + 2: elliptic.P256, + 3: elliptic.P384, + 4: elliptic.P521, +} + +var curveMapper = map[string]func() elliptic.Curve{ + "secp256k1": ecc.P256k1, + "P-224": elliptic.P224, + "P-256": elliptic.P256, + "P-384": elliptic.P384, + "P-521": elliptic.P521, +} + +// EcPoint represents an elliptic curve Point +type EcPoint struct { + Curve elliptic.Curve + X, Y *big.Int +} + +// EcPointJSON encapsulates the data that is serialized to JSON +// used internally and not for external use. 
Public so other pieces +// can use for serialization +type EcPointJSON struct { + X *big.Int `json:"x"` + Y *big.Int `json:"y"` + CurveName string `json:"curve_name"` +} + +// MarshalJSON serializes EcPoint to JSON +func (a EcPoint) MarshalJSON() ([]byte, error) { + return json.Marshal(EcPointJSON{ + CurveName: a.Curve.Params().Name, + X: a.X, + Y: a.Y, + }) +} + +// UnmarshalJSON deserializes JSON to EcPoint +func (a *EcPoint) UnmarshalJSON(bytes []byte) error { + data := new(EcPointJSON) + err := json.Unmarshal(bytes, data) + if err != nil { + return err + } + if mapper, ok := curveMapper[data.CurveName]; ok { + a.Curve = mapper() + a.X = data.X + a.Y = data.Y + return nil + } + return fmt.Errorf("unknown curve deserialized") +} + +// MarshalBinary serializes EcPoint to binary +func (a *EcPoint) MarshalBinary() ([]byte, error) { + result := [65]byte{} + if code, ok := curveNameToID[a.Curve.Params().Name]; ok { + result[0] = code + a.X.FillBytes(result[1:33]) + a.Y.FillBytes(result[33:65]) + return result[:], nil + } + return nil, fmt.Errorf("unknown curve serialized") +} + +// UnmarshalBinary deserializes binary to EcPoint +func (a *EcPoint) UnmarshalBinary(data []byte) error { + if mapper, ok := curveIDToName[data[0]]; ok { + a.Curve = mapper() + a.X = new(big.Int).SetBytes(data[1:33]) + a.Y = new(big.Int).SetBytes(data[33:65]) + return nil + } + return fmt.Errorf("unknown curve deserialized") +} + +// IsValid checks if the point is valid +func (a EcPoint) IsValid() bool { + return a.IsOnCurve() || a.IsIdentity() +} + +// IsOnCurve checks if the point is on the curve +func (a EcPoint) IsOnCurve() bool { + return a.Curve.IsOnCurve(a.X, a.Y) +} + +// IsIdentity returns true if this Point is the Point at infinity +func (a EcPoint) IsIdentity() bool { + x := core.ConstantTimeEqByte(a.X, core.Zero) + y := core.ConstantTimeEqByte(a.Y, core.Zero) + return (x & y) == 1 +} + +// Equals return true if a and b have the same x,y coordinates +func (a EcPoint) Equals(b *EcPoint) bool { + if !sameCurve(&a, b) { + return false + } + x := core.ConstantTimeEqByte(a.X, b.X) + y := core.ConstantTimeEqByte(a.Y, b.Y) + return (x & y) == 1 +} + +// IsBasePoint returns true if this Point is curve's base Point +func (a EcPoint) IsBasePoint() bool { + p := a.Curve.Params() + x := core.ConstantTimeEqByte(a.X, p.Gx) + y := core.ConstantTimeEqByte(a.Y, p.Gy) + return (x & y) == 1 +} + +// reduceModN normalizes the Scalar to a positive element smaller than the base Point order. +func reduceModN(curve elliptic.Curve, k *big.Int) *big.Int { + return new(big.Int).Mod(k, curve.Params().N) +} + +// Add performs elliptic curve addition on two points +func (a *EcPoint) Add(b *EcPoint) (*EcPoint, error) { + if a == nil || b == nil { + return nil, internal.ErrNilArguments + } + if !sameCurve(a, b) { + return nil, internal.ErrPointsDistinctCurves + } + p := &EcPoint{Curve: a.Curve} + p.X, p.Y = a.Curve.Add(a.X, a.Y, b.X, b.Y) + if !p.IsValid() { + return nil, internal.ErrNotOnCurve + } + return p, nil +} + +// Neg returns the negation of a Weierstrass Point. 
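+// For an affine point (x, y) on a short Weierstrass curve, the negation is (x, p-y), where p is the prime of the underlying field.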
+func (a *EcPoint) Neg() (*EcPoint, error) { + if a == nil { + return nil, internal.ErrNilArguments + } + p := &EcPoint{Curve: a.Curve, X: a.X, Y: new(big.Int).Sub(a.Curve.Params().P, a.Y)} + if !p.IsValid() { + return nil, internal.ErrNotOnCurve + } + return p, nil +} + +// ScalarMult multiplies this Point by a Scalar +func (a *EcPoint) ScalarMult(k *big.Int) (*EcPoint, error) { + if a == nil || k == nil { + return nil, fmt.Errorf("cannot multiply nil Point or element") + } + n := reduceModN(a.Curve, k) + p := new(EcPoint) + p.Curve = a.Curve + p.X, p.Y = a.Curve.ScalarMult(a.X, a.Y, n.Bytes()) + if !p.IsValid() { + return nil, fmt.Errorf("result not on the curve") + } + return p, nil +} + +// NewScalarBaseMult creates a Point from the base Point multiplied by a field element +func NewScalarBaseMult(curve elliptic.Curve, k *big.Int) (*EcPoint, error) { + if curve == nil || k == nil { + return nil, fmt.Errorf("nil parameters are not supported") + } + n := reduceModN(curve, k) + p := new(EcPoint) + p.Curve = curve + p.X, p.Y = curve.ScalarBaseMult(n.Bytes()) + if !p.IsValid() { + return nil, fmt.Errorf("result not on the curve") + } + return p, nil +} + +// Bytes returns the bytes represented by this Point with x || y +func (a EcPoint) Bytes() []byte { + fieldSize := internal.CalcFieldSize(a.Curve) + out := make([]byte, fieldSize*2) + + a.X.FillBytes(out[0:fieldSize]) + a.Y.FillBytes(out[fieldSize : fieldSize*2]) + return out +} + +// PointFromBytesUncompressed outputs uncompressed X || Y similar to +// https://www.secg.org/sec1-v1.99.dif.pdf section 2.2 and 2.3 +func PointFromBytesUncompressed(curve elliptic.Curve, b []byte) (*EcPoint, error) { + fieldSize := internal.CalcFieldSize(curve) + if len(b) != fieldSize*2 { + return nil, fmt.Errorf("invalid number of bytes") + } + p := &EcPoint{ + Curve: curve, + X: new(big.Int).SetBytes(b[:fieldSize]), + Y: new(big.Int).SetBytes(b[fieldSize:]), + } + if !p.IsValid() { + return nil, fmt.Errorf("invalid Point") + } + return p, nil +} + +// sameCurve determines if points a,b appear to be from the same curve +func sameCurve(a, b *EcPoint) bool { + if a == b { + return true + } + if a == nil || b == nil { + return false + } + aParams := a.Curve.Params() + bParams := b.Curve.Params() + return aParams.P.Cmp(bParams.P) == 0 && + aParams.N.Cmp(bParams.N) == 0 && + aParams.B.Cmp(bParams.B) == 0 && + aParams.BitSize == bParams.BitSize && + aParams.Gx.Cmp(bParams.Gx) == 0 && + aParams.Gy.Cmp(bParams.Gy) == 0 && + aParams.Name == bParams.Name +} diff --git a/core/curves/ec_point_test.go b/core/curves/ec_point_test.go new file mode 100644 index 0000000..cc13c4e --- /dev/null +++ b/core/curves/ec_point_test.go @@ -0,0 +1,369 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package curves + +import ( + "bytes" + "crypto/elliptic" + "math/big" + "testing" + + "github.com/btcsuite/btcd/btcec/v2" + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core" + tt "github.com/sonr-io/sonr/crypto/internal" +) + +func TestIsIdentity(t *testing.T) { + // Should be Point at infinity + identity := &EcPoint{btcec.S256(), core.Zero, core.Zero} + require.True(t, identity.IsIdentity()) +} + +func TestNewScalarBaseMultZero(t *testing.T) { + // Should be Point at infinity + curve := btcec.S256() + num := big.NewInt(0) + p, err := NewScalarBaseMult(curve, num) + if err != nil { + t.Errorf("NewScalarBaseMult failed: %v", err) + } + if p == nil { + t.Errorf("NewScalarBaseMult failed when it should've succeeded.") + } +} + +func TestNewScalarBaseMultOne(t *testing.T) { + // Should be base Point + curve := btcec.S256() + num := big.NewInt(1) + p, err := NewScalarBaseMult(curve, num) + if err != nil { + t.Errorf("NewScalarBaseMult failed: %v", err) + } + if p == nil { + t.Errorf("NewScalarBaseMult failed when it should've succeeded.") + t.FailNow() + } + if !bytes.Equal(p.Bytes(), append(curve.Gx.Bytes(), curve.Gy.Bytes()...)) { + t.Errorf("NewScalarBaseMult should've returned the base Point.") + } +} + +func TestNewScalarBaseMultNeg(t *testing.T) { + curve := btcec.S256() + num := big.NewInt(-1) + p, err := NewScalarBaseMult(curve, num) + if err != nil { + t.Errorf("NewScalarBaseMult failed: %v", err) + } + if p == nil { + t.Errorf("NewScalarBaseMult failed when it should've succeeded.") + t.FailNow() + } + num.Mod(num, curve.N) + + e, err := NewScalarBaseMult(curve, num) + if err != nil { + t.Errorf("NewScalarBaseMult failed: %v", err) + } + if e == nil { + t.Errorf("NewScalarBaseMult failed when it should've succeeded.") + t.FailNow() + } + + if !bytes.Equal(p.Bytes(), e.Bytes()) { + t.Errorf("NewScalarBaseMult should've returned the %v, found: %v", e, p) + } +} + +func TestScalarMultZero(t *testing.T) { + // Should be Point at infinity + curve := btcec.S256() + p := &EcPoint{ + Curve: curve, + X: curve.Gx, + Y: curve.Gy, + } + num := big.NewInt(0) + q, err := p.ScalarMult(num) + if err != nil { + t.Errorf("ScalarMult failed: %v", err) + } + if q == nil { + t.Errorf("ScalarMult failed when it should've succeeded.") + t.FailNow() + } + if !q.IsIdentity() { + t.Errorf("ScalarMult should've returned the identity Point.") + } +} + +func TestScalarMultOne(t *testing.T) { + // Should be base Point + curve := btcec.S256() + p := &EcPoint{ + Curve: curve, + X: curve.Gx, + Y: curve.Gy, + } + num := big.NewInt(1) + q, err := p.ScalarMult(num) + if err != nil { + t.Errorf("ScalarMult failed: %v", err) + } + if q == nil { + t.Errorf("ScalarMult failed when it should've succeeded.") + t.FailNow() + } + if !bytes.Equal(q.Bytes(), append(curve.Gx.Bytes(), curve.Gy.Bytes()...)) { + t.Errorf("ScalarMult should've returned the base Point.") + } +} + +func TestScalarMultNeg(t *testing.T) { + curve := btcec.S256() + p := &EcPoint{ + Curve: curve, + X: curve.Gx, + Y: curve.Gy, + } + num := big.NewInt(-1) + q, err := p.ScalarMult(num) + if err != nil { + t.Errorf("ScalarMult failed: %v", err) + } + if q == nil { + t.Errorf("ScalarMult failed when it should've succeeded.") + } + num.Mod(num, curve.N) + + e, err := p.ScalarMult(num) + if err != nil { + t.Errorf("ScalarMult failed: %v", err) + } + if e == nil { + t.Errorf("ScalarMult failed when it should've succeeded.") + t.FailNow() + } + + if !bytes.Equal(q.Bytes(), e.Bytes()) { 
+ t.Errorf("ScalarMult should've returned the %v, found: %v", e, p) + } +} + +func TestEcPointAddSimple(t *testing.T) { + curve := btcec.S256() + num := big.NewInt(1) + p1, _ := NewScalarBaseMult(curve, num) + + p2, _ := NewScalarBaseMult(curve, num) + p3, err := p1.Add(p2) + if err != nil { + t.Errorf("EcPoint.Add failed: %v", err) + } + num = big.NewInt(2) + + ep, _ := NewScalarBaseMult(curve, num) + + if !bytes.Equal(ep.Bytes(), p3.Bytes()) { + t.Errorf("EcPoint.Add failed: should equal %v, found: %v", ep, p3) + } +} + +func TestEcPointAddCommunicative(t *testing.T) { + curve := btcec.S256() + a, _ := core.Rand(curve.Params().N) + b, _ := core.Rand(curve.Params().N) + + p1, _ := NewScalarBaseMult(curve, a) + p2, _ := NewScalarBaseMult(curve, b) + p3, err := p1.Add(p2) + if err != nil { + t.Errorf("EcPoint.Add failed: %v", err) + } + p4, err := p2.Add(p1) + if err != nil { + t.Errorf("EcPoint.Add failed: %v", err) + } + if !bytes.Equal(p3.Bytes(), p4.Bytes()) { + t.Errorf("EcPoint.Add Communicative not valid") + } +} + +func TestEcPointAddNeg(t *testing.T) { + curve := btcec.S256() + num := big.NewInt(-1) + + p1, _ := NewScalarBaseMult(curve, num) + num.Abs(num) + + p2, _ := NewScalarBaseMult(curve, num) + + p3, err := p1.Add(p2) + if err != nil { + t.Errorf("EcPoint.Add failed: %v", err) + } + zero := make([]byte, 64) + if !bytes.Equal(zero, p3.Bytes()) { + t.Errorf("Expected value to be zero, found: %v", p3) + } +} + +func TestEcPointBytes(t *testing.T) { + curve := btcec.S256() + + point, err := NewScalarBaseMult(curve, big.NewInt(2)) + require.NoError(t, err) + data := point.Bytes() + point2, err := PointFromBytesUncompressed(curve, data) + require.NoError(t, err) + if point.X.Cmp(point2.X) != 0 && point.Y.Cmp(point2.Y) != 0 { + t.Errorf("Points are not equal. Expected %v, found %v", point, point2) + } + + curve2 := elliptic.P224() + p2, err := NewScalarBaseMult(curve2, big.NewInt(2)) + require.NoError(t, err) + dta := p2.Bytes() + point3, err := PointFromBytesUncompressed(curve2, dta) + require.NoError(t, err) + if p2.X.Cmp(point3.X) != 0 && p2.Y.Cmp(point3.Y) != 0 { + t.Errorf("Points are not equal. Expected %v, found %v", p2, point3) + } + + curve3 := elliptic.P521() + p3, err := NewScalarBaseMult(curve3, big.NewInt(2)) + require.NoError(t, err) + data = p3.Bytes() + point4, err := PointFromBytesUncompressed(curve3, data) + require.NoError(t, err) + if p3.X.Cmp(point4.X) != 0 && p3.Y.Cmp(point4.Y) != 0 { + t.Errorf("Points are not equal. 
Expected %v, found %v", p3, point4) + } +} + +func TestEcPointBytesDifferentCurves(t *testing.T) { + k256 := btcec.S256() + p224 := elliptic.P224() + p256 := elliptic.P256() + + kp, err := NewScalarBaseMult(k256, big.NewInt(1)) + require.NoError(t, err) + data := kp.Bytes() + _, err = PointFromBytesUncompressed(p224, data) + require.Error(t, err) + _, err = PointFromBytesUncompressed(p256, data) + require.Error(t, err) +} + +func TestEcPointBytesInvalidNumberBytes(t *testing.T) { + curve := btcec.S256() + + for i := 1; i < 64; i++ { + data := make([]byte, i) + _, err := PointFromBytesUncompressed(curve, data) + require.Error(t, err) + } + for i := 65; i < 128; i++ { + data := make([]byte, i) + _, err := PointFromBytesUncompressed(curve, data) + require.Error(t, err) + } +} + +func TestEcPointMultRandom(t *testing.T) { + curve := btcec.S256() + r, err := core.Rand(curve.N) + require.NoError(t, err) + pt, err := NewScalarBaseMult(curve, r) + require.NoError(t, err) + require.NotNil(t, pt) + data := pt.Bytes() + pt2, err := PointFromBytesUncompressed(curve, data) + require.NoError(t, err) + if pt.X.Cmp(pt2.X) != 0 || pt.Y.Cmp(pt2.Y) != 0 { + t.Errorf("Points are not equal. Expected: %v, found: %v", pt, pt2) + } +} + +func TestIsBasePoint(t *testing.T) { + k256 := btcec.S256() + p224 := elliptic.P224() + p256 := elliptic.P256() + + notG_p224, err := NewScalarBaseMult(p224, tt.B10("9876453120")) + require.NoError(t, err) + + tests := []struct { + name string + curve elliptic.Curve + x, y *big.Int + expected bool + }{ + {"k256-positive", k256, k256.Gx, k256.Gy, true}, + {"p224-positive", p224, p224.Params().Gx, p224.Params().Gy, true}, + {"p256-positive", p256, p256.Params().Gx, p256.Params().Gy, true}, + + {"p224-negative", p224, notG_p224.X, notG_p224.Y, false}, + {"p256-negative-wrong-curve", p256, notG_p224.X, notG_p224.Y, false}, + {"k256-negative-doubleGx", k256, k256.Gx, k256.Gx, false}, + {"k256-negative-doubleGy", k256, k256.Gy, k256.Gy, false}, + {"k256-negative-xy-swap", k256, k256.Gy, k256.Gx, false}, + {"k256-negative-oh-oh", k256, core.Zero, core.Zero, false}, + } + // Run all the tests! + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + actual := EcPoint{test.curve, test.x, test.y}.IsBasePoint() + require.Equal(t, test.expected, actual) + }) + } +} + +func TestEquals(t *testing.T) { + k256 := btcec.S256() + p224 := elliptic.P224() + p256 := elliptic.P256() + P_p224, _ := NewScalarBaseMult(p224, tt.B10("9876453120")) + P1_p224, _ := NewScalarBaseMult(p224, tt.B10("9876453120")) + + P_k256 := &EcPoint{k256, P_p224.X, P_p224.Y} + + id_p224 := &EcPoint{p224, core.Zero, core.Zero} + id_k256 := &EcPoint{k256, core.Zero, core.Zero} + id_p256 := &EcPoint{p256, core.Zero, core.Zero} + + tests := []struct { + name string + x, y *EcPoint + expected bool + }{ + {"p224 same pointer", P_p224, P_p224, true}, + {"p224 same Point", P_p224, P1_p224, true}, + {"p224 identity", id_p224, id_p224, true}, + {"p256 identity", id_p256, id_p256, true}, + {"k256 identity", id_k256, id_k256, true}, + + {"negative-same x different y", P_p224, &EcPoint{p224, P_p224.X, core.One}, false}, + {"negative-same y different x", P_p224, &EcPoint{p224, core.Two, P_k256.Y}, false}, + + {"negative-wrong curve", P_p224, P_k256, false}, + {"negative-wrong curve reversed", P_k256, P_p224, false}, + {"Point is not the identity", P_p224, id_p224, false}, + {"negative nil", P1_p224, nil, false}, + {"identities on wrong curve", id_p256, id_k256, false}, + } + // Run all the tests! 
+ for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + actual := test.x.Equals(test.y) + require.Equal(t, test.expected, actual) + }) + } +} diff --git a/core/curves/ec_scalar.go b/core/curves/ec_scalar.go new file mode 100644 index 0000000..a3076c2 --- /dev/null +++ b/core/curves/ec_scalar.go @@ -0,0 +1,353 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package curves + +import ( + "crypto/elliptic" + crand "crypto/rand" + "crypto/sha512" + "fmt" + "io" + "math/big" + + "filippo.io/edwards25519" + "github.com/btcsuite/btcd/btcec/v2" + "github.com/bwesterb/go-ristretto" + + "github.com/sonr-io/sonr/crypto/core" + "github.com/sonr-io/sonr/crypto/core/curves/native/bls12381" + "github.com/sonr-io/sonr/crypto/internal" +) + +type EcScalar interface { + Add(x, y *big.Int) *big.Int + Sub(x, y *big.Int) *big.Int + Neg(x *big.Int) *big.Int + Mul(x, y *big.Int) *big.Int + Hash(input []byte) *big.Int + Div(x, y *big.Int) *big.Int + Random() (*big.Int, error) + IsValid(x *big.Int) bool + Bytes(x *big.Int) []byte // fixed-length byte array +} + +type K256Scalar struct{} + +// Static interface assertion +var _ EcScalar = (*K256Scalar)(nil) + +// warning: the Euclidean alg which Mod uses is not constant-time. + +func NewK256Scalar() *K256Scalar { + return &K256Scalar{} +} + +func (k K256Scalar) Add(x, y *big.Int) *big.Int { + v := new(big.Int).Add(x, y) + v.Mod(v, btcec.S256().N) + return v +} + +func (k K256Scalar) Sub(x, y *big.Int) *big.Int { + v := new(big.Int).Sub(x, y) + v.Mod(v, btcec.S256().N) + return v +} + +func (k K256Scalar) Neg(x *big.Int) *big.Int { + v := new(big.Int).Sub(btcec.S256().N, x) + v.Mod(v, btcec.S256().N) + return v +} + +func (k K256Scalar) Mul(x, y *big.Int) *big.Int { + v := new(big.Int).Mul(x, y) + v.Mod(v, btcec.S256().N) + return v +} + +func (k K256Scalar) Div(x, y *big.Int) *big.Int { + t := new(big.Int).ModInverse(y, btcec.S256().N) + return k.Mul(x, t) +} + +func (k K256Scalar) Hash(input []byte) *big.Int { + return new(ScalarK256).Hash(input).BigInt() +} + +func (k K256Scalar) Random() (*big.Int, error) { + b := make([]byte, 48) + n, err := crand.Read(b) + if err != nil { + return nil, err + } + if n != 48 { + return nil, fmt.Errorf("insufficient bytes read") + } + v := new(big.Int).SetBytes(b) + v.Mod(v, btcec.S256().N) + return v, nil +} + +func (k K256Scalar) IsValid(x *big.Int) bool { + return core.In(x, btcec.S256().N) == nil +} + +func (k K256Scalar) Bytes(x *big.Int) []byte { + bytes := make([]byte, 32) + x.FillBytes(bytes) // big-endian; will left-pad. 
+ return bytes +} + +type P256Scalar struct{} + +// Static interface assertion +var _ EcScalar = (*P256Scalar)(nil) + +func NewP256Scalar() *P256Scalar { + return &P256Scalar{} +} + +func (k P256Scalar) Add(x, y *big.Int) *big.Int { + v := new(big.Int).Add(x, y) + v.Mod(v, elliptic.P256().Params().N) + return v +} + +func (k P256Scalar) Sub(x, y *big.Int) *big.Int { + v := new(big.Int).Sub(x, y) + v.Mod(v, elliptic.P256().Params().N) + return v +} + +func (k P256Scalar) Neg(x *big.Int) *big.Int { + v := new(big.Int).Sub(elliptic.P256().Params().N, x) + v.Mod(v, elliptic.P256().Params().N) + return v +} + +func (k P256Scalar) Mul(x, y *big.Int) *big.Int { + v := new(big.Int).Mul(x, y) + v.Mod(v, elliptic.P256().Params().N) + return v +} + +func (k P256Scalar) Div(x, y *big.Int) *big.Int { + t := new(big.Int).ModInverse(y, elliptic.P256().Params().N) + return k.Mul(x, t) +} + +func (k P256Scalar) Hash(input []byte) *big.Int { + return new(ScalarP256).Hash(input).BigInt() +} + +func (k P256Scalar) Random() (*big.Int, error) { + b := make([]byte, 48) + n, err := crand.Read(b) + if err != nil { + return nil, err + } + if n != 48 { + return nil, fmt.Errorf("insufficient bytes read") + } + v := new(big.Int).SetBytes(b) + v.Mod(v, elliptic.P256().Params().N) + return v, nil +} + +func (k P256Scalar) IsValid(x *big.Int) bool { + return core.In(x, elliptic.P256().Params().N) == nil +} + +func (k P256Scalar) Bytes(x *big.Int) []byte { + bytes := make([]byte, 32) + x.FillBytes(bytes) // big-endian; will left-pad. + return bytes +} + +type Bls12381Scalar struct{} + +// Static interface assertion +var _ EcScalar = (*Bls12381Scalar)(nil) + +func NewBls12381Scalar() *Bls12381Scalar { + return &Bls12381Scalar{} +} + +func (k Bls12381Scalar) Add(x, y *big.Int) *big.Int { + a := bls12381.Bls12381FqNew().SetBigInt(x) + b := bls12381.Bls12381FqNew().SetBigInt(y) + return a.Add(a, b).BigInt() +} + +func (k Bls12381Scalar) Sub(x, y *big.Int) *big.Int { + a := bls12381.Bls12381FqNew().SetBigInt(x) + b := bls12381.Bls12381FqNew().SetBigInt(y) + return a.Sub(a, b).BigInt() +} + +func (k Bls12381Scalar) Neg(x *big.Int) *big.Int { + a := bls12381.Bls12381FqNew().SetBigInt(x) + return a.Neg(a).BigInt() +} + +func (k Bls12381Scalar) Mul(x, y *big.Int) *big.Int { + a := bls12381.Bls12381FqNew().SetBigInt(x) + b := bls12381.Bls12381FqNew().SetBigInt(y) + return a.Mul(a, b).BigInt() +} + +func (k Bls12381Scalar) Div(x, y *big.Int) *big.Int { + c := bls12381.Bls12381FqNew() + a := bls12381.Bls12381FqNew().SetBigInt(x) + b := bls12381.Bls12381FqNew().SetBigInt(y) + _, wasInverted := c.Invert(b) + c.Mul(a, c) + tt := map[bool]int{false: 0, true: 1} + return a.CMove(a, c, tt[wasInverted]).BigInt() +} + +func (k Bls12381Scalar) Hash(input []byte) *big.Int { + return new(ScalarBls12381).Hash(input).BigInt() +} + +func (k Bls12381Scalar) Random() (*big.Int, error) { + a := BLS12381G1().NewScalar().Random(crand.Reader) + if a == nil { + return nil, fmt.Errorf("invalid random value") + } + return a.BigInt(), nil +} + +func (k Bls12381Scalar) Bytes(x *big.Int) []byte { + bytes := make([]byte, 32) + x.FillBytes(bytes) // big-endian; will left-pad. 
+ return bytes +} + +func (k Bls12381Scalar) IsValid(x *big.Int) bool { + a := bls12381.Bls12381FqNew().SetBigInt(x) + return a.BigInt().Cmp(x) == 0 +} + +// taken from https://datatracker.ietf.org/doc/html/rfc8032 +var ed25519N, _ = new( + big.Int, +).SetString("1000000000000000000000000000000014DEF9DEA2F79CD65812631A5CF5D3ED", 16) + +type Ed25519Scalar struct{} + +// Static interface assertion +var _ EcScalar = (*Ed25519Scalar)(nil) + +func NewEd25519Scalar() *Ed25519Scalar { + return &Ed25519Scalar{} +} + +func (k Ed25519Scalar) Add(x, y *big.Int) *big.Int { + a, err := internal.BigInt2Ed25519Scalar(x) + if err != nil { + panic(err) + } + b, err := internal.BigInt2Ed25519Scalar(y) + if err != nil { + panic(err) + } + a.Add(a, b) + return new(big.Int).SetBytes(internal.ReverseScalarBytes(a.Bytes())) +} + +func (k Ed25519Scalar) Sub(x, y *big.Int) *big.Int { + a, err := internal.BigInt2Ed25519Scalar(x) + if err != nil { + panic(err) + } + b, err := internal.BigInt2Ed25519Scalar(y) + if err != nil { + panic(err) + } + a.Subtract(a, b) + return new(big.Int).SetBytes(internal.ReverseScalarBytes(a.Bytes())) +} + +func (k Ed25519Scalar) Neg(x *big.Int) *big.Int { + a, err := internal.BigInt2Ed25519Scalar(x) + if err != nil { + panic(err) + } + a.Negate(a) + return new(big.Int).SetBytes(internal.ReverseScalarBytes(a.Bytes())) +} + +func (k Ed25519Scalar) Mul(x, y *big.Int) *big.Int { + a, err := internal.BigInt2Ed25519Scalar(x) + if err != nil { + panic(err) + } + b, err := internal.BigInt2Ed25519Scalar(y) + if err != nil { + panic(err) + } + a.Multiply(a, b) + return new(big.Int).SetBytes(internal.ReverseScalarBytes(a.Bytes())) +} + +func (k Ed25519Scalar) Div(x, y *big.Int) *big.Int { + b, err := internal.BigInt2Ed25519Scalar(y) + if err != nil { + panic(err) + } + b.Invert(b) + a, err := internal.BigInt2Ed25519Scalar(x) + if err != nil { + panic(err) + } + a.Multiply(a, b) + return new(big.Int).SetBytes(internal.ReverseScalarBytes(a.Bytes())) +} + +func (k Ed25519Scalar) Hash(input []byte) *big.Int { + v := new(ristretto.Scalar).Derive(input) + var data [32]byte + v.BytesInto(&data) + return new(big.Int).SetBytes(internal.ReverseScalarBytes(data[:])) +} + +func (k Ed25519Scalar) Bytes(x *big.Int) []byte { + a, err := internal.BigInt2Ed25519Scalar(x) + if err != nil { + panic(err) + } + return internal.ReverseScalarBytes(a.Bytes()) +} + +func (k Ed25519Scalar) Random() (*big.Int, error) { + return k.RandomWithReader(crand.Reader) +} + +func (k Ed25519Scalar) RandomWithReader(r io.Reader) (*big.Int, error) { + b := make([]byte, 64) + n, err := r.Read(b) + if err != nil { + return nil, err + } + if n != 64 { + return nil, fmt.Errorf("insufficient bytes read") + } + digest := sha512.Sum512(b) + var hBytes [32]byte + copy(hBytes[:], digest[:]) + s, err := edwards25519.NewScalar().SetBytesWithClamping(hBytes[:]) + if err != nil { + return nil, err + } + return new(big.Int).SetBytes(internal.ReverseScalarBytes(s.Bytes())), nil +} + +func (k Ed25519Scalar) IsValid(x *big.Int) bool { + return x.Cmp(ed25519N) == -1 +} diff --git a/core/curves/ecdsa.go b/core/curves/ecdsa.go new file mode 100755 index 0000000..41e8759 --- /dev/null +++ b/core/curves/ecdsa.go @@ -0,0 +1,39 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package curves + +import ( + "crypto/ecdsa" + "math/big" +) + +// EcdsaVerify runs a curve- or algorithm-specific ECDSA verification function on input +// an ECDSA public (verification) key, a message digest, and an ECDSA signature. +// It must return true if all the parameters are sane and the ECDSA signature is valid, +// and false otherwise +type EcdsaVerify func(pubKey *EcPoint, hash []byte, signature *EcdsaSignature) bool + +// EcdsaSignature represents a (composite) digital signature +type EcdsaSignature struct { + R *big.Int + S *big.Int + V int +} + +// Static type assertion +var _ EcdsaVerify = VerifyEcdsa + +// Verifies ECDSA signature using core types. +func VerifyEcdsa(pk *EcPoint, hash []byte, sig *EcdsaSignature) bool { + return ecdsa.Verify( + &ecdsa.PublicKey{ + Curve: pk.Curve, + X: pk.X, + Y: pk.Y, + }, + hash, sig.R, sig.S) +} diff --git a/core/curves/ed25519_curve.go b/core/curves/ed25519_curve.go new file mode 100644 index 0000000..443e0a6 --- /dev/null +++ b/core/curves/ed25519_curve.go @@ -0,0 +1,864 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package curves + +import ( + "bytes" + "crypto/sha512" + "crypto/subtle" + "fmt" + "io" + "math/big" + + "filippo.io/edwards25519" + "filippo.io/edwards25519/field" + "github.com/bwesterb/go-ristretto" + ed "github.com/bwesterb/go-ristretto/edwards25519" + + "github.com/sonr-io/sonr/crypto/internal" +) + +type ScalarEd25519 struct { + value *edwards25519.Scalar +} + +type PointEd25519 struct { + value *edwards25519.Point +} + +var scOne, _ = edwards25519.NewScalar(). + SetCanonicalBytes([]byte{1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}) + +func (s *ScalarEd25519) Random(reader io.Reader) Scalar { + if reader == nil { + return nil + } + var seed [64]byte + _, _ = reader.Read(seed[:]) + return s.Hash(seed[:]) +} + +func (s *ScalarEd25519) Hash(bytes []byte) Scalar { + v := new(ristretto.Scalar).Derive(bytes) + var data [32]byte + v.BytesInto(&data) + value, err := edwards25519.NewScalar().SetCanonicalBytes(data[:]) + if err != nil { + return nil + } + return &ScalarEd25519{value} +} + +func (s *ScalarEd25519) Zero() Scalar { + return &ScalarEd25519{ + value: edwards25519.NewScalar(), + } +} + +func (s *ScalarEd25519) One() Scalar { + return &ScalarEd25519{ + value: edwards25519.NewScalar().Set(scOne), + } +} + +func (s *ScalarEd25519) IsZero() bool { + i := byte(0) + for _, b := range s.value.Bytes() { + i |= b + } + return i == 0 +} + +func (s *ScalarEd25519) IsOne() bool { + data := s.value.Bytes() + i := byte(0) + for j := 1; j < len(data); j++ { + i |= data[j] + } + return i == 0 && data[0] == 1 +} + +func (s *ScalarEd25519) IsOdd() bool { + return s.value.Bytes()[0]&1 == 1 +} + +func (s *ScalarEd25519) IsEven() bool { + return s.value.Bytes()[0]&1 == 0 +} + +func (s *ScalarEd25519) New(input int) Scalar { + var data [64]byte + i := input + if input < 0 { + i = -input + } + data[0] = byte(i) + data[1] = byte(i >> 8) + data[2] = byte(i >> 16) + data[3] = byte(i >> 24) + value, err := edwards25519.NewScalar().SetUniformBytes(data[:]) + if err != nil { + return nil + } + if input < 0 { + value.Negate(value) + } + + return &ScalarEd25519{ + value, + } +} + +func (s *ScalarEd25519) Cmp(rhs Scalar) int { + r := s.Sub(rhs) + if r != nil && r.IsZero() { + return 0 + } else { + return -2 + } +} + +func (s *ScalarEd25519) Square() Scalar { + value := 
edwards25519.NewScalar().Multiply(s.value, s.value) + return &ScalarEd25519{value} +} + +func (s *ScalarEd25519) Double() Scalar { + return &ScalarEd25519{ + value: edwards25519.NewScalar().Add(s.value, s.value), + } +} + +func (s *ScalarEd25519) Invert() (Scalar, error) { + return &ScalarEd25519{ + value: edwards25519.NewScalar().Invert(s.value), + }, nil +} + +func (s *ScalarEd25519) Sqrt() (Scalar, error) { + bi25519, _ := new( + big.Int, + ).SetString("1000000000000000000000000000000014DEF9DEA2F79CD65812631A5CF5D3ED", 16) + x := s.BigInt() + x.ModSqrt(x, bi25519) + return s.SetBigInt(x) +} + +func (s *ScalarEd25519) Cube() Scalar { + value := edwards25519.NewScalar().Multiply(s.value, s.value) + value.Multiply(value, s.value) + return &ScalarEd25519{value} +} + +func (s *ScalarEd25519) Add(rhs Scalar) Scalar { + r, ok := rhs.(*ScalarEd25519) + if ok { + return &ScalarEd25519{ + value: edwards25519.NewScalar().Add(s.value, r.value), + } + } else { + return nil + } +} + +func (s *ScalarEd25519) Sub(rhs Scalar) Scalar { + r, ok := rhs.(*ScalarEd25519) + if ok { + return &ScalarEd25519{ + value: edwards25519.NewScalar().Subtract(s.value, r.value), + } + } else { + return nil + } +} + +func (s *ScalarEd25519) Mul(rhs Scalar) Scalar { + r, ok := rhs.(*ScalarEd25519) + if ok { + return &ScalarEd25519{ + value: edwards25519.NewScalar().Multiply(s.value, r.value), + } + } else { + return nil + } +} + +func (s *ScalarEd25519) MulAdd(y, z Scalar) Scalar { + yy, ok := y.(*ScalarEd25519) + if !ok { + return nil + } + zz, ok := z.(*ScalarEd25519) + if !ok { + return nil + } + return &ScalarEd25519{value: edwards25519.NewScalar().MultiplyAdd(s.value, yy.value, zz.value)} +} + +func (s *ScalarEd25519) Div(rhs Scalar) Scalar { + r, ok := rhs.(*ScalarEd25519) + if ok { + value := edwards25519.NewScalar().Invert(r.value) + value.Multiply(value, s.value) + return &ScalarEd25519{value} + } else { + return nil + } +} + +func (s *ScalarEd25519) Neg() Scalar { + return &ScalarEd25519{ + value: edwards25519.NewScalar().Negate(s.value), + } +} + +func (s *ScalarEd25519) SetBigInt(x *big.Int) (Scalar, error) { + if x == nil { + return nil, fmt.Errorf("invalid value") + } + + bi25519, _ := new( + big.Int, + ).SetString("1000000000000000000000000000000014DEF9DEA2F79CD65812631A5CF5D3ED", 16) + var v big.Int + buf := v.Mod(x, bi25519).Bytes() + var rBuf [32]byte + for i := 0; i < len(buf) && i < 32; i++ { + rBuf[i] = buf[len(buf)-i-1] + } + value, err := edwards25519.NewScalar().SetCanonicalBytes(rBuf[:]) + if err != nil { + return nil, err + } + return &ScalarEd25519{value}, nil +} + +func (s *ScalarEd25519) BigInt() *big.Int { + var ret big.Int + buf := internal.ReverseScalarBytes(s.value.Bytes()) + return ret.SetBytes(buf) +} + +func (s *ScalarEd25519) Bytes() []byte { + return s.value.Bytes() +} + +// SetBytes takes input a 32-byte long array and returns a ed25519 scalar. +// The input must be 32-byte long and must be a reduced bytes. +func (s *ScalarEd25519) SetBytes(input []byte) (Scalar, error) { + if len(input) != 32 { + return nil, fmt.Errorf("invalid byte sequence") + } + value, err := edwards25519.NewScalar().SetCanonicalBytes(input) + if err != nil { + return nil, err + } + return &ScalarEd25519{value}, nil +} + +// SetBytesWide takes input a 64-byte long byte array, reduce it and return an ed25519 scalar. 
+// It uses SetUniformBytes of fillipo.io/edwards25519 - https://github.com/FiloSottile/edwards25519/blob/v1.0.0-rc.1/scalar.go#L85 +// If bytes is not of the right length, it returns nil and an error +func (s *ScalarEd25519) SetBytesWide(bytes []byte) (Scalar, error) { + value, err := edwards25519.NewScalar().SetUniformBytes(bytes) + if err != nil { + return nil, err + } + return &ScalarEd25519{value}, nil +} + +// SetBytesClamping uses SetBytesWithClamping of fillipo.io/edwards25519- https://github.com/FiloSottile/edwards25519/blob/v1.0.0-rc.1/scalar.go#L135 +// which applies the buffer pruning described in RFC 8032, Section 5.1.5 (also known as clamping) +// and sets bytes to the result. The input must be 32-byte long, and it is not modified. +// If bytes is not of the right length, SetBytesWithClamping returns nil and an error, and the receiver is unchanged. +func (s *ScalarEd25519) SetBytesClamping(bytes []byte) (Scalar, error) { + value, err := edwards25519.NewScalar().SetBytesWithClamping(bytes) + if err != nil { + return nil, err + } + return &ScalarEd25519{value}, nil +} + +// SetBytesCanonical uses SetCanonicalBytes of fillipo.io/edwards25519. +// https://github.com/FiloSottile/edwards25519/blob/v1.0.0-rc.1/scalar.go#L98 +// This function takes an input x and sets s = x, where x is a 32-byte little-endian +// encoding of s, then it returns the corresponding ed25519 scalar. If the input is +// not a canonical encoding of s, it returns nil and an error. +func (s *ScalarEd25519) SetBytesCanonical(bytes []byte) (Scalar, error) { + return s.SetBytes(bytes) +} + +func (s *ScalarEd25519) Point() Point { + return new(PointEd25519).Identity() +} + +func (s *ScalarEd25519) Clone() Scalar { + return &ScalarEd25519{ + value: edwards25519.NewScalar().Set(s.value), + } +} + +func (s *ScalarEd25519) MarshalBinary() ([]byte, error) { + return scalarMarshalBinary(s) +} + +func (s *ScalarEd25519) UnmarshalBinary(input []byte) error { + sc, err := scalarUnmarshalBinary(input) + if err != nil { + return err + } + ss, ok := sc.(*ScalarEd25519) + if !ok { + return fmt.Errorf("invalid scalar") + } + s.value = ss.value + return nil +} + +func (s *ScalarEd25519) MarshalText() ([]byte, error) { + return scalarMarshalText(s) +} + +func (s *ScalarEd25519) UnmarshalText(input []byte) error { + sc, err := scalarUnmarshalText(input) + if err != nil { + return err + } + ss, ok := sc.(*ScalarEd25519) + if !ok { + return fmt.Errorf("invalid scalar") + } + s.value = ss.value + return nil +} + +func (s *ScalarEd25519) GetEdwardsScalar() *edwards25519.Scalar { + return edwards25519.NewScalar().Set(s.value) +} + +func (s *ScalarEd25519) SetEdwardsScalar(sc *edwards25519.Scalar) *ScalarEd25519 { + return &ScalarEd25519{value: edwards25519.NewScalar().Set(sc)} +} + +func (s *ScalarEd25519) MarshalJSON() ([]byte, error) { + return scalarMarshalJson(s) +} + +func (s *ScalarEd25519) UnmarshalJSON(input []byte) error { + sc, err := scalarUnmarshalJson(input) + if err != nil { + return err + } + S, ok := sc.(*ScalarEd25519) + if !ok { + return fmt.Errorf("invalid type") + } + s.value = S.value + return nil +} + +func (p *PointEd25519) Random(reader io.Reader) Point { + var seed [64]byte + _, _ = reader.Read(seed[:]) + return p.Hash(seed[:]) +} + +func (p *PointEd25519) Hash(bytes []byte) Point { + /// Perform hashing to the group using the Elligator2 map + /// + /// See https://tools.ietf.org/html/draft-irtf-cfrg-hash-to-curve-11#section-6.7.1 + h := sha512.Sum512(bytes) + var res [32]byte + copy(res[:], h[:32]) + signBit 
:= (res[31] & 0x80) >> 7 + + fe := new(ed.FieldElement).SetBytes(&res).BytesInto(&res) + m1 := elligatorEncode(fe) + + return toEdwards(m1, signBit) +} + +func (p *PointEd25519) Identity() Point { + return &PointEd25519{ + value: edwards25519.NewIdentityPoint(), + } +} + +func (p *PointEd25519) Generator() Point { + return &PointEd25519{ + value: edwards25519.NewGeneratorPoint(), + } +} + +func (p *PointEd25519) IsIdentity() bool { + return p.Equal(p.Identity()) +} + +func (p *PointEd25519) IsNegative() bool { + // Negative points don't really exist in ed25519 + return false +} + +func (p *PointEd25519) IsOnCurve() bool { + _, err := edwards25519.NewIdentityPoint().SetBytes(p.ToAffineCompressed()) + return err == nil +} + +func (p *PointEd25519) Double() Point { + return &PointEd25519{value: edwards25519.NewIdentityPoint().Add(p.value, p.value)} +} + +func (p *PointEd25519) Scalar() Scalar { + return new(ScalarEd25519).Zero() +} + +func (p *PointEd25519) Neg() Point { + return &PointEd25519{value: edwards25519.NewIdentityPoint().Negate(p.value)} +} + +func (p *PointEd25519) Add(rhs Point) Point { + if rhs == nil { + return nil + } + r, ok := rhs.(*PointEd25519) + if ok { + return &PointEd25519{value: edwards25519.NewIdentityPoint().Add(p.value, r.value)} + } else { + return nil + } +} + +func (p *PointEd25519) Sub(rhs Point) Point { + if rhs == nil { + return nil + } + r, ok := rhs.(*PointEd25519) + if ok { + rTmp := edwards25519.NewIdentityPoint().Negate(r.value) + return &PointEd25519{value: edwards25519.NewIdentityPoint().Add(p.value, rTmp)} + } else { + return nil + } +} + +func (p *PointEd25519) Mul(rhs Scalar) Point { + if rhs == nil { + return nil + } + r, ok := rhs.(*ScalarEd25519) + if ok { + value := edwards25519.NewIdentityPoint().ScalarMult(r.value, p.value) + return &PointEd25519{value} + } else { + return nil + } +} + +// MangleScalarBitsAndMulByBasepointToProducePublicKey +// is a function for mangling the bits of a (formerly +// mathematically well-defined) "scalar" and multiplying it to produce a +// public key. +func (p *PointEd25519) MangleScalarBitsAndMulByBasepointToProducePublicKey( + rhs *ScalarEd25519, +) *PointEd25519 { + data := rhs.value.Bytes() + s, err := edwards25519.NewScalar().SetBytesWithClamping(data[:]) + if err != nil { + return nil + } + value := edwards25519.NewIdentityPoint().ScalarBaseMult(s) + return &PointEd25519{value} +} + +func (p *PointEd25519) Equal(rhs Point) bool { + r, ok := rhs.(*PointEd25519) + if ok { + // We would like to check that the point (X/Z, Y/Z) is equal to + // the point (X'/Z', Y'/Z') without converting into affine + // coordinates (x, y) and (x', y'), which requires two inversions. + // We have that X = xZ and X' = x'Z'. Thus, x = x' is equivalent to + // (xZ)Z' = (x'Z')Z, and similarly for the y-coordinate. 
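+ // Point.Equal in filippo.io/edwards25519 performs this cross-multiplied projective comparison internally, so no field inversions are needed here.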
+ return p.value.Equal(r.value) == 1 + //lhs1 := new(ed.FieldElement).Mul(&p.value.X, &r.value.Z) + //rhs1 := new(ed.FieldElement).Mul(&r.value.X, &p.value.Z) + //lhs2 := new(ed.FieldElement).Mul(&p.value.Y, &r.value.Z) + //rhs2 := new(ed.FieldElement).Mul(&r.value.Y, &p.value.Z) + // + //return lhs1.Equals(rhs1) && lhs2.Equals(rhs2) + } else { + return false + } +} + +func (p *PointEd25519) Set(x, y *big.Int) (Point, error) { + // check is identity + xx := subtle.ConstantTimeCompare(x.Bytes(), []byte{}) + yy := subtle.ConstantTimeCompare(y.Bytes(), []byte{}) + if (xx | yy) == 1 { + return p.Identity(), nil + } + xElem := new(ed.FieldElement).SetBigInt(x) + yElem := new(ed.FieldElement).SetBigInt(y) + + var data [32]byte + var affine [64]byte + xElem.BytesInto(&data) + copy(affine[:32], data[:]) + yElem.BytesInto(&data) + copy(affine[32:], data[:]) + return p.FromAffineUncompressed(affine[:]) +} + +// sqrtRatio sets r to the non-negative square root of the ratio of u and v. +// +// If u/v is square, sqrtRatio returns r and 1. If u/v is not square, SqrtRatio +// sets r according to Section 4.3 of draft-irtf-cfrg-ristretto255-decaf448-00, +// and returns r and 0. +func sqrtRatio(u, v *ed.FieldElement) (r *ed.FieldElement, wasSquare bool) { + sqrtM1 := ed.FieldElement{ + 533094393274173, 2016890930128738, 18285341111199, + 134597186663265, 1486323764102114, + } + a := new(ed.FieldElement) + b := new(ed.FieldElement) + r = new(ed.FieldElement) + + // r = (u * v3) * (u * v7)^((p-5)/8) + v2 := a.Square(v) + uv3 := b.Mul(u, b.Mul(v2, v)) + uv7 := a.Mul(uv3, a.Square(v2)) + r.Mul(uv3, r.Exp22523(uv7)) + + check := a.Mul(v, a.Square(r)) // check = v * r^2 + + uNeg := b.Neg(u) + correctSignSqrt := check.Equals(u) + flippedSignSqrt := check.Equals(uNeg) + flippedSignSqrtI := check.Equals(uNeg.Mul(uNeg, &sqrtM1)) + + rPrime := b.Mul(r, &sqrtM1) // r_prime = SQRT_M1 * r + // r = CT_SELECT(r_prime IF flipped_sign_sqrt | flipped_sign_sqrt_i ELSE r) + cselect(r, rPrime, r, flippedSignSqrt || flippedSignSqrtI) + + r.Abs(r) // Choose the nonnegative square root. + return r, correctSignSqrt || flippedSignSqrt +} + +// cselect sets v to a if cond == 1, and to b if cond == 0. 
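+// Here cond is a Go bool: true selects a, false selects b. The limbs are combined with a full-width mask.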
+func cselect(v, a, b *ed.FieldElement, cond bool) *ed.FieldElement { + const mask64Bits uint64 = (1 << 64) - 1 + + m := uint64(0) + if cond { + m = mask64Bits + } + + v[0] = (m & a[0]) | (^m & b[0]) + v[1] = (m & a[1]) | (^m & b[1]) + v[2] = (m & a[2]) | (^m & b[2]) + v[3] = (m & a[3]) | (^m & b[3]) + v[4] = (m & a[4]) | (^m & b[4]) + return v +} + +func (p *PointEd25519) ToAffineCompressed() []byte { + return p.value.Bytes() +} + +func (p *PointEd25519) ToAffineUncompressed() []byte { + x, y, z, _ := p.value.ExtendedCoordinates() + recip := new(field.Element).Invert(z) + x.Multiply(x, recip) + y.Multiply(y, recip) + var out [64]byte + copy(out[:32], x.Bytes()) + copy(out[32:], y.Bytes()) + return out[:] +} + +func (p *PointEd25519) FromAffineCompressed(inBytes []byte) (Point, error) { + pt, err := edwards25519.NewIdentityPoint().SetBytes(inBytes) + if err != nil { + return nil, err + } + return &PointEd25519{value: pt}, nil +} + +func (p *PointEd25519) FromAffineUncompressed(inBytes []byte) (Point, error) { + if len(inBytes) != 64 { + return nil, fmt.Errorf("invalid byte sequence") + } + if bytes.Equal( + inBytes, + []byte{ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + }, + ) { + return &PointEd25519{value: edwards25519.NewIdentityPoint()}, nil + } + x, err := new(field.Element).SetBytes(inBytes[:32]) + if err != nil { + return nil, err + } + y, err := new(field.Element).SetBytes(inBytes[32:]) + if err != nil { + return nil, err + } + z := new(field.Element).One() + t := new(field.Element).Multiply(x, y) + value, err := edwards25519.NewIdentityPoint().SetExtendedCoordinates(x, y, z, t) + if err != nil { + return nil, err + } + return &PointEd25519{value}, nil +} + +func (p *PointEd25519) CurveName() string { + return ED25519Name +} + +func (p *PointEd25519) SumOfProducts(points []Point, scalars []Scalar) Point { + nScalars := make([]*edwards25519.Scalar, len(scalars)) + nPoints := make([]*edwards25519.Point, len(points)) + for i, sc := range scalars { + s, err := edwards25519.NewScalar().SetCanonicalBytes(sc.Bytes()) + if err != nil { + return nil + } + nScalars[i] = s + } + for i, pt := range points { + pp, ok := pt.(*PointEd25519) + if !ok { + return nil + } + nPoints[i] = pp.value + } + pt := edwards25519.NewIdentityPoint().MultiScalarMult(nScalars, nPoints) + return &PointEd25519{value: pt} +} + +func (p *PointEd25519) VarTimeDoubleScalarBaseMult(a Scalar, A Point, b Scalar) Point { + AA, ok := A.(*PointEd25519) + if !ok { + return nil + } + aa, ok := a.(*ScalarEd25519) + if !ok { + return nil + } + bb, ok := b.(*ScalarEd25519) + if !ok { + return nil + } + value := edwards25519.NewIdentityPoint(). 
+ VarTimeDoubleScalarBaseMult(aa.value, AA.value, bb.value) + return &PointEd25519{value} +} + +func (p *PointEd25519) MarshalBinary() ([]byte, error) { + return pointMarshalBinary(p) +} + +func (p *PointEd25519) UnmarshalBinary(input []byte) error { + pt, err := pointUnmarshalBinary(input) + if err != nil { + return err + } + ppt, ok := pt.(*PointEd25519) + if !ok { + return fmt.Errorf("invalid point") + } + p.value = ppt.value + return nil +} + +func (p *PointEd25519) MarshalText() ([]byte, error) { + return pointMarshalText(p) +} + +func (p *PointEd25519) UnmarshalText(input []byte) error { + pt, err := pointUnmarshalText(input) + if err != nil { + return err + } + ppt, ok := pt.(*PointEd25519) + if !ok { + return fmt.Errorf("invalid point") + } + p.value = ppt.value + return nil +} + +func (p *PointEd25519) MarshalJSON() ([]byte, error) { + return pointMarshalJSON(p) +} + +func (p *PointEd25519) UnmarshalJSON(input []byte) error { + pt, err := pointUnmarshalJSON(input) + if err != nil { + return err + } + P, ok := pt.(*PointEd25519) + if !ok { + return fmt.Errorf("invalid type") + } + p.value = P.value + return nil +} + +func (p *PointEd25519) GetEdwardsPoint() *edwards25519.Point { + return edwards25519.NewIdentityPoint().Set(p.value) +} + +func (p *PointEd25519) SetEdwardsPoint(pt *edwards25519.Point) *PointEd25519 { + return &PointEd25519{value: edwards25519.NewIdentityPoint().Set(pt)} +} + +// Attempt to convert to an `EdwardsPoint`, using the supplied +// choice of sign for the `EdwardsPoint`. +// - `sign`: a `u8` donating the desired sign of the resulting +// `EdwardsPoint`. `0` denotes positive and `1` negative. +func toEdwards(u *ed.FieldElement, sign byte) *PointEd25519 { + one := new(ed.FieldElement).SetOne() + // To decompress the Montgomery u coordinate to an + // `EdwardsPoint`, we apply the birational map to obtain the + // Edwards y coordinate, then do Edwards decompression. + // + // The birational map is y = (u-1)/(u+1). + // + // The exceptional points are the zeros of the denominator, + // i.e., u = -1. + // + // But when u = -1, v^2 = u*(u^2+486662*u+1) = 486660. + // + // Since this is nonsquare mod p, u = -1 corresponds to a point + // on the twist, not the curve, so we can reject it early. + if u.Equals(new(ed.FieldElement).Neg(one)) { + return nil + } + + // y = (u-1)/(u+1) + yLhs := new(ed.FieldElement).Sub(u, one) + yRhs := new(ed.FieldElement).Add(u, one) + yInv := new(ed.FieldElement).Inverse(yRhs) + y := new(ed.FieldElement).Mul(yLhs, yInv) + yBytes := y.Bytes() + yBytes[31] ^= sign << 7 + + pt, err := edwards25519.NewIdentityPoint().SetBytes(yBytes[:]) + if err != nil { + return nil + } + pt.MultByCofactor(pt) + return &PointEd25519{value: pt} +} + +// Perform the Elligator2 mapping to a Montgomery point encoded as a 32 byte value +// +// See +func elligatorEncode(r0 *ed.FieldElement) *ed.FieldElement { + montgomeryA := &ed.FieldElement{ + 486662, 0, 0, 0, 0, + } + // montgomeryANeg is equal to -486662. 
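+ // (that is, p - 486662 in the radix-2^51 limb representation used by ed.FieldElement)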
+ montgomeryANeg := &ed.FieldElement{ + 2251799813198567, + 2251799813685247, + 2251799813685247, + 2251799813685247, + 2251799813685247, + } + t := new(ed.FieldElement) + one := new(ed.FieldElement).SetOne() + // 2r^2 + d1 := new(ed.FieldElement).Add(one, t.DoubledSquare(r0)) + // A/(1+2r^2) + d := new(ed.FieldElement).Mul(montgomeryANeg, t.Inverse(d1)) + dsq := new(ed.FieldElement).Square(d) + au := new(ed.FieldElement).Mul(montgomeryA, d) + + inner := new(ed.FieldElement).Add(dsq, au) + inner.Add(inner, one) + + // d^3 + Ad^2 + d + eps := new(ed.FieldElement).Mul(d, inner) + _, wasSquare := sqrtRatio(eps, one) + + zero := new(ed.FieldElement).SetZero() + aTemp := new(ed.FieldElement).SetZero() + // 0 or A if non-square + cselect(aTemp, zero, montgomeryA, wasSquare) + // d, or d+A if non-square + u := new(ed.FieldElement).Add(d, aTemp) + // d or -d-A if non-square + cselect(u, u, new(ed.FieldElement).Neg(u), wasSquare) + return u +} diff --git a/core/curves/ed25519_curve_test.go b/core/curves/ed25519_curve_test.go new file mode 100644 index 0000000..16238a0 --- /dev/null +++ b/core/curves/ed25519_curve_test.go @@ -0,0 +1,621 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package curves + +import ( + crand "crypto/rand" + "encoding/hex" + "math/big" + "testing" + + ed "filippo.io/edwards25519" + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/internal" +) + +func TestScalarEd25519Random(t *testing.T) { + ed25519 := ED25519() + sc := ed25519.Scalar.Random(testRng()) + s, ok := sc.(*ScalarEd25519) + require.True(t, ok) + expected := toRSc("feaa6a9d6dda758da6145f7d411a3af9f8a120698e0093faa97085b384c3f00e") + require.Equal(t, s.value.Equal(expected), 1) + // Try 10 random values + for i := 0; i < 10; i++ { + sc := ed25519.Scalar.Random(crand.Reader) + _, ok := sc.(*ScalarEd25519) + require.True(t, ok) + require.True(t, !sc.IsZero()) + } +} + +func TestScalarEd25519Hash(t *testing.T) { + var b [32]byte + ed25519 := ED25519() + sc := ed25519.Scalar.Hash(b[:]) + s, ok := sc.(*ScalarEd25519) + require.True(t, ok) + expected := toRSc("9d574494a02d72f5ff311cf0fb844d0fdd6103b17255274e029bdeed7207d409") + require.Equal(t, s.value.Equal(expected), 1) +} + +func TestScalarEd25519Zero(t *testing.T) { + ed25519 := ED25519() + sc := ed25519.Scalar.Zero() + require.True(t, sc.IsZero()) + require.True(t, sc.IsEven()) +} + +func TestScalarEd25519One(t *testing.T) { + ed25519 := ED25519() + sc := ed25519.Scalar.One() + require.True(t, sc.IsOne()) + require.True(t, sc.IsOdd()) +} + +func TestScalarEd25519New(t *testing.T) { + ed25519 := ED25519() + three := ed25519.Scalar.New(3) + require.True(t, three.IsOdd()) + four := ed25519.Scalar.New(4) + require.True(t, four.IsEven()) + neg1 := ed25519.Scalar.New(-1) + require.True(t, neg1.IsEven()) + neg2 := ed25519.Scalar.New(-2) + require.True(t, neg2.IsOdd()) +} + +func TestScalarEd25519Square(t *testing.T) { + ed25519 := ED25519() + three := ed25519.Scalar.New(3) + nine := ed25519.Scalar.New(9) + require.Equal(t, three.Square().Cmp(nine), 0) +} + +func TestScalarEd25519Cube(t *testing.T) { + ed25519 := ED25519() + three := ed25519.Scalar.New(3) + twentySeven := ed25519.Scalar.New(27) + require.Equal(t, three.Cube().Cmp(twentySeven), 0) +} + +func TestScalarEd25519Double(t *testing.T) { + ed25519 := ED25519() + three := ed25519.Scalar.New(3) + six := ed25519.Scalar.New(6) + require.Equal(t, three.Double().Cmp(six), 0) +} + +func TestScalarEd25519Neg(t *testing.T) { + ed25519 := 
ED25519() + one := ed25519.Scalar.One() + neg1 := ed25519.Scalar.New(-1) + require.Equal(t, one.Neg().Cmp(neg1), 0) + lotsOfThrees := ed25519.Scalar.New(333333) + expected := ed25519.Scalar.New(-333333) + require.Equal(t, lotsOfThrees.Neg().Cmp(expected), 0) +} + +func TestScalarEd25519Invert(t *testing.T) { + ed25519 := ED25519() + nine := ed25519.Scalar.New(9) + actual, _ := nine.Invert() + sa, _ := actual.(*ScalarEd25519) + expected := toRSc("c3d9c4db0516043013b1e1ce8637dc92e3388ee3388ee3388ee3388ee3388e03") + require.Equal(t, sa.value.Equal(expected), 1) +} + +func TestScalarEd25519Sqrt(t *testing.T) { + ed25519 := ED25519() + nine := ed25519.Scalar.New(9) + actual, err := nine.Sqrt() + sa, _ := actual.(*ScalarEd25519) + expected := toRSc("03") + require.NoError(t, err) + require.Equal(t, sa.value.Equal(expected), 1) +} + +func TestScalarEd25519Add(t *testing.T) { + ed25519 := ED25519() + nine := ed25519.Scalar.New(9) + six := ed25519.Scalar.New(6) + fifteen := nine.Add(six) + require.NotNil(t, fifteen) + expected := ed25519.Scalar.New(15) + require.Equal(t, expected.Cmp(fifteen), 0) + + upper := ed25519.Scalar.New(-3) + actual := upper.Add(nine) + require.NotNil(t, actual) + require.Equal(t, actual.Cmp(six), 0) +} + +func TestScalarEd25519Sub(t *testing.T) { + ed25519 := ED25519() + nine := ed25519.Scalar.New(9) + six := ed25519.Scalar.New(6) + expected := ed25519.Scalar.New(-3) + + actual := six.Sub(nine) + require.Equal(t, expected.Cmp(actual), 0) + + actual = nine.Sub(six) + require.Equal(t, actual.Cmp(ed25519.Scalar.New(3)), 0) +} + +func TestScalarEd25519Mul(t *testing.T) { + ed25519 := ED25519() + nine := ed25519.Scalar.New(9) + six := ed25519.Scalar.New(6) + actual := nine.Mul(six) + require.Equal(t, actual.Cmp(ed25519.Scalar.New(54)), 0) + + upper := ed25519.Scalar.New(-1) + require.Equal(t, upper.Mul(upper).Cmp(ed25519.Scalar.New(1)), 0) +} + +func TestScalarEd25519Div(t *testing.T) { + ed25519 := ED25519() + nine := ed25519.Scalar.New(9) + actual := nine.Div(nine) + require.Equal(t, actual.Cmp(ed25519.Scalar.New(1)), 0) + require.Equal(t, ed25519.Scalar.New(54).Div(nine).Cmp(ed25519.Scalar.New(6)), 0) +} + +func TestScalarEd25519Serialize(t *testing.T) { + ed25519 := ED25519() + sc := ed25519.Scalar.New(255) + sequence := sc.Bytes() + require.Equal(t, len(sequence), 32) + require.Equal( + t, + sequence, + []byte{ + 0xff, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + }, + ) + ret, err := ed25519.Scalar.SetBytes(sequence) + require.NoError(t, err) + require.Equal(t, ret.Cmp(sc), 0) + + // Try 10 random values + for i := 0; i < 10; i++ { + sc = ed25519.Scalar.Random(crand.Reader) + sequence = sc.Bytes() + require.Equal(t, len(sequence), 32) + ret, err = ed25519.Scalar.SetBytes(sequence) + require.NoError(t, err) + require.Equal(t, ret.Cmp(sc), 0) + } +} + +func TestScalarEd25519Nil(t *testing.T) { + ed25519 := ED25519() + one := ed25519.Scalar.New(1) + require.Nil(t, one.Add(nil)) + require.Nil(t, one.Sub(nil)) + require.Nil(t, one.Mul(nil)) + require.Nil(t, one.Div(nil)) + require.Nil(t, ed25519.Scalar.Random(nil)) + require.Equal(t, one.Cmp(nil), -2) + _, err := ed25519.Scalar.SetBigInt(nil) + require.Error(t, err) +} + +func TestPointEd25519Random(t *testing.T) { + ed25519 := ED25519() + sc := ed25519.Point.Random(testRng()) + s, ok := sc.(*PointEd25519) + require.True(t, ok) + expected := 
toRPt("6011540c6231421a70ced5f577432531f198d318facfaad6e52cc42fba6e6fc5") + require.True(t, s.Equal(&PointEd25519{expected})) + // Try 25 random values + for i := 0; i < 25; i++ { + sc := ed25519.Point.Random(crand.Reader) + _, ok := sc.(*PointEd25519) + require.True(t, ok) + require.True(t, !sc.IsIdentity()) + pBytes := sc.ToAffineCompressed() + _, err := ed.NewIdentityPoint().SetBytes(pBytes) + require.NoError(t, err) + } +} + +func TestPointEd25519Hash(t *testing.T) { + var b [32]byte + ed25519 := ED25519() + sc := ed25519.Point.Hash(b[:]) + s, ok := sc.(*PointEd25519) + require.True(t, ok) + expected := toRPt("b4d75c3bb03ca644ab6c6d2a955c911003d8cfa719415de93a6b85eeb0c8dd97") + require.True(t, s.Equal(&PointEd25519{expected})) + + // Fuzz test + for i := 0; i < 25; i++ { + _, _ = crand.Read(b[:]) + sc = ed25519.Point.Hash(b[:]) + require.NotNil(t, sc) + } +} + +func TestPointEd25519Identity(t *testing.T) { + ed25519 := ED25519() + sc := ed25519.Point.Identity() + require.True(t, sc.IsIdentity()) + require.Equal( + t, + sc.ToAffineCompressed(), + []byte{ + 1, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + }, + ) +} + +func TestPointEd25519Generator(t *testing.T) { + ed25519 := ED25519() + sc := ed25519.Point.Generator() + s, ok := sc.(*PointEd25519) + require.True(t, ok) + require.Equal( + t, + s.ToAffineCompressed(), + []byte{ + 0x58, + 0x66, + 0x66, + 0x66, + 0x66, + 0x66, + 0x66, + 0x66, + 0x66, + 0x66, + 0x66, + 0x66, + 0x66, + 0x66, + 0x66, + 0x66, + 0x66, + 0x66, + 0x66, + 0x66, + 0x66, + 0x66, + 0x66, + 0x66, + 0x66, + 0x66, + 0x66, + 0x66, + 0x66, + 0x66, + 0x66, + 0x66, + }, + ) +} + +func TestPointEd25519Set(t *testing.T) { + ed25519 := ED25519() + iden, err := ed25519.Point.Set(big.NewInt(0), big.NewInt(0)) + require.NoError(t, err) + require.True(t, iden.IsIdentity()) + xBytes, _ := hex.DecodeString( + "1ad5258f602d56c9b2a7259560c72c695cdcd6fd31e2a4c0fe536ecdd3366921", + ) + yBytes, _ := hex.DecodeString( + "5866666666666666666666666666666666666666666666666666666666666666", + ) + x := new(big.Int).SetBytes(internal.ReverseScalarBytes(xBytes)) + y := new(big.Int).SetBytes(internal.ReverseScalarBytes(yBytes)) + newPoint, err := ed25519.Point.Set(x, y) + require.NoError(t, err) + require.NotEqualf( + t, + iden, + newPoint, + "after setting valid x and y, the point should NOT be identity point", + ) + + emptyX := new(big.Int).SetBytes(internal.ReverseScalarBytes([]byte{})) + identityPoint, err := ed25519.Point.Set(emptyX, y) + require.NoError(t, err) + require.Equalf(t, iden, identityPoint, "When x is empty, the point will be identity") +} + +func TestPointEd25519Double(t *testing.T) { + ed25519 := ED25519() + g := ed25519.Point.Generator() + g2 := g.Double() + require.True(t, g2.Equal(g.Mul(ed25519.Scalar.New(2)))) + i := ed25519.Point.Identity() + require.True(t, i.Double().Equal(i)) +} + +func TestPointEd25519Neg(t *testing.T) { + ed25519 := ED25519() + g := ed25519.Point.Generator().Neg() + require.True(t, g.Neg().Equal(ed25519.Point.Generator())) + require.True(t, ed25519.Point.Identity().Neg().Equal(ed25519.Point.Identity())) +} + +func TestPointEd25519Add(t *testing.T) { + ed25519 := ED25519() + pt := ed25519.Point.Generator() + require.True(t, pt.Add(pt).Equal(pt.Double())) + require.True(t, pt.Mul(ed25519.Scalar.New(3)).Equal(pt.Add(pt).Add(pt))) +} + +func TestPointEd25519Sub(t *testing.T) { + ed25519 := ED25519() + g := ed25519.Point.Generator() + pt := 
ed25519.Point.Generator().Mul(ed25519.Scalar.New(4)) + require.True(t, pt.Sub(g).Sub(g).Sub(g).Equal(g)) + require.True(t, pt.Sub(g).Sub(g).Sub(g).Sub(g).IsIdentity()) +} + +func TestPointEd25519Mul(t *testing.T) { + ed25519 := ED25519() + g := ed25519.Point.Generator() + pt := ed25519.Point.Generator().Mul(ed25519.Scalar.New(4)) + require.True(t, g.Double().Double().Equal(pt)) +} + +func TestPointEd25519Serialize(t *testing.T) { + ed25519 := ED25519() + ss := ed25519.Scalar.Random(testRng()) + g := ed25519.Point.Generator() + + ppt := g.Mul(ss) + expectedC := []byte{ + 0x7f, + 0x5b, + 0xa, + 0xd9, + 0xb8, + 0xce, + 0xb7, + 0x7, + 0x4c, + 0x10, + 0xc8, + 0xb4, + 0x27, + 0xe8, + 0xd2, + 0x28, + 0x50, + 0x42, + 0x6c, + 0x0, + 0x8a, + 0x3, + 0x72, + 0x2b, + 0x7c, + 0x3c, + 0x37, + 0x6f, + 0xf8, + 0x8f, + 0x42, + 0x5d, + } + expectedU := []byte{ + 0x70, + 0xad, + 0x4, + 0xa1, + 0x6, + 0x8, + 0x9f, + 0x47, + 0xe1, + 0xe8, + 0x9b, + 0x9c, + 0x81, + 0x5a, + 0xfb, + 0xb9, + 0x85, + 0x6a, + 0x2c, + 0xa, + 0xbc, + 0xff, + 0xe, + 0xc6, + 0xa0, + 0xb0, + 0xac, + 0x75, + 0xc, + 0xd8, + 0x59, + 0x53, + 0x7f, + 0x5b, + 0xa, + 0xd9, + 0xb8, + 0xce, + 0xb7, + 0x7, + 0x4c, + 0x10, + 0xc8, + 0xb4, + 0x27, + 0xe8, + 0xd2, + 0x28, + 0x50, + 0x42, + 0x6c, + 0x0, + 0x8a, + 0x3, + 0x72, + 0x2b, + 0x7c, + 0x3c, + 0x37, + 0x6f, + 0xf8, + 0x8f, + 0x42, + 0x5d, + } + require.Equal(t, ppt.ToAffineCompressed(), expectedC) + require.Equal(t, ppt.ToAffineUncompressed(), expectedU) + retP, err := ppt.FromAffineCompressed(ppt.ToAffineCompressed()) + require.NoError(t, err) + require.True(t, ppt.Equal(retP)) + retP, err = ppt.FromAffineUncompressed(ppt.ToAffineUncompressed()) + require.NoError(t, err) + require.True(t, ppt.Equal(retP)) + + // smoke test + for i := 0; i < 25; i++ { + s := ed25519.Scalar.Random(crand.Reader) + pt := g.Mul(s) + cmprs := pt.ToAffineCompressed() + require.Equal(t, len(cmprs), 32) + retC, err := pt.FromAffineCompressed(cmprs) + require.NoError(t, err) + require.True(t, pt.Equal(retC)) + + un := pt.ToAffineUncompressed() + require.Equal(t, len(un), 64) + retU, err := pt.FromAffineUncompressed(un) + require.NoError(t, err) + require.True(t, pt.Equal(retU)) + } +} + +func TestPointEd25519Nil(t *testing.T) { + ed25519 := ED25519() + one := ed25519.Point.Generator() + require.Nil(t, one.Add(nil)) + require.Nil(t, one.Sub(nil)) + require.Nil(t, one.Mul(nil)) + require.Nil(t, ed25519.Scalar.Random(nil)) + require.False(t, one.Equal(nil)) + _, err := ed25519.Scalar.SetBigInt(nil) + require.Error(t, err) +} + +func TestPointEd25519SumOfProducts(t *testing.T) { + lhs := new(PointEd25519).Generator().Mul(new(ScalarEd25519).New(50)) + points := make([]Point, 5) + for i := range points { + points[i] = new(PointEd25519).Generator() + } + scalars := []Scalar{ + new(ScalarEd25519).New(8), + new(ScalarEd25519).New(9), + new(ScalarEd25519).New(10), + new(ScalarEd25519).New(11), + new(ScalarEd25519).New(12), + } + rhs := lhs.SumOfProducts(points, scalars) + require.NotNil(t, rhs) + require.True(t, lhs.Equal(rhs)) +} + +func TestPointEd25519VarTimeDoubleScalarBaseMult(t *testing.T) { + curve := ED25519() + h := curve.Point.Hash([]byte("TestPointEd25519VarTimeDoubleScalarBaseMult")) + a := curve.Scalar.New(23) + b := curve.Scalar.New(77) + H, ok := h.(*PointEd25519) + require.True(t, ok) + rhs := H.VarTimeDoubleScalarBaseMult(a, H, b) + lhs := h.Mul(a).Add(curve.Point.Generator().Mul(b)) + require.True(t, lhs.Equal(rhs)) +} + +func toRSc(hx string) *ed.Scalar { + e, _ := hex.DecodeString(hx) + var data [32]byte + 
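+	// copy the decoded hex into a fixed 32-byte buffer before parsing it as a canonical scalar encoding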
copy(data[:], e) + value, _ := new(ed.Scalar).SetCanonicalBytes(data[:]) + return value +} + +func toRPt(hx string) *ed.Point { + e, _ := hex.DecodeString(hx) + var data [32]byte + copy(data[:], e) + pt, _ := new(PointEd25519).FromAffineCompressed(data[:]) + return pt.(*PointEd25519).value +} diff --git a/core/curves/field.go b/core/curves/field.go new file mode 100755 index 0000000..87ca0c2 --- /dev/null +++ b/core/curves/field.go @@ -0,0 +1,282 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +// Package curves: Field implementation IS NOT constant time as it leverages math/big for big number operations. +package curves + +import ( + "crypto/rand" + "encoding/json" + "fmt" + "io" + "math/big" + "sync" +) + +var ( + ed25519SubGroupOrderOnce sync.Once + ed25519SubGroupOrder *big.Int +) + +// Field is a finite field. +type Field struct { + *big.Int +} + +// Element is a group element within a finite field. +type Element struct { + Modulus *Field `json:"modulus"` + Value *big.Int `json:"value"` +} + +// ElementJSON is used in JSON<>Element conversions. +// For years, big.Int hasn't properly supported JSON unmarshaling +// https://github.com/golang/go/issues/28154 +type ElementJSON struct { + Modulus string `json:"modulus"` + Value string `json:"value"` +} + +// Marshal Element to JSON +func (x *Element) MarshalJSON() ([]byte, error) { + return json.Marshal(ElementJSON{ + Modulus: x.Modulus.String(), + Value: x.Value.String(), + }) +} + +func (x *Element) UnmarshalJSON(bytes []byte) error { + var e ElementJSON + err := json.Unmarshal(bytes, &e) + if err != nil { + return err + } + // Convert the strings to big.Ints + modulus, ok := new(big.Int).SetString(e.Modulus, 10) + if !ok { + return fmt.Errorf("failed to unmarshal modulus string '%v' to big.Int", e.Modulus) + } + x.Modulus = &Field{modulus} + x.Value, ok = new(big.Int).SetString(e.Value, 10) + if !ok { + return fmt.Errorf("failed to unmarshal value string '%v' to big.Int", e.Value) + } + return nil +} + +// The probability of returning true for a randomly chosen +// non-prime is at most ¼ⁿ. 64 is a widely used standard +// that is more than sufficient. +const millerRabinRounds = 64 + +// New is a constructor for a Field. +func NewField(modulus *big.Int) *Field { + // For our purposes we never expect to be dealing with a non-prime field. This provides some protection against + // accidentally doing that. 
+	if !modulus.ProbablyPrime(millerRabinRounds) {
+		panic(fmt.Sprintf("modulus: %x is not a prime", modulus))
+	}
+
+	return &Field{modulus}
+}
+
+func newElement(field *Field, value *big.Int) *Element {
+	if !field.IsValid(value) {
+		panic(fmt.Sprintf("value: %x is not within field: %x", value, field))
+	}
+
+	return &Element{field, value}
+}
+
+// IsValid returns whether or not the value is within [0, modulus)
+func (f Field) IsValid(value *big.Int) bool {
+	// value < modulus && value >= 0
+	return value.Cmp(f.Int) < 0 && value.Sign() >= 0
+}
+
+func (f Field) NewElement(value *big.Int) *Element {
+	return newElement(&f, value)
+}
+
+func (f Field) Zero() *Element {
+	return newElement(&f, big.NewInt(0))
+}
+
+func (f Field) One() *Element {
+	return newElement(&f, big.NewInt(1))
+}
+
+func (f Field) RandomElement(r io.Reader) (*Element, error) {
+	if r == nil {
+		r = rand.Reader
+	}
+	var randInt *big.Int
+	var err error
+	// Ed25519 needs to do special handling
+	// in case the value is used in
+	// Scalar multiplications with points
+	if f.Int.Cmp(Ed25519Order()) == 0 {
+		scalar := NewEd25519Scalar()
+		randInt, err = scalar.RandomWithReader(r)
+	} else {
+		// Read a random integer within the field. This is defined as [0, max) so we don't need to
+		// explicitly check it is within the field. If it is not, NewElement will panic anyways.
+		randInt, err = rand.Int(r, f.Int)
+	}
+	if err != nil {
+		return nil, err
+	}
+	return newElement(&f, randInt), nil
+}
+
+// ElementFromBytes initializes a new field element from big-endian bytes
+func (f Field) ElementFromBytes(bytes []byte) *Element {
+	return newElement(&f, new(big.Int).SetBytes(bytes))
+}
+
+// ReducedElementFromBytes initializes a new field element from big-endian bytes and reduces it by
+// the modulus of the field.
+//
+// WARNING: If this is used with cryptographic constructions which rely on a uniform distribution of
+// values, this may introduce a bias to the value of the returned field element. This happens when
+// the integer range of the provided bytes is not an integer multiple of the field order.
+//
+// Assume we are working in a field with a modulus of 3 and the range of the uniform random bytes we
+// provide as input is 5. Thus, the set of field elements is {0, 1, 2} and the set of integer values
+// for the input bytes is: {0, 1, 2, 3, 4}. What is the distribution of the output values produced
+// by this function?
+//
+// ReducedElementFromBytes(0) => 0
+// ReducedElementFromBytes(1) => 1
+// ReducedElementFromBytes(2) => 2
+// ReducedElementFromBytes(3) => 0
+// ReducedElementFromBytes(4) => 1
+//
+// For a value space V and random value v, a uniform distribution is defined as P[V = v] = 1/|V|,
+// where |V| is the order of the field. Using the results from above, we see that P[v = 0] = 2/5,
+// P[v = 1] = 2/5, and P[v = 2] = 1/5. For a uniform distribution we would expect each of these to
+// equal 1/3. As they do not, this function does not return uniform output for that example.
+//
+// To see why this is okay if the range is a multiple of the field order, change the input range to
+// 6 and notice that now each output has a probability of 2/6 = 1/3, and the output is uniform.
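+//
+// A common mitigation when uniform output is required from raw bytes (not implemented by this
+// function) is to reduce from an input that is much wider than the modulus, e.g. 64 uniform bytes
+// for a ~256-bit field, which makes the residual bias negligible; the SetBytesWide methods on the
+// scalar types elsewhere in this module take that approach.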
+func (f Field) ReducedElementFromBytes(bytes []byte) *Element { + value := new(big.Int).SetBytes(bytes) + value.Mod(value, f.Int) + return newElement(&f, value) +} + +func (x Element) Field() *Field { + return x.Modulus +} + +// Add returns the sum x+y +func (x Element) Add(y *Element) *Element { + x.validateFields(y) + + sum := new(big.Int).Add(x.Value, y.Value) + sum.Mod(sum, x.Modulus.Int) + return newElement(x.Modulus, sum) +} + +// Sub returns the difference x-y +func (x Element) Sub(y *Element) *Element { + x.validateFields(y) + + difference := new(big.Int).Sub(x.Value, y.Value) + difference.Mod(difference, x.Modulus.Int) + return newElement(x.Modulus, difference) +} + +// Neg returns the field negation +func (x Element) Neg() *Element { + z := new(big.Int).Neg(x.Value) + z.Mod(z, x.Modulus.Int) + return newElement(x.Modulus, z) +} + +// Mul returns the product x*y +func (x Element) Mul(y *Element) *Element { + x.validateFields(y) + + product := new(big.Int).Mul(x.Value, y.Value) + product.Mod(product, x.Modulus.Int) + return newElement(x.Modulus, product) +} + +// Div returns the quotient x/y +func (x Element) Div(y *Element) *Element { + x.validateFields(y) + + yInv := new(big.Int).ModInverse(y.Value, x.Modulus.Int) + quotient := new(big.Int).Mul(x.Value, yInv) + quotient.Mod(quotient, x.Modulus.Int) + return newElement(x.Modulus, quotient) +} + +// Pow computes x^y reduced by the modulus +func (x Element) Pow(y *Element) *Element { + x.validateFields(y) + + return newElement(x.Modulus, new(big.Int).Exp(x.Value, y.Value, x.Modulus.Int)) +} + +func (x Element) Invert() *Element { + return newElement(x.Modulus, new(big.Int).ModInverse(x.Value, x.Modulus.Int)) +} + +func (x Element) Sqrt() *Element { + return newElement(x.Modulus, new(big.Int).ModSqrt(x.Value, x.Modulus.Int)) +} + +// BigInt returns value as a big.Int +func (x Element) BigInt() *big.Int { + return x.Value +} + +// Bytes returns the value as bytes +func (x Element) Bytes() []byte { + return x.BigInt().Bytes() +} + +// IsEqual returns x == y +func (x Element) IsEqual(y *Element) bool { + if !x.isEqualFields(y) { + return false + } + + return x.Value.Cmp(y.Value) == 0 +} + +// Clone returns a new copy of the element +func (x Element) Clone() *Element { + return x.Modulus.ElementFromBytes(x.Bytes()) +} + +func (x Element) isEqualFields(y *Element) bool { + return x.Modulus.Int.Cmp(y.Modulus.Int) == 0 +} + +func (x Element) validateFields(y *Element) { + if !x.isEqualFields(y) { + panic("fields must match for valid binary operation") + } +} + +// SubgroupOrder returns the order of the Ed25519 base Point. +func Ed25519Order() *big.Int { + ed25519SubGroupOrderOnce.Do(func() { + order, ok := new(big.Int).SetString( + "1000000000000000000000000000000014DEF9DEA2F79CD65812631A5CF5D3ED", + 16, + ) + if !ok { + panic("invalid hex string provided. This should never happen as it is constant.") + } + ed25519SubGroupOrder = order + }) + + return ed25519SubGroupOrder +} diff --git a/core/curves/field_test.go b/core/curves/field_test.go new file mode 100755 index 0000000..5b2f327 --- /dev/null +++ b/core/curves/field_test.go @@ -0,0 +1,305 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package curves + +import ( + "encoding/json" + "errors" + "fmt" + "math/big" + "testing" + + "github.com/stretchr/testify/require" +) + +var ( + one = big.NewInt(1) + modulus, modulusOk = new(big.Int).SetString( + "1000000000000000000000000000000014DEF9DEA2F79CD65812631A5CF5D3ED", + 16, + ) + oneBelowModulus = zero().Sub(modulus, one) + oneAboveModulus = zero().Add(modulus, one) + field25519 = NewField(modulus) +) + +type buggedReader struct{} + +func (r buggedReader) Read(p []byte) (n int, err error) { + return 0, errors.New("EOF") +} + +func zero() *big.Int { + return new(big.Int) +} + +func assertElementZero(t *testing.T, e *Element) { + require.Equal(t, zero().Bytes(), e.Bytes()) +} + +type binaryOperation func(*Element) *Element + +func assertUnequalFieldsPanic(t *testing.T, b binaryOperation) { + altField := NewField(big.NewInt(23)) + altElement := altField.NewElement(one) + + require.PanicsWithValue( + t, + "fields must match for valid binary operation", + func() { b(altElement) }, + ) +} + +func TestFieldModulus(t *testing.T) { + require.True(t, modulusOk) +} + +func TestNewField(t *testing.T) { + require.PanicsWithValue( + t, + fmt.Sprintf("modulus: %x is not a prime", oneBelowModulus), + func() { NewField(oneBelowModulus) }, + ) + require.NotPanics( + t, + func() { NewField(modulus) }, + ) +} + +func TestNewElement(t *testing.T) { + require.PanicsWithValue( + t, + fmt.Sprintf("value: %x is not within field: %x", modulus, field25519.Int), + func() { newElement(field25519, modulus) }, + ) + require.NotPanics( + t, + func() { newElement(field25519, oneBelowModulus) }, + ) +} + +func TestElementIsValid(t *testing.T) { + require.False(t, field25519.IsValid(zero().Neg(one))) + require.False(t, field25519.IsValid(modulus)) + require.False(t, field25519.IsValid(oneAboveModulus)) + require.True(t, field25519.IsValid(oneBelowModulus)) +} + +func TestFieldNewElement(t *testing.T) { + element := field25519.NewElement(oneBelowModulus) + + require.Equal(t, oneBelowModulus, element.Value) + require.Equal(t, field25519, element.Field()) +} + +func TestZeroElement(t *testing.T) { + require.Equal(t, zero(), field25519.Zero().Value) + require.Equal(t, field25519, field25519.Zero().Field()) +} + +func TestOneElement(t *testing.T) { + require.Equal(t, field25519.One().Value, one) + require.Equal(t, field25519.One().Field(), field25519) +} + +func TestRandomElement(t *testing.T) { + randomElement1, err := field25519.RandomElement(nil) + require.NoError(t, err) + randomElement2, err := field25519.RandomElement(nil) + require.NoError(t, err) + randomElement3, err := field25519.RandomElement(new(buggedReader)) + require.Error(t, err) + + require.Equal(t, field25519, randomElement1.Field()) + require.Equal(t, field25519, randomElement2.Field()) + require.NotEqual(t, randomElement1.Value, randomElement2.Value) + require.Nil(t, randomElement3) +} + +func TestElementFromBytes(t *testing.T) { + element := field25519.ElementFromBytes(oneBelowModulus.Bytes()) + + require.Equal(t, field25519, element.Field()) + require.Equal(t, oneBelowModulus, element.Value) +} + +func TestReducedElementFromBytes(t *testing.T) { + element := field25519.ReducedElementFromBytes(oneBelowModulus.Bytes()) + + require.Equal(t, field25519, element.Field()) + require.Equal(t, oneBelowModulus, element.Value) + + element = field25519.ReducedElementFromBytes(oneAboveModulus.Bytes()) + + require.Equal(t, field25519, element.Field()) + require.Equal(t, one, element.Value) +} + +func 
TestAddElement(t *testing.T) { + element1 := field25519.NewElement(one) + element2 := field25519.NewElement(big.NewInt(2)) + element3 := field25519.NewElement(oneBelowModulus) + element4 := &Element{field25519, modulus} + + require.Equal(t, element2, element1.Add(element1)) + require.Equal(t, big.NewInt(3), element1.Add(element2).Value) + require.Equal(t, big.NewInt(3), element2.Add(element1).Value) + require.Equal(t, one, element1.Add(element4).Value) + require.Equal(t, one, element3.Add(element2).Value) + assertElementZero(t, element1.Add(element3)) + assertUnequalFieldsPanic(t, element1.Add) +} + +func TestSubElement(t *testing.T) { + element1 := field25519.NewElement(one) + element2 := field25519.NewElement(big.NewInt(2)) + element3 := field25519.NewElement(oneBelowModulus) + element4 := &Element{field25519, modulus} + + assertElementZero(t, element1.Sub(element1)) + require.Equal(t, element3, element1.Sub(element2)) + require.Equal(t, element1, element2.Sub(element1)) + require.Equal(t, element1, element1.Sub(element4)) + require.Equal(t, element3, element4.Sub(element1)) + require.Equal(t, element1, element4.Sub(element3)) + require.Equal(t, element3, element3.Sub(element4)) + assertUnequalFieldsPanic(t, element1.Sub) +} + +func TestMulElement(t *testing.T) { + element1 := field25519.NewElement(one) + element2 := field25519.NewElement(big.NewInt(2)) + element3 := field25519.NewElement(oneBelowModulus) + element4 := field25519.NewElement(zero()) + expectedProduct, ok := new(big.Int).SetString( + "1000000000000000000000000000000014def9dea2f79cd65812631a5cf5d3eb", + 16, + ) + require.True(t, ok) + + assertElementZero(t, element1.Mul(element4)) + assertElementZero(t, element4.Mul(element1)) + require.Equal(t, element3, element1.Mul(element3)) + require.Equal(t, element3, element3.Mul(element1)) + require.Equal(t, expectedProduct, element3.Mul(element2).Value) + require.Equal(t, expectedProduct, element2.Mul(element3).Value) + assertUnequalFieldsPanic(t, element1.Mul) +} + +func TestDivElement(t *testing.T) { + element1 := field25519.NewElement(one) + element2 := field25519.NewElement(big.NewInt(2)) + element3 := field25519.NewElement(oneBelowModulus) + element4 := field25519.NewElement(zero()) + expectedQuotient1, ok := new(big.Int).SetString( + "80000000000000000000000000000000a6f7cef517bce6b2c09318d2e7ae9f6", + 16, + ) + require.True(t, ok) + expectedQuotient2, ok := new(big.Int).SetString( + "1000000000000000000000000000000014def9dea2f79cd65812631a5cf5d3eb", + 16, + ) + require.True(t, ok) + + assertElementZero(t, element4.Div(element3)) + require.Equal(t, element3, element3.Div(element1)) + require.Equal(t, expectedQuotient1, element3.Div(element2).Value) + require.Equal(t, expectedQuotient2, element2.Div(element3).Value) + require.Panics(t, func() { element3.Div(element4) }) + assertUnequalFieldsPanic(t, element1.Div) +} + +func TestIsEqualElement(t *testing.T) { + element1 := field25519.NewElement(oneBelowModulus) + element2 := field25519.NewElement(big.NewInt(23)) + element3 := field25519.NewElement(oneBelowModulus) + altField := NewField(big.NewInt(23)) + altElement1 := altField.NewElement(one) + + require.False(t, element1.IsEqual(element2)) + require.True(t, element1.IsEqual(element3)) + require.True(t, element1.IsEqual(element1)) + require.False(t, element1.IsEqual(altElement1)) +} + +func TestBigIntElement(t *testing.T) { + element := field25519.NewElement(oneBelowModulus) + + require.Equal(t, oneBelowModulus, element.BigInt()) +} + +func TestBytesElement(t *testing.T) { + 
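+	// Element.Bytes returns the big-endian encoding of the value, so modulus-1 encodes to the bytes below.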
element := field25519.NewElement(oneBelowModulus) + + require.Equal( + t, + []byte{ + 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x14, 0xde, 0xf9, 0xde, 0xa2, + 0xf7, 0x9c, 0xd6, 0x58, 0x12, 0x63, 0x1a, 0x5c, 0xf5, + 0xd3, 0xec, + }, + element.Bytes(), + ) +} + +func TestCloneElement(t *testing.T) { + element := field25519.NewElement(oneBelowModulus) + clone := element.Clone() + + require.Equal(t, clone, element) + + clone.Value.Add(one, one) + + require.NotEqual(t, clone, element) +} + +// Tests un/marshaling Element +func TestElementMarshalJsonRoundTrip(t *testing.T) { + reallyBigInt1, ok := new( + big.Int, + ).SetString("12365234878725472538962348629568356835892346729834725643857832", 10) + require.True(t, ok) + + reallyBigInt2, ok := new( + big.Int, + ).SetString("123652348787DEF9DEA2F79CD65812631A5CF5D3ED46729834725643857832", 16) + require.True(t, ok) + + ins := []*Element{ + newElement(field25519, big.NewInt(300)), + newElement(field25519, big.NewInt(300000)), + newElement(field25519, big.NewInt(12812798)), + newElement(field25519, big.NewInt(17)), + newElement(field25519, big.NewInt(5066680)), + newElement(field25519, big.NewInt(3005)), + newElement(field25519, big.NewInt(317)), + newElement(field25519, big.NewInt(323)), + newElement(field25519, reallyBigInt1), + newElement(field25519, reallyBigInt2), + newElement(field25519, oneBelowModulus), + } + + // Run all the tests! + for _, in := range ins { + bytes, err := json.Marshal(in) + require.NoError(t, err) + require.NotNil(t, bytes) + + // Unmarshal and test + out := &Element{} + err = json.Unmarshal(bytes, &out) + require.NoError(t, err) + require.NotNil(t, out) + require.NotNil(t, out.Modulus) + require.NotNil(t, out.Value) + + require.Equal(t, in.Modulus.Bytes(), out.Modulus.Bytes()) + require.Equal(t, in.Value.Bytes(), out.Value.Bytes()) + } +} diff --git a/core/curves/k256_bench_test.go b/core/curves/k256_bench_test.go new file mode 100644 index 0000000..ed8a9dc --- /dev/null +++ b/core/curves/k256_bench_test.go @@ -0,0 +1,446 @@ +package curves + +import ( + crand "crypto/rand" + "crypto/sha256" + "io" + "math/big" + "testing" + + "github.com/btcsuite/btcd/btcec/v2" + + mod "github.com/sonr-io/sonr/crypto/core" + "github.com/sonr-io/sonr/crypto/internal" +) + +func BenchmarkK256(b *testing.B) { + // 1000 points + b.Run("1000 point add - btcec", func(b *testing.B) { + b.StopTimer() + points := make([]*BenchPoint, 1000) + for i := range points { + points[i] = points[i].Random(crand.Reader).(*BenchPoint) + } + acc := new(BenchPoint).Identity() + b.StartTimer() + for _, pt := range points { + acc = acc.Add(pt) + } + }) + b.Run("1000 point add - ct k256", func(b *testing.B) { + b.StopTimer() + curve := K256() + points := make([]*PointK256, 1000) + for i := range points { + points[i] = curve.NewIdentityPoint().Random(crand.Reader).(*PointK256) + } + acc := curve.NewIdentityPoint() + b.StartTimer() + for _, pt := range points { + acc = acc.Add(pt) + } + }) + b.Run("1000 point double - btcec", func(b *testing.B) { + b.StopTimer() + acc := new(BenchPoint).Generator() + b.StartTimer() + for i := 0; i < 1000; i++ { + acc = acc.Double() + } + }) + b.Run("1000 point double - ct k256", func(b *testing.B) { + b.StopTimer() + acc := new(PointK256).Generator() + b.StartTimer() + for i := 0; i < 1000; i++ { + acc = acc.Double() + } + }) + b.Run("1000 point multiply - btcec", func(b *testing.B) { + b.StopTimer() + scalars := make([]*BenchScalar, 1000) + for i := range scalars { + s := 
new(BenchScalar).Random(crand.Reader) + scalars[i] = s.(*BenchScalar) + } + acc := new(BenchPoint).Generator().Mul(new(BenchScalar).New(2)) + b.StartTimer() + for _, sc := range scalars { + acc = acc.Mul(sc) + } + }) + b.Run("1000 point multiply - ct k256", func(b *testing.B) { + b.StopTimer() + scalars := make([]*ScalarK256, 1000) + for i := range scalars { + s := new(ScalarK256).Random(crand.Reader) + scalars[i] = s.(*ScalarK256) + } + acc := new(PointK256).Generator() + b.StartTimer() + for _, sc := range scalars { + acc = acc.Mul(sc) + } + }) + b.Run("1000 scalar invert - btcec", func(b *testing.B) { + b.StopTimer() + scalars := make([]*BenchScalar, 1000) + for i := range scalars { + s := new(BenchScalar).Random(crand.Reader) + scalars[i] = s.(*BenchScalar) + } + b.StartTimer() + for _, sc := range scalars { + _, _ = sc.Invert() + } + }) + b.Run("1000 scalar invert - ct k256", func(b *testing.B) { + b.StopTimer() + scalars := make([]*ScalarK256, 1000) + for i := range scalars { + s := new(ScalarK256).Random(crand.Reader) + scalars[i] = s.(*ScalarK256) + } + b.StartTimer() + for _, sc := range scalars { + _, _ = sc.Invert() + } + }) + b.Run("1000 scalar sqrt - btcec", func(b *testing.B) { + b.StopTimer() + scalars := make([]*BenchScalar, 1000) + for i := range scalars { + s := new(BenchScalar).Random(crand.Reader) + scalars[i] = s.(*BenchScalar) + } + b.StartTimer() + for _, sc := range scalars { + _, _ = sc.Sqrt() + } + }) + b.Run("1000 scalar sqrt - ct k256", func(b *testing.B) { + b.StopTimer() + scalars := make([]*ScalarK256, 1000) + for i := range scalars { + s := new(ScalarK256).Random(crand.Reader) + scalars[i] = s.(*ScalarK256) + } + b.StartTimer() + for _, sc := range scalars { + _, _ = sc.Sqrt() + } + }) +} + +type BenchScalar struct { + value *big.Int +} + +func (s *BenchScalar) Random(reader io.Reader) Scalar { + var v [32]byte + _, _ = reader.Read(v[:]) + value := new(big.Int).SetBytes(v[:]) + return &BenchScalar{ + value: value.Mod(value, btcec.S256().N), + } +} + +func (s *BenchScalar) Hash(bytes []byte) Scalar { + h := sha256.Sum256(bytes) + value := new(big.Int).SetBytes(h[:]) + return &BenchScalar{ + value: value.Mod(value, btcec.S256().N), + } +} + +func (s *BenchScalar) Zero() Scalar { + return &BenchScalar{ + value: big.NewInt(0), + } +} + +func (s *BenchScalar) One() Scalar { + return &BenchScalar{ + value: big.NewInt(1), + } +} + +func (s *BenchScalar) IsZero() bool { + return s.value.Cmp(big.NewInt(0)) == 0 +} + +func (s *BenchScalar) IsOne() bool { + return s.value.Cmp(big.NewInt(1)) == 0 +} + +func (s *BenchScalar) IsOdd() bool { + return s.value.Bit(0) == 1 +} + +func (s *BenchScalar) IsEven() bool { + return s.value.Bit(0) == 0 +} + +func (s *BenchScalar) New(value int) Scalar { + v := big.NewInt(int64(value)) + return &BenchScalar{ + value: v.Mod(v, btcec.S256().N), + } +} + +func (s *BenchScalar) Cmp(rhs Scalar) int { + r := rhs.(*BenchScalar) + return s.value.Cmp(r.value) +} + +func (s *BenchScalar) Square() Scalar { + v := new(big.Int).Mul(s.value, s.value) + return &BenchScalar{ + value: v.Mod(v, btcec.S256().N), + } +} + +func (s *BenchScalar) Double() Scalar { + v := new(big.Int).Add(s.value, s.value) + return &BenchScalar{ + value: v.Mod(v, btcec.S256().N), + } +} + +func (s *BenchScalar) Invert() (Scalar, error) { + return &BenchScalar{ + value: new(big.Int).ModInverse(s.value, btcec.S256().N), + }, nil +} + +func (s *BenchScalar) Sqrt() (Scalar, error) { + return &BenchScalar{ + value: new(big.Int).ModSqrt(s.value, btcec.S256().N), + }, nil +} + 
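+// Cube returns s*s*s reduced modulo the secp256k1 group order.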
+func (s *BenchScalar) Cube() Scalar { + v := new(big.Int).Mul(s.value, s.value) + v.Mul(v, s.value) + return &BenchScalar{ + value: v.Mod(v, btcec.S256().N), + } +} + +func (s *BenchScalar) Add(rhs Scalar) Scalar { + r := rhs.(*BenchScalar) + v := new(big.Int).Add(s.value, r.value) + return &BenchScalar{ + value: v.Mod(v, btcec.S256().N), + } +} + +func (s *BenchScalar) Sub(rhs Scalar) Scalar { + r := rhs.(*BenchScalar) + v := new(big.Int).Sub(s.value, r.value) + return &BenchScalar{ + value: v.Mod(v, btcec.S256().N), + } +} + +func (s *BenchScalar) Mul(rhs Scalar) Scalar { + r := rhs.(*BenchScalar) + v := new(big.Int).Mul(s.value, r.value) + return &BenchScalar{ + value: v.Mod(v, btcec.S256().N), + } +} + +func (s *BenchScalar) MulAdd(y, z Scalar) Scalar { + yy := y.(*BenchScalar) + zz := z.(*BenchScalar) + v := new(big.Int).Mul(s.value, yy.value) + v.Add(v, zz.value) + return &BenchScalar{ + value: v.Mod(v, btcec.S256().N), + } +} + +func (s *BenchScalar) Div(rhs Scalar) Scalar { + r := rhs.(*BenchScalar) + v := new(big.Int).ModInverse(r.value, btcec.S256().N) + v.Mul(v, s.value) + return &BenchScalar{ + value: v.Mod(v, btcec.S256().N), + } +} + +func (s *BenchScalar) Neg() Scalar { + v, _ := mod.Neg(s.value, btcec.S256().N) + return &BenchScalar{ + value: v, + } +} + +func (s *BenchScalar) SetBigInt(v *big.Int) (Scalar, error) { + return &BenchScalar{ + value: new(big.Int).Set(v), + }, nil +} + +func (s *BenchScalar) BigInt() *big.Int { + return new(big.Int).Set(s.value) +} + +func (s *BenchScalar) Point() Point { + return (&BenchPoint{}).Identity() +} + +func (s *BenchScalar) Bytes() []byte { + return internal.ReverseScalarBytes(s.value.Bytes()) +} + +func (s *BenchScalar) SetBytes(bytes []byte) (Scalar, error) { + value := new(big.Int).SetBytes(internal.ReverseScalarBytes(bytes)) + value.Mod(value, btcec.S256().N) + return &BenchScalar{ + value, + }, nil +} + +func (s *BenchScalar) SetBytesWide(bytes []byte) (Scalar, error) { + value := new(big.Int).SetBytes(internal.ReverseScalarBytes(bytes)) + value.Mod(value, btcec.S256().N) + return &BenchScalar{ + value, + }, nil +} + +func (s *BenchScalar) Clone() Scalar { + return &BenchScalar{ + value: new(big.Int).Set(s.value), + } +} + +type BenchPoint struct { + x, y *big.Int +} + +func (p *BenchPoint) Random(reader io.Reader) Point { + var k [32]byte + curve := btcec.S256() + _, _ = reader.Read(k[:]) + x, y := curve.ScalarBaseMult(k[:]) + for !curve.IsOnCurve(x, y) { + _, _ = reader.Read(k[:]) + x, y = curve.ScalarBaseMult(k[:]) + } + return &BenchPoint{x, y} +} + +func (p *BenchPoint) Hash(bytes []byte) Point { + return nil +} + +func (p *BenchPoint) Identity() Point { + return &BenchPoint{x: big.NewInt(0), y: big.NewInt(0)} +} + +func (p *BenchPoint) Generator() Point { + return &BenchPoint{ + x: new(big.Int).Set(btcec.S256().Gx), + y: new(big.Int).Set(btcec.S256().Gy), + } +} + +func (p *BenchPoint) IsIdentity() bool { + return false +} + +func (p *BenchPoint) IsNegative() bool { + return false +} + +func (p *BenchPoint) IsOnCurve() bool { + return btcec.S256().IsOnCurve(p.x, p.y) +} + +func (p *BenchPoint) Double() Point { + x, y := btcec.S256().Double(p.x, p.y) + return &BenchPoint{ + x, y, + } +} + +func (p *BenchPoint) Scalar() Scalar { + return &BenchScalar{value: big.NewInt(0)} +} + +func (p *BenchPoint) Neg() Point { + y, _ := mod.Neg(p.y, btcec.S256().P) + return &BenchPoint{ + x: new(big.Int).Set(p.x), y: y, + } +} + +func (p *BenchPoint) Add(rhs Point) Point { + r := rhs.(*BenchPoint) + x, y := btcec.S256().Add(p.x, p.y, r.x, 
r.y) + return &BenchPoint{ + x, y, + } +} + +func (p *BenchPoint) Sub(rhs Point) Point { + t := rhs.Neg().(*BenchPoint) + return t.Add(p) +} + +func (p *BenchPoint) Mul(rhs Scalar) Point { + k := rhs.Bytes() + x, y := btcec.S256().ScalarMult(p.x, p.y, k) + return &BenchPoint{ + x, y, + } +} + +func (p *BenchPoint) Equal(rhs Point) bool { + r := rhs.(*BenchPoint) + return p.x.Cmp(r.x) == 0 && p.y.Cmp(r.y) == 0 +} + +func (p *BenchPoint) Set(x, y *big.Int) (Point, error) { + return &BenchPoint{ + x, y, + }, nil +} + +func (p *BenchPoint) ToAffineCompressed() []byte { + return nil +} + +func (p *BenchPoint) ToAffineUncompressed() []byte { + return nil +} + +func (p *BenchPoint) FromAffineCompressed(bytes []byte) (Point, error) { + return nil, nil +} + +func (p *BenchPoint) FromAffineUncompressed(bytes []byte) (Point, error) { + return nil, nil +} + +func (p *BenchPoint) CurveName() string { + return btcec.S256().Name +} + +func (p *BenchPoint) SumOfProducts(points []Point, scalars []Scalar) Point { + biScalars := make([]*big.Int, len(scalars)) + for i := 0; i < len(scalars); i++ { + biScalars[i] = scalars[i].BigInt() + } + return sumOfProductsPippenger(points, biScalars) +} + +//func rhsK256(x *big.Int) *big.Int { +// // y^2 = x^3 + B +// x3, _ := mod.Exp(x, big.NewInt(3), btcec.S256().P) +// x3.Add(x3, btcec.S256().B) +// return x3.ModSqrt(x3, btcec.S256().P) +//} diff --git a/core/curves/k256_curve.go b/core/curves/k256_curve.go new file mode 100644 index 0000000..54af5df --- /dev/null +++ b/core/curves/k256_curve.go @@ -0,0 +1,672 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package curves + +import ( + "crypto/elliptic" + "fmt" + "io" + "math/big" + "sync" + + "github.com/btcsuite/btcd/btcec/v2" + + "github.com/sonr-io/sonr/crypto/core/curves/native" + secp256k1 "github.com/sonr-io/sonr/crypto/core/curves/native/k256" + "github.com/sonr-io/sonr/crypto/core/curves/native/k256/fp" + "github.com/sonr-io/sonr/crypto/core/curves/native/k256/fq" + "github.com/sonr-io/sonr/crypto/internal" +) + +var ( + oldK256Initonce sync.Once + oldK256 Koblitz256 +) + +type Koblitz256 struct { + *elliptic.CurveParams +} + +func oldK256InitAll() { + curve := btcec.S256() + oldK256.CurveParams = new(elliptic.CurveParams) + oldK256.P = curve.P + oldK256.N = curve.N + oldK256.Gx = curve.Gx + oldK256.Gy = curve.Gy + oldK256.B = curve.B + oldK256.BitSize = curve.BitSize + oldK256.Name = K256Name +} + +func K256Curve() *Koblitz256 { + oldK256Initonce.Do(oldK256InitAll) + return &oldK256 +} + +func (curve *Koblitz256) Params() *elliptic.CurveParams { + return curve.CurveParams +} + +func (curve *Koblitz256) IsOnCurve(x, y *big.Int) bool { + _, err := secp256k1.K256PointNew().SetBigInt(x, y) + return err == nil +} + +func (curve *Koblitz256) Add(x1, y1, x2, y2 *big.Int) (*big.Int, *big.Int) { + p1, err := secp256k1.K256PointNew().SetBigInt(x1, y1) + if err != nil { + return nil, nil + } + p2, err := secp256k1.K256PointNew().SetBigInt(x2, y2) + if err != nil { + return nil, nil + } + return p1.Add(p1, p2).BigInt() +} + +func (curve *Koblitz256) Double(x1, y1 *big.Int) (*big.Int, *big.Int) { + p1, err := secp256k1.K256PointNew().SetBigInt(x1, y1) + if err != nil { + return nil, nil + } + return p1.Double(p1).BigInt() +} + +func (curve *Koblitz256) ScalarMult(Bx, By *big.Int, k []byte) (*big.Int, *big.Int) { + p1, err := secp256k1.K256PointNew().SetBigInt(Bx, By) + if err != nil { + return nil, nil + } + var bytes [32]byte + copy(bytes[:], 
internal.ReverseScalarBytes(k)) + s, err := fq.K256FqNew().SetBytes(&bytes) + if err != nil { + return nil, nil + } + return p1.Mul(p1, s).BigInt() +} + +func (curve *Koblitz256) ScalarBaseMult(k []byte) (*big.Int, *big.Int) { + var bytes [32]byte + copy(bytes[:], internal.ReverseScalarBytes(k)) + s, err := fq.K256FqNew().SetBytes(&bytes) + if err != nil { + return nil, nil + } + p1 := secp256k1.K256PointNew().Generator() + return p1.Mul(p1, s).BigInt() +} + +type ScalarK256 struct { + value *native.Field +} + +type PointK256 struct { + value *native.EllipticPoint +} + +func (s *ScalarK256) Random(reader io.Reader) Scalar { + if reader == nil { + return nil + } + var seed [64]byte + _, _ = reader.Read(seed[:]) + return s.Hash(seed[:]) +} + +func (s *ScalarK256) Hash(bytes []byte) Scalar { + dst := []byte("secp256k1_XMD:SHA-256_SSWU_RO_") + xmd := native.ExpandMsgXmd(native.EllipticPointHasherSha256(), bytes, dst, 48) + var t [64]byte + copy(t[:48], internal.ReverseScalarBytes(xmd)) + + return &ScalarK256{ + value: fq.K256FqNew().SetBytesWide(&t), + } +} + +func (s *ScalarK256) Zero() Scalar { + return &ScalarK256{ + value: fq.K256FqNew().SetZero(), + } +} + +func (s *ScalarK256) One() Scalar { + return &ScalarK256{ + value: fq.K256FqNew().SetOne(), + } +} + +func (s *ScalarK256) IsZero() bool { + return s.value.IsZero() == 1 +} + +func (s *ScalarK256) IsOne() bool { + return s.value.IsOne() == 1 +} + +func (s *ScalarK256) IsOdd() bool { + return s.value.Bytes()[0]&1 == 1 +} + +func (s *ScalarK256) IsEven() bool { + return s.value.Bytes()[0]&1 == 0 +} + +func (s *ScalarK256) New(value int) Scalar { + t := fq.K256FqNew() + v := big.NewInt(int64(value)) + if value < 0 { + v.Mod(v, t.Params.BiModulus) + } + return &ScalarK256{ + value: t.SetBigInt(v), + } +} + +func (s *ScalarK256) Cmp(rhs Scalar) int { + r, ok := rhs.(*ScalarK256) + if ok { + return s.value.Cmp(r.value) + } else { + return -2 + } +} + +func (s *ScalarK256) Square() Scalar { + return &ScalarK256{ + value: fq.K256FqNew().Square(s.value), + } +} + +func (s *ScalarK256) Double() Scalar { + return &ScalarK256{ + value: fq.K256FqNew().Double(s.value), + } +} + +func (s *ScalarK256) Invert() (Scalar, error) { + value, wasInverted := fq.K256FqNew().Invert(s.value) + if !wasInverted { + return nil, fmt.Errorf("inverse doesn't exist") + } + return &ScalarK256{ + value, + }, nil +} + +func (s *ScalarK256) Sqrt() (Scalar, error) { + value, wasSquare := fq.K256FqNew().Sqrt(s.value) + if !wasSquare { + return nil, fmt.Errorf("not a square") + } + return &ScalarK256{ + value, + }, nil +} + +func (s *ScalarK256) Cube() Scalar { + value := fq.K256FqNew().Mul(s.value, s.value) + value.Mul(value, s.value) + return &ScalarK256{ + value, + } +} + +func (s *ScalarK256) Add(rhs Scalar) Scalar { + r, ok := rhs.(*ScalarK256) + if ok { + return &ScalarK256{ + value: fq.K256FqNew().Add(s.value, r.value), + } + } else { + return nil + } +} + +func (s *ScalarK256) Sub(rhs Scalar) Scalar { + r, ok := rhs.(*ScalarK256) + if ok { + return &ScalarK256{ + value: fq.K256FqNew().Sub(s.value, r.value), + } + } else { + return nil + } +} + +func (s *ScalarK256) Mul(rhs Scalar) Scalar { + r, ok := rhs.(*ScalarK256) + if ok { + return &ScalarK256{ + value: fq.K256FqNew().Mul(s.value, r.value), + } + } else { + return nil + } +} + +func (s *ScalarK256) MulAdd(y, z Scalar) Scalar { + return s.Mul(y).Add(z) +} + +func (s *ScalarK256) Div(rhs Scalar) Scalar { + r, ok := rhs.(*ScalarK256) + if ok { + v, wasInverted := fq.K256FqNew().Invert(r.value) + if !wasInverted { + 
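+			// the divisor is zero and has no inverse, so the quotient is undefined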
return nil + } + v.Mul(v, s.value) + return &ScalarK256{value: v} + } else { + return nil + } +} + +func (s *ScalarK256) Neg() Scalar { + return &ScalarK256{ + value: fq.K256FqNew().Neg(s.value), + } +} + +func (s *ScalarK256) SetBigInt(v *big.Int) (Scalar, error) { + if v == nil { + return nil, fmt.Errorf("'v' cannot be nil") + } + value := fq.K256FqNew().SetBigInt(v) + return &ScalarK256{ + value, + }, nil +} + +func (s *ScalarK256) BigInt() *big.Int { + return s.value.BigInt() +} + +func (s *ScalarK256) Bytes() []byte { + t := s.value.Bytes() + return internal.ReverseScalarBytes(t[:]) +} + +func (s *ScalarK256) SetBytes(bytes []byte) (Scalar, error) { + if len(bytes) != 32 { + return nil, fmt.Errorf("invalid length") + } + var seq [32]byte + copy(seq[:], internal.ReverseScalarBytes(bytes)) + value, err := fq.K256FqNew().SetBytes(&seq) + if err != nil { + return nil, err + } + return &ScalarK256{ + value, + }, nil +} + +func (s *ScalarK256) SetBytesWide(bytes []byte) (Scalar, error) { + if len(bytes) != 64 { + return nil, fmt.Errorf("invalid length") + } + var seq [64]byte + copy(seq[:], bytes) + return &ScalarK256{ + value: fq.K256FqNew().SetBytesWide(&seq), + }, nil +} + +func (s *ScalarK256) Point() Point { + return new(PointK256).Identity() +} + +func (s *ScalarK256) Clone() Scalar { + return &ScalarK256{ + value: fq.K256FqNew().Set(s.value), + } +} + +func (s *ScalarK256) MarshalBinary() ([]byte, error) { + return scalarMarshalBinary(s) +} + +func (s *ScalarK256) UnmarshalBinary(input []byte) error { + sc, err := scalarUnmarshalBinary(input) + if err != nil { + return err + } + ss, ok := sc.(*ScalarK256) + if !ok { + return fmt.Errorf("invalid scalar") + } + s.value = ss.value + return nil +} + +func (s *ScalarK256) MarshalText() ([]byte, error) { + return scalarMarshalText(s) +} + +func (s *ScalarK256) UnmarshalText(input []byte) error { + sc, err := scalarUnmarshalText(input) + if err != nil { + return err + } + ss, ok := sc.(*ScalarK256) + if !ok { + return fmt.Errorf("invalid scalar") + } + s.value = ss.value + return nil +} + +func (s *ScalarK256) MarshalJSON() ([]byte, error) { + return scalarMarshalJson(s) +} + +func (s *ScalarK256) UnmarshalJSON(input []byte) error { + sc, err := scalarUnmarshalJson(input) + if err != nil { + return err + } + S, ok := sc.(*ScalarK256) + if !ok { + return fmt.Errorf("invalid type") + } + s.value = S.value + return nil +} + +func (p *PointK256) Random(reader io.Reader) Point { + var seed [64]byte + _, _ = reader.Read(seed[:]) + return p.Hash(seed[:]) +} + +func (p *PointK256) Hash(bytes []byte) Point { + value, err := secp256k1.K256PointNew().Hash(bytes, native.EllipticPointHasherSha256()) + // Note: Interface constraint prevents returning error directly + // Returning nil on error for now, caller should check for nil + if err != nil { + // Log error for debugging if logger is available + return nil + } + + return &PointK256{value} +} + +func (p *PointK256) Identity() Point { + return &PointK256{ + value: secp256k1.K256PointNew().Identity(), + } +} + +func (p *PointK256) Generator() Point { + return &PointK256{ + value: secp256k1.K256PointNew().Generator(), + } +} + +func (p *PointK256) IsIdentity() bool { + return p.value.IsIdentity() +} + +func (p *PointK256) IsNegative() bool { + return p.value.GetY().Value[0]&1 == 1 +} + +func (p *PointK256) IsOnCurve() bool { + return p.value.IsOnCurve() +} + +func (p *PointK256) Double() Point { + value := secp256k1.K256PointNew().Double(p.value) + return &PointK256{value} +} + +func (p *PointK256) 
Scalar() Scalar { + return new(ScalarK256).Zero() +} + +func (p *PointK256) Neg() Point { + value := secp256k1.K256PointNew().Neg(p.value) + return &PointK256{value} +} + +func (p *PointK256) Add(rhs Point) Point { + if rhs == nil { + return nil + } + r, ok := rhs.(*PointK256) + if ok { + value := secp256k1.K256PointNew().Add(p.value, r.value) + return &PointK256{value} + } else { + return nil + } +} + +func (p *PointK256) Sub(rhs Point) Point { + if rhs == nil { + return nil + } + r, ok := rhs.(*PointK256) + if ok { + value := secp256k1.K256PointNew().Sub(p.value, r.value) + return &PointK256{value} + } else { + return nil + } +} + +func (p *PointK256) Mul(rhs Scalar) Point { + if rhs == nil { + return nil + } + r, ok := rhs.(*ScalarK256) + if ok { + value := secp256k1.K256PointNew().Mul(p.value, r.value) + return &PointK256{value} + } else { + return nil + } +} + +func (p *PointK256) Equal(rhs Point) bool { + r, ok := rhs.(*PointK256) + if ok { + return p.value.Equal(r.value) == 1 + } else { + return false + } +} + +func (p *PointK256) Set(x, y *big.Int) (Point, error) { + value, err := secp256k1.K256PointNew().SetBigInt(x, y) + if err != nil { + return nil, err + } + return &PointK256{value}, nil +} + +func (p *PointK256) ToAffineCompressed() []byte { + var x [33]byte + x[0] = byte(2) + + t := secp256k1.K256PointNew().ToAffine(p.value) + + x[0] |= t.Y.Bytes()[0] & 1 + + xBytes := t.X.Bytes() + copy(x[1:], internal.ReverseScalarBytes(xBytes[:])) + return x[:] +} + +func (p *PointK256) ToAffineUncompressed() []byte { + var out [65]byte + out[0] = byte(4) + t := secp256k1.K256PointNew().ToAffine(p.value) + arr := t.X.Bytes() + copy(out[1:33], internal.ReverseScalarBytes(arr[:])) + arr = t.Y.Bytes() + copy(out[33:], internal.ReverseScalarBytes(arr[:])) + return out[:] +} + +func (p *PointK256) FromAffineCompressed(bytes []byte) (Point, error) { + var raw [native.FieldBytes]byte + if len(bytes) != 33 { + return nil, fmt.Errorf("invalid byte sequence") + } + sign := int(bytes[0]) + if sign != 2 && sign != 3 { + return nil, fmt.Errorf("invalid sign byte") + } + sign &= 0x1 + + copy(raw[:], internal.ReverseScalarBytes(bytes[1:])) + x, err := fp.K256FpNew().SetBytes(&raw) + if err != nil { + return nil, err + } + + value := secp256k1.K256PointNew().Identity() + rhs := fp.K256FpNew() + p.value.Arithmetic.RhsEq(rhs, x) + // test that rhs is quadratic residue + // if not, then this Point is at infinity + y, wasQr := fp.K256FpNew().Sqrt(rhs) + if wasQr { + // fix the sign + sigY := int(y.Bytes()[0] & 1) + if sigY != sign { + y.Neg(y) + } + value.X = x + value.Y = y + value.Z.SetOne() + } + return &PointK256{value}, nil +} + +func (p *PointK256) FromAffineUncompressed(bytes []byte) (Point, error) { + var arr [native.FieldBytes]byte + if len(bytes) != 65 { + return nil, fmt.Errorf("invalid byte sequence") + } + if bytes[0] != 4 { + return nil, fmt.Errorf("invalid sign byte") + } + + copy(arr[:], internal.ReverseScalarBytes(bytes[1:33])) + x, err := fp.K256FpNew().SetBytes(&arr) + if err != nil { + return nil, err + } + copy(arr[:], internal.ReverseScalarBytes(bytes[33:])) + y, err := fp.K256FpNew().SetBytes(&arr) + if err != nil { + return nil, err + } + value := secp256k1.K256PointNew() + value.X = x + value.Y = y + value.Z.SetOne() + return &PointK256{value}, nil +} + +func (p *PointK256) CurveName() string { + return p.value.Params.Name +} + +func (p *PointK256) SumOfProducts(points []Point, scalars []Scalar) Point { + nPoints := make([]*native.EllipticPoint, len(points)) + nScalars := 
make([]*native.Field, len(scalars)) + for i, pt := range points { + ptv, ok := pt.(*PointK256) + if !ok { + return nil + } + nPoints[i] = ptv.value + } + for i, sc := range scalars { + s, ok := sc.(*ScalarK256) + if !ok { + return nil + } + nScalars[i] = s.value + } + value := secp256k1.K256PointNew() + _, err := value.SumOfProducts(nPoints, nScalars) + if err != nil { + return nil + } + return &PointK256{value} +} + +func (p *PointK256) X() *native.Field { + return p.value.GetX() +} + +func (p *PointK256) Y() *native.Field { + return p.value.GetY() +} + +func (p *PointK256) Params() *elliptic.CurveParams { + return K256Curve().Params() +} + +func (p *PointK256) MarshalBinary() ([]byte, error) { + return pointMarshalBinary(p) +} + +func (p *PointK256) UnmarshalBinary(input []byte) error { + pt, err := pointUnmarshalBinary(input) + if err != nil { + return err + } + ppt, ok := pt.(*PointK256) + if !ok { + return fmt.Errorf("invalid point") + } + p.value = ppt.value + return nil +} + +func (p *PointK256) MarshalText() ([]byte, error) { + return pointMarshalText(p) +} + +func (p *PointK256) UnmarshalText(input []byte) error { + pt, err := pointUnmarshalText(input) + if err != nil { + return err + } + ppt, ok := pt.(*PointK256) + if !ok { + return fmt.Errorf("invalid point") + } + p.value = ppt.value + return nil +} + +func (p *PointK256) MarshalJSON() ([]byte, error) { + return pointMarshalJSON(p) +} + +func (p *PointK256) UnmarshalJSON(input []byte) error { + pt, err := pointUnmarshalJSON(input) + if err != nil { + return err + } + P, ok := pt.(*PointK256) + if !ok { + return fmt.Errorf("invalid type") + } + p.value = P.value + return nil +} diff --git a/core/curves/k256_curve_test.go b/core/curves/k256_curve_test.go new file mode 100755 index 0000000..dc4c1b7 --- /dev/null +++ b/core/curves/k256_curve_test.go @@ -0,0 +1,620 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package curves + +import ( + crand "crypto/rand" + "math/big" + "sync" + "testing" + + "github.com/btcsuite/btcd/btcec/v2" + "github.com/stretchr/testify/require" +) + +type mockReader struct { + index int + seed []byte +} + +var ( + mockRngInitonce sync.Once + mockRng mockReader +) + +func newMockReader() { + mockRng.index = 0 + mockRng.seed = make([]byte, 32) + for i := range mockRng.seed { + mockRng.seed[i] = 1 + } +} + +func testRng() *mockReader { + mockRngInitonce.Do(newMockReader) + return &mockRng +} + +func (m *mockReader) Read(p []byte) (n int, err error) { + limit := len(m.seed) + for i := range p { + p[i] = m.seed[m.index] + m.index += 1 + m.index %= limit + } + n = len(p) + err = nil + return n, err +} + +func TestScalarK256Random(t *testing.T) { + curve := K256() + sc := curve.Scalar.Random(testRng()) + s, ok := sc.(*ScalarK256) + require.True(t, ok) + expected, _ := new( + big.Int, + ).SetString("2f71aaec5e14d747c72e46cdcaffffe6f542f38b3f0925469ceb24ac1c65885d", 16) + require.Equal(t, s.value.BigInt(), expected) + // Try 10 random values + for i := 0; i < 10; i++ { + sc := curve.Scalar.Random(crand.Reader) + _, ok := sc.(*ScalarK256) + require.True(t, ok) + require.True(t, !sc.IsZero()) + } +} + +func TestScalarK256Hash(t *testing.T) { + var b [32]byte + k256 := K256() + sc := k256.Scalar.Hash(b[:]) + s, ok := sc.(*ScalarK256) + require.True(t, ok) + expected, _ := new( + big.Int, + ).SetString("e5cb3500b809a8202de0834a805068bc21bde09bd6367815e7523a37adf8f52e", 16) + require.Equal(t, s.value.BigInt(), expected) +} + +func TestScalarK256Zero(t *testing.T) { + k256 := K256() + sc := k256.Scalar.Zero() + require.True(t, sc.IsZero()) + require.True(t, sc.IsEven()) +} + +func TestScalarK256One(t *testing.T) { + k256 := K256() + sc := k256.Scalar.One() + require.True(t, sc.IsOne()) + require.True(t, sc.IsOdd()) +} + +func TestScalarK256New(t *testing.T) { + k256 := K256() + three := k256.Scalar.New(3) + require.True(t, three.IsOdd()) + four := k256.Scalar.New(4) + require.True(t, four.IsEven()) + neg1 := k256.Scalar.New(-1) + require.True(t, neg1.IsEven()) + neg2 := k256.Scalar.New(-2) + require.True(t, neg2.IsOdd()) +} + +func TestScalarK256Square(t *testing.T) { + k256 := K256() + three := k256.Scalar.New(3) + nine := k256.Scalar.New(9) + require.Equal(t, three.Square().Cmp(nine), 0) +} + +func TestScalarK256Cube(t *testing.T) { + k256 := K256() + three := k256.Scalar.New(3) + twentySeven := k256.Scalar.New(27) + require.Equal(t, three.Cube().Cmp(twentySeven), 0) +} + +func TestScalarK256Double(t *testing.T) { + k256 := K256() + three := k256.Scalar.New(3) + six := k256.Scalar.New(6) + require.Equal(t, three.Double().Cmp(six), 0) +} + +func TestScalarK256Neg(t *testing.T) { + k256 := K256() + one := k256.Scalar.One() + neg1 := k256.Scalar.New(-1) + require.Equal(t, one.Neg().Cmp(neg1), 0) + lotsOfThrees := k256.Scalar.New(333333) + expected := k256.Scalar.New(-333333) + require.Equal(t, lotsOfThrees.Neg().Cmp(expected), 0) +} + +func TestScalarK256Invert(t *testing.T) { + k256 := K256() + nine := k256.Scalar.New(9) + actual, _ := nine.Invert() + sa, _ := actual.(*ScalarK256) + bn, _ := new( + big.Int, + ).SetString("8e38e38e38e38e38e38e38e38e38e38d842841d57dd303af6a9150f8e5737996", 16) + expected, err := k256.Scalar.SetBigInt(bn) + require.NoError(t, err) + require.Equal(t, sa.Cmp(expected), 0) +} + +func TestScalarK256Sqrt(t *testing.T) { + k256 := K256() + nine := k256.Scalar.New(9) + actual, err := nine.Sqrt() + sa, _ := 
actual.(*ScalarK256) + expected := k256.Scalar.New(3) + require.NoError(t, err) + require.Equal(t, sa.Cmp(expected), 0) +} + +func TestScalarK256Add(t *testing.T) { + k256 := K256() + nine := k256.Scalar.New(9) + six := k256.Scalar.New(6) + fifteen := nine.Add(six) + require.NotNil(t, fifteen) + expected := k256.Scalar.New(15) + require.Equal(t, expected.Cmp(fifteen), 0) + n := new(big.Int).Set(btcec.S256().N) + n.Sub(n, big.NewInt(3)) + + upper, err := k256.Scalar.SetBigInt(n) + require.NoError(t, err) + actual := upper.Add(nine) + require.NotNil(t, actual) + require.Equal(t, actual.Cmp(six), 0) +} + +func TestScalarK256Sub(t *testing.T) { + k256 := K256() + nine := k256.Scalar.New(9) + six := k256.Scalar.New(6) + n := new(big.Int).Set(btcec.S256().N) + n.Sub(n, big.NewInt(3)) + + expected, err := k256.Scalar.SetBigInt(n) + require.NoError(t, err) + actual := six.Sub(nine) + require.Equal(t, expected.Cmp(actual), 0) + + actual = nine.Sub(six) + require.Equal(t, actual.Cmp(k256.Scalar.New(3)), 0) +} + +func TestScalarK256Mul(t *testing.T) { + k256 := K256() + nine := k256.Scalar.New(9) + six := k256.Scalar.New(6) + actual := nine.Mul(six) + require.Equal(t, actual.Cmp(k256.Scalar.New(54)), 0) + n := new(big.Int).Set(btcec.S256().N) + n.Sub(n, big.NewInt(1)) + upper, err := k256.Scalar.SetBigInt(n) + require.NoError(t, err) + require.Equal(t, upper.Mul(upper).Cmp(k256.Scalar.New(1)), 0) +} + +func TestScalarK256Div(t *testing.T) { + k256 := K256() + nine := k256.Scalar.New(9) + actual := nine.Div(nine) + require.Equal(t, actual.Cmp(k256.Scalar.New(1)), 0) + require.Equal(t, k256.Scalar.New(54).Div(nine).Cmp(k256.Scalar.New(6)), 0) +} + +func TestScalarK256Serialize(t *testing.T) { + k256 := K256() + sc := k256.Scalar.New(255) + sequence := sc.Bytes() + require.Equal(t, len(sequence), 32) + require.Equal( + t, + sequence, + []byte{ + 0x00, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0xff, + }, + ) + ret, err := k256.Scalar.SetBytes(sequence) + require.NoError(t, err) + require.Equal(t, ret.Cmp(sc), 0) + + // Try 10 random values + for i := 0; i < 10; i++ { + sc = k256.Scalar.Random(crand.Reader) + sequence = sc.Bytes() + require.Equal(t, len(sequence), 32) + ret, err = k256.Scalar.SetBytes(sequence) + require.NoError(t, err) + require.Equal(t, ret.Cmp(sc), 0) + } +} + +func TestScalarK256Nil(t *testing.T) { + k256 := K256() + one := k256.Scalar.New(1) + require.Nil(t, one.Add(nil)) + require.Nil(t, one.Sub(nil)) + require.Nil(t, one.Mul(nil)) + require.Nil(t, one.Div(nil)) + require.Nil(t, k256.Scalar.Random(nil)) + require.Equal(t, one.Cmp(nil), -2) + _, err := k256.Scalar.SetBigInt(nil) + require.Error(t, err) +} + +func TestPointK256Random(t *testing.T) { + curve := K256() + sc := curve.Point.Random(testRng()) + s, ok := sc.(*PointK256) + require.True(t, ok) + expectedX, _ := new( + big.Int, + ).SetString("c6e18a1d7cf834462675b31581639a18e14fd0f73f8dfd5fe2993f88f6fbe008", 16) + expectedY, _ := new( + big.Int, + ).SetString("b65fab3243c5d07cef005d7fb335ebe8019efd954e95e68c86ef9b3bd7bccd36", 16) + require.Equal(t, s.X().BigInt(), expectedX) + require.Equal(t, s.Y().BigInt(), expectedY) + // Try 10 random values + for i := 0; i < 10; i++ { + sc := curve.Point.Random(crand.Reader) + _, ok := sc.(*PointK256) + require.True(t, ok) + require.True(t, !sc.IsIdentity()) + } +} + +func TestPointK256Hash(t *testing.T) { + var b [32]byte + curve := K256() + sc := curve.Point.Hash(b[:]) + s, ok 
:= sc.(*PointK256) + require.True(t, ok) + expectedX, _ := new( + big.Int, + ).SetString("95d0ad42f68ddb5a808469dd75fa866890dcc7d039844e0e2d58a6d25bd9a66b", 16) + expectedY, _ := new( + big.Int, + ).SetString("f37c564d05168dab4413caacdb8e3426143fc5fb24a470ccd8a51856c11d163c", 16) + require.Equal(t, s.X().BigInt(), expectedX) + require.Equal(t, s.Y().BigInt(), expectedY) +} + +func TestPointK256Identity(t *testing.T) { + k256 := K256() + sc := k256.Point.Identity() + require.True(t, sc.IsIdentity()) + require.Equal( + t, + sc.ToAffineCompressed(), + []byte{ + 2, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + }, + ) +} + +func TestPointK256Generator(t *testing.T) { + curve := K256() + sc := curve.Point.Generator() + s, ok := sc.(*PointK256) + require.True(t, ok) + require.Equal(t, s.X().BigInt().Cmp(btcec.S256().Gx), 0) + require.Equal(t, s.Y().BigInt().Cmp(btcec.S256().Gy), 0) +} + +func TestPointK256Set(t *testing.T) { + k256 := K256() + iden, err := k256.Point.Set(big.NewInt(0), big.NewInt(0)) + require.NoError(t, err) + require.True(t, iden.IsIdentity()) + _, err = k256.Point.Set(btcec.S256().Gx, btcec.S256().Gy) + require.NoError(t, err) +} + +func TestPointK256Double(t *testing.T) { + curve := K256() + g := curve.Point.Generator() + g2 := g.Double() + require.True(t, g2.Equal(g.Mul(curve.Scalar.New(2)))) + i := curve.Point.Identity() + require.True(t, i.Double().Equal(i)) + gg := curve.Point.Generator().Add(curve.Point.Generator()) + require.True(t, g2.Equal(gg)) +} + +func TestPointK256Neg(t *testing.T) { + k256 := K256() + g := k256.Point.Generator().Neg() + require.True(t, g.Neg().Equal(k256.Point.Generator())) + require.True(t, k256.Point.Identity().Neg().Equal(k256.Point.Identity())) +} + +func TestPointK256Add(t *testing.T) { + curve := K256() + pt := curve.Point.Generator().(*PointK256) + pt1 := pt.Add(pt).(*PointK256) + pt2 := pt.Double().(*PointK256) + pt3 := pt.Mul(curve.Scalar.New(2)).(*PointK256) + + require.True(t, pt1.Equal(pt2)) + require.True(t, pt1.Equal(pt3)) + require.True(t, pt.Add(pt).Equal(pt.Double())) + require.True(t, pt.Mul(curve.Scalar.New(3)).Equal(pt.Add(pt).Add(pt))) +} + +func TestPointK256Sub(t *testing.T) { + curve := K256() + g := curve.Point.Generator() + pt := curve.Point.Generator().Mul(curve.Scalar.New(4)) + + require.True(t, pt.Sub(g).Sub(g).Sub(g).Equal(g)) + require.True(t, pt.Sub(g).Sub(g).Sub(g).Sub(g).IsIdentity()) +} + +func TestPointK256Mul(t *testing.T) { + curve := K256() + g := curve.Point.Generator() + pt := curve.Point.Generator().Mul(curve.Scalar.New(4)) + require.True(t, g.Double().Double().Equal(pt)) +} + +func TestPointK256Serialize(t *testing.T) { + curve := K256() + ss := curve.Scalar.Random(testRng()) + + g := curve.Point.Generator() + ppt := g.Mul(ss).(*PointK256) + + require.Equal( + t, + ppt.ToAffineCompressed(), + []byte{ + 0x2, + 0x1b, + 0xa7, + 0x7e, + 0x98, + 0xd6, + 0xd8, + 0x49, + 0x45, + 0xa4, + 0x75, + 0xd8, + 0x6, + 0xc0, + 0x94, + 0x5b, + 0x8c, + 0xf0, + 0x5b, + 0x8a, + 0xb2, + 0x76, + 0xbb, + 0x9f, + 0x6e, + 0x52, + 0x9a, + 0x11, + 0x9c, + 0x79, + 0xdd, + 0xf6, + 0x5a, + }, + ) + require.Equal( + t, + ppt.ToAffineUncompressed(), + []byte{ + 0x4, + 0x1b, + 0xa7, + 0x7e, + 0x98, + 0xd6, + 0xd8, + 0x49, + 0x45, + 0xa4, + 0x75, + 0xd8, + 0x6, + 0xc0, + 0x94, + 0x5b, + 0x8c, + 0xf0, + 0x5b, + 0x8a, + 0xb2, + 0x76, + 0xbb, + 0x9f, + 0x6e, + 0x52, + 0x9a, + 0x11, + 0x9c, + 0x79, + 0xdd, + 0xf6, + 0x5a, + 
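+			// the remaining 32 bytes are the big-endian affine y-coordinate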
0xb2, + 0x96, + 0x7c, + 0x59, + 0x4, + 0xeb, + 0x9a, + 0xaa, + 0xa9, + 0x1d, + 0x4d, + 0xd0, + 0x2d, + 0xc6, + 0x37, + 0xee, + 0x4a, + 0x95, + 0x51, + 0x60, + 0xab, + 0xab, + 0xf7, + 0xdb, + 0x30, + 0x7d, + 0x7d, + 0x0, + 0x68, + 0x6c, + 0xcf, + 0xf6, + }, + ) + retP, err := ppt.FromAffineCompressed(ppt.ToAffineCompressed()) + require.NoError(t, err) + require.True(t, ppt.Equal(retP)) + retP, err = ppt.FromAffineUncompressed(ppt.ToAffineUncompressed()) + require.NoError(t, err) + require.True(t, ppt.Equal(retP)) + + // smoke test + for i := 0; i < 25; i++ { + s := curve.Scalar.Random(crand.Reader) + pt := g.Mul(s) + cmprs := pt.ToAffineCompressed() + require.Equal(t, len(cmprs), 33) + retC, err := pt.FromAffineCompressed(cmprs) + require.NoError(t, err) + require.True(t, pt.Equal(retC)) + + un := pt.ToAffineUncompressed() + require.Equal(t, len(un), 65) + retU, err := pt.FromAffineUncompressed(un) + require.NoError(t, err) + require.True(t, pt.Equal(retU)) + } +} + +func TestPointK256Nil(t *testing.T) { + k256 := K256() + one := k256.Point.Generator() + require.Nil(t, one.Add(nil)) + require.Nil(t, one.Sub(nil)) + require.Nil(t, one.Mul(nil)) + require.Nil(t, k256.Scalar.Random(nil)) + require.False(t, one.Equal(nil)) + _, err := k256.Scalar.SetBigInt(nil) + require.Error(t, err) +} + +func TestPointK256SumOfProducts(t *testing.T) { + lhs := new(PointK256).Generator().Mul(new(ScalarK256).New(50)) + points := make([]Point, 5) + for i := range points { + points[i] = new(PointK256).Generator() + } + scalars := []Scalar{ + new(ScalarK256).New(8), + new(ScalarK256).New(9), + new(ScalarK256).New(10), + new(ScalarK256).New(11), + new(ScalarK256).New(12), + } + rhs := lhs.SumOfProducts(points, scalars) + require.NotNil(t, rhs) + require.True(t, lhs.Equal(rhs)) + + for j := 0; j < 25; j++ { + lhs = lhs.Identity() + for i := range points { + points[i] = new(PointK256).Random(crand.Reader) + scalars[i] = new(ScalarK256).Random(crand.Reader) + lhs = lhs.Add(points[i].Mul(scalars[i])) + } + rhs = lhs.SumOfProducts(points, scalars) + require.NotNil(t, rhs) + require.True(t, lhs.Equal(rhs)) + } +} diff --git a/core/curves/native/bls12381/bls12381.go b/core/curves/native/bls12381/bls12381.go new file mode 100644 index 0000000..4bd7489 --- /dev/null +++ b/core/curves/native/bls12381/bls12381.go @@ -0,0 +1,81 @@ +package bls12381 + +import ( + "math/bits" + + "github.com/sonr-io/sonr/crypto/core/curves/native" +) + +var fqModulusBytes = [native.FieldBytes]byte{ + 0x01, + 0x00, + 0x00, + 0x00, + 0xff, + 0xff, + 0xff, + 0xff, + 0xfe, + 0x5b, + 0xfe, + 0xff, + 0x02, + 0xa4, + 0xbd, + 0x53, + 0x05, + 0xd8, + 0xa1, + 0x09, + 0x08, + 0xd8, + 0x39, + 0x33, + 0x48, + 0x7d, + 0x9d, + 0x29, + 0x53, + 0xa7, + 0xed, + 0x73, +} + +const ( + // The BLS parameter x for BLS12-381 is -0xd201000000010000 + paramX = uint64(0xd201000000010000) + Limbs = 6 + FieldBytes = 48 + WideFieldBytes = 96 + DoubleWideFieldBytes = 192 +) + +// mac Multiply and Accumulate - compute a + (b * c) + d, return the result and new carry +func mac(a, b, c, d uint64) (uint64, uint64) { + hi, lo := bits.Mul64(b, c) + carry2, carry := bits.Add64(a, d, 0) + hi, _ = bits.Add64(hi, 0, carry) + lo, carry = bits.Add64(lo, carry2, 0) + hi, _ = bits.Add64(hi, 0, carry) + + return lo, hi +} + +// adc Add w/Carry +func adc(x, y, carry uint64) (uint64, uint64) { + sum := x + y + carry + // The sum will overflow if both top bits are set (x & y) or if one of them + // is (x | y), and a carry from the lower place happened. 
If such a carry + // happens, the top bit will be 1 + 0 + 1 = 0 (&^ sum). + carryOut := ((x & y) | ((x | y) &^ sum)) >> 63 + carryOut |= ((x & carry) | ((x | carry) &^ sum)) >> 63 + carryOut |= ((y & carry) | ((y | carry) &^ sum)) >> 63 + return sum, carryOut +} + +// sbb Subtract with borrow +func sbb(x, y, borrow uint64) (uint64, uint64) { + diff := x - (y + borrow) + borrowOut := ((^x & y) | (^(x ^ y) & diff)) >> 63 + return diff, borrowOut +} diff --git a/core/curves/native/bls12381/fp.go b/core/curves/native/bls12381/fp.go new file mode 100644 index 0000000..a19f28e --- /dev/null +++ b/core/curves/native/bls12381/fp.go @@ -0,0 +1,697 @@ +package bls12381 + +import ( + "encoding/binary" + "fmt" + "io" + "math/big" + + "github.com/sonr-io/sonr/crypto/core/curves/native" + "github.com/sonr-io/sonr/crypto/internal" +) + +// fp field element mod p +type fp [Limbs]uint64 + +var ( + modulus = fp{ + 0xb9feffffffffaaab, + 0x1eabfffeb153ffff, + 0x6730d2a0f6b0f624, + 0x64774b84f38512bf, + 0x4b1ba7b6434bacd7, + 0x1a0111ea397fe69a, + } + halfModulus = fp{ + 0xdcff_7fff_ffff_d556, + 0x0f55_ffff_58a9_ffff, + 0xb398_6950_7b58_7b12, + 0xb23b_a5c2_79c2_895f, + 0x258d_d3db_21a5_d66b, + 0x0d00_88f5_1cbf_f34d, + } + // 2^256 mod p + r = fp{ + 0x760900000002fffd, + 0xebf4000bc40c0002, + 0x5f48985753c758ba, + 0x77ce585370525745, + 0x5c071a97a256ec6d, + 0x15f65ec3fa80e493, + } + // 2^512 mod p + r2 = fp{ + 0xf4df1f341c341746, + 0x0a76e6a609d104f1, + 0x8de5476c4c95b6d5, + 0x67eb88a9939d83c0, + 0x9a793e85b519952d, + 0x11988fe592cae3aa, + } + // 2^768 mod p + r3 = fp{ + 0xed48ac6bd94ca1e0, + 0x315f831e03a7adf8, + 0x9a53352a615e29dd, + 0x34c04e5e921e1761, + 0x2512d43565724728, + 0x0aa6346091755d4d, + } + biModulus = new(big.Int).SetBytes([]byte{ + 0x1a, 0x01, 0x11, 0xea, 0x39, 0x7f, 0xe6, 0x9a, 0x4b, 0x1b, 0xa7, 0xb6, 0x43, 0x4b, 0xac, 0xd7, 0x64, 0x77, 0x4b, 0x84, 0xf3, 0x85, 0x12, 0xbf, 0x67, 0x30, 0xd2, 0xa0, 0xf6, 0xb0, 0xf6, 0x24, 0x1e, 0xab, 0xff, 0xfe, 0xb1, 0x53, 0xff, 0xff, 0xb9, 0xfe, 0xff, 0xff, 0xff, 0xff, 0xaa, 0xab, + }, + ) +) + +// inv = -(p^{-1} mod 2^64) mod 2^64 +const ( + inv = 0x89f3_fffc_fffc_fffd + hashBytes = 64 +) + +// IsZero returns 1 if fp == 0, 0 otherwise +func (f *fp) IsZero() int { + t := f[0] + t |= f[1] + t |= f[2] + t |= f[3] + t |= f[4] + t |= f[5] + return int(((int64(t) | int64(-t)) >> 63) + 1) +} + +// IsNonZero returns 1 if fp != 0, 0 otherwise +func (f *fp) IsNonZero() int { + t := f[0] + t |= f[1] + t |= f[2] + t |= f[3] + t |= f[4] + t |= f[5] + return int(-((int64(t) | int64(-t)) >> 63)) +} + +// IsOne returns 1 if fp == 1, 0 otherwise +func (f *fp) IsOne() int { + return f.Equal(&r) +} + +// Cmp returns -1 if f < rhs +// 0 if f == rhs +// 1 if f > rhs +func (f *fp) Cmp(rhs *fp) int { + gt := uint64(0) + lt := uint64(0) + for i := 5; i >= 0; i-- { + // convert to two 64-bit numbers where + // the leading bits are zeros and hold no meaning + // so rhs - f actually means gt + // and f - rhs actually means lt. 
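+ //
+ // Because each 32-bit half sits in the low half of a 64-bit word, a
+ // subtraction that would underflow leaves ones in the upper half, so
+ // `>> 32 & 1` extracts a borrow flag. The `&^ lt` / `&^ gt` masks ensure
+ // only the most significant half-limb that differs decides the outcome,
+ // keeping the comparison constant time.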
+ rhsH := rhs[i] >> 32 + rhsL := rhs[i] & 0xffffffff + lhsH := f[i] >> 32 + lhsL := f[i] & 0xffffffff + + // Check the leading bit + // if negative then f > rhs + // if positive then f < rhs + gt |= (rhsH - lhsH) >> 32 & 1 &^ lt + lt |= (lhsH - rhsH) >> 32 & 1 &^ gt + gt |= (rhsL - lhsL) >> 32 & 1 &^ lt + lt |= (lhsL - rhsL) >> 32 & 1 &^ gt + } + // Make the result -1 for <, 0 for =, 1 for > + return int(gt) - int(lt) +} + +// Equal returns 1 if fp == rhs, 0 otherwise +func (f *fp) Equal(rhs *fp) int { + t := f[0] ^ rhs[0] + t |= f[1] ^ rhs[1] + t |= f[2] ^ rhs[2] + t |= f[3] ^ rhs[3] + t |= f[4] ^ rhs[4] + t |= f[5] ^ rhs[5] + return int(((int64(t) | int64(-t)) >> 63) + 1) +} + +// LexicographicallyLargest returns 1 if +// this element is strictly lexicographically larger than its negation +// 0 otherwise +func (f *fp) LexicographicallyLargest() int { + var ff fp + ff.fromMontgomery(f) + + _, borrow := sbb(ff[0], halfModulus[0], 0) + _, borrow = sbb(ff[1], halfModulus[1], borrow) + _, borrow = sbb(ff[2], halfModulus[2], borrow) + _, borrow = sbb(ff[3], halfModulus[3], borrow) + _, borrow = sbb(ff[4], halfModulus[4], borrow) + _, borrow = sbb(ff[5], halfModulus[5], borrow) + + return (int(borrow) - 1) & 1 +} + +// Sgn0 returns the lowest bit value +func (f *fp) Sgn0() int { + t := new(fp).fromMontgomery(f) + return int(t[0] & 1) +} + +// SetOne fp = r +func (f *fp) SetOne() *fp { + f[0] = r[0] + f[1] = r[1] + f[2] = r[2] + f[3] = r[3] + f[4] = r[4] + f[5] = r[5] + return f +} + +// SetZero fp = 0 +func (f *fp) SetZero() *fp { + f[0] = 0 + f[1] = 0 + f[2] = 0 + f[3] = 0 + f[4] = 0 + f[5] = 0 + return f +} + +// SetUint64 fp = rhs +func (f *fp) SetUint64(rhs uint64) *fp { + f[0] = rhs + f[1] = 0 + f[2] = 0 + f[3] = 0 + f[4] = 0 + f[5] = 0 + return f.toMontgomery(f) +} + +// Random generates a random field element +func (f *fp) Random(reader io.Reader) (*fp, error) { + var t [WideFieldBytes]byte + n, err := reader.Read(t[:]) + if err != nil { + return nil, err + } + if n != WideFieldBytes { + return nil, fmt.Errorf("can only read %d when %d are needed", n, WideFieldBytes) + } + return f.Hash(t[:]), nil +} + +// Hash converts the byte sequence into a field element +func (f *fp) Hash(input []byte) *fp { + dst := []byte("BLS12381_XMD:SHA-256_SSWU_RO_") + xmd := native.ExpandMsgXmd(native.EllipticPointHasherSha256(), input, dst, hashBytes) + var t [WideFieldBytes]byte + copy(t[:hashBytes], internal.ReverseScalarBytes(xmd)) + return f.SetBytesWide(&t) +} + +// toMontgomery converts this field to montgomery form +func (f *fp) toMontgomery(a *fp) *fp { + // arg.R^0 * R^2 / R = arg.R + return f.Mul(a, &r2) +} + +// fromMontgomery converts this field from montgomery form +func (f *fp) fromMontgomery(a *fp) *fp { + // Mul by 1 is division by 2^256 mod q + // out.Mul(arg, &[native.FieldLimbs]uint64{1, 0, 0, 0}) + return f.montReduce(&[Limbs * 2]uint64{a[0], a[1], a[2], a[3], a[4], a[5], 0, 0, 0, 0, 0, 0}) +} + +// Neg performs modular negation +func (f *fp) Neg(a *fp) *fp { + // Subtract `arg` from `modulus`. Ignore final borrow + // since it can't underflow. + var t [Limbs]uint64 + var borrow uint64 + t[0], borrow = sbb(modulus[0], a[0], 0) + t[1], borrow = sbb(modulus[1], a[1], borrow) + t[2], borrow = sbb(modulus[2], a[2], borrow) + t[3], borrow = sbb(modulus[3], a[3], borrow) + t[4], borrow = sbb(modulus[4], a[4], borrow) + t[5], _ = sbb(modulus[5], a[5], borrow) + + // t could be `modulus` if `arg`=0. 
Set mask=0 if self=0 + // and 0xff..ff if `arg`!=0 + mask := a[0] | a[1] | a[2] | a[3] | a[4] | a[5] + mask = -((mask | -mask) >> 63) + f[0] = t[0] & mask + f[1] = t[1] & mask + f[2] = t[2] & mask + f[3] = t[3] & mask + f[4] = t[4] & mask + f[5] = t[5] & mask + return f +} + +// Square performs modular square +func (f *fp) Square(a *fp) *fp { + var r [2 * Limbs]uint64 + var carry uint64 + + r[1], carry = mac(0, a[0], a[1], 0) + r[2], carry = mac(0, a[0], a[2], carry) + r[3], carry = mac(0, a[0], a[3], carry) + r[4], carry = mac(0, a[0], a[4], carry) + r[5], r[6] = mac(0, a[0], a[5], carry) + + r[3], carry = mac(r[3], a[1], a[2], 0) + r[4], carry = mac(r[4], a[1], a[3], carry) + r[5], carry = mac(r[5], a[1], a[4], carry) + r[6], r[7] = mac(r[6], a[1], a[5], carry) + + r[5], carry = mac(r[5], a[2], a[3], 0) + r[6], carry = mac(r[6], a[2], a[4], carry) + r[7], r[8] = mac(r[7], a[2], a[5], carry) + + r[7], carry = mac(r[7], a[3], a[4], 0) + r[8], r[9] = mac(r[8], a[3], a[5], carry) + + r[9], r[10] = mac(r[9], a[4], a[5], 0) + + r[11] = r[10] >> 63 + r[10] = (r[10] << 1) | r[9]>>63 + r[9] = (r[9] << 1) | r[8]>>63 + r[8] = (r[8] << 1) | r[7]>>63 + r[7] = (r[7] << 1) | r[6]>>63 + r[6] = (r[6] << 1) | r[5]>>63 + r[5] = (r[5] << 1) | r[4]>>63 + r[4] = (r[4] << 1) | r[3]>>63 + r[3] = (r[3] << 1) | r[2]>>63 + r[2] = (r[2] << 1) | r[1]>>63 + r[1] = r[1] << 1 + + r[0], carry = mac(0, a[0], a[0], 0) + r[1], carry = adc(0, r[1], carry) + r[2], carry = mac(r[2], a[1], a[1], carry) + r[3], carry = adc(0, r[3], carry) + r[4], carry = mac(r[4], a[2], a[2], carry) + r[5], carry = adc(0, r[5], carry) + r[6], carry = mac(r[6], a[3], a[3], carry) + r[7], carry = adc(0, r[7], carry) + r[8], carry = mac(r[8], a[4], a[4], carry) + r[9], carry = adc(0, r[9], carry) + r[10], carry = mac(r[10], a[5], a[5], carry) + r[11], _ = adc(0, r[11], carry) + + return f.montReduce(&r) +} + +// Double this element +func (f *fp) Double(a *fp) *fp { + return f.Add(a, a) +} + +// Mul performs modular multiplication +func (f *fp) Mul(arg1, arg2 *fp) *fp { + // Schoolbook multiplication + var r [2 * Limbs]uint64 + var carry uint64 + + r[0], carry = mac(0, arg1[0], arg2[0], 0) + r[1], carry = mac(0, arg1[0], arg2[1], carry) + r[2], carry = mac(0, arg1[0], arg2[2], carry) + r[3], carry = mac(0, arg1[0], arg2[3], carry) + r[4], carry = mac(0, arg1[0], arg2[4], carry) + r[5], r[6] = mac(0, arg1[0], arg2[5], carry) + + r[1], carry = mac(r[1], arg1[1], arg2[0], 0) + r[2], carry = mac(r[2], arg1[1], arg2[1], carry) + r[3], carry = mac(r[3], arg1[1], arg2[2], carry) + r[4], carry = mac(r[4], arg1[1], arg2[3], carry) + r[5], carry = mac(r[5], arg1[1], arg2[4], carry) + r[6], r[7] = mac(r[6], arg1[1], arg2[5], carry) + + r[2], carry = mac(r[2], arg1[2], arg2[0], 0) + r[3], carry = mac(r[3], arg1[2], arg2[1], carry) + r[4], carry = mac(r[4], arg1[2], arg2[2], carry) + r[5], carry = mac(r[5], arg1[2], arg2[3], carry) + r[6], carry = mac(r[6], arg1[2], arg2[4], carry) + r[7], r[8] = mac(r[7], arg1[2], arg2[5], carry) + + r[3], carry = mac(r[3], arg1[3], arg2[0], 0) + r[4], carry = mac(r[4], arg1[3], arg2[1], carry) + r[5], carry = mac(r[5], arg1[3], arg2[2], carry) + r[6], carry = mac(r[6], arg1[3], arg2[3], carry) + r[7], carry = mac(r[7], arg1[3], arg2[4], carry) + r[8], r[9] = mac(r[8], arg1[3], arg2[5], carry) + + r[4], carry = mac(r[4], arg1[4], arg2[0], 0) + r[5], carry = mac(r[5], arg1[4], arg2[1], carry) + r[6], carry = mac(r[6], arg1[4], arg2[2], carry) + r[7], carry = mac(r[7], arg1[4], arg2[3], carry) + r[8], carry = mac(r[8], arg1[4], 
arg2[4], carry) + r[9], r[10] = mac(r[9], arg1[4], arg2[5], carry) + + r[5], carry = mac(r[5], arg1[5], arg2[0], 0) + r[6], carry = mac(r[6], arg1[5], arg2[1], carry) + r[7], carry = mac(r[7], arg1[5], arg2[2], carry) + r[8], carry = mac(r[8], arg1[5], arg2[3], carry) + r[9], carry = mac(r[9], arg1[5], arg2[4], carry) + r[10], r[11] = mac(r[10], arg1[5], arg2[5], carry) + + return f.montReduce(&r) +} + +// MulBy3b returns arg * 12 or 3 * b +func (f *fp) MulBy3b(arg *fp) *fp { + var a, t fp + a.Double(arg) // 2 + t.Double(&a) // 4 + a.Double(&t) // 8 + a.Add(&a, &t) // 12 + return f.Set(&a) +} + +// Add performs modular addition +func (f *fp) Add(arg1, arg2 *fp) *fp { + var t fp + var carry uint64 + + t[0], carry = adc(arg1[0], arg2[0], 0) + t[1], carry = adc(arg1[1], arg2[1], carry) + t[2], carry = adc(arg1[2], arg2[2], carry) + t[3], carry = adc(arg1[3], arg2[3], carry) + t[4], carry = adc(arg1[4], arg2[4], carry) + t[5], _ = adc(arg1[5], arg2[5], carry) + + // Subtract the modulus to ensure the value + // is smaller. + return f.Sub(&t, &modulus) +} + +// Sub performs modular subtraction +func (f *fp) Sub(arg1, arg2 *fp) *fp { + d0, borrow := sbb(arg1[0], arg2[0], 0) + d1, borrow := sbb(arg1[1], arg2[1], borrow) + d2, borrow := sbb(arg1[2], arg2[2], borrow) + d3, borrow := sbb(arg1[3], arg2[3], borrow) + d4, borrow := sbb(arg1[4], arg2[4], borrow) + d5, borrow := sbb(arg1[5], arg2[5], borrow) + + // If underflow occurred on the final limb, borrow 0xff...ff, otherwise + // borrow = 0x00...00. Conditionally mask to add the modulus + borrow = -borrow + d0, carry := adc(d0, modulus[0]&borrow, 0) + d1, carry = adc(d1, modulus[1]&borrow, carry) + d2, carry = adc(d2, modulus[2]&borrow, carry) + d3, carry = adc(d3, modulus[3]&borrow, carry) + d4, carry = adc(d4, modulus[4]&borrow, carry) + d5, _ = adc(d5, modulus[5]&borrow, carry) + + f[0] = d0 + f[1] = d1 + f[2] = d2 + f[3] = d3 + f[4] = d4 + f[5] = d5 + return f +} + +// Sqrt performs modular square root +func (f *fp) Sqrt(a *fp) (*fp, int) { + // Shank's method, as p = 3 (mod 4). This means + // exponentiate by (p+1)/4. This only works for elements + // that are actually quadratic residue, + // so check the result at the end. 
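+ //
+ // The hard-coded limbs passed to pow below are (p + 1) / 4 in little-endian
+ // limb order, so z = a^((p + 1) / 4). Squaring z and comparing it with a
+ // tells us whether a was a quadratic residue; CMove keeps the previous
+ // value of f when it was not.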
+ var c, z fp + z.pow(a, &fp{ + 0xee7fbfffffffeaab, + 0x07aaffffac54ffff, + 0xd9cc34a83dac3d89, + 0xd91dd2e13ce144af, + 0x92c6e9ed90d2eb35, + 0x0680447a8e5ff9a6, + }) + + c.Square(&z) + wasSquare := c.Equal(a) + f.CMove(f, &z, wasSquare) + return f, wasSquare +} + +// Invert performs modular inverse +func (f *fp) Invert(a *fp) (*fp, int) { + // Exponentiate by p - 2 + t := &fp{} + t.pow(a, &fp{ + 0xb9feffffffffaaa9, + 0x1eabfffeb153ffff, + 0x6730d2a0f6b0f624, + 0x64774b84f38512bf, + 0x4b1ba7b6434bacd7, + 0x1a0111ea397fe69a, + }) + wasInverted := a.IsNonZero() + f.CMove(a, t, wasInverted) + return f, wasInverted +} + +// SetBytes converts a little endian byte array into a field element +// return 0 if the bytes are not in the field, 1 if they are +func (f *fp) SetBytes(arg *[FieldBytes]byte) (*fp, int) { + var borrow uint64 + t := &fp{} + + t[0] = binary.LittleEndian.Uint64(arg[:8]) + t[1] = binary.LittleEndian.Uint64(arg[8:16]) + t[2] = binary.LittleEndian.Uint64(arg[16:24]) + t[3] = binary.LittleEndian.Uint64(arg[24:32]) + t[4] = binary.LittleEndian.Uint64(arg[32:40]) + t[5] = binary.LittleEndian.Uint64(arg[40:]) + + // Try to subtract the modulus + _, borrow = sbb(t[0], modulus[0], 0) + _, borrow = sbb(t[1], modulus[1], borrow) + _, borrow = sbb(t[2], modulus[2], borrow) + _, borrow = sbb(t[3], modulus[3], borrow) + _, borrow = sbb(t[4], modulus[4], borrow) + _, borrow = sbb(t[5], modulus[5], borrow) + + // If the element is smaller than modulus then the + // subtraction will underflow, producing a borrow value + // of 1. Otherwise, it'll be zero. + mask := int(borrow) + return f.CMove(f, t.toMontgomery(t), mask), mask +} + +// SetBytesWide takes 96 bytes as input and treats them as a 512-bit number. +// Attributed to https://github.com/zcash/pasta_curves/blob/main/src/fields/Fp.rs#L255 +// We reduce an arbitrary 768-bit number by decomposing it into two 384-bit digits +// with the higher bits multiplied by 2^384. Thus, we perform two reductions +// +// 1. the lower bits are multiplied by r^2, as normal +// 2. the upper bits are multiplied by r^2 * 2^384 = r^3 +// +// and computing their sum in the field. It remains to see that arbitrary 384-bit +// numbers can be placed into Montgomery form safely using the reduction. The +// reduction works so long as the product is less than r=2^384 multiplied by +// the modulus. This holds because for any `c` smaller than the modulus, we have +// that (2^384 - 1)*c is an acceptable product for the reduction. Therefore, the +// reduction always works so long as `c` is in the field; in this case it is either the +// constant `r2` or `r3`. 
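+//
+// Concretely, with R = 2^384 here and Montgomery multiplication computing
+// x*y*R^(-1) mod p, splitting the 768-bit input as n = d0 + d1*2^384 gives
+//
+//	d0*r2*R^(-1) + d1*r3*R^(-1) = d0*R + d1*R^2 = (d0 + d1*2^384)*R mod p
+//
+// which is exactly n in Montgomery form.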
+func (f *fp) SetBytesWide(a *[WideFieldBytes]byte) *fp { + d0 := &fp{ + binary.LittleEndian.Uint64(a[:8]), + binary.LittleEndian.Uint64(a[8:16]), + binary.LittleEndian.Uint64(a[16:24]), + binary.LittleEndian.Uint64(a[24:32]), + binary.LittleEndian.Uint64(a[32:40]), + binary.LittleEndian.Uint64(a[40:48]), + } + d1 := &fp{ + binary.LittleEndian.Uint64(a[48:56]), + binary.LittleEndian.Uint64(a[56:64]), + binary.LittleEndian.Uint64(a[64:72]), + binary.LittleEndian.Uint64(a[72:80]), + binary.LittleEndian.Uint64(a[80:88]), + binary.LittleEndian.Uint64(a[88:96]), + } + // d0*r2 + d1*r3 + d0.Mul(d0, &r2) + d1.Mul(d1, &r3) + return f.Add(d0, d1) +} + +// SetBigInt initializes an element from big.Int +// The value is reduced by the modulus +func (f *fp) SetBigInt(bi *big.Int) *fp { + var buffer [FieldBytes]byte + t := new(big.Int).Set(bi) + t.Mod(t, biModulus) + t.FillBytes(buffer[:]) + copy(buffer[:], internal.ReverseScalarBytes(buffer[:])) + _, _ = f.SetBytes(&buffer) + return f +} + +// Set copies a into fp +func (f *fp) Set(a *fp) *fp { + f[0] = a[0] + f[1] = a[1] + f[2] = a[2] + f[3] = a[3] + f[4] = a[4] + f[5] = a[5] + return f +} + +// SetLimbs converts an array into a field element +// by converting to montgomery form +func (f *fp) SetLimbs(a *[Limbs]uint64) *fp { + return f.toMontgomery((*fp)(a)) +} + +// SetRaw converts a raw array into a field element +// Assumes input is already in montgomery form +func (f *fp) SetRaw(a *[Limbs]uint64) *fp { + f[0] = a[0] + f[1] = a[1] + f[2] = a[2] + f[3] = a[3] + f[4] = a[4] + f[5] = a[5] + return f +} + +// Bytes converts a field element to a little endian byte array +func (f *fp) Bytes() [FieldBytes]byte { + var out [FieldBytes]byte + t := new(fp).fromMontgomery(f) + binary.LittleEndian.PutUint64(out[:8], t[0]) + binary.LittleEndian.PutUint64(out[8:16], t[1]) + binary.LittleEndian.PutUint64(out[16:24], t[2]) + binary.LittleEndian.PutUint64(out[24:32], t[3]) + binary.LittleEndian.PutUint64(out[32:40], t[4]) + binary.LittleEndian.PutUint64(out[40:], t[5]) + return out +} + +// BigInt converts this element into the big.Int struct +func (f *fp) BigInt() *big.Int { + buffer := f.Bytes() + return new(big.Int).SetBytes(internal.ReverseScalarBytes(buffer[:])) +} + +// Raw converts this element into the a [FieldLimbs]uint64 +func (f *fp) Raw() [Limbs]uint64 { + t := new(fp).fromMontgomery(f) + return *t +} + +// CMove performs conditional select. +// selects arg1 if choice == 0 and arg2 if choice == 1 +func (f *fp) CMove(arg1, arg2 *fp, choice int) *fp { + mask := uint64(-choice) + f[0] = arg1[0] ^ ((arg1[0] ^ arg2[0]) & mask) + f[1] = arg1[1] ^ ((arg1[1] ^ arg2[1]) & mask) + f[2] = arg1[2] ^ ((arg1[2] ^ arg2[2]) & mask) + f[3] = arg1[3] ^ ((arg1[3] ^ arg2[3]) & mask) + f[4] = arg1[4] ^ ((arg1[4] ^ arg2[4]) & mask) + f[5] = arg1[5] ^ ((arg1[5] ^ arg2[5]) & mask) + return f +} + +// CNeg conditionally negates a if choice == 1 +func (f *fp) CNeg(a *fp, choice int) *fp { + var t fp + t.Neg(a) + return f.CMove(f, &t, choice) +} + +// Exp raises base^exp. 
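+// The exponent is first converted out of Montgomery form so that its
+// canonical limb values drive the square-and-multiply loop in pow, which
+// stays constant time by selecting with CMove instead of branching on each
+// exponent bit.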
+func (f *fp) Exp(base, exp *fp) *fp { + e := (&fp{}).fromMontgomery(exp) + return f.pow(base, e) +} + +func (f *fp) pow(base, e *fp) *fp { + var tmp, res fp + res.SetOne() + + for i := len(e) - 1; i >= 0; i-- { + for j := 63; j >= 0; j-- { + res.Square(&res) + tmp.Mul(&res, base) + res.CMove(&res, &tmp, int(e[i]>>j)&1) + } + } + f[0] = res[0] + f[1] = res[1] + f[2] = res[2] + f[3] = res[3] + f[4] = res[4] + f[5] = res[5] + return f +} + +// montReduce performs the montgomery reduction +func (f *fp) montReduce(r *[2 * Limbs]uint64) *fp { + // Taken from Algorithm 14.32 in Handbook of Applied Cryptography + var r1, r2, r3, r4, r5, r6, r7, r8, r9, r10, r11, carry, k uint64 + var rr fp + + k = r[0] * inv + _, carry = mac(r[0], k, modulus[0], 0) + r1, carry = mac(r[1], k, modulus[1], carry) + r2, carry = mac(r[2], k, modulus[2], carry) + r3, carry = mac(r[3], k, modulus[3], carry) + r4, carry = mac(r[4], k, modulus[4], carry) + r5, carry = mac(r[5], k, modulus[5], carry) + r6, r7 = adc(r[6], 0, carry) + + k = r1 * inv + _, carry = mac(r1, k, modulus[0], 0) + r2, carry = mac(r2, k, modulus[1], carry) + r3, carry = mac(r3, k, modulus[2], carry) + r4, carry = mac(r4, k, modulus[3], carry) + r5, carry = mac(r5, k, modulus[4], carry) + r6, carry = mac(r6, k, modulus[5], carry) + r7, r8 = adc(r7, r[7], carry) + + k = r2 * inv + _, carry = mac(r2, k, modulus[0], 0) + r3, carry = mac(r3, k, modulus[1], carry) + r4, carry = mac(r4, k, modulus[2], carry) + r5, carry = mac(r5, k, modulus[3], carry) + r6, carry = mac(r6, k, modulus[4], carry) + r7, carry = mac(r7, k, modulus[5], carry) + r8, r9 = adc(r8, r[8], carry) + + k = r3 * inv + _, carry = mac(r3, k, modulus[0], 0) + r4, carry = mac(r4, k, modulus[1], carry) + r5, carry = mac(r5, k, modulus[2], carry) + r6, carry = mac(r6, k, modulus[3], carry) + r7, carry = mac(r7, k, modulus[4], carry) + r8, carry = mac(r8, k, modulus[5], carry) + r9, r10 = adc(r9, r[9], carry) + + k = r4 * inv + _, carry = mac(r4, k, modulus[0], 0) + r5, carry = mac(r5, k, modulus[1], carry) + r6, carry = mac(r6, k, modulus[2], carry) + r7, carry = mac(r7, k, modulus[3], carry) + r8, carry = mac(r8, k, modulus[4], carry) + r9, carry = mac(r9, k, modulus[5], carry) + r10, r11 = adc(r10, r[10], carry) + + k = r5 * inv + _, carry = mac(r5, k, modulus[0], 0) + rr[0], carry = mac(r6, k, modulus[1], carry) + rr[1], carry = mac(r7, k, modulus[2], carry) + rr[2], carry = mac(r8, k, modulus[3], carry) + rr[3], carry = mac(r9, k, modulus[4], carry) + rr[4], carry = mac(r10, k, modulus[5], carry) + rr[5], _ = adc(r11, r[11], carry) + + return f.Sub(&rr, &modulus) +} diff --git a/core/curves/native/bls12381/fp12.go b/core/curves/native/bls12381/fp12.go new file mode 100755 index 0000000..847930f --- /dev/null +++ b/core/curves/native/bls12381/fp12.go @@ -0,0 +1,231 @@ +package bls12381 + +import "io" + +// fp12 represents an element a + b w of fp^12 = fp^6 / w^2 - v. 
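+// The extension tower used throughout this package is
+//
+//	fp2  = fp[u]  / (u^2 + 1)
+//	fp6  = fp2[v] / (v^3 - (u + 1))
+//	fp12 = fp6[w] / (w^2 - v)
+//
+// so an fp12 element is stored as the two fp6 coefficients A and B of a + b w.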
+type fp12 struct { + A, B fp6 +} + +// SetFp creates an element from a lower field +func (f *fp12) SetFp(a *fp) *fp12 { + f.A.SetFp(a) + f.B.SetZero() + return f +} + +// SetFp2 creates an element from a lower field +func (f *fp12) SetFp2(a *fp2) *fp12 { + f.A.SetFp2(a) + f.B.SetZero() + return f +} + +// SetFp6 creates an element from a lower field +func (f *fp12) SetFp6(a *fp6) *fp12 { + f.A.Set(a) + f.B.SetZero() + return f +} + +// Set copies the value `a` +func (f *fp12) Set(a *fp12) *fp12 { + f.A.Set(&a.A) + f.B.Set(&a.B) + return f +} + +// SetZero fp6 to zero +func (f *fp12) SetZero() *fp12 { + f.A.SetZero() + f.B.SetZero() + return f +} + +// SetOne fp6 to multiplicative identity element +func (f *fp12) SetOne() *fp12 { + f.A.SetOne() + f.B.SetZero() + return f +} + +// Random generates a random field element +func (f *fp12) Random(reader io.Reader) (*fp12, error) { + a, err := new(fp6).Random(reader) + if err != nil { + return nil, err + } + b, err := new(fp6).Random(reader) + if err != nil { + return nil, err + } + f.A.Set(a) + f.B.Set(b) + return f, nil +} + +// Square computes arg^2 +func (f *fp12) Square(arg *fp12) *fp12 { + var ab, apb, aTick, bTick, t fp6 + + ab.Mul(&arg.A, &arg.B) + apb.Add(&arg.A, &arg.B) + + aTick.MulByNonResidue(&arg.B) + aTick.Add(&aTick, &arg.A) + aTick.Mul(&aTick, &apb) + aTick.Sub(&aTick, &ab) + t.MulByNonResidue(&ab) + aTick.Sub(&aTick, &t) + + bTick.Double(&ab) + + f.A.Set(&aTick) + f.B.Set(&bTick) + return f +} + +// Invert computes this element's field inversion +func (f *fp12) Invert(arg *fp12) (*fp12, int) { + var a, b, t fp6 + a.Square(&arg.A) + b.Square(&arg.B) + b.MulByNonResidue(&b) + a.Sub(&a, &b) + _, wasInverted := t.Invert(&a) + + a.Mul(&arg.A, &t) + t.Neg(&t) + b.Mul(&arg.B, &t) + f.A.CMove(&f.A, &a, wasInverted) + f.B.CMove(&f.B, &b, wasInverted) + return f, wasInverted +} + +// Add computes arg1+arg2 +func (f *fp12) Add(arg1, arg2 *fp12) *fp12 { + f.A.Add(&arg1.A, &arg2.A) + f.B.Add(&arg1.B, &arg2.B) + return f +} + +// Sub computes arg1-arg2 +func (f *fp12) Sub(arg1, arg2 *fp12) *fp12 { + f.A.Sub(&arg1.A, &arg2.A) + f.B.Sub(&arg1.B, &arg2.B) + return f +} + +// Mul computes arg1*arg2 +func (f *fp12) Mul(arg1, arg2 *fp12) *fp12 { + var aa, bb, a2b2, a, b fp6 + + aa.Mul(&arg1.A, &arg2.A) + bb.Mul(&arg1.B, &arg2.B) + a2b2.Add(&arg2.A, &arg2.B) + b.Add(&arg1.A, &arg1.B) + b.Mul(&b, &a2b2) + b.Sub(&b, &aa) + b.Sub(&b, &bb) + a.MulByNonResidue(&bb) + a.Add(&a, &aa) + + f.A.Set(&a) + f.B.Set(&b) + return f +} + +// Neg computes the field negation +func (f *fp12) Neg(arg *fp12) *fp12 { + f.A.Neg(&arg.A) + f.B.Neg(&arg.B) + return f +} + +// MulByABD computes arg * a * b * c +func (f *fp12) MulByABD(arg *fp12, a, b, d *fp2) *fp12 { + var aa, bb, aTick, bTick fp6 + var bd fp2 + + aa.MulByAB(&arg.A, a, b) + bb.MulByB(&arg.B, d) + bd.Add(b, d) + + bTick.Add(&arg.A, &arg.B) + bTick.MulByAB(&bTick, a, &bd) + bTick.Sub(&bTick, &aa) + bTick.Sub(&bTick, &bb) + + aTick.MulByNonResidue(&bb) + aTick.Add(&aTick, &aa) + + f.A.Set(&aTick) + f.B.Set(&bTick) + + return f +} + +// Conjugate computes the field conjugation +func (f *fp12) Conjugate(arg *fp12) *fp12 { + f.A.Set(&arg.A) + f.B.Neg(&arg.B) + return f +} + +// FrobeniusMap raises this element to p. 
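+// Both fp6 coefficients are mapped component-wise and the w coefficient is
+// then scaled by the constant (u + 1)^((p - 1) / 6) defined in the body,
+// since w^6 = u + 1 implies w^(p-1) = (u + 1)^((p - 1) / 6).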
+func (f *fp12) FrobeniusMap(arg *fp12) *fp12 { + var a, b, up1epm1div6 fp6 + + // (u + 1)^((p - 1) / 6) + up1epm1div6.A = fp2{ + A: fp{ + 0x07089552b319d465, + 0xc6695f92b50a8313, + 0x97e83cccd117228f, + 0xa35baecab2dc29ee, + 0x1ce393ea5daace4d, + 0x08f2220fb0fb66eb, + }, + B: fp{ + 0xb2f66aad4ce5d646, + 0x5842a06bfc497cec, + 0xcf4895d42599d394, + 0xc11b9cba40a8e8d0, + 0x2e3813cbe5a0de89, + 0x110eefda88847faf, + }, + } + + a.FrobeniusMap(&arg.A) + b.FrobeniusMap(&arg.B) + + // b' = b' * (u + 1)^((p - 1) / 6) + b.Mul(&b, &up1epm1div6) + + f.A.Set(&a) + f.B.Set(&b) + return f +} + +// Equal returns 1 if fp12 == rhs, 0 otherwise +func (f *fp12) Equal(rhs *fp12) int { + return f.A.Equal(&rhs.A) & f.B.Equal(&rhs.B) +} + +// IsZero returns 1 if fp6 == 0, 0 otherwise +func (f *fp12) IsZero() int { + return f.A.IsZero() & f.B.IsZero() +} + +// IsOne returns 1 if fp12 == 1, 0 otherwise +func (f *fp12) IsOne() int { + return f.A.IsOne() & f.B.IsZero() +} + +// CMove performs conditional select. +// selects arg1 if choice == 0 and arg2 if choice == 1 +func (f *fp12) CMove(arg1, arg2 *fp12, choice int) *fp12 { + f.A.CMove(&arg1.A, &arg2.A, choice) + f.B.CMove(&arg1.B, &arg2.B, choice) + return f +} diff --git a/core/curves/native/bls12381/fp12_test.go b/core/curves/native/bls12381/fp12_test.go new file mode 100755 index 0000000..c0ee091 --- /dev/null +++ b/core/curves/native/bls12381/fp12_test.go @@ -0,0 +1,417 @@ +package bls12381 + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestFp12Arithmetic(t *testing.T) { + var aa, bb, cc, d, e, f fp12 + a := fp12{ + A: fp6{ + A: fp2{ + A: fp{ + 0x47f9cb98b1b82d58, + 0x5fe911eba3aa1d9d, + 0x96bf1b5f4dd81db3, + 0x8100d27cc9259f5b, + 0xafa20b9674640eab, + 0x09bbcea7d8d9497d, + }, + B: fp{ + 0x0303cb98b1662daa, + 0xd93110aa0a621d5a, + 0xbfa9820c5be4a468, + 0x0ba3643ecb05a348, + 0xdc3534bb1f1c25a6, + 0x06c305bb19c0e1c1, + }, + }, + B: fp2{ + A: fp{ + 0x46f9cb98b162d858, + 0x0be9109cf7aa1d57, + 0xc791bc55fece41d2, + 0xf84c57704e385ec2, + 0xcb49c1d9c010e60f, + 0x0acdb8e158bfe3c8, + }, + B: fp{ + 0x8aefcb98b15f8306, + 0x3ea1108fe4f21d54, + 0xcf79f69fa1b7df3b, + 0xe4f54aa1d16b1a3c, + 0xba5e4ef86105a679, + 0x0ed86c0797bee5cf, + }, + }, + C: fp2{ + A: fp{ + 0xcee5cb98b15c2db4, + 0x71591082d23a1d51, + 0xd76230e944a17ca4, + 0xd19e3dd3549dd5b6, + 0xa972dc1701fa66e3, + 0x12e31f2dd6bde7d6, + }, + B: fp{ + 0xad2acb98b1732d9d, + 0x2cfd10dd06961d64, + 0x07396b86c6ef24e8, + 0xbd76e2fdb1bfc820, + 0x6afea7f6de94d0d5, + 0x10994b0c5744c040, + }, + }, + }, + B: fp6{ + A: fp2{ + A: fp{ + 0x47f9cb98b1b82d58, + 0x5fe911eba3aa1d9d, + 0x96bf1b5f4dd81db3, + 0x8100d27cc9259f5b, + 0xafa20b9674640eab, + 0x09bbcea7d8d9497d, + }, + B: fp{ + 0x0303cb98b1662daa, + 0xd93110aa0a621d5a, + 0xbfa9820c5be4a468, + 0x0ba3643ecb05a348, + 0xdc3534bb1f1c25a6, + 0x06c305bb19c0e1c1, + }, + }, + B: fp2{ + A: fp{ + 0x46f9cb98b162d858, + 0x0be9109cf7aa1d57, + 0xc791bc55fece41d2, + 0xf84c57704e385ec2, + 0xcb49c1d9c010e60f, + 0x0acdb8e158bfe3c8, + }, + B: fp{ + 0x8aefcb98b15f8306, + 0x3ea1108fe4f21d54, + 0xcf79f69fa1b7df3b, + 0xe4f54aa1d16b1a3c, + 0xba5e4ef86105a679, + 0x0ed86c0797bee5cf, + }, + }, + C: fp2{ + A: fp{ + 0xcee5cb98b15c2db4, + 0x71591082d23a1d51, + 0xd76230e944a17ca4, + 0xd19e3dd3549dd5b6, + 0xa972dc1701fa66e3, + 0x12e31f2dd6bde7d6, + }, + B: fp{ + 0xad2acb98b1732d9d, + 0x2cfd10dd06961d64, + 0x07396b86c6ef24e8, + 0xbd76e2fdb1bfc820, + 0x6afea7f6de94d0d5, + 0x10994b0c5744c040, + }, + }, + }, + } + + b := fp12{ + A: fp6{ + A: fp2{ + A: fp{ + 0x47f9_cb98_b1b8_2d58, + 
0x5fe9_11eb_a3aa_1d9d, + 0x96bf_1b5f_4dd8_1db3, + 0x8100_d272_c925_9f5b, + 0xafa2_0b96_7464_0eab, + 0x09bb_cea7_d8d9_497d, + }, + B: fp{ + 0x0303_cb98_b166_2daa, + 0xd931_10aa_0a62_1d5a, + 0xbfa9_820c_5be4_a468, + 0x0ba3_643e_cb05_a348, + 0xdc35_34bb_1f1c_25a6, + 0x06c3_05bb_19c0_e1c1, + }, + }, + B: fp2{ + A: fp{ + 0x46f9_cb98_b162_d858, + 0x0be9_109c_f7aa_1d57, + 0xc791_bc55_fece_41d2, + 0xf84c_5770_4e38_5ec2, + 0xcb49_c1d9_c010_e60f, + 0x0acd_b8e1_58bf_e348, + }, + B: fp{ + 0x8aef_cb98_b15f_8306, + 0x3ea1_108f_e4f2_1d54, + 0xcf79_f69f_a1b7_df3b, + 0xe4f5_4aa1_d16b_1a3c, + 0xba5e_4ef8_6105_a679, + 0x0ed8_6c07_97be_e5cf, + }, + }, + C: fp2{ + A: fp{ + 0xcee5_cb98_b15c_2db4, + 0x7159_1082_d23a_1d51, + 0xd762_30e9_44a1_7ca4, + 0xd19e_3dd3_549d_d5b6, + 0xa972_dc17_01fa_66e3, + 0x12e3_1f2d_d6bd_e7d6, + }, + B: fp{ + 0xad2a_cb98_b173_2d9d, + 0x2cfd_10dd_0696_1d64, + 0x0739_6b86_c6ef_24e8, + 0xbd76_e2fd_b1bf_c820, + 0x6afe_a7f6_de94_d0d5, + 0x1099_4b0c_5744_c040, + }, + }, + }, + B: fp6{ + A: fp2{ + A: fp{ + 0x47f9_cb98_b1b8_2d58, + 0x5fe9_11eb_a3aa_1d9d, + 0x96bf_1b5f_4dd2_1db3, + 0x8100_d27c_c925_9f5b, + 0xafa2_0b96_7464_0eab, + 0x09bb_cea7_d8d9_497d, + }, + B: fp{ + 0x0303_cb98_b166_2daa, + 0xd931_10aa_0a62_1d5a, + 0xbfa9_820c_5be4_a468, + 0x0ba3_643e_cb05_a348, + 0xdc35_34bb_1f1c_25a6, + 0x06c3_05bb_19c0_e1c1, + }, + }, + B: fp2{ + A: fp{ + 0x46f9_cb98_b162_d858, + 0x0be9_109c_f7aa_1d57, + 0xc791_bc55_fece_41d2, + 0xf84c_5770_4e38_5ec2, + 0xcb49_c1d9_c010_e60f, + 0x0acd_b8e1_58bf_e3c8, + }, + B: fp{ + 0x8aef_cb98_b15f_8306, + 0x3ea1_108f_e4f2_1d54, + 0xcf79_f69f_a117_df3b, + 0xe4f5_4aa1_d16b_1a3c, + 0xba5e_4ef8_6105_a679, + 0x0ed8_6c07_97be_e5cf, + }, + }, + C: fp2{ + A: fp{ + 0xcee5_cb98_b15c_2db4, + 0x7159_1082_d23a_1d51, + 0xd762_30e9_44a1_7ca4, + 0xd19e_3dd3_549d_d5b6, + 0xa972_dc17_01fa_66e3, + 0x12e3_1f2d_d6bd_e7d6, + }, + B: fp{ + 0xad2a_cb98_b173_2d9d, + 0x2cfd_10dd_0696_1d64, + 0x0739_6b86_c6ef_24e8, + 0xbd76_e2fd_b1bf_c820, + 0x6afe_a7f6_de94_d0d5, + 0x1099_4b0c_5744_c040, + }, + }, + }, + } + + c := fp12{ + A: fp6{ + A: fp2{ + A: fp{ + 0x47f9_cb98_71b8_2d58, + 0x5fe9_11eb_a3aa_1d9d, + 0x96bf_1b5f_4dd8_1db3, + 0x8100_d27c_c925_9f5b, + 0xafa2_0b96_7464_0eab, + 0x09bb_cea7_d8d9_497d, + }, + B: fp{ + 0x0303_cb98_b166_2daa, + 0xd931_10aa_0a62_1d5a, + 0xbfa9_820c_5be4_a468, + 0x0ba3_643e_cb05_a348, + 0xdc35_34bb_1f1c_25a6, + 0x06c3_05bb_19c0_e1c1, + }, + }, + B: fp2{ + A: fp{ + 0x46f9_cb98_b162_d858, + 0x0be9_109c_f7aa_1d57, + 0x7791_bc55_fece_41d2, + 0xf84c_5770_4e38_5ec2, + 0xcb49_c1d9_c010_e60f, + 0x0acd_b8e1_58bf_e3c8, + }, + B: fp{ + 0x8aef_cb98_b15f_8306, + 0x3ea1_108f_e4f2_1d54, + 0xcf79_f69f_a1b7_df3b, + 0xe4f5_4aa1_d16b_133c, + 0xba5e_4ef8_6105_a679, + 0x0ed8_6c07_97be_e5cf, + }, + }, + C: fp2{ + A: fp{ + 0xcee5_cb98_b15c_2db4, + 0x7159_1082_d23a_1d51, + 0xd762_40e9_44a1_7ca4, + 0xd19e_3dd3_549d_d5b6, + 0xa972_dc17_01fa_66e3, + 0x12e3_1f2d_d6bd_e7d6, + }, + B: fp{ + 0xad2a_cb98_b173_2d9d, + 0x2cfd_10dd_0696_1d64, + 0x0739_6b86_c6ef_24e8, + 0xbd76_e2fd_b1bf_c820, + 0x6afe_a7f6_de94_d0d5, + 0x1099_4b0c_1744_c040, + }, + }, + }, + B: fp6{ + A: fp2{ + A: fp{ + 0x47f9_cb98_b1b8_2d58, + 0x5fe9_11eb_a3aa_1d9d, + 0x96bf_1b5f_4dd8_1db3, + 0x8100_d27c_c925_9f5b, + 0xafa2_0b96_7464_0eab, + 0x09bb_cea7_d8d9_497d, + }, + B: fp{ + 0x0303_cb98_b166_2daa, + 0xd931_10aa_0a62_1d5a, + 0xbfa9_820c_5be4_a468, + 0x0ba3_643e_cb05_a348, + 0xdc35_34bb_1f1c_25a6, + 0x06c3_05bb_19c0_e1c1, + }, + }, + B: fp2{ + A: fp{ + 0x46f9_cb98_b162_d858, + 0x0be9_109c_f7aa_1d57, + 0xc791_bc55_fece_41d2, + 
0xf84c_5770_4e38_5ec2, + 0xcb49_c1d3_c010_e60f, + 0x0acd_b8e1_58bf_e3c8, + }, + B: fp{ + 0x8aef_cb98_b15f_8306, + 0x3ea1_108f_e4f2_1d54, + 0xcf79_f69f_a1b7_df3b, + 0xe4f5_4aa1_d16b_1a3c, + 0xba5e_4ef8_6105_a679, + 0x0ed8_6c07_97be_e5cf, + }, + }, + C: fp2{ + A: fp{ + 0xcee5_cb98_b15c_2db4, + 0x7159_1082_d23a_1d51, + 0xd762_30e9_44a1_7ca4, + 0xd19e_3dd3_549d_d5b6, + 0xa972_dc17_01fa_66e3, + 0x12e3_1f2d_d6bd_e7d6, + }, + B: fp{ + 0xad2a_cb98_b173_2d9d, + 0x2cfd_10dd_0696_1d64, + 0x0739_6b86_c6ef_24e8, + 0xbd76_e2fd_b1bf_c820, + 0x6afe_a7f6_de94_d0d5, + 0x1099_4b0c_5744_1040, + }, + }, + }, + } + + aa.Square(&a) + aa.Invert(&aa) + aa.Square(&aa) + aa.Add(&aa, &c) + + bb.Square(&b) + bb.Invert(&bb) + bb.Square(&bb) + bb.Add(&bb, &aa) + + cc.Square(&c) + cc.Invert(&cc) + cc.Square(&cc) + cc.Add(&cc, &bb) + + d.Square(&aa) + e.Mul(&aa, &aa) + require.Equal(t, 1, e.Equal(&d)) + + d.Square(&bb) + e.Mul(&bb, &bb) + require.Equal(t, 1, e.Equal(&d)) + + d.Square(&cc) + e.Mul(&cc, &cc) + require.Equal(t, 1, e.Equal(&d)) + + d.Square(&cc) + e.Add(&aa, &bb) + d.Mul(&d, &e) + + e.Mul(&cc, &cc) + e.Mul(&e, &aa) + f.Mul(&cc, &cc) + f.Mul(&f, &bb) + e.Add(&e, &f) + + require.Equal(t, 1, e.Equal(&d)) + + d.Invert(&aa) + e.Invert(&bb) + f.Mul(&aa, &bb) + f.Invert(&f) + require.Equal(t, 1, f.Equal(e.Mul(&e, &d))) + + require.Equal(t, 1, d.Mul(&d, &aa).IsOne()) + + require.Equal(t, 0, aa.Equal(d.FrobeniusMap(&aa))) + d.FrobeniusMap(&aa). + FrobeniusMap(&d). + FrobeniusMap(&d). + FrobeniusMap(&d). + FrobeniusMap(&d). + FrobeniusMap(&d). + FrobeniusMap(&d). + FrobeniusMap(&d). + FrobeniusMap(&d). + FrobeniusMap(&d). + FrobeniusMap(&d). + FrobeniusMap(&d) + require.Equal(t, 1, aa.Equal(&d)) +} diff --git a/core/curves/native/bls12381/fp2.go b/core/curves/native/bls12381/fp2.go new file mode 100755 index 0000000..85cdd10 --- /dev/null +++ b/core/curves/native/bls12381/fp2.go @@ -0,0 +1,344 @@ +package bls12381 + +import ( + "io" +) + +// fp2 is a point in p^2 +type fp2 struct { + A, B fp +} + +// Set copies a into fp2 +func (f *fp2) Set(a *fp2) *fp2 { + f.A.Set(&a.A) + f.B.Set(&a.B) + return f +} + +// SetZero fp2 = 0 +func (f *fp2) SetZero() *fp2 { + f.A.SetZero() + f.B.SetZero() + return f +} + +// SetOne fp2 to the multiplicative identity element +func (f *fp2) SetOne() *fp2 { + f.A.SetOne() + f.B.SetZero() + return f +} + +// SetFp creates an element from a lower field +func (f *fp2) SetFp(a *fp) *fp2 { + f.A.Set(a) + f.B.SetZero() + return f +} + +// Random generates a random field element +func (f *fp2) Random(reader io.Reader) (*fp2, error) { + a, err := new(fp).Random(reader) + if err != nil { + return nil, err + } + b, err := new(fp).Random(reader) + if err != nil { + return nil, err + } + f.A = *a + f.B = *b + return f, nil +} + +// IsZero returns 1 if fp2 == 0, 0 otherwise +func (f *fp2) IsZero() int { + return f.A.IsZero() & f.B.IsZero() +} + +// IsOne returns 1 if fp2 == 1, 0 otherwise +func (f *fp2) IsOne() int { + return f.A.IsOne() & f.B.IsZero() +} + +// Equal returns 1 if f == rhs, 0 otherwise +func (f *fp2) Equal(rhs *fp2) int { + return f.A.Equal(&rhs.A) & f.B.Equal(&rhs.B) +} + +// LexicographicallyLargest returns 1 if +// this element is strictly lexicographically larger than its negation +// 0 otherwise +func (f *fp2) LexicographicallyLargest() int { + // If this element's B coefficient is lexicographically largest + // then it is lexicographically largest. 
Otherwise, in the event + // the B coefficient is zero and the A coefficient is + // lexicographically largest, then this element is lexicographically + // largest. + + return f.B.LexicographicallyLargest() | + f.B.IsZero()&f.A.LexicographicallyLargest() +} + +// Sgn0 returns the lowest bit value +func (f *fp2) Sgn0() int { + // if A = 0 return B.Sgn0 else A.Sgn0 + a := f.A.IsZero() + t := f.B.Sgn0() & a + a = -a + 1 + t |= f.A.Sgn0() & a + return t +} + +// FrobeniusMap raises this element to p. +func (f *fp2) FrobeniusMap(a *fp2) *fp2 { + // This is always just a conjugation. If you're curious why, here's + // an article about it: https://alicebob.cryptoland.net/the-frobenius-endomorphism-with-finite-fields/ + return f.Conjugate(a) +} + +// Conjugate computes the conjugation of this element +func (f *fp2) Conjugate(a *fp2) *fp2 { + f.A.Set(&a.A) + f.B.Neg(&a.B) + return f +} + +// MulByNonResidue computes the following: +// multiply a + bu by u + 1, getting +// au + a + bu^2 + bu +// and because u^2 = -1, we get +// (a - b) + (a + b)u +func (f *fp2) MulByNonResidue(a *fp2) *fp2 { + var aa, bb fp + aa.Sub(&a.A, &a.B) + bb.Add(&a.A, &a.B) + f.A.Set(&aa) + f.B.Set(&bb) + return f +} + +// Square computes the square of this element +func (f *fp2) Square(arg *fp2) *fp2 { + var a, b, c fp + + // Complex squaring: + // + // v0 = a * b + // a' = (a + b) * (a + \beta*b) - v0 - \beta * v0 + // b' = 2 * v0 + // + // In BLS12-381's F_{p^2}, our \beta is -1, so we + // can modify this formula: + // + // a' = (a + b) * (a - b) + // b' = 2 * a * b + a.Add(&arg.A, &arg.B) + b.Sub(&arg.A, &arg.B) + c.Add(&arg.A, &arg.A) + + f.A.Mul(&a, &b) + f.B.Mul(&c, &arg.B) + return f +} + +// Add performs field addition +func (f *fp2) Add(arg1, arg2 *fp2) *fp2 { + f.A.Add(&arg1.A, &arg2.A) + f.B.Add(&arg1.B, &arg2.B) + return f +} + +// Double doubles specified element +func (f *fp2) Double(a *fp2) *fp2 { + f.A.Double(&a.A) + f.B.Double(&a.B) + return f +} + +// Sub performs field subtraction +func (f *fp2) Sub(arg1, arg2 *fp2) *fp2 { + f.A.Sub(&arg1.A, &arg2.A) + f.B.Sub(&arg1.B, &arg2.B) + return f +} + +// Mul computes Karatsuba multiplication +func (f *fp2) Mul(arg1, arg2 *fp2) *fp2 { + var v0, v1, t, a, b fp + + // Karatsuba multiplication: + // + // v0 = a0 * b0 + // v1 = a1 * b1 + // c0 = v0 + \beta * v1 + // c1 = (a0 + a1) * (b0 + b1) - v0 - v1 + // + // In BLS12-381's F_{p^2}, our \beta is -1, so we + // can modify this formula. (Also, since we always + // subtract v1, we can compute v1 = -a1 * b1.) + // + // v0 = a0 * a1 + // v1 = (-b0) * b1 + // a' = v0 + v1 + // b' = (a0 + b0) * (a1 + b1) - v0 + v1 + v0.Mul(&arg1.A, &arg2.A) + v1.Mul(new(fp).Neg(&arg1.B), &arg2.B) + + a.Add(&v0, &v1) + b.Add(&arg1.A, &arg1.B) + t.Add(&arg2.A, &arg2.B) + b.Mul(&b, &t) + b.Sub(&b, &v0) + b.Add(&b, &v1) + f.A.Set(&a) + f.B.Set(&b) + return f +} + +func (f *fp2) Mul0(arg1 *fp2, arg2 *fp) *fp2 { + f.A.Mul(&arg1.A, arg2) + f.B.Mul(&arg1.B, arg2) + return f +} + +// MulBy3b returns arg * 12 or 3 * b +func (f *fp2) MulBy3b(arg *fp2) *fp2 { + return f.Mul(arg, &curveG23B) +} + +// Neg performs field negation +func (f *fp2) Neg(a *fp2) *fp2 { + f.A.Neg(&a.A) + f.B.Neg(&a.B) + return f +} + +// Sqrt performs field square root +func (f *fp2) Sqrt(a *fp2) (*fp2, int) { + // Algorithm 9, https://eprint.iacr.org/2012/685.pdf + // with constant time modifications. 
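+ //
+ // The limb constants used below are (p - 3) / 4 for the initial power and
+ // (p - 1) / 2 for the final correction, both in little-endian limb order.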
+ var a1, alpha, x0, t, res, res2 fp2 + e1 := a.IsZero() + // a1 = self^((p - 3) / 4) + a1.pow(a, &[Limbs]uint64{ + 0xee7fbfffffffeaaa, + 0x07aaffffac54ffff, + 0xd9cc34a83dac3d89, + 0xd91dd2e13ce144af, + 0x92c6e9ed90d2eb35, + 0x0680447a8e5ff9a6, + }) + + // alpha = a1^2 * a = a^((p - 3) / 2 + 1) = a^((p - 1) / 2) + alpha.Square(&a1) + alpha.Mul(&alpha, a) + + // x0 = self^((p + 1) / 4) + x0.Mul(&a1, a) + + // In the event that alpha = -1, the element is order p - 1. So + // we're just trying to get the square of an element of the subfield + // fp. This is given by x0 * u, since u = sqrt(-1). Since the element + // x0 = a + bu has b = 0, the solution is therefore au. + res2.A.Neg(&x0.B) + res2.B.Set(&x0.A) + // alpha == -1 + e2 := alpha.Equal(&fp2{ + A: fp{ + 0x43f5fffffffcaaae, + 0x32b7fff2ed47fffd, + 0x07e83a49a2e99d69, + 0xeca8f3318332bb7a, + 0xef148d1ea0f4c069, + 0x040ab3263eff0206, + }, + B: fp{}, + }) + + // Otherwise, the correct solution is (1 + alpha)^((p - 1) // 2) * x0 + t.SetOne() + t.Add(&t, &alpha) + t.pow(&t, &[Limbs]uint64{ + 0xdcff7fffffffd555, + 0x0f55ffff58a9ffff, + 0xb39869507b587b12, + 0xb23ba5c279c2895f, + 0x258dd3db21a5d66b, + 0x0d0088f51cbff34d, + }) + t.Mul(&t, &x0) + // if a = 0, then its zero + res.CMove(&res2, &res, e1) + // if alpha = -1, its not (1 + alpha)^((p - 1) // 2) * x0 + // but au + res.CMove(&t, &res, e2) + + // is the result^2 = a + t.Square(&res) + e3 := t.Equal(a) + f.CMove(f, &res, e3) + return f, e3 +} + +// Invert computes the multiplicative inverse of this field +// element, returning the original value of fp2 +// in the case that this element is zero. +func (f *fp2) Invert(arg *fp2) (*fp2, int) { + // We wish to find the multiplicative inverse of a nonzero + // element a + bu in fp2. We leverage an identity + // + // (a + bu)(a - bu) = a^2 + b^2 + // + // which holds because u^2 = -1. This can be rewritten as + // + // (a + bu)(a - bu)/(a^2 + b^2) = 1 + // + // because a^2 + b^2 = 0 has no nonzero solutions for (a, b). + // This gives that (a - bu)/(a^2 + b^2) is the inverse + // of (a + bu). Importantly, this can be computing using + // only a single inversion in fp. + var a, b, t fp + a.Square(&arg.A) + b.Square(&arg.B) + a.Add(&a, &b) + _, wasInverted := t.Invert(&a) + // a * t + a.Mul(&arg.A, &t) + // b * -t + b.Neg(&t) + b.Mul(&b, &arg.B) + f.A.CMove(&f.A, &a, wasInverted) + f.B.CMove(&f.B, &b, wasInverted) + return f, wasInverted +} + +// CMove performs conditional select. 
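+// The choice flag is expected to be exactly 0 or 1: the underlying fp.CMove
+// expands it into a full 64-bit mask.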
+// selects arg1 if choice == 0 and arg2 if choice == 1 +func (f *fp2) CMove(arg1, arg2 *fp2, choice int) *fp2 { + f.A.CMove(&arg1.A, &arg2.A, choice) + f.B.CMove(&arg1.B, &arg2.B, choice) + return f +} + +// CNeg conditionally negates a if choice == 1 +func (f *fp2) CNeg(a *fp2, choice int) *fp2 { + var t fp2 + t.Neg(a) + return f.CMove(f, &t, choice) +} + +func (f *fp2) pow(base *fp2, exp *[Limbs]uint64) *fp2 { + res := (&fp2{}).SetOne() + tmp := (&fp2{}).SetZero() + + for i := len(exp) - 1; i >= 0; i-- { + for j := 63; j >= 0; j-- { + res.Square(res) + tmp.Mul(res, base) + res.CMove(res, tmp, int(exp[i]>>j)&1) + } + } + return f.Set(res) +} diff --git a/core/curves/native/bls12381/fp2_test.go b/core/curves/native/bls12381/fp2_test.go new file mode 100755 index 0000000..c10acee --- /dev/null +++ b/core/curves/native/bls12381/fp2_test.go @@ -0,0 +1,424 @@ +package bls12381 + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestFp2Square(t *testing.T) { + a := fp2{ + A: fp{ + 0xc9a2183163ee70d4, + 0xbc3770a7196b5c91, + 0xa247f8c1304c5f44, + 0xb01fc2a3726c80b5, + 0xe1d293e5bbd919c9, + 0x04b78e80020ef2ca, + }, + B: fp{ + 0x952ea4460462618f, + 0x238d5eddf025c62f, + 0xf6c94b012ea92e72, + 0x03ce24eac1c93808, + 0x055950f945da483c, + 0x010a768d0df4eabc, + }, + } + b := fp2{ + A: fp{ + 0xa1e09175a4d2c1fe, + 0x8b33acfc204eff12, + 0xe24415a11b456e42, + 0x61d996b1b6ee1936, + 0x1164dbe8667c853c, + 0x0788557acc7d9c79, + }, + B: fp{ + 0xda6a87cc6f48fa36, + 0x0fc7b488277c1903, + 0x9445ac4adc448187, + 0x02616d5bc9099209, + 0xdbed46772db58d48, + 0x11b94d5076c7b7b1, + }, + } + + require.Equal(t, &b, a.Square(&a)) +} + +func TestFp2Mul(t *testing.T) { + a := fp2{ + A: fp{ + 0xc9a2183163ee70d4, + 0xbc3770a7196b5c91, + 0xa247f8c1304c5f44, + 0xb01fc2a3726c80b5, + 0xe1d293e5bbd919c9, + 0x04b78e80020ef2ca, + }, + B: fp{ + 0x952ea4460462618f, + 0x238d5eddf025c62f, + 0xf6c94b012ea92e72, + 0x03ce24eac1c93808, + 0x055950f945da483c, + 0x010a768d0df4eabc, + }, + } + b := fp2{ + A: fp{ + 0xa1e09175a4d2c1fe, + 0x8b33acfc204eff12, + 0xe24415a11b456e42, + 0x61d996b1b6ee1936, + 0x1164dbe8667c853c, + 0x0788557acc7d9c79, + }, + B: fp{ + 0xda6a87cc6f48fa36, + 0x0fc7b488277c1903, + 0x9445ac4adc448187, + 0x02616d5bc9099209, + 0xdbed46772db58d48, + 0x11b94d5076c7b7b1, + }, + } + c := fp2{ + A: fp{ + 0xf597483e27b4e0f7, + 0x610fbadf811dae5f, + 0x8432af917714327a, + 0x6a9a9603cf88f09e, + 0xf05a7bf8bad0eb01, + 0x09549131c003ffae, + }, + B: fp{ + 0x963b02d0f93d37cd, + 0xc95ce1cdb30a73d4, + 0x308725fa3126f9b8, + 0x56da3c167fab0d50, + 0x6b5086b5f4b6d6af, + 0x09c39f062f18e9f2, + }, + } + + require.Equal(t, 1, c.Equal(new(fp2).Mul(&a, &b))) +} + +func TestFp2Add(t *testing.T) { + a := fp2{ + A: fp{ + 0xc9a2183163ee70d4, + 0xbc3770a7196b5c91, + 0xa247f8c1304c5f44, + 0xb01fc2a3726c80b5, + 0xe1d293e5bbd919c9, + 0x04b78e80020ef2ca, + }, + B: fp{ + 0x952ea4460462618f, + 0x238d5eddf025c62f, + 0xf6c94b012ea92e72, + 0x03ce24eac1c93808, + 0x055950f945da483c, + 0x010a768d0df4eabc, + }, + } + b := fp2{ + A: fp{ + 0xa1e09175a4d2c1fe, + 0x8b33acfc204eff12, + 0xe24415a11b456e42, + 0x61d996b1b6ee1936, + 0x1164dbe8667c853c, + 0x0788557acc7d9c79, + }, + B: fp{ + 0xda6a87cc6f48fa36, + 0x0fc7b488277c1903, + 0x9445ac4adc448187, + 0x02616d5bc9099209, + 0xdbed46772db58d48, + 0x11b94d5076c7b7b1, + }, + } + c := fp2{ + A: fp{ + 0x6b82a9a708c132d2, + 0x476b1da339ba5ba4, + 0x848c0e624b91cd87, + 0x11f95955295a99ec, + 0xf3376fce22559f06, + 0x0c3fe3face8c8f43, + }, + B: fp{ + 0x6f992c1273ab5bc5, + 0x3355136617a1df33, + 0x8b0ef74c0aedaff9, + 
0x062f92468ad2ca12, + 0xe1469770738fd584, + 0x12c3c3dd84bca26d, + }, + } + + require.Equal(t, 1, c.Equal(new(fp2).Add(&a, &b))) +} + +func TestFp2Sub(t *testing.T) { + a := fp2{ + A: fp{ + 0xc9a2183163ee70d4, + 0xbc3770a7196b5c91, + 0xa247f8c1304c5f44, + 0xb01fc2a3726c80b5, + 0xe1d293e5bbd919c9, + 0x04b78e80020ef2ca, + }, + B: fp{ + 0x952ea4460462618f, + 0x238d5eddf025c62f, + 0xf6c94b012ea92e72, + 0x03ce24eac1c93808, + 0x055950f945da483c, + 0x010a768d0df4eabc, + }, + } + b := fp2{ + A: fp{ + 0xa1e09175a4d2c1fe, + 0x8b33acfc204eff12, + 0xe24415a11b456e42, + 0x61d996b1b6ee1936, + 0x1164dbe8667c853c, + 0x0788557acc7d9c79, + }, + B: fp{ + 0xda6a87cc6f48fa36, + 0x0fc7b488277c1903, + 0x9445ac4adc448187, + 0x02616d5bc9099209, + 0xdbed46772db58d48, + 0x11b94d5076c7b7b1, + }, + } + c := fp2{ + A: fp{ + 0xe1c086bbbf1b5981, + 0x4fafc3a9aa705d7e, + 0x2734b5c10bb7e726, + 0xb2bd7776af037a3e, + 0x1b895fb398a84164, + 0x17304aef6f113cec, + }, + B: fp{ + 0x74c31c7995191204, + 0x3271aa5479fdad2b, + 0xc9b471574915a30f, + 0x65e40313ec44b8be, + 0x7487b2385b7067cb, + 0x09523b26d0ad19a4, + }, + } + + require.Equal(t, 1, c.Equal(new(fp2).Sub(&a, &b))) +} + +func TestFp2Neg(t *testing.T) { + a := fp2{ + A: fp{ + 0xc9a2183163ee70d4, + 0xbc3770a7196b5c91, + 0xa247f8c1304c5f44, + 0xb01fc2a3726c80b5, + 0xe1d293e5bbd919c9, + 0x04b78e80020ef2ca, + }, + B: fp{ + 0x952ea4460462618f, + 0x238d5eddf025c62f, + 0xf6c94b012ea92e72, + 0x03ce24eac1c93808, + 0x055950f945da483c, + 0x010a768d0df4eabc, + }, + } + b := fp2{ + A: fp{ + 0xf05ce7ce9c1139d7, + 0x62748f5797e8a36d, + 0xc4e8d9dfc66496df, + 0xb45788e181189209, + 0x694913d08772930d, + 0x1549836a3770f3cf, + }, + B: fp{ + 0x24d05bb9fb9d491c, + 0xfb1ea120c12e39d0, + 0x7067879fc807c7b1, + 0x60a9269a31bbdab6, + 0x45c256bcfd71649b, + 0x18f69b5d2b8afbde, + }, + } + + require.Equal(t, 1, b.Equal(new(fp2).Neg(&a))) +} + +func TestFp2Sqrt(t *testing.T) { + a := fp2{ + A: fp{ + 0x2beed14627d7f9e9, + 0xb6614e06660e5dce, + 0x06c4cc7c2f91d42c, + 0x996d78474b7a63cc, + 0xebaebc4c820d574e, + 0x18865e12d93fd845, + }, + B: fp{ + 0x7d828664baf4f566, + 0xd17e663996ec7339, + 0x679ead55cb4078d0, + 0xfe3b2260e001ec28, + 0x305993d043d91b68, + 0x0626f03c0489b72d, + }, + } + + asq, wasSquare := (&fp2{}).Sqrt(&a) + require.Equal(t, 1, wasSquare) + require.Equal(t, 1, a.Equal(asq.Square(asq))) + + b := fp2{ + A: fp{ + 0x6631000000105545, + 0x211400400eec000d, + 0x3fa7af30c820e316, + 0xc52a8b8d6387695d, + 0x9fb4e61d1e83eac5, + 0x005cb922afe84dc7, + }, + B: fp{}, + } + bsq, wasSquare := (&fp2{}).Sqrt(&b) + require.Equal(t, 1, wasSquare) + require.Equal(t, 1, b.Equal(bsq.Square(bsq))) + + c := fp2{ + A: fp{ + 0x44f600000051ffae, + 0x86b8014199480043, + 0xd7159952f1f3794a, + 0x755d6e3dfe1ffc12, + 0xd36cd6db5547e905, + 0x02f8c8ecbf1867bb, + }, + B: fp{}, + } + csq, wasSquare := (&fp2{}).Sqrt(&c) + require.Equal(t, 1, wasSquare) + require.Equal(t, 1, c.Equal(csq.Square(csq))) + + d := fp2{ + A: fp{ + 0xc5fa1bc8fd00d7f6, + 0x3830ca454606003b, + 0x2b287f1104b102da, + 0xa7fb30f28230f23e, + 0x339cdb9ee953dbf0, + 0x0d78ec51d989fc57, + }, + B: fp{ + 0x27ec4898cf87f613, + 0x9de1394e1abb05a5, + 0x0947f85dc170fc14, + 0x586fbc696b6114b7, + 0x2b3475a4077d7169, + 0x13e1c895cc4b6c22, + }, + } + _, wasSquare = (&fp2{}).Sqrt(&d) + require.Equal(t, 0, wasSquare) + + _, wasSquare = (&fp2{}).Sqrt(&fp2{}) + require.Equal(t, 1, wasSquare) +} + +func TestFp2Invert(t *testing.T) { + a := fp2{ + A: fp{ + 0x1128ecad67549455, + 0x9e7a1cff3a4ea1a8, + 0xeb208d51e08bcf27, + 0xe98ad40811f5fc2b, + 0x736c3a59232d511d, + 0x10acd42d29cfcbb6, + 
}, + B: fp{ + 0xd328e37cc2f58d41, + 0x948df0858a605869, + 0x6032f9d56f93a573, + 0x2be483ef3fffdc87, + 0x30ef61f88f483c2a, + 0x1333f55a35725be0, + }, + } + + b := fp2{ + A: fp{ + 0x0581a1333d4f48a6, + 0x58242f6ef0748500, + 0x0292c955349e6da5, + 0xba37721ddd95fcd0, + 0x70d167903aa5dfc5, + 0x11895e118b58a9d5, + }, + B: fp{ + 0x0eda09d2d7a85d17, + 0x8808e137a7d1a2cf, + 0x43ae2625c1ff21db, + 0xf85ac9fdf7a74c64, + 0x8fccdda5b8da9738, + 0x08e84f0cb32cd17d, + }, + } + + ainv, wasInverted := (&fp2{}).Invert(&a) + require.Equal(t, 1, wasInverted) + require.Equal(t, 1, b.Equal(ainv)) + + _, wasInverted = (&fp2{}).Invert(&fp2{}) + require.Equal(t, 0, wasInverted) +} + +func TestFp2LexicographicallyLargest(t *testing.T) { + require.Equal(t, 0, new(fp2).SetZero().LexicographicallyLargest()) + require.Equal(t, 0, new(fp2).SetOne().LexicographicallyLargest()) + + a := fp2{ + A: fp{ + 0x1128_ecad_6754_9455, + 0x9e7a_1cff_3a4e_a1a8, + 0xeb20_8d51_e08b_cf27, + 0xe98a_d408_11f5_fc2b, + 0x736c_3a59_232d_511d, + 0x10ac_d42d_29cf_cbb6, + }, + B: fp{ + 0xd328_e37c_c2f5_8d41, + 0x948d_f085_8a60_5869, + 0x6032_f9d5_6f93_a573, + 0x2be4_83ef_3fff_dc87, + 0x30ef_61f8_8f48_3c2a, + 0x1333_f55a_3572_5be0, + }, + } + + require.Equal(t, 1, a.LexicographicallyLargest()) + aNeg := new(fp2).Neg(&a) + require.Equal(t, 0, aNeg.LexicographicallyLargest()) + a.B.SetZero() + require.Equal(t, 0, a.LexicographicallyLargest()) + aNeg.B.SetZero() + require.Equal(t, 1, aNeg.LexicographicallyLargest()) +} diff --git a/core/curves/native/bls12381/fp6.go b/core/curves/native/bls12381/fp6.go new file mode 100755 index 0000000..0863fd1 --- /dev/null +++ b/core/curves/native/bls12381/fp6.go @@ -0,0 +1,340 @@ +package bls12381 + +import "io" + +// fp6 represents an element +// a + b v + c v^2 of fp^6 = fp^2 / v^3 - u - 1. 
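+// Reductions of v^3 terms use v^3 = u + 1, i.e. multiplication by the fp2
+// non-residue u + 1 implemented by fp2's MulByNonResidue.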
+type fp6 struct { + A, B, C fp2 +} + +// Set fp6 = a +func (f *fp6) Set(a *fp6) *fp6 { + f.A.Set(&a.A) + f.B.Set(&a.B) + f.C.Set(&a.C) + return f +} + +// SetFp creates an element from a lower field +func (f *fp6) SetFp(a *fp) *fp6 { + f.A.SetFp(a) + f.B.SetZero() + f.C.SetZero() + return f +} + +// SetFp2 creates an element from a lower field +func (f *fp6) SetFp2(a *fp2) *fp6 { + f.A.Set(a) + f.B.SetZero() + f.C.SetZero() + return f +} + +// SetZero fp6 to zero +func (f *fp6) SetZero() *fp6 { + f.A.SetZero() + f.B.SetZero() + f.C.SetZero() + return f +} + +// SetOne fp6 to multiplicative identity element +func (f *fp6) SetOne() *fp6 { + f.A.SetOne() + f.B.SetZero() + f.C.SetZero() + return f +} + +// Random generates a random field element +func (f *fp6) Random(reader io.Reader) (*fp6, error) { + a, err := new(fp2).Random(reader) + if err != nil { + return nil, err + } + b, err := new(fp2).Random(reader) + if err != nil { + return nil, err + } + c, err := new(fp2).Random(reader) + if err != nil { + return nil, err + } + f.A.Set(a) + f.B.Set(b) + f.C.Set(c) + return f, nil +} + +// Add computes arg1+arg2 +func (f *fp6) Add(arg1, arg2 *fp6) *fp6 { + f.A.Add(&arg1.A, &arg2.A) + f.B.Add(&arg1.B, &arg2.B) + f.C.Add(&arg1.C, &arg2.C) + return f +} + +// Double computes arg1+arg1 +func (f *fp6) Double(arg *fp6) *fp6 { + return f.Add(arg, arg) +} + +// Sub computes arg1-arg2 +func (f *fp6) Sub(arg1, arg2 *fp6) *fp6 { + f.A.Sub(&arg1.A, &arg2.A) + f.B.Sub(&arg1.B, &arg2.B) + f.C.Sub(&arg1.C, &arg2.C) + return f +} + +// Mul computes arg1*arg2 +func (f *fp6) Mul(arg1, arg2 *fp6) *fp6 { + var aa, bb, cc, s, t1, t2, t3 fp2 + + aa.Mul(&arg1.A, &arg2.A) + bb.Mul(&arg1.B, &arg2.B) + cc.Mul(&arg1.C, &arg2.C) + + t1.Add(&arg2.B, &arg2.C) + s.Add(&arg1.B, &arg1.C) + t1.Mul(&t1, &s) + t1.Sub(&t1, &bb) + t1.Sub(&t1, &cc) + t1.MulByNonResidue(&t1) + t1.Add(&t1, &aa) + + t3.Add(&arg2.A, &arg2.C) + s.Add(&arg1.A, &arg1.C) + t3.Mul(&t3, &s) + t3.Sub(&t3, &aa) + t3.Add(&t3, &bb) + t3.Sub(&t3, &cc) + + t2.Add(&arg2.A, &arg2.B) + s.Add(&arg1.A, &arg1.B) + t2.Mul(&t2, &s) + t2.Sub(&t2, &aa) + t2.Sub(&t2, &bb) + cc.MulByNonResidue(&cc) + t2.Add(&t2, &cc) + + f.A.Set(&t1) + f.B.Set(&t2) + f.C.Set(&t3) + return f +} + +// MulByB scales this field by a scalar in the B coefficient +func (f *fp6) MulByB(arg *fp6, b *fp2) *fp6 { + var bB, t1, t2 fp2 + bB.Mul(&arg.B, b) + // (b + c) * arg2 - bB + t1.Add(&arg.B, &arg.C) + t1.Mul(&t1, b) + t1.Sub(&t1, &bB) + t1.MulByNonResidue(&t1) + + t2.Add(&arg.A, &arg.B) + t2.Mul(&t2, b) + t2.Sub(&t2, &bB) + + f.A.Set(&t1) + f.B.Set(&t2) + f.C.Set(&bB) + return f +} + +// MulByAB scales this field by scalars in the A and B coefficients +func (f *fp6) MulByAB(arg *fp6, a, b *fp2) *fp6 { + var aA, bB, t1, t2, t3 fp2 + + aA.Mul(&arg.A, a) + bB.Mul(&arg.B, b) + + t1.Add(&arg.B, &arg.C) + t1.Mul(&t1, b) + t1.Sub(&t1, &bB) + t1.MulByNonResidue(&t1) + t1.Add(&t1, &aA) + + t2.Add(a, b) + t3.Add(&arg.A, &arg.B) + t2.Mul(&t2, &t3) + t2.Sub(&t2, &aA) + t2.Sub(&t2, &bB) + + t3.Add(&arg.A, &arg.C) + t3.Mul(&t3, a) + t3.Sub(&t3, &aA) + t3.Add(&t3, &bB) + + f.A.Set(&t1) + f.B.Set(&t2) + f.C.Set(&t3) + + return f +} + +// MulByNonResidue multiplies by quadratic nonresidue v. 
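+// In coefficient form this is the cyclic shift (a, b, c) -> (c*(u + 1), a, b).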
+func (f *fp6) MulByNonResidue(arg *fp6) *fp6 { + // Given a + bv + cv^2, this produces + // av + bv^2 + cv^3 + // but because v^3 = u + 1, we have + // c(u + 1) + av + bv^2 + var a, b, c fp2 + a.MulByNonResidue(&arg.C) + b.Set(&arg.A) + c.Set(&arg.B) + f.A.Set(&a) + f.B.Set(&b) + f.C.Set(&c) + return f +} + +// FrobeniusMap raises this element to p. +func (f *fp6) FrobeniusMap(arg *fp6) *fp6 { + var a, b, c fp2 + pm1Div3 := fp2{ + A: fp{}, + B: fp{ + 0xcd03c9e48671f071, + 0x5dab22461fcda5d2, + 0x587042afd3851b95, + 0x8eb60ebe01bacb9e, + 0x03f97d6e83d050d2, + 0x18f0206554638741, + }, + } + p2m2Div3 := fp2{ + A: fp{ + 0x890dc9e4867545c3, + 0x2af322533285a5d5, + 0x50880866309b7e2c, + 0xa20d1b8c7e881024, + 0x14e4f04fe2db9068, + 0x14e56d3f1564853a, + }, + B: fp{}, + } + a.FrobeniusMap(&arg.A) + b.FrobeniusMap(&arg.B) + c.FrobeniusMap(&arg.C) + + // b = b * (u + 1)^((p - 1) / 3) + b.Mul(&b, &pm1Div3) + + // c = c * (u + 1)^((2p - 2) / 3) + c.Mul(&c, &p2m2Div3) + + f.A.Set(&a) + f.B.Set(&b) + f.C.Set(&c) + return f +} + +// Square computes fp6^2 +func (f *fp6) Square(arg *fp6) *fp6 { + var s0, s1, s2, s3, s4, ab, bc fp2 + + s0.Square(&arg.A) + ab.Mul(&arg.A, &arg.B) + s1.Double(&ab) + s2.Sub(&arg.A, &arg.B) + s2.Add(&s2, &arg.C) + s2.Square(&s2) + bc.Mul(&arg.B, &arg.C) + s3.Double(&bc) + s4.Square(&arg.C) + + f.A.MulByNonResidue(&s3) + f.A.Add(&f.A, &s0) + + f.B.MulByNonResidue(&s4) + f.B.Add(&f.B, &s1) + + // s1 + s2 + s3 - s0 - s4 + f.C.Add(&s1, &s2) + f.C.Add(&f.C, &s3) + f.C.Sub(&f.C, &s0) + f.C.Sub(&f.C, &s4) + + return f +} + +// Invert computes this element's field inversion +func (f *fp6) Invert(arg *fp6) (*fp6, int) { + var a, b, c, s, t fp2 + + // a' = a^2 - (b * c).mul_by_nonresidue() + a.Mul(&arg.B, &arg.C) + a.MulByNonResidue(&a) + t.Square(&arg.A) + a.Sub(&t, &a) + + // b' = (c^2).mul_by_nonresidue() - (a * b) + b.Square(&arg.C) + b.MulByNonResidue(&b) + t.Mul(&arg.A, &arg.B) + b.Sub(&b, &t) + + // c' = b^2 - (a * c) + c.Square(&arg.B) + t.Mul(&arg.A, &arg.C) + c.Sub(&c, &t) + + // t = ((b * c') + (c * b')).mul_by_nonresidue() + (a * a') + s.Mul(&arg.B, &c) + t.Mul(&arg.C, &b) + s.Add(&s, &t) + s.MulByNonResidue(&s) + + t.Mul(&arg.A, &a) + s.Add(&s, &t) + + _, wasInverted := t.Invert(&s) + + // newA = a' * t^-1 + s.Mul(&a, &t) + f.A.CMove(&f.A, &s, wasInverted) + // newB = b' * t^-1 + s.Mul(&b, &t) + f.B.CMove(&f.B, &s, wasInverted) + // newC = c' * t^-1 + s.Mul(&c, &t) + f.C.CMove(&f.C, &s, wasInverted) + return f, wasInverted +} + +// Neg computes the field negation +func (f *fp6) Neg(arg *fp6) *fp6 { + f.A.Neg(&arg.A) + f.B.Neg(&arg.B) + f.C.Neg(&arg.C) + return f +} + +// IsZero returns 1 if fp6 == 0, 0 otherwise +func (f *fp6) IsZero() int { + return f.A.IsZero() & f.B.IsZero() & f.C.IsZero() +} + +// IsOne returns 1 if fp6 == 1, 0 otherwise +func (f *fp6) IsOne() int { + return f.A.IsOne() & f.B.IsZero() & f.B.IsZero() +} + +// Equal returns 1 if fp6 == rhs, 0 otherwise +func (f *fp6) Equal(rhs *fp6) int { + return f.A.Equal(&rhs.A) & f.B.Equal(&rhs.B) & f.C.Equal(&rhs.C) +} + +// CMove performs conditional select. 
+// selects arg1 if choice == 0 and arg2 if choice == 1 +func (f *fp6) CMove(arg1, arg2 *fp6, choice int) *fp6 { + f.A.CMove(&arg1.A, &arg2.A, choice) + f.B.CMove(&arg1.B, &arg2.B, choice) + f.C.CMove(&arg1.C, &arg2.C, choice) + return f +} diff --git a/core/curves/native/bls12381/fp6_test.go b/core/curves/native/bls12381/fp6_test.go new file mode 100755 index 0000000..82319d3 --- /dev/null +++ b/core/curves/native/bls12381/fp6_test.go @@ -0,0 +1,217 @@ +package bls12381 + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestFp6Arithmetic(t *testing.T) { + a := fp6{ + A: fp2{ + A: fp{ + 0x47f9cb98b1b82d58, + 0x5fe911eba3aa1d9d, + 0x96bf1b5f4dd81db3, + 0x8100d27cc9259f5b, + 0xafa20b9674640eab, + 0x09bbcea7d8d9497d, + }, + B: fp{ + 0x0303cb98b1662daa, + 0xd93110aa0a621d5a, + 0xbfa9820c5be4a468, + 0x0ba3643ecb05a348, + 0xdc3534bb1f1c25a6, + 0x06c305bb19c0e1c1, + }, + }, + B: fp2{ + A: fp{ + 0x46f9cb98b162d858, + 0x0be9109cf7aa1d57, + 0xc791bc55fece41d2, + 0xf84c57704e385ec2, + 0xcb49c1d9c010e60f, + 0x0acdb8e158bfe3c8, + }, + B: fp{ + 0x8aefcb98b15f8306, + 0x3ea1108fe4f21d54, + 0xcf79f69fa1b7df3b, + 0xe4f54aa1d16b1a3c, + 0xba5e4ef86105a679, + 0x0ed86c0797bee5cf, + }, + }, + C: fp2{ + A: fp{ + 0xcee5cb98b15c2db4, + 0x71591082d23a1d51, + 0xd76230e944a17ca4, + 0xd19e3dd3549dd5b6, + 0xa972dc1701fa66e3, + 0x12e31f2dd6bde7d6, + }, + B: fp{ + 0xad2acb98b1732d9d, + 0x2cfd10dd06961d64, + 0x07396b86c6ef24e8, + 0xbd76e2fdb1bfc820, + 0x6afea7f6de94d0d5, + 0x10994b0c5744c040, + }, + }, + } + b := fp6{ + A: fp2{ + A: fp{ + 0xf120cb98b16fd84b, + 0x5fb510cff3de1d61, + 0x0f21a5d069d8c251, + 0xaa1fd62f34f2839a, + 0x5a1335157f89913f, + 0x14a3fe329643c247, + }, + B: fp{ + 0x3516cb98b16c82f9, + 0x926d10c2e1261d5f, + 0x1709e01a0cc25fba, + 0x96c8c960b8253f14, + 0x4927c234207e51a9, + 0x18aeb158d542c44e, + }, + }, + B: fp2{ + A: fp{ + 0xbf0dcb98b16982fc, + 0xa67910b71d1a1d5c, + 0xb7c147c2b8fb06ff, + 0x1efa710d47d2e7ce, + 0xed20a79c7e27653c, + 0x02b85294dac1dfba, + }, + B: fp{ + 0x9d52cb98b18082e5, + 0x621d111151761d6f, + 0xe79882603b48af43, + 0x0ad31637a4f4da37, + 0xaeac737c5ac1cf2e, + 0x006e7e735b48b824, + }, + }, + C: fp2{ + A: fp{ + 0xe148cb98b17d2d93, + 0x94d511043ebe1d6c, + 0xef80bca9de324cac, + 0xf77c0969282795b1, + 0x9dc1009afbb68f97, + 0x047931999a47ba2b, + }, + B: fp{ + 0x253ecb98b179d841, + 0xc78d10f72c061d6a, + 0xf768f6f3811bea15, + 0xe424fc9aab5a512b, + 0x8cd58db99cab5001, + 0x0883e4bfd946bc32, + }, + }, + } + c := fp6{ + A: fp2{ + A: fp{ + 0x6934cb98b17682ef, + 0xfa4510ea194e1d67, + 0xff51313d2405877e, + 0xd0cdefcc2e8d0ca5, + 0x7bea1ad83da0106b, + 0x0c8e97e61845be39, + }, + B: fp{ + 0x4779cb98b18d82d8, + 0xb5e911444daa1d7a, + 0x2f286bdaa6532fc2, + 0xbca694f68baeff0f, + 0x3d75e6b81a3a7a5d, + 0x0a44c3c498cc96a3, + }, + }, + B: fp2{ + A: fp{ + 0x8b6fcb98b18a2d86, + 0xe8a111373af21d77, + 0x3710a624493ccd2b, + 0xa94f88280ee1ba89, + 0x2c8a73d6bb2f3ac7, + 0x0e4f76ead7cb98aa, + }, + B: fp{ + 0xcf65cb98b186d834, + 0x1b59112a283a1d74, + 0x3ef8e06dec266a95, + 0x95f87b5992147603, + 0x1b9f00f55c23fb31, + 0x125a2a1116ca9ab1, + }, + }, + C: fp2{ + A: fp{ + 0x135bcb98b18382e2, + 0x4e11111d15821d72, + 0x46e11ab78f1007fe, + 0x82a16e8b1547317d, + 0x0ab38e13fd18bb9b, + 0x1664dd3755c99cb8, + }, + B: fp{ + 0xce65cb98b1318334, + 0xc7590fdb7c3a1d2e, + 0x6fcb81649d1c8eb3, + 0x0d44004d1727356a, + 0x3746b738a7d0d296, + 0x136c144a96b134fc, + }, + }, + } + + d := new(fp6).Square(&a) + e := new(fp6).Mul(&a, &a) + require.Equal(t, 1, e.Equal(d)) + + d.Square(&b) + e.Mul(&b, &b) + require.Equal(t, 1, 
e.Equal(d)) + + d.Square(&c) + e.Mul(&c, &c) + require.Equal(t, 1, e.Equal(d)) + + // (a + b) * c^2 + d.Add(&a, &b) + d.Mul(d, new(fp6).Square(&c)) + + e.Mul(&c, &c) + e.Mul(e, &a) + tt := new(fp6).Mul(&c, &c) + tt.Mul(tt, &b) + e.Add(e, tt) + + require.Equal(t, 1, d.Equal(e)) + + _, wasInverted := d.Invert(&a) + require.Equal(t, 1, wasInverted) + _, wasInverted = e.Invert(&b) + require.Equal(t, 1, wasInverted) + + tt.Mul(&a, &b) + _, wasInverted = tt.Invert(tt) + require.Equal(t, 1, wasInverted) + d.Mul(d, e) + require.Equal(t, 1, tt.Equal(d)) + + _, _ = d.Invert(&a) + e.SetOne() + require.Equal(t, 1, e.Equal(d.Mul(d, &a))) +} diff --git a/core/curves/native/bls12381/fp_test.go b/core/curves/native/bls12381/fp_test.go new file mode 100644 index 0000000..225eaa6 --- /dev/null +++ b/core/curves/native/bls12381/fp_test.go @@ -0,0 +1,346 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package bls12381 + +import ( + crand "crypto/rand" + "math/big" + "math/rand" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/internal" +) + +func TestFpSetOne(t *testing.T) { + var fp fp + fp.SetOne() + require.NotNil(t, fp) + require.Equal(t, fp, r) +} + +func TestFpSetUint64(t *testing.T) { + var act fp + act.SetUint64(1 << 60) + require.NotNil(t, act) + // Remember it will be in montgomery form + require.Equal(t, act[0], uint64(0xf6ea9fde37db5e8c)) +} + +func TestFpAdd(t *testing.T) { + var lhs, rhs, exp, res fp + lhs.SetOne() + rhs.SetOne() + exp.SetUint64(2) + res.Add(&lhs, &rhs) + require.NotNil(t, res) + require.Equal(t, 1, res.Equal(&exp)) + + // Fuzz test + for i := 0; i < 25; i++ { + // Divide by 4 to prevent overflow false errors + l := rand.Uint64() >> 2 + r := rand.Uint64() >> 2 + e := l + r + lhs.SetUint64(l) + rhs.SetUint64(r) + exp.SetUint64(e) + + res.Add(&lhs, &rhs) + require.NotNil(t, res) + require.Equal(t, exp, res) + } +} + +func TestFpSub(t *testing.T) { + var lhs, rhs, exp, res fp + lhs.SetOne() + rhs.SetOne() + exp.SetZero() + res.Sub(&lhs, &rhs) + require.NotNil(t, res) + require.Equal(t, 1, res.Equal(&exp)) + + // Fuzz test + for i := 0; i < 25; i++ { + // Divide by 4 to prevent overflow false errors + l := rand.Uint64() >> 2 + r := rand.Uint64() >> 2 + if l < r { + l, r = r, l + } + e := l - r + lhs.SetUint64(l) + rhs.SetUint64(r) + exp.SetUint64(e) + + res.Sub(&lhs, &rhs) + require.NotNil(t, res) + require.Equal(t, exp, res) + } +} + +func TestFpMul(t *testing.T) { + var lhs, rhs, exp, res fp + lhs.SetOne() + rhs.SetOne() + exp.SetOne() + res.Mul(&lhs, &rhs) + require.NotNil(t, res) + require.Equal(t, 1, res.Equal(&exp)) + + // Fuzz test + for i := 0; i < 25; i++ { + // Divide by 4 to prevent overflow false errors + l := rand.Uint32() + r := rand.Uint32() + e := uint64(l) * uint64(r) + lhs.SetUint64(uint64(l)) + rhs.SetUint64(uint64(r)) + exp.SetUint64(e) + + res.Mul(&lhs, &rhs) + require.NotNil(t, res) + require.Equal(t, exp, res) + } +} + +func TestFpDouble(t *testing.T) { + var a, e, res fp + a.SetUint64(2) + e.SetUint64(4) + require.Equal(t, &e, res.Double(&a)) + + for i := 0; i < 25; i++ { + tv := rand.Uint32() + ttv := uint64(tv) * 2 + a.SetUint64(uint64(tv)) + e.SetUint64(ttv) + require.Equal(t, &e, res.Double(&a)) + } +} + +func TestFpSquare(t *testing.T) { + var a, e, res fp + a.SetUint64(4) + e.SetUint64(16) + require.Equal(t, 1, e.Equal(res.Square(&a))) + + a.SetUint64(2854263694) + e.SetUint64(8146821234886525636) + require.Equal(t, 1, e.Equal(res.Square(&a))) + + for i 
:= 0; i < 25; i++ { + j := rand.Uint32() + exp := uint64(j) * uint64(j) + e.SetUint64(exp) + a.SetUint64(uint64(j)) + require.Equal(t, 1, e.Equal(res.Square(&a)), "exp = %d, j = %d", exp, j) + } +} + +func TestFpNeg(t *testing.T) { + var g, a, e fp + g.SetLimbs(&[Limbs]uint64{7, 0, 0, 0, 0, 0}) + a.SetOne() + a.Neg(&a) + e.SetRaw( + &[Limbs]uint64{ + 0x43f5fffffffcaaae, + 0x32b7fff2ed47fffd, + 0x07e83a49a2e99d69, + 0xeca8f3318332bb7a, + 0xef148d1ea0f4c069, + 0x040ab3263eff0206, + }, + ) + require.Equal(t, 1, e.Equal(&a)) + a.Neg(&g) + e.SetRaw( + &[Limbs]uint64{ + 0x21baffffffe90017, + 0x445bffa5cba3ffed, + 0xd028c5627db257bc, + 0x14275ad5a2de0d96, + 0x3e7434202365960e, + 0x0249d4217f792796, + }, + ) + require.Equal(t, e, a) +} + +func TestFpExp(t *testing.T) { + var a, e, by fp + e.SetUint64(8) + a.SetUint64(2) + by.SetUint64(3) + require.Equal(t, &e, a.Exp(&a, &by)) +} + +func TestFpSqrt(t *testing.T) { + var t1, t2, t3 fp + t1.SetUint64(2) + t2.Neg(&t1) + t3.Square(&t1) + _, wasSquare := t3.Sqrt(&t3) + + require.Equal(t, 1, wasSquare) + require.Equal(t, 1, t1.Equal(&t3)|t2.Equal(&t3)) + t1.SetUint64(5) + _, wasSquare = t1.Sqrt(&t1) + require.Equal(t, 0, wasSquare) +} + +func TestFpInvert(t *testing.T) { + var two, twoInv, a, lhs, rhs, rhsInv fp + twoInv.SetRaw( + &[Limbs]uint64{ + 0x1804000000015554, + 0x855000053ab00001, + 0x633cb57c253c276f, + 0x6e22d1ec31ebb502, + 0xd3916126f2d14ca2, + 0x17fbb8571a006596, + }, + ) + two.SetUint64(2) + _, inverted := a.Invert(&two) + require.Equal(t, 1, inverted) + require.Equal(t, &a, &twoInv) + + lhs.SetUint64(9) + rhs.SetUint64(3) + _, inverted = rhsInv.Invert(&rhs) + require.Equal(t, 1, inverted) + require.Equal(t, &rhs, lhs.Mul(&lhs, &rhsInv)) + + rhs.SetZero() + _, inverted = lhs.Invert(&rhs) + require.Equal(t, 0, inverted) +} + +func TestFpCMove(t *testing.T) { + var t1, t2, tt fp + t1.SetUint64(5) + t2.SetUint64(10) + require.Equal(t, &t1, tt.CMove(&t1, &t2, 0)) + require.Equal(t, &t2, tt.CMove(&t1, &t2, 1)) +} + +func TestFpBytes(t *testing.T) { + var t1, t2 fp + t1.SetUint64(99) + seq := t1.Bytes() + _, suc := t2.SetBytes(&seq) + require.Equal(t, 1, suc) + require.Equal(t, t1, t2) + + for i := 0; i < 25; i++ { + t1.SetUint64(rand.Uint64()) + seq = t1.Bytes() + _, suc = t2.SetBytes(&seq) + require.Equal(t, 1, suc) + require.Equal(t, t1, t2) + } +} + +func TestFpBigInt(t *testing.T) { + var t1, t2, e fp + t1.SetBigInt(big.NewInt(9999)) + t2.SetBigInt(t1.BigInt()) + require.Equal(t, t1, t2) + + e.SetRaw( + &[Limbs]uint64{ + 0x922af810e5e35f31, + 0x6bc75973ed382d59, + 0xd4716c9d4d491d42, + 0x69d98d1ebeeb3f6e, + 0x7e425d7b46d4a82b, + 0x12d04b0965870e92, + }, + ) + b := new( + big.Int, + ).SetBytes([]byte{9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9}) + t1.SetBigInt(b) + require.Equal(t, e, t1) + e.Neg(&e) + b.Neg(b) + t1.SetBigInt(b) + require.Equal(t, e, t1) +} + +func TestFpSetBytesWideBigInt(t *testing.T) { + var a fp + var tv2 [96]byte + for i := 0; i < 25; i++ { + _, _ = crand.Read(tv2[:]) + e := new(big.Int).SetBytes(tv2[:]) + e.Mod(e, biModulus) + + tv := internal.ReverseScalarBytes(tv2[:]) + copy(tv2[:], tv) + a.SetBytesWide(&tv2) + require.Equal(t, 0, e.Cmp(a.BigInt())) + } +} + +func TestFpToMontgomery(t *testing.T) { + var v fp + v.SetUint64(2) + require.Equal( + t, + fp{ + 0x321300000006554f, + 0xb93c0018d6c40005, + 0x57605e0db0ddbb51, + 0x8b256521ed1f9bcb, + 0x6cf28d7901622c03, + 0x11ebab9dbb81e28c, + }, + v, + ) +} + +func 
TestFpFromMontgomery(t *testing.T) { + var v fp + e := fp{2, 0, 0, 0, 0, 0} + v.SetUint64(2) + v.fromMontgomery(&v) + require.Equal(t, e, v) +} + +func TestFpLexicographicallyLargest(t *testing.T) { + require.Equal(t, 0, new(fp).SetZero().LexicographicallyLargest()) + require.Equal(t, 0, new(fp).SetOne().LexicographicallyLargest()) + require.Equal(t, 0, (&fp{ + 0xa1fafffffffe5557, + 0x995bfff976a3fffe, + 0x03f41d24d174ceb4, + 0xf6547998c1995dbd, + 0x778a468f507a6034, + 0x020559931f7f8103, + }).LexicographicallyLargest()) + require.Equal(t, 1, (&fp{ + 0x1804000000015554, + 0x855000053ab00001, + 0x633cb57c253c276f, + 0x6e22d1ec31ebb502, + 0xd3916126f2d14ca2, + 0x17fbb8571a006596, + }).LexicographicallyLargest()) + require.Equal(t, 1, (&fp{ + 0x43f5fffffffcaaae, + 0x32b7fff2ed47fffd, + 0x07e83a49a2e99d69, + 0xeca8f3318332bb7a, + 0xef148d1ea0f4c069, + 0x040ab3263eff0206, + }).LexicographicallyLargest()) +} diff --git a/core/curves/native/bls12381/fq.go b/core/curves/native/bls12381/fq.go new file mode 100644 index 0000000..059e85d --- /dev/null +++ b/core/curves/native/bls12381/fq.go @@ -0,0 +1,455 @@ +package bls12381 + +import ( + "encoding/binary" + "math/big" + "sync" + + "github.com/sonr-io/sonr/crypto/core/curves/native" +) + +type Fq [native.FieldLimbs]uint64 + +var ( + bls12381FqInitonce sync.Once + bls12381FqParams native.FieldParams +) + +// 2^S * t = MODULUS - 1 with t odd +const fqS = 32 + +// qInv = -(q^{-1} mod 2^64) mod 2^64 +const qInv = 0xfffffffeffffffff + +// fqGenerator = 7 (multiplicative fqGenerator of r-1 order, that is also quadratic nonresidue) +var fqGenerator = [native.FieldLimbs]uint64{ + 0x0000000efffffff1, + 0x17e363d300189c0f, + 0xff9c57876f8457b0, + 0x351332208fc5a8c4, +} + +// fqModulus +var fqModulus = [native.FieldLimbs]uint64{ + 0xffffffff00000001, + 0x53bda402fffe5bfe, + 0x3339d80809a1d805, + 0x73eda753299d7d48, +} + +func Bls12381FqNew() *native.Field { + return &native.Field{ + Value: [native.FieldLimbs]uint64{}, + Params: getBls12381FqParams(), + Arithmetic: bls12381FqArithmetic{}, + } +} + +func bls12381FqParamsInit() { + bls12381FqParams = native.FieldParams{ + R: [native.FieldLimbs]uint64{ + 0x00000001fffffffe, + 0x5884b7fa00034802, + 0x998c4fefecbc4ff5, + 0x1824b159acc5056f, + }, + R2: [native.FieldLimbs]uint64{ + 0xc999e990f3f29c6d, + 0x2b6cedcb87925c23, + 0x05d314967254398f, + 0x0748d9d99f59ff11, + }, + R3: [native.FieldLimbs]uint64{ + 0xc62c1807439b73af, + 0x1b3e0d188cf06990, + 0x73d13c71c7b5f418, + 0x6e2a5bb9c8db33e9, + }, + Modulus: [native.FieldLimbs]uint64{ + 0xffffffff00000001, + 0x53bda402fffe5bfe, + 0x3339d80809a1d805, + 0x73eda753299d7d48, + }, + BiModulus: new(big.Int).SetBytes([]byte{ + 0x73, 0xed, 0xa7, 0x53, 0x29, 0x9d, 0x7d, 0x48, 0x33, 0x39, 0xd8, 0x08, 0x09, 0xa1, 0xd8, 0x05, 0x53, 0xbd, 0xa4, 0x02, 0xff, 0xfe, 0x5b, 0xfe, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x01, + }), + } +} + +func getBls12381FqParams() *native.FieldParams { + bls12381FqInitonce.Do(bls12381FqParamsInit) + return &bls12381FqParams +} + +// bls12381FqArithmetic is a struct with all the methods needed for working +// in mod q +type bls12381FqArithmetic struct{} + +// ToMontgomery converts this field to montgomery form +func (f bls12381FqArithmetic) ToMontgomery(out, arg *[native.FieldLimbs]uint64) { + // arg.R^0 * R^2 / R = arg.R + f.Mul(out, arg, &getBls12381FqParams().R2) +} + +// FromMontgomery converts this field from montgomery form +func (f bls12381FqArithmetic) FromMontgomery(out, arg *[native.FieldLimbs]uint64) { + // Mul by 1 is division by 2^256 mod 
q + // f.Mul(out, arg, &[native.FieldLimbs]uint64{1, 0, 0, 0}) + f.montReduce(out, &[native.FieldLimbs * 2]uint64{arg[0], arg[1], arg[2], arg[3], 0, 0, 0, 0}) +} + +// Neg performs modular negation +func (f bls12381FqArithmetic) Neg(out, arg *[native.FieldLimbs]uint64) { + // Subtract `arg` from `fqModulus`. Ignore final borrow + // since it can't underflow. + var t [native.FieldLimbs]uint64 + var borrow uint64 + t[0], borrow = sbb(fqModulus[0], arg[0], 0) + t[1], borrow = sbb(fqModulus[1], arg[1], borrow) + t[2], borrow = sbb(fqModulus[2], arg[2], borrow) + t[3], _ = sbb(fqModulus[3], arg[3], borrow) + + // t could be `fqModulus` if `arg`=0. Set mask=0 if self=0 + // and 0xff..ff if `arg`!=0 + mask := t[0] | t[1] | t[2] | t[3] + mask = -((mask | -mask) >> 63) + out[0] = t[0] & mask + out[1] = t[1] & mask + out[2] = t[2] & mask + out[3] = t[3] & mask +} + +// Square performs modular square +func (f bls12381FqArithmetic) Square(out, arg *[native.FieldLimbs]uint64) { + var r [2 * native.FieldLimbs]uint64 + var carry uint64 + + r[1], carry = mac(0, arg[0], arg[1], 0) + r[2], carry = mac(0, arg[0], arg[2], carry) + r[3], r[4] = mac(0, arg[0], arg[3], carry) + + r[3], carry = mac(r[3], arg[1], arg[2], 0) + r[4], r[5] = mac(r[4], arg[1], arg[3], carry) + + r[5], r[6] = mac(r[5], arg[2], arg[3], 0) + + r[7] = r[6] >> 63 + r[6] = (r[6] << 1) | r[5]>>63 + r[5] = (r[5] << 1) | r[4]>>63 + r[4] = (r[4] << 1) | r[3]>>63 + r[3] = (r[3] << 1) | r[2]>>63 + r[2] = (r[2] << 1) | r[1]>>63 + r[1] = r[1] << 1 + + r[0], carry = mac(0, arg[0], arg[0], 0) + r[1], carry = adc(0, r[1], carry) + r[2], carry = mac(r[2], arg[1], arg[1], carry) + r[3], carry = adc(0, r[3], carry) + r[4], carry = mac(r[4], arg[2], arg[2], carry) + r[5], carry = adc(0, r[5], carry) + r[6], carry = mac(r[6], arg[3], arg[3], carry) + r[7], _ = adc(0, r[7], carry) + + f.montReduce(out, &r) +} + +// Mul performs modular multiplication +func (f bls12381FqArithmetic) Mul(out, arg1, arg2 *[native.FieldLimbs]uint64) { + // Schoolbook multiplication + var r [2 * native.FieldLimbs]uint64 + var carry uint64 + + r[0], carry = mac(0, arg1[0], arg2[0], 0) + r[1], carry = mac(0, arg1[0], arg2[1], carry) + r[2], carry = mac(0, arg1[0], arg2[2], carry) + r[3], r[4] = mac(0, arg1[0], arg2[3], carry) + + r[1], carry = mac(r[1], arg1[1], arg2[0], 0) + r[2], carry = mac(r[2], arg1[1], arg2[1], carry) + r[3], carry = mac(r[3], arg1[1], arg2[2], carry) + r[4], r[5] = mac(r[4], arg1[1], arg2[3], carry) + + r[2], carry = mac(r[2], arg1[2], arg2[0], 0) + r[3], carry = mac(r[3], arg1[2], arg2[1], carry) + r[4], carry = mac(r[4], arg1[2], arg2[2], carry) + r[5], r[6] = mac(r[5], arg1[2], arg2[3], carry) + + r[3], carry = mac(r[3], arg1[3], arg2[0], 0) + r[4], carry = mac(r[4], arg1[3], arg2[1], carry) + r[5], carry = mac(r[5], arg1[3], arg2[2], carry) + r[6], r[7] = mac(r[6], arg1[3], arg2[3], carry) + + f.montReduce(out, &r) +} + +// Add performs modular addition +func (f bls12381FqArithmetic) Add(out, arg1, arg2 *[native.FieldLimbs]uint64) { + var t [native.FieldLimbs]uint64 + var carry uint64 + + t[0], carry = adc(arg1[0], arg2[0], 0) + t[1], carry = adc(arg1[1], arg2[1], carry) + t[2], carry = adc(arg1[2], arg2[2], carry) + t[3], _ = adc(arg1[3], arg2[3], carry) + + // Subtract the fqModulus to ensure the value + // is smaller. 
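+	// Both summands are fully reduced, so t < 2*q here and the single
+	// conditional subtraction below (Sub re-adds q only when it borrows)
+	// brings the result back into range.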
+ f.Sub(out, &t, &fqModulus) +} + +// Sub performs modular subtraction +func (f bls12381FqArithmetic) Sub(out, arg1, arg2 *[native.FieldLimbs]uint64) { + d0, borrow := sbb(arg1[0], arg2[0], 0) + d1, borrow := sbb(arg1[1], arg2[1], borrow) + d2, borrow := sbb(arg1[2], arg2[2], borrow) + d3, borrow := sbb(arg1[3], arg2[3], borrow) + + // If underflow occurred on the final limb, borrow 0xff...ff, otherwise + // borrow = 0x00...00. Conditionally mask to add the fqModulus + borrow = -borrow + d0, carry := adc(d0, fqModulus[0]&borrow, 0) + d1, carry = adc(d1, fqModulus[1]&borrow, carry) + d2, carry = adc(d2, fqModulus[2]&borrow, carry) + d3, _ = adc(d3, fqModulus[3]&borrow, carry) + + out[0] = d0 + out[1] = d1 + out[2] = d2 + out[3] = d3 +} + +// Sqrt performs modular square root +func (f bls12381FqArithmetic) Sqrt(wasSquare *int, out, arg *[native.FieldLimbs]uint64) { + // See sqrt_ts_ct at + // https://datatracker.ietf.org/doc/html/draft-irtf-cfrg-hash-to-curve-11#appendix-I.4 + // c1 := fqS + // c2 := (q - 1) / (2^c1) + c2 := [4]uint64{ + 0xfffe5bfeffffffff, + 0x09a1d80553bda402, + 0x299d7d483339d808, + 0x0000000073eda753, + } + // c3 := (c2 - 1) / 2 + c3 := [native.FieldLimbs]uint64{ + 0x7fff2dff7fffffff, + 0x04d0ec02a9ded201, + 0x94cebea4199cec04, + 0x0000000039f6d3a9, + } + // c4 := fqGenerator + var c5 [native.FieldLimbs]uint64 + native.Pow(&c5, &fqGenerator, &c2, getBls12381FqParams(), f) + // c5 := [native.FieldLimbs]uint64{0x1015708f7e368fe1, 0x31c6c5456ecc4511, 0x5281fe8998a19ea1, 0x0279089e10c63fe8} + var z, t, b, c, tv [native.FieldLimbs]uint64 + + native.Pow(&z, arg, &c3, getBls12381FqParams(), f) + f.Square(&t, &z) + f.Mul(&t, &t, arg) + f.Mul(&z, &z, arg) + + copy(b[:], t[:]) + copy(c[:], c5[:]) + + for i := fqS; i >= 2; i-- { + for j := 1; j <= i-2; j++ { + f.Square(&b, &b) + } + // if b == 1 flag = 0 else flag = 1 + flag := -(&native.Field{ + Value: b, + Params: getBls12381FqParams(), + Arithmetic: f, + }).IsOne() + 1 + f.Mul(&tv, &z, &c) + f.Selectznz(&z, &z, &tv, flag) + f.Square(&c, &c) + f.Mul(&tv, &t, &c) + f.Selectznz(&t, &t, &tv, flag) + copy(b[:], t[:]) + } + f.Square(&c, &z) + *wasSquare = (&native.Field{ + Value: c, + Params: getBls12381FqParams(), + Arithmetic: f, + }).Equal(&native.Field{ + Value: *arg, + Params: getBls12381FqParams(), + Arithmetic: f, + }) + f.Selectznz(out, out, &z, *wasSquare) +} + +// Invert performs modular inverse +func (f bls12381FqArithmetic) Invert(wasInverted *int, out, arg *[native.FieldLimbs]uint64) { + // Using an addition chain from + // https://github.com/kwantam/addchain + var t0, t1, t2, t3, t4, t5, t6, t7, t8 [native.FieldLimbs]uint64 + var t9, t11, t12, t13, t14, t15, t16, t17 [native.FieldLimbs]uint64 + + f.Square(&t0, arg) + f.Mul(&t1, &t0, arg) + f.Square(&t16, &t0) + f.Square(&t6, &t16) + f.Mul(&t5, &t6, &t0) + f.Mul(&t0, &t6, &t16) + f.Mul(&t12, &t5, &t16) + f.Square(&t2, &t6) + f.Mul(&t7, &t5, &t6) + f.Mul(&t15, &t0, &t5) + f.Square(&t17, &t12) + f.Mul(&t1, &t1, &t17) + f.Mul(&t3, &t7, &t2) + f.Mul(&t8, &t1, &t17) + f.Mul(&t4, &t8, &t2) + f.Mul(&t9, &t8, &t7) + f.Mul(&t7, &t4, &t5) + f.Mul(&t11, &t4, &t17) + f.Mul(&t5, &t9, &t17) + f.Mul(&t14, &t7, &t15) + f.Mul(&t13, &t11, &t12) + f.Mul(&t12, &t11, &t17) + f.Mul(&t15, &t15, &t12) + f.Mul(&t16, &t16, &t15) + f.Mul(&t3, &t3, &t16) + f.Mul(&t17, &t17, &t3) + f.Mul(&t0, &t0, &t17) + f.Mul(&t6, &t6, &t0) + f.Mul(&t2, &t2, &t6) + native.Pow2k(&t0, &t0, 8, f) + f.Mul(&t0, &t0, &t17) + native.Pow2k(&t0, &t0, 9, f) + f.Mul(&t0, &t0, &t16) + native.Pow2k(&t0, &t0, 9, f) + f.Mul(&t0, 
&t0, &t15) + native.Pow2k(&t0, &t0, 9, f) + f.Mul(&t0, &t0, &t15) + native.Pow2k(&t0, &t0, 7, f) + f.Mul(&t0, &t0, &t14) + native.Pow2k(&t0, &t0, 7, f) + f.Mul(&t0, &t0, &t13) + native.Pow2k(&t0, &t0, 10, f) + f.Mul(&t0, &t0, &t12) + native.Pow2k(&t0, &t0, 9, f) + f.Mul(&t0, &t0, &t11) + native.Pow2k(&t0, &t0, 8, f) + f.Mul(&t0, &t0, &t8) + native.Pow2k(&t0, &t0, 8, f) + f.Mul(&t0, &t0, arg) + native.Pow2k(&t0, &t0, 14, f) + f.Mul(&t0, &t0, &t9) + native.Pow2k(&t0, &t0, 10, f) + f.Mul(&t0, &t0, &t8) + native.Pow2k(&t0, &t0, 15, f) + f.Mul(&t0, &t0, &t7) + native.Pow2k(&t0, &t0, 10, f) + f.Mul(&t0, &t0, &t6) + native.Pow2k(&t0, &t0, 8, f) + f.Mul(&t0, &t0, &t5) + native.Pow2k(&t0, &t0, 16, f) + f.Mul(&t0, &t0, &t3) + native.Pow2k(&t0, &t0, 8, f) + f.Mul(&t0, &t0, &t2) + native.Pow2k(&t0, &t0, 7, f) + f.Mul(&t0, &t0, &t4) + native.Pow2k(&t0, &t0, 9, f) + f.Mul(&t0, &t0, &t2) + native.Pow2k(&t0, &t0, 8, f) + f.Mul(&t0, &t0, &t3) + native.Pow2k(&t0, &t0, 8, f) + f.Mul(&t0, &t0, &t2) + native.Pow2k(&t0, &t0, 8, f) + f.Mul(&t0, &t0, &t2) + native.Pow2k(&t0, &t0, 8, f) + f.Mul(&t0, &t0, &t2) + native.Pow2k(&t0, &t0, 8, f) + f.Mul(&t0, &t0, &t3) + native.Pow2k(&t0, &t0, 8, f) + f.Mul(&t0, &t0, &t2) + native.Pow2k(&t0, &t0, 8, f) + f.Mul(&t0, &t0, &t2) + native.Pow2k(&t0, &t0, 5, f) + f.Mul(&t0, &t0, &t1) + native.Pow2k(&t0, &t0, 5, f) + f.Mul(&t0, &t0, &t1) + + *wasInverted = (&native.Field{ + Value: *arg, + Params: getBls12381FqParams(), + Arithmetic: f, + }).IsNonZero() + f.Selectznz(out, out, &t0, *wasInverted) +} + +// FromBytes converts a little endian byte array into a field element +func (f bls12381FqArithmetic) FromBytes( + out *[native.FieldLimbs]uint64, + arg *[native.FieldBytes]byte, +) { + out[0] = binary.LittleEndian.Uint64(arg[:8]) + out[1] = binary.LittleEndian.Uint64(arg[8:16]) + out[2] = binary.LittleEndian.Uint64(arg[16:24]) + out[3] = binary.LittleEndian.Uint64(arg[24:]) +} + +// ToBytes converts a field element to a little endian byte array +func (f bls12381FqArithmetic) ToBytes( + out *[native.FieldBytes]byte, + arg *[native.FieldLimbs]uint64, +) { + binary.LittleEndian.PutUint64(out[:8], arg[0]) + binary.LittleEndian.PutUint64(out[8:16], arg[1]) + binary.LittleEndian.PutUint64(out[16:24], arg[2]) + binary.LittleEndian.PutUint64(out[24:], arg[3]) +} + +// Selectznz performs conditional select. 
+// selects arg1 if choice == 0 and arg2 if choice == 1 +func (f bls12381FqArithmetic) Selectznz(out, arg1, arg2 *[native.FieldLimbs]uint64, choice int) { + b := uint64(-choice) + out[0] = arg1[0] ^ ((arg1[0] ^ arg2[0]) & b) + out[1] = arg1[1] ^ ((arg1[1] ^ arg2[1]) & b) + out[2] = arg1[2] ^ ((arg1[2] ^ arg2[2]) & b) + out[3] = arg1[3] ^ ((arg1[3] ^ arg2[3]) & b) +} + +func (f bls12381FqArithmetic) montReduce( + out *[native.FieldLimbs]uint64, + r *[2 * native.FieldLimbs]uint64, +) { + // Taken from Algorithm 14.32 in Handbook of Applied Cryptography + var r1, r2, r3, r4, r5, r6, carry, carry2, k uint64 + var rr [native.FieldLimbs]uint64 + + k = r[0] * qInv + _, carry = mac(r[0], k, fqModulus[0], 0) + r1, carry = mac(r[1], k, fqModulus[1], carry) + r2, carry = mac(r[2], k, fqModulus[2], carry) + r3, carry = mac(r[3], k, fqModulus[3], carry) + r4, carry2 = adc(r[4], 0, carry) + + k = r1 * qInv + _, carry = mac(r1, k, fqModulus[0], 0) + r2, carry = mac(r2, k, fqModulus[1], carry) + r3, carry = mac(r3, k, fqModulus[2], carry) + r4, carry = mac(r4, k, fqModulus[3], carry) + r5, carry2 = adc(r[5], carry2, carry) + + k = r2 * qInv + _, carry = mac(r2, k, fqModulus[0], 0) + r3, carry = mac(r3, k, fqModulus[1], carry) + r4, carry = mac(r4, k, fqModulus[2], carry) + r5, carry = mac(r5, k, fqModulus[3], carry) + r6, carry2 = adc(r[6], carry2, carry) + + k = r3 * qInv + _, carry = mac(r3, k, fqModulus[0], 0) + rr[0], carry = mac(r4, k, fqModulus[1], carry) + rr[1], carry = mac(r5, k, fqModulus[2], carry) + rr[2], carry = mac(r6, k, fqModulus[3], carry) + rr[3], _ = adc(r[7], carry2, carry) + + f.Sub(out, &rr, &fqModulus) +} diff --git a/core/curves/native/bls12381/fq_test.go b/core/curves/native/bls12381/fq_test.go new file mode 100644 index 0000000..431a2b6 --- /dev/null +++ b/core/curves/native/bls12381/fq_test.go @@ -0,0 +1,353 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package bls12381 + +import ( + crand "crypto/rand" + "math/big" + "math/rand" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core/curves/native" + "github.com/sonr-io/sonr/crypto/internal" +) + +func TestFqSetOne(t *testing.T) { + fq := Bls12381FqNew().SetOne() + require.NotNil(t, fq) + require.Equal(t, fq.Value, getBls12381FqParams().R) +} + +func TestFqSetUint64(t *testing.T) { + act := Bls12381FqNew().SetUint64(1 << 60) + require.NotNil(t, act) + // Remember it will be in montgomery form + require.Equal(t, act.Value[0], uint64(0xbc98da2820121c89)) +} + +func TestFqAdd(t *testing.T) { + lhs := Bls12381FqNew().SetOne() + rhs := Bls12381FqNew().SetOne() + exp := Bls12381FqNew().SetUint64(2) + res := Bls12381FqNew().Add(lhs, rhs) + require.NotNil(t, res) + require.Equal(t, 1, res.Equal(exp)) + + // Fuzz test + for i := 0; i < 25; i++ { + // Divide by 4 to prevent overflow false errors + l := rand.Uint64() >> 2 + r := rand.Uint64() >> 2 + e := l + r + lhs.SetUint64(l) + rhs.SetUint64(r) + exp.SetUint64(e) + + a := Bls12381FqNew().Add(lhs, rhs) + require.NotNil(t, a) + require.Equal(t, exp, a) + } +} + +func TestFqSub(t *testing.T) { + lhs := Bls12381FqNew().SetOne() + rhs := Bls12381FqNew().SetOne() + exp := Bls12381FqNew().SetZero() + res := Bls12381FqNew().Sub(lhs, rhs) + require.NotNil(t, res) + require.Equal(t, 1, res.Equal(exp)) + + // Fuzz test + for i := 0; i < 25; i++ { + // Divide by 4 to prevent overflow false errors + l := rand.Uint64() >> 2 + r := rand.Uint64() >> 2 + if l < r { + l, r = r, l + } + e := l - r + lhs.SetUint64(l) + rhs.SetUint64(r) + exp.SetUint64(e) + + a := Bls12381FqNew().Sub(lhs, rhs) + require.NotNil(t, a) + require.Equal(t, exp, a) + } +} + +func TestFqMul(t *testing.T) { + lhs := Bls12381FqNew().SetOne() + rhs := Bls12381FqNew().SetOne() + exp := Bls12381FqNew().SetOne() + res := Bls12381FqNew().Mul(lhs, rhs) + require.NotNil(t, res) + require.Equal(t, 1, res.Equal(exp)) + + // Fuzz test + for i := 0; i < 25; i++ { + // Divide by 4 to prevent overflow false errors + l := rand.Uint32() + r := rand.Uint32() + e := uint64(l) * uint64(r) + lhs.SetUint64(uint64(l)) + rhs.SetUint64(uint64(r)) + exp.SetUint64(e) + + a := Bls12381FqNew().Mul(lhs, rhs) + require.NotNil(t, a) + require.Equal(t, exp, a) + } +} + +func TestFqDouble(t *testing.T) { + a := Bls12381FqNew().SetUint64(2) + e := Bls12381FqNew().SetUint64(4) + require.Equal(t, e, Bls12381FqNew().Double(a)) + + for i := 0; i < 25; i++ { + tv := rand.Uint32() + ttv := uint64(tv) * 2 + a = Bls12381FqNew().SetUint64(uint64(tv)) + e = Bls12381FqNew().SetUint64(ttv) + require.Equal(t, e, Bls12381FqNew().Double(a)) + } +} + +func TestFqSquare(t *testing.T) { + a := Bls12381FqNew().SetUint64(4) + e := Bls12381FqNew().SetUint64(16) + require.Equal(t, 1, e.Equal(a.Square(a))) + + a.SetUint64(2854263694) + e.SetUint64(8146821234886525636) + require.Equal(t, 1, e.Equal(a.Square(a))) + + for i := 0; i < 25; i++ { + j := rand.Uint32() + exp := uint64(j) * uint64(j) + e.SetUint64(exp) + a.SetUint64(uint64(j)) + require.Equal(t, 1, e.Equal(a.Square(a)), "exp = %d, j = %d", exp, j) + } +} + +func TestFqNeg(t *testing.T) { + g := Bls12381FqNew().SetRaw(&fqGenerator) + a := Bls12381FqNew().SetOne() + a.Neg(a) + e := Bls12381FqNew().SetRaw(&[native.FieldLimbs]uint64{0xfffffffd00000003, 0xfb38ec08fffb13fc, 0x99ad88181ce5880f, 0x5bc8f5f97cd877d8}) + require.Equal(t, 1, e.Equal(a)) + a.Neg(g) + e = 
Bls12381FqNew().SetRaw(&[native.FieldLimbs]uint64{0xfffffff000000010, 0x3bda402fffe5bfef, 0x339d80809a1d8055, 0x3eda753299d7d483}) + require.Equal(t, e, a) +} + +func TestFqExp(t *testing.T) { + e := Bls12381FqNew().SetUint64(8) + a := Bls12381FqNew().SetUint64(2) + by := Bls12381FqNew().SetUint64(3) + require.Equal(t, e, a.Exp(a, by)) +} + +func TestFqSqrt(t *testing.T) { + t1 := Bls12381FqNew().SetUint64(2) + t2 := Bls12381FqNew().Neg(t1) + t3 := Bls12381FqNew().Square(t1) + _, wasSquare := t3.Sqrt(t3) + + require.True(t, wasSquare) + require.Equal(t, 1, t1.Equal(t3)|t2.Equal(t3)) + t1.SetUint64(5) + _, wasSquare = Bls12381FqNew().Sqrt(t1) + require.False(t, wasSquare) +} + +func TestFqInvert(t *testing.T) { + twoInv := Bls12381FqNew().SetRaw(&[native.FieldLimbs]uint64{0xffffffff, 0xac425bfd0001a401, 0xccc627f7f65e27fa, 0xc1258acd66282b7}) + two := Bls12381FqNew().SetUint64(2) + a, inverted := Bls12381FqNew().Invert(two) + require.True(t, inverted) + require.Equal(t, a, twoInv) + + rootOfUnity := Bls12381FqNew().SetRaw(&[native.FieldLimbs]uint64{0xb9b58d8c5f0e466a, 0x5b1b4c801819d7ec, 0x0af53ae352a31e64, 0x5bf3adda19e9b27b}) + rootOfUnityInv := Bls12381FqNew().SetRaw(&[native.FieldLimbs]uint64{0x4256481adcf3219a, 0x45f37b7f96b6cad3, 0xf9c3f1d75f7a3b27, 0x2d2fc049658afd43}) + a, inverted = Bls12381FqNew().Invert(rootOfUnity) + require.True(t, inverted) + require.Equal(t, a, rootOfUnityInv) + + lhs := Bls12381FqNew().SetUint64(9) + rhs := Bls12381FqNew().SetUint64(3) + rhsInv, inverted := Bls12381FqNew().Invert(rhs) + require.True(t, inverted) + require.Equal(t, rhs, Bls12381FqNew().Mul(lhs, rhsInv)) + + rhs.SetZero() + _, inverted = Bls12381FqNew().Invert(rhs) + require.False(t, inverted) +} + +func TestFqCMove(t *testing.T) { + t1 := Bls12381FqNew().SetUint64(5) + t2 := Bls12381FqNew().SetUint64(10) + require.Equal(t, t1, Bls12381FqNew().CMove(t1, t2, 0)) + require.Equal(t, t2, Bls12381FqNew().CMove(t1, t2, 1)) +} + +func TestFqBytes(t *testing.T) { + t1 := Bls12381FqNew().SetUint64(99) + seq := t1.Bytes() + t2, err := Bls12381FqNew().SetBytes(&seq) + require.NoError(t, err) + require.Equal(t, t1, t2) + + for i := 0; i < 25; i++ { + t1.SetUint64(rand.Uint64()) + seq = t1.Bytes() + _, err = t2.SetBytes(&seq) + require.NoError(t, err) + require.Equal(t, t1, t2) + } +} + +func TestFqCmp(t *testing.T) { + tests := []struct { + a *native.Field + b *native.Field + e int + }{ + { + a: Bls12381FqNew().SetRaw(&[native.FieldLimbs]uint64{2731658267414164836, 14655288906067898431, 6537465423330262322, 8306191141697566219}), + b: Bls12381FqNew().SetRaw(&[native.FieldLimbs]uint64{6472764012681988529, 10848812988401906064, 2961825807536828898, 4282183981941645679}), + e: 1, + }, + { + a: Bls12381FqNew().SetRaw(&[native.FieldLimbs]uint64{8023004109510539223, 4652004072850285717, 1877219145646046927, 383214385093921911}), + b: Bls12381FqNew().SetRaw(&[native.FieldLimbs]uint64{10099384440823804262, 16139476942229308465, 8636966320777393798, 5435928725024696785}), + e: -1, + }, + { + a: Bls12381FqNew().SetRaw(&[native.FieldLimbs]uint64{3741840066202388211, 12165774400417314871, 16619312580230515379, 16195032234110087705}), + b: Bls12381FqNew().SetRaw(&[native.FieldLimbs]uint64{3905865991286066744, 543690822309071825, 17963103015950210055, 3745476720756119742}), + e: 1, + }, + { + a: Bls12381FqNew().SetRaw(&[native.FieldLimbs]uint64{16660853697936147788, 7799793619412111108, 13515141085171033220, 2641079731236069032}), + b: Bls12381FqNew().SetRaw(&[native.FieldLimbs]uint64{17790588295388238399, 
571847801379669440, 14537208974498222469, 12792570372087452754}), + e: -1, + }, + { + a: Bls12381FqNew().SetRaw(&[native.FieldLimbs]uint64{3912839285384959186, 2701177075110484070, 6453856448115499033, 6475797457962597458}), + b: Bls12381FqNew().SetRaw(&[native.FieldLimbs]uint64{1282566391665688512, 13503640416992806563, 2962240104675990153, 3374904770947067689}), + e: 1, + }, + { + a: Bls12381FqNew().SetRaw(&[native.FieldLimbs]uint64{5716631803409360103, 7859567470082614154, 12747956220853330146, 18434584096087315020}), + b: Bls12381FqNew().SetRaw(&[native.FieldLimbs]uint64{16317076441459028418, 12854146980376319601, 2258436689269031143, 9531877130792223752}), + e: 1, + }, + { + a: Bls12381FqNew().SetRaw(&[native.FieldLimbs]uint64{17955191469941083403, 10350326247207200880, 17263512235150705075, 12700328451238078022}), + b: Bls12381FqNew().SetRaw(&[native.FieldLimbs]uint64{6767595547459644695, 7146403825494928147, 12269344038346710612, 9122477829383225603}), + e: 1, + }, + { + a: Bls12381FqNew().SetRaw(&[native.FieldLimbs]uint64{17099388671847024438, 6426264987820696548, 10641143464957227405, 7709745403700754098}), + b: Bls12381FqNew().SetRaw(&[native.FieldLimbs]uint64{10799154372990268556, 17178492485719929374, 5705777922258988797, 8051037767683567782}), + e: -1, + }, + { + a: Bls12381FqNew().SetRaw(&[native.FieldLimbs]uint64{4567139260680454325, 1629385880182139061, 16607020832317899145, 1261011562621553200}), + b: Bls12381FqNew().SetRaw(&[native.FieldLimbs]uint64{13487234491304534488, 17872642955936089265, 17651026784972590233, 9468934643333871559}), + e: -1, + }, + { + a: Bls12381FqNew().SetRaw(&[native.FieldLimbs]uint64{18071070103467571798, 11787850505799426140, 10631355976141928593, 4867785203635092610}), + b: Bls12381FqNew().SetRaw(&[native.FieldLimbs]uint64{12596443599426461624, 10176122686151524591, 17075755296887483439, 6726169532695070719}), + e: -1, + }, + } + + for _, test := range tests { + require.Equal(t, test.e, test.a.Cmp(test.b)) + require.Equal(t, -test.e, test.b.Cmp(test.a)) + require.Equal(t, 0, test.a.Cmp(test.a)) + require.Equal(t, 0, test.b.Cmp(test.b)) + } +} + +func TestFqBigInt(t *testing.T) { + t1 := Bls12381FqNew().SetBigInt(big.NewInt(9999)) + t2 := Bls12381FqNew().SetBigInt(t1.BigInt()) + require.Equal(t, t1, t2) + + e := Bls12381FqNew().SetRaw(&[native.FieldLimbs]uint64{0x673053fc60e06500, 0x86e6d480b4f76ada, 0x7fc68f9fefa23291, 0x3fb17f49bdda126d}) + b := new( + big.Int, + ).SetBytes([]byte{9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9}) + t1.SetBigInt(b) + require.Equal(t, e, t1) + e.Neg(e) + b.Neg(b) + t1.SetBigInt(b) + require.Equal(t, e, t1) +} + +func TestFqSetBytesWide(t *testing.T) { + e := Bls12381FqNew().SetRaw(&[native.FieldLimbs]uint64{0xc759fba87ff8c5a6, 0x9ef5194839e7df44, 0x21375d22b678bf0e, 0x38b105387033fd57}) + + a := Bls12381FqNew().SetBytesWide(&[64]byte{ + 0x69, 0x23, 0x5a, 0x0b, 0xce, 0x0c, 0xa8, 0x64, + 0x3c, 0x78, 0xbc, 0x01, 0x05, 0xef, 0xf2, 0x84, + 0xde, 0xbb, 0x6b, 0xc8, 0x63, 0x5e, 0x6e, 0x69, + 0x62, 0xcc, 0xc6, 0x2d, 0xf5, 0x72, 0x40, 0x92, + 0x28, 0x11, 0xd6, 0xc8, 0x07, 0xa5, 0x88, 0x82, + 0xfe, 0xe3, 0x97, 0xf6, 0x1e, 0xfb, 0x2e, 0x3b, + 0x27, 0x5f, 0x85, 0x06, 0x8d, 0x99, 0xa4, 0x75, + 0xc0, 0x2c, 0x71, 0x69, 0x9e, 0x58, 0xea, 0x52, + }) + require.Equal(t, e, a) +} + +func TestFqSetBytesWideBigInt(t *testing.T) { + params := getBls12381FqParams() + var tv2 [64]byte + for i := 0; i < 25; i++ { + _, _ = crand.Read(tv2[:]) + e 
:= new(big.Int).SetBytes(tv2[:]) + e.Mod(e, params.BiModulus) + + tv := internal.ReverseScalarBytes(tv2[:]) + copy(tv2[:], tv) + a := Bls12381FqNew().SetBytesWide(&tv2) + require.Equal(t, 0, e.Cmp(a.BigInt())) + } +} + +func TestFqToMontgomery(t *testing.T) { + v := Bls12381FqNew().SetUint64(2) + require.Equal( + t, + [native.FieldLimbs]uint64{ + 0x3fffffffc, + 0xb1096ff400069004, + 0x33189fdfd9789fea, + 0x304962b3598a0adf, + }, + v.Value, + ) +} + +func TestFqFromMontgomery(t *testing.T) { + e := [native.FieldLimbs]uint64{2, 0, 0, 0} + a := [native.FieldLimbs]uint64{0, 0, 0, 0} + v := Bls12381FqNew().SetUint64(2) + v.Arithmetic.FromMontgomery(&a, &v.Value) + require.Equal(t, e, a) +} diff --git a/core/curves/native/bls12381/g1.go b/core/curves/native/bls12381/g1.go new file mode 100644 index 0000000..ee3c090 --- /dev/null +++ b/core/curves/native/bls12381/g1.go @@ -0,0 +1,1123 @@ +package bls12381 + +import ( + "fmt" + "io" + "math/big" + + "github.com/pkg/errors" + + "github.com/sonr-io/sonr/crypto/core/curves/native" + "github.com/sonr-io/sonr/crypto/internal" +) + +var ( + g1x = fp{ + 0x5cb38790fd530c16, + 0x7817fc679976fff5, + 0x154f95c7143ba1c1, + 0xf0ae6acdf3d0e747, + 0xedce6ecc21dbf440, + 0x120177419e0bfb75, + } + g1y = fp{ + 0xbaac93d50ce72271, + 0x8c22631a7918fd8e, + 0xdd595f13570725ce, + 0x51ac582950405194, + 0x0e1c8c3fad0059c0, + 0x0bbc3efc5008a26a, + } + curveG1B = fp{ + 0xaa270000000cfff3, + 0x53cc0032fc34000a, + 0x478fe97a6b0a807f, + 0xb1d37ebee6ba24d7, + 0x8ec9733bbf78ab2f, + 0x09d645513d83de7e, + } + osswuMapA = fp{ + 0x2f65aa0e9af5aa51, + 0x86464c2d1e8416c3, + 0xb85ce591b7bd31e2, + 0x27e11c91b5f24e7c, + 0x28376eda6bfc1835, + 0x155455c3e5071d85, + } + osswuMapB = fp{ + 0xfb996971fe22a1e0, + 0x9aa93eb35b742d6f, + 0x8c476013de99c5c4, + 0x873e27c3a221e571, + 0xca72b5e45a52d888, + 0x06824061418a386b, + } + osswuMapC1 = fp{ + 0xee7fbfffffffeaaa, + 0x07aaffffac54ffff, + 0xd9cc34a83dac3d89, + 0xd91dd2e13ce144af, + 0x92c6e9ed90d2eb35, + 0x0680447a8e5ff9a6, + } + osswuMapC2 = fp{ + 0x43b571cad3215f1f, + 0xccb460ef1c702dc2, + 0x742d884f4f97100b, + 0xdb2c3e3238a3382b, + 0xe40f3fa13fce8f88, + 0x0073a2af9892a2ff, + } + oswwuMapZ = fp{ + 0x886c00000023ffdc, + 0x0f70008d3090001d, + 0x77672417ed5828c3, + 0x9dac23e943dc1740, + 0x50553f1b9c131521, + 0x078c712fbe0ab6e8, + } + oswwuMapXd1 = *((&fp{}).Mul(&oswwuMapZ, &osswuMapA)) + negOsswuMapA = *(&fp{}).Neg(&osswuMapA) + + g1IsoXNum = []fp{ + { + 0x4d18b6f3af00131c, + 0x19fa219793fee28c, + 0x3f2885f1467f19ae, + 0x23dcea34f2ffb304, + 0xd15b58d2ffc00054, + 0x0913be200a20bef4, + }, + { + 0x898985385cdbbd8b, + 0x3c79e43cc7d966aa, + 0x1597e193f4cd233a, + 0x8637ef1e4d6623ad, + 0x11b22deed20d827b, + 0x07097bc5998784ad, + }, + { + 0xa542583a480b664b, + 0xfc7169c026e568c6, + 0x5ba2ef314ed8b5a6, + 0x5b5491c05102f0e7, + 0xdf6e99707d2a0079, + 0x0784151ed7605524, + }, + { + 0x494e212870f72741, + 0xab9be52fbda43021, + 0x26f5577994e34c3d, + 0x049dfee82aefbd60, + 0x65dadd7828505289, + 0x0e93d431ea011aeb, + }, + { + 0x90ee774bd6a74d45, + 0x7ada1c8a41bfb185, + 0x0f1a8953b325f464, + 0x104c24211be4805c, + 0x169139d319ea7a8f, + 0x09f20ead8e532bf6, + }, + { + 0x6ddd93e2f43626b7, + 0xa5482c9aa1ccd7bd, + 0x143245631883f4bd, + 0x2e0a94ccf77ec0db, + 0xb0282d480e56489f, + 0x18f4bfcbb4368929, + }, + { + 0x23c5f0c953402dfd, + 0x7a43ff6958ce4fe9, + 0x2c390d3d2da5df63, + 0xd0df5c98e1f9d70f, + 0xffd89869a572b297, + 0x1277ffc72f25e8fe, + }, + { + 0x79f4f0490f06a8a6, + 0x85f894a88030fd81, + 0x12da3054b18b6410, + 0xe2a57f6505880d65, + 0xbba074f260e400f1, + 0x08b76279f621d028, 
+ }, + { + 0xe67245ba78d5b00b, + 0x8456ba9a1f186475, + 0x7888bff6e6b33bb4, + 0xe21585b9a30f86cb, + 0x05a69cdcef55feee, + 0x09e699dd9adfa5ac, + }, + { + 0x0de5c357bff57107, + 0x0a0db4ae6b1a10b2, + 0xe256bb67b3b3cd8d, + 0x8ad456574e9db24f, + 0x0443915f50fd4179, + 0x098c4bf7de8b6375, + }, + { + 0xe6b0617e7dd929c7, + 0xfe6e37d442537375, + 0x1dafdeda137a489e, + 0xe4efd1ad3f767ceb, + 0x4a51d8667f0fe1cf, + 0x054fdf4bbf1d821c, + }, + { + 0x72db2a50658d767b, + 0x8abf91faa257b3d5, + 0xe969d6833764ab47, + 0x464170142a1009eb, + 0xb14f01aadb30be2f, + 0x18ae6a856f40715d, + }, + } + g1IsoXDen = []fp{ + { + 0xb962a077fdb0f945, + 0xa6a9740fefda13a0, + 0xc14d568c3ed6c544, + 0xb43fc37b908b133e, + 0x9c0b3ac929599016, + 0x0165aa6c93ad115f, + }, + { + 0x23279a3ba506c1d9, + 0x92cfca0a9465176a, + 0x3b294ab13755f0ff, + 0x116dda1c5070ae93, + 0xed4530924cec2045, + 0x083383d6ed81f1ce, + }, + { + 0x9885c2a6449fecfc, + 0x4a2b54ccd37733f0, + 0x17da9ffd8738c142, + 0xa0fba72732b3fafd, + 0xff364f36e54b6812, + 0x0f29c13c660523e2, + }, + { + 0xe349cc118278f041, + 0xd487228f2f3204fb, + 0xc9d325849ade5150, + 0x43a92bd69c15c2df, + 0x1c2c7844bc417be4, + 0x12025184f407440c, + }, + { + 0x587f65ae6acb057b, + 0x1444ef325140201f, + 0xfbf995e71270da49, + 0xccda066072436a42, + 0x7408904f0f186bb2, + 0x13b93c63edf6c015, + }, + { + 0xfb918622cd141920, + 0x4a4c64423ecaddb4, + 0x0beb232927f7fb26, + 0x30f94df6f83a3dc2, + 0xaeedd424d780f388, + 0x06cc402dd594bbeb, + }, + { + 0xd41f761151b23f8f, + 0x32a92465435719b3, + 0x64f436e888c62cb9, + 0xdf70a9a1f757c6e4, + 0x6933a38d5b594c81, + 0x0c6f7f7237b46606, + }, + { + 0x693c08747876c8f7, + 0x22c9850bf9cf80f0, + 0x8e9071dab950c124, + 0x89bc62d61c7baf23, + 0xbc6be2d8dad57c23, + 0x17916987aa14a122, + }, + { + 0x1be3ff439c1316fd, + 0x9965243a7571dfa7, + 0xc7f7f62962f5cd81, + 0x32c6aa9af394361c, + 0xbbc2ee18e1c227f4, + 0x0c102cbac531bb34, + }, + { + 0x997614c97bacbf07, + 0x61f86372b99192c0, + 0x5b8c95fc14353fc3, + 0xca2b066c2a87492f, + 0x16178f5bbf698711, + 0x12a6dcd7f0f4e0e8, + }, + { + 0x760900000002fffd, + 0xebf4000bc40c0002, + 0x5f48985753c758ba, + 0x77ce585370525745, + 0x5c071a97a256ec6d, + 0x15f65ec3fa80e493, + }, + } + g1IsoYNum = []fp{ + { + 0x2b567ff3e2837267, + 0x1d4d9e57b958a767, + 0xce028fea04bd7373, + 0xcc31a30a0b6cd3df, + 0x7d7b18a682692693, + 0x0d300744d42a0310, + }, + { + 0x99c2555fa542493f, + 0xfe7f53cc4874f878, + 0x5df0608b8f97608a, + 0x14e03832052b49c8, + 0x706326a6957dd5a4, + 0x0a8dadd9c2414555, + }, + { + 0x13d942922a5cf63a, + 0x357e33e36e261e7d, + 0xcf05a27c8456088d, + 0x0000bd1de7ba50f0, + 0x83d0c7532f8c1fde, + 0x13f70bf38bbf2905, + }, + { + 0x5c57fd95bfafbdbb, + 0x28a359a65e541707, + 0x3983ceb4f6360b6d, + 0xafe19ff6f97e6d53, + 0xb3468f4550192bf7, + 0x0bb6cde49d8ba257, + }, + { + 0x590b62c7ff8a513f, + 0x314b4ce372cacefd, + 0x6bef32ce94b8a800, + 0x6ddf84a095713d5f, + 0x64eace4cb0982191, + 0x0386213c651b888d, + }, + { + 0xa5310a31111bbcdd, + 0xa14ac0f5da148982, + 0xf9ad9cc95423d2e9, + 0xaa6ec095283ee4a7, + 0xcf5b1f022e1c9107, + 0x01fddf5aed881793, + }, + { + 0x65a572b0d7a7d950, + 0xe25c2d8183473a19, + 0xc2fcebe7cb877dbd, + 0x05b2d36c769a89b0, + 0xba12961be86e9efb, + 0x07eb1b29c1dfde1f, + }, + { + 0x93e09572f7c4cd24, + 0x364e929076795091, + 0x8569467e68af51b5, + 0xa47da89439f5340f, + 0xf4fa918082e44d64, + 0x0ad52ba3e6695a79, + }, + { + 0x911429844e0d5f54, + 0xd03f51a3516bb233, + 0x3d587e5640536e66, + 0xfa86d2a3a9a73482, + 0xa90ed5adf1ed5537, + 0x149c9c326a5e7393, + }, + { + 0x462bbeb03c12921a, + 0xdc9af5fa0a274a17, + 0x9a558ebde836ebed, + 0x649ef8f11a4fae46, + 0x8100e1652b3cdc62, + 
0x1862bd62c291dacb, + }, + { + 0x05c9b8ca89f12c26, + 0x0194160fa9b9ac4f, + 0x6a643d5a6879fa2c, + 0x14665bdd8846e19d, + 0xbb1d0d53af3ff6bf, + 0x12c7e1c3b28962e5, + }, + { + 0xb55ebf900b8a3e17, + 0xfedc77ec1a9201c4, + 0x1f07db10ea1a4df4, + 0x0dfbd15dc41a594d, + 0x389547f2334a5391, + 0x02419f98165871a4, + }, + { + 0xb416af000745fc20, + 0x8e563e9d1ea6d0f5, + 0x7c763e17763a0652, + 0x01458ef0159ebbef, + 0x8346fe421f96bb13, + 0x0d2d7b829ce324d2, + }, + { + 0x93096bb538d64615, + 0x6f2a2619951d823a, + 0x8f66b3ea59514fa4, + 0xf563e63704f7092f, + 0x724b136c4cf2d9fa, + 0x046959cfcfd0bf49, + }, + { + 0xea748d4b6e405346, + 0x91e9079c2c02d58f, + 0x41064965946d9b59, + 0xa06731f1d2bbe1ee, + 0x07f897e267a33f1b, + 0x1017290919210e5f, + }, + { + 0x872aa6c17d985097, + 0xeecc53161264562a, + 0x07afe37afff55002, + 0x54759078e5be6838, + 0xc4b92d15db8acca8, + 0x106d87d1b51d13b9, + }, + } + g1IsoYDen = []fp{ + { + 0xeb6c359d47e52b1c, + 0x18ef5f8a10634d60, + 0xddfa71a0889d5b7e, + 0x723e71dcc5fc1323, + 0x52f45700b70d5c69, + 0x0a8b981ee47691f1, + }, + { + 0x616a3c4f5535b9fb, + 0x6f5f037395dbd911, + 0xf25f4cc5e35c65da, + 0x3e50dffea3c62658, + 0x6a33dca523560776, + 0x0fadeff77b6bfe3e, + }, + { + 0x2be9b66df470059c, + 0x24a2c159a3d36742, + 0x115dbe7ad10c2a37, + 0xb6634a652ee5884d, + 0x04fe8bb2b8d81af4, + 0x01c2a7a256fe9c41, + }, + { + 0xf27bf8ef3b75a386, + 0x898b367476c9073f, + 0x24482e6b8c2f4e5f, + 0xc8e0bbd6fe110806, + 0x59b0c17f7631448a, + 0x11037cd58b3dbfbd, + }, + { + 0x31c7912ea267eec6, + 0x1dbf6f1c5fcdb700, + 0xd30d4fe3ba86fdb1, + 0x3cae528fbee9a2a4, + 0xb1cce69b6aa9ad9a, + 0x044393bb632d94fb, + }, + { + 0xc66ef6efeeb5c7e8, + 0x9824c289dd72bb55, + 0x71b1a4d2f119981d, + 0x104fc1aafb0919cc, + 0x0e49df01d942a628, + 0x096c3a09773272d4, + }, + { + 0x9abc11eb5fadeff4, + 0x32dca50a885728f0, + 0xfb1fa3721569734c, + 0xc4b76271ea6506b3, + 0xd466a75599ce728e, + 0x0c81d4645f4cb6ed, + }, + { + 0x4199f10e5b8be45b, + 0xda64e495b1e87930, + 0xcb353efe9b33e4ff, + 0x9e9efb24aa6424c6, + 0xf08d33680a237465, + 0x0d3378023e4c7406, + }, + { + 0x7eb4ae92ec74d3a5, + 0xc341b4aa9fac3497, + 0x5be603899e907687, + 0x03bfd9cca75cbdeb, + 0x564c2935a96bfa93, + 0x0ef3c33371e2fdb5, + }, + { + 0x7ee91fd449f6ac2e, + 0xe5d5bd5cb9357a30, + 0x773a8ca5196b1380, + 0xd0fda172174ed023, + 0x6cb95e0fa776aead, + 0x0d22d5a40cec7cff, + }, + { + 0xf727e09285fd8519, + 0xdc9d55a83017897b, + 0x7549d8bd057894ae, + 0x178419613d90d8f8, + 0xfce95ebdeb5b490a, + 0x0467ffaef23fc49e, + }, + { + 0xc1769e6a7c385f1b, + 0x79bc930deac01c03, + 0x5461c75a23ede3b5, + 0x6e20829e5c230c45, + 0x828e0f1e772a53cd, + 0x116aefa749127bff, + }, + { + 0x101c10bf2744c10a, + 0xbbf18d053a6a3154, + 0xa0ecf39ef026f602, + 0xfc009d4996dc5153, + 0xb9000209d5bd08d3, + 0x189e5fe4470cd73c, + }, + { + 0x7ebd546ca1575ed2, + 0xe47d5a981d081b55, + 0x57b2b625b6d4ca21, + 0xb0a1ba04228520cc, + 0x98738983c2107ff3, + 0x13dddbc4799d81d6, + }, + { + 0x09319f2e39834935, + 0x039e952cbdb05c21, + 0x55ba77a9a2f76493, + 0xfd04e3dfc6086467, + 0xfb95832e7d78742e, + 0x0ef9c24eccaf5e0e, + }, + { + 0x760900000002fffd, + 0xebf4000bc40c0002, + 0x5f48985753c758ba, + 0x77ce585370525745, + 0x5c071a97a256ec6d, + 0x15f65ec3fa80e493, + }, + } +) + +// G1 is a point in g1 +type G1 struct { + x, y, z fp +} + +// Random creates a random point on the curve +// from the specified reader +func (g1 *G1) Random(reader io.Reader) (*G1, error) { + var seed [native.WideFieldBytes]byte + n, err := reader.Read(seed[:]) + if err != nil { + return nil, errors.Wrap(err, "random could not read from stream") + } + if n != native.WideFieldBytes { + 
return nil, fmt.Errorf("insufficient bytes read %d when %d are needed", n, WideFieldBytes) + } + dst := []byte("BLS12381G1_XMD:SHA-256_SSWU_RO_") + return g1.Hash(native.EllipticPointHasherSha256(), seed[:], dst), nil +} + +// Hash uses the hasher to map bytes to a valid point +func (g1 *G1) Hash(hash *native.EllipticPointHasher, msg, dst []byte) *G1 { + var u []byte + var u0, u1 fp + var r0, r1, q0, q1 G1 + + switch hash.Type() { + case native.XMD: + u = native.ExpandMsgXmd(hash, msg, dst, 128) + case native.XOF: + u = native.ExpandMsgXof(hash, msg, dst, 128) + } + + var buf [WideFieldBytes]byte + copy(buf[:64], internal.ReverseScalarBytes(u[:64])) + u0.SetBytesWide(&buf) + copy(buf[:64], internal.ReverseScalarBytes(u[64:])) + u1.SetBytesWide(&buf) + + r0.osswu3mod4(&u0) + r1.osswu3mod4(&u1) + q0.isogenyMap(&r0) + q1.isogenyMap(&r1) + g1.Add(&q0, &q1) + return g1.ClearCofactor(g1) +} + +// Identity returns the identity point +func (g1 *G1) Identity() *G1 { + g1.x.SetZero() + g1.y.SetOne() + g1.z.SetZero() + return g1 +} + +// Generator returns the base point +func (g1 *G1) Generator() *G1 { + g1.x.Set(&g1x) + g1.y.Set(&g1y) + g1.z.SetOne() + return g1 +} + +// IsIdentity returns true if this point is at infinity +func (g1 *G1) IsIdentity() int { + return g1.z.IsZero() +} + +// IsOnCurve determines if this point represents a valid curve point +func (g1 *G1) IsOnCurve() int { + // Y^2 Z = X^3 + b Z^3 + var lhs, rhs, t fp + lhs.Square(&g1.y) + lhs.Mul(&lhs, &g1.z) + + rhs.Square(&g1.x) + rhs.Mul(&rhs, &g1.x) + t.Square(&g1.z) + t.Mul(&t, &g1.z) + t.Mul(&t, &curveG1B) + rhs.Add(&rhs, &t) + + return lhs.Equal(&rhs) +} + +// InCorrectSubgroup returns 1 if the point is torsion free, 0 otherwise +func (g1 *G1) InCorrectSubgroup() int { + var t G1 + t.multiply(g1, &fqModulusBytes) + return t.IsIdentity() +} + +// Add adds this point to another point. 
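Throughout this file points stay in homogeneous projective form, which is what IsOnCurve above checks via Y^2*Z = X^3 + b*Z^3: the triples (X, Y, Z) and (l*X, l*Y, l*Z) describe the same affine point, and Equal and ToAffine further down rely on exactly that. A small standalone sketch over a toy prime (211, reusing the curve constant b = 4; not the real base field) illustrating the scale invariance:

package main

import "fmt"

const p, b = 211, 4 // toy prime, same curve shape y^2 = x^3 + b

// onCurve evaluates the homogeneous equation Y^2*Z == X^3 + b*Z^3 mod p.
func onCurve(x, y, z int) bool {
	lhs := y * y % p * z % p
	rhs := (x*x%p*x + b*(z*z%p*z)) % p
	return lhs == rhs
}

func main() {
	// Find any affine point (x, y, 1) by brute force.
	x, y := 0, 0
search:
	for x = 0; x < p; x++ {
		for y = 1; y < p; y++ {
			if onCurve(x, y, 1) {
				break search
			}
		}
	}
	fmt.Println("affine point:", x, y, onCurve(x, y, 1))

	// Scaling all three coordinates by a nonzero lambda keeps the equation
	// satisfied, so the triple still represents the same affine point.
	for lambda := 2; lambda <= 5; lambda++ {
		fmt.Println(lambda, onCurve(x*lambda%p, y*lambda%p, lambda))
	}
}

The complete projective addition formulas (Algorithm 7 of eprint 2015/1060) follow.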
+func (g1 *G1) Add(arg1, arg2 *G1) *G1 { + // Algorithm 7, https://eprint.iacr.org/2015/1060.pdf + var t0, t1, t2, t3, t4, x3, y3, z3 fp + + t0.Mul(&arg1.x, &arg2.x) + t1.Mul(&arg1.y, &arg2.y) + t2.Mul(&arg1.z, &arg2.z) + t3.Add(&arg1.x, &arg1.y) + t4.Add(&arg2.x, &arg2.y) + t3.Mul(&t3, &t4) + t4.Add(&t0, &t1) + t3.Sub(&t3, &t4) + t4.Add(&arg1.y, &arg1.z) + x3.Add(&arg2.y, &arg2.z) + t4.Mul(&t4, &x3) + x3.Add(&t1, &t2) + t4.Sub(&t4, &x3) + x3.Add(&arg1.x, &arg1.z) + y3.Add(&arg2.x, &arg2.z) + x3.Mul(&x3, &y3) + y3.Add(&t0, &t2) + y3.Sub(&x3, &y3) + x3.Double(&t0) + t0.Add(&t0, &x3) + t2.MulBy3b(&t2) + z3.Add(&t1, &t2) + t1.Sub(&t1, &t2) + y3.MulBy3b(&y3) + x3.Mul(&t4, &y3) + t2.Mul(&t3, &t1) + x3.Sub(&t2, &x3) + y3.Mul(&y3, &t0) + t1.Mul(&t1, &z3) + y3.Add(&t1, &y3) + t0.Mul(&t0, &t3) + z3.Mul(&z3, &t4) + z3.Add(&z3, &t0) + + g1.x.Set(&x3) + g1.y.Set(&y3) + g1.z.Set(&z3) + return g1 +} + +// Sub subtracts the two points +func (g1 *G1) Sub(arg1, arg2 *G1) *G1 { + var t G1 + t.Neg(arg2) + return g1.Add(arg1, &t) +} + +// Double this point +func (g1 *G1) Double(a *G1) *G1 { + // Algorithm 9, https://eprint.iacr.org/2015/1060.pdf + var t0, t1, t2, x3, y3, z3 fp + + t0.Square(&a.y) + z3.Double(&t0) + z3.Double(&z3) + z3.Double(&z3) + t1.Mul(&a.y, &a.z) + t2.Square(&a.z) + t2.MulBy3b(&t2) + x3.Mul(&t2, &z3) + y3.Add(&t0, &t2) + z3.Mul(&t1, &z3) + t1.Double(&t2) + t2.Add(&t2, &t1) + t0.Sub(&t0, &t2) + y3.Mul(&t0, &y3) + y3.Add(&y3, &x3) + t1.Mul(&a.x, &a.y) + x3.Mul(&t0, &t1) + x3.Double(&x3) + + e := a.IsIdentity() + g1.x.CMove(&x3, t0.SetZero(), e) + g1.z.CMove(&z3, &t0, e) + g1.y.CMove(&y3, t0.SetOne(), e) + return g1 +} + +// Mul multiplies this point by the input scalar +func (g1 *G1) Mul(a *G1, s *native.Field) *G1 { + bytes := s.Bytes() + return g1.multiply(a, &bytes) +} + +func (g1 *G1) multiply(a *G1, bytes *[native.FieldBytes]byte) *G1 { + var p G1 + precomputed := [16]*G1{} + precomputed[0] = new(G1).Identity() + precomputed[1] = new(G1).Set(a) + for i := 2; i < 16; i += 2 { + precomputed[i] = new(G1).Double(precomputed[i>>1]) + precomputed[i+1] = new(G1).Add(precomputed[i], a) + } + p.Identity() + for i := 0; i < 256; i += 4 { + // Brouwer / windowing method. window size of 4. + for j := 0; j < 4; j++ { + p.Double(&p) + } + window := bytes[32-1-i>>3] >> (4 - i&0x04) & 0x0F + p.Add(&p, precomputed[window]) + } + return g1.Set(&p) +} + +// MulByX multiplies by BLS X using double and add +func (g1 *G1) MulByX(a *G1) *G1 { + // Skip first bit since its always zero + var s, t, r G1 + r.Identity() + t.Set(a) + + for x := paramX >> 1; x != 0; x >>= 1 { + t.Double(&t) + s.Add(&r, &t) + r.CMove(&r, &s, int(x&1)) + } + // Since BLS_X is negative, flip the sign + return g1.Neg(&r) +} + +// ClearCofactor multiplies by (1 - z), where z is the parameter of BLS12-381, which +// [suffices to clear](https://ia.cr/2019/403) the cofactor and map +// elliptic curve points to elements of G1. 
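In the standard BLS12-381 parameterization the curve parameter z is negative (z = -0xd201000000010000, assumed here), so multiplying by (1 - z) is an ordinary positive scalar multiplication. A quick math/big sanity check that 1 - z matches the effective cofactor-clearing scalar h_eff = 0xd201000000010001 listed for G1 in the hash-to-curve draft:

package main

import (
	"fmt"
	"math/big"
)

func main() {
	// Assumed value of the (negative) BLS12-381 curve parameter z.
	z, _ := new(big.Int).SetString("-d201000000010000", 16)

	// ClearCofactor computes P - [z]P, i.e. multiplication by (1 - z).
	hEff := new(big.Int).Sub(big.NewInt(1), z)

	fmt.Printf("1 - z = 0x%x\n", hEff) // expected: 0xd201000000010001
}

The implementation below computes exactly a - MulByX(a).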
+func (g1 *G1) ClearCofactor(a *G1) *G1 { + var t G1 + t.MulByX(a) + return g1.Sub(a, &t) +} + +// Neg negates this point +func (g1 *G1) Neg(a *G1) *G1 { + g1.Set(a) + g1.y.CNeg(&a.y, -(a.IsIdentity() - 1)) + return g1 +} + +// Set copies a into g1 +func (g1 *G1) Set(a *G1) *G1 { + g1.x.Set(&a.x) + g1.y.Set(&a.y) + g1.z.Set(&a.z) + return g1 +} + +// BigInt returns the x and y as big.Ints in affine +func (g1 *G1) BigInt() (x, y *big.Int) { + var t G1 + t.ToAffine(g1) + x = t.x.BigInt() + y = t.y.BigInt() + return x, y +} + +// SetBigInt creates a point from affine x, y +// and returns the point if it is on the curve +func (g1 *G1) SetBigInt(x, y *big.Int) (*G1, error) { + var xx, yy fp + var pp G1 + pp.x = *(xx.SetBigInt(x)) + pp.y = *(yy.SetBigInt(y)) + + if pp.x.IsZero()&pp.y.IsZero() == 1 { + pp.Identity() + return g1.Set(&pp), nil + } + + pp.z.SetOne() + + // If not the identity point and not on the curve then invalid + if (pp.IsOnCurve()&pp.InCorrectSubgroup())|(xx.IsZero()&yy.IsZero()) == 0 { + return nil, fmt.Errorf("invalid coordinates") + } + return g1.Set(&pp), nil +} + +// ToCompressed serializes this element into compressed form. +func (g1 *G1) ToCompressed() [FieldBytes]byte { + var out [FieldBytes]byte + var t G1 + t.ToAffine(g1) + xBytes := t.x.Bytes() + copy(out[:], internal.ReverseScalarBytes(xBytes[:])) + isInfinity := byte(g1.IsIdentity()) + // Compressed flag + out[0] |= 1 << 7 + // Is infinity + out[0] |= (1 << 6) & -isInfinity + // Sign of y only set if not infinity + out[0] |= (byte(t.y.LexicographicallyLargest()) << 5) & (isInfinity - 1) + return out +} + +// FromCompressed deserializes this element from compressed form. +func (g1 *G1) FromCompressed(input *[FieldBytes]byte) (*G1, error) { + var xFp, yFp fp + var x [FieldBytes]byte + var p G1 + compressedFlag := int((input[0] >> 7) & 1) + infinityFlag := int((input[0] >> 6) & 1) + sortFlag := int((input[0] >> 5) & 1) + + if compressedFlag != 1 { + return nil, errors.New("compressed flag must be set") + } + + if infinityFlag == 1 { + return g1.Identity(), nil + } + + copy(x[:], internal.ReverseScalarBytes(input[:])) + // Mask away the flag bits + x[FieldBytes-1] &= 0x1F + _, valid := xFp.SetBytes(&x) + + if valid != 1 { + return nil, errors.New("invalid bytes - not in field") + } + + yFp.Square(&xFp) + yFp.Mul(&yFp, &xFp) + yFp.Add(&yFp, &curveG1B) + + _, wasSquare := yFp.Sqrt(&yFp) + if wasSquare != 1 { + return nil, errors.New("point is not on the curve") + } + + yFp.CNeg(&yFp, yFp.LexicographicallyLargest()^sortFlag) + p.x.Set(&xFp) + p.y.Set(&yFp) + p.z.SetOne() + if p.InCorrectSubgroup() == 0 { + return nil, errors.New("point is not in correct subgroup") + } + return g1.Set(&p), nil +} + +// ToUncompressed serializes this element into uncompressed form. +func (g1 *G1) ToUncompressed() [WideFieldBytes]byte { + var out [WideFieldBytes]byte + var t G1 + t.ToAffine(g1) + xBytes := t.x.Bytes() + yBytes := t.y.Bytes() + copy(out[:FieldBytes], internal.ReverseScalarBytes(xBytes[:])) + copy(out[FieldBytes:], internal.ReverseScalarBytes(yBytes[:])) + isInfinity := byte(g1.IsIdentity()) + out[0] |= (1 << 6) & -isInfinity + return out +} + +// FromUncompressed deserializes this element from uncompressed form. 
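The serializers above follow the usual three-flag convention in the most significant byte of the big-endian x coordinate: bit 7 marks a compressed encoding, bit 6 the point at infinity, and bit 5 (meaningful only when compressed) selects the lexicographically larger y root. A tiny standalone decoder sketch (the helper name is illustrative, not part of this package):

package main

import "fmt"

// decodeFlags mirrors the bit layout produced by ToCompressed above:
// bit 7 = compressed, bit 6 = infinity, bit 5 = y is lexicographically largest.
func decodeFlags(top byte) (compressed, infinity, yIsLargest bool) {
	return top>>7&1 == 1, top>>6&1 == 1, top>>5&1 == 1
}

func main() {
	// 0xc0 = 1100_0000: the compressed identity point (sign bit forced to 0).
	fmt.Println(decodeFlags(0xc0)) // true true false

	// 0xa0 = 1010_0000: a compressed non-identity point whose y is the
	// lexicographically larger of the two square roots.
	fmt.Println(decodeFlags(0xa0)) // true false true
}

FromUncompressed below reads only the infinity flag and masks the remaining flag bits off before parsing x.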
+func (g1 *G1) FromUncompressed(input *[WideFieldBytes]byte) (*G1, error) { + var xFp, yFp fp + var t [FieldBytes]byte + var p G1 + infinityFlag := int((input[0] >> 6) & 1) + + if infinityFlag == 1 { + return g1.Identity(), nil + } + + copy(t[:], internal.ReverseScalarBytes(input[:FieldBytes])) + // Mask away top bits + t[FieldBytes-1] &= 0x1F + + _, valid := xFp.SetBytes(&t) + if valid == 0 { + return nil, errors.New("invalid bytes - x not in field") + } + copy(t[:], internal.ReverseScalarBytes(input[FieldBytes:])) + _, valid = yFp.SetBytes(&t) + if valid == 0 { + return nil, errors.New("invalid bytes - y not in field") + } + + p.x.Set(&xFp) + p.y.Set(&yFp) + p.z.SetOne() + + if p.IsOnCurve() == 0 { + return nil, errors.New("point is not on the curve") + } + if p.InCorrectSubgroup() == 0 { + return nil, errors.New("point is not in correct subgroup") + } + return g1.Set(&p), nil +} + +// ToAffine converts the point into affine coordinates +func (g1 *G1) ToAffine(a *G1) *G1 { + var wasInverted int + var zero, x, y, z fp + _, wasInverted = z.Invert(&a.z) + x.Mul(&a.x, &z) + y.Mul(&a.y, &z) + + g1.x.CMove(&zero, &x, wasInverted) + g1.y.CMove(&zero, &y, wasInverted) + g1.z.CMove(&zero, z.SetOne(), wasInverted) + return g1 +} + +// GetX returns the affine X coordinate +func (g1 *G1) GetX() *fp { + var t G1 + t.ToAffine(g1) + return &t.x +} + +// GetY returns the affine Y coordinate +func (g1 *G1) GetY() *fp { + var t G1 + t.ToAffine(g1) + return &t.y +} + +// Equal returns 1 if the two points are equal 0 otherwise. +func (g1 *G1) Equal(rhs *G1) int { + var x1, x2, y1, y2 fp + var e1, e2 int + + // This technique avoids inversions + x1.Mul(&g1.x, &rhs.z) + x2.Mul(&rhs.x, &g1.z) + + y1.Mul(&g1.y, &rhs.z) + y2.Mul(&rhs.y, &g1.z) + + e1 = g1.z.IsZero() + e2 = rhs.z.IsZero() + + // Both at infinity or coordinates are the same + return (e1 & e2) | (^e1 & ^e2)&x1.Equal(&x2)&y1.Equal(&y2) +} + +// CMove sets g1 = arg1 if choice == 0 and g1 = arg2 if choice == 1 +func (g1 *G1) CMove(arg1, arg2 *G1, choice int) *G1 { + g1.x.CMove(&arg1.x, &arg2.x, choice) + g1.y.CMove(&arg1.y, &arg2.y, choice) + g1.z.CMove(&arg1.z, &arg2.z, choice) + return g1 +} + +// SumOfProducts computes the multi-exponentiation for the specified +// points and scalars and stores the result in `g1`. +// Returns an error if the lengths of the arguments is not equal. 
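SumOfProducts implements a 4-bit windowed bucket (Pippenger-style) multi-exponentiation: within each window every point is added into the bucket indexed by the matching 4-bit digit of its scalar, and a running suffix sum over the buckets makes bucket b contribute b copies of its contents. A standalone sketch of the same bookkeeping with plain integers standing in for points (integer addition plays the role of point addition; all names are illustrative):

package main

import "fmt"

func sumOfProducts(points, scalars []uint64) uint64 {
	const w = 4
	const windows = 64 / w
	var acc uint64
	for j := windows - 1; j >= 0; j-- {
		acc <<= w // same role as the w doublings per window
		var buckets [1 << w]uint64
		for i := range scalars {
			// Take the j-th 4-bit digit of the scalar and drop the point
			// into the bucket it selects.
			digit := scalars[i] >> (uint(j) * w) & (1<<w - 1)
			buckets[digit] += points[i]
		}
		// Suffix-sum the buckets so bucket b contributes b times.
		var sum, windowTotal uint64
		for b := len(buckets) - 1; b > 0; b-- {
			sum += buckets[b]
			windowTotal += sum
		}
		acc += windowTotal
	}
	return acc
}

func main() {
	points := []uint64{3, 5, 7}
	scalars := []uint64{1000, 2000, 3000}
	want := uint64(3*1000 + 5*2000 + 7*3000)
	fmt.Println("matches direct computation:", sumOfProducts(points, scalars) == want) // true
}

The method below follows the same structure, with W doublings of the accumulator taking the place of the shift.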
+func (g1 *G1) SumOfProducts(points []*G1, scalars []*native.Field) (*G1, error) {
+ const Upper = 256
+ const W = 4
+ const Windows = Upper / W // careful--use ceiling division in case this doesn't divide evenly
+ var sum G1
+ if len(points) != len(scalars) {
+ return nil, fmt.Errorf("length mismatch")
+ }
+
+ bucketSize := 1 << W
+ windows := make([]G1, Windows)
+ bytes := make([][32]byte, len(scalars))
+ buckets := make([]G1, bucketSize)
+
+ for i := 0; i < len(windows); i++ {
+ windows[i].Identity()
+ }
+
+ for i, scalar := range scalars {
+ bytes[i] = scalar.Bytes()
+ }
+
+ for j := 0; j < len(windows); j++ {
+ for i := 0; i < bucketSize; i++ {
+ buckets[i].Identity()
+ }
+
+ for i := 0; i < len(scalars); i++ {
+ // j*W to get the nibble
+ // >> 3 to convert to byte, / 8
+ // (W * j & W) gets the nibble, mod W
+ // 1 << W - 1 to get the offset
+ index := bytes[i][j*W>>3] >> (W * j & W) & (1<<W - 1)
+ buckets[index].Add(&buckets[index], points[i])
+ }
+
+ sum.Identity()
+
+ for i := bucketSize - 1; i > 0; i-- {
+ sum.Add(&sum, &buckets[i])
+ windows[j].Add(&windows[j], &sum)
+ }
+ }
+
+ g1.Identity()
+ for i := len(windows) - 1; i >= 0; i-- {
+ for j := 0; j < W; j++ {
+ g1.Double(g1)
+ }
+
+ g1.Add(g1, &windows[i])
+ }
+ return g1, nil
+}
+
+func (g1 *G1) osswu3mod4(u *fp) *G1 {
+ // Taken from section 8.8.1 in
+ //
+ var tv1, tv2, tv3, tv4, xd, x1n, x2n, gxd, gx1, y1, y2 fp
+
+ // tv1 = u^2
+ tv1.Square(u)
+ // tv3 = Z * tv1
+ tv3.Mul(&oswwuMapZ, &tv1)
+ // tv2 = tv3^2
+ tv2.Square(&tv3)
+ // xd = tv2 + tv3
+ xd.Add(&tv2, &tv3)
+ // x1n = xd + 1
+ x1n.Add(&xd, &r)
+ // x1n = x1n * B
+ x1n.Mul(&x1n, &osswuMapB)
+ // xd = -A * xd
+ xd.Mul(&negOsswuMapA, &xd)
+ // xd = CMOV(xd, Z * A, xd == 0)
+ xd.CMove(&xd, &oswwuMapXd1, xd.IsZero())
+ // tv2 = xd^2
+ tv2.Square(&xd)
+
+ gxd.Mul(&tv2, &xd)
+ tv2.Mul(&tv2, &osswuMapA)
+ gx1.Square(&x1n)
+ gx1.Add(&gx1, &tv2)
+ gx1.Mul(&gx1, &x1n)
+ tv2.Mul(&osswuMapB, &gxd)
+
+ gx1.Add(&gx1, &tv2)
+ tv4.Square(&gxd)
+ tv2.Mul(&gx1, &gxd)
+ tv4.Mul(&tv4, &tv2)
+ y1.pow(&tv4, &osswuMapC1)
+ y1.Mul(&y1, &tv2)
+
+ x2n.Mul(&tv3, &x1n)
+ y2.Mul(&y1, &osswuMapC2)
+ y2.Mul(&y2, &tv1)
+ y2.Mul(&y2, u)
+
+ tv2.Square(&y1)
+ tv2.Mul(&tv2, &gxd)
+ e2 := tv2.Equal(&gx1)
+
+ x2n.CMove(&x2n, &x1n, e2)
+ y2.CMove(&y2, &y1, e2)
+
+ e3 := u.Sgn0() ^ y2.Sgn0()
+ y2.CNeg(&y2, e3)
+
+ g1.z.SetOne()
+ g1.y.Set(&y2)
+ _, _ = g1.x.Invert(&xd)
+ g1.x.Mul(&g1.x, &x2n)
+ return g1
+}
+
+func (g1 *G1) isogenyMap(a *G1) *G1 {
+ const Degree = 16
+ var xs [Degree]fp
+ xs[0] = r
+ xs[1].Set(&a.x)
+ xs[2].Square(&a.x)
+ for i := 3; i < Degree; i++ {
+ xs[i].Mul(&xs[i-1], &a.x)
+ }
+
+ xNum := computeKFp(xs[:], g1IsoXNum)
+ xDen := computeKFp(xs[:], g1IsoXDen)
+ yNum := computeKFp(xs[:], g1IsoYNum)
+ yDen := computeKFp(xs[:], g1IsoYDen)
+
+ g1.x.Invert(&xDen)
+ g1.x.Mul(&g1.x, &xNum)
+
+ g1.y.Invert(&yDen)
+ g1.y.Mul(&g1.y, &yNum)
+ g1.y.Mul(&g1.y, &a.y)
+ g1.z.SetOne()
+ return g1
+}
+
+func computeKFp(xxs []fp, k []fp) fp {
+ var xx, t fp
+ for i := range k {
+ xx.Add(&xx, t.Mul(&xxs[i], &k[i]))
+ }
+ return xx
+}
diff --git a/core/curves/native/bls12381/g1_test.go b/core/curves/native/bls12381/g1_test.go
new file mode 100644
index 0000000..0707972
--- /dev/null
+++ b/core/curves/native/bls12381/g1_test.go
@@ -0,0 +1,431 @@
+package bls12381
+
+import (
+ crand "crypto/rand"
+ "encoding/hex"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+
+ "github.com/sonr-io/sonr/crypto/core/curves/native"
+)
+
+func TestG1IsOnCurve(t *testing.T) {
+ require.Equal(t, 1, new(G1).Identity().IsOnCurve())
+ require.Equal(t, 1, new(G1).Generator().IsOnCurve())
+
+ z := fp{
+ 0xba7afa1f9a6fe250,
0xfa0f5b595eafe731, + 0x3bdc477694c306e7, + 0x2149be4b3949fa24, + 0x64aa6e0649b2078c, + 0x12b108ac33643c3e, + } + + gen := new(G1).Generator() + test := G1{ + x: *(gen.x.Mul(&gen.x, &z)), + y: *(gen.y.Mul(&gen.y, &z)), + z: z, + } + require.Equal(t, 1, test.IsOnCurve()) + test.x = z + require.Equal(t, 0, test.IsOnCurve()) +} + +func TestG1Equality(t *testing.T) { + a := new(G1).Generator() + b := new(G1).Identity() + + require.Equal(t, 1, a.Equal(a)) + require.Equal(t, 1, b.Equal(b)) + require.Equal(t, 0, a.Equal(b)) + require.Equal(t, 0, b.Equal(a)) + + z := fp{ + 0xba7afa1f9a6fe250, + 0xfa0f5b595eafe731, + 0x3bdc477694c306e7, + 0x2149be4b3949fa24, + 0x64aa6e0649b2078c, + 0x12b108ac33643c3e, + } + + c := G1{} + c.x.Mul(&a.x, &z) + c.y.Mul(&a.y, &z) + c.z.Set(&z) + + require.Equal(t, 1, c.IsOnCurve()) + + require.Equal(t, 1, a.Equal(&c)) + require.Equal(t, 0, b.Equal(&c)) + + c.y.Neg(&c.y) + require.Equal(t, 1, c.IsOnCurve()) + + require.Equal(t, 0, a.Equal(&c)) + + c.y.Neg(&c.y) + c.x.Set(&z) + require.Equal(t, 0, c.IsOnCurve()) +} + +func TestG1Double(t *testing.T) { + t0 := new(G1).Identity() + t0.Double(t0) + require.Equal(t, 1, t0.IsIdentity()) + require.Equal(t, 1, t0.IsOnCurve()) + + t0.Double(t0.Generator()) + require.Equal(t, 0, t0.IsIdentity()) + require.Equal(t, 1, t0.IsOnCurve()) + e := G1{ + x: fp{ + 0x53e978ce58a9ba3c, + 0x3ea0583c4f3d65f9, + 0x4d20bb47f0012960, + 0xa54c664ae5b2b5d9, + 0x26b552a39d7eb21f, + 0x0008895d26e68785, + }, + y: fp{ + 0x70110b3298293940, + 0xda33c5393f1f6afc, + 0xb86edfd16a5aa785, + 0xaec6d1c9e7b1c895, + 0x25cfc2b522d11720, + 0x06361c83f8d09b15, + }, + z: r, + } + + require.Equal(t, 1, e.Equal(t0)) +} + +func TestG1Add(t *testing.T) { + g := new(G1).Generator() + a := new(G1).Identity() + b := new(G1).Identity() + c := new(G1).Add(a, b) + require.Equal(t, 1, c.IsIdentity()) + require.Equal(t, 1, c.IsOnCurve()) + + b.Generator() + z := fp{ + 0xba7afa1f9a6fe250, + 0xfa0f5b595eafe731, + 0x3bdc477694c306e7, + 0x2149be4b3949fa24, + 0x64aa6e0649b2078c, + 0x12b108ac33643c3e, + } + b.x.Mul(&b.x, &z) + b.y.Mul(&b.y, &z) + b.z.Set(&z) + c.Add(a, b) + require.Equal(t, 0, c.IsIdentity()) + require.Equal(t, 1, g.Equal(c)) + + a.Generator() + a.Double(a) + a.Double(a) + b.Generator() + b.Double(b) + c.Add(a, b) + d := new(G1).Generator() + for i := 0; i < 5; i++ { + d.Add(d, g) + } + require.Equal(t, 0, c.IsIdentity()) + require.Equal(t, 1, c.IsOnCurve()) + require.Equal(t, 0, d.IsIdentity()) + require.Equal(t, 1, d.IsOnCurve()) + require.Equal(t, 1, c.Equal(d)) + + beta := fp{ + 0xcd03c9e48671f071, + 0x5dab22461fcda5d2, + 0x587042afd3851b95, + 0x8eb60ebe01bacb9e, + 0x03f97d6e83d050d2, + 0x18f0206554638741, + } + beta.Square(&beta) + a.Generator() + a.Double(a) + a.Double(a) + b.x.Mul(&a.x, &beta) + b.y.Neg(&a.y) + b.z.Set(&a.z) + require.Equal(t, 1, a.IsOnCurve()) + require.Equal(t, 1, b.IsOnCurve()) + c.Add(a, b) + d.x.Set(&fp{ + 0x29e1e987ef68f2d0, + 0xc5f3ec531db03233, + 0xacd6c4b6ca19730f, + 0x18ad9e827bc2bab7, + 0x46e3b2c5785cc7a9, + 0x07e571d42d22ddd6, + }) + d.y.Set(&fp{ + 0x94d117a7e5a539e7, + 0x8e17ef673d4b5d22, + 0x9d746aaf508a33ea, + 0x8c6d883d2516c9a2, + 0x0bc3b8d5fb0447f7, + 0x07bfa4c7210f4f44, + }) + d.z.SetOne() + require.Equal(t, 1, c.Equal(d)) +} + +func TestG1Sub(t *testing.T) { + a := new(G1).Generator() + b := new(G1).Generator() + require.Equal(t, 1, a.Sub(a, b).IsIdentity()) + b.Double(b) + a.Generator() + require.Equal(t, 1, b.Sub(b, a).Equal(a)) +} + +func TestG1Mul(t *testing.T) { + g := new(G1).Generator() + a := 
Bls12381FqNew().SetRaw(&[native.FieldLimbs]uint64{ + 0x2b568297a56da71c, + 0xd8c39ecb0ef375d1, + 0x435c38da67bfbf96, + 0x8088a05026b659b2, + }) + b := Bls12381FqNew().SetRaw(&[native.FieldLimbs]uint64{ + 0x785fdd9b26ef8b85, + 0xc997f25837695c18, + 0x4c8dbc39e7b756c1, + 0x70d9b6cc6d87df20, + }) + c := Bls12381FqNew().Mul(a, b) + t1 := new(G1).Generator() + t1.Mul(t1, a) + t1.Mul(t1, b) + require.Equal(t, 1, t1.Equal(g.Mul(g, c))) +} + +func TestG1Neg(t *testing.T) { + a := new(G1).Generator() + b := new(G1).Neg(a) + require.Equal(t, 1, new(G1).Add(a, b).IsIdentity()) + require.Equal(t, 1, new(G1).Sub(a, b.Neg(b)).IsIdentity()) + a.Identity() + require.Equal(t, 1, a.Neg(a).IsIdentity()) +} + +func TestG1InCorrectSubgroup(t *testing.T) { + // ZCash test vector + a := G1{ + x: fp{ + 0x0abaf895b97e43c8, + 0xba4c6432eb9b61b0, + 0x12506f52adfe307f, + 0x75028c3439336b72, + 0x84744f05b8e9bd71, + 0x113d554fb09554f7, + }, + y: fp{ + 0x73e90e88f5cf01c0, + 0x37007b65dd3197e2, + 0x5cf9a1992f0d7c78, + 0x4f83c10b9eb3330d, + 0xf6a63f6f07f60961, + 0x0c53b5b97e634df3, + }, + z: *(new(fp).SetOne()), + } + require.Equal(t, 0, a.InCorrectSubgroup()) + + require.Equal(t, 1, new(G1).Identity().InCorrectSubgroup()) + require.Equal(t, 1, new(G1).Generator().InCorrectSubgroup()) +} + +func TestG1MulByX(t *testing.T) { + // multiplying by `x` a point in G1 is the same as multiplying by + // the equivalent scalar. + generator := new(G1).Generator() + x := Bls12381FqNew().SetUint64(paramX) + x.Neg(x) + lhs := new(G1).Mul(generator, x) + rhs := new(G1).MulByX(generator) + require.Equal(t, 1, lhs.Equal(rhs)) + + pt := new(G1).Generator() + s := Bls12381FqNew().SetUint64(42) + pt.Mul(pt, s) + lhs.Mul(pt, x) + rhs.MulByX(pt) + require.Equal(t, 1, lhs.Equal(rhs)) +} + +func TestG1ClearCofactor(t *testing.T) { + // the generator (and the identity) are always on the curve, + // even after clearing the cofactor + generator := new(G1).Generator() + generator.ClearCofactor(generator) + require.Equal(t, 1, generator.IsOnCurve()) + id := new(G1).Identity() + id.ClearCofactor(id) + require.Equal(t, 1, id.IsOnCurve()) + + z := fp{ + 0x3d2d1c670671394e, + 0x0ee3a800a2f7c1ca, + 0x270f4f21da2e5050, + 0xe02840a53f1be768, + 0x55debeb597512690, + 0x08bd25353dc8f791, + } + + point := G1{ + x: fp{ + 0x48af5ff540c817f0, + 0xd73893acaf379d5a, + 0xe6c43584e18e023c, + 0x1eda39c30f188b3e, + 0xf618c6d3ccc0f8d8, + 0x0073542cd671e16c, + }, + y: fp{ + 0x57bf8be79461d0ba, + 0xfc61459cee3547c3, + 0x0d23567df1ef147b, + 0x0ee187bcce1d9b64, + 0xb0c8cfbe9dc8fdc1, + 0x1328661767ef368b, + }, + z: *(&fp{}).Set(&z), + } + point.x.Mul(&point.x, &z) + point.z.Square(&z) + point.z.Mul(&point.z, &z) + + require.Equal(t, 1, point.IsOnCurve()) + require.Equal(t, 0, point.InCorrectSubgroup()) + clearedPoint := new(G1).ClearCofactor(&point) + require.Equal(t, 1, clearedPoint.IsOnCurve()) + require.Equal(t, 1, clearedPoint.InCorrectSubgroup()) + + // in BLS12-381 the cofactor in G1 can be + // cleared multiplying by (1-x) + hEff := Bls12381FqNew().SetOne() + hEff.Add(hEff, Bls12381FqNew().SetUint64(paramX)) + point.Mul(&point, hEff) + require.Equal(t, 1, clearedPoint.Equal(&point)) +} + +func TestG1Hash(t *testing.T) { + dst := []byte("QUUX-V01-CS02-with-BLS12381G1_XMD:SHA-256_SSWU_RO_") + tests := []struct { + input, expected string + }{ + { + "", + "052926add2207b76ca4fa57a8734416c8dc95e24501772c814278700eed6d1e4e8cf62d9c09db0fac349612b759e79a108ba738453bfed09cb546dbb0783dbb3a5f1f566ed67bb6be0e8c67e2e81a4cc68ee29813bb7994998f3eae0c9c6a265", + }, + { + "abc", + 
"03567bc5ef9c690c2ab2ecdf6a96ef1c139cc0b2f284dca0a9a7943388a49a3aee664ba5379a7655d3c68900be2f69030b9c15f3fe6e5cf4211f346271d7b01c8f3b28be689c8429c85b67af215533311f0b8dfaaa154fa6b88176c229f2885d", + }, + { + "abcdef0123456789", + "11e0b079dea29a68f0383ee94fed1b940995272407e3bb916bbf268c263ddd57a6a27200a784cbc248e84f357ce82d9803a87ae2caf14e8ee52e51fa2ed8eefe80f02457004ba4d486d6aa1f517c0889501dc7413753f9599b099ebcbbd2d709", + }, + { + "q128_qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq", + "15f68eaa693b95ccb85215dc65fa81038d69629f70aeee0d0f677cf22285e7bf58d7cb86eefe8f2e9bc3f8cb84fac4881807a1d50c29f430b8cafc4f8638dfeeadf51211e1602a5f184443076715f91bb90a48ba1e370edce6ae1062f5e6dd38", + }, + { + "a512_aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "082aabae8b7dedb0e78aeb619ad3bfd9277a2f77ba7fad20ef6aabdc6c31d19ba5a6d12283553294c1825c4b3ca2dcfe05b84ae5a942248eea39e1d91030458c40153f3b654ab7872d779ad1e942856a20c438e8d99bc8abfbf74729ce1f7ac8", + }, + } + + pt := new(G1).Identity() + ept := new(G1).Identity() + var b [WideFieldBytes]byte + for _, tst := range tests { + i := []byte(tst.input) + e, _ := hex.DecodeString(tst.expected) + copy(b[:], e) + _, _ = ept.FromUncompressed(&b) + pt.Hash(native.EllipticPointHasherSha256(), i, dst) + require.Equal(t, 1, pt.Equal(ept)) + } +} + +func TestSerialization(t *testing.T) { + a := new( + G1, + ).Hash(native.EllipticPointHasherSha256(), []byte("a"), []byte("BLS12381G1_XMD:SHA-256_SSWU_RO_")) + b := new( + G1, + ).Hash(native.EllipticPointHasherSha256(), []byte("b"), []byte("BLS12381G1_XMD:SHA-256_SSWU_RO_")) + + aBytes := a.ToCompressed() + bBytes := b.ToCompressed() + + aa, err := new(G1).FromCompressed(&aBytes) + require.NoError(t, err) + require.Equal(t, 1, a.Equal(aa)) + + bb, err := new(G1).FromCompressed(&bBytes) + require.NoError(t, err) + require.Equal(t, 1, b.Equal(bb)) + + auBytes := a.ToUncompressed() + buBytes := b.ToUncompressed() + + _, err = aa.FromUncompressed(&auBytes) + require.NoError(t, err) + require.Equal(t, 1, a.Equal(aa)) + + _, err = bb.FromUncompressed(&buBytes) + require.NoError(t, err) + require.Equal(t, 1, b.Equal(bb)) + + bBytes = a.ToCompressed() + a.Neg(a) + aBytes = a.ToCompressed() + _, err = aa.FromCompressed(&aBytes) + require.NoError(t, err) + require.Equal(t, 1, a.Equal(aa)) + _, err = aa.FromCompressed(&bBytes) + require.NoError(t, err) + require.Equal(t, 0, a.Equal(aa)) + require.Equal(t, 1, aa.Equal(a.Neg(a))) +} + +func TestSumOfProducts(t *testing.T) { + var b [64]byte + h0, _ := new(G1).Random(crand.Reader) + _, _ = crand.Read(b[:]) + s := Bls12381FqNew().SetBytesWide(&b) + _, _ = crand.Read(b[:]) + sTilde := Bls12381FqNew().SetBytesWide(&b) + _, _ = crand.Read(b[:]) + c := Bls12381FqNew().SetBytesWide(&b) + + lhs := new(G1).Mul(h0, s) + rhs, _ := new(G1).SumOfProducts([]*G1{h0}, []*native.Field{s}) + require.Equal(t, 1, lhs.Equal(rhs)) + + u := new(G1).Mul(h0, s) + uTilde := new(G1).Mul(h0, sTilde) + sHat := Bls12381FqNew().Mul(c, s) + sHat.Sub(sTilde, sHat) + + 
rhs.Mul(u, c) + rhs.Add(rhs, new(G1).Mul(h0, sHat)) + require.Equal(t, 1, uTilde.Equal(rhs)) + _, _ = rhs.SumOfProducts([]*G1{u, h0}, []*native.Field{c, sHat}) + require.Equal(t, 1, uTilde.Equal(rhs)) +} diff --git a/core/curves/native/bls12381/g2.go b/core/curves/native/bls12381/g2.go new file mode 100644 index 0000000..3209f05 --- /dev/null +++ b/core/curves/native/bls12381/g2.go @@ -0,0 +1,1122 @@ +package bls12381 + +import ( + "fmt" + "io" + "math/big" + + "github.com/pkg/errors" + + "github.com/sonr-io/sonr/crypto/core/curves/native" + "github.com/sonr-io/sonr/crypto/internal" +) + +var ( + g2x = fp2{ + A: fp{ + 0xf5f2_8fa2_0294_0a10, + 0xb3f5_fb26_87b4_961a, + 0xa1a8_93b5_3e2a_e580, + 0x9894_999d_1a3c_aee9, + 0x6f67_b763_1863_366b, + 0x0581_9192_4350_bcd7, + }, + B: fp{ + 0xa5a9_c075_9e23_f606, + 0xaaa0_c59d_bccd_60c3, + 0x3bb1_7e18_e286_7806, + 0x1b1a_b6cc_8541_b367, + 0xc2b6_ed0e_f215_8547, + 0x1192_2a09_7360_edf3, + }, + } + g2y = fp2{ + A: fp{ + 0x4c73_0af8_6049_4c4a, + 0x597c_fa1f_5e36_9c5a, + 0xe7e6_856c_aa0a_635a, + 0xbbef_b5e9_6e0d_495f, + 0x07d3_a975_f0ef_25a2, + 0x0083_fd8e_7e80_dae5, + }, + B: fp{ + 0xadc0_fc92_df64_b05d, + 0x18aa_270a_2b14_61dc, + 0x86ad_ac6a_3be4_eba0, + 0x7949_5c4e_c93d_a33a, + 0xe717_5850_a43c_caed, + 0x0b2b_c2a1_63de_1bf2, + }, + } + curveG2B = fp2{ + A: fp{ + 0xaa27_0000_000c_fff3, + 0x53cc_0032_fc34_000a, + 0x478f_e97a_6b0a_807f, + 0xb1d3_7ebe_e6ba_24d7, + 0x8ec9_733b_bf78_ab2f, + 0x09d6_4551_3d83_de7e, + }, + B: fp{ + 0xaa27_0000_000c_fff3, + 0x53cc_0032_fc34_000a, + 0x478f_e97a_6b0a_807f, + 0xb1d3_7ebe_e6ba_24d7, + 0x8ec9_733b_bf78_ab2f, + 0x09d6_4551_3d83_de7e, + }, + } + curveG23B = fp2{ + A: fp{ + 0x447600000027552e, + 0xdcb8009a43480020, + 0x6f7ee9ce4a6e8b59, + 0xb10330b7c0a95bc6, + 0x6140b1fcfb1e54b7, + 0x0381be097f0bb4e1, + }, + B: fp{ + 0x447600000027552e, + 0xdcb8009a43480020, + 0x6f7ee9ce4a6e8b59, + 0xb10330b7c0a95bc6, + 0x6140b1fcfb1e54b7, + 0x0381be097f0bb4e1, + }, + } + sswuMapA = fp2{ + A: fp{}, + B: fp{ + 0xe53a000003135242, + 0x01080c0fdef80285, + 0xe7889edbe340f6bd, + 0x0b51375126310601, + 0x02d6985717c744ab, + 0x1220b4e979ea5467, + }, + } + sswuMapB = fp2{ + A: fp{ + 0x22ea00000cf89db2, + 0x6ec832df71380aa4, + 0x6e1b94403db5a66e, + 0x75bf3c53a79473ba, + 0x3dd3a569412c0a34, + 0x125cdb5e74dc4fd1, + }, + B: fp{ + 0x22ea00000cf89db2, + 0x6ec832df71380aa4, + 0x6e1b94403db5a66e, + 0x75bf3c53a79473ba, + 0x3dd3a569412c0a34, + 0x125cdb5e74dc4fd1, + }, + } + sswuMapZ = fp2{ + A: fp{ + 0x87ebfffffff9555c, + 0x656fffe5da8ffffa, + 0x0fd0749345d33ad2, + 0xd951e663066576f4, + 0xde291a3d41e980d3, + 0x0815664c7dfe040d, + }, + B: fp{ + 0x43f5fffffffcaaae, + 0x32b7fff2ed47fffd, + 0x07e83a49a2e99d69, + 0xeca8f3318332bb7a, + 0xef148d1ea0f4c069, + 0x040ab3263eff0206, + }, + } + sswuMapZInv = fp2{ + A: fp{ + 0xacd0000000011110, + 0x9dd9999dc88ccccd, + 0xb5ca2ac9b76352bf, + 0xf1b574bcf4bc90ce, + 0x42dab41f28a77081, + 0x132fc6ac14cd1e12, + }, + B: fp{ + 0xe396ffffffff2223, + 0x4fbf332fcd0d9998, + 0x0c4bbd3c1aff4cc4, + 0x6b9c91267926ca58, + 0x29ae4da6aef7f496, + 0x10692e942f195791, + }, + } + swwuMapMbDivA = fp2{ + A: fp{ + 0x903c555555474fb3, + 0x5f98cc95ce451105, + 0x9f8e582eefe0fade, + 0xc68946b6aebbd062, + 0x467a4ad10ee6de53, + 0x0e7146f483e23a05, + }, + B: fp{ + 0x29c2aaaaaab85af8, + 0xbf133368e30eeefa, + 0xc7a27a7206cffb45, + 0x9dee04ce44c9425c, + 0x04a15ce53464ce83, + 0x0b8fcaf5b59dac95, + }, + } + + g2IsoXNum = []fp2{ + { + A: fp{ + 0x47f671c71ce05e62, + 0x06dd57071206393e, + 0x7c80cd2af3fd71a2, + 0x048103ea9e6cd062, + 0xc54516acc8d037f6, + 
0x13808f550920ea41, + }, + B: fp{ + 0x47f671c71ce05e62, + 0x06dd57071206393e, + 0x7c80cd2af3fd71a2, + 0x048103ea9e6cd062, + 0xc54516acc8d037f6, + 0x13808f550920ea41, + }, + }, + { + A: fp{ + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + }, + B: fp{ + 0x5fe55555554c71d0, + 0x873fffdd236aaaa3, + 0x6a6b4619b26ef918, + 0x21c2888408874945, + 0x2836cda7028cabc5, + 0x0ac73310a7fd5abd, + }, + }, + { + A: fp{ + 0x0a0c5555555971c3, + 0xdb0c00101f9eaaae, + 0xb1fb2f941d797997, + 0xd3960742ef416e1c, + 0xb70040e2c20556f4, + 0x149d7861e581393b, + }, + B: fp{ + 0xaff2aaaaaaa638e8, + 0x439fffee91b55551, + 0xb535a30cd9377c8c, + 0x90e144420443a4a2, + 0x941b66d3814655e2, + 0x0563998853fead5e, + }, + }, + { + A: fp{ + 0x40aac71c71c725ed, + 0x190955557a84e38e, + 0xd817050a8f41abc3, + 0xd86485d4c87f6fb1, + 0x696eb479f885d059, + 0x198e1a74328002d2, + }, + B: fp{ + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + }, + }, + } + g2IsoXDen = []fp2{ + { + A: fp{ + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + }, + B: fp{ + 0x1f3affffff13ab97, + 0xf25bfc611da3ff3e, + 0xca3757cb3819b208, + 0x3e6427366f8cec18, + 0x03977bc86095b089, + 0x04f69db13f39a952, + }, + }, + { + A: fp{ + 0x447600000027552e, + 0xdcb8009a43480020, + 0x6f7ee9ce4a6e8b59, + 0xb10330b7c0a95bc6, + 0x6140b1fcfb1e54b7, + 0x0381be097f0bb4e1, + }, + B: fp{ + 0x7588ffffffd8557d, + 0x41f3ff646e0bffdf, + 0xf7b1e8d2ac426aca, + 0xb3741acd32dbb6f8, + 0xe9daf5b9482d581f, + 0x167f53e0ba7431b8, + }, + }, + { + A: fp{ + 0x760900000002fffd, + 0xebf4000bc40c0002, + 0x5f48985753c758ba, + 0x77ce585370525745, + 0x5c071a97a256ec6d, + 0x15f65ec3fa80e493, + }, + B: fp{ + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + }, + }, + } + g2IsoYNum = []fp2{ + { + A: fp{ + 0x96d8f684bdfc77be, + 0xb530e4f43b66d0e2, + 0x184a88ff379652fd, + 0x57cb23ecfae804e1, + 0x0fd2e39eada3eba9, + 0x08c8055e31c5d5c3, + }, + B: fp{ + 0x96d8f684bdfc77be, + 0xb530e4f43b66d0e2, + 0x184a88ff379652fd, + 0x57cb23ecfae804e1, + 0x0fd2e39eada3eba9, + 0x08c8055e31c5d5c3, + }, + }, + { + A: fp{ + 0o00000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + }, + B: fp{ + 0xbf0a71c71c91b406, + 0x4d6d55d28b7638fd, + 0x9d82f98e5f205aee, + 0xa27aa27b1d1a18d5, + 0x02c3b2b2d2938e86, + 0x0c7d13420b09807f, + }, + }, + { + A: fp{ + 0xd7f9555555531c74, + 0x21cffff748daaaa8, + 0x5a9ad1866c9bbe46, + 0x4870a2210221d251, + 0x4a0db369c0a32af1, + 0x02b1ccc429ff56af, + }, + B: fp{ + 0xe205aaaaaaac8e37, + 0xfcdc000768795556, + 0x0c96011a8a1537dd, + 0x1c06a963f163406e, + 0x010df44c82a881e6, + 0x174f45260f808feb, + }, + }, + { + A: fp{ + 0xa470bda12f67f35c, + 0xc0fe38e23327b425, + 0xc9d3d0f2c6f0678d, + 0x1c55c9935b5a982e, + 0x27f6c0e2f0746764, + 0x117c5e6e28aa9054, + }, + B: fp{ + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + }, + }, + } + g2IsoYDen = []fp2{ + { + A: fp{ + 0x0162fffffa765adf, + 0x8f7bea480083fb75, + 0x561b3c2259e93611, + 0x11e19fc1a9c875d5, + 0xca713efc00367660, + 0x03c6a03d41da1151, + }, + B: fp{ + 0x0162fffffa765adf, + 0x8f7bea480083fb75, + 0x561b3c2259e93611, + 0x11e19fc1a9c875d5, + 0xca713efc00367660, + 0x03c6a03d41da1151, + 
}, + }, + { + A: fp{ + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + }, + B: fp{ + 0x5db0fffffd3b02c5, + 0xd713f52358ebfdba, + 0x5ea60761a84d161a, + 0xbb2c75a34ea6c44a, + 0x0ac6735921c1119b, + 0x0ee3d913bdacfbf6, + }, + }, + { + A: fp{ + 0x66b10000003affc5, + 0xcb1400e764ec0030, + 0xa73e5eb56fa5d106, + 0x8984c913a0fe09a9, + 0x11e10afb78ad7f13, + 0x05429d0e3e918f52, + }, + B: fp{ + 0x534dffffffc4aae6, + 0x5397ff174c67ffcf, + 0xbff273eb870b251d, + 0xdaf2827152870915, + 0x393a9cbaca9e2dc3, + 0x14be74dbfaee5748, + }, + }, + { + A: fp{ + 0x760900000002fffd, + 0xebf4000bc40c0002, + 0x5f48985753c758ba, + 0x77ce585370525745, + 0x5c071a97a256ec6d, + 0x15f65ec3fa80e493, + }, + B: fp{ + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + }, + }, + } + + // 1 / ((u+1) ^ ((q-1)/3)) + psiCoeffX = fp2{ + A: fp{}, + B: fp{ + 0x890dc9e4867545c3, + 0x2af322533285a5d5, + 0x50880866309b7e2c, + 0xa20d1b8c7e881024, + 0x14e4f04fe2db9068, + 0x14e56d3f1564853a, + }, + } + // 1 / ((u+1) ^ (p-1)/2) + psiCoeffY = fp2{ + A: fp{ + 0x3e2f585da55c9ad1, + 0x4294213d86c18183, + 0x382844c88b623732, + 0x92ad2afd19103e18, + 0x1d794e4fac7cf0b9, + 0x0bd592fc7d825ec8, + }, + B: fp{ + 0x7bcfa7a25aa30fda, + 0xdc17dec12a927e7c, + 0x2f088dd86b4ebef1, + 0xd1ca2087da74d4a7, + 0x2da2596696cebc1d, + 0x0e2b7eedbbfd87d2, + }, + } + + // 1 / 2 ^ ((q-1)/3) + psi2CoeffX = fp2{ + A: fp{ + 0xcd03c9e48671f071, + 0x5dab22461fcda5d2, + 0x587042afd3851b95, + 0x8eb60ebe01bacb9e, + 0x03f97d6e83d050d2, + 0x18f0206554638741, + }, + B: fp{}, + } +) + +// G2 is a point in g2 +type G2 struct { + x, y, z fp2 +} + +// Random creates a random point on the curve +// from the specified reader +func (g2 *G2) Random(reader io.Reader) (*G2, error) { + var seed [native.WideFieldBytes]byte + n, err := reader.Read(seed[:]) + if err != nil { + return nil, errors.Wrap(err, "random could not read from stream") + } + if n != native.WideFieldBytes { + return nil, fmt.Errorf("insufficient bytes read %d when %d are needed", n, WideFieldBytes) + } + dst := []byte("BLS12381G2_XMD:SHA-256_SSWU_RO_") + return g2.Hash(native.EllipticPointHasherSha256(), seed[:], dst), nil +} + +// Hash uses the hasher to map bytes to a valid point +func (g2 *G2) Hash(hash *native.EllipticPointHasher, msg, dst []byte) *G2 { + var u []byte + var u0, u1 fp2 + var r0, r1, q0, q1 G2 + + switch hash.Type() { + case native.XMD: + u = native.ExpandMsgXmd(hash, msg, dst, 256) + case native.XOF: + u = native.ExpandMsgXof(hash, msg, dst, 256) + } + + var buf [96]byte + copy(buf[:64], internal.ReverseScalarBytes(u[:64])) + u0.A.SetBytesWide(&buf) + copy(buf[:64], internal.ReverseScalarBytes(u[64:128])) + u0.B.SetBytesWide(&buf) + copy(buf[:64], internal.ReverseScalarBytes(u[128:192])) + u1.A.SetBytesWide(&buf) + copy(buf[:64], internal.ReverseScalarBytes(u[192:])) + u1.B.SetBytesWide(&buf) + + r0.sswu(&u0) + r1.sswu(&u1) + q0.isogenyMap(&r0) + q1.isogenyMap(&r1) + g2.Add(&q0, &q1) + return g2.ClearCofactor(g2) +} + +// Identity returns the identity point +func (g2 *G2) Identity() *G2 { + g2.x.SetZero() + g2.y.SetOne() + g2.z.SetZero() + return g2 +} + +// Generator returns the base point +func (g2 *G2) Generator() *G2 { + g2.x.Set(&g2x) + g2.y.Set(&g2y) + g2.z.SetOne() + return g2 +} + +// IsIdentity returns true if this point is at infinity +func (g2 *G2) IsIdentity() int { + return g2.z.IsZero() +} + +// IsOnCurve determines if this point 
represents a valid curve point +func (g2 *G2) IsOnCurve() int { + // Y^2 Z = X^3 + b Z^3 + var lhs, rhs, t fp2 + lhs.Square(&g2.y) + lhs.Mul(&lhs, &g2.z) + + rhs.Square(&g2.x) + rhs.Mul(&rhs, &g2.x) + t.Square(&g2.z) + t.Mul(&t, &g2.z) + t.Mul(&t, &curveG2B) + rhs.Add(&rhs, &t) + + return lhs.Equal(&rhs) +} + +// InCorrectSubgroup returns 1 if the point is torsion free, 0 otherwise +func (g2 *G2) InCorrectSubgroup() int { + var t G2 + t.multiply(g2, &fqModulusBytes) + return t.IsIdentity() +} + +// Add adds this point to another point. +func (g2 *G2) Add(arg1, arg2 *G2) *G2 { + // Algorithm 7, https://eprint.iacr.org/2015/1060.pdf + var t0, t1, t2, t3, t4, x3, y3, z3 fp2 + + t0.Mul(&arg1.x, &arg2.x) + t1.Mul(&arg1.y, &arg2.y) + t2.Mul(&arg1.z, &arg2.z) + t3.Add(&arg1.x, &arg1.y) + t4.Add(&arg2.x, &arg2.y) + t3.Mul(&t3, &t4) + t4.Add(&t0, &t1) + t3.Sub(&t3, &t4) + t4.Add(&arg1.y, &arg1.z) + x3.Add(&arg2.y, &arg2.z) + t4.Mul(&t4, &x3) + x3.Add(&t1, &t2) + t4.Sub(&t4, &x3) + x3.Add(&arg1.x, &arg1.z) + y3.Add(&arg2.x, &arg2.z) + x3.Mul(&x3, &y3) + y3.Add(&t0, &t2) + y3.Sub(&x3, &y3) + x3.Double(&t0) + t0.Add(&t0, &x3) + t2.MulBy3b(&t2) + z3.Add(&t1, &t2) + t1.Sub(&t1, &t2) + y3.MulBy3b(&y3) + x3.Mul(&t4, &y3) + t2.Mul(&t3, &t1) + x3.Sub(&t2, &x3) + y3.Mul(&y3, &t0) + t1.Mul(&t1, &z3) + y3.Add(&t1, &y3) + t0.Mul(&t0, &t3) + z3.Mul(&z3, &t4) + z3.Add(&z3, &t0) + + g2.x.Set(&x3) + g2.y.Set(&y3) + g2.z.Set(&z3) + return g2 +} + +// Sub subtracts the two points +func (g2 *G2) Sub(arg1, arg2 *G2) *G2 { + var t G2 + t.Neg(arg2) + return g2.Add(arg1, &t) +} + +// Double this point +func (g2 *G2) Double(a *G2) *G2 { + // Algorithm 9, https://eprint.iacr.org/2015/1060.pdf + var t0, t1, t2, x3, y3, z3 fp2 + + t0.Square(&a.y) + z3.Double(&t0) + z3.Double(&z3) + z3.Double(&z3) + t1.Mul(&a.y, &a.z) + t2.Square(&a.z) + t2.MulBy3b(&t2) + x3.Mul(&t2, &z3) + y3.Add(&t0, &t2) + z3.Mul(&t1, &z3) + t1.Double(&t2) + t2.Add(&t2, &t1) + t0.Sub(&t0, &t2) + y3.Mul(&t0, &y3) + y3.Add(&y3, &x3) + t1.Mul(&a.x, &a.y) + x3.Mul(&t0, &t1) + x3.Double(&x3) + + e := a.IsIdentity() + g2.x.CMove(&x3, t0.SetZero(), e) + g2.z.CMove(&z3, &t0, e) + g2.y.CMove(&y3, t0.SetOne(), e) + return g2 +} + +// Mul multiplies this point by the input scalar +func (g2 *G2) Mul(a *G2, s *native.Field) *G2 { + bytes := s.Bytes() + return g2.multiply(a, &bytes) +} + +func (g2 *G2) multiply(a *G2, bytes *[native.FieldBytes]byte) *G2 { + var p G2 + precomputed := [16]*G2{} + precomputed[0] = new(G2).Identity() + precomputed[1] = new(G2).Set(a) + for i := 2; i < 16; i += 2 { + precomputed[i] = new(G2).Double(precomputed[i>>1]) + precomputed[i+1] = new(G2).Add(precomputed[i], a) + } + p.Identity() + for i := 0; i < 256; i += 4 { + // Brouwer / windowing method. window size of 4. + for j := 0; j < 4; j++ { + p.Double(&p) + } + window := bytes[32-1-i>>3] >> (4 - i&0x04) & 0x0F + p.Add(&p, precomputed[window]) + } + return g2.Set(&p) +} + +// MulByX multiplies by BLS X using double and add +func (g2 *G2) MulByX(a *G2) *G2 { + // Skip first bit since its always zero + var s, t, r G2 + r.Identity() + t.Set(a) + + for x := paramX >> 1; x != 0; x >>= 1 { + t.Double(&t) + s.Add(&r, &t) + r.CMove(&r, &s, int(x&1)) + } + // Since BLS_X is negative, flip the sign + return g2.Neg(&r) +} + +// ClearCofactor using [Budroni-Pintore](https://ia.cr/2017/419). +// This is equivalent to multiplying by h_{eff} = 3(z^2 - 1) * h_2 +// where h_2 is the cofactor of G_2 and z is the parameter of BLS12-381. 
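Spelled out, the operation sequence in the function below evaluates the Budroni-Pintore decomposition referenced above, with x the (negative) BLS parameter and psi the untwist-Frobenius-twist endomorphism:

//   t1 = [x]P                               (MulByX)
//   t2 = psi(P)
//   pt = psi^2([2]P)
//   t3 = [x]([x]P + psi(P)) = [x^2]P + [x]psi(P)
//   result = pt + t3 - t1 - t2 - P
//          = [x^2 - x - 1]P + [x - 1]psi(P) + psi^2([2]P)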
+func (g2 *G2) ClearCofactor(a *G2) *G2 { + var t1, t2, t3, pt G2 + + t1.MulByX(a) + t2.psi(a) + + pt.Double(a) + pt.psi2(&pt) + + t3.Add(&t1, &t2) + t3.MulByX(&t3) + + pt.Add(&pt, &t3) + pt.Sub(&pt, &t1) + pt.Sub(&pt, &t2) + pt.Sub(&pt, a) + return g2.Set(&pt) +} + +// Neg negates this point +func (g2 *G2) Neg(a *G2) *G2 { + g2.Set(a) + g2.y.CNeg(&a.y, -(a.IsIdentity() - 1)) + return g2 +} + +// Set copies a into g2 +func (g2 *G2) Set(a *G2) *G2 { + g2.x.Set(&a.x) + g2.y.Set(&a.y) + g2.z.Set(&a.z) + return g2 +} + +// BigInt returns the x and y as big.Ints in affine +func (g2 *G2) BigInt() (x, y *big.Int) { + var t G2 + out := t.ToUncompressed() + x = new(big.Int).SetBytes(out[:WideFieldBytes]) + y = new(big.Int).SetBytes(out[WideFieldBytes:]) + return x, y +} + +// SetBigInt creates a point from affine x, y +// and returns the point if it is on the curve +func (g2 *G2) SetBigInt(x, y *big.Int) (*G2, error) { + var tt [DoubleWideFieldBytes]byte + + if len(x.Bytes()) == 0 && len(y.Bytes()) == 0 { + return g2.Identity(), nil + } + x.FillBytes(tt[:WideFieldBytes]) + y.FillBytes(tt[WideFieldBytes:]) + + return g2.FromUncompressed(&tt) +} + +// ToCompressed serializes this element into compressed form. +func (g2 *G2) ToCompressed() [WideFieldBytes]byte { + var out [WideFieldBytes]byte + var t G2 + t.ToAffine(g2) + xABytes := t.x.A.Bytes() + xBBytes := t.x.B.Bytes() + copy(out[:FieldBytes], internal.ReverseScalarBytes(xBBytes[:])) + copy(out[FieldBytes:], internal.ReverseScalarBytes(xABytes[:])) + isInfinity := byte(g2.IsIdentity()) + // Compressed flag + out[0] |= 1 << 7 + // Is infinity + out[0] |= (1 << 6) & -isInfinity + // Sign of y only set if not infinity + out[0] |= (byte(t.y.LexicographicallyLargest()) << 5) & (isInfinity - 1) + return out +} + +// FromCompressed deserializes this element from compressed form. +func (g2 *G2) FromCompressed(input *[WideFieldBytes]byte) (*G2, error) { + var xFp, yFp fp2 + var xA, xB [FieldBytes]byte + var p G2 + compressedFlag := int((input[0] >> 7) & 1) + infinityFlag := int((input[0] >> 6) & 1) + sortFlag := int((input[0] >> 5) & 1) + + if compressedFlag != 1 { + return nil, errors.New("compressed flag must be set") + } + + if infinityFlag == 1 { + return g2.Identity(), nil + } + + copy(xB[:], internal.ReverseScalarBytes(input[:FieldBytes])) + copy(xA[:], internal.ReverseScalarBytes(input[FieldBytes:])) + // Mask away the flag bits + xB[FieldBytes-1] &= 0x1F + _, validA := xFp.A.SetBytes(&xA) + _, validB := xFp.B.SetBytes(&xB) + + if validA&validB != 1 { + return nil, errors.New("invalid bytes - not in field") + } + + // Recover a y-coordinate given x by y = sqrt(x^3 + 4) + yFp.Square(&xFp) + yFp.Mul(&yFp, &xFp) + yFp.Add(&yFp, &curveG2B) + + _, wasSquare := yFp.Sqrt(&yFp) + if wasSquare != 1 { + return nil, errors.New("point is not on the curve") + } + + yFp.CNeg(&yFp, yFp.LexicographicallyLargest()^sortFlag) + p.x.Set(&xFp) + p.y.Set(&yFp) + p.z.SetOne() + if p.InCorrectSubgroup() == 0 { + return nil, errors.New("point is not in correct subgroup") + } + return g2.Set(&p), nil +} + +// ToUncompressed serializes this element into uncompressed form. 
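A round-trip sketch for the 96-byte compressed form implemented above (x.B followed by x.A, flag bits in the top of the first byte); the function is illustrative and not part of this commit:

func exampleG2CompressedRoundTrip() bool {
	p := new(G2).Generator()
	buf := p.ToCompressed() // 96 bytes: x.B then x.A, with the flag bits in buf[0]
	q, err := new(G2).FromCompressed(&buf)
	return err == nil && q.Equal(p) == 1 // expected to hold for any valid subgroup point
}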
+func (g2 *G2) ToUncompressed() [DoubleWideFieldBytes]byte { + var out [DoubleWideFieldBytes]byte + var t G2 + t.ToAffine(g2) + bytes := t.x.B.Bytes() + copy(out[:FieldBytes], internal.ReverseScalarBytes(bytes[:])) + bytes = t.x.A.Bytes() + copy(out[FieldBytes:WideFieldBytes], internal.ReverseScalarBytes(bytes[:])) + bytes = t.y.B.Bytes() + copy(out[WideFieldBytes:WideFieldBytes+FieldBytes], internal.ReverseScalarBytes(bytes[:])) + bytes = t.y.A.Bytes() + copy(out[WideFieldBytes+FieldBytes:], internal.ReverseScalarBytes(bytes[:])) + isInfinity := byte(g2.IsIdentity()) + out[0] |= (1 << 6) & -isInfinity + return out +} + +// FromUncompressed deserializes this element from uncompressed form. +func (g2 *G2) FromUncompressed(input *[DoubleWideFieldBytes]byte) (*G2, error) { + var a, b fp + var t [FieldBytes]byte + var p G2 + infinityFlag := int((input[0] >> 6) & 1) + + if infinityFlag == 1 { + return g2.Identity(), nil + } + + copy(t[:], internal.ReverseScalarBytes(input[:FieldBytes])) + // Mask away top bits + t[FieldBytes-1] &= 0x1F + + _, valid := b.SetBytes(&t) + if valid == 0 { + return nil, errors.New("invalid bytes - x.B not in field") + } + copy(t[:], internal.ReverseScalarBytes(input[FieldBytes:WideFieldBytes])) + _, valid = a.SetBytes(&t) + if valid == 0 { + return nil, errors.New("invalid bytes - x.A not in field") + } + + p.x.B.Set(&b) + p.x.A.Set(&a) + + copy(t[:], internal.ReverseScalarBytes(input[WideFieldBytes:WideFieldBytes+FieldBytes])) + _, valid = b.SetBytes(&t) + if valid == 0 { + return nil, errors.New("invalid bytes - y.B not in field") + } + copy(t[:], internal.ReverseScalarBytes(input[FieldBytes+WideFieldBytes:])) + _, valid = a.SetBytes(&t) + if valid == 0 { + return nil, errors.New("invalid bytes - y.A not in field") + } + + p.y.B.Set(&b) + p.y.A.Set(&a) + p.z.SetOne() + + if p.IsOnCurve() == 0 { + return nil, errors.New("point is not on the curve") + } + if p.InCorrectSubgroup() == 0 { + return nil, errors.New("point is not in correct subgroup") + } + return g2.Set(&p), nil +} + +// ToAffine converts the point into affine coordinates +func (g2 *G2) ToAffine(a *G2) *G2 { + var wasInverted int + var zero, x, y, z fp2 + _, wasInverted = z.Invert(&a.z) + x.Mul(&a.x, &z) + y.Mul(&a.y, &z) + + g2.x.CMove(&zero, &x, wasInverted) + g2.y.CMove(&zero, &y, wasInverted) + g2.z.CMove(&zero, z.SetOne(), wasInverted) + return g2 +} + +// GetX returns the affine X coordinate +func (g2 *G2) GetX() *fp2 { + var t G2 + t.ToAffine(g2) + return &t.x +} + +// GetY returns the affine Y coordinate +func (g2 *G2) GetY() *fp2 { + var t G2 + t.ToAffine(g2) + return &t.y +} + +// Equal returns 1 if the two points are equal 0 otherwise. +func (g2 *G2) Equal(rhs *G2) int { + var x1, x2, y1, y2 fp2 + var e1, e2 int + + // This technique avoids inversions + x1.Mul(&g2.x, &rhs.z) + x2.Mul(&rhs.x, &g2.z) + + y1.Mul(&g2.y, &rhs.z) + y2.Mul(&rhs.y, &g2.z) + + e1 = g2.z.IsZero() + e2 = rhs.z.IsZero() + + // Both at infinity or coordinates are the same + return (e1 & e2) | (^e1 & ^e2)&x1.Equal(&x2)&y1.Equal(&y2) +} + +// CMove sets g2 = arg1 if choice == 0 and g2 = arg2 if choice == 1 +func (g2 *G2) CMove(arg1, arg2 *G2, choice int) *G2 { + g2.x.CMove(&arg1.x, &arg2.x, choice) + g2.y.CMove(&arg1.y, &arg2.y, choice) + g2.z.CMove(&arg1.z, &arg2.z, choice) + return g2 +} + +// SumOfProducts computes the multi-exponentiation for the specified +// points and scalars and stores the result in `g2`. +// Returns an error if the lengths of the arguments is not equal. 
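Usage-wise, the multi-exponentiation below should agree with performing each scalar multiplication separately and adding the results; a sketch, with names assumed rather than taken from this diff:

func exampleG2SumOfProducts(p1, p2 *G2, s1, s2 *native.Field) bool {
	naive := new(G2).Mul(p1, s1)
	naive.Add(naive, new(G2).Mul(p2, s2))
	fast, err := new(G2).SumOfProducts([]*G2{p1, p2}, []*native.Field{s1, s2})
	return err == nil && fast.Equal(naive) == 1
}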
+func (g2 *G2) SumOfProducts(points []*G2, scalars []*native.Field) (*G2, error) {
+ const Upper = 256
+ const W = 4
+ const Windows = Upper / W // careful--use ceiling division in case this doesn't divide evenly
+ var sum G2
+ if len(points) != len(scalars) {
+ return nil, fmt.Errorf("length mismatch")
+ }
+
+ bucketSize := 1 << W
+ windows := make([]G2, Windows)
+ bytes := make([][32]byte, len(scalars))
+ buckets := make([]G2, bucketSize)
+ for i := 0; i < len(windows); i++ {
+ windows[i].Identity()
+ }
+
+ for i, scalar := range scalars {
+ bytes[i] = scalar.Bytes()
+ }
+
+ for j := 0; j < len(windows); j++ {
+ for i := 0; i < bucketSize; i++ {
+ buckets[i].Identity()
+ }
+
+ for i := 0; i < len(scalars); i++ {
+ // j*W to get the nibble
+ // >> 3 to convert to byte, / 8
+ // (W * j & W) gets the nibble, mod W
+ // 1 << W - 1 to get the offset
+ index := bytes[i][j*W>>3] >> (W * j & W) & (1<<W - 1)
+ buckets[index].Add(&buckets[index], points[i])
+ }
+
+ sum.Identity()
+
+ for i := bucketSize - 1; i > 0; i-- {
+ sum.Add(&sum, &buckets[i])
+ windows[j].Add(&windows[j], &sum)
+ }
+ }
+
+ g2.Identity()
+ for i := len(windows) - 1; i >= 0; i-- {
+ for j := 0; j < W; j++ {
+ g2.Double(g2)
+ }
+
+ g2.Add(g2, &windows[i])
+ }
+ return g2, nil
+}
+
+func (g2 *G2) psi(a *G2) *G2 {
+ g2.x.FrobeniusMap(&a.x)
+ g2.y.FrobeniusMap(&a.y)
+ // z = frobenius(z)
+ g2.z.FrobeniusMap(&a.z)
+
+ // x = frobenius(x)/((u+1)^((p-1)/3))
+ g2.x.Mul(&g2.x, &psiCoeffX)
+ // y = frobenius(y)/(u+1)^((p-1)/2)
+ g2.y.Mul(&g2.y, &psiCoeffY)
+
+ return g2
+}
+
+func (g2 *G2) psi2(a *G2) *G2 {
+ // x = frobenius^2(x)/2^((p-1)/3); note that q^2 is the order of the field.
+ g2.x.Mul(&a.x, &psi2CoeffX)
+ // y = -frobenius^2(y); note that q^2 is the order of the field.
+ g2.y.Neg(&a.y)
+ g2.z.Set(&a.z)
+ return g2
+}
+
+func (g2 *G2) sswu(u *fp2) *G2 {
+ /// simplified swu map for q = 9 mod 16 where AB == 0
+ //
+ var tv1, tv2, x1, x2, gx1, gx2, x, y, y2, t fp2
+
+ tv1.Square(u)
+ tv1.Mul(&tv1, &sswuMapZ)
+
+ tv2.Square(&tv1)
+
+ x1.Add(&tv1, &tv2)
+ x1.Invert(&x1)
+ x1.Add(&x1, (&fp2{}).SetOne())
+ x1.CMove(&x1, &sswuMapZInv, x1.IsZero())
+ x1.Mul(&x1, &swwuMapMbDivA)
+
+ gx1.Square(&x1)
+ gx1.Add(&gx1, &sswuMapA)
+ gx1.Mul(&gx1, &x1)
+ gx1.Add(&gx1, &sswuMapB)
+
+ x2.Mul(&tv1, &x1)
+
+ tv2.Mul(&tv2, &tv1)
+
+ gx2.Mul(&gx1, &tv2)
+
+ _, e2 := t.Sqrt(&gx1)
+
+ x.CMove(&x2, &x1, e2)
+ y2.CMove(&gx2, &gx1, e2)
+
+ y.Sqrt(&y2)
+
+ y.CNeg(&y, u.Sgn0()^y.Sgn0())
+ g2.x.Set(&x)
+ g2.y.Set(&y)
+ g2.z.SetOne()
+ return g2
+}
+
+func (g2 *G2) isogenyMap(a *G2) *G2 {
+ const Degree = 4
+ var xs [Degree]fp2
+ xs[0].SetOne()
+ xs[1].Set(&a.x)
+ xs[2].Square(&a.x)
+ for i := 3; i < Degree; i++ {
+ xs[i].Mul(&xs[i-1], &a.x)
+ }
+
+ xNum := computeKFp2(xs[:], g2IsoXNum)
+ xDen := computeKFp2(xs[:], g2IsoXDen)
+ yNum := computeKFp2(xs[:], g2IsoYNum)
+ yDen := computeKFp2(xs[:], g2IsoYDen)
+
+ g2.x.Invert(&xDen)
+ g2.x.Mul(&g2.x, &xNum)
+
+ g2.y.Invert(&yDen)
+ g2.y.Mul(&g2.y, &yNum)
+ g2.y.Mul(&g2.y, &a.y)
+ g2.z.SetOne()
+ return g2
+}
+
+func computeKFp2(xxs []fp2, k []fp2) fp2 {
+ var xx, t fp2
+ for i := range k {
+ xx.Add(&xx, t.Mul(&xxs[i], &k[i]))
+ }
+ return xx
+}
diff --git a/core/curves/native/bls12381/g2_test.go b/core/curves/native/bls12381/g2_test.go
new file mode 100644
index 0000000..67c7332
--- /dev/null
+++ b/core/curves/native/bls12381/g2_test.go
@@ -0,0 +1,569 @@
+package bls12381
+
+import (
+ crand "crypto/rand"
+ "encoding/hex"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+
+ "github.com/sonr-io/sonr/crypto/core/curves/native"
+)
+
+func TestG2IsOnCurve(t *testing.T) {
+ require.Equal(t, 1,
new(G2).Identity().IsOnCurve()) + require.Equal(t, 1, new(G2).Generator().IsOnCurve()) + + z := fp2{ + A: fp{ + 0xba7a_fa1f_9a6f_e250, + 0xfa0f_5b59_5eaf_e731, + 0x3bdc_4776_94c3_06e7, + 0x2149_be4b_3949_fa24, + 0x64aa_6e06_49b2_078c, + 0x12b1_08ac_3364_3c3e, + }, + B: fp{ + 0x1253_25df_3d35_b5a8, + 0xdc46_9ef5_555d_7fe3, + 0x02d7_16d2_4431_06a9, + 0x05a1_db59_a6ff_37d0, + 0x7cf7_784e_5300_bb8f, + 0x16a8_8922_c7a5_e844, + }, + } + + test := new(G2).Generator() + test.x.Mul(&test.x, &z) + test.y.Mul(&test.y, &z) + test.z.Set(&z) + + require.Equal(t, 1, test.IsOnCurve()) + + test.x.Set(&z) + require.Equal(t, 0, test.IsOnCurve()) +} + +func TestG2Equal(t *testing.T) { + a := new(G2).Generator() + b := new(G2).Identity() + + require.Equal(t, 1, a.Equal(a)) + require.Equal(t, 0, a.Equal(b)) + require.Equal(t, 1, b.Equal(b)) +} + +func TestG2ToAffine(t *testing.T) { + a := new(G2).Generator() + + z := fp2{ + A: fp{ + 0xba7afa1f9a6fe250, + 0xfa0f5b595eafe731, + 0x3bdc477694c306e7, + 0x2149be4b3949fa24, + 0x64aa6e0649b2078c, + 0x12b108ac33643c3e, + }, + B: fp{ + 0x125325df3d35b5a8, + 0xdc469ef5555d7fe3, + 0x02d716d2443106a9, + 0x05a1db59a6ff37d0, + 0x7cf7784e5300bb8f, + 0x16a88922c7a5e844, + }, + } + + a.x.Mul(&a.x, &z) + a.y.Mul(&a.y, &z) + a.z.Set(&z) + + require.Equal(t, 1, a.ToAffine(a).Equal(new(G2).Generator())) +} + +func TestG2Double(t *testing.T) { + a := new(G2).Identity() + require.Equal(t, 1, a.Double(a).IsIdentity()) + + a.Generator() + a.Double(a) + e := G2{ + x: fp2{ + A: fp{ + 0xe9d9e2da9620f98b, + 0x54f1199346b97f36, + 0x3db3b820376bed27, + 0xcfdb31c9b0b64f4c, + 0x41d7c12786354493, + 0x05710794c255c064, + }, + B: fp{ + 0xd6c1d3ca6ea0d06e, + 0xda0cbd905595489f, + 0x4f5352d43479221d, + 0x8ade5d736f8c97e0, + 0x48cc8433925ef70e, + 0x08d7ea71ea91ef81, + }, + }, + y: fp2{ + A: fp{ + 0x15ba26eb4b0d186f, + 0x0d086d64b7e9e01e, + 0xc8b848dd652f4c78, + 0xeecf46a6123bae4f, + 0x255e8dd8b6dc812a, + 0x164142af21dcf93f, + }, + B: fp{ + 0xf9b4a1a895984db4, + 0xd417b114cccff748, + 0x6856301fc89f086e, + 0x41c777878931e3da, + 0x3556b155066a2105, + 0x00acf7d325cb89cf, + }, + }, + z: *((&fp2{}).SetOne()), + } + require.Equal(t, 1, e.Equal(a)) +} + +func TestG2Add(t *testing.T) { + a := new(G2).Identity() + b := new(G2).Identity() + c := new(G2).Add(a, b) + require.Equal(t, 1, c.IsIdentity()) + b.Generator() + c.Add(a, b) + require.Equal(t, 1, c.Equal(b)) + + a.Generator() + a.Double(a) + a.Double(a) + b.Double(b) + c.Add(a, b) + + d := new(G2).Generator() + e := new(G2).Generator() + for i := 0; i < 5; i++ { + e.Add(e, d) + } + require.Equal(t, 1, e.Equal(c)) + + // Degenerate case + beta := fp2{ + A: fp{ + 0xcd03c9e48671f071, + 0x5dab22461fcda5d2, + 0x587042afd3851b95, + 0x8eb60ebe01bacb9e, + 0x03f97d6e83d050d2, + 0x18f0206554638741, + }, + B: fp{}, + } + beta.Square(&beta) + b.x.Mul(&a.x, &beta) + b.y.Neg(&a.y) + b.z.Set(&a.z) + require.Equal(t, 1, b.IsOnCurve()) + + c.Add(a, b) + + e.x.Set(&fp2{ + A: fp{ + 0x705abc799ca773d3, + 0xfe132292c1d4bf08, + 0xf37ece3e07b2b466, + 0x887e1c43f447e301, + 0x1e0970d033bc77e8, + 0x1985c81e20a693f2, + }, + B: fp{ + 0x1d79b25db36ab924, + 0x23948e4d529639d3, + 0x471ba7fb0d006297, + 0x2c36d4b4465dc4c0, + 0x82bbc3cfec67f538, + 0x051d2728b67bf952, + }, + }) + e.y.Set(&fp2{ + A: fp{ + 0x41b1bbf6576c0abf, + 0xb6cc93713f7a0f9a, + 0x6b65b43e48f3f01f, + 0xfb7a4cfcaf81be4f, + 0x3e32dadc6ec22cb6, + 0x0bb0fc49d79807e3, + }, + B: fp{ + 0x7d1397788f5f2ddf, + 0xab2907144ff0d8e8, + 0x5b7573e0cdb91f92, + 0x4cb8932dd31daf28, + 0x62bbfac6db052a54, + 0x11f95c16d14c3bbe, + }, + }) + 
e.z.SetOne() + require.Equal(t, 1, e.Equal(c)) +} + +func TestG2Neg(t *testing.T) { + a := new(G2).Generator() + b := new(G2).Neg(a) + require.Equal(t, 1, new(G2).Add(a, b).IsIdentity()) + require.Equal(t, 1, new(G2).Sub(a, b.Neg(b)).IsIdentity()) + a.Identity() + require.Equal(t, 1, a.Neg(a).IsIdentity()) +} + +func TestG2Mul(t *testing.T) { + g := new(G2).Generator() + a := Bls12381FqNew().SetRaw(&[native.FieldLimbs]uint64{ + 0x2b56_8297_a56d_a71c, + 0xd8c3_9ecb_0ef3_75d1, + 0x435c_38da_67bf_bf96, + 0x8088_a050_26b6_59b2, + }) + b := Bls12381FqNew().SetRaw(&[native.FieldLimbs]uint64{ + 0x785f_dd9b_26ef_8b85, + 0xc997_f258_3769_5c18, + 0x4c8d_bc39_e7b7_56c1, + 0x70d9_b6cc_6d87_df20, + }) + c := Bls12381FqNew().Mul(a, b) + + t1 := new(G2).Generator() + t1.Mul(t1, a) + t1.Mul(t1, b) + require.Equal(t, 1, t1.Equal(g.Mul(g, c))) +} + +func TestG2InCorrectSubgroup(t *testing.T) { + a := G2{ + x: fp2{ + A: fp{ + 0x89f550c813db6431, + 0xa50be8c456cd8a1a, + 0xa45b374114cae851, + 0xbb6190f5bf7fff63, + 0x970ca02c3ba80bc7, + 0x02b85d24e840fbac, + }, + B: fp{ + 0x6888bc53d70716dc, + 0x3dea6b4117682d70, + 0xd8f5f930500ca354, + 0x6b5ecb6556f5c155, + 0xc96bef0434778ab0, + 0x05081505515006ad, + }, + }, + y: fp2{ + A: fp{ + 0x3cf1ea0d434b0f40, + 0x1a0dc610e603e333, + 0x7f89956160c72fa0, + 0x25ee03decf6431c5, + 0xeee8e206ec0fe137, + 0x097592b226dfef28, + }, + B: fp{ + 0x71e8bb5f29247367, + 0xa5fe049e211831ce, + 0x0ce6b354502a3896, + 0x93b012000997314e, + 0x6759f3b6aa5b42ac, + 0x156944c4dfe92bbb, + }, + }, + z: *(&fp2{}).SetOne(), + } + require.Equal(t, 0, a.InCorrectSubgroup()) + + require.Equal(t, 1, new(G2).Identity().InCorrectSubgroup()) + require.Equal(t, 1, new(G2).Generator().InCorrectSubgroup()) +} + +func TestG2MulByX(t *testing.T) { + // multiplying by `x` a point in G2 is the same as multiplying by + // the equivalent scalar. 
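// paramX holds the absolute value of the BLS parameter; the parameter itself
// is negative, which is why the scalar is negated here before being compared
// against MulByX (MulByX folds the sign flip into its final Neg).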
+ x := Bls12381FqNew().SetUint64(paramX) + x.Neg(x) + t1 := new(G2).Generator() + t1.MulByX(t1) + t2 := new(G2).Generator() + t2.Mul(t2, x) + require.Equal(t, 1, t1.Equal(t2)) + + point := new(G2).Generator() + a := Bls12381FqNew().SetUint64(42) + point.Mul(point, a) + + t1.MulByX(point) + t2.Mul(point, x) + require.Equal(t, 1, t1.Equal(t2)) +} + +func TestG2Psi(t *testing.T) { + generator := new(G2).Generator() + + z := fp2{ + A: fp{ + 0x0ef2ddffab187c0a, + 0x2424522b7d5ecbfc, + 0xc6f341a3398054f4, + 0x5523ddf409502df0, + 0xd55c0b5a88e0dd97, + 0x066428d704923e52, + }, + B: fp{ + 0x538bbe0c95b4878d, + 0xad04a50379522881, + 0x6d5c05bf5c12fb64, + 0x4ce4a069a2d34787, + 0x59ea6c8d0dffaeaf, + 0x0d42a083a75bd6f3, + }, + } + + // `point` is a random point in the curve + point := G2{ + x: fp2{ + A: fp{ + 0xee4c8cb7c047eaf2, + 0x44ca22eee036b604, + 0x33b3affb2aefe101, + 0x15d3e45bbafaeb02, + 0x7bfc2154cd7419a4, + 0x0a2d0c2b756e5edc, + }, + B: fp{ + 0xfc224361029a8777, + 0x4cbf2baab8740924, + 0xc5008c6ec6592c89, + 0xecc2c57b472a9c2d, + 0x8613eafd9d81ffb1, + 0x10fe54daa2d3d495, + }, + }, + y: fp2{ + A: fp{ + 0x7de7edc43953b75c, + 0x58be1d2de35e87dc, + 0x5731d30b0e337b40, + 0xbe93b60cfeaae4c9, + 0x8b22c203764bedca, + 0x01616c8d1033b771, + }, + B: fp{ + 0xea126fe476b5733b, + 0x85cee68b5dae1652, + 0x98247779f7272b04, + 0xa649c8b468c6e808, + 0xb5b9a62dff0c4e45, + 0x1555b67fc7bbe73d, + }, + }, + z: *(&fp2{}).Set(&z), + } + point.x.Mul(&point.x, &z) + point.z.Square(&point.z) + point.z.Mul(&point.z, &z) + require.Equal(t, 1, point.IsOnCurve()) + + // psi2(P) = psi(psi(P)) + tv1 := new(G2).psi2(generator) + tv2 := new(G2).psi(generator) + tv2.psi(tv2) + require.Equal(t, 1, tv1.Equal(tv2)) + + tv1.psi2(&point) + tv2.psi(&point) + tv2.psi(tv2) + require.Equal(t, 1, tv1.Equal(tv2)) + + // psi(P) is a morphism + tv1.Double(generator) + tv1.psi(tv1) + tv2.psi(generator) + tv2.Double(tv2) + require.Equal(t, 1, tv1.Equal(tv2)) + + tv1.psi(&point) + tv2.psi(generator) + tv1.Add(tv1, tv2) + + tv2.Set(&point) + tv3 := new(G2).Generator() + tv2.Add(tv2, tv3) + tv2.psi(tv2) + require.Equal(t, 1, tv1.Equal(tv2)) +} + +func TestG2ClearCofactor(t *testing.T) { + z := fp2{ + A: fp{ + 0x0ef2ddffab187c0a, + 0x2424522b7d5ecbfc, + 0xc6f341a3398054f4, + 0x5523ddf409502df0, + 0xd55c0b5a88e0dd97, + 0x066428d704923e52, + }, + B: fp{ + 0x538bbe0c95b4878d, + 0xad04a50379522881, + 0x6d5c05bf5c12fb64, + 0x4ce4a069a2d34787, + 0x59ea6c8d0dffaeaf, + 0x0d42a083a75bd6f3, + }, + } + + // `point` is a random point in the curve + point := G2{ + x: fp2{ + A: fp{ + 0xee4c8cb7c047eaf2, + 0x44ca22eee036b604, + 0x33b3affb2aefe101, + 0x15d3e45bbafaeb02, + 0x7bfc2154cd7419a4, + 0x0a2d0c2b756e5edc, + }, + B: fp{ + 0xfc224361029a8777, + 0x4cbf2baab8740924, + 0xc5008c6ec6592c89, + 0xecc2c57b472a9c2d, + 0x8613eafd9d81ffb1, + 0x10fe54daa2d3d495, + }, + }, + y: fp2{ + A: fp{ + 0x7de7edc43953b75c, + 0x58be1d2de35e87dc, + 0x5731d30b0e337b40, + 0xbe93b60cfeaae4c9, + 0x8b22c203764bedca, + 0x01616c8d1033b771, + }, + B: fp{ + 0xea126fe476b5733b, + 0x85cee68b5dae1652, + 0x98247779f7272b04, + 0xa649c8b468c6e808, + 0xb5b9a62dff0c4e45, + 0x1555b67fc7bbe73d, + }, + }, + z: fp2{}, + } + point.x.Mul(&point.x, &z) + point.z.Square(&z) + point.z.Mul(&point.z, &z) + + require.Equal(t, 1, point.IsOnCurve()) + require.Equal(t, 0, point.InCorrectSubgroup()) + + clearedPoint := new(G2).ClearCofactor(&point) + + require.Equal(t, 1, clearedPoint.IsOnCurve()) + require.Equal(t, 1, clearedPoint.InCorrectSubgroup()) + + // the generator (and the identity) are always on the curve, 
+ // even after clearing the cofactor + generator := new(G2).Generator() + generator.ClearCofactor(generator) + require.Equal(t, 1, generator.InCorrectSubgroup()) + id := new(G2).Identity() + id.ClearCofactor(id) + require.Equal(t, 1, id.InCorrectSubgroup()) + + // test the effect on q-torsion points multiplying by h_eff modulo q + // h_eff % q = 0x2b116900400069009a40200040001ffff + hEffModq := [native.FieldBytes]byte{ + 0xff, 0xff, 0x01, 0x00, 0x04, 0x00, 0x02, 0xa4, 0x09, 0x90, 0x06, 0x00, 0x04, 0x90, 0x16, + 0xb1, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, + } + generator.Generator() + generator.multiply(generator, &hEffModq) + point.Generator().ClearCofactor(&point) + require.Equal(t, 1, point.Equal(generator)) + point.ClearCofactor(clearedPoint) + require.Equal(t, 1, point.Equal(clearedPoint.multiply(clearedPoint, &hEffModq))) +} + +func TestG2Hash(t *testing.T) { + dst := []byte("QUUX-V01-CS02-with-BLS12381G2_XMD:SHA-256_SSWU_RO_") + + tests := []struct { + input, expected string + }{ + { + "", + "05cb8437535e20ecffaef7752baddf98034139c38452458baeefab379ba13dff5bf5dd71b72418717047f5b0f37da03d0141ebfbdca40eb85b87142e130ab689c673cf60f1a3e98d69335266f30d9b8d4ac44c1038e9dcdd5393faf5c41fb78a12424ac32561493f3fe3c260708a12b7c620e7be00099a974e259ddc7d1f6395c3c811cdd19f1e8dbf3e9ecfdcbab8d60503921d7f6a12805e72940b963c0cf3471c7b2a524950ca195d11062ee75ec076daf2d4bc358c4b190c0c98064fdd92", + }, + { + "abc", + "139cddbccdc5e91b9623efd38c49f81a6f83f175e80b06fc374de9eb4b41dfe4ca3a230ed250fbe3a2acf73a41177fd802c2d18e033b960562aae3cab37a27ce00d80ccd5ba4b7fe0e7a210245129dbec7780ccc7954725f4168aff2787776e600aa65dae3c8d732d10ecd2c50f8a1baf3001578f71c694e03866e9f3d49ac1e1ce70dd94a733534f106d4cec0eddd161787327b68159716a37440985269cf584bcb1e621d3a7202be6ea05c4cfe244aeb197642555a0645fb87bf7466b2ba48", + }, + { + "abcdef0123456789", + "190d119345b94fbd15497bcba94ecf7db2cbfd1e1fe7da034d26cbba169fb3968288b3fafb265f9ebd380512a71c3f2c121982811d2491fde9ba7ed31ef9ca474f0e1501297f68c298e9f4c0028add35aea8bb83d53c08cfc007c1e005723cd00bb5e7572275c567462d91807de765611490205a941a5a6af3b1691bfe596c31225d3aabdf15faff860cb4ef17c7c3be05571a0f8d3c08d094576981f4a3b8eda0a8e771fcdcc8ecceaf1356a6acf17574518acb506e435b639353c2e14827c8", + }, + { + "q128_qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq", + "0934aba516a52d8ae479939a91998299c76d39cc0c035cd18813bec433f587e2d7a4fef038260eef0cef4d02aae3eb9119a84dd7248a1066f737cc34502ee5555bd3c19f2ecdb3c7d9e24dc65d4e25e50d83f0f77105e955d78f4762d33c17da09bcccfa036b4847c9950780733633f13619994394c23ff0b32fa6b795844f4a0673e20282d07bc69641cee04f5e566214f81cd421617428bc3b9fe25afbb751d934a00493524bc4e065635b0555084dd54679df1536101b2c979c0152d09192", + }, + { + "a512_aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + 
"11fca2ff525572795a801eed17eb12785887c7b63fb77a42be46ce4a34131d71f7a73e95fee3f812aea3de78b4d0156901a6ba2f9a11fa5598b2d8ace0fbe0a0eacb65deceb476fbbcb64fd24557c2f4b18ecfc5663e54ae16a84f5ab7f6253403a47f8e6d1763ba0cad63d6114c0accbef65707825a511b251a660a9b3994249ae4e63fac38b23da0c398689ee2ab520b6798718c8aed24bc19cb27f866f1c9effcdbf92397ad6448b5c9db90d2b9da6cbabf48adc1adf59a1a28344e79d57e", + }, + } + + pt := new(G2).Identity() + ept := new(G2).Identity() + var b [DoubleWideFieldBytes]byte + for _, tst := range tests { + i := []byte(tst.input) + e, _ := hex.DecodeString(tst.expected) + copy(b[:], e) + _, _ = ept.FromUncompressed(&b) + pt.Hash(native.EllipticPointHasherSha256(), i, dst) + require.Equal(t, 1, pt.Equal(ept)) + } +} + +func TestG2SumOfProducts(t *testing.T) { + var b [64]byte + h0, _ := new(G2).Random(crand.Reader) + _, _ = crand.Read(b[:]) + s := Bls12381FqNew().SetBytesWide(&b) + _, _ = crand.Read(b[:]) + sTilde := Bls12381FqNew().SetBytesWide(&b) + _, _ = crand.Read(b[:]) + c := Bls12381FqNew().SetBytesWide(&b) + + lhs := new(G2).Mul(h0, s) + rhs, _ := new(G2).SumOfProducts([]*G2{h0}, []*native.Field{s}) + require.Equal(t, 1, lhs.Equal(rhs)) + + u := new(G2).Mul(h0, s) + uTilde := new(G2).Mul(h0, sTilde) + sHat := Bls12381FqNew().Mul(c, s) + sHat.Sub(sTilde, sHat) + + rhs.Mul(u, c) + rhs.Add(rhs, new(G2).Mul(h0, sHat)) + require.Equal(t, 1, uTilde.Equal(rhs)) + _, _ = rhs.SumOfProducts([]*G2{u, h0}, []*native.Field{c, sHat}) + require.Equal(t, 1, uTilde.Equal(rhs)) +} diff --git a/core/curves/native/bls12381/gt.go b/core/curves/native/bls12381/gt.go new file mode 100644 index 0000000..16ecc5d --- /dev/null +++ b/core/curves/native/bls12381/gt.go @@ -0,0 +1,434 @@ +package bls12381 + +import ( + "io" + + "github.com/sonr-io/sonr/crypto/core/curves/native" + "github.com/sonr-io/sonr/crypto/internal" +) + +// GtFieldBytes is the number of bytes needed to represent this field +const GtFieldBytes = 576 + +// Gt is the target group +type Gt fp12 + +// Random generates a random field element +func (gt *Gt) Random(reader io.Reader) (*Gt, error) { + _, err := (*fp12)(gt).Random(reader) + return gt, err +} + +// FinalExponentiation performs a "final exponentiation" routine to convert the result +// of a Miller loop into an element of `Gt` with help of efficient squaring +// operation in the so-called `cyclotomic subgroup` of `Fq6` so that +// it can be compared with other elements of `Gt`. 
+func (gt *Gt) FinalExponentiation(a *Gt) *Gt { + var t0, t1, t2, t3, t4, t5, t6, t fp12 + t0.FrobeniusMap((*fp12)(a)) + t0.FrobeniusMap(&t0) + t0.FrobeniusMap(&t0) + t0.FrobeniusMap(&t0) + t0.FrobeniusMap(&t0) + t0.FrobeniusMap(&t0) + + // Shouldn't happen since we enforce `a` to be non-zero but just in case + _, wasInverted := t1.Invert((*fp12)(a)) + t2.Mul(&t0, &t1) + t1.Set(&t2) + t2.FrobeniusMap(&t2) + t2.FrobeniusMap(&t2) + t2.Mul(&t2, &t1) + t1.cyclotomicSquare(&t2) + t1.Conjugate(&t1) + + t3.cyclotomicExp(&t2) + t4.cyclotomicSquare(&t3) + t5.Mul(&t1, &t3) + t1.cyclotomicExp(&t5) + t0.cyclotomicExp(&t1) + t6.cyclotomicExp(&t0) + t6.Mul(&t6, &t4) + t4.cyclotomicExp(&t6) + t5.Conjugate(&t5) + t4.Mul(&t4, &t5) + t4.Mul(&t4, &t2) + t5.Conjugate(&t2) + t1.Mul(&t1, &t2) + t1.FrobeniusMap(&t1) + t1.FrobeniusMap(&t1) + t1.FrobeniusMap(&t1) + t6.Mul(&t6, &t5) + t6.FrobeniusMap(&t6) + t3.Mul(&t3, &t0) + t3.FrobeniusMap(&t3) + t3.FrobeniusMap(&t3) + t3.Mul(&t3, &t1) + t3.Mul(&t3, &t6) + t.Mul(&t3, &t4) + (*fp12)(gt).CMove((*fp12)(gt), &t, wasInverted) + return gt +} + +// IsZero returns 1 if gt == 0, 0 otherwise +func (gt *Gt) IsZero() int { + return (*fp12)(gt).IsZero() +} + +// IsOne returns 1 if gt == 1, 0 otherwise +func (gt *Gt) IsOne() int { + return (*fp12)(gt).IsOne() +} + +// SetOne gt = one +func (gt *Gt) SetOne() *Gt { + (*fp12)(gt).SetOne() + return gt +} + +// Set copies a into gt +func (gt *Gt) Set(a *Gt) *Gt { + gt.A.Set(&a.A) + gt.B.Set(&a.B) + return gt +} + +// Bytes returns the Gt field byte representation +func (gt *Gt) Bytes() [GtFieldBytes]byte { + var out [GtFieldBytes]byte + t := gt.A.A.A.Bytes() + copy(out[:FieldBytes], internal.ReverseScalarBytes(t[:])) + t = gt.A.A.B.Bytes() + copy(out[FieldBytes:2*FieldBytes], internal.ReverseScalarBytes(t[:])) + t = gt.A.B.A.Bytes() + copy(out[2*FieldBytes:3*FieldBytes], internal.ReverseScalarBytes(t[:])) + t = gt.A.B.B.Bytes() + copy(out[3*FieldBytes:4*FieldBytes], internal.ReverseScalarBytes(t[:])) + t = gt.A.C.A.Bytes() + copy(out[4*FieldBytes:5*FieldBytes], internal.ReverseScalarBytes(t[:])) + t = gt.A.C.B.Bytes() + copy(out[5*FieldBytes:6*FieldBytes], internal.ReverseScalarBytes(t[:])) + t = gt.B.A.A.Bytes() + copy(out[6*FieldBytes:7*FieldBytes], internal.ReverseScalarBytes(t[:])) + t = gt.B.A.B.Bytes() + copy(out[7*FieldBytes:8*FieldBytes], internal.ReverseScalarBytes(t[:])) + t = gt.B.B.A.Bytes() + copy(out[8*FieldBytes:9*FieldBytes], internal.ReverseScalarBytes(t[:])) + t = gt.B.B.B.Bytes() + copy(out[9*FieldBytes:10*FieldBytes], internal.ReverseScalarBytes(t[:])) + t = gt.B.C.A.Bytes() + copy(out[10*FieldBytes:11*FieldBytes], internal.ReverseScalarBytes(t[:])) + t = gt.B.C.B.Bytes() + copy(out[11*FieldBytes:12*FieldBytes], internal.ReverseScalarBytes(t[:])) + + return out +} + +// SetBytes attempts to convert a big-endian byte representation of +// a scalar into a `Gt`, failing if the input is not canonical. 
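A byte-level round-trip sketch for Bytes/SetBytes; the helper is illustrative and not part of this commit:

func exampleGtRoundTrip() bool {
	g := new(Gt).Generator()
	raw := g.Bytes() // 576 bytes: twelve 48-byte field element encodings
	h, ok := new(Gt).SetBytes(&raw)
	return ok == 1 && h.Equal(g) == 1
}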
+func (gt *Gt) SetBytes(input *[GtFieldBytes]byte) (*Gt, int) { + var t [FieldBytes]byte + var valid [12]int + copy(t[:], internal.ReverseScalarBytes(input[:FieldBytes])) + _, valid[0] = gt.A.A.A.SetBytes(&t) + copy(t[:], internal.ReverseScalarBytes(input[FieldBytes:2*FieldBytes])) + _, valid[1] = gt.A.A.B.SetBytes(&t) + copy(t[:], internal.ReverseScalarBytes(input[2*FieldBytes:3*FieldBytes])) + _, valid[2] = gt.A.B.A.SetBytes(&t) + copy(t[:], internal.ReverseScalarBytes(input[3*FieldBytes:4*FieldBytes])) + _, valid[3] = gt.A.B.B.SetBytes(&t) + copy(t[:], internal.ReverseScalarBytes(input[4*FieldBytes:5*FieldBytes])) + _, valid[4] = gt.A.C.A.SetBytes(&t) + copy(t[:], internal.ReverseScalarBytes(input[5*FieldBytes:6*FieldBytes])) + _, valid[5] = gt.A.C.B.SetBytes(&t) + copy(t[:], internal.ReverseScalarBytes(input[6*FieldBytes:7*FieldBytes])) + _, valid[6] = gt.B.A.A.SetBytes(&t) + copy(t[:], internal.ReverseScalarBytes(input[7*FieldBytes:8*FieldBytes])) + _, valid[7] = gt.B.A.B.SetBytes(&t) + copy(t[:], internal.ReverseScalarBytes(input[8*FieldBytes:9*FieldBytes])) + _, valid[8] = gt.B.B.A.SetBytes(&t) + copy(t[:], internal.ReverseScalarBytes(input[9*FieldBytes:10*FieldBytes])) + _, valid[9] = gt.B.B.B.SetBytes(&t) + copy(t[:], internal.ReverseScalarBytes(input[10*FieldBytes:11*FieldBytes])) + _, valid[10] = gt.B.C.A.SetBytes(&t) + copy(t[:], internal.ReverseScalarBytes(input[11*FieldBytes:12*FieldBytes])) + _, valid[11] = gt.B.C.B.SetBytes(&t) + + return gt, valid[0] & valid[1] & + valid[2] & valid[3] & + valid[4] & valid[5] & + valid[6] & valid[7] & + valid[8] & valid[9] & + valid[10] & valid[11] +} + +// Equal returns 1 if gt == rhs, 0 otherwise +func (gt *Gt) Equal(rhs *Gt) int { + return (*fp12)(gt).Equal((*fp12)(rhs)) +} + +// Generator returns the base point +func (gt *Gt) Generator() *Gt { + // pairing(&G1::generator(), &G2::generator()) + gt.Set((*Gt)(&fp12{ + A: fp6{ + A: fp2{ + A: fp{ + 0x1972e433a01f85c5, + 0x97d32b76fd772538, + 0xc8ce546fc96bcdf9, + 0xcef63e7366d40614, + 0xa611342781843780, + 0x13f3448a3fc6d825, + }, + B: fp{ + 0xd26331b02e9d6995, + 0x9d68a482f7797e7d, + 0x9c9b29248d39ea92, + 0xf4801ca2e13107aa, + 0xa16c0732bdbcb066, + 0x083ca4afba360478, + }, + }, + B: fp2{ + A: fp{ + 0x59e261db0916b641, + 0x2716b6f4b23e960d, + 0xc8e55b10a0bd9c45, + 0x0bdb0bd99c4deda8, + 0x8cf89ebf57fdaac5, + 0x12d6b7929e777a5e, + }, + B: fp{ + 0x5fc85188b0e15f35, + 0x34a06e3a8f096365, + 0xdb3126a6e02ad62c, + 0xfc6f5aa97d9a990b, + 0xa12f55f5eb89c210, + 0x1723703a926f8889, + }, + }, + C: fp2{ + A: fp{ + 0x93588f2971828778, + 0x43f65b8611ab7585, + 0x3183aaf5ec279fdf, + 0xfa73d7e18ac99df6, + 0x64e176a6a64c99b0, + 0x179fa78c58388f1f, + }, + B: fp{ + 0x672a0a11ca2aef12, + 0x0d11b9b52aa3f16b, + 0xa44412d0699d056e, + 0xc01d0177221a5ba5, + 0x66e0cede6c735529, + 0x05f5a71e9fddc339, + }, + }, + }, + B: fp6{ + A: fp2{ + A: fp{ + 0xd30a88a1b062c679, + 0x5ac56a5d35fc8304, + 0xd0c834a6a81f290d, + 0xcd5430c2da3707c7, + 0xf0c27ff780500af0, + 0x09245da6e2d72eae, + }, + B: fp{ + 0x9f2e0676791b5156, + 0xe2d1c8234918fe13, + 0x4c9e459f3c561bf4, + 0xa3e85e53b9d3e3c1, + 0x820a121e21a70020, + 0x15af618341c59acc, + }, + }, + B: fp2{ + A: fp{ + 0x7c95658c24993ab1, + 0x73eb38721ca886b9, + 0x5256d749477434bc, + 0x8ba41902ea504a8b, + 0x04a3d3f80c86ce6d, + 0x18a64a87fb686eaa, + }, + B: fp{ + 0xbb83e71bb920cf26, + 0x2a5277ac92a73945, + 0xfc0ee59f94f046a0, + 0x7158cdf3786058f7, + 0x7cc1061b82f945f6, + 0x03f847aa9fdbe567, + }, + }, + C: fp2{ + A: fp{ + 0x8078dba56134e657, + 0x1cd7ec9a43998a6e, + 0xb1aa599a1a993766, + 
0xc9a0f62f0842ee44, + 0x8e159be3b605dffa, + 0x0c86ba0d4af13fc2, + }, + B: fp{ + 0xe80ff2a06a52ffb1, + 0x7694ca48721a906c, + 0x7583183e03b08514, + 0xf567afdd40cee4e2, + 0x9a6d96d2e526a5fc, + 0x197e9f49861f2242, + }, + }, + }, + })) + return gt +} + +// Add adds this value to another value. +func (gt *Gt) Add(arg1, arg2 *Gt) *Gt { + (*fp12)(gt).Mul((*fp12)(arg1), (*fp12)(arg2)) + return gt +} + +// Double this value +func (gt *Gt) Double(a *Gt) *Gt { + (*fp12)(gt).Square((*fp12)(a)) + return gt +} + +// Sub subtracts the two values +func (gt *Gt) Sub(arg1, arg2 *Gt) *Gt { + var t fp12 + t.Conjugate((*fp12)(arg2)) + (*fp12)(gt).Mul((*fp12)(arg1), &t) + return gt +} + +// Neg negates this value +func (gt *Gt) Neg(a *Gt) *Gt { + (*fp12)(gt).Conjugate((*fp12)(a)) + return gt +} + +// Mul multiplies this value by the input scalar +func (gt *Gt) Mul(a *Gt, s *native.Field) *Gt { + var f, p fp12 + f.Set((*fp12)(a)) + bytes := s.Bytes() + + precomputed := [16]fp12{} + precomputed[1].Set(&f) + for i := 2; i < 16; i += 2 { + precomputed[i].Square(&precomputed[i>>1]) + precomputed[i+1].Mul(&precomputed[i], &f) + } + for i := 0; i < 256; i += 4 { + // Brouwer / windowing method. window size of 4. + for j := 0; j < 4; j++ { + p.Square(&p) + } + window := bytes[32-1-i>>3] >> (4 - i&0x04) & 0x0F + p.Mul(&p, &precomputed[window]) + } + (*fp12)(gt).Set(&p) + return gt +} + +// Square this value +func (gt *Gt) Square(a *Gt) *Gt { + (*fp12)(gt).cyclotomicSquare((*fp12)(a)) + return gt +} + +// Invert this value +func (gt *Gt) Invert(a *Gt) (*Gt, int) { + _, wasInverted := (*fp12)(gt).Invert((*fp12)(a)) + return gt, wasInverted +} + +func fp4Square(a, b, arg1, arg2 *fp2) { + var t0, t1, t2 fp2 + + t0.Square(arg1) + t1.Square(arg2) + t2.MulByNonResidue(&t1) + a.Add(&t2, &t0) + t2.Add(arg1, arg2) + t2.Square(&t2) + t2.Sub(&t2, &t0) + b.Sub(&t2, &t1) +} + +func (f *fp12) cyclotomicSquare(a *fp12) *fp12 { + // Adaptation of Algorithm 5.5.4, Guide to Pairing-Based Cryptography + // Faster Squaring in the Cyclotomic Subgroup of Sixth Degree Extensions + // https://eprint.iacr.org/2009/565.pdf + var z0, z1, z2, z3, z4, z5, t0, t1, t2, t3 fp2 + z0.Set(&a.A.A) + z4.Set(&a.A.B) + z3.Set(&a.A.C) + z2.Set(&a.B.A) + z1.Set(&a.B.B) + z5.Set(&a.B.C) + + fp4Square(&t0, &t1, &z0, &z1) + z0.Sub(&t0, &z0) + z0.Double(&z0) + z0.Add(&z0, &t0) + + z1.Add(&t1, &z1) + z1.Double(&z1) + z1.Add(&z1, &t1) + + fp4Square(&t0, &t1, &z2, &z3) + fp4Square(&t2, &t3, &z4, &z5) + + z4.Sub(&t0, &z4) + z4.Double(&z4) + z4.Add(&z4, &t0) + + z5.Add(&z5, &t1) + z5.Double(&z5) + z5.Add(&z5, &t1) + + t0.MulByNonResidue(&t3) + z2.Add(&z2, &t0) + z2.Double(&z2) + z2.Add(&z2, &t0) + + z3.Sub(&t2, &z3) + z3.Double(&z3) + z3.Add(&z3, &t2) + + f.A.A.Set(&z0) + f.A.B.Set(&z4) + f.A.C.Set(&z3) + + f.B.A.Set(&z2) + f.B.B.Set(&z1) + f.B.C.Set(&z5) + return f +} + +func (f *fp12) cyclotomicExp(a *fp12) *fp12 { + var t fp12 + t.SetOne() + foundOne := 0 + + for i := 63; i >= 0; i-- { + b := int((paramX >> i) & 1) + if foundOne == 1 { + t.cyclotomicSquare(&t) + } else { + foundOne = b + } + if b == 1 { + t.Mul(&t, a) + } + } + f.Conjugate(&t) + return f +} diff --git a/core/curves/native/bls12381/pairings.go b/core/curves/native/bls12381/pairings.go new file mode 100755 index 0000000..c01ef09 --- /dev/null +++ b/core/curves/native/bls12381/pairings.go @@ -0,0 +1,254 @@ +package bls12381 + +const coefficientsG2 = 68 + +type Engine struct { + pairs []pair +} + +type pair struct { + g1 G1 + g2 G2 +} + +type g2Prepared struct { + identity int + coefficients 
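// Illustrative sketch (not part of this change): Gt uses additive names over a
// multiplicative fp12 group, so Add is an fp12 multiplication, Double a
// squaring, Neg a conjugation, and the identity element is the fp12 one. A
// quick sanity sketch of those semantics (bls12381 package scope assumed,
// hypothetical helper name):
func gtNotationSketch() bool {
    g := new(Gt).Generator()
    twoG := new(Gt).Double(g)            // "g + g" is really g^2 in fp12
    sum := new(Gt).Add(g, g)             // the same element via Add
    id := new(Gt).Add(g, new(Gt).Neg(g)) // g + (-g) is the group identity
    return twoG.Equal(sum) == 1 && id.IsOne() == 1
}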
[]coefficients +} + +type coefficients struct { + a, b, c fp2 +} + +func (c *coefficients) CMove(arg1, arg2 *coefficients, choice int) *coefficients { + c.a.CMove(&arg1.a, &arg2.a, choice) + c.b.CMove(&arg1.b, &arg2.b, choice) + c.c.CMove(&arg1.c, &arg2.c, choice) + return c +} + +// AddPair adds a pair of points to be paired +func (e *Engine) AddPair(g1 *G1, g2 *G2) *Engine { + var p pair + p.g1.ToAffine(g1) + p.g2.ToAffine(g2) + if p.g1.IsIdentity()|p.g2.IsIdentity() == 0 { + e.pairs = append(e.pairs, p) + } + return e +} + +// AddPairInvG1 adds a pair of points to be paired. G1 point is negated +func (e *Engine) AddPairInvG1(g1 *G1, g2 *G2) *Engine { + var p G1 + p.Neg(g1) + return e.AddPair(&p, g2) +} + +// AddPairInvG2 adds a pair of points to be paired. G2 point is negated +func (e *Engine) AddPairInvG2(g1 *G1, g2 *G2) *Engine { + var p G2 + p.Neg(g2) + return e.AddPair(g1, &p) +} + +func (e *Engine) Reset() *Engine { + e.pairs = []pair{} + return e +} + +func (e *Engine) Check() bool { + return e.pairing().IsOne() == 1 +} + +func (e *Engine) Result() *Gt { + return e.pairing() +} + +func (e *Engine) pairing() *Gt { + f := new(Gt).SetOne() + if len(e.pairs) == 0 { + return f + } + coeffs := e.computeCoeffs() + e.millerLoop((*fp12)(f), coeffs) + return f.FinalExponentiation(f) +} + +func (e *Engine) millerLoop(f *fp12, coeffs []g2Prepared) { + newF := new(fp12).SetZero() + found := 0 + cIdx := 0 + for i := 63; i >= 0; i-- { + x := int(((paramX >> 1) >> i) & 1) + if found == 0 { + found |= x + continue + } + + // doubling + for j, terms := range coeffs { + identity := e.pairs[j].g1.IsIdentity() | terms.identity + newF.Set(f) + ell(newF, terms.coefficients[cIdx], &e.pairs[j].g1) + f.CMove(newF, f, identity) + } + cIdx++ + + if x == 1 { + // adding + for j, terms := range coeffs { + identity := e.pairs[j].g1.IsIdentity() | terms.identity + newF.Set(f) + ell(newF, terms.coefficients[cIdx], &e.pairs[j].g1) + f.CMove(newF, f, identity) + } + cIdx++ + } + f.Square(f) + } + for j, terms := range coeffs { + identity := e.pairs[j].g1.IsIdentity() | terms.identity + newF.Set(f) + ell(newF, terms.coefficients[cIdx], &e.pairs[j].g1) + f.CMove(newF, f, identity) + } + f.Conjugate(f) +} + +func (e *Engine) computeCoeffs() []g2Prepared { + coeffs := make([]g2Prepared, len(e.pairs)) + for i, p := range e.pairs { + identity := p.g2.IsIdentity() + q := new(G2).Generator() + q.CMove(&p.g2, q, identity) + c := new(G2).Set(q) + cfs := make([]coefficients, coefficientsG2) + found := 0 + k := 0 + + for j := 63; j >= 0; j-- { + x := int(((paramX >> 1) >> j) & 1) + if found == 0 { + found |= x + continue + } + cfs[k] = doublingStep(c) + k++ + + if x == 1 { + cfs[k] = additionStep(c, q) + k++ + } + } + cfs[k] = doublingStep(c) + coeffs[i] = g2Prepared{ + coefficients: cfs, identity: identity, + } + } + return coeffs +} + +func ell(f *fp12, coeffs coefficients, p *G1) { + var x, y fp2 + x.A.Mul(&coeffs.a.A, &p.y) + x.B.Mul(&coeffs.a.B, &p.y) + y.A.Mul(&coeffs.b.A, &p.x) + y.B.Mul(&coeffs.b.B, &p.x) + f.MulByABD(f, &coeffs.c, &y, &x) +} + +func doublingStep(p *G2) coefficients { + // Adaptation of Algorithm 26, https://eprint.iacr.org/2010/354.pdf + var t0, t1, t2, t3, t4, t5, t6, zsqr fp2 + t0.Square(&p.x) + t1.Square(&p.y) + t2.Square(&t1) + t3.Add(&t1, &p.x) + t3.Square(&t3) + t3.Sub(&t3, &t0) + t3.Sub(&t3, &t2) + t3.Double(&t3) + t4.Double(&t0) + t4.Add(&t4, &t0) + t6.Add(&p.x, &t4) + t5.Square(&t4) + zsqr.Square(&p.z) + p.x.Sub(&t5, &t3) + p.x.Sub(&p.x, &t3) + p.z.Add(&p.z, &p.y) + p.z.Square(&p.z) + 
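// Illustrative sketch (not part of this change): the Engine batches pairs into
// one shared Miller loop and a single final exponentiation, so an equality
// e(a, b) == e(c, d) is cheapest to verify as e(a, b) * e(-c, d) == 1 via
// Check. A hedged sketch using only the methods above (hypothetical helper):
func pairingEqualSketch(a, c *G1, b, d *G2) bool {
    e := new(Engine)
    e.AddPair(a, b)      // accumulates e(a, b)
    e.AddPairInvG1(c, d) // accumulates e(-c, d) == e(c, d)^-1
    return e.Check()     // true iff the product is the identity in Gt
}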
p.z.Sub(&p.z, &t1) + p.z.Sub(&p.z, &zsqr) + p.y.Sub(&t3, &p.x) + p.y.Mul(&p.y, &t4) + t2.Double(&t2) + t2.Double(&t2) + t2.Double(&t2) + p.y.Sub(&p.y, &t2) + t3.Mul(&t4, &zsqr) + t3.Double(&t3) + t3.Neg(&t3) + t6.Square(&t6) + t6.Sub(&t6, &t0) + t6.Sub(&t6, &t5) + t1.Double(&t1) + t1.Double(&t1) + t6.Sub(&t6, &t1) + t0.Mul(&p.z, &zsqr) + t0.Double(&t0) + + return coefficients{ + a: t0, b: t3, c: t6, + } +} + +func additionStep(r, q *G2) coefficients { + // Adaptation of Algorithm 27, https://eprint.iacr.org/2010/354.pdf + var zsqr, ysqr fp2 + var t0, t1, t2, t3, t4, t5, t6, t7, t8, t9, t10 fp2 + zsqr.Square(&r.z) + ysqr.Square(&q.y) + t0.Mul(&zsqr, &q.x) + t1.Add(&q.y, &r.z) + t1.Square(&t1) + t1.Sub(&t1, &ysqr) + t1.Sub(&t1, &zsqr) + t1.Mul(&t1, &zsqr) + t2.Sub(&t0, &r.x) + t3.Square(&t2) + t4.Double(&t3) + t4.Double(&t4) + t5.Mul(&t4, &t2) + t6.Sub(&t1, &r.y) + t6.Sub(&t6, &r.y) + t9.Mul(&t6, &q.x) + t7.Mul(&t4, &r.x) + r.x.Square(&t6) + r.x.Sub(&r.x, &t5) + r.x.Sub(&r.x, &t7) + r.x.Sub(&r.x, &t7) + r.z.Add(&r.z, &t2) + r.z.Square(&r.z) + r.z.Sub(&r.z, &zsqr) + r.z.Sub(&r.z, &t3) + t10.Add(&q.y, &r.z) + t8.Sub(&t7, &r.x) + t8.Mul(&t8, &t6) + t0.Mul(&r.y, &t5) + t0.Double(&t0) + r.y.Sub(&t8, &t0) + t10.Square(&t10) + t10.Sub(&t10, &ysqr) + zsqr.Square(&r.z) + t10.Sub(&t10, &zsqr) + t9.Double(&t9) + t9.Sub(&t9, &t10) + t10.Double(&r.z) + t6.Neg(&t6) + t1.Double(&t6) + + return coefficients{ + a: t10, b: t1, c: t9, + } +} diff --git a/core/curves/native/bls12381/pairings_test.go b/core/curves/native/bls12381/pairings_test.go new file mode 100644 index 0000000..07e8028 --- /dev/null +++ b/core/curves/native/bls12381/pairings_test.go @@ -0,0 +1,64 @@ +package bls12381 + +import ( + crand "crypto/rand" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core/curves/native" +) + +func TestSinglePairing(t *testing.T) { + g := new(G1).Generator() + h := new(G2).Generator() + + e := new(Engine) + e.AddPair(g, h) + p := e.Result() + p.Neg(p) + + e.Reset() + e.AddPairInvG2(g, h) + q := e.Result() + e.Reset() + e.AddPairInvG1(g, h) + r := e.Result() + + require.Equal(t, 1, p.Equal(q)) + require.Equal(t, 1, q.Equal(r)) +} + +func TestMultiPairing(t *testing.T) { + const Tests = 10 + e1 := new(Engine) + e2 := new(Engine) + + g1s := make([]*G1, Tests) + g2s := make([]*G2, Tests) + sc := make([]*native.Field, Tests) + res := make([]*Gt, Tests) + expected := new(Gt).SetOne() + + for i := 0; i < Tests; i++ { + var bytes [64]byte + g1s[i] = new(G1).Generator() + g2s[i] = new(G2).Generator() + sc[i] = Bls12381FqNew() + _, _ = crand.Read(bytes[:]) + sc[i].SetBytesWide(&bytes) + if i&1 == 0 { + g1s[i].Mul(g1s[i], sc[i]) + } else { + g2s[i].Mul(g2s[i], sc[i]) + } + e1.AddPair(g1s[i], g2s[i]) + e2.AddPair(g1s[i], g2s[i]) + res[i] = e1.Result() + e1.Reset() + expected.Add(expected, res[i]) + } + + actual := e2.Result() + require.Equal(t, 1, expected.Equal(actual)) +} diff --git a/core/curves/native/field.go b/core/curves/native/field.go new file mode 100644 index 0000000..4a2f29f --- /dev/null +++ b/core/curves/native/field.go @@ -0,0 +1,388 @@ +package native + +import ( + "encoding/binary" + "fmt" + "math/big" + + "github.com/sonr-io/sonr/crypto/internal" +) + +// FieldLimbs is the number of limbs needed to represent this field +const FieldLimbs = 4 + +// FieldBytes is the number of bytes needed to represent this field +const FieldBytes = 32 + +// WideFieldBytes is the number of bytes needed for safe conversion +// to this field to avoid bias when reduced +const 
WideFieldBytes = 64 + +// Field represents a field element +type Field struct { + // Value is the field elements value + Value [FieldLimbs]uint64 + // Params are the field parameters + Params *FieldParams + // Arithmetic are the field methods + Arithmetic FieldArithmetic +} + +// FieldParams are the field parameters +type FieldParams struct { + // R is 2^256 mod Modulus + R [FieldLimbs]uint64 + // R2 is 2^512 mod Modulus + R2 [FieldLimbs]uint64 + // R3 is 2^768 mod Modulus + R3 [FieldLimbs]uint64 + // Modulus of the field + Modulus [FieldLimbs]uint64 + // Modulus as big.Int + BiModulus *big.Int +} + +// FieldArithmetic are the methods that can be done on a field +type FieldArithmetic interface { + // ToMontgomery converts this field to montgomery form + ToMontgomery(out, arg *[FieldLimbs]uint64) + // FromMontgomery converts this field from montgomery form + FromMontgomery(out, arg *[FieldLimbs]uint64) + // Neg performs modular negation + Neg(out, arg *[FieldLimbs]uint64) + // Square performs modular square + Square(out, arg *[FieldLimbs]uint64) + // Mul performs modular multiplication + Mul(out, arg1, arg2 *[FieldLimbs]uint64) + // Add performs modular addition + Add(out, arg1, arg2 *[FieldLimbs]uint64) + // Sub performs modular subtraction + Sub(out, arg1, arg2 *[FieldLimbs]uint64) + // Sqrt performs modular square root + Sqrt(wasSquare *int, out, arg *[FieldLimbs]uint64) + // Invert performs modular inverse + Invert(wasInverted *int, out, arg *[FieldLimbs]uint64) + // FromBytes converts a little endian byte array into a field element + FromBytes(out *[FieldLimbs]uint64, arg *[FieldBytes]byte) + // ToBytes converts a field element to a little endian byte array + ToBytes(out *[FieldBytes]byte, arg *[FieldLimbs]uint64) + // Selectznz performs conditional select. + // selects arg1 if choice == 0 and arg2 if choice == 1 + Selectznz(out, arg1, arg2 *[FieldLimbs]uint64, choice int) +} + +// Cmp returns -1 if f < rhs +// 0 if f == rhs +// 1 if f > rhs +func (f *Field) Cmp(rhs *Field) int { + return cmpHelper(&f.Value, &rhs.Value) +} + +// cmpHelper returns -1 if lhs < rhs +// -1 if lhs == rhs +// 1 if lhs > rhs +// Public only for convenience for some internal implementations +func cmpHelper(lhs, rhs *[FieldLimbs]uint64) int { + gt := uint64(0) + lt := uint64(0) + for i := 3; i >= 0; i-- { + // convert to two 64-bit numbers where + // the leading bits are zeros and hold no meaning + // so rhs - fp actually means gt + // and fp - rhs actually means lt. 
+ rhsH := rhs[i] >> 32 + rhsL := rhs[i] & 0xffffffff + lhsH := lhs[i] >> 32 + lhsL := lhs[i] & 0xffffffff + + // Check the leading bit + // if negative then fp > rhs + // if positive then fp < rhs + gt |= (rhsH - lhsH) >> 32 & 1 &^ lt + lt |= (lhsH - rhsH) >> 32 & 1 &^ gt + gt |= (rhsL - lhsL) >> 32 & 1 &^ lt + lt |= (lhsL - rhsL) >> 32 & 1 &^ gt + } + // Make the result -1 for <, 0 for =, 1 for > + return int(gt) - int(lt) +} + +// Equal returns 1 if f == rhs, 0 otherwise +func (f *Field) Equal(rhs *Field) int { + return equalHelper(&f.Value, &rhs.Value) +} + +func equalHelper(lhs, rhs *[FieldLimbs]uint64) int { + t := lhs[0] ^ rhs[0] + t |= lhs[1] ^ rhs[1] + t |= lhs[2] ^ rhs[2] + t |= lhs[3] ^ rhs[3] + return int(((int64(t) | int64(-t)) >> 63) + 1) +} + +// IsZero returns 1 if f == 0, 0 otherwise +func (f *Field) IsZero() int { + t := f.Value[0] + t |= f.Value[1] + t |= f.Value[2] + t |= f.Value[3] + return int(((int64(t) | int64(-t)) >> 63) + 1) +} + +// IsNonZero returns 1 if f != 0, 0 otherwise +func (f *Field) IsNonZero() int { + t := f.Value[0] + t |= f.Value[1] + t |= f.Value[2] + t |= f.Value[3] + return int(-((int64(t) | int64(-t)) >> 63)) +} + +// IsOne returns 1 if f == 1, 0 otherwise +func (f *Field) IsOne() int { + return equalHelper(&f.Value, &f.Params.R) +} + +// Set f = rhs +func (f *Field) Set(rhs *Field) *Field { + f.Value[0] = rhs.Value[0] + f.Value[1] = rhs.Value[1] + f.Value[2] = rhs.Value[2] + f.Value[3] = rhs.Value[3] + f.Params = rhs.Params + f.Arithmetic = rhs.Arithmetic + return f +} + +// SetUint64 f = rhs +func (f *Field) SetUint64(rhs uint64) *Field { + t := &[FieldLimbs]uint64{rhs, 0, 0, 0} + f.Arithmetic.ToMontgomery(&f.Value, t) + return f +} + +// SetOne f = r +func (f *Field) SetOne() *Field { + f.Value[0] = f.Params.R[0] + f.Value[1] = f.Params.R[1] + f.Value[2] = f.Params.R[2] + f.Value[3] = f.Params.R[3] + return f +} + +// SetZero f = 0 +func (f *Field) SetZero() *Field { + f.Value[0] = 0 + f.Value[1] = 0 + f.Value[2] = 0 + f.Value[3] = 0 + return f +} + +// SetBytesWide takes 64 bytes as input and treats them as a 512-bit number. +// Attributed to https://github.com/zcash/pasta_curves/blob/main/src/fields/Fp.rs#L255 +// We reduce an arbitrary 512-bit number by decomposing it into two 256-bit digits +// with the higher bits multiplied by 2^256. Thus, we perform two reductions +// +// 1. the lower bits are multiplied by r^2, as normal +// 2. the upper bits are multiplied by r^2 * 2^256 = r^3 +// +// and computing their sum in the field. It remains to see that arbitrary 256-bit +// numbers can be placed into Montgomery form safely using the reduction. The +// reduction works so long as the product is less than r=2^256 multiplied by +// the modulus. This holds because for any `c` smaller than the modulus, we have +// that (2^256 - 1)*c is an acceptable product for the reduction. Therefore, the +// reduction always works so long as `c` is in the field; in this case it is either the +// constant `r2` or `r3`. 
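// Illustrative sketch (not part of this change): the comparison helpers above
// return int flags (Equal/IsZero/IsOne give 1 or 0, Cmp gives -1/0/1) instead
// of bool so callers can stay branch-free and feed the result straight into
// CMove. A hedged "pick the smaller element" sketch (native package scope,
// hypothetical helper):
func minFieldSketch(a, b *Field) *Field {
    choice := (a.Cmp(b) + 1) >> 1 // 1 when a > b, 0 otherwise, no branches
    return new(Field).Set(a).CMove(a, b, choice)
}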
+func (f *Field) SetBytesWide(input *[WideFieldBytes]byte) *Field { + d0 := [FieldLimbs]uint64{ + binary.LittleEndian.Uint64(input[:8]), + binary.LittleEndian.Uint64(input[8:16]), + binary.LittleEndian.Uint64(input[16:24]), + binary.LittleEndian.Uint64(input[24:32]), + } + d1 := [FieldLimbs]uint64{ + binary.LittleEndian.Uint64(input[32:40]), + binary.LittleEndian.Uint64(input[40:48]), + binary.LittleEndian.Uint64(input[48:56]), + binary.LittleEndian.Uint64(input[56:64]), + } + // f.Arithmetic.ToMontgomery(&d0, &d0) + // f.Arithmetic.Mul(&d1, &d1, &f.Params.R2) + // f.Arithmetic.Add(&f.Value, &d0, &d0) + // Convert to Montgomery form + tv1 := &[FieldLimbs]uint64{} + tv2 := &[FieldLimbs]uint64{} + // d0*r2 + d1*r3 + f.Arithmetic.Mul(tv1, &d0, &f.Params.R2) + f.Arithmetic.Mul(tv2, &d1, &f.Params.R3) + f.Arithmetic.Add(&f.Value, tv1, tv2) + return f +} + +// SetBytes attempts to convert a little endian byte representation +// of a scalar into a `Fp`, failing if input is not canonical +func (f *Field) SetBytes(input *[FieldBytes]byte) (*Field, error) { + d0 := [FieldLimbs]uint64{0, 0, 0, 0} + f.Arithmetic.FromBytes(&d0, input) + + if cmpHelper(&d0, &f.Params.Modulus) != -1 { + return nil, fmt.Errorf("invalid byte sequence") + } + return f.SetLimbs(&d0), nil +} + +// SetBigInt initializes an element from big.Int +// The value is reduced by the modulus +func (f *Field) SetBigInt(bi *big.Int) *Field { + var buffer [FieldBytes]byte + t := new(big.Int).Set(bi) + t.Mod(t, f.Params.BiModulus) + t.FillBytes(buffer[:]) + copy(buffer[:], internal.ReverseScalarBytes(buffer[:])) + _, _ = f.SetBytes(&buffer) + return f +} + +// SetRaw converts a raw array into a field element +// Assumes input is already in montgomery form +func (f *Field) SetRaw(input *[FieldLimbs]uint64) *Field { + f.Value[0] = input[0] + f.Value[1] = input[1] + f.Value[2] = input[2] + f.Value[3] = input[3] + return f +} + +// SetLimbs converts an array into a field element +// by converting to montgomery form +func (f *Field) SetLimbs(input *[FieldLimbs]uint64) *Field { + f.Arithmetic.ToMontgomery(&f.Value, input) + return f +} + +// Bytes converts this element into a byte representation +// in little endian byte order +func (f *Field) Bytes() [FieldBytes]byte { + var output [FieldBytes]byte + tv := &[FieldLimbs]uint64{} + f.Arithmetic.FromMontgomery(tv, &f.Value) + f.Arithmetic.ToBytes(&output, tv) + return output +} + +// BigInt converts this element into the big.Int struct +func (f *Field) BigInt() *big.Int { + buffer := f.Bytes() + return new(big.Int).SetBytes(internal.ReverseScalarBytes(buffer[:])) +} + +// Raw converts this element into the a [FieldLimbs]uint64 +func (f *Field) Raw() [FieldLimbs]uint64 { + res := &[FieldLimbs]uint64{} + f.Arithmetic.FromMontgomery(res, &f.Value) + return *res +} + +// Double this element +func (f *Field) Double(a *Field) *Field { + f.Arithmetic.Add(&f.Value, &a.Value, &a.Value) + return f +} + +// Square this element +func (f *Field) Square(a *Field) *Field { + f.Arithmetic.Square(&f.Value, &a.Value) + return f +} + +// Sqrt this element, if it exists. If true, then value +// is a square root. If false, value is a QNR +func (f *Field) Sqrt(a *Field) (*Field, bool) { + wasSquare := 0 + f.Arithmetic.Sqrt(&wasSquare, &f.Value, &a.Value) + return f, wasSquare == 1 +} + +// Invert this element i.e. compute the multiplicative inverse +// return false, zero if this element is zero. 
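// Illustrative sketch (not part of this change): Exp and Invert are tied
// together by Fermat's little theorem, a^(p-2) = a^-1 (mod p) for a != 0, which
// gives a handy cross-check of the two code paths. A hedged sketch (native
// package scope, math/big is already imported by this file, hypothetical
// helper, assumes a != 0):
func fermatInverseSketch(a *Field) bool {
    pMinus2 := new(big.Int).Sub(a.Params.BiModulus, big.NewInt(2))
    exp := new(Field).Set(a).SetBigInt(pMinus2) // p-2 as a field element
    lhs := new(Field).Set(a).Exp(a, exp)        // a^(p-2)
    rhs, ok := new(Field).Set(a).Invert(a)      // multiplicative inverse
    return ok && lhs.Equal(rhs) == 1
}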
+func (f *Field) Invert(a *Field) (*Field, bool) { + wasInverted := 0 + f.Arithmetic.Invert(&wasInverted, &f.Value, &a.Value) + return f, wasInverted == 1 +} + +// Mul returns the result from multiplying this element by rhs +func (f *Field) Mul(lhs, rhs *Field) *Field { + f.Arithmetic.Mul(&f.Value, &lhs.Value, &rhs.Value) + return f +} + +// Sub returns the result from subtracting rhs from this element +func (f *Field) Sub(lhs, rhs *Field) *Field { + f.Arithmetic.Sub(&f.Value, &lhs.Value, &rhs.Value) + return f +} + +// Add returns the result from adding rhs to this element +func (f *Field) Add(lhs, rhs *Field) *Field { + f.Arithmetic.Add(&f.Value, &lhs.Value, &rhs.Value) + return f +} + +// Neg returns negation of this element +func (f *Field) Neg(input *Field) *Field { + f.Arithmetic.Neg(&f.Value, &input.Value) + return f +} + +// Exp raises base^exp +func (f *Field) Exp(base, exp *Field) *Field { + e := [FieldLimbs]uint64{} + f.Arithmetic.FromMontgomery(&e, &exp.Value) + Pow(&f.Value, &base.Value, &e, f.Params, f.Arithmetic) + return f +} + +// CMove sets f = lhs if choice == 0 and f = rhs if choice == 1 +func (f *Field) CMove(lhs, rhs *Field, choice int) *Field { + f.Arithmetic.Selectznz(&f.Value, &lhs.Value, &rhs.Value, choice) + return f +} + +// Pow raises base^exp. The result is written to out. +// Public only for convenience for some internal implementations +func Pow(out, base, exp *[FieldLimbs]uint64, params *FieldParams, arithmetic FieldArithmetic) { + res := [FieldLimbs]uint64{params.R[0], params.R[1], params.R[2], params.R[3]} + tmp := [FieldLimbs]uint64{} + + for i := len(exp) - 1; i >= 0; i-- { + for j := 63; j >= 0; j-- { + arithmetic.Square(&res, &res) + arithmetic.Mul(&tmp, &res, base) + arithmetic.Selectznz(&res, &res, &tmp, int(exp[i]>>j)&1) + } + } + out[0] = res[0] + out[1] = res[1] + out[2] = res[2] + out[3] = res[3] +} + +// Pow2k raises arg to the power `2^k`. This result is written to out. +// Public only for convenience for some internal implementations +func Pow2k(out, arg *[FieldLimbs]uint64, k int, arithmetic FieldArithmetic) { + var t [FieldLimbs]uint64 + t[0] = arg[0] + t[1] = arg[1] + t[2] = arg[2] + t[3] = arg[3] + for i := 0; i < k; i++ { + arithmetic.Square(&t, &t) + } + + out[0] = t[0] + out[1] = t[1] + out[2] = t[2] + out[3] = t[3] +} diff --git a/core/curves/native/hash2field.go b/core/curves/native/hash2field.go new file mode 100755 index 0000000..1d00025 --- /dev/null +++ b/core/curves/native/hash2field.go @@ -0,0 +1,107 @@ +package native + +import ( + "hash" + + "golang.org/x/crypto/sha3" +) + +// OversizeDstSalt is the salt used to hash a dst over MaxDstLen +var OversizeDstSalt = []byte("H2C-OVERSIZE-DST-") + +// MaxDstLen the max size for dst in hash to curve +const MaxDstLen = 255 + +func getDomainXmd(h hash.Hash, domain []byte) []byte { + var out []byte + if len(domain) > MaxDstLen { + h.Reset() + _, _ = h.Write(OversizeDstSalt) + _, _ = h.Write(domain) + out = h.Sum(nil) + } else { + out = domain + } + return out +} + +func getDomainXof(h sha3.ShakeHash, domain []byte) []byte { + var out []byte + if len(domain) > MaxDstLen { + h.Reset() + _, _ = h.Write(OversizeDstSalt) + _, _ = h.Write(domain) + var tv [64]byte + _, _ = h.Read(tv[:]) + out = tv[:] + } else { + out = domain + } + return out +} + +// ExpandMsgXmd expands the msg with the domain to output a byte array +// with outLen in size using a fixed size hash. 
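// Illustrative sketch (not part of this change): ExpandMsgXmd below is
// expand_message_xmd from the hash-to-curve draft; it stretches (msg, DST) into
// outLen uniform bytes that callers reduce with SetBytesWide to obtain unbiased
// field elements. A hedged hash-to-field sketch; EllipticPointHasher and its
// constructors are assumed to be defined elsewhere in this package (they are
// not part of this hunk), h must wrap a fixed-size hash (the xmd branch), and
// the helper name is hypothetical:
func hashToFieldSketch(h *EllipticPointHasher, msg, dst []byte, newField func() *Field) *Field {
    var wide [WideFieldBytes]byte
    out := ExpandMsgXmd(h, msg, dst, WideFieldBytes) // 64 uniform bytes
    copy(wide[:], out)
    return newField().SetBytesWide(&wide) // unbiased reduction mod p
}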
+// See https://datatracker.ietf.org/doc/html/draft-irtf-cfrg-hash-to-curve-13#section-5.4.1 +func ExpandMsgXmd(h *EllipticPointHasher, msg, domain []byte, outLen int) []byte { + domain = getDomainXmd(h.xmd, domain) + domainLen := byte(len(domain)) + h.xmd.Reset() + // DST_prime = DST || I2OSP(len(DST), 1) + // b_0 = H(Z_pad || msg || l_i_b_str || I2OSP(0, 1) || DST_prime) + _, _ = h.xmd.Write(make([]byte, h.xmd.BlockSize())) + _, _ = h.xmd.Write(msg) + _, _ = h.xmd.Write([]byte{uint8(outLen >> 8), uint8(outLen)}) + _, _ = h.xmd.Write([]byte{0}) + _, _ = h.xmd.Write(domain) + _, _ = h.xmd.Write([]byte{domainLen}) + b0 := h.xmd.Sum(nil) + + // b_1 = H(b_0 || I2OSP(1, 1) || DST_prime) + h.xmd.Reset() + _, _ = h.xmd.Write(b0) + _, _ = h.xmd.Write([]byte{1}) + _, _ = h.xmd.Write(domain) + _, _ = h.xmd.Write([]byte{domainLen}) + b1 := h.xmd.Sum(nil) + + // b_i = H(strxor(b_0, b_(i - 1)) || I2OSP(i, 1) || DST_prime) + ell := (outLen + h.xmd.Size() - 1) / h.xmd.Size() + bi := b1 + out := make([]byte, outLen) + for i := 1; i < ell; i++ { + h.xmd.Reset() + // b_i = H(strxor(b_0, b_(i - 1)) || I2OSP(i, 1) || DST_prime) + tmp := make([]byte, h.xmd.Size()) + for j := 0; j < h.xmd.Size(); j++ { + tmp[j] = b0[j] ^ bi[j] + } + _, _ = h.xmd.Write(tmp) + _, _ = h.xmd.Write([]byte{1 + uint8(i)}) + _, _ = h.xmd.Write(domain) + _, _ = h.xmd.Write([]byte{domainLen}) + + // b_1 || ... || b_(ell - 1) + copy(out[(i-1)*h.xmd.Size():i*h.xmd.Size()], bi[:]) + bi = h.xmd.Sum(nil) + } + // b_ell + copy(out[(ell-1)*h.xmd.Size():], bi[:]) + return out[:outLen] +} + +// ExpandMsgXof expands the msg with the domain to output a byte array +// with outLen in size using a xof hash +// See https://datatracker.ietf.org/doc/html/draft-irtf-cfrg-hash-to-curve-13#section-5.4.2 +func ExpandMsgXof(h *EllipticPointHasher, msg, domain []byte, outLen int) []byte { + domain = getDomainXof(h.xof, domain) + domainLen := byte(len(domain)) + h.xof.Reset() + _, _ = h.xof.Write(msg) + _, _ = h.xof.Write([]byte{uint8(outLen >> 8), uint8(outLen)}) + _, _ = h.xof.Write(domain) + _, _ = h.xof.Write([]byte{domainLen}) + out := make([]byte, outLen) + _, _ = h.xof.Read(out) + return out +} diff --git a/core/curves/native/isogeny.go b/core/curves/native/isogeny.go new file mode 100755 index 0000000..a29be01 --- /dev/null +++ b/core/curves/native/isogeny.go @@ -0,0 +1,62 @@ +package native + +// IsogenyParams are the parameters needed to map from an isogeny to the main curve +type IsogenyParams struct { + XNum [][FieldLimbs]uint64 + XDen [][FieldLimbs]uint64 + YNum [][FieldLimbs]uint64 + YDen [][FieldLimbs]uint64 +} + +// Map from the isogeny curve to the main curve using the parameters +func (p *IsogenyParams) Map(xIn, yIn *Field) (x, y *Field) { + var xNum, xDen, yNum, yDen, tv [FieldLimbs]uint64 + var wasInverted int + + xnumL := len(p.XNum) + xdenL := len(p.XDen) + ynumL := len(p.YNum) + ydenL := len(p.YDen) + + degree := 0 + for _, i := range []int{xnumL, xdenL, ynumL, ydenL} { + if degree < i { + degree = i + } + } + + xs := make([][FieldLimbs]uint64, degree) + xs[0] = xIn.Params.R // x[0] = x^0 + xs[1] = xIn.Value // x[1] = x^1 + xIn.Arithmetic.Square(&xs[2], &xIn.Value) // x[2] = x^2 + for i := 3; i < degree; i++ { + // x[i] = x^i + xIn.Arithmetic.Mul(&xs[i], &xs[i-1], &xIn.Value) + } + + computeIsoK(&xNum, &xs, &p.XNum, xIn.Arithmetic) + computeIsoK(&xDen, &xs, &p.XDen, xIn.Arithmetic) + computeIsoK(&yNum, &xs, &p.YNum, xIn.Arithmetic) + computeIsoK(&yDen, &xs, &p.YDen, xIn.Arithmetic) + + xIn.Arithmetic.Invert(&wasInverted, &xDen, &xDen) + 
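// Illustrative note (not part of this change): Map evaluates the rational maps
// of an isogeny, e.g. the 3-isogeny used for secp256k1 or the 11-/3-isogenies
// used for BLS12-381 G1/G2 in simplified SWU:
//
//     x = ( Σ_i XNum[i]·x'^i ) / ( Σ_i XDen[i]·x'^i )
//     y = y' · ( Σ_i YNum[i]·x'^i ) / ( Σ_i YDen[i]·x'^i )
//
// The Invert/Selectznz pairs below keep the (unreachable for well-formed
// parameters) zero-denominator case constant time instead of branching on it.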
x = new(Field).Set(xIn) + xIn.Arithmetic.Mul(&tv, &xNum, &xDen) + xIn.Arithmetic.Selectznz(&x.Value, &x.Value, &tv, wasInverted) + + yIn.Arithmetic.Invert(&wasInverted, &yDen, &yDen) + y = new(Field).Set(yIn) + yIn.Arithmetic.Mul(&tv, &yNum, &yDen) + yIn.Arithmetic.Selectznz(&y.Value, &y.Value, &tv, wasInverted) + yIn.Arithmetic.Mul(&y.Value, &y.Value, &yIn.Value) + return x, y +} + +func computeIsoK(out *[FieldLimbs]uint64, xxs, k *[][FieldLimbs]uint64, f FieldArithmetic) { + var tv [FieldLimbs]uint64 + + for i := range *k { + f.Mul(&tv, &(*xxs)[i], &(*k)[i]) + f.Add(out, out, &tv) + } +} diff --git a/core/curves/native/k256/fp/fp.go b/core/curves/native/k256/fp/fp.go new file mode 100644 index 0000000..42b0cb8 --- /dev/null +++ b/core/curves/native/k256/fp/fp.go @@ -0,0 +1,207 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package fp + +import ( + "math/big" + "sync" + + "github.com/sonr-io/sonr/crypto/core/curves/native" +) + +var ( + k256FpInitonce sync.Once + k256FpParams native.FieldParams +) + +func K256FpNew() *native.Field { + return &native.Field{ + Value: [native.FieldLimbs]uint64{}, + Params: getK256FpParams(), + Arithmetic: k256FpArithmetic{}, + } +} + +func k256FpParamsInit() { + k256FpParams = native.FieldParams{ + R: [native.FieldLimbs]uint64{ + 0x00000001000003d1, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + }, + R2: [native.FieldLimbs]uint64{ + 0x000007a2000e90a1, + 0x0000000000000001, + 0x0000000000000000, + 0x0000000000000000, + }, + R3: [native.FieldLimbs]uint64{ + 0x002bb1e33795f671, + 0x0000000100000b73, + 0x0000000000000000, + 0x0000000000000000, + }, + Modulus: [native.FieldLimbs]uint64{ + 0xfffffffefffffc2f, + 0xffffffffffffffff, + 0xffffffffffffffff, + 0xffffffffffffffff, + }, + BiModulus: new(big.Int).SetBytes([]byte{ + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfe, 0xff, 0xff, 0xfc, 0x2f, + }), + } +} + +func getK256FpParams() *native.FieldParams { + k256FpInitonce.Do(k256FpParamsInit) + return &k256FpParams +} + +// k256FpArithmetic is a struct with all the methods needed for working +// in mod p +type k256FpArithmetic struct{} + +// ToMontgomery converts this field to montgomery form +func (f k256FpArithmetic) ToMontgomery(out, arg *[native.FieldLimbs]uint64) { + ToMontgomery((*MontgomeryDomainFieldElement)(out), (*NonMontgomeryDomainFieldElement)(arg)) +} + +// FromMontgomery converts this field from montgomery form +func (f k256FpArithmetic) FromMontgomery(out, arg *[native.FieldLimbs]uint64) { + FromMontgomery((*NonMontgomeryDomainFieldElement)(out), (*MontgomeryDomainFieldElement)(arg)) +} + +// Neg performs modular negation +func (f k256FpArithmetic) Neg(out, arg *[native.FieldLimbs]uint64) { + Opp((*MontgomeryDomainFieldElement)(out), (*MontgomeryDomainFieldElement)(arg)) +} + +// Square performs modular square +func (f k256FpArithmetic) Square(out, arg *[native.FieldLimbs]uint64) { + Square((*MontgomeryDomainFieldElement)(out), (*MontgomeryDomainFieldElement)(arg)) +} + +// Mul performs modular multiplication +func (f k256FpArithmetic) Mul(out, arg1, arg2 *[native.FieldLimbs]uint64) { + Mul( + (*MontgomeryDomainFieldElement)(out), + (*MontgomeryDomainFieldElement)(arg1), + (*MontgomeryDomainFieldElement)(arg2), + ) +} + +// Add performs modular addition +func (f k256FpArithmetic) Add(out, arg1, arg2 *[native.FieldLimbs]uint64) { + Add( + 
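// Illustrative sketch (not part of this change): K256FpNew wires the generic
// native.Field API to the generated backend below; values stay in Montgomery
// form internally and only SetUint64/SetBytes/Bytes/BigInt cross that boundary.
// A hedged usage sketch (fp package scope, hypothetical helper):
func k256ArithmeticSketch() bool {
    a := K256FpNew().SetUint64(7)
    b := K256FpNew().SetUint64(3)
    sum := K256FpNew().Add(a, b)     // 10 mod p
    prod := K256FpNew().Mul(a, b)    // 21 mod p
    inv, ok := K256FpNew().Invert(b) // 3^-1 mod p
    quot := K256FpNew().Mul(a, inv)  // 7 * 3^-1 mod p
    return ok &&
        sum.Equal(K256FpNew().SetUint64(10)) == 1 &&
        prod.Equal(K256FpNew().SetUint64(21)) == 1 &&
        K256FpNew().Mul(quot, b).Equal(a) == 1
}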
(*MontgomeryDomainFieldElement)(out), + (*MontgomeryDomainFieldElement)(arg1), + (*MontgomeryDomainFieldElement)(arg2), + ) +} + +// Sub performs modular subtraction +func (f k256FpArithmetic) Sub(out, arg1, arg2 *[native.FieldLimbs]uint64) { + Sub( + (*MontgomeryDomainFieldElement)(out), + (*MontgomeryDomainFieldElement)(arg1), + (*MontgomeryDomainFieldElement)(arg2), + ) +} + +// Sqrt performs modular square root +func (f k256FpArithmetic) Sqrt(wasSquare *int, out, arg *[native.FieldLimbs]uint64) { + // p is congruent to 3 mod 4 we can compute + // sqrt using elem^(p+1)/4 mod p + // 0x3fffffffffffffffffffffffffffffffffffffffffffffffffffffffbfffff0c + var s, t [native.FieldLimbs]uint64 + params := getK256FpParams() + native.Pow(&s, arg, &[native.FieldLimbs]uint64{ + 0xffffffffbfffff0c, + 0xffffffffffffffff, + 0xffffffffffffffff, + 0x3fffffffffffffff, + }, params, f) + f.Square(&t, &s) + tv1 := &native.Field{Value: t, Params: params, Arithmetic: f} + tv2 := &native.Field{Value: *arg, Params: params, Arithmetic: f} + *wasSquare = tv1.Equal(tv2) + f.Selectznz(out, out, &s, *wasSquare) +} + +// Invert performs modular inverse +func (f k256FpArithmetic) Invert(wasInverted *int, out, arg *[native.FieldLimbs]uint64) { + // The binary representation of (p - 2) has 5 groups of 1s, with lengths in + // { 1, 2, 22, 223 }. Use an addition chain to calculate 2^n - 1 for each group: + // [1], [2], 3, 6, 9, 11, [22], 44, 88, 176, 220, [223] + var s, x2, x3, x6, x9, x11, x22, x44, x88, x176, x220, x223 [native.FieldLimbs]uint64 + + native.Pow2k(&x2, arg, 1, f) + f.Mul(&x2, &x2, arg) + + native.Pow2k(&x3, &x2, 1, f) + f.Mul(&x3, &x3, arg) + + native.Pow2k(&x6, &x3, 3, f) + f.Mul(&x6, &x6, &x3) + + native.Pow2k(&x9, &x6, 3, f) + f.Mul(&x9, &x9, &x3) + + native.Pow2k(&x11, &x9, 2, f) + f.Mul(&x11, &x11, &x2) + + native.Pow2k(&x22, &x11, 11, f) + f.Mul(&x22, &x22, &x11) + + native.Pow2k(&x44, &x22, 22, f) + f.Mul(&x44, &x44, &x22) + + native.Pow2k(&x88, &x44, 44, f) + f.Mul(&x88, &x88, &x44) + + native.Pow2k(&x176, &x88, 88, f) + f.Mul(&x176, &x176, &x88) + + native.Pow2k(&x220, &x176, 44, f) + f.Mul(&x220, &x220, &x44) + + native.Pow2k(&x223, &x220, 3, f) + f.Mul(&x223, &x223, &x3) + + // Use sliding window over the group + native.Pow2k(&s, &x223, 23, f) + f.Mul(&s, &s, &x22) + native.Pow2k(&s, &s, 5, f) + f.Mul(&s, &s, arg) + native.Pow2k(&s, &s, 3, f) + f.Mul(&s, &s, &x2) + native.Pow2k(&s, &s, 2, f) + f.Mul(&s, &s, arg) + + tv := &native.Field{Value: *arg, Params: getK256FpParams(), Arithmetic: f} + + *wasInverted = tv.IsNonZero() + f.Selectznz(out, out, &s, *wasInverted) +} + +// FromBytes converts a little endian byte array into a field element +func (f k256FpArithmetic) FromBytes(out *[native.FieldLimbs]uint64, arg *[native.FieldBytes]byte) { + FromBytes(out, arg) +} + +// ToBytes converts a field element to a little endian byte array +func (f k256FpArithmetic) ToBytes(out *[native.FieldBytes]byte, arg *[native.FieldLimbs]uint64) { + ToBytes(out, arg) +} + +// Selectznz performs conditional select. +// selects arg1 if choice == 0 and arg2 if choice == 1 +func (f k256FpArithmetic) Selectznz(out, arg1, arg2 *[native.FieldLimbs]uint64, choice int) { + Selectznz(out, uint1(choice), arg1, arg2) +} diff --git a/core/curves/native/k256/fp/fp_test.go b/core/curves/native/k256/fp/fp_test.go new file mode 100644 index 0000000..6e01fa0 --- /dev/null +++ b/core/curves/native/k256/fp/fp_test.go @@ -0,0 +1,330 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
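// Illustrative sketch (not part of this change): because the secp256k1 base
// field has p ≡ 3 (mod 4), Sqrt above is one exponentiation by (p+1)/4 followed
// by a constant-time check that the candidate squares back to the input (it
// will not for a non-residue). A hedged sketch of that contract (fp package
// scope, hypothetical helper):
func k256SqrtSketch(x *native.Field) bool {
    sq := K256FpNew().Square(x) // a square by construction
    root, wasSquare := K256FpNew().Sqrt(sq)
    back := K256FpNew().Square(root)
    return wasSquare && back.Equal(sq) == 1 // root is ±x
}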
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package fp + +import ( + crand "crypto/rand" + "math/big" + "math/rand" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core/curves/native" + "github.com/sonr-io/sonr/crypto/internal" +) + +func TestFpSetOne(t *testing.T) { + fp := K256FpNew().SetOne() + require.NotNil(t, fp) + require.Equal(t, fp.Value, getK256FpParams().R) +} + +func TestFpSetUint64(t *testing.T) { + act := K256FpNew().SetUint64(1 << 60) + require.NotNil(t, act) + // Remember it will be in montgomery form + require.Equal(t, act.Value[0], uint64(0x1000000000000000)) +} + +func TestFpAdd(t *testing.T) { + lhs := K256FpNew().SetOne() + rhs := K256FpNew().SetOne() + exp := K256FpNew().SetUint64(2) + res := K256FpNew().Add(lhs, rhs) + require.NotNil(t, res) + require.Equal(t, res.Equal(exp), 1) + + // Fuzz test + for i := 0; i < 25; i++ { + // Divide by 4 to prevent overflow false errors + l := rand.Uint64() >> 2 + r := rand.Uint64() >> 2 + e := l + r + lhs.SetUint64(l) + rhs.SetUint64(r) + exp.SetUint64(e) + + a := K256FpNew().Add(lhs, rhs) + require.NotNil(t, a) + require.Equal(t, exp, a) + } +} + +func TestFpSub(t *testing.T) { + lhs := K256FpNew().SetOne() + rhs := K256FpNew().SetOne() + exp := K256FpNew().SetZero() + res := K256FpNew().Sub(lhs, rhs) + require.NotNil(t, res) + require.Equal(t, 1, res.Equal(exp)) + + // Fuzz test + for i := 0; i < 25; i++ { + // Divide by 4 to prevent overflow false errors + l := rand.Uint64() >> 2 + r := rand.Uint64() >> 2 + if l < r { + l, r = r, l + } + e := l - r + lhs.SetUint64(l) + rhs.SetUint64(r) + exp.SetUint64(e) + + a := K256FpNew().Sub(lhs, rhs) + require.NotNil(t, a) + require.Equal(t, exp, a) + } +} + +func TestFpMul(t *testing.T) { + lhs := K256FpNew().SetOne() + rhs := K256FpNew().SetOne() + exp := K256FpNew().SetOne() + res := K256FpNew().Mul(lhs, rhs) + require.NotNil(t, res) + require.Equal(t, 1, res.Equal(exp)) + + // Fuzz test + for i := 0; i < 25; i++ { + // Divide by 4 to prevent overflow false errors + l := rand.Uint32() + r := rand.Uint32() + e := uint64(l) * uint64(r) + lhs.SetUint64(uint64(l)) + rhs.SetUint64(uint64(r)) + exp.SetUint64(e) + + a := K256FpNew().Mul(lhs, rhs) + require.NotNil(t, a) + require.Equal(t, exp, a) + } +} + +func TestFpDouble(t *testing.T) { + a := K256FpNew().SetUint64(2) + e := K256FpNew().SetUint64(4) + require.Equal(t, e, K256FpNew().Double(a)) + + for i := 0; i < 25; i++ { + tv := rand.Uint32() + ttv := uint64(tv) * 2 + a = K256FpNew().SetUint64(uint64(tv)) + e = K256FpNew().SetUint64(ttv) + require.Equal(t, e, K256FpNew().Double(a)) + } +} + +func TestFpSquare(t *testing.T) { + a := K256FpNew().SetUint64(4) + e := K256FpNew().SetUint64(16) + require.Equal(t, e, a.Square(a)) + + for i := 0; i < 25; i++ { + j := rand.Uint32() + exp := uint64(j) * uint64(j) + e.SetUint64(exp) + a.SetUint64(uint64(j)) + require.Equal(t, e, a.Square(a)) + } +} + +func TestFpNeg(t *testing.T) { + a := K256FpNew().SetOne() + a.Neg(a) + e := K256FpNew().SetRaw(&[native.FieldLimbs]uint64{0xfffffffdfffff85e, 0xffffffffffffffff, 0xffffffffffffffff, 0xffffffffffffffff}) + require.Equal(t, e, a) +} + +func TestFpExp(t *testing.T) { + e := K256FpNew().SetUint64(8) + a := K256FpNew().SetUint64(2) + by := K256FpNew().SetUint64(3) + require.Equal(t, e, a.Exp(a, by)) +} + +func TestFpSqrt(t *testing.T) { + t1 := K256FpNew().SetUint64(2) + t2 := K256FpNew().Neg(t1) + t3 := K256FpNew().Square(t1) + _, wasSquare := t3.Sqrt(t3) + require.True(t, wasSquare) + require.Equal(t, 1, 
t1.Equal(t3)|t2.Equal(t3)) + t1.SetUint64(5) + _, wasSquare = K256FpNew().Sqrt(t1) + require.False(t, wasSquare) +} + +func TestFpInvert(t *testing.T) { + twoInv := K256FpNew().SetLimbs(&[native.FieldLimbs]uint64{ + 0xffffffff7ffffe18, + 0xffffffffffffffff, + 0xffffffffffffffff, + 0x7fffffffffffffff, + }) + two := K256FpNew().SetUint64(2) + a, inverted := K256FpNew().Invert(two) + require.True(t, inverted) + require.Equal(t, a, twoInv) + + seven := K256FpNew().SetUint64(7) + sevenInv := K256FpNew().SetRaw(&[native.FieldLimbs]uint64{0xdb6db6dab6db6afd, 0x6db6db6db6db6db6, 0xb6db6db6db6db6db, 0xdb6db6db6db6db6d}) + a, inverted = K256FpNew().Invert(seven) + require.True(t, inverted) + require.Equal(t, a, sevenInv) + + lhs := K256FpNew().SetUint64(9) + rhs := K256FpNew().SetUint64(3) + rhsInv, inverted := K256FpNew().Invert(rhs) + require.True(t, inverted) + require.Equal(t, rhs, K256FpNew().Mul(lhs, rhsInv)) + + rhs.SetZero() + _, inverted = K256FpNew().Invert(rhs) + require.False(t, inverted) +} + +func TestFpCMove(t *testing.T) { + t1 := K256FpNew().SetUint64(5) + t2 := K256FpNew().SetUint64(10) + require.Equal(t, t1, K256FpNew().CMove(t1, t2, 0)) + require.Equal(t, t2, K256FpNew().CMove(t1, t2, 1)) +} + +func TestFpBytes(t *testing.T) { + t1 := K256FpNew().SetUint64(99) + seq := t1.Bytes() + t2, err := K256FpNew().SetBytes(&seq) + require.NoError(t, err) + require.Equal(t, t1, t2) + + for i := 0; i < 25; i++ { + t1.SetUint64(rand.Uint64()) + seq = t1.Bytes() + _, err = t2.SetBytes(&seq) + require.NoError(t, err) + require.Equal(t, t1, t2) + } +} + +func TestFpCmp(t *testing.T) { + tests := []struct { + a *native.Field + b *native.Field + e int + }{ + { + a: K256FpNew().SetRaw(&[native.FieldLimbs]uint64{2731658267414164836, 14655288906067898431, 6537465423330262322, 8306191141697566219}), + b: K256FpNew().SetRaw(&[native.FieldLimbs]uint64{6472764012681988529, 10848812988401906064, 2961825807536828898, 4282183981941645679}), + e: 1, + }, + { + a: K256FpNew().SetRaw(&[native.FieldLimbs]uint64{8023004109510539223, 4652004072850285717, 1877219145646046927, 383214385093921911}), + b: K256FpNew().SetRaw(&[native.FieldLimbs]uint64{10099384440823804262, 16139476942229308465, 8636966320777393798, 5435928725024696785}), + e: -1, + }, + { + a: K256FpNew().SetRaw(&[native.FieldLimbs]uint64{3741840066202388211, 12165774400417314871, 16619312580230515379, 16195032234110087705}), + b: K256FpNew().SetRaw(&[native.FieldLimbs]uint64{3905865991286066744, 543690822309071825, 17963103015950210055, 3745476720756119742}), + e: 1, + }, + { + a: K256FpNew().SetRaw(&[native.FieldLimbs]uint64{16660853697936147788, 7799793619412111108, 13515141085171033220, 2641079731236069032}), + b: K256FpNew().SetRaw(&[native.FieldLimbs]uint64{17790588295388238399, 571847801379669440, 14537208974498222469, 12792570372087452754}), + e: -1, + }, + { + a: K256FpNew().SetRaw(&[native.FieldLimbs]uint64{3912839285384959186, 2701177075110484070, 6453856448115499033, 6475797457962597458}), + b: K256FpNew().SetRaw(&[native.FieldLimbs]uint64{1282566391665688512, 13503640416992806563, 2962240104675990153, 3374904770947067689}), + e: 1, + }, + { + a: K256FpNew().SetRaw(&[native.FieldLimbs]uint64{5716631803409360103, 7859567470082614154, 12747956220853330146, 18434584096087315020}), + b: K256FpNew().SetRaw(&[native.FieldLimbs]uint64{16317076441459028418, 12854146980376319601, 2258436689269031143, 9531877130792223752}), + e: 1, + }, + { + a: K256FpNew().SetRaw(&[native.FieldLimbs]uint64{17955191469941083403, 10350326247207200880, 
17263512235150705075, 12700328451238078022}), + b: K256FpNew().SetRaw(&[native.FieldLimbs]uint64{6767595547459644695, 7146403825494928147, 12269344038346710612, 9122477829383225603}), + e: 1, + }, + { + a: K256FpNew().SetRaw(&[native.FieldLimbs]uint64{17099388671847024438, 6426264987820696548, 10641143464957227405, 7709745403700754098}), + b: K256FpNew().SetRaw(&[native.FieldLimbs]uint64{10799154372990268556, 17178492485719929374, 5705777922258988797, 8051037767683567782}), + e: -1, + }, + { + a: K256FpNew().SetRaw(&[native.FieldLimbs]uint64{4567139260680454325, 1629385880182139061, 16607020832317899145, 1261011562621553200}), + b: K256FpNew().SetRaw(&[native.FieldLimbs]uint64{13487234491304534488, 17872642955936089265, 17651026784972590233, 9468934643333871559}), + e: -1, + }, + { + a: K256FpNew().SetRaw(&[native.FieldLimbs]uint64{18071070103467571798, 11787850505799426140, 10631355976141928593, 4867785203635092610}), + b: K256FpNew().SetRaw(&[native.FieldLimbs]uint64{12596443599426461624, 10176122686151524591, 17075755296887483439, 6726169532695070719}), + e: -1, + }, + } + + for _, test := range tests { + require.Equal(t, test.e, test.a.Cmp(test.b)) + require.Equal(t, -test.e, test.b.Cmp(test.a)) + require.Equal(t, 0, test.a.Cmp(test.a)) + require.Equal(t, 0, test.b.Cmp(test.b)) + } +} + +func TestFpBigInt(t *testing.T) { + t1 := K256FpNew().SetBigInt(big.NewInt(9999)) + t2 := K256FpNew().SetBigInt(t1.BigInt()) + require.Equal(t, t1, t2) + + e := K256FpNew().SetRaw(&[native.FieldLimbs]uint64{0xc6c6c6c63939371d, 0xc6c6c6c6c6c6c6c6, 0x8d8d8dd28485081d, 0x8484848484848484}) + b := new( + big.Int, + ).SetBytes([]byte{9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9}) + t1.SetBigInt(b) + require.Equal(t, e, t1) + e.Value[0] = 0x39393938c6c6c512 + e.Value[1] = 0x3939393939393939 + e.Value[2] = 0x7272722d7b7af7e2 + e.Value[3] = 0x7b7b7b7b7b7b7b7b + b.Neg(b) + t1.SetBigInt(b) + require.Equal(t, e, t1) +} + +func TestFpSetBytesWide(t *testing.T) { + e := K256FpNew().SetRaw(&[native.FieldLimbs]uint64{0x6aa784623e2d641e, 0x7c40617d755bae27, 0x206b7be66ed7b71b, 0x6d1e4fc581e19dc2}) + + a := K256FpNew().SetBytesWide(&[64]byte{ + 0x69, 0x23, 0x5a, 0x0b, 0xce, 0x0c, 0xa8, 0x64, + 0x3c, 0x78, 0xbc, 0x01, 0x05, 0xef, 0xf2, 0x84, + 0xde, 0xbb, 0x6b, 0xc8, 0x63, 0x5e, 0x6e, 0x69, + 0x62, 0xcc, 0xc6, 0x2d, 0xf5, 0x72, 0x40, 0x92, + 0x28, 0x11, 0xd6, 0xc8, 0x07, 0xa5, 0x88, 0x82, + 0xfe, 0xe3, 0x97, 0xf6, 0x1e, 0xfb, 0x2e, 0x3b, + 0x27, 0x5f, 0x85, 0x06, 0x8d, 0x99, 0xa4, 0x75, + 0xc0, 0x2c, 0x71, 0x69, 0x9e, 0x58, 0xea, 0x52, + }) + require.Equal(t, e, a) +} + +func TestFpSetBytesWideBigInt(t *testing.T) { + params := getK256FpParams() + var tv2 [64]byte + for i := 0; i < 25; i++ { + _, _ = crand.Read(tv2[:]) + e := new(big.Int).SetBytes(tv2[:]) + e.Mod(e, params.BiModulus) + + tv := internal.ReverseScalarBytes(tv2[:]) + copy(tv2[:], tv) + a := K256FpNew().SetBytesWide(&tv2) + require.Equal(t, 0, e.Cmp(a.BigInt())) + } +} diff --git a/core/curves/native/k256/fp/secp256k1_fp.go b/core/curves/native/k256/fp/secp256k1_fp.go new file mode 100755 index 0000000..bbf70c0 --- /dev/null +++ b/core/curves/native/k256/fp/secp256k1_fp.go @@ -0,0 +1,1942 @@ +// Autogenerated: 'src/ExtractionOCaml/word_by_word_montgomery' --lang Go --no-wide-int --relax-primitive-carry-to-bitwidth 32,64 --cmovznz-by-mul --internal-static --package-case flatcase --public-function-case UpperCamelCase --private-function-case camelCase 
--public-type-case UpperCamelCase --private-type-case camelCase --no-prefix-fiat --doc-newline-in-typedef-bounds --doc-prepend-header 'Code generated by Fiat Cryptography. DO NOT EDIT.' --doc-text-before-function-name ” --doc-text-before-type-name ” --package-name secp256k1 ” 64 '2^256 - 2^32 - 977' mul square add sub opp from_montgomery to_montgomery nonzero selectznz to_bytes from_bytes one msat divstep divstep_precomp +// +// curve description (via package name): secp256k1 +// +// machine_wordsize = 64 (from "64") +// +// requested operations: mul, square, add, sub, opp, from_montgomery, to_montgomery, nonzero, selectznz, to_bytes, from_bytes, one, msat, divstep, divstep_precomp +// +// m = 0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f (from "2^256 - 2^32 - 977") +// +// NOTE: In addition to the bounds specified above each function, all +// +// functions synthesized for this Montgomery arithmetic require the +// +// input to be strictly less than the prime modulus (m), and also +// +// require the input to be in the unique saturated representation. +// +// All functions also ensure that these two properties are true of +// +// return values. +// +// Computed values: +// +// eval z = z[0] + (z[1] << 64) + (z[2] << 128) + (z[3] << 192) +// +// bytes_eval z = z[0] + (z[1] << 8) + (z[2] << 16) + (z[3] << 24) + (z[4] << 32) + (z[5] << 40) + (z[6] << 48) + (z[7] << 56) + (z[8] << 64) + (z[9] << 72) + (z[10] << 80) + (z[11] << 88) + (z[12] << 96) + (z[13] << 104) + (z[14] << 112) + (z[15] << 120) + (z[16] << 128) + (z[17] << 136) + (z[18] << 144) + (z[19] << 152) + (z[20] << 160) + (z[21] << 168) + (z[22] << 176) + (z[23] << 184) + (z[24] << 192) + (z[25] << 200) + (z[26] << 208) + (z[27] << 216) + (z[28] << 224) + (z[29] << 232) + (z[30] << 240) + (z[31] << 248) +// +// twos_complement_eval z = let x1 := z[0] + (z[1] << 64) + (z[2] << 128) + (z[3] << 192) in +// +// if x1 & (2^256-1) < 2^255 then x1 & (2^256-1) else (x1 & (2^256-1)) - 2^256 +package fp + +import "math/bits" + +type ( + uint1 uint64 // We use uint64 instead of a more narrow type for performance reasons; see https://github.com/mit-plv/fiat-crypto/pull/1006#issuecomment-892625927 +) + +// MontgomeryDomainFieldElement is a field element in the Montgomery domain. +// +// Bounds: +// +// [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +type MontgomeryDomainFieldElement [4]uint64 + +// NonMontgomeryDomainFieldElement is a field element NOT in the Montgomery domain. +// +// Bounds: +// +// [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +type NonMontgomeryDomainFieldElement [4]uint64 + +// cmovznzU64 is a single-word conditional move. +// +// Postconditions: +// +// out1 = (if arg1 = 0 then arg2 else arg3) +// +// Input Bounds: +// +// arg1: [0x0 ~> 0x1] +// arg2: [0x0 ~> 0xffffffffffffffff] +// arg3: [0x0 ~> 0xffffffffffffffff] +// +// Output Bounds: +// +// out1: [0x0 ~> 0xffffffffffffffff] +func cmovznzU64(out1 *uint64, arg1 uint1, arg2 uint64, arg3 uint64) { + x1 := (uint64(arg1) * 0xffffffffffffffff) + x2 := ((x1 & arg3) | ((^x1) & arg2)) + *out1 = x2 +} + +// Mul multiplies two field elements in the Montgomery domain. 
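// Illustrative sketch (not part of this change): the generated routines only
// operate on the Montgomery domain, so a "plain" modular multiplication is a
// three-step convert-in / Mul / convert-out dance (ToMontgomery and
// FromMontgomery are generated further down in this file). Hedged sketch,
// hypothetical helper:
func montgomeryMulSketch(a, b *NonMontgomeryDomainFieldElement) NonMontgomeryDomainFieldElement {
    var am, bm, cm MontgomeryDomainFieldElement
    var out NonMontgomeryDomainFieldElement
    ToMontgomery(&am, a)      // aR mod p
    ToMontgomery(&bm, b)      // bR mod p
    Mul(&cm, &am, &bm)        // abR mod p (the reduction strips one factor of R)
    FromMontgomery(&out, &cm) // ab mod p
    return out
}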
+// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// 0 ≤ eval arg2 < m +// +// Postconditions: +// +// eval (from_montgomery out1) mod m = (eval (from_montgomery arg1) * eval (from_montgomery arg2)) mod m +// 0 ≤ eval out1 < m +func Mul( + out1 *MontgomeryDomainFieldElement, + arg1 *MontgomeryDomainFieldElement, + arg2 *MontgomeryDomainFieldElement, +) { + x1 := arg1[1] + x2 := arg1[2] + x3 := arg1[3] + x4 := arg1[0] + var x5 uint64 + var x6 uint64 + x6, x5 = bits.Mul64(x4, arg2[3]) + var x7 uint64 + var x8 uint64 + x8, x7 = bits.Mul64(x4, arg2[2]) + var x9 uint64 + var x10 uint64 + x10, x9 = bits.Mul64(x4, arg2[1]) + var x11 uint64 + var x12 uint64 + x12, x11 = bits.Mul64(x4, arg2[0]) + var x13 uint64 + var x14 uint64 + x13, x14 = bits.Add64(x12, x9, uint64(0x0)) + var x15 uint64 + var x16 uint64 + x15, x16 = bits.Add64(x10, x7, uint64(uint1(x14))) + var x17 uint64 + var x18 uint64 + x17, x18 = bits.Add64(x8, x5, uint64(uint1(x16))) + x19 := (uint64(uint1(x18)) + x6) + var x20 uint64 + _, x20 = bits.Mul64(x11, 0xd838091dd2253531) + var x22 uint64 + var x23 uint64 + x23, x22 = bits.Mul64(x20, 0xffffffffffffffff) + var x24 uint64 + var x25 uint64 + x25, x24 = bits.Mul64(x20, 0xffffffffffffffff) + var x26 uint64 + var x27 uint64 + x27, x26 = bits.Mul64(x20, 0xffffffffffffffff) + var x28 uint64 + var x29 uint64 + x29, x28 = bits.Mul64(x20, 0xfffffffefffffc2f) + var x30 uint64 + var x31 uint64 + x30, x31 = bits.Add64(x29, x26, uint64(0x0)) + var x32 uint64 + var x33 uint64 + x32, x33 = bits.Add64(x27, x24, uint64(uint1(x31))) + var x34 uint64 + var x35 uint64 + x34, x35 = bits.Add64(x25, x22, uint64(uint1(x33))) + x36 := (uint64(uint1(x35)) + x23) + var x38 uint64 + _, x38 = bits.Add64(x11, x28, uint64(0x0)) + var x39 uint64 + var x40 uint64 + x39, x40 = bits.Add64(x13, x30, uint64(uint1(x38))) + var x41 uint64 + var x42 uint64 + x41, x42 = bits.Add64(x15, x32, uint64(uint1(x40))) + var x43 uint64 + var x44 uint64 + x43, x44 = bits.Add64(x17, x34, uint64(uint1(x42))) + var x45 uint64 + var x46 uint64 + x45, x46 = bits.Add64(x19, x36, uint64(uint1(x44))) + var x47 uint64 + var x48 uint64 + x48, x47 = bits.Mul64(x1, arg2[3]) + var x49 uint64 + var x50 uint64 + x50, x49 = bits.Mul64(x1, arg2[2]) + var x51 uint64 + var x52 uint64 + x52, x51 = bits.Mul64(x1, arg2[1]) + var x53 uint64 + var x54 uint64 + x54, x53 = bits.Mul64(x1, arg2[0]) + var x55 uint64 + var x56 uint64 + x55, x56 = bits.Add64(x54, x51, uint64(0x0)) + var x57 uint64 + var x58 uint64 + x57, x58 = bits.Add64(x52, x49, uint64(uint1(x56))) + var x59 uint64 + var x60 uint64 + x59, x60 = bits.Add64(x50, x47, uint64(uint1(x58))) + x61 := (uint64(uint1(x60)) + x48) + var x62 uint64 + var x63 uint64 + x62, x63 = bits.Add64(x39, x53, uint64(0x0)) + var x64 uint64 + var x65 uint64 + x64, x65 = bits.Add64(x41, x55, uint64(uint1(x63))) + var x66 uint64 + var x67 uint64 + x66, x67 = bits.Add64(x43, x57, uint64(uint1(x65))) + var x68 uint64 + var x69 uint64 + x68, x69 = bits.Add64(x45, x59, uint64(uint1(x67))) + var x70 uint64 + var x71 uint64 + x70, x71 = bits.Add64(uint64(uint1(x46)), x61, uint64(uint1(x69))) + var x72 uint64 + _, x72 = bits.Mul64(x62, 0xd838091dd2253531) + var x74 uint64 + var x75 uint64 + x75, x74 = bits.Mul64(x72, 0xffffffffffffffff) + var x76 uint64 + var x77 uint64 + x77, x76 = bits.Mul64(x72, 0xffffffffffffffff) + var x78 uint64 + var x79 uint64 + x79, x78 = bits.Mul64(x72, 0xffffffffffffffff) + var x80 uint64 + var x81 uint64 + x81, x80 = bits.Mul64(x72, 0xfffffffefffffc2f) + var x82 uint64 + var x83 uint64 + x82, x83 = 
bits.Add64(x81, x78, uint64(0x0)) + var x84 uint64 + var x85 uint64 + x84, x85 = bits.Add64(x79, x76, uint64(uint1(x83))) + var x86 uint64 + var x87 uint64 + x86, x87 = bits.Add64(x77, x74, uint64(uint1(x85))) + x88 := (uint64(uint1(x87)) + x75) + var x90 uint64 + _, x90 = bits.Add64(x62, x80, uint64(0x0)) + var x91 uint64 + var x92 uint64 + x91, x92 = bits.Add64(x64, x82, uint64(uint1(x90))) + var x93 uint64 + var x94 uint64 + x93, x94 = bits.Add64(x66, x84, uint64(uint1(x92))) + var x95 uint64 + var x96 uint64 + x95, x96 = bits.Add64(x68, x86, uint64(uint1(x94))) + var x97 uint64 + var x98 uint64 + x97, x98 = bits.Add64(x70, x88, uint64(uint1(x96))) + x99 := (uint64(uint1(x98)) + uint64(uint1(x71))) + var x100 uint64 + var x101 uint64 + x101, x100 = bits.Mul64(x2, arg2[3]) + var x102 uint64 + var x103 uint64 + x103, x102 = bits.Mul64(x2, arg2[2]) + var x104 uint64 + var x105 uint64 + x105, x104 = bits.Mul64(x2, arg2[1]) + var x106 uint64 + var x107 uint64 + x107, x106 = bits.Mul64(x2, arg2[0]) + var x108 uint64 + var x109 uint64 + x108, x109 = bits.Add64(x107, x104, uint64(0x0)) + var x110 uint64 + var x111 uint64 + x110, x111 = bits.Add64(x105, x102, uint64(uint1(x109))) + var x112 uint64 + var x113 uint64 + x112, x113 = bits.Add64(x103, x100, uint64(uint1(x111))) + x114 := (uint64(uint1(x113)) + x101) + var x115 uint64 + var x116 uint64 + x115, x116 = bits.Add64(x91, x106, uint64(0x0)) + var x117 uint64 + var x118 uint64 + x117, x118 = bits.Add64(x93, x108, uint64(uint1(x116))) + var x119 uint64 + var x120 uint64 + x119, x120 = bits.Add64(x95, x110, uint64(uint1(x118))) + var x121 uint64 + var x122 uint64 + x121, x122 = bits.Add64(x97, x112, uint64(uint1(x120))) + var x123 uint64 + var x124 uint64 + x123, x124 = bits.Add64(x99, x114, uint64(uint1(x122))) + var x125 uint64 + _, x125 = bits.Mul64(x115, 0xd838091dd2253531) + var x127 uint64 + var x128 uint64 + x128, x127 = bits.Mul64(x125, 0xffffffffffffffff) + var x129 uint64 + var x130 uint64 + x130, x129 = bits.Mul64(x125, 0xffffffffffffffff) + var x131 uint64 + var x132 uint64 + x132, x131 = bits.Mul64(x125, 0xffffffffffffffff) + var x133 uint64 + var x134 uint64 + x134, x133 = bits.Mul64(x125, 0xfffffffefffffc2f) + var x135 uint64 + var x136 uint64 + x135, x136 = bits.Add64(x134, x131, uint64(0x0)) + var x137 uint64 + var x138 uint64 + x137, x138 = bits.Add64(x132, x129, uint64(uint1(x136))) + var x139 uint64 + var x140 uint64 + x139, x140 = bits.Add64(x130, x127, uint64(uint1(x138))) + x141 := (uint64(uint1(x140)) + x128) + var x143 uint64 + _, x143 = bits.Add64(x115, x133, uint64(0x0)) + var x144 uint64 + var x145 uint64 + x144, x145 = bits.Add64(x117, x135, uint64(uint1(x143))) + var x146 uint64 + var x147 uint64 + x146, x147 = bits.Add64(x119, x137, uint64(uint1(x145))) + var x148 uint64 + var x149 uint64 + x148, x149 = bits.Add64(x121, x139, uint64(uint1(x147))) + var x150 uint64 + var x151 uint64 + x150, x151 = bits.Add64(x123, x141, uint64(uint1(x149))) + x152 := (uint64(uint1(x151)) + uint64(uint1(x124))) + var x153 uint64 + var x154 uint64 + x154, x153 = bits.Mul64(x3, arg2[3]) + var x155 uint64 + var x156 uint64 + x156, x155 = bits.Mul64(x3, arg2[2]) + var x157 uint64 + var x158 uint64 + x158, x157 = bits.Mul64(x3, arg2[1]) + var x159 uint64 + var x160 uint64 + x160, x159 = bits.Mul64(x3, arg2[0]) + var x161 uint64 + var x162 uint64 + x161, x162 = bits.Add64(x160, x157, uint64(0x0)) + var x163 uint64 + var x164 uint64 + x163, x164 = bits.Add64(x158, x155, uint64(uint1(x162))) + var x165 uint64 + var x166 uint64 + x165, x166 
= bits.Add64(x156, x153, uint64(uint1(x164))) + x167 := (uint64(uint1(x166)) + x154) + var x168 uint64 + var x169 uint64 + x168, x169 = bits.Add64(x144, x159, uint64(0x0)) + var x170 uint64 + var x171 uint64 + x170, x171 = bits.Add64(x146, x161, uint64(uint1(x169))) + var x172 uint64 + var x173 uint64 + x172, x173 = bits.Add64(x148, x163, uint64(uint1(x171))) + var x174 uint64 + var x175 uint64 + x174, x175 = bits.Add64(x150, x165, uint64(uint1(x173))) + var x176 uint64 + var x177 uint64 + x176, x177 = bits.Add64(x152, x167, uint64(uint1(x175))) + var x178 uint64 + _, x178 = bits.Mul64(x168, 0xd838091dd2253531) + var x180 uint64 + var x181 uint64 + x181, x180 = bits.Mul64(x178, 0xffffffffffffffff) + var x182 uint64 + var x183 uint64 + x183, x182 = bits.Mul64(x178, 0xffffffffffffffff) + var x184 uint64 + var x185 uint64 + x185, x184 = bits.Mul64(x178, 0xffffffffffffffff) + var x186 uint64 + var x187 uint64 + x187, x186 = bits.Mul64(x178, 0xfffffffefffffc2f) + var x188 uint64 + var x189 uint64 + x188, x189 = bits.Add64(x187, x184, uint64(0x0)) + var x190 uint64 + var x191 uint64 + x190, x191 = bits.Add64(x185, x182, uint64(uint1(x189))) + var x192 uint64 + var x193 uint64 + x192, x193 = bits.Add64(x183, x180, uint64(uint1(x191))) + x194 := (uint64(uint1(x193)) + x181) + var x196 uint64 + _, x196 = bits.Add64(x168, x186, uint64(0x0)) + var x197 uint64 + var x198 uint64 + x197, x198 = bits.Add64(x170, x188, uint64(uint1(x196))) + var x199 uint64 + var x200 uint64 + x199, x200 = bits.Add64(x172, x190, uint64(uint1(x198))) + var x201 uint64 + var x202 uint64 + x201, x202 = bits.Add64(x174, x192, uint64(uint1(x200))) + var x203 uint64 + var x204 uint64 + x203, x204 = bits.Add64(x176, x194, uint64(uint1(x202))) + x205 := (uint64(uint1(x204)) + uint64(uint1(x177))) + var x206 uint64 + var x207 uint64 + x206, x207 = bits.Sub64(x197, 0xfffffffefffffc2f, uint64(0x0)) + var x208 uint64 + var x209 uint64 + x208, x209 = bits.Sub64(x199, 0xffffffffffffffff, uint64(uint1(x207))) + var x210 uint64 + var x211 uint64 + x210, x211 = bits.Sub64(x201, 0xffffffffffffffff, uint64(uint1(x209))) + var x212 uint64 + var x213 uint64 + x212, x213 = bits.Sub64(x203, 0xffffffffffffffff, uint64(uint1(x211))) + var x215 uint64 + _, x215 = bits.Sub64(x205, uint64(0x0), uint64(uint1(x213))) + var x216 uint64 + cmovznzU64(&x216, uint1(x215), x206, x197) + var x217 uint64 + cmovznzU64(&x217, uint1(x215), x208, x199) + var x218 uint64 + cmovznzU64(&x218, uint1(x215), x210, x201) + var x219 uint64 + cmovznzU64(&x219, uint1(x215), x212, x203) + out1[0] = x216 + out1[1] = x217 + out1[2] = x218 + out1[3] = x219 +} + +// Square squares a field element in the Montgomery domain. 
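// Illustrative note (not part of this change): the tail of Mul above is the
// usual Montgomery conditional subtraction. The reduced value lies in [0, 2p),
// so p is subtracted once and the borrow drives cmovznzU64 to select either the
// subtracted or the original limbs, with no data-dependent branch. The same
// mask idiom in isolation (hypothetical helper, not in this file):
func condSelectU64Sketch(choice, ifZero, ifOne uint64) uint64 {
    mask := -(choice & 1)                    // all-zero or all-one mask
    return (mask & ifOne) | (^mask & ifZero) // branch-free select
}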
+// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// +// Postconditions: +// +// eval (from_montgomery out1) mod m = (eval (from_montgomery arg1) * eval (from_montgomery arg1)) mod m +// 0 ≤ eval out1 < m +func Square(out1 *MontgomeryDomainFieldElement, arg1 *MontgomeryDomainFieldElement) { + x1 := arg1[1] + x2 := arg1[2] + x3 := arg1[3] + x4 := arg1[0] + var x5 uint64 + var x6 uint64 + x6, x5 = bits.Mul64(x4, arg1[3]) + var x7 uint64 + var x8 uint64 + x8, x7 = bits.Mul64(x4, arg1[2]) + var x9 uint64 + var x10 uint64 + x10, x9 = bits.Mul64(x4, arg1[1]) + var x11 uint64 + var x12 uint64 + x12, x11 = bits.Mul64(x4, arg1[0]) + var x13 uint64 + var x14 uint64 + x13, x14 = bits.Add64(x12, x9, uint64(0x0)) + var x15 uint64 + var x16 uint64 + x15, x16 = bits.Add64(x10, x7, uint64(uint1(x14))) + var x17 uint64 + var x18 uint64 + x17, x18 = bits.Add64(x8, x5, uint64(uint1(x16))) + x19 := (uint64(uint1(x18)) + x6) + var x20 uint64 + _, x20 = bits.Mul64(x11, 0xd838091dd2253531) + var x22 uint64 + var x23 uint64 + x23, x22 = bits.Mul64(x20, 0xffffffffffffffff) + var x24 uint64 + var x25 uint64 + x25, x24 = bits.Mul64(x20, 0xffffffffffffffff) + var x26 uint64 + var x27 uint64 + x27, x26 = bits.Mul64(x20, 0xffffffffffffffff) + var x28 uint64 + var x29 uint64 + x29, x28 = bits.Mul64(x20, 0xfffffffefffffc2f) + var x30 uint64 + var x31 uint64 + x30, x31 = bits.Add64(x29, x26, uint64(0x0)) + var x32 uint64 + var x33 uint64 + x32, x33 = bits.Add64(x27, x24, uint64(uint1(x31))) + var x34 uint64 + var x35 uint64 + x34, x35 = bits.Add64(x25, x22, uint64(uint1(x33))) + x36 := (uint64(uint1(x35)) + x23) + var x38 uint64 + _, x38 = bits.Add64(x11, x28, uint64(0x0)) + var x39 uint64 + var x40 uint64 + x39, x40 = bits.Add64(x13, x30, uint64(uint1(x38))) + var x41 uint64 + var x42 uint64 + x41, x42 = bits.Add64(x15, x32, uint64(uint1(x40))) + var x43 uint64 + var x44 uint64 + x43, x44 = bits.Add64(x17, x34, uint64(uint1(x42))) + var x45 uint64 + var x46 uint64 + x45, x46 = bits.Add64(x19, x36, uint64(uint1(x44))) + var x47 uint64 + var x48 uint64 + x48, x47 = bits.Mul64(x1, arg1[3]) + var x49 uint64 + var x50 uint64 + x50, x49 = bits.Mul64(x1, arg1[2]) + var x51 uint64 + var x52 uint64 + x52, x51 = bits.Mul64(x1, arg1[1]) + var x53 uint64 + var x54 uint64 + x54, x53 = bits.Mul64(x1, arg1[0]) + var x55 uint64 + var x56 uint64 + x55, x56 = bits.Add64(x54, x51, uint64(0x0)) + var x57 uint64 + var x58 uint64 + x57, x58 = bits.Add64(x52, x49, uint64(uint1(x56))) + var x59 uint64 + var x60 uint64 + x59, x60 = bits.Add64(x50, x47, uint64(uint1(x58))) + x61 := (uint64(uint1(x60)) + x48) + var x62 uint64 + var x63 uint64 + x62, x63 = bits.Add64(x39, x53, uint64(0x0)) + var x64 uint64 + var x65 uint64 + x64, x65 = bits.Add64(x41, x55, uint64(uint1(x63))) + var x66 uint64 + var x67 uint64 + x66, x67 = bits.Add64(x43, x57, uint64(uint1(x65))) + var x68 uint64 + var x69 uint64 + x68, x69 = bits.Add64(x45, x59, uint64(uint1(x67))) + var x70 uint64 + var x71 uint64 + x70, x71 = bits.Add64(uint64(uint1(x46)), x61, uint64(uint1(x69))) + var x72 uint64 + _, x72 = bits.Mul64(x62, 0xd838091dd2253531) + var x74 uint64 + var x75 uint64 + x75, x74 = bits.Mul64(x72, 0xffffffffffffffff) + var x76 uint64 + var x77 uint64 + x77, x76 = bits.Mul64(x72, 0xffffffffffffffff) + var x78 uint64 + var x79 uint64 + x79, x78 = bits.Mul64(x72, 0xffffffffffffffff) + var x80 uint64 + var x81 uint64 + x81, x80 = bits.Mul64(x72, 0xfffffffefffffc2f) + var x82 uint64 + var x83 uint64 + x82, x83 = bits.Add64(x81, x78, uint64(0x0)) + var x84 uint64 + var x85 
uint64 + x84, x85 = bits.Add64(x79, x76, uint64(uint1(x83))) + var x86 uint64 + var x87 uint64 + x86, x87 = bits.Add64(x77, x74, uint64(uint1(x85))) + x88 := (uint64(uint1(x87)) + x75) + var x90 uint64 + _, x90 = bits.Add64(x62, x80, uint64(0x0)) + var x91 uint64 + var x92 uint64 + x91, x92 = bits.Add64(x64, x82, uint64(uint1(x90))) + var x93 uint64 + var x94 uint64 + x93, x94 = bits.Add64(x66, x84, uint64(uint1(x92))) + var x95 uint64 + var x96 uint64 + x95, x96 = bits.Add64(x68, x86, uint64(uint1(x94))) + var x97 uint64 + var x98 uint64 + x97, x98 = bits.Add64(x70, x88, uint64(uint1(x96))) + x99 := (uint64(uint1(x98)) + uint64(uint1(x71))) + var x100 uint64 + var x101 uint64 + x101, x100 = bits.Mul64(x2, arg1[3]) + var x102 uint64 + var x103 uint64 + x103, x102 = bits.Mul64(x2, arg1[2]) + var x104 uint64 + var x105 uint64 + x105, x104 = bits.Mul64(x2, arg1[1]) + var x106 uint64 + var x107 uint64 + x107, x106 = bits.Mul64(x2, arg1[0]) + var x108 uint64 + var x109 uint64 + x108, x109 = bits.Add64(x107, x104, uint64(0x0)) + var x110 uint64 + var x111 uint64 + x110, x111 = bits.Add64(x105, x102, uint64(uint1(x109))) + var x112 uint64 + var x113 uint64 + x112, x113 = bits.Add64(x103, x100, uint64(uint1(x111))) + x114 := (uint64(uint1(x113)) + x101) + var x115 uint64 + var x116 uint64 + x115, x116 = bits.Add64(x91, x106, uint64(0x0)) + var x117 uint64 + var x118 uint64 + x117, x118 = bits.Add64(x93, x108, uint64(uint1(x116))) + var x119 uint64 + var x120 uint64 + x119, x120 = bits.Add64(x95, x110, uint64(uint1(x118))) + var x121 uint64 + var x122 uint64 + x121, x122 = bits.Add64(x97, x112, uint64(uint1(x120))) + var x123 uint64 + var x124 uint64 + x123, x124 = bits.Add64(x99, x114, uint64(uint1(x122))) + var x125 uint64 + _, x125 = bits.Mul64(x115, 0xd838091dd2253531) + var x127 uint64 + var x128 uint64 + x128, x127 = bits.Mul64(x125, 0xffffffffffffffff) + var x129 uint64 + var x130 uint64 + x130, x129 = bits.Mul64(x125, 0xffffffffffffffff) + var x131 uint64 + var x132 uint64 + x132, x131 = bits.Mul64(x125, 0xffffffffffffffff) + var x133 uint64 + var x134 uint64 + x134, x133 = bits.Mul64(x125, 0xfffffffefffffc2f) + var x135 uint64 + var x136 uint64 + x135, x136 = bits.Add64(x134, x131, uint64(0x0)) + var x137 uint64 + var x138 uint64 + x137, x138 = bits.Add64(x132, x129, uint64(uint1(x136))) + var x139 uint64 + var x140 uint64 + x139, x140 = bits.Add64(x130, x127, uint64(uint1(x138))) + x141 := (uint64(uint1(x140)) + x128) + var x143 uint64 + _, x143 = bits.Add64(x115, x133, uint64(0x0)) + var x144 uint64 + var x145 uint64 + x144, x145 = bits.Add64(x117, x135, uint64(uint1(x143))) + var x146 uint64 + var x147 uint64 + x146, x147 = bits.Add64(x119, x137, uint64(uint1(x145))) + var x148 uint64 + var x149 uint64 + x148, x149 = bits.Add64(x121, x139, uint64(uint1(x147))) + var x150 uint64 + var x151 uint64 + x150, x151 = bits.Add64(x123, x141, uint64(uint1(x149))) + x152 := (uint64(uint1(x151)) + uint64(uint1(x124))) + var x153 uint64 + var x154 uint64 + x154, x153 = bits.Mul64(x3, arg1[3]) + var x155 uint64 + var x156 uint64 + x156, x155 = bits.Mul64(x3, arg1[2]) + var x157 uint64 + var x158 uint64 + x158, x157 = bits.Mul64(x3, arg1[1]) + var x159 uint64 + var x160 uint64 + x160, x159 = bits.Mul64(x3, arg1[0]) + var x161 uint64 + var x162 uint64 + x161, x162 = bits.Add64(x160, x157, uint64(0x0)) + var x163 uint64 + var x164 uint64 + x163, x164 = bits.Add64(x158, x155, uint64(uint1(x162))) + var x165 uint64 + var x166 uint64 + x165, x166 = bits.Add64(x156, x153, uint64(uint1(x164))) + x167 := 
(uint64(uint1(x166)) + x154) + var x168 uint64 + var x169 uint64 + x168, x169 = bits.Add64(x144, x159, uint64(0x0)) + var x170 uint64 + var x171 uint64 + x170, x171 = bits.Add64(x146, x161, uint64(uint1(x169))) + var x172 uint64 + var x173 uint64 + x172, x173 = bits.Add64(x148, x163, uint64(uint1(x171))) + var x174 uint64 + var x175 uint64 + x174, x175 = bits.Add64(x150, x165, uint64(uint1(x173))) + var x176 uint64 + var x177 uint64 + x176, x177 = bits.Add64(x152, x167, uint64(uint1(x175))) + var x178 uint64 + _, x178 = bits.Mul64(x168, 0xd838091dd2253531) + var x180 uint64 + var x181 uint64 + x181, x180 = bits.Mul64(x178, 0xffffffffffffffff) + var x182 uint64 + var x183 uint64 + x183, x182 = bits.Mul64(x178, 0xffffffffffffffff) + var x184 uint64 + var x185 uint64 + x185, x184 = bits.Mul64(x178, 0xffffffffffffffff) + var x186 uint64 + var x187 uint64 + x187, x186 = bits.Mul64(x178, 0xfffffffefffffc2f) + var x188 uint64 + var x189 uint64 + x188, x189 = bits.Add64(x187, x184, uint64(0x0)) + var x190 uint64 + var x191 uint64 + x190, x191 = bits.Add64(x185, x182, uint64(uint1(x189))) + var x192 uint64 + var x193 uint64 + x192, x193 = bits.Add64(x183, x180, uint64(uint1(x191))) + x194 := (uint64(uint1(x193)) + x181) + var x196 uint64 + _, x196 = bits.Add64(x168, x186, uint64(0x0)) + var x197 uint64 + var x198 uint64 + x197, x198 = bits.Add64(x170, x188, uint64(uint1(x196))) + var x199 uint64 + var x200 uint64 + x199, x200 = bits.Add64(x172, x190, uint64(uint1(x198))) + var x201 uint64 + var x202 uint64 + x201, x202 = bits.Add64(x174, x192, uint64(uint1(x200))) + var x203 uint64 + var x204 uint64 + x203, x204 = bits.Add64(x176, x194, uint64(uint1(x202))) + x205 := (uint64(uint1(x204)) + uint64(uint1(x177))) + var x206 uint64 + var x207 uint64 + x206, x207 = bits.Sub64(x197, 0xfffffffefffffc2f, uint64(0x0)) + var x208 uint64 + var x209 uint64 + x208, x209 = bits.Sub64(x199, 0xffffffffffffffff, uint64(uint1(x207))) + var x210 uint64 + var x211 uint64 + x210, x211 = bits.Sub64(x201, 0xffffffffffffffff, uint64(uint1(x209))) + var x212 uint64 + var x213 uint64 + x212, x213 = bits.Sub64(x203, 0xffffffffffffffff, uint64(uint1(x211))) + var x215 uint64 + _, x215 = bits.Sub64(x205, uint64(0x0), uint64(uint1(x213))) + var x216 uint64 + cmovznzU64(&x216, uint1(x215), x206, x197) + var x217 uint64 + cmovznzU64(&x217, uint1(x215), x208, x199) + var x218 uint64 + cmovznzU64(&x218, uint1(x215), x210, x201) + var x219 uint64 + cmovznzU64(&x219, uint1(x215), x212, x203) + out1[0] = x216 + out1[1] = x217 + out1[2] = x218 + out1[3] = x219 +} + +// Add adds two field elements in the Montgomery domain. 
+// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// 0 ≤ eval arg2 < m +// +// Postconditions: +// +// eval (from_montgomery out1) mod m = (eval (from_montgomery arg1) + eval (from_montgomery arg2)) mod m +// 0 ≤ eval out1 < m +func Add( + out1 *MontgomeryDomainFieldElement, + arg1 *MontgomeryDomainFieldElement, + arg2 *MontgomeryDomainFieldElement, +) { + var x1 uint64 + var x2 uint64 + x1, x2 = bits.Add64(arg1[0], arg2[0], uint64(0x0)) + var x3 uint64 + var x4 uint64 + x3, x4 = bits.Add64(arg1[1], arg2[1], uint64(uint1(x2))) + var x5 uint64 + var x6 uint64 + x5, x6 = bits.Add64(arg1[2], arg2[2], uint64(uint1(x4))) + var x7 uint64 + var x8 uint64 + x7, x8 = bits.Add64(arg1[3], arg2[3], uint64(uint1(x6))) + var x9 uint64 + var x10 uint64 + x9, x10 = bits.Sub64(x1, 0xfffffffefffffc2f, uint64(0x0)) + var x11 uint64 + var x12 uint64 + x11, x12 = bits.Sub64(x3, 0xffffffffffffffff, uint64(uint1(x10))) + var x13 uint64 + var x14 uint64 + x13, x14 = bits.Sub64(x5, 0xffffffffffffffff, uint64(uint1(x12))) + var x15 uint64 + var x16 uint64 + x15, x16 = bits.Sub64(x7, 0xffffffffffffffff, uint64(uint1(x14))) + var x18 uint64 + _, x18 = bits.Sub64(uint64(uint1(x8)), uint64(0x0), uint64(uint1(x16))) + var x19 uint64 + cmovznzU64(&x19, uint1(x18), x9, x1) + var x20 uint64 + cmovznzU64(&x20, uint1(x18), x11, x3) + var x21 uint64 + cmovznzU64(&x21, uint1(x18), x13, x5) + var x22 uint64 + cmovznzU64(&x22, uint1(x18), x15, x7) + out1[0] = x19 + out1[1] = x20 + out1[2] = x21 + out1[3] = x22 +} + +// Sub subtracts two field elements in the Montgomery domain. +// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// 0 ≤ eval arg2 < m +// +// Postconditions: +// +// eval (from_montgomery out1) mod m = (eval (from_montgomery arg1) - eval (from_montgomery arg2)) mod m +// 0 ≤ eval out1 < m +func Sub( + out1 *MontgomeryDomainFieldElement, + arg1 *MontgomeryDomainFieldElement, + arg2 *MontgomeryDomainFieldElement, +) { + var x1 uint64 + var x2 uint64 + x1, x2 = bits.Sub64(arg1[0], arg2[0], uint64(0x0)) + var x3 uint64 + var x4 uint64 + x3, x4 = bits.Sub64(arg1[1], arg2[1], uint64(uint1(x2))) + var x5 uint64 + var x6 uint64 + x5, x6 = bits.Sub64(arg1[2], arg2[2], uint64(uint1(x4))) + var x7 uint64 + var x8 uint64 + x7, x8 = bits.Sub64(arg1[3], arg2[3], uint64(uint1(x6))) + var x9 uint64 + cmovznzU64(&x9, uint1(x8), uint64(0x0), 0xffffffffffffffff) + var x10 uint64 + var x11 uint64 + x10, x11 = bits.Add64(x1, (x9 & 0xfffffffefffffc2f), uint64(0x0)) + var x12 uint64 + var x13 uint64 + x12, x13 = bits.Add64(x3, x9, uint64(uint1(x11))) + var x14 uint64 + var x15 uint64 + x14, x15 = bits.Add64(x5, x9, uint64(uint1(x13))) + var x16 uint64 + x16, _ = bits.Add64(x7, x9, uint64(uint1(x15))) + out1[0] = x10 + out1[1] = x12 + out1[2] = x14 + out1[3] = x16 +} + +// Opp negates a field element in the Montgomery domain. 
+// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// +// Postconditions: +// +// eval (from_montgomery out1) mod m = -eval (from_montgomery arg1) mod m +// 0 ≤ eval out1 < m +func Opp(out1 *MontgomeryDomainFieldElement, arg1 *MontgomeryDomainFieldElement) { + var x1 uint64 + var x2 uint64 + x1, x2 = bits.Sub64(uint64(0x0), arg1[0], uint64(0x0)) + var x3 uint64 + var x4 uint64 + x3, x4 = bits.Sub64(uint64(0x0), arg1[1], uint64(uint1(x2))) + var x5 uint64 + var x6 uint64 + x5, x6 = bits.Sub64(uint64(0x0), arg1[2], uint64(uint1(x4))) + var x7 uint64 + var x8 uint64 + x7, x8 = bits.Sub64(uint64(0x0), arg1[3], uint64(uint1(x6))) + var x9 uint64 + cmovznzU64(&x9, uint1(x8), uint64(0x0), 0xffffffffffffffff) + var x10 uint64 + var x11 uint64 + x10, x11 = bits.Add64(x1, (x9 & 0xfffffffefffffc2f), uint64(0x0)) + var x12 uint64 + var x13 uint64 + x12, x13 = bits.Add64(x3, x9, uint64(uint1(x11))) + var x14 uint64 + var x15 uint64 + x14, x15 = bits.Add64(x5, x9, uint64(uint1(x13))) + var x16 uint64 + x16, _ = bits.Add64(x7, x9, uint64(uint1(x15))) + out1[0] = x10 + out1[1] = x12 + out1[2] = x14 + out1[3] = x16 +} + +// FromMontgomery translates a field element out of the Montgomery domain. +// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// +// Postconditions: +// +// eval out1 mod m = (eval arg1 * ((2^64)⁻¹ mod m)^4) mod m +// 0 ≤ eval out1 < m +func FromMontgomery(out1 *NonMontgomeryDomainFieldElement, arg1 *MontgomeryDomainFieldElement) { + x1 := arg1[0] + var x2 uint64 + _, x2 = bits.Mul64(x1, 0xd838091dd2253531) + var x4 uint64 + var x5 uint64 + x5, x4 = bits.Mul64(x2, 0xffffffffffffffff) + var x6 uint64 + var x7 uint64 + x7, x6 = bits.Mul64(x2, 0xffffffffffffffff) + var x8 uint64 + var x9 uint64 + x9, x8 = bits.Mul64(x2, 0xffffffffffffffff) + var x10 uint64 + var x11 uint64 + x11, x10 = bits.Mul64(x2, 0xfffffffefffffc2f) + var x12 uint64 + var x13 uint64 + x12, x13 = bits.Add64(x11, x8, uint64(0x0)) + var x14 uint64 + var x15 uint64 + x14, x15 = bits.Add64(x9, x6, uint64(uint1(x13))) + var x16 uint64 + var x17 uint64 + x16, x17 = bits.Add64(x7, x4, uint64(uint1(x15))) + var x19 uint64 + _, x19 = bits.Add64(x1, x10, uint64(0x0)) + var x20 uint64 + var x21 uint64 + x20, x21 = bits.Add64(uint64(0x0), x12, uint64(uint1(x19))) + var x22 uint64 + var x23 uint64 + x22, x23 = bits.Add64(uint64(0x0), x14, uint64(uint1(x21))) + var x24 uint64 + var x25 uint64 + x24, x25 = bits.Add64(uint64(0x0), x16, uint64(uint1(x23))) + var x26 uint64 + var x27 uint64 + x26, x27 = bits.Add64(uint64(0x0), (uint64(uint1(x17)) + x5), uint64(uint1(x25))) + var x28 uint64 + var x29 uint64 + x28, x29 = bits.Add64(x20, arg1[1], uint64(0x0)) + var x30 uint64 + var x31 uint64 + x30, x31 = bits.Add64(x22, uint64(0x0), uint64(uint1(x29))) + var x32 uint64 + var x33 uint64 + x32, x33 = bits.Add64(x24, uint64(0x0), uint64(uint1(x31))) + var x34 uint64 + var x35 uint64 + x34, x35 = bits.Add64(x26, uint64(0x0), uint64(uint1(x33))) + var x36 uint64 + _, x36 = bits.Mul64(x28, 0xd838091dd2253531) + var x38 uint64 + var x39 uint64 + x39, x38 = bits.Mul64(x36, 0xffffffffffffffff) + var x40 uint64 + var x41 uint64 + x41, x40 = bits.Mul64(x36, 0xffffffffffffffff) + var x42 uint64 + var x43 uint64 + x43, x42 = bits.Mul64(x36, 0xffffffffffffffff) + var x44 uint64 + var x45 uint64 + x45, x44 = bits.Mul64(x36, 0xfffffffefffffc2f) + var x46 uint64 + var x47 uint64 + x46, x47 = bits.Add64(x45, x42, uint64(0x0)) + var x48 uint64 + var x49 uint64 + x48, x49 = bits.Add64(x43, x40, uint64(uint1(x47))) + var x50 uint64 + var x51 uint64 + x50, x51 = 
bits.Add64(x41, x38, uint64(uint1(x49))) + var x53 uint64 + _, x53 = bits.Add64(x28, x44, uint64(0x0)) + var x54 uint64 + var x55 uint64 + x54, x55 = bits.Add64(x30, x46, uint64(uint1(x53))) + var x56 uint64 + var x57 uint64 + x56, x57 = bits.Add64(x32, x48, uint64(uint1(x55))) + var x58 uint64 + var x59 uint64 + x58, x59 = bits.Add64(x34, x50, uint64(uint1(x57))) + var x60 uint64 + var x61 uint64 + x60, x61 = bits.Add64( + (uint64(uint1(x35)) + uint64(uint1(x27))), + (uint64(uint1(x51)) + x39), + uint64(uint1(x59)), + ) + var x62 uint64 + var x63 uint64 + x62, x63 = bits.Add64(x54, arg1[2], uint64(0x0)) + var x64 uint64 + var x65 uint64 + x64, x65 = bits.Add64(x56, uint64(0x0), uint64(uint1(x63))) + var x66 uint64 + var x67 uint64 + x66, x67 = bits.Add64(x58, uint64(0x0), uint64(uint1(x65))) + var x68 uint64 + var x69 uint64 + x68, x69 = bits.Add64(x60, uint64(0x0), uint64(uint1(x67))) + var x70 uint64 + _, x70 = bits.Mul64(x62, 0xd838091dd2253531) + var x72 uint64 + var x73 uint64 + x73, x72 = bits.Mul64(x70, 0xffffffffffffffff) + var x74 uint64 + var x75 uint64 + x75, x74 = bits.Mul64(x70, 0xffffffffffffffff) + var x76 uint64 + var x77 uint64 + x77, x76 = bits.Mul64(x70, 0xffffffffffffffff) + var x78 uint64 + var x79 uint64 + x79, x78 = bits.Mul64(x70, 0xfffffffefffffc2f) + var x80 uint64 + var x81 uint64 + x80, x81 = bits.Add64(x79, x76, uint64(0x0)) + var x82 uint64 + var x83 uint64 + x82, x83 = bits.Add64(x77, x74, uint64(uint1(x81))) + var x84 uint64 + var x85 uint64 + x84, x85 = bits.Add64(x75, x72, uint64(uint1(x83))) + var x87 uint64 + _, x87 = bits.Add64(x62, x78, uint64(0x0)) + var x88 uint64 + var x89 uint64 + x88, x89 = bits.Add64(x64, x80, uint64(uint1(x87))) + var x90 uint64 + var x91 uint64 + x90, x91 = bits.Add64(x66, x82, uint64(uint1(x89))) + var x92 uint64 + var x93 uint64 + x92, x93 = bits.Add64(x68, x84, uint64(uint1(x91))) + var x94 uint64 + var x95 uint64 + x94, x95 = bits.Add64( + (uint64(uint1(x69)) + uint64(uint1(x61))), + (uint64(uint1(x85)) + x73), + uint64(uint1(x93)), + ) + var x96 uint64 + var x97 uint64 + x96, x97 = bits.Add64(x88, arg1[3], uint64(0x0)) + var x98 uint64 + var x99 uint64 + x98, x99 = bits.Add64(x90, uint64(0x0), uint64(uint1(x97))) + var x100 uint64 + var x101 uint64 + x100, x101 = bits.Add64(x92, uint64(0x0), uint64(uint1(x99))) + var x102 uint64 + var x103 uint64 + x102, x103 = bits.Add64(x94, uint64(0x0), uint64(uint1(x101))) + var x104 uint64 + _, x104 = bits.Mul64(x96, 0xd838091dd2253531) + var x106 uint64 + var x107 uint64 + x107, x106 = bits.Mul64(x104, 0xffffffffffffffff) + var x108 uint64 + var x109 uint64 + x109, x108 = bits.Mul64(x104, 0xffffffffffffffff) + var x110 uint64 + var x111 uint64 + x111, x110 = bits.Mul64(x104, 0xffffffffffffffff) + var x112 uint64 + var x113 uint64 + x113, x112 = bits.Mul64(x104, 0xfffffffefffffc2f) + var x114 uint64 + var x115 uint64 + x114, x115 = bits.Add64(x113, x110, uint64(0x0)) + var x116 uint64 + var x117 uint64 + x116, x117 = bits.Add64(x111, x108, uint64(uint1(x115))) + var x118 uint64 + var x119 uint64 + x118, x119 = bits.Add64(x109, x106, uint64(uint1(x117))) + var x121 uint64 + _, x121 = bits.Add64(x96, x112, uint64(0x0)) + var x122 uint64 + var x123 uint64 + x122, x123 = bits.Add64(x98, x114, uint64(uint1(x121))) + var x124 uint64 + var x125 uint64 + x124, x125 = bits.Add64(x100, x116, uint64(uint1(x123))) + var x126 uint64 + var x127 uint64 + x126, x127 = bits.Add64(x102, x118, uint64(uint1(x125))) + var x128 uint64 + var x129 uint64 + x128, x129 = bits.Add64( + (uint64(uint1(x103)) + 
uint64(uint1(x95))), + (uint64(uint1(x119)) + x107), + uint64(uint1(x127)), + ) + var x130 uint64 + var x131 uint64 + x130, x131 = bits.Sub64(x122, 0xfffffffefffffc2f, uint64(0x0)) + var x132 uint64 + var x133 uint64 + x132, x133 = bits.Sub64(x124, 0xffffffffffffffff, uint64(uint1(x131))) + var x134 uint64 + var x135 uint64 + x134, x135 = bits.Sub64(x126, 0xffffffffffffffff, uint64(uint1(x133))) + var x136 uint64 + var x137 uint64 + x136, x137 = bits.Sub64(x128, 0xffffffffffffffff, uint64(uint1(x135))) + var x139 uint64 + _, x139 = bits.Sub64(uint64(uint1(x129)), uint64(0x0), uint64(uint1(x137))) + var x140 uint64 + cmovznzU64(&x140, uint1(x139), x130, x122) + var x141 uint64 + cmovznzU64(&x141, uint1(x139), x132, x124) + var x142 uint64 + cmovznzU64(&x142, uint1(x139), x134, x126) + var x143 uint64 + cmovznzU64(&x143, uint1(x139), x136, x128) + out1[0] = x140 + out1[1] = x141 + out1[2] = x142 + out1[3] = x143 +} + +// ToMontgomery translates a field element into the Montgomery domain. +// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// +// Postconditions: +// +// eval (from_montgomery out1) mod m = eval arg1 mod m +// 0 ≤ eval out1 < m +func ToMontgomery(out1 *MontgomeryDomainFieldElement, arg1 *NonMontgomeryDomainFieldElement) { + x1 := arg1[1] + x2 := arg1[2] + x3 := arg1[3] + x4 := arg1[0] + var x5 uint64 + var x6 uint64 + x6, x5 = bits.Mul64(x4, 0x7a2000e90a1) + var x7 uint64 + var x8 uint64 + x7, x8 = bits.Add64(x6, x4, uint64(0x0)) + var x9 uint64 + _, x9 = bits.Mul64(x5, 0xd838091dd2253531) + var x11 uint64 + var x12 uint64 + x12, x11 = bits.Mul64(x9, 0xffffffffffffffff) + var x13 uint64 + var x14 uint64 + x14, x13 = bits.Mul64(x9, 0xffffffffffffffff) + var x15 uint64 + var x16 uint64 + x16, x15 = bits.Mul64(x9, 0xffffffffffffffff) + var x17 uint64 + var x18 uint64 + x18, x17 = bits.Mul64(x9, 0xfffffffefffffc2f) + var x19 uint64 + var x20 uint64 + x19, x20 = bits.Add64(x18, x15, uint64(0x0)) + var x21 uint64 + var x22 uint64 + x21, x22 = bits.Add64(x16, x13, uint64(uint1(x20))) + var x23 uint64 + var x24 uint64 + x23, x24 = bits.Add64(x14, x11, uint64(uint1(x22))) + var x26 uint64 + _, x26 = bits.Add64(x5, x17, uint64(0x0)) + var x27 uint64 + var x28 uint64 + x27, x28 = bits.Add64(x7, x19, uint64(uint1(x26))) + var x29 uint64 + var x30 uint64 + x29, x30 = bits.Add64(uint64(uint1(x8)), x21, uint64(uint1(x28))) + var x31 uint64 + var x32 uint64 + x31, x32 = bits.Add64(uint64(0x0), x23, uint64(uint1(x30))) + var x33 uint64 + var x34 uint64 + x33, x34 = bits.Add64(uint64(0x0), (uint64(uint1(x24)) + x12), uint64(uint1(x32))) + var x35 uint64 + var x36 uint64 + x36, x35 = bits.Mul64(x1, 0x7a2000e90a1) + var x37 uint64 + var x38 uint64 + x37, x38 = bits.Add64(x36, x1, uint64(0x0)) + var x39 uint64 + var x40 uint64 + x39, x40 = bits.Add64(x27, x35, uint64(0x0)) + var x41 uint64 + var x42 uint64 + x41, x42 = bits.Add64(x29, x37, uint64(uint1(x40))) + var x43 uint64 + var x44 uint64 + x43, x44 = bits.Add64(x31, uint64(uint1(x38)), uint64(uint1(x42))) + var x45 uint64 + var x46 uint64 + x45, x46 = bits.Add64(x33, uint64(0x0), uint64(uint1(x44))) + var x47 uint64 + _, x47 = bits.Mul64(x39, 0xd838091dd2253531) + var x49 uint64 + var x50 uint64 + x50, x49 = bits.Mul64(x47, 0xffffffffffffffff) + var x51 uint64 + var x52 uint64 + x52, x51 = bits.Mul64(x47, 0xffffffffffffffff) + var x53 uint64 + var x54 uint64 + x54, x53 = bits.Mul64(x47, 0xffffffffffffffff) + var x55 uint64 + var x56 uint64 + x56, x55 = bits.Mul64(x47, 0xfffffffefffffc2f) + var x57 uint64 + var x58 uint64 + x57, x58 = 
bits.Add64(x56, x53, uint64(0x0)) + var x59 uint64 + var x60 uint64 + x59, x60 = bits.Add64(x54, x51, uint64(uint1(x58))) + var x61 uint64 + var x62 uint64 + x61, x62 = bits.Add64(x52, x49, uint64(uint1(x60))) + var x64 uint64 + _, x64 = bits.Add64(x39, x55, uint64(0x0)) + var x65 uint64 + var x66 uint64 + x65, x66 = bits.Add64(x41, x57, uint64(uint1(x64))) + var x67 uint64 + var x68 uint64 + x67, x68 = bits.Add64(x43, x59, uint64(uint1(x66))) + var x69 uint64 + var x70 uint64 + x69, x70 = bits.Add64(x45, x61, uint64(uint1(x68))) + var x71 uint64 + var x72 uint64 + x71, x72 = bits.Add64( + (uint64(uint1(x46)) + uint64(uint1(x34))), + (uint64(uint1(x62)) + x50), + uint64(uint1(x70)), + ) + var x73 uint64 + var x74 uint64 + x74, x73 = bits.Mul64(x2, 0x7a2000e90a1) + var x75 uint64 + var x76 uint64 + x75, x76 = bits.Add64(x74, x2, uint64(0x0)) + var x77 uint64 + var x78 uint64 + x77, x78 = bits.Add64(x65, x73, uint64(0x0)) + var x79 uint64 + var x80 uint64 + x79, x80 = bits.Add64(x67, x75, uint64(uint1(x78))) + var x81 uint64 + var x82 uint64 + x81, x82 = bits.Add64(x69, uint64(uint1(x76)), uint64(uint1(x80))) + var x83 uint64 + var x84 uint64 + x83, x84 = bits.Add64(x71, uint64(0x0), uint64(uint1(x82))) + var x85 uint64 + _, x85 = bits.Mul64(x77, 0xd838091dd2253531) + var x87 uint64 + var x88 uint64 + x88, x87 = bits.Mul64(x85, 0xffffffffffffffff) + var x89 uint64 + var x90 uint64 + x90, x89 = bits.Mul64(x85, 0xffffffffffffffff) + var x91 uint64 + var x92 uint64 + x92, x91 = bits.Mul64(x85, 0xffffffffffffffff) + var x93 uint64 + var x94 uint64 + x94, x93 = bits.Mul64(x85, 0xfffffffefffffc2f) + var x95 uint64 + var x96 uint64 + x95, x96 = bits.Add64(x94, x91, uint64(0x0)) + var x97 uint64 + var x98 uint64 + x97, x98 = bits.Add64(x92, x89, uint64(uint1(x96))) + var x99 uint64 + var x100 uint64 + x99, x100 = bits.Add64(x90, x87, uint64(uint1(x98))) + var x102 uint64 + _, x102 = bits.Add64(x77, x93, uint64(0x0)) + var x103 uint64 + var x104 uint64 + x103, x104 = bits.Add64(x79, x95, uint64(uint1(x102))) + var x105 uint64 + var x106 uint64 + x105, x106 = bits.Add64(x81, x97, uint64(uint1(x104))) + var x107 uint64 + var x108 uint64 + x107, x108 = bits.Add64(x83, x99, uint64(uint1(x106))) + var x109 uint64 + var x110 uint64 + x109, x110 = bits.Add64( + (uint64(uint1(x84)) + uint64(uint1(x72))), + (uint64(uint1(x100)) + x88), + uint64(uint1(x108)), + ) + var x111 uint64 + var x112 uint64 + x112, x111 = bits.Mul64(x3, 0x7a2000e90a1) + var x113 uint64 + var x114 uint64 + x113, x114 = bits.Add64(x112, x3, uint64(0x0)) + var x115 uint64 + var x116 uint64 + x115, x116 = bits.Add64(x103, x111, uint64(0x0)) + var x117 uint64 + var x118 uint64 + x117, x118 = bits.Add64(x105, x113, uint64(uint1(x116))) + var x119 uint64 + var x120 uint64 + x119, x120 = bits.Add64(x107, uint64(uint1(x114)), uint64(uint1(x118))) + var x121 uint64 + var x122 uint64 + x121, x122 = bits.Add64(x109, uint64(0x0), uint64(uint1(x120))) + var x123 uint64 + _, x123 = bits.Mul64(x115, 0xd838091dd2253531) + var x125 uint64 + var x126 uint64 + x126, x125 = bits.Mul64(x123, 0xffffffffffffffff) + var x127 uint64 + var x128 uint64 + x128, x127 = bits.Mul64(x123, 0xffffffffffffffff) + var x129 uint64 + var x130 uint64 + x130, x129 = bits.Mul64(x123, 0xffffffffffffffff) + var x131 uint64 + var x132 uint64 + x132, x131 = bits.Mul64(x123, 0xfffffffefffffc2f) + var x133 uint64 + var x134 uint64 + x133, x134 = bits.Add64(x132, x129, uint64(0x0)) + var x135 uint64 + var x136 uint64 + x135, x136 = bits.Add64(x130, x127, uint64(uint1(x134))) + var 
x137 uint64 + var x138 uint64 + x137, x138 = bits.Add64(x128, x125, uint64(uint1(x136))) + var x140 uint64 + _, x140 = bits.Add64(x115, x131, uint64(0x0)) + var x141 uint64 + var x142 uint64 + x141, x142 = bits.Add64(x117, x133, uint64(uint1(x140))) + var x143 uint64 + var x144 uint64 + x143, x144 = bits.Add64(x119, x135, uint64(uint1(x142))) + var x145 uint64 + var x146 uint64 + x145, x146 = bits.Add64(x121, x137, uint64(uint1(x144))) + var x147 uint64 + var x148 uint64 + x147, x148 = bits.Add64( + (uint64(uint1(x122)) + uint64(uint1(x110))), + (uint64(uint1(x138)) + x126), + uint64(uint1(x146)), + ) + var x149 uint64 + var x150 uint64 + x149, x150 = bits.Sub64(x141, 0xfffffffefffffc2f, uint64(0x0)) + var x151 uint64 + var x152 uint64 + x151, x152 = bits.Sub64(x143, 0xffffffffffffffff, uint64(uint1(x150))) + var x153 uint64 + var x154 uint64 + x153, x154 = bits.Sub64(x145, 0xffffffffffffffff, uint64(uint1(x152))) + var x155 uint64 + var x156 uint64 + x155, x156 = bits.Sub64(x147, 0xffffffffffffffff, uint64(uint1(x154))) + var x158 uint64 + _, x158 = bits.Sub64(uint64(uint1(x148)), uint64(0x0), uint64(uint1(x156))) + var x159 uint64 + cmovznzU64(&x159, uint1(x158), x149, x141) + var x160 uint64 + cmovznzU64(&x160, uint1(x158), x151, x143) + var x161 uint64 + cmovznzU64(&x161, uint1(x158), x153, x145) + var x162 uint64 + cmovznzU64(&x162, uint1(x158), x155, x147) + out1[0] = x159 + out1[1] = x160 + out1[2] = x161 + out1[3] = x162 +} + +// Nonzero outputs a single non-zero word if the input is non-zero and zero otherwise. +// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// +// Postconditions: +// +// out1 = 0 ↔ eval (from_montgomery arg1) mod m = 0 +// +// Input Bounds: +// +// arg1: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +// +// Output Bounds: +// +// out1: [0x0 ~> 0xffffffffffffffff] +func Nonzero(out1 *uint64, arg1 *[4]uint64) { + x1 := (arg1[0] | (arg1[1] | (arg1[2] | arg1[3]))) + *out1 = x1 +} + +// Selectznz is a multi-limb conditional select. +// +// Postconditions: +// +// eval out1 = (if arg1 = 0 then eval arg2 else eval arg3) +// +// Input Bounds: +// +// arg1: [0x0 ~> 0x1] +// arg2: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +// arg3: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +// +// Output Bounds: +// +// out1: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +func Selectznz(out1 *[4]uint64, arg1 uint1, arg2 *[4]uint64, arg3 *[4]uint64) { + var x1 uint64 + cmovznzU64(&x1, arg1, arg2[0], arg3[0]) + var x2 uint64 + cmovznzU64(&x2, arg1, arg2[1], arg3[1]) + var x3 uint64 + cmovznzU64(&x3, arg1, arg2[2], arg3[2]) + var x4 uint64 + cmovznzU64(&x4, arg1, arg2[3], arg3[3]) + out1[0] = x1 + out1[1] = x2 + out1[2] = x3 + out1[3] = x4 +} + +// ToBytes serializes a field element NOT in the Montgomery domain to bytes in little-endian order. 
+// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// +// Postconditions: +// +// out1 = map (λ x, ⌊((eval arg1 mod m) mod 2^(8 * (x + 1))) / 2^(8 * x)⌋) [0..31] +// +// Input Bounds: +// +// arg1: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +// +// Output Bounds: +// +// out1: [[0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff]] +func ToBytes(out1 *[32]uint8, arg1 *[4]uint64) { + x1 := arg1[3] + x2 := arg1[2] + x3 := arg1[1] + x4 := arg1[0] + x5 := (uint8(x4) & 0xff) + x6 := (x4 >> 8) + x7 := (uint8(x6) & 0xff) + x8 := (x6 >> 8) + x9 := (uint8(x8) & 0xff) + x10 := (x8 >> 8) + x11 := (uint8(x10) & 0xff) + x12 := (x10 >> 8) + x13 := (uint8(x12) & 0xff) + x14 := (x12 >> 8) + x15 := (uint8(x14) & 0xff) + x16 := (x14 >> 8) + x17 := (uint8(x16) & 0xff) + x18 := uint8((x16 >> 8)) + x19 := (uint8(x3) & 0xff) + x20 := (x3 >> 8) + x21 := (uint8(x20) & 0xff) + x22 := (x20 >> 8) + x23 := (uint8(x22) & 0xff) + x24 := (x22 >> 8) + x25 := (uint8(x24) & 0xff) + x26 := (x24 >> 8) + x27 := (uint8(x26) & 0xff) + x28 := (x26 >> 8) + x29 := (uint8(x28) & 0xff) + x30 := (x28 >> 8) + x31 := (uint8(x30) & 0xff) + x32 := uint8((x30 >> 8)) + x33 := (uint8(x2) & 0xff) + x34 := (x2 >> 8) + x35 := (uint8(x34) & 0xff) + x36 := (x34 >> 8) + x37 := (uint8(x36) & 0xff) + x38 := (x36 >> 8) + x39 := (uint8(x38) & 0xff) + x40 := (x38 >> 8) + x41 := (uint8(x40) & 0xff) + x42 := (x40 >> 8) + x43 := (uint8(x42) & 0xff) + x44 := (x42 >> 8) + x45 := (uint8(x44) & 0xff) + x46 := uint8((x44 >> 8)) + x47 := (uint8(x1) & 0xff) + x48 := (x1 >> 8) + x49 := (uint8(x48) & 0xff) + x50 := (x48 >> 8) + x51 := (uint8(x50) & 0xff) + x52 := (x50 >> 8) + x53 := (uint8(x52) & 0xff) + x54 := (x52 >> 8) + x55 := (uint8(x54) & 0xff) + x56 := (x54 >> 8) + x57 := (uint8(x56) & 0xff) + x58 := (x56 >> 8) + x59 := (uint8(x58) & 0xff) + x60 := uint8((x58 >> 8)) + out1[0] = x5 + out1[1] = x7 + out1[2] = x9 + out1[3] = x11 + out1[4] = x13 + out1[5] = x15 + out1[6] = x17 + out1[7] = x18 + out1[8] = x19 + out1[9] = x21 + out1[10] = x23 + out1[11] = x25 + out1[12] = x27 + out1[13] = x29 + out1[14] = x31 + out1[15] = x32 + out1[16] = x33 + out1[17] = x35 + out1[18] = x37 + out1[19] = x39 + out1[20] = x41 + out1[21] = x43 + out1[22] = x45 + out1[23] = x46 + out1[24] = x47 + out1[25] = x49 + out1[26] = x51 + out1[27] = x53 + out1[28] = x55 + out1[29] = x57 + out1[30] = x59 + out1[31] = x60 +} + +// FromBytes deserializes a field element NOT in the Montgomery domain from bytes in little-endian order. 
+// +// Preconditions: +// +// 0 ≤ bytes_eval arg1 < m +// +// Postconditions: +// +// eval out1 mod m = bytes_eval arg1 mod m +// 0 ≤ eval out1 < m +// +// Input Bounds: +// +// arg1: [[0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff]] +// +// Output Bounds: +// +// out1: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +func FromBytes(out1 *[4]uint64, arg1 *[32]uint8) { + x1 := (uint64(arg1[31]) << 56) + x2 := (uint64(arg1[30]) << 48) + x3 := (uint64(arg1[29]) << 40) + x4 := (uint64(arg1[28]) << 32) + x5 := (uint64(arg1[27]) << 24) + x6 := (uint64(arg1[26]) << 16) + x7 := (uint64(arg1[25]) << 8) + x8 := arg1[24] + x9 := (uint64(arg1[23]) << 56) + x10 := (uint64(arg1[22]) << 48) + x11 := (uint64(arg1[21]) << 40) + x12 := (uint64(arg1[20]) << 32) + x13 := (uint64(arg1[19]) << 24) + x14 := (uint64(arg1[18]) << 16) + x15 := (uint64(arg1[17]) << 8) + x16 := arg1[16] + x17 := (uint64(arg1[15]) << 56) + x18 := (uint64(arg1[14]) << 48) + x19 := (uint64(arg1[13]) << 40) + x20 := (uint64(arg1[12]) << 32) + x21 := (uint64(arg1[11]) << 24) + x22 := (uint64(arg1[10]) << 16) + x23 := (uint64(arg1[9]) << 8) + x24 := arg1[8] + x25 := (uint64(arg1[7]) << 56) + x26 := (uint64(arg1[6]) << 48) + x27 := (uint64(arg1[5]) << 40) + x28 := (uint64(arg1[4]) << 32) + x29 := (uint64(arg1[3]) << 24) + x30 := (uint64(arg1[2]) << 16) + x31 := (uint64(arg1[1]) << 8) + x32 := arg1[0] + x33 := (x31 + uint64(x32)) + x34 := (x30 + x33) + x35 := (x29 + x34) + x36 := (x28 + x35) + x37 := (x27 + x36) + x38 := (x26 + x37) + x39 := (x25 + x38) + x40 := (x23 + uint64(x24)) + x41 := (x22 + x40) + x42 := (x21 + x41) + x43 := (x20 + x42) + x44 := (x19 + x43) + x45 := (x18 + x44) + x46 := (x17 + x45) + x47 := (x15 + uint64(x16)) + x48 := (x14 + x47) + x49 := (x13 + x48) + x50 := (x12 + x49) + x51 := (x11 + x50) + x52 := (x10 + x51) + x53 := (x9 + x52) + x54 := (x7 + uint64(x8)) + x55 := (x6 + x54) + x56 := (x5 + x55) + x57 := (x4 + x56) + x58 := (x3 + x57) + x59 := (x2 + x58) + x60 := (x1 + x59) + out1[0] = x39 + out1[1] = x46 + out1[2] = x53 + out1[3] = x60 +} + +// SetOne returns the field element one in the Montgomery domain. +// +// Postconditions: +// +// eval (from_montgomery out1) mod m = 1 mod m +// 0 ≤ eval out1 < m +func SetOne(out1 *MontgomeryDomainFieldElement) { + out1[0] = 0x1000003d1 + out1[1] = uint64(0x0) + out1[2] = uint64(0x0) + out1[3] = uint64(0x0) +} + +// Msat returns the saturated representation of the prime modulus. +// +// Postconditions: +// +// twos_complement_eval out1 = m +// 0 ≤ eval out1 < m +// +// Output Bounds: +// +// out1: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +func Msat(out1 *[5]uint64) { + out1[0] = 0xfffffffefffffc2f + out1[1] = 0xffffffffffffffff + out1[2] = 0xffffffffffffffff + out1[3] = 0xffffffffffffffff + out1[4] = uint64(0x0) +} + +// Divstep computes a divstep. 
+// +// Preconditions: +// +// 0 ≤ eval arg4 < m +// 0 ≤ eval arg5 < m +// +// Postconditions: +// +// out1 = (if 0 < arg1 ∧ (twos_complement_eval arg3) is odd then 1 - arg1 else 1 + arg1) +// twos_complement_eval out2 = (if 0 < arg1 ∧ (twos_complement_eval arg3) is odd then twos_complement_eval arg3 else twos_complement_eval arg2) +// twos_complement_eval out3 = (if 0 < arg1 ∧ (twos_complement_eval arg3) is odd then ⌊(twos_complement_eval arg3 - twos_complement_eval arg2) / 2⌋ else ⌊(twos_complement_eval arg3 + (twos_complement_eval arg3 mod 2) * twos_complement_eval arg2) / 2⌋) +// eval (from_montgomery out4) mod m = (if 0 < arg1 ∧ (twos_complement_eval arg3) is odd then (2 * eval (from_montgomery arg5)) mod m else (2 * eval (from_montgomery arg4)) mod m) +// eval (from_montgomery out5) mod m = (if 0 < arg1 ∧ (twos_complement_eval arg3) is odd then (eval (from_montgomery arg4) - eval (from_montgomery arg4)) mod m else (eval (from_montgomery arg5) + (twos_complement_eval arg3 mod 2) * eval (from_montgomery arg4)) mod m) +// 0 ≤ eval out5 < m +// 0 ≤ eval out5 < m +// 0 ≤ eval out2 < m +// 0 ≤ eval out3 < m +// +// Input Bounds: +// +// arg1: [0x0 ~> 0xffffffffffffffff] +// arg2: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +// arg3: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +// arg4: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +// arg5: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +// +// Output Bounds: +// +// out1: [0x0 ~> 0xffffffffffffffff] +// out2: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +// out3: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +// out4: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +// out5: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +func Divstep( + out1 *uint64, + out2 *[5]uint64, + out3 *[5]uint64, + out4 *[4]uint64, + out5 *[4]uint64, + arg1 uint64, + arg2 *[5]uint64, + arg3 *[5]uint64, + arg4 *[4]uint64, + arg5 *[4]uint64, +) { + var x1 uint64 + x1, _ = bits.Add64((^arg1), uint64(0x1), uint64(0x0)) + x3 := (uint1((x1 >> 63)) & (uint1(arg3[0]) & 0x1)) + var x4 uint64 + x4, _ = bits.Add64((^arg1), uint64(0x1), uint64(0x0)) + var x6 uint64 + cmovznzU64(&x6, x3, arg1, x4) + var x7 uint64 + cmovznzU64(&x7, x3, arg2[0], arg3[0]) + var x8 uint64 + cmovznzU64(&x8, x3, arg2[1], arg3[1]) + var x9 uint64 + cmovznzU64(&x9, x3, arg2[2], arg3[2]) + var x10 uint64 + cmovznzU64(&x10, x3, arg2[3], arg3[3]) + var x11 uint64 + cmovznzU64(&x11, x3, arg2[4], arg3[4]) + var x12 uint64 + var x13 uint64 + x12, x13 = bits.Add64(uint64(0x1), (^arg2[0]), uint64(0x0)) + var x14 uint64 + var x15 uint64 + x14, x15 = bits.Add64(uint64(0x0), (^arg2[1]), uint64(uint1(x13))) + var x16 uint64 + var x17 uint64 + x16, x17 = bits.Add64(uint64(0x0), (^arg2[2]), uint64(uint1(x15))) + var x18 uint64 + var x19 uint64 + x18, x19 = bits.Add64(uint64(0x0), (^arg2[3]), uint64(uint1(x17))) + var x20 uint64 + x20, _ = 
bits.Add64(uint64(0x0), (^arg2[4]), uint64(uint1(x19))) + var x22 uint64 + cmovznzU64(&x22, x3, arg3[0], x12) + var x23 uint64 + cmovznzU64(&x23, x3, arg3[1], x14) + var x24 uint64 + cmovznzU64(&x24, x3, arg3[2], x16) + var x25 uint64 + cmovznzU64(&x25, x3, arg3[3], x18) + var x26 uint64 + cmovznzU64(&x26, x3, arg3[4], x20) + var x27 uint64 + cmovznzU64(&x27, x3, arg4[0], arg5[0]) + var x28 uint64 + cmovznzU64(&x28, x3, arg4[1], arg5[1]) + var x29 uint64 + cmovznzU64(&x29, x3, arg4[2], arg5[2]) + var x30 uint64 + cmovznzU64(&x30, x3, arg4[3], arg5[3]) + var x31 uint64 + var x32 uint64 + x31, x32 = bits.Add64(x27, x27, uint64(0x0)) + var x33 uint64 + var x34 uint64 + x33, x34 = bits.Add64(x28, x28, uint64(uint1(x32))) + var x35 uint64 + var x36 uint64 + x35, x36 = bits.Add64(x29, x29, uint64(uint1(x34))) + var x37 uint64 + var x38 uint64 + x37, x38 = bits.Add64(x30, x30, uint64(uint1(x36))) + var x39 uint64 + var x40 uint64 + x39, x40 = bits.Sub64(x31, 0xfffffffefffffc2f, uint64(0x0)) + var x41 uint64 + var x42 uint64 + x41, x42 = bits.Sub64(x33, 0xffffffffffffffff, uint64(uint1(x40))) + var x43 uint64 + var x44 uint64 + x43, x44 = bits.Sub64(x35, 0xffffffffffffffff, uint64(uint1(x42))) + var x45 uint64 + var x46 uint64 + x45, x46 = bits.Sub64(x37, 0xffffffffffffffff, uint64(uint1(x44))) + var x48 uint64 + _, x48 = bits.Sub64(uint64(uint1(x38)), uint64(0x0), uint64(uint1(x46))) + x49 := arg4[3] + x50 := arg4[2] + x51 := arg4[1] + x52 := arg4[0] + var x53 uint64 + var x54 uint64 + x53, x54 = bits.Sub64(uint64(0x0), x52, uint64(0x0)) + var x55 uint64 + var x56 uint64 + x55, x56 = bits.Sub64(uint64(0x0), x51, uint64(uint1(x54))) + var x57 uint64 + var x58 uint64 + x57, x58 = bits.Sub64(uint64(0x0), x50, uint64(uint1(x56))) + var x59 uint64 + var x60 uint64 + x59, x60 = bits.Sub64(uint64(0x0), x49, uint64(uint1(x58))) + var x61 uint64 + cmovznzU64(&x61, uint1(x60), uint64(0x0), 0xffffffffffffffff) + var x62 uint64 + var x63 uint64 + x62, x63 = bits.Add64(x53, (x61 & 0xfffffffefffffc2f), uint64(0x0)) + var x64 uint64 + var x65 uint64 + x64, x65 = bits.Add64(x55, x61, uint64(uint1(x63))) + var x66 uint64 + var x67 uint64 + x66, x67 = bits.Add64(x57, x61, uint64(uint1(x65))) + var x68 uint64 + x68, _ = bits.Add64(x59, x61, uint64(uint1(x67))) + var x70 uint64 + cmovznzU64(&x70, x3, arg5[0], x62) + var x71 uint64 + cmovznzU64(&x71, x3, arg5[1], x64) + var x72 uint64 + cmovznzU64(&x72, x3, arg5[2], x66) + var x73 uint64 + cmovznzU64(&x73, x3, arg5[3], x68) + x74 := (uint1(x22) & 0x1) + var x75 uint64 + cmovznzU64(&x75, x74, uint64(0x0), x7) + var x76 uint64 + cmovznzU64(&x76, x74, uint64(0x0), x8) + var x77 uint64 + cmovznzU64(&x77, x74, uint64(0x0), x9) + var x78 uint64 + cmovznzU64(&x78, x74, uint64(0x0), x10) + var x79 uint64 + cmovznzU64(&x79, x74, uint64(0x0), x11) + var x80 uint64 + var x81 uint64 + x80, x81 = bits.Add64(x22, x75, uint64(0x0)) + var x82 uint64 + var x83 uint64 + x82, x83 = bits.Add64(x23, x76, uint64(uint1(x81))) + var x84 uint64 + var x85 uint64 + x84, x85 = bits.Add64(x24, x77, uint64(uint1(x83))) + var x86 uint64 + var x87 uint64 + x86, x87 = bits.Add64(x25, x78, uint64(uint1(x85))) + var x88 uint64 + x88, _ = bits.Add64(x26, x79, uint64(uint1(x87))) + var x90 uint64 + cmovznzU64(&x90, x74, uint64(0x0), x27) + var x91 uint64 + cmovznzU64(&x91, x74, uint64(0x0), x28) + var x92 uint64 + cmovznzU64(&x92, x74, uint64(0x0), x29) + var x93 uint64 + cmovznzU64(&x93, x74, uint64(0x0), x30) + var x94 uint64 + var x95 uint64 + x94, x95 = bits.Add64(x70, x90, uint64(0x0)) + var x96 
uint64 + var x97 uint64 + x96, x97 = bits.Add64(x71, x91, uint64(uint1(x95))) + var x98 uint64 + var x99 uint64 + x98, x99 = bits.Add64(x72, x92, uint64(uint1(x97))) + var x100 uint64 + var x101 uint64 + x100, x101 = bits.Add64(x73, x93, uint64(uint1(x99))) + var x102 uint64 + var x103 uint64 + x102, x103 = bits.Sub64(x94, 0xfffffffefffffc2f, uint64(0x0)) + var x104 uint64 + var x105 uint64 + x104, x105 = bits.Sub64(x96, 0xffffffffffffffff, uint64(uint1(x103))) + var x106 uint64 + var x107 uint64 + x106, x107 = bits.Sub64(x98, 0xffffffffffffffff, uint64(uint1(x105))) + var x108 uint64 + var x109 uint64 + x108, x109 = bits.Sub64(x100, 0xffffffffffffffff, uint64(uint1(x107))) + var x111 uint64 + _, x111 = bits.Sub64(uint64(uint1(x101)), uint64(0x0), uint64(uint1(x109))) + var x112 uint64 + x112, _ = bits.Add64(x6, uint64(0x1), uint64(0x0)) + x114 := ((x80 >> 1) | ((x82 << 63) & 0xffffffffffffffff)) + x115 := ((x82 >> 1) | ((x84 << 63) & 0xffffffffffffffff)) + x116 := ((x84 >> 1) | ((x86 << 63) & 0xffffffffffffffff)) + x117 := ((x86 >> 1) | ((x88 << 63) & 0xffffffffffffffff)) + x118 := ((x88 & 0x8000000000000000) | (x88 >> 1)) + var x119 uint64 + cmovznzU64(&x119, uint1(x48), x39, x31) + var x120 uint64 + cmovznzU64(&x120, uint1(x48), x41, x33) + var x121 uint64 + cmovznzU64(&x121, uint1(x48), x43, x35) + var x122 uint64 + cmovznzU64(&x122, uint1(x48), x45, x37) + var x123 uint64 + cmovznzU64(&x123, uint1(x111), x102, x94) + var x124 uint64 + cmovznzU64(&x124, uint1(x111), x104, x96) + var x125 uint64 + cmovznzU64(&x125, uint1(x111), x106, x98) + var x126 uint64 + cmovznzU64(&x126, uint1(x111), x108, x100) + *out1 = x112 + out2[0] = x7 + out2[1] = x8 + out2[2] = x9 + out2[3] = x10 + out2[4] = x11 + out3[0] = x114 + out3[1] = x115 + out3[2] = x116 + out3[3] = x117 + out3[4] = x118 + out4[0] = x119 + out4[1] = x120 + out4[2] = x121 + out4[3] = x122 + out5[0] = x123 + out5[1] = x124 + out5[2] = x125 + out5[3] = x126 +} + +// DivstepPrecomp returns the precomputed value for Bernstein-Yang-inversion (in montgomery form). +// +// Postconditions: +// +// eval (from_montgomery out1) = ⌊(m - 1) / 2⌋^(if ⌊log2 m⌋ + 1 < 46 then ⌊(49 * (⌊log2 m⌋ + 1) + 80) / 17⌋ else ⌊(49 * (⌊log2 m⌋ + 1) + 57) / 17⌋) +// 0 ≤ eval out1 < m +// +// Output Bounds: +// +// out1: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +func DivstepPrecomp(out1 *[4]uint64) { + out1[0] = 0xf201a41831525e0a + out1[1] = 0x9953f9ddcd648d85 + out1[2] = 0xe86029463db210a9 + out1[3] = 0x24fb8a3104b03709 +} diff --git a/core/curves/native/k256/fq/fq.go b/core/curves/native/k256/fq/fq.go new file mode 100644 index 0000000..6cc63c0 --- /dev/null +++ b/core/curves/native/k256/fq/fq.go @@ -0,0 +1,493 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package fq + +import ( + "math/big" + "sync" + + "github.com/sonr-io/sonr/crypto/core/curves/native" +) + +var ( + k256FqInitonce sync.Once + k256FqParams native.FieldParams +) + +func K256FqNew() *native.Field { + return &native.Field{ + Value: [native.FieldLimbs]uint64{}, + Params: getK256FqParams(), + Arithmetic: k256FqArithmetic{}, + } +} + +func k256FqParamsInit() { + k256FqParams = native.FieldParams{ + R: [native.FieldLimbs]uint64{ + 0x402da1732fc9bebf, + 0x4551231950b75fc4, + 0x0000000000000001, + 0x0000000000000000, + }, + R2: [native.FieldLimbs]uint64{ + 0x896cf21467d7d140, + 0x741496c20e7cf878, + 0xe697f5e45bcd07c6, + 0x9d671cd581c69bc5, + }, + R3: [native.FieldLimbs]uint64{ + 0x7bc0cfe0e9ff41ed, + 0x0017648444d4322c, + 0xb1b31347f1d0b2da, + 0x555d800c18ef116d, + }, + Modulus: [native.FieldLimbs]uint64{ + 0xbfd25e8cd0364141, + 0xbaaedce6af48a03b, + 0xfffffffffffffffe, + 0xffffffffffffffff, + }, + BiModulus: new(big.Int).SetBytes([]byte{ + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfe, 0xba, 0xae, 0xdc, 0xe6, 0xaf, 0x48, 0xa0, 0x3b, 0xbf, 0xd2, 0x5e, 0x8c, 0xd0, 0x36, 0x41, 0x41, + }, + ), + } +} + +func getK256FqParams() *native.FieldParams { + k256FqInitonce.Do(k256FqParamsInit) + return &k256FqParams +} + +// k256FqArithmetic is a struct with all the methods needed for working +// in mod q +type k256FqArithmetic struct{} + +// ToMontgomery converts this field to montgomery form +func (f k256FqArithmetic) ToMontgomery(out, arg *[native.FieldLimbs]uint64) { + ToMontgomery((*MontgomeryDomainFieldElement)(out), (*NonMontgomeryDomainFieldElement)(arg)) +} + +// FromMontgomery converts this field from montgomery form +func (f k256FqArithmetic) FromMontgomery(out, arg *[native.FieldLimbs]uint64) { + FromMontgomery((*NonMontgomeryDomainFieldElement)(out), (*MontgomeryDomainFieldElement)(arg)) +} + +// Neg performs modular negation +func (f k256FqArithmetic) Neg(out, arg *[native.FieldLimbs]uint64) { + Opp((*MontgomeryDomainFieldElement)(out), (*MontgomeryDomainFieldElement)(arg)) +} + +// Square performs modular square +func (f k256FqArithmetic) Square(out, arg *[native.FieldLimbs]uint64) { + Square((*MontgomeryDomainFieldElement)(out), (*MontgomeryDomainFieldElement)(arg)) +} + +// Mul performs modular multiplication +func (f k256FqArithmetic) Mul(out, arg1, arg2 *[native.FieldLimbs]uint64) { + Mul( + (*MontgomeryDomainFieldElement)(out), + (*MontgomeryDomainFieldElement)(arg1), + (*MontgomeryDomainFieldElement)(arg2), + ) +} + +// Add performs modular addition +func (f k256FqArithmetic) Add(out, arg1, arg2 *[native.FieldLimbs]uint64) { + Add( + (*MontgomeryDomainFieldElement)(out), + (*MontgomeryDomainFieldElement)(arg1), + (*MontgomeryDomainFieldElement)(arg2), + ) +} + +// Sub performs modular subtraction +func (f k256FqArithmetic) Sub(out, arg1, arg2 *[native.FieldLimbs]uint64) { + Sub( + (*MontgomeryDomainFieldElement)(out), + (*MontgomeryDomainFieldElement)(arg1), + (*MontgomeryDomainFieldElement)(arg2), + ) +} + +// Sqrt performs modular square root +func (f k256FqArithmetic) Sqrt(wasSquare *int, out, arg *[native.FieldLimbs]uint64) { + // See sqrt_ts_ct at + // https://datatracker.ietf.org/doc/html/draft-irtf-cfrg-hash-to-curve-11#appendix-I.4 + // c1 := 6 + // c2 := (q - 1) / (2^c1) + // c2 := [4]uint64{ + // 0xeeff497a3340d905, + // 0xfaeabb739abd2280, + // 0xffffffffffffffff, + // 0x03ffffffffffffff, + //} + // c3 := (c2 - 1) / 2 + c3 := [native.FieldLimbs]uint64{ + 
0x777fa4bd19a06c82, + 0xfd755db9cd5e9140, + 0xffffffffffffffff, + 0x01ffffffffffffff, + } + // c4 := generator + // c5 := new(Fq).pow(generator, c2) + c5 := [native.FieldLimbs]uint64{ + 0x944cf2a220910e04, + 0x815c829c780589f4, + 0x55980b07bc222113, + 0xc702b0d248825b36, + } + var z, t, b, c, tv [native.FieldLimbs]uint64 + + native.Pow(&z, arg, &c3, getK256FqParams(), f) + Square((*MontgomeryDomainFieldElement)(&t), (*MontgomeryDomainFieldElement)(&z)) + Mul( + (*MontgomeryDomainFieldElement)(&t), + (*MontgomeryDomainFieldElement)(&t), + (*MontgomeryDomainFieldElement)(arg), + ) + Mul( + (*MontgomeryDomainFieldElement)(&z), + (*MontgomeryDomainFieldElement)(&z), + (*MontgomeryDomainFieldElement)(arg), + ) + + copy(b[:], t[:]) + copy(c[:], c5[:]) + + for i := s; i >= 2; i-- { + for j := 1; j <= i-2; j++ { + Square((*MontgomeryDomainFieldElement)(&b), (*MontgomeryDomainFieldElement)(&b)) + } + // if b == 1 flag = 0 else flag = 1 + flag := -(&native.Field{ + Value: b, + Params: getK256FqParams(), + Arithmetic: f, + }).IsOne() + 1 + Mul( + (*MontgomeryDomainFieldElement)(&tv), + (*MontgomeryDomainFieldElement)(&z), + (*MontgomeryDomainFieldElement)(&c), + ) + Selectznz(&z, uint1(flag), &z, &tv) + Square((*MontgomeryDomainFieldElement)(&c), (*MontgomeryDomainFieldElement)(&c)) + Mul( + (*MontgomeryDomainFieldElement)(&tv), + (*MontgomeryDomainFieldElement)(&t), + (*MontgomeryDomainFieldElement)(&c), + ) + Selectznz(&t, uint1(flag), &t, &tv) + copy(b[:], t[:]) + } + Square((*MontgomeryDomainFieldElement)(&c), (*MontgomeryDomainFieldElement)(&z)) + *wasSquare = (&native.Field{ + Value: c, + Params: getK256FqParams(), + Arithmetic: f, + }).Equal(&native.Field{ + Value: *arg, + Params: getK256FqParams(), + Arithmetic: f, + }) + Selectznz(out, uint1(*wasSquare), out, &z) +} + +// Invert performs modular inverse +func (f k256FqArithmetic) Invert(wasInverted *int, out, arg *[native.FieldLimbs]uint64) { + // Using an addition chain from + // https://briansmith.org/ecc-inversion-addition-chains-01#secp256k1_scalar_inversion + var x1, x10, x11, x101, x111, x1001, x1011, x1101 [native.FieldLimbs]uint64 + var x6, x8, x14, x28, x56, tmp [native.FieldLimbs]uint64 + + copy(x1[:], arg[:]) + native.Pow2k(&x10, arg, 1, f) + Mul( + (*MontgomeryDomainFieldElement)(&x11), + (*MontgomeryDomainFieldElement)(&x10), + (*MontgomeryDomainFieldElement)(&x1), + ) + Mul( + (*MontgomeryDomainFieldElement)(&x101), + (*MontgomeryDomainFieldElement)(&x10), + (*MontgomeryDomainFieldElement)(&x11), + ) + Mul( + (*MontgomeryDomainFieldElement)(&x111), + (*MontgomeryDomainFieldElement)(&x10), + (*MontgomeryDomainFieldElement)(&x101), + ) + Mul( + (*MontgomeryDomainFieldElement)(&x1001), + (*MontgomeryDomainFieldElement)(&x10), + (*MontgomeryDomainFieldElement)(&x111), + ) + Mul( + (*MontgomeryDomainFieldElement)(&x1011), + (*MontgomeryDomainFieldElement)(&x10), + (*MontgomeryDomainFieldElement)(&x1001), + ) + Mul( + (*MontgomeryDomainFieldElement)(&x1101), + (*MontgomeryDomainFieldElement)(&x10), + (*MontgomeryDomainFieldElement)(&x1011), + ) + + native.Pow2k(&x6, &x1101, 2, f) + Mul( + (*MontgomeryDomainFieldElement)(&x6), + (*MontgomeryDomainFieldElement)(&x6), + (*MontgomeryDomainFieldElement)(&x1011), + ) + + native.Pow2k(&x8, &x6, 2, f) + Mul( + (*MontgomeryDomainFieldElement)(&x8), + (*MontgomeryDomainFieldElement)(&x8), + (*MontgomeryDomainFieldElement)(&x11), + ) + + native.Pow2k(&x14, &x8, 6, f) + Mul( + (*MontgomeryDomainFieldElement)(&x14), + (*MontgomeryDomainFieldElement)(&x14), + (*MontgomeryDomainFieldElement)(&x6), 
+ ) + + native.Pow2k(&x28, &x14, 14, f) + Mul( + (*MontgomeryDomainFieldElement)(&x28), + (*MontgomeryDomainFieldElement)(&x28), + (*MontgomeryDomainFieldElement)(&x14), + ) + + native.Pow2k(&x56, &x28, 28, f) + Mul( + (*MontgomeryDomainFieldElement)(&x56), + (*MontgomeryDomainFieldElement)(&x56), + (*MontgomeryDomainFieldElement)(&x28), + ) + + native.Pow2k(&tmp, &x56, 56, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x56), + ) + + native.Pow2k(&tmp, &tmp, 14, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x14), + ) + + native.Pow2k(&tmp, &tmp, 3, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x101), + ) + + native.Pow2k(&tmp, &tmp, 4, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x111), + ) + + native.Pow2k(&tmp, &tmp, 4, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x101), + ) + + native.Pow2k(&tmp, &tmp, 5, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x1011), + ) + + native.Pow2k(&tmp, &tmp, 4, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x1011), + ) + + native.Pow2k(&tmp, &tmp, 4, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x111), + ) + + native.Pow2k(&tmp, &tmp, 5, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x111), + ) + + native.Pow2k(&tmp, &tmp, 6, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x1101), + ) + + native.Pow2k(&tmp, &tmp, 4, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x101), + ) + + native.Pow2k(&tmp, &tmp, 3, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x111), + ) + + native.Pow2k(&tmp, &tmp, 5, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x1001), + ) + + native.Pow2k(&tmp, &tmp, 6, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x101), + ) + + native.Pow2k(&tmp, &tmp, 10, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x111), + ) + + native.Pow2k(&tmp, &tmp, 4, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x111), + ) + + native.Pow2k(&tmp, &tmp, 9, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x8), + ) + + native.Pow2k(&tmp, &tmp, 5, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x1001), + ) + + native.Pow2k(&tmp, &tmp, 6, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + 
(*MontgomeryDomainFieldElement)(&x1011), + ) + + native.Pow2k(&tmp, &tmp, 4, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x1101), + ) + + native.Pow2k(&tmp, &tmp, 5, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x11), + ) + + native.Pow2k(&tmp, &tmp, 6, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x1101), + ) + + native.Pow2k(&tmp, &tmp, 10, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x1101), + ) + + native.Pow2k(&tmp, &tmp, 4, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x1001), + ) + + native.Pow2k(&tmp, &tmp, 6, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x1), + ) + + native.Pow2k(&tmp, &tmp, 8, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x6), + ) + + *wasInverted = (&native.Field{ + Value: *arg, + Params: getK256FqParams(), + Arithmetic: f, + }).IsNonZero() + Selectznz(out, uint1(*wasInverted), out, &tmp) +} + +// FromBytes converts a little endian byte array into a field element +func (f k256FqArithmetic) FromBytes(out *[native.FieldLimbs]uint64, arg *[native.FieldBytes]byte) { + FromBytes(out, arg) +} + +// ToBytes converts a field element to a little endian byte array +func (f k256FqArithmetic) ToBytes(out *[native.FieldBytes]byte, arg *[native.FieldLimbs]uint64) { + ToBytes(out, arg) +} + +// Selectznz performs conditional select. +// selects arg1 if choice == 0 and arg2 if choice == 1 +func (f k256FqArithmetic) Selectznz(out, arg1, arg2 *[native.FieldLimbs]uint64, choice int) { + Selectznz(out, uint1(choice), arg1, arg2) +} + +// generator = 7 mod q is a generator of the `q - 1` order multiplicative +// subgroup, or in other words a primitive element of the field. +// generator^t where t * 2^s + 1 = q +var generator = &[native.FieldLimbs]uint64{ + 0xc13f6a264e843739, + 0xe537f5b135039e5d, + 0x0000000000000008, + 0x0000000000000000, +} + +// s satisfies the equation 2^s * t = q - 1 with t odd. +var s = 6 diff --git a/core/curves/native/k256/fq/fq_test.go b/core/curves/native/k256/fq/fq_test.go new file mode 100644 index 0000000..9ff8fae --- /dev/null +++ b/core/curves/native/k256/fq/fq_test.go @@ -0,0 +1,330 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package fq + +import ( + crand "crypto/rand" + "math/big" + "math/rand" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core/curves/native" + "github.com/sonr-io/sonr/crypto/internal" +) + +func TestFqSetOne(t *testing.T) { + fq := K256FqNew().SetOne() + require.NotNil(t, fq) + require.Equal(t, fq.Value, getK256FqParams().R) +} + +func TestFqSetUint64(t *testing.T) { + act := K256FqNew().SetUint64(1 << 60) + require.NotNil(t, act) + // Remember it will be in montgomery form + require.Equal(t, act.Value[0], uint64(0xF000000000000000)) +} + +func TestFqAdd(t *testing.T) { + lhs := K256FqNew().SetOne() + rhs := K256FqNew().SetOne() + exp := K256FqNew().SetUint64(2) + res := K256FqNew().Add(lhs, rhs) + require.NotNil(t, res) + require.Equal(t, 1, res.Equal(exp)) + + // Fuzz test + for i := 0; i < 25; i++ { + // Divide by 4 to prevent overflow false errors + l := rand.Uint64() >> 2 + r := rand.Uint64() >> 2 + e := l + r + lhs.SetUint64(l) + rhs.SetUint64(r) + exp.SetUint64(e) + + a := K256FqNew().Add(lhs, rhs) + require.NotNil(t, a) + require.Equal(t, exp, a) + } +} + +func TestFqSub(t *testing.T) { + lhs := K256FqNew().SetOne() + rhs := K256FqNew().SetOne() + exp := K256FqNew().SetZero() + res := K256FqNew().Sub(lhs, rhs) + require.NotNil(t, res) + require.Equal(t, 1, res.Equal(exp)) + + // Fuzz test + for i := 0; i < 25; i++ { + // Divide by 4 to prevent overflow false errors + l := rand.Uint64() >> 2 + r := rand.Uint64() >> 2 + if l < r { + l, r = r, l + } + e := l - r + lhs.SetUint64(l) + rhs.SetUint64(r) + exp.SetUint64(e) + + a := K256FqNew().Sub(lhs, rhs) + require.NotNil(t, a) + require.Equal(t, exp, a) + } +} + +func TestFqMul(t *testing.T) { + lhs := K256FqNew().SetOne() + rhs := K256FqNew().SetOne() + exp := K256FqNew().SetOne() + res := K256FqNew().Mul(lhs, rhs) + require.NotNil(t, res) + require.Equal(t, 1, res.Equal(exp)) + + // Fuzz test + for i := 0; i < 25; i++ { + // Divide by 4 to prevent overflow false errors + l := rand.Uint32() + r := rand.Uint32() + e := uint64(l) * uint64(r) + lhs.SetUint64(uint64(l)) + rhs.SetUint64(uint64(r)) + exp.SetUint64(e) + + a := K256FqNew().Mul(lhs, rhs) + require.NotNil(t, a) + require.Equal(t, exp, a) + } +} + +func TestFqDouble(t *testing.T) { + a := K256FqNew().SetUint64(2) + e := K256FqNew().SetUint64(4) + require.Equal(t, e, K256FqNew().Double(a)) + + for i := 0; i < 25; i++ { + tv := rand.Uint32() + ttv := uint64(tv) * 2 + a = K256FqNew().SetUint64(uint64(tv)) + e = K256FqNew().SetUint64(ttv) + require.Equal(t, e, K256FqNew().Double(a)) + } +} + +func TestFqSquare(t *testing.T) { + a := K256FqNew().SetUint64(4) + e := K256FqNew().SetUint64(16) + require.Equal(t, e, a.Square(a)) + + for i := 0; i < 25; i++ { + j := rand.Uint32() + exp := uint64(j) * uint64(j) + e.SetUint64(exp) + a.SetUint64(uint64(j)) + require.Equal(t, e, a.Square(a)) + } +} + +func TestFqNeg(t *testing.T) { + g := K256FqNew().SetRaw(generator) + a := K256FqNew().SetOne() + a.Neg(a) + e := K256FqNew().SetRaw(&[native.FieldLimbs]uint64{0x7fa4bd19a06c8282, 0x755db9cd5e914077, 0xfffffffffffffffd, 0xffffffffffffffff}) + require.Equal(t, e, a) + a.Neg(g) + e = K256FqNew().SetRaw(&[native.FieldLimbs]uint64{0xfe92f46681b20a08, 0xd576e7357a4501dd, 0xfffffffffffffff5, 0xffffffffffffffff}) + require.Equal(t, e, a) +} + +func TestFqExp(t *testing.T) { + e := K256FqNew().SetUint64(8) + a := K256FqNew().SetUint64(2) + by := K256FqNew().SetUint64(3) + require.Equal(t, e, a.Exp(a, by)) +} + 
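An aside, not part of this commit: the fuzz loops above deliberately keep their operands small enough that plain uint64 arithmetic cannot overflow, so a natural companion check is to cross-check full-width products against math/big reduced modulo the group order m quoted in the generated secp256k1_fq.go header further down this diff. The sketch below assumes only the K256FqNew/SetBigInt/BigInt/Mul methods already exercised in this file and the imports at its top (crand, big, testing, require); the test name is hypothetical.

func TestFqMulBigIntCrossCheck(t *testing.T) {
	// Group order m = 2^256 - 432420386565659656852420866394968145599, copied
	// from the generated header (split into two 32-hex-digit halves for readability).
	m, ok := new(big.Int).SetString(
		"fffffffffffffffffffffffffffffffe"+"baaedce6af48a03bbfd25e8cd0364141", 16)
	require.True(t, ok)

	for i := 0; i < 25; i++ {
		// Sample a, b uniformly in [0, m) so reduction corner cases are not skipped.
		a, err := crand.Int(crand.Reader, m)
		require.NoError(t, err)
		b, err := crand.Int(crand.Reader, m)
		require.NoError(t, err)

		want := new(big.Int).Mul(a, b)
		want.Mod(want, m)

		got := K256FqNew().Mul(K256FqNew().SetBigInt(a), K256FqNew().SetBigInt(b))
		require.Equal(t, 0, want.Cmp(got.BigInt()))
	}
}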
+func TestFqSqrt(t *testing.T) { + t1 := K256FqNew().SetUint64(2) + t2 := K256FqNew().Neg(t1) + t3 := K256FqNew().Square(t1) + _, wasSquare := t3.Sqrt(t3) + + require.True(t, wasSquare) + require.Equal(t, 1, t1.Equal(t3)|t2.Equal(t3)) + t1.SetUint64(5) + _, wasSquare = K256FqNew().Sqrt(t1) + require.False(t, wasSquare) +} + +func TestFqInvert(t *testing.T) { + twoInv := K256FqNew().SetLimbs(&[native.FieldLimbs]uint64{0xdfe92f46681b20a1, 0x5d576e7357a4501d, 0xffffffffffffffff, 0x7fffffffffffffff}) + two := K256FqNew().SetUint64(2) + a, inverted := K256FqNew().Invert(two) + require.True(t, inverted) + require.Equal(t, a, twoInv) + + rootOfUnity := K256FqNew().SetLimbs(&[native.FieldLimbs]uint64{0x8619a9e760c01d0c, 0xa883c4fba37998df, 0x45607580b6eabd98, 0xf252b002544b2f99}) + rootOfUnityInv := K256FqNew().SetRaw(&[native.FieldLimbs]uint64{0x7d99f8e21447e314, 0x5b60c477e7728d4c, 0xd78befc191f58654, 0x6897e5ff7824360f}) + a, inverted = K256FqNew().Invert(rootOfUnity) + require.True(t, inverted) + require.Equal(t, a, rootOfUnityInv) + + lhs := K256FqNew().SetUint64(9) + rhs := K256FqNew().SetUint64(3) + rhsInv, inverted := K256FqNew().Invert(rhs) + require.True(t, inverted) + require.Equal(t, rhs, K256FqNew().Mul(lhs, rhsInv)) + + rhs.SetZero() + _, inverted = K256FqNew().Invert(rhs) + require.False(t, inverted) +} + +func TestFqCMove(t *testing.T) { + t1 := K256FqNew().SetUint64(5) + t2 := K256FqNew().SetUint64(10) + require.Equal(t, t1, K256FqNew().CMove(t1, t2, 0)) + require.Equal(t, t2, K256FqNew().CMove(t1, t2, 1)) +} + +func TestFqBytes(t *testing.T) { + t1 := K256FqNew().SetUint64(99) + seq := t1.Bytes() + t2, err := K256FqNew().SetBytes(&seq) + require.NoError(t, err) + require.Equal(t, t1, t2) + + for i := 0; i < 25; i++ { + t1.SetUint64(rand.Uint64()) + seq = t1.Bytes() + _, err = t2.SetBytes(&seq) + require.NoError(t, err) + require.Equal(t, t1, t2) + } +} + +func TestFqCmp(t *testing.T) { + tests := []struct { + a *native.Field + b *native.Field + e int + }{ + { + a: K256FqNew().SetRaw(&[native.FieldLimbs]uint64{2731658267414164836, 14655288906067898431, 6537465423330262322, 8306191141697566219}), + b: K256FqNew().SetRaw(&[native.FieldLimbs]uint64{6472764012681988529, 10848812988401906064, 2961825807536828898, 4282183981941645679}), + e: 1, + }, + { + a: K256FqNew().SetRaw(&[native.FieldLimbs]uint64{8023004109510539223, 4652004072850285717, 1877219145646046927, 383214385093921911}), + b: K256FqNew().SetRaw(&[native.FieldLimbs]uint64{10099384440823804262, 16139476942229308465, 8636966320777393798, 5435928725024696785}), + e: -1, + }, + { + a: K256FqNew().SetRaw(&[native.FieldLimbs]uint64{3741840066202388211, 12165774400417314871, 16619312580230515379, 16195032234110087705}), + b: K256FqNew().SetRaw(&[native.FieldLimbs]uint64{3905865991286066744, 543690822309071825, 17963103015950210055, 3745476720756119742}), + e: 1, + }, + { + a: K256FqNew().SetRaw(&[native.FieldLimbs]uint64{16660853697936147788, 7799793619412111108, 13515141085171033220, 2641079731236069032}), + b: K256FqNew().SetRaw(&[native.FieldLimbs]uint64{17790588295388238399, 571847801379669440, 14537208974498222469, 12792570372087452754}), + e: -1, + }, + { + a: K256FqNew().SetRaw(&[native.FieldLimbs]uint64{3912839285384959186, 2701177075110484070, 6453856448115499033, 6475797457962597458}), + b: K256FqNew().SetRaw(&[native.FieldLimbs]uint64{1282566391665688512, 13503640416992806563, 2962240104675990153, 3374904770947067689}), + e: 1, + }, + { + a: K256FqNew().SetRaw(&[native.FieldLimbs]uint64{5716631803409360103, 
7859567470082614154, 12747956220853330146, 18434584096087315020}), + b: K256FqNew().SetRaw(&[native.FieldLimbs]uint64{16317076441459028418, 12854146980376319601, 2258436689269031143, 9531877130792223752}), + e: 1, + }, + { + a: K256FqNew().SetRaw(&[native.FieldLimbs]uint64{17955191469941083403, 10350326247207200880, 17263512235150705075, 12700328451238078022}), + b: K256FqNew().SetRaw(&[native.FieldLimbs]uint64{6767595547459644695, 7146403825494928147, 12269344038346710612, 9122477829383225603}), + e: 1, + }, + { + a: K256FqNew().SetRaw(&[native.FieldLimbs]uint64{17099388671847024438, 6426264987820696548, 10641143464957227405, 7709745403700754098}), + b: K256FqNew().SetRaw(&[native.FieldLimbs]uint64{10799154372990268556, 17178492485719929374, 5705777922258988797, 8051037767683567782}), + e: -1, + }, + { + a: K256FqNew().SetRaw(&[native.FieldLimbs]uint64{4567139260680454325, 1629385880182139061, 16607020832317899145, 1261011562621553200}), + b: K256FqNew().SetRaw(&[native.FieldLimbs]uint64{13487234491304534488, 17872642955936089265, 17651026784972590233, 9468934643333871559}), + e: -1, + }, + { + a: K256FqNew().SetRaw(&[native.FieldLimbs]uint64{18071070103467571798, 11787850505799426140, 10631355976141928593, 4867785203635092610}), + b: K256FqNew().SetRaw(&[native.FieldLimbs]uint64{12596443599426461624, 10176122686151524591, 17075755296887483439, 6726169532695070719}), + e: -1, + }, + } + + for _, test := range tests { + require.Equal(t, test.e, test.a.Cmp(test.b)) + require.Equal(t, -test.e, test.b.Cmp(test.a)) + require.Equal(t, 0, test.a.Cmp(test.a)) + require.Equal(t, 0, test.b.Cmp(test.b)) + } +} + +func TestFqBigInt(t *testing.T) { + t1 := K256FqNew().SetBigInt(big.NewInt(9999)) + t2 := K256FqNew().SetBigInt(t1.BigInt()) + require.Equal(t, t1, t2) + + e := K256FqNew().SetRaw(&[native.FieldLimbs]uint64{0xa764d3f6f152f222, 0x3b5dc8aacb9297b7, 0xb015fa9d2b3efdc6, 0x567360cef000f24a}) + b := new( + big.Int, + ).SetBytes([]byte{9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9}) + t1.SetBigInt(b) + require.Equal(t, e, t1) + e.Value[0] = 0x186d8a95dee34f1f + e.Value[1] = 0x7f51143be3b60884 + e.Value[2] = 0x4fea0562d4c10238 + e.Value[3] = 0xa98c9f310fff0db5 + b.Neg(b) + t1.SetBigInt(b) + require.Equal(t, e, t1) +} + +func TestFqSetBytesWide(t *testing.T) { + e := K256FqNew().SetRaw(&[native.FieldLimbs]uint64{0x70620a92b4f2eb7a, 0xd04588eb9c228a3a, 0xccb71ae40a10491c, 0x61cf39d70a8b33b7}) + + a := K256FqNew().SetBytesWide(&[64]byte{ + 0x69, 0x23, 0x5a, 0x0b, 0xce, 0x0c, 0xa8, 0x64, + 0x3c, 0x78, 0xbc, 0x01, 0x05, 0xef, 0xf2, 0x84, + 0xde, 0xbb, 0x6b, 0xc8, 0x63, 0x5e, 0x6e, 0x69, + 0x62, 0xcc, 0xc6, 0x2d, 0xf5, 0x72, 0x40, 0x92, + 0x28, 0x11, 0xd6, 0xc8, 0x07, 0xa5, 0x88, 0x82, + 0xfe, 0xe3, 0x97, 0xf6, 0x1e, 0xfb, 0x2e, 0x3b, + 0x27, 0x5f, 0x85, 0x06, 0x8d, 0x99, 0xa4, 0x75, + 0xc0, 0x2c, 0x71, 0x69, 0x9e, 0x58, 0xea, 0x52, + }) + require.Equal(t, e, a) +} + +func TestFpSetBytesWideBigInt(t *testing.T) { + params := getK256FqParams() + var tv2 [64]byte + for i := 0; i < 25; i++ { + _, _ = crand.Read(tv2[:]) + e := new(big.Int).SetBytes(tv2[:]) + e.Mod(e, params.BiModulus) + + tv := internal.ReverseScalarBytes(tv2[:]) + copy(tv2[:], tv) + a := K256FqNew().SetBytesWide(&tv2) + require.Equal(t, 0, e.Cmp(a.BigInt())) + } +} diff --git a/core/curves/native/k256/fq/secp256k1_fq.go b/core/curves/native/k256/fq/secp256k1_fq.go new file mode 100755 index 0000000..9a78dbe --- /dev/null +++ 
b/core/curves/native/k256/fq/secp256k1_fq.go @@ -0,0 +1,2002 @@ +// Autogenerated: 'src/ExtractionOCaml/word_by_word_montgomery' --lang Go --no-wide-int --relax-primitive-carry-to-bitwidth 32,64 --cmovznz-by-mul --internal-static --package-case flatcase --public-function-case UpperCamelCase --private-function-case camelCase --public-type-case UpperCamelCase --private-type-case camelCase --no-prefix-fiat --doc-newline-in-typedef-bounds --doc-prepend-header 'Code generated by Fiat Cryptography. DO NOT EDIT.' --doc-text-before-function-name ” --doc-text-before-type-name ” --package-name secp256k1_q ” 64 '2^256 - 432420386565659656852420866394968145599' mul square add sub opp from_montgomery to_montgomery nonzero selectznz to_bytes from_bytes one msat divstep divstep_precomp +// +// curve description (via package name): secp256k1_q +// +// machine_wordsize = 64 (from "64") +// +// requested operations: mul, square, add, sub, opp, from_montgomery, to_montgomery, nonzero, selectznz, to_bytes, from_bytes, one, msat, divstep, divstep_precomp +// +// m = 0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141 (from "2^256 - 432420386565659656852420866394968145599") +// +// NOTE: In addition to the bounds specified above each function, all +// +// functions synthesized for this Montgomery arithmetic require the +// +// input to be strictly less than the prime modulus (m), and also +// +// require the input to be in the unique saturated representation. +// +// All functions also ensure that these two properties are true of +// +// return values. +// +// Computed values: +// +// eval z = z[0] + (z[1] << 64) + (z[2] << 128) + (z[3] << 192) +// +// bytes_eval z = z[0] + (z[1] << 8) + (z[2] << 16) + (z[3] << 24) + (z[4] << 32) + (z[5] << 40) + (z[6] << 48) + (z[7] << 56) + (z[8] << 64) + (z[9] << 72) + (z[10] << 80) + (z[11] << 88) + (z[12] << 96) + (z[13] << 104) + (z[14] << 112) + (z[15] << 120) + (z[16] << 128) + (z[17] << 136) + (z[18] << 144) + (z[19] << 152) + (z[20] << 160) + (z[21] << 168) + (z[22] << 176) + (z[23] << 184) + (z[24] << 192) + (z[25] << 200) + (z[26] << 208) + (z[27] << 216) + (z[28] << 224) + (z[29] << 232) + (z[30] << 240) + (z[31] << 248) +// +// twos_complement_eval z = let x1 := z[0] + (z[1] << 64) + (z[2] << 128) + (z[3] << 192) in +// +// if x1 & (2^256-1) < 2^255 then x1 & (2^256-1) else (x1 & (2^256-1)) - 2^256 +package fq + +import "math/bits" + +type ( + uint1 uint64 // We use uint64 instead of a more narrow type for performance reasons; see https://github.com/mit-plv/fiat-crypto/pull/1006#issuecomment-892625927 +) + +// MontgomeryDomainFieldElement is a field element in the Montgomery domain. +// +// Bounds: +// +// [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +type MontgomeryDomainFieldElement [4]uint64 + +// NonMontgomeryDomainFieldElement is a field element NOT in the Montgomery domain. +// +// Bounds: +// +// [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +type NonMontgomeryDomainFieldElement [4]uint64 + +// cmovznzU64 is a single-word conditional move. 
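+// The 0/1 flag is first expanded into an all-zeros or all-ones mask by
+// multiplying it with 0xffffffffffffffff; the result is then picked as
+// (mask & arg3) | (^mask & arg2), so the selection is branch-free.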
+// +// Postconditions: +// +// out1 = (if arg1 = 0 then arg2 else arg3) +// +// Input Bounds: +// +// arg1: [0x0 ~> 0x1] +// arg2: [0x0 ~> 0xffffffffffffffff] +// arg3: [0x0 ~> 0xffffffffffffffff] +// +// Output Bounds: +// +// out1: [0x0 ~> 0xffffffffffffffff] +func cmovznzU64(out1 *uint64, arg1 uint1, arg2 uint64, arg3 uint64) { + x1 := (uint64(arg1) * 0xffffffffffffffff) + x2 := ((x1 & arg3) | ((^x1) & arg2)) + *out1 = x2 +} + +// Mul multiplies two field elements in the Montgomery domain. +// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// 0 ≤ eval arg2 < m +// +// Postconditions: +// +// eval (from_montgomery out1) mod m = (eval (from_montgomery arg1) * eval (from_montgomery arg2)) mod m +// 0 ≤ eval out1 < m +func Mul( + out1 *MontgomeryDomainFieldElement, + arg1 *MontgomeryDomainFieldElement, + arg2 *MontgomeryDomainFieldElement, +) { + x1 := arg1[1] + x2 := arg1[2] + x3 := arg1[3] + x4 := arg1[0] + var x5 uint64 + var x6 uint64 + x6, x5 = bits.Mul64(x4, arg2[3]) + var x7 uint64 + var x8 uint64 + x8, x7 = bits.Mul64(x4, arg2[2]) + var x9 uint64 + var x10 uint64 + x10, x9 = bits.Mul64(x4, arg2[1]) + var x11 uint64 + var x12 uint64 + x12, x11 = bits.Mul64(x4, arg2[0]) + var x13 uint64 + var x14 uint64 + x13, x14 = bits.Add64(x12, x9, uint64(0x0)) + var x15 uint64 + var x16 uint64 + x15, x16 = bits.Add64(x10, x7, uint64(uint1(x14))) + var x17 uint64 + var x18 uint64 + x17, x18 = bits.Add64(x8, x5, uint64(uint1(x16))) + x19 := (uint64(uint1(x18)) + x6) + var x20 uint64 + _, x20 = bits.Mul64(x11, 0x4b0dff665588b13f) + var x22 uint64 + var x23 uint64 + x23, x22 = bits.Mul64(x20, 0xffffffffffffffff) + var x24 uint64 + var x25 uint64 + x25, x24 = bits.Mul64(x20, 0xfffffffffffffffe) + var x26 uint64 + var x27 uint64 + x27, x26 = bits.Mul64(x20, 0xbaaedce6af48a03b) + var x28 uint64 + var x29 uint64 + x29, x28 = bits.Mul64(x20, 0xbfd25e8cd0364141) + var x30 uint64 + var x31 uint64 + x30, x31 = bits.Add64(x29, x26, uint64(0x0)) + var x32 uint64 + var x33 uint64 + x32, x33 = bits.Add64(x27, x24, uint64(uint1(x31))) + var x34 uint64 + var x35 uint64 + x34, x35 = bits.Add64(x25, x22, uint64(uint1(x33))) + x36 := (uint64(uint1(x35)) + x23) + var x38 uint64 + _, x38 = bits.Add64(x11, x28, uint64(0x0)) + var x39 uint64 + var x40 uint64 + x39, x40 = bits.Add64(x13, x30, uint64(uint1(x38))) + var x41 uint64 + var x42 uint64 + x41, x42 = bits.Add64(x15, x32, uint64(uint1(x40))) + var x43 uint64 + var x44 uint64 + x43, x44 = bits.Add64(x17, x34, uint64(uint1(x42))) + var x45 uint64 + var x46 uint64 + x45, x46 = bits.Add64(x19, x36, uint64(uint1(x44))) + var x47 uint64 + var x48 uint64 + x48, x47 = bits.Mul64(x1, arg2[3]) + var x49 uint64 + var x50 uint64 + x50, x49 = bits.Mul64(x1, arg2[2]) + var x51 uint64 + var x52 uint64 + x52, x51 = bits.Mul64(x1, arg2[1]) + var x53 uint64 + var x54 uint64 + x54, x53 = bits.Mul64(x1, arg2[0]) + var x55 uint64 + var x56 uint64 + x55, x56 = bits.Add64(x54, x51, uint64(0x0)) + var x57 uint64 + var x58 uint64 + x57, x58 = bits.Add64(x52, x49, uint64(uint1(x56))) + var x59 uint64 + var x60 uint64 + x59, x60 = bits.Add64(x50, x47, uint64(uint1(x58))) + x61 := (uint64(uint1(x60)) + x48) + var x62 uint64 + var x63 uint64 + x62, x63 = bits.Add64(x39, x53, uint64(0x0)) + var x64 uint64 + var x65 uint64 + x64, x65 = bits.Add64(x41, x55, uint64(uint1(x63))) + var x66 uint64 + var x67 uint64 + x66, x67 = bits.Add64(x43, x57, uint64(uint1(x65))) + var x68 uint64 + var x69 uint64 + x68, x69 = bits.Add64(x45, x59, uint64(uint1(x67))) + var x70 uint64 + var x71 uint64 + x70, x71 = 
bits.Add64(uint64(uint1(x46)), x61, uint64(uint1(x69))) + var x72 uint64 + _, x72 = bits.Mul64(x62, 0x4b0dff665588b13f) + var x74 uint64 + var x75 uint64 + x75, x74 = bits.Mul64(x72, 0xffffffffffffffff) + var x76 uint64 + var x77 uint64 + x77, x76 = bits.Mul64(x72, 0xfffffffffffffffe) + var x78 uint64 + var x79 uint64 + x79, x78 = bits.Mul64(x72, 0xbaaedce6af48a03b) + var x80 uint64 + var x81 uint64 + x81, x80 = bits.Mul64(x72, 0xbfd25e8cd0364141) + var x82 uint64 + var x83 uint64 + x82, x83 = bits.Add64(x81, x78, uint64(0x0)) + var x84 uint64 + var x85 uint64 + x84, x85 = bits.Add64(x79, x76, uint64(uint1(x83))) + var x86 uint64 + var x87 uint64 + x86, x87 = bits.Add64(x77, x74, uint64(uint1(x85))) + x88 := (uint64(uint1(x87)) + x75) + var x90 uint64 + _, x90 = bits.Add64(x62, x80, uint64(0x0)) + var x91 uint64 + var x92 uint64 + x91, x92 = bits.Add64(x64, x82, uint64(uint1(x90))) + var x93 uint64 + var x94 uint64 + x93, x94 = bits.Add64(x66, x84, uint64(uint1(x92))) + var x95 uint64 + var x96 uint64 + x95, x96 = bits.Add64(x68, x86, uint64(uint1(x94))) + var x97 uint64 + var x98 uint64 + x97, x98 = bits.Add64(x70, x88, uint64(uint1(x96))) + x99 := (uint64(uint1(x98)) + uint64(uint1(x71))) + var x100 uint64 + var x101 uint64 + x101, x100 = bits.Mul64(x2, arg2[3]) + var x102 uint64 + var x103 uint64 + x103, x102 = bits.Mul64(x2, arg2[2]) + var x104 uint64 + var x105 uint64 + x105, x104 = bits.Mul64(x2, arg2[1]) + var x106 uint64 + var x107 uint64 + x107, x106 = bits.Mul64(x2, arg2[0]) + var x108 uint64 + var x109 uint64 + x108, x109 = bits.Add64(x107, x104, uint64(0x0)) + var x110 uint64 + var x111 uint64 + x110, x111 = bits.Add64(x105, x102, uint64(uint1(x109))) + var x112 uint64 + var x113 uint64 + x112, x113 = bits.Add64(x103, x100, uint64(uint1(x111))) + x114 := (uint64(uint1(x113)) + x101) + var x115 uint64 + var x116 uint64 + x115, x116 = bits.Add64(x91, x106, uint64(0x0)) + var x117 uint64 + var x118 uint64 + x117, x118 = bits.Add64(x93, x108, uint64(uint1(x116))) + var x119 uint64 + var x120 uint64 + x119, x120 = bits.Add64(x95, x110, uint64(uint1(x118))) + var x121 uint64 + var x122 uint64 + x121, x122 = bits.Add64(x97, x112, uint64(uint1(x120))) + var x123 uint64 + var x124 uint64 + x123, x124 = bits.Add64(x99, x114, uint64(uint1(x122))) + var x125 uint64 + _, x125 = bits.Mul64(x115, 0x4b0dff665588b13f) + var x127 uint64 + var x128 uint64 + x128, x127 = bits.Mul64(x125, 0xffffffffffffffff) + var x129 uint64 + var x130 uint64 + x130, x129 = bits.Mul64(x125, 0xfffffffffffffffe) + var x131 uint64 + var x132 uint64 + x132, x131 = bits.Mul64(x125, 0xbaaedce6af48a03b) + var x133 uint64 + var x134 uint64 + x134, x133 = bits.Mul64(x125, 0xbfd25e8cd0364141) + var x135 uint64 + var x136 uint64 + x135, x136 = bits.Add64(x134, x131, uint64(0x0)) + var x137 uint64 + var x138 uint64 + x137, x138 = bits.Add64(x132, x129, uint64(uint1(x136))) + var x139 uint64 + var x140 uint64 + x139, x140 = bits.Add64(x130, x127, uint64(uint1(x138))) + x141 := (uint64(uint1(x140)) + x128) + var x143 uint64 + _, x143 = bits.Add64(x115, x133, uint64(0x0)) + var x144 uint64 + var x145 uint64 + x144, x145 = bits.Add64(x117, x135, uint64(uint1(x143))) + var x146 uint64 + var x147 uint64 + x146, x147 = bits.Add64(x119, x137, uint64(uint1(x145))) + var x148 uint64 + var x149 uint64 + x148, x149 = bits.Add64(x121, x139, uint64(uint1(x147))) + var x150 uint64 + var x151 uint64 + x150, x151 = bits.Add64(x123, x141, uint64(uint1(x149))) + x152 := (uint64(uint1(x151)) + uint64(uint1(x124))) + var x153 uint64 + var x154 
uint64 + x154, x153 = bits.Mul64(x3, arg2[3]) + var x155 uint64 + var x156 uint64 + x156, x155 = bits.Mul64(x3, arg2[2]) + var x157 uint64 + var x158 uint64 + x158, x157 = bits.Mul64(x3, arg2[1]) + var x159 uint64 + var x160 uint64 + x160, x159 = bits.Mul64(x3, arg2[0]) + var x161 uint64 + var x162 uint64 + x161, x162 = bits.Add64(x160, x157, uint64(0x0)) + var x163 uint64 + var x164 uint64 + x163, x164 = bits.Add64(x158, x155, uint64(uint1(x162))) + var x165 uint64 + var x166 uint64 + x165, x166 = bits.Add64(x156, x153, uint64(uint1(x164))) + x167 := (uint64(uint1(x166)) + x154) + var x168 uint64 + var x169 uint64 + x168, x169 = bits.Add64(x144, x159, uint64(0x0)) + var x170 uint64 + var x171 uint64 + x170, x171 = bits.Add64(x146, x161, uint64(uint1(x169))) + var x172 uint64 + var x173 uint64 + x172, x173 = bits.Add64(x148, x163, uint64(uint1(x171))) + var x174 uint64 + var x175 uint64 + x174, x175 = bits.Add64(x150, x165, uint64(uint1(x173))) + var x176 uint64 + var x177 uint64 + x176, x177 = bits.Add64(x152, x167, uint64(uint1(x175))) + var x178 uint64 + _, x178 = bits.Mul64(x168, 0x4b0dff665588b13f) + var x180 uint64 + var x181 uint64 + x181, x180 = bits.Mul64(x178, 0xffffffffffffffff) + var x182 uint64 + var x183 uint64 + x183, x182 = bits.Mul64(x178, 0xfffffffffffffffe) + var x184 uint64 + var x185 uint64 + x185, x184 = bits.Mul64(x178, 0xbaaedce6af48a03b) + var x186 uint64 + var x187 uint64 + x187, x186 = bits.Mul64(x178, 0xbfd25e8cd0364141) + var x188 uint64 + var x189 uint64 + x188, x189 = bits.Add64(x187, x184, uint64(0x0)) + var x190 uint64 + var x191 uint64 + x190, x191 = bits.Add64(x185, x182, uint64(uint1(x189))) + var x192 uint64 + var x193 uint64 + x192, x193 = bits.Add64(x183, x180, uint64(uint1(x191))) + x194 := (uint64(uint1(x193)) + x181) + var x196 uint64 + _, x196 = bits.Add64(x168, x186, uint64(0x0)) + var x197 uint64 + var x198 uint64 + x197, x198 = bits.Add64(x170, x188, uint64(uint1(x196))) + var x199 uint64 + var x200 uint64 + x199, x200 = bits.Add64(x172, x190, uint64(uint1(x198))) + var x201 uint64 + var x202 uint64 + x201, x202 = bits.Add64(x174, x192, uint64(uint1(x200))) + var x203 uint64 + var x204 uint64 + x203, x204 = bits.Add64(x176, x194, uint64(uint1(x202))) + x205 := (uint64(uint1(x204)) + uint64(uint1(x177))) + var x206 uint64 + var x207 uint64 + x206, x207 = bits.Sub64(x197, 0xbfd25e8cd0364141, uint64(0x0)) + var x208 uint64 + var x209 uint64 + x208, x209 = bits.Sub64(x199, 0xbaaedce6af48a03b, uint64(uint1(x207))) + var x210 uint64 + var x211 uint64 + x210, x211 = bits.Sub64(x201, 0xfffffffffffffffe, uint64(uint1(x209))) + var x212 uint64 + var x213 uint64 + x212, x213 = bits.Sub64(x203, 0xffffffffffffffff, uint64(uint1(x211))) + var x215 uint64 + _, x215 = bits.Sub64(x205, uint64(0x0), uint64(uint1(x213))) + var x216 uint64 + cmovznzU64(&x216, uint1(x215), x206, x197) + var x217 uint64 + cmovznzU64(&x217, uint1(x215), x208, x199) + var x218 uint64 + cmovznzU64(&x218, uint1(x215), x210, x201) + var x219 uint64 + cmovznzU64(&x219, uint1(x215), x212, x203) + out1[0] = x216 + out1[1] = x217 + out1[2] = x218 + out1[3] = x219 +} + +// Square squares a field element in the Montgomery domain. 
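+// Square reuses the word-by-word Montgomery schedule of Mul with both inputs
+// equal: each round multiplies the running low limb by 0x4b0dff665588b13f
+// (the per-word Montgomery constant, −m⁻¹ mod 2^64) and adds the matching
+// multiple of the modulus so that limb cancels; a final conditional
+// subtraction keeps the result below m.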
+// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// +// Postconditions: +// +// eval (from_montgomery out1) mod m = (eval (from_montgomery arg1) * eval (from_montgomery arg1)) mod m +// 0 ≤ eval out1 < m +func Square(out1 *MontgomeryDomainFieldElement, arg1 *MontgomeryDomainFieldElement) { + x1 := arg1[1] + x2 := arg1[2] + x3 := arg1[3] + x4 := arg1[0] + var x5 uint64 + var x6 uint64 + x6, x5 = bits.Mul64(x4, arg1[3]) + var x7 uint64 + var x8 uint64 + x8, x7 = bits.Mul64(x4, arg1[2]) + var x9 uint64 + var x10 uint64 + x10, x9 = bits.Mul64(x4, arg1[1]) + var x11 uint64 + var x12 uint64 + x12, x11 = bits.Mul64(x4, arg1[0]) + var x13 uint64 + var x14 uint64 + x13, x14 = bits.Add64(x12, x9, uint64(0x0)) + var x15 uint64 + var x16 uint64 + x15, x16 = bits.Add64(x10, x7, uint64(uint1(x14))) + var x17 uint64 + var x18 uint64 + x17, x18 = bits.Add64(x8, x5, uint64(uint1(x16))) + x19 := (uint64(uint1(x18)) + x6) + var x20 uint64 + _, x20 = bits.Mul64(x11, 0x4b0dff665588b13f) + var x22 uint64 + var x23 uint64 + x23, x22 = bits.Mul64(x20, 0xffffffffffffffff) + var x24 uint64 + var x25 uint64 + x25, x24 = bits.Mul64(x20, 0xfffffffffffffffe) + var x26 uint64 + var x27 uint64 + x27, x26 = bits.Mul64(x20, 0xbaaedce6af48a03b) + var x28 uint64 + var x29 uint64 + x29, x28 = bits.Mul64(x20, 0xbfd25e8cd0364141) + var x30 uint64 + var x31 uint64 + x30, x31 = bits.Add64(x29, x26, uint64(0x0)) + var x32 uint64 + var x33 uint64 + x32, x33 = bits.Add64(x27, x24, uint64(uint1(x31))) + var x34 uint64 + var x35 uint64 + x34, x35 = bits.Add64(x25, x22, uint64(uint1(x33))) + x36 := (uint64(uint1(x35)) + x23) + var x38 uint64 + _, x38 = bits.Add64(x11, x28, uint64(0x0)) + var x39 uint64 + var x40 uint64 + x39, x40 = bits.Add64(x13, x30, uint64(uint1(x38))) + var x41 uint64 + var x42 uint64 + x41, x42 = bits.Add64(x15, x32, uint64(uint1(x40))) + var x43 uint64 + var x44 uint64 + x43, x44 = bits.Add64(x17, x34, uint64(uint1(x42))) + var x45 uint64 + var x46 uint64 + x45, x46 = bits.Add64(x19, x36, uint64(uint1(x44))) + var x47 uint64 + var x48 uint64 + x48, x47 = bits.Mul64(x1, arg1[3]) + var x49 uint64 + var x50 uint64 + x50, x49 = bits.Mul64(x1, arg1[2]) + var x51 uint64 + var x52 uint64 + x52, x51 = bits.Mul64(x1, arg1[1]) + var x53 uint64 + var x54 uint64 + x54, x53 = bits.Mul64(x1, arg1[0]) + var x55 uint64 + var x56 uint64 + x55, x56 = bits.Add64(x54, x51, uint64(0x0)) + var x57 uint64 + var x58 uint64 + x57, x58 = bits.Add64(x52, x49, uint64(uint1(x56))) + var x59 uint64 + var x60 uint64 + x59, x60 = bits.Add64(x50, x47, uint64(uint1(x58))) + x61 := (uint64(uint1(x60)) + x48) + var x62 uint64 + var x63 uint64 + x62, x63 = bits.Add64(x39, x53, uint64(0x0)) + var x64 uint64 + var x65 uint64 + x64, x65 = bits.Add64(x41, x55, uint64(uint1(x63))) + var x66 uint64 + var x67 uint64 + x66, x67 = bits.Add64(x43, x57, uint64(uint1(x65))) + var x68 uint64 + var x69 uint64 + x68, x69 = bits.Add64(x45, x59, uint64(uint1(x67))) + var x70 uint64 + var x71 uint64 + x70, x71 = bits.Add64(uint64(uint1(x46)), x61, uint64(uint1(x69))) + var x72 uint64 + _, x72 = bits.Mul64(x62, 0x4b0dff665588b13f) + var x74 uint64 + var x75 uint64 + x75, x74 = bits.Mul64(x72, 0xffffffffffffffff) + var x76 uint64 + var x77 uint64 + x77, x76 = bits.Mul64(x72, 0xfffffffffffffffe) + var x78 uint64 + var x79 uint64 + x79, x78 = bits.Mul64(x72, 0xbaaedce6af48a03b) + var x80 uint64 + var x81 uint64 + x81, x80 = bits.Mul64(x72, 0xbfd25e8cd0364141) + var x82 uint64 + var x83 uint64 + x82, x83 = bits.Add64(x81, x78, uint64(0x0)) + var x84 uint64 + var x85 
uint64 + x84, x85 = bits.Add64(x79, x76, uint64(uint1(x83))) + var x86 uint64 + var x87 uint64 + x86, x87 = bits.Add64(x77, x74, uint64(uint1(x85))) + x88 := (uint64(uint1(x87)) + x75) + var x90 uint64 + _, x90 = bits.Add64(x62, x80, uint64(0x0)) + var x91 uint64 + var x92 uint64 + x91, x92 = bits.Add64(x64, x82, uint64(uint1(x90))) + var x93 uint64 + var x94 uint64 + x93, x94 = bits.Add64(x66, x84, uint64(uint1(x92))) + var x95 uint64 + var x96 uint64 + x95, x96 = bits.Add64(x68, x86, uint64(uint1(x94))) + var x97 uint64 + var x98 uint64 + x97, x98 = bits.Add64(x70, x88, uint64(uint1(x96))) + x99 := (uint64(uint1(x98)) + uint64(uint1(x71))) + var x100 uint64 + var x101 uint64 + x101, x100 = bits.Mul64(x2, arg1[3]) + var x102 uint64 + var x103 uint64 + x103, x102 = bits.Mul64(x2, arg1[2]) + var x104 uint64 + var x105 uint64 + x105, x104 = bits.Mul64(x2, arg1[1]) + var x106 uint64 + var x107 uint64 + x107, x106 = bits.Mul64(x2, arg1[0]) + var x108 uint64 + var x109 uint64 + x108, x109 = bits.Add64(x107, x104, uint64(0x0)) + var x110 uint64 + var x111 uint64 + x110, x111 = bits.Add64(x105, x102, uint64(uint1(x109))) + var x112 uint64 + var x113 uint64 + x112, x113 = bits.Add64(x103, x100, uint64(uint1(x111))) + x114 := (uint64(uint1(x113)) + x101) + var x115 uint64 + var x116 uint64 + x115, x116 = bits.Add64(x91, x106, uint64(0x0)) + var x117 uint64 + var x118 uint64 + x117, x118 = bits.Add64(x93, x108, uint64(uint1(x116))) + var x119 uint64 + var x120 uint64 + x119, x120 = bits.Add64(x95, x110, uint64(uint1(x118))) + var x121 uint64 + var x122 uint64 + x121, x122 = bits.Add64(x97, x112, uint64(uint1(x120))) + var x123 uint64 + var x124 uint64 + x123, x124 = bits.Add64(x99, x114, uint64(uint1(x122))) + var x125 uint64 + _, x125 = bits.Mul64(x115, 0x4b0dff665588b13f) + var x127 uint64 + var x128 uint64 + x128, x127 = bits.Mul64(x125, 0xffffffffffffffff) + var x129 uint64 + var x130 uint64 + x130, x129 = bits.Mul64(x125, 0xfffffffffffffffe) + var x131 uint64 + var x132 uint64 + x132, x131 = bits.Mul64(x125, 0xbaaedce6af48a03b) + var x133 uint64 + var x134 uint64 + x134, x133 = bits.Mul64(x125, 0xbfd25e8cd0364141) + var x135 uint64 + var x136 uint64 + x135, x136 = bits.Add64(x134, x131, uint64(0x0)) + var x137 uint64 + var x138 uint64 + x137, x138 = bits.Add64(x132, x129, uint64(uint1(x136))) + var x139 uint64 + var x140 uint64 + x139, x140 = bits.Add64(x130, x127, uint64(uint1(x138))) + x141 := (uint64(uint1(x140)) + x128) + var x143 uint64 + _, x143 = bits.Add64(x115, x133, uint64(0x0)) + var x144 uint64 + var x145 uint64 + x144, x145 = bits.Add64(x117, x135, uint64(uint1(x143))) + var x146 uint64 + var x147 uint64 + x146, x147 = bits.Add64(x119, x137, uint64(uint1(x145))) + var x148 uint64 + var x149 uint64 + x148, x149 = bits.Add64(x121, x139, uint64(uint1(x147))) + var x150 uint64 + var x151 uint64 + x150, x151 = bits.Add64(x123, x141, uint64(uint1(x149))) + x152 := (uint64(uint1(x151)) + uint64(uint1(x124))) + var x153 uint64 + var x154 uint64 + x154, x153 = bits.Mul64(x3, arg1[3]) + var x155 uint64 + var x156 uint64 + x156, x155 = bits.Mul64(x3, arg1[2]) + var x157 uint64 + var x158 uint64 + x158, x157 = bits.Mul64(x3, arg1[1]) + var x159 uint64 + var x160 uint64 + x160, x159 = bits.Mul64(x3, arg1[0]) + var x161 uint64 + var x162 uint64 + x161, x162 = bits.Add64(x160, x157, uint64(0x0)) + var x163 uint64 + var x164 uint64 + x163, x164 = bits.Add64(x158, x155, uint64(uint1(x162))) + var x165 uint64 + var x166 uint64 + x165, x166 = bits.Add64(x156, x153, uint64(uint1(x164))) + x167 := 
(uint64(uint1(x166)) + x154) + var x168 uint64 + var x169 uint64 + x168, x169 = bits.Add64(x144, x159, uint64(0x0)) + var x170 uint64 + var x171 uint64 + x170, x171 = bits.Add64(x146, x161, uint64(uint1(x169))) + var x172 uint64 + var x173 uint64 + x172, x173 = bits.Add64(x148, x163, uint64(uint1(x171))) + var x174 uint64 + var x175 uint64 + x174, x175 = bits.Add64(x150, x165, uint64(uint1(x173))) + var x176 uint64 + var x177 uint64 + x176, x177 = bits.Add64(x152, x167, uint64(uint1(x175))) + var x178 uint64 + _, x178 = bits.Mul64(x168, 0x4b0dff665588b13f) + var x180 uint64 + var x181 uint64 + x181, x180 = bits.Mul64(x178, 0xffffffffffffffff) + var x182 uint64 + var x183 uint64 + x183, x182 = bits.Mul64(x178, 0xfffffffffffffffe) + var x184 uint64 + var x185 uint64 + x185, x184 = bits.Mul64(x178, 0xbaaedce6af48a03b) + var x186 uint64 + var x187 uint64 + x187, x186 = bits.Mul64(x178, 0xbfd25e8cd0364141) + var x188 uint64 + var x189 uint64 + x188, x189 = bits.Add64(x187, x184, uint64(0x0)) + var x190 uint64 + var x191 uint64 + x190, x191 = bits.Add64(x185, x182, uint64(uint1(x189))) + var x192 uint64 + var x193 uint64 + x192, x193 = bits.Add64(x183, x180, uint64(uint1(x191))) + x194 := (uint64(uint1(x193)) + x181) + var x196 uint64 + _, x196 = bits.Add64(x168, x186, uint64(0x0)) + var x197 uint64 + var x198 uint64 + x197, x198 = bits.Add64(x170, x188, uint64(uint1(x196))) + var x199 uint64 + var x200 uint64 + x199, x200 = bits.Add64(x172, x190, uint64(uint1(x198))) + var x201 uint64 + var x202 uint64 + x201, x202 = bits.Add64(x174, x192, uint64(uint1(x200))) + var x203 uint64 + var x204 uint64 + x203, x204 = bits.Add64(x176, x194, uint64(uint1(x202))) + x205 := (uint64(uint1(x204)) + uint64(uint1(x177))) + var x206 uint64 + var x207 uint64 + x206, x207 = bits.Sub64(x197, 0xbfd25e8cd0364141, uint64(0x0)) + var x208 uint64 + var x209 uint64 + x208, x209 = bits.Sub64(x199, 0xbaaedce6af48a03b, uint64(uint1(x207))) + var x210 uint64 + var x211 uint64 + x210, x211 = bits.Sub64(x201, 0xfffffffffffffffe, uint64(uint1(x209))) + var x212 uint64 + var x213 uint64 + x212, x213 = bits.Sub64(x203, 0xffffffffffffffff, uint64(uint1(x211))) + var x215 uint64 + _, x215 = bits.Sub64(x205, uint64(0x0), uint64(uint1(x213))) + var x216 uint64 + cmovznzU64(&x216, uint1(x215), x206, x197) + var x217 uint64 + cmovznzU64(&x217, uint1(x215), x208, x199) + var x218 uint64 + cmovznzU64(&x218, uint1(x215), x210, x201) + var x219 uint64 + cmovznzU64(&x219, uint1(x215), x212, x203) + out1[0] = x216 + out1[1] = x217 + out1[2] = x218 + out1[3] = x219 +} + +// Add adds two field elements in the Montgomery domain. 
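+// The sum is formed limb-wise with carry propagation and then reduced by one
+// conditional subtraction of the modulus limbs (0xbfd25e8cd0364141,
+// 0xbaaedce6af48a03b, 0xfffffffffffffffe, 0xffffffffffffffff, low limb first),
+// selected via cmovznzU64 so the reduction never branches.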
+// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// 0 ≤ eval arg2 < m +// +// Postconditions: +// +// eval (from_montgomery out1) mod m = (eval (from_montgomery arg1) + eval (from_montgomery arg2)) mod m +// 0 ≤ eval out1 < m +func Add( + out1 *MontgomeryDomainFieldElement, + arg1 *MontgomeryDomainFieldElement, + arg2 *MontgomeryDomainFieldElement, +) { + var x1 uint64 + var x2 uint64 + x1, x2 = bits.Add64(arg1[0], arg2[0], uint64(0x0)) + var x3 uint64 + var x4 uint64 + x3, x4 = bits.Add64(arg1[1], arg2[1], uint64(uint1(x2))) + var x5 uint64 + var x6 uint64 + x5, x6 = bits.Add64(arg1[2], arg2[2], uint64(uint1(x4))) + var x7 uint64 + var x8 uint64 + x7, x8 = bits.Add64(arg1[3], arg2[3], uint64(uint1(x6))) + var x9 uint64 + var x10 uint64 + x9, x10 = bits.Sub64(x1, 0xbfd25e8cd0364141, uint64(0x0)) + var x11 uint64 + var x12 uint64 + x11, x12 = bits.Sub64(x3, 0xbaaedce6af48a03b, uint64(uint1(x10))) + var x13 uint64 + var x14 uint64 + x13, x14 = bits.Sub64(x5, 0xfffffffffffffffe, uint64(uint1(x12))) + var x15 uint64 + var x16 uint64 + x15, x16 = bits.Sub64(x7, 0xffffffffffffffff, uint64(uint1(x14))) + var x18 uint64 + _, x18 = bits.Sub64(uint64(uint1(x8)), uint64(0x0), uint64(uint1(x16))) + var x19 uint64 + cmovznzU64(&x19, uint1(x18), x9, x1) + var x20 uint64 + cmovznzU64(&x20, uint1(x18), x11, x3) + var x21 uint64 + cmovznzU64(&x21, uint1(x18), x13, x5) + var x22 uint64 + cmovznzU64(&x22, uint1(x18), x15, x7) + out1[0] = x19 + out1[1] = x20 + out1[2] = x21 + out1[3] = x22 +} + +// Sub subtracts two field elements in the Montgomery domain. +// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// 0 ≤ eval arg2 < m +// +// Postconditions: +// +// eval (from_montgomery out1) mod m = (eval (from_montgomery arg1) - eval (from_montgomery arg2)) mod m +// 0 ≤ eval out1 < m +func Sub( + out1 *MontgomeryDomainFieldElement, + arg1 *MontgomeryDomainFieldElement, + arg2 *MontgomeryDomainFieldElement, +) { + var x1 uint64 + var x2 uint64 + x1, x2 = bits.Sub64(arg1[0], arg2[0], uint64(0x0)) + var x3 uint64 + var x4 uint64 + x3, x4 = bits.Sub64(arg1[1], arg2[1], uint64(uint1(x2))) + var x5 uint64 + var x6 uint64 + x5, x6 = bits.Sub64(arg1[2], arg2[2], uint64(uint1(x4))) + var x7 uint64 + var x8 uint64 + x7, x8 = bits.Sub64(arg1[3], arg2[3], uint64(uint1(x6))) + var x9 uint64 + cmovznzU64(&x9, uint1(x8), uint64(0x0), 0xffffffffffffffff) + var x10 uint64 + var x11 uint64 + x10, x11 = bits.Add64(x1, (x9 & 0xbfd25e8cd0364141), uint64(0x0)) + var x12 uint64 + var x13 uint64 + x12, x13 = bits.Add64(x3, (x9 & 0xbaaedce6af48a03b), uint64(uint1(x11))) + var x14 uint64 + var x15 uint64 + x14, x15 = bits.Add64(x5, (x9 & 0xfffffffffffffffe), uint64(uint1(x13))) + var x16 uint64 + x16, _ = bits.Add64(x7, x9, uint64(uint1(x15))) + out1[0] = x10 + out1[1] = x12 + out1[2] = x14 + out1[3] = x16 +} + +// Opp negates a field element in the Montgomery domain. 
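+// Opp computes 0 − arg1 limb-wise; if the subtraction borrows, the borrow is
+// turned into a mask with cmovznzU64 and the modulus is added back, the same
+// correction step Sub uses above.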
+// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// +// Postconditions: +// +// eval (from_montgomery out1) mod m = -eval (from_montgomery arg1) mod m +// 0 ≤ eval out1 < m +func Opp(out1 *MontgomeryDomainFieldElement, arg1 *MontgomeryDomainFieldElement) { + var x1 uint64 + var x2 uint64 + x1, x2 = bits.Sub64(uint64(0x0), arg1[0], uint64(0x0)) + var x3 uint64 + var x4 uint64 + x3, x4 = bits.Sub64(uint64(0x0), arg1[1], uint64(uint1(x2))) + var x5 uint64 + var x6 uint64 + x5, x6 = bits.Sub64(uint64(0x0), arg1[2], uint64(uint1(x4))) + var x7 uint64 + var x8 uint64 + x7, x8 = bits.Sub64(uint64(0x0), arg1[3], uint64(uint1(x6))) + var x9 uint64 + cmovznzU64(&x9, uint1(x8), uint64(0x0), 0xffffffffffffffff) + var x10 uint64 + var x11 uint64 + x10, x11 = bits.Add64(x1, (x9 & 0xbfd25e8cd0364141), uint64(0x0)) + var x12 uint64 + var x13 uint64 + x12, x13 = bits.Add64(x3, (x9 & 0xbaaedce6af48a03b), uint64(uint1(x11))) + var x14 uint64 + var x15 uint64 + x14, x15 = bits.Add64(x5, (x9 & 0xfffffffffffffffe), uint64(uint1(x13))) + var x16 uint64 + x16, _ = bits.Add64(x7, x9, uint64(uint1(x15))) + out1[0] = x10 + out1[1] = x12 + out1[2] = x14 + out1[3] = x16 +} + +// FromMontgomery translates a field element out of the Montgomery domain. +// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// +// Postconditions: +// +// eval out1 mod m = (eval arg1 * ((2^64)⁻¹ mod m)^4) mod m +// 0 ≤ eval out1 < m +func FromMontgomery(out1 *NonMontgomeryDomainFieldElement, arg1 *MontgomeryDomainFieldElement) { + x1 := arg1[0] + var x2 uint64 + _, x2 = bits.Mul64(x1, 0x4b0dff665588b13f) + var x4 uint64 + var x5 uint64 + x5, x4 = bits.Mul64(x2, 0xffffffffffffffff) + var x6 uint64 + var x7 uint64 + x7, x6 = bits.Mul64(x2, 0xfffffffffffffffe) + var x8 uint64 + var x9 uint64 + x9, x8 = bits.Mul64(x2, 0xbaaedce6af48a03b) + var x10 uint64 + var x11 uint64 + x11, x10 = bits.Mul64(x2, 0xbfd25e8cd0364141) + var x12 uint64 + var x13 uint64 + x12, x13 = bits.Add64(x11, x8, uint64(0x0)) + var x14 uint64 + var x15 uint64 + x14, x15 = bits.Add64(x9, x6, uint64(uint1(x13))) + var x16 uint64 + var x17 uint64 + x16, x17 = bits.Add64(x7, x4, uint64(uint1(x15))) + var x19 uint64 + _, x19 = bits.Add64(x1, x10, uint64(0x0)) + var x20 uint64 + var x21 uint64 + x20, x21 = bits.Add64(uint64(0x0), x12, uint64(uint1(x19))) + var x22 uint64 + var x23 uint64 + x22, x23 = bits.Add64(uint64(0x0), x14, uint64(uint1(x21))) + var x24 uint64 + var x25 uint64 + x24, x25 = bits.Add64(uint64(0x0), x16, uint64(uint1(x23))) + var x26 uint64 + var x27 uint64 + x26, x27 = bits.Add64(uint64(0x0), (uint64(uint1(x17)) + x5), uint64(uint1(x25))) + var x28 uint64 + var x29 uint64 + x28, x29 = bits.Add64(x20, arg1[1], uint64(0x0)) + var x30 uint64 + var x31 uint64 + x30, x31 = bits.Add64(x22, uint64(0x0), uint64(uint1(x29))) + var x32 uint64 + var x33 uint64 + x32, x33 = bits.Add64(x24, uint64(0x0), uint64(uint1(x31))) + var x34 uint64 + var x35 uint64 + x34, x35 = bits.Add64(x26, uint64(0x0), uint64(uint1(x33))) + var x36 uint64 + _, x36 = bits.Mul64(x28, 0x4b0dff665588b13f) + var x38 uint64 + var x39 uint64 + x39, x38 = bits.Mul64(x36, 0xffffffffffffffff) + var x40 uint64 + var x41 uint64 + x41, x40 = bits.Mul64(x36, 0xfffffffffffffffe) + var x42 uint64 + var x43 uint64 + x43, x42 = bits.Mul64(x36, 0xbaaedce6af48a03b) + var x44 uint64 + var x45 uint64 + x45, x44 = bits.Mul64(x36, 0xbfd25e8cd0364141) + var x46 uint64 + var x47 uint64 + x46, x47 = bits.Add64(x45, x42, uint64(0x0)) + var x48 uint64 + var x49 uint64 + x48, x49 = bits.Add64(x43, x40, uint64(uint1(x47))) + 
var x50 uint64 + var x51 uint64 + x50, x51 = bits.Add64(x41, x38, uint64(uint1(x49))) + var x53 uint64 + _, x53 = bits.Add64(x28, x44, uint64(0x0)) + var x54 uint64 + var x55 uint64 + x54, x55 = bits.Add64(x30, x46, uint64(uint1(x53))) + var x56 uint64 + var x57 uint64 + x56, x57 = bits.Add64(x32, x48, uint64(uint1(x55))) + var x58 uint64 + var x59 uint64 + x58, x59 = bits.Add64(x34, x50, uint64(uint1(x57))) + var x60 uint64 + var x61 uint64 + x60, x61 = bits.Add64( + (uint64(uint1(x35)) + uint64(uint1(x27))), + (uint64(uint1(x51)) + x39), + uint64(uint1(x59)), + ) + var x62 uint64 + var x63 uint64 + x62, x63 = bits.Add64(x54, arg1[2], uint64(0x0)) + var x64 uint64 + var x65 uint64 + x64, x65 = bits.Add64(x56, uint64(0x0), uint64(uint1(x63))) + var x66 uint64 + var x67 uint64 + x66, x67 = bits.Add64(x58, uint64(0x0), uint64(uint1(x65))) + var x68 uint64 + var x69 uint64 + x68, x69 = bits.Add64(x60, uint64(0x0), uint64(uint1(x67))) + var x70 uint64 + _, x70 = bits.Mul64(x62, 0x4b0dff665588b13f) + var x72 uint64 + var x73 uint64 + x73, x72 = bits.Mul64(x70, 0xffffffffffffffff) + var x74 uint64 + var x75 uint64 + x75, x74 = bits.Mul64(x70, 0xfffffffffffffffe) + var x76 uint64 + var x77 uint64 + x77, x76 = bits.Mul64(x70, 0xbaaedce6af48a03b) + var x78 uint64 + var x79 uint64 + x79, x78 = bits.Mul64(x70, 0xbfd25e8cd0364141) + var x80 uint64 + var x81 uint64 + x80, x81 = bits.Add64(x79, x76, uint64(0x0)) + var x82 uint64 + var x83 uint64 + x82, x83 = bits.Add64(x77, x74, uint64(uint1(x81))) + var x84 uint64 + var x85 uint64 + x84, x85 = bits.Add64(x75, x72, uint64(uint1(x83))) + var x87 uint64 + _, x87 = bits.Add64(x62, x78, uint64(0x0)) + var x88 uint64 + var x89 uint64 + x88, x89 = bits.Add64(x64, x80, uint64(uint1(x87))) + var x90 uint64 + var x91 uint64 + x90, x91 = bits.Add64(x66, x82, uint64(uint1(x89))) + var x92 uint64 + var x93 uint64 + x92, x93 = bits.Add64(x68, x84, uint64(uint1(x91))) + var x94 uint64 + var x95 uint64 + x94, x95 = bits.Add64( + (uint64(uint1(x69)) + uint64(uint1(x61))), + (uint64(uint1(x85)) + x73), + uint64(uint1(x93)), + ) + var x96 uint64 + var x97 uint64 + x96, x97 = bits.Add64(x88, arg1[3], uint64(0x0)) + var x98 uint64 + var x99 uint64 + x98, x99 = bits.Add64(x90, uint64(0x0), uint64(uint1(x97))) + var x100 uint64 + var x101 uint64 + x100, x101 = bits.Add64(x92, uint64(0x0), uint64(uint1(x99))) + var x102 uint64 + var x103 uint64 + x102, x103 = bits.Add64(x94, uint64(0x0), uint64(uint1(x101))) + var x104 uint64 + _, x104 = bits.Mul64(x96, 0x4b0dff665588b13f) + var x106 uint64 + var x107 uint64 + x107, x106 = bits.Mul64(x104, 0xffffffffffffffff) + var x108 uint64 + var x109 uint64 + x109, x108 = bits.Mul64(x104, 0xfffffffffffffffe) + var x110 uint64 + var x111 uint64 + x111, x110 = bits.Mul64(x104, 0xbaaedce6af48a03b) + var x112 uint64 + var x113 uint64 + x113, x112 = bits.Mul64(x104, 0xbfd25e8cd0364141) + var x114 uint64 + var x115 uint64 + x114, x115 = bits.Add64(x113, x110, uint64(0x0)) + var x116 uint64 + var x117 uint64 + x116, x117 = bits.Add64(x111, x108, uint64(uint1(x115))) + var x118 uint64 + var x119 uint64 + x118, x119 = bits.Add64(x109, x106, uint64(uint1(x117))) + var x121 uint64 + _, x121 = bits.Add64(x96, x112, uint64(0x0)) + var x122 uint64 + var x123 uint64 + x122, x123 = bits.Add64(x98, x114, uint64(uint1(x121))) + var x124 uint64 + var x125 uint64 + x124, x125 = bits.Add64(x100, x116, uint64(uint1(x123))) + var x126 uint64 + var x127 uint64 + x126, x127 = bits.Add64(x102, x118, uint64(uint1(x125))) + var x128 uint64 + var x129 uint64 + x128, 
x129 = bits.Add64( + (uint64(uint1(x103)) + uint64(uint1(x95))), + (uint64(uint1(x119)) + x107), + uint64(uint1(x127)), + ) + var x130 uint64 + var x131 uint64 + x130, x131 = bits.Sub64(x122, 0xbfd25e8cd0364141, uint64(0x0)) + var x132 uint64 + var x133 uint64 + x132, x133 = bits.Sub64(x124, 0xbaaedce6af48a03b, uint64(uint1(x131))) + var x134 uint64 + var x135 uint64 + x134, x135 = bits.Sub64(x126, 0xfffffffffffffffe, uint64(uint1(x133))) + var x136 uint64 + var x137 uint64 + x136, x137 = bits.Sub64(x128, 0xffffffffffffffff, uint64(uint1(x135))) + var x139 uint64 + _, x139 = bits.Sub64(uint64(uint1(x129)), uint64(0x0), uint64(uint1(x137))) + var x140 uint64 + cmovznzU64(&x140, uint1(x139), x130, x122) + var x141 uint64 + cmovznzU64(&x141, uint1(x139), x132, x124) + var x142 uint64 + cmovznzU64(&x142, uint1(x139), x134, x126) + var x143 uint64 + cmovznzU64(&x143, uint1(x139), x136, x128) + out1[0] = x140 + out1[1] = x141 + out1[2] = x142 + out1[3] = x143 +} + +// ToMontgomery translates a field element into the Montgomery domain. +// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// +// Postconditions: +// +// eval (from_montgomery out1) mod m = eval arg1 mod m +// 0 ≤ eval out1 < m +func ToMontgomery(out1 *MontgomeryDomainFieldElement, arg1 *NonMontgomeryDomainFieldElement) { + x1 := arg1[1] + x2 := arg1[2] + x3 := arg1[3] + x4 := arg1[0] + var x5 uint64 + var x6 uint64 + x6, x5 = bits.Mul64(x4, 0x9d671cd581c69bc5) + var x7 uint64 + var x8 uint64 + x8, x7 = bits.Mul64(x4, 0xe697f5e45bcd07c6) + var x9 uint64 + var x10 uint64 + x10, x9 = bits.Mul64(x4, 0x741496c20e7cf878) + var x11 uint64 + var x12 uint64 + x12, x11 = bits.Mul64(x4, 0x896cf21467d7d140) + var x13 uint64 + var x14 uint64 + x13, x14 = bits.Add64(x12, x9, uint64(0x0)) + var x15 uint64 + var x16 uint64 + x15, x16 = bits.Add64(x10, x7, uint64(uint1(x14))) + var x17 uint64 + var x18 uint64 + x17, x18 = bits.Add64(x8, x5, uint64(uint1(x16))) + var x19 uint64 + _, x19 = bits.Mul64(x11, 0x4b0dff665588b13f) + var x21 uint64 + var x22 uint64 + x22, x21 = bits.Mul64(x19, 0xffffffffffffffff) + var x23 uint64 + var x24 uint64 + x24, x23 = bits.Mul64(x19, 0xfffffffffffffffe) + var x25 uint64 + var x26 uint64 + x26, x25 = bits.Mul64(x19, 0xbaaedce6af48a03b) + var x27 uint64 + var x28 uint64 + x28, x27 = bits.Mul64(x19, 0xbfd25e8cd0364141) + var x29 uint64 + var x30 uint64 + x29, x30 = bits.Add64(x28, x25, uint64(0x0)) + var x31 uint64 + var x32 uint64 + x31, x32 = bits.Add64(x26, x23, uint64(uint1(x30))) + var x33 uint64 + var x34 uint64 + x33, x34 = bits.Add64(x24, x21, uint64(uint1(x32))) + var x36 uint64 + _, x36 = bits.Add64(x11, x27, uint64(0x0)) + var x37 uint64 + var x38 uint64 + x37, x38 = bits.Add64(x13, x29, uint64(uint1(x36))) + var x39 uint64 + var x40 uint64 + x39, x40 = bits.Add64(x15, x31, uint64(uint1(x38))) + var x41 uint64 + var x42 uint64 + x41, x42 = bits.Add64(x17, x33, uint64(uint1(x40))) + var x43 uint64 + var x44 uint64 + x43, x44 = bits.Add64((uint64(uint1(x18)) + x6), (uint64(uint1(x34)) + x22), uint64(uint1(x42))) + var x45 uint64 + var x46 uint64 + x46, x45 = bits.Mul64(x1, 0x9d671cd581c69bc5) + var x47 uint64 + var x48 uint64 + x48, x47 = bits.Mul64(x1, 0xe697f5e45bcd07c6) + var x49 uint64 + var x50 uint64 + x50, x49 = bits.Mul64(x1, 0x741496c20e7cf878) + var x51 uint64 + var x52 uint64 + x52, x51 = bits.Mul64(x1, 0x896cf21467d7d140) + var x53 uint64 + var x54 uint64 + x53, x54 = bits.Add64(x52, x49, uint64(0x0)) + var x55 uint64 + var x56 uint64 + x55, x56 = bits.Add64(x50, x47, uint64(uint1(x54))) + var x57 
uint64 + var x58 uint64 + x57, x58 = bits.Add64(x48, x45, uint64(uint1(x56))) + var x59 uint64 + var x60 uint64 + x59, x60 = bits.Add64(x37, x51, uint64(0x0)) + var x61 uint64 + var x62 uint64 + x61, x62 = bits.Add64(x39, x53, uint64(uint1(x60))) + var x63 uint64 + var x64 uint64 + x63, x64 = bits.Add64(x41, x55, uint64(uint1(x62))) + var x65 uint64 + var x66 uint64 + x65, x66 = bits.Add64(x43, x57, uint64(uint1(x64))) + var x67 uint64 + _, x67 = bits.Mul64(x59, 0x4b0dff665588b13f) + var x69 uint64 + var x70 uint64 + x70, x69 = bits.Mul64(x67, 0xffffffffffffffff) + var x71 uint64 + var x72 uint64 + x72, x71 = bits.Mul64(x67, 0xfffffffffffffffe) + var x73 uint64 + var x74 uint64 + x74, x73 = bits.Mul64(x67, 0xbaaedce6af48a03b) + var x75 uint64 + var x76 uint64 + x76, x75 = bits.Mul64(x67, 0xbfd25e8cd0364141) + var x77 uint64 + var x78 uint64 + x77, x78 = bits.Add64(x76, x73, uint64(0x0)) + var x79 uint64 + var x80 uint64 + x79, x80 = bits.Add64(x74, x71, uint64(uint1(x78))) + var x81 uint64 + var x82 uint64 + x81, x82 = bits.Add64(x72, x69, uint64(uint1(x80))) + var x84 uint64 + _, x84 = bits.Add64(x59, x75, uint64(0x0)) + var x85 uint64 + var x86 uint64 + x85, x86 = bits.Add64(x61, x77, uint64(uint1(x84))) + var x87 uint64 + var x88 uint64 + x87, x88 = bits.Add64(x63, x79, uint64(uint1(x86))) + var x89 uint64 + var x90 uint64 + x89, x90 = bits.Add64(x65, x81, uint64(uint1(x88))) + var x91 uint64 + var x92 uint64 + x91, x92 = bits.Add64( + ((uint64(uint1(x66)) + uint64(uint1(x44))) + (uint64(uint1(x58)) + x46)), + (uint64(uint1(x82)) + x70), + uint64(uint1(x90)), + ) + var x93 uint64 + var x94 uint64 + x94, x93 = bits.Mul64(x2, 0x9d671cd581c69bc5) + var x95 uint64 + var x96 uint64 + x96, x95 = bits.Mul64(x2, 0xe697f5e45bcd07c6) + var x97 uint64 + var x98 uint64 + x98, x97 = bits.Mul64(x2, 0x741496c20e7cf878) + var x99 uint64 + var x100 uint64 + x100, x99 = bits.Mul64(x2, 0x896cf21467d7d140) + var x101 uint64 + var x102 uint64 + x101, x102 = bits.Add64(x100, x97, uint64(0x0)) + var x103 uint64 + var x104 uint64 + x103, x104 = bits.Add64(x98, x95, uint64(uint1(x102))) + var x105 uint64 + var x106 uint64 + x105, x106 = bits.Add64(x96, x93, uint64(uint1(x104))) + var x107 uint64 + var x108 uint64 + x107, x108 = bits.Add64(x85, x99, uint64(0x0)) + var x109 uint64 + var x110 uint64 + x109, x110 = bits.Add64(x87, x101, uint64(uint1(x108))) + var x111 uint64 + var x112 uint64 + x111, x112 = bits.Add64(x89, x103, uint64(uint1(x110))) + var x113 uint64 + var x114 uint64 + x113, x114 = bits.Add64(x91, x105, uint64(uint1(x112))) + var x115 uint64 + _, x115 = bits.Mul64(x107, 0x4b0dff665588b13f) + var x117 uint64 + var x118 uint64 + x118, x117 = bits.Mul64(x115, 0xffffffffffffffff) + var x119 uint64 + var x120 uint64 + x120, x119 = bits.Mul64(x115, 0xfffffffffffffffe) + var x121 uint64 + var x122 uint64 + x122, x121 = bits.Mul64(x115, 0xbaaedce6af48a03b) + var x123 uint64 + var x124 uint64 + x124, x123 = bits.Mul64(x115, 0xbfd25e8cd0364141) + var x125 uint64 + var x126 uint64 + x125, x126 = bits.Add64(x124, x121, uint64(0x0)) + var x127 uint64 + var x128 uint64 + x127, x128 = bits.Add64(x122, x119, uint64(uint1(x126))) + var x129 uint64 + var x130 uint64 + x129, x130 = bits.Add64(x120, x117, uint64(uint1(x128))) + var x132 uint64 + _, x132 = bits.Add64(x107, x123, uint64(0x0)) + var x133 uint64 + var x134 uint64 + x133, x134 = bits.Add64(x109, x125, uint64(uint1(x132))) + var x135 uint64 + var x136 uint64 + x135, x136 = bits.Add64(x111, x127, uint64(uint1(x134))) + var x137 uint64 + var x138 uint64 + 
x137, x138 = bits.Add64(x113, x129, uint64(uint1(x136))) + var x139 uint64 + var x140 uint64 + x139, x140 = bits.Add64( + ((uint64(uint1(x114)) + uint64(uint1(x92))) + (uint64(uint1(x106)) + x94)), + (uint64(uint1(x130)) + x118), + uint64(uint1(x138)), + ) + var x141 uint64 + var x142 uint64 + x142, x141 = bits.Mul64(x3, 0x9d671cd581c69bc5) + var x143 uint64 + var x144 uint64 + x144, x143 = bits.Mul64(x3, 0xe697f5e45bcd07c6) + var x145 uint64 + var x146 uint64 + x146, x145 = bits.Mul64(x3, 0x741496c20e7cf878) + var x147 uint64 + var x148 uint64 + x148, x147 = bits.Mul64(x3, 0x896cf21467d7d140) + var x149 uint64 + var x150 uint64 + x149, x150 = bits.Add64(x148, x145, uint64(0x0)) + var x151 uint64 + var x152 uint64 + x151, x152 = bits.Add64(x146, x143, uint64(uint1(x150))) + var x153 uint64 + var x154 uint64 + x153, x154 = bits.Add64(x144, x141, uint64(uint1(x152))) + var x155 uint64 + var x156 uint64 + x155, x156 = bits.Add64(x133, x147, uint64(0x0)) + var x157 uint64 + var x158 uint64 + x157, x158 = bits.Add64(x135, x149, uint64(uint1(x156))) + var x159 uint64 + var x160 uint64 + x159, x160 = bits.Add64(x137, x151, uint64(uint1(x158))) + var x161 uint64 + var x162 uint64 + x161, x162 = bits.Add64(x139, x153, uint64(uint1(x160))) + var x163 uint64 + _, x163 = bits.Mul64(x155, 0x4b0dff665588b13f) + var x165 uint64 + var x166 uint64 + x166, x165 = bits.Mul64(x163, 0xffffffffffffffff) + var x167 uint64 + var x168 uint64 + x168, x167 = bits.Mul64(x163, 0xfffffffffffffffe) + var x169 uint64 + var x170 uint64 + x170, x169 = bits.Mul64(x163, 0xbaaedce6af48a03b) + var x171 uint64 + var x172 uint64 + x172, x171 = bits.Mul64(x163, 0xbfd25e8cd0364141) + var x173 uint64 + var x174 uint64 + x173, x174 = bits.Add64(x172, x169, uint64(0x0)) + var x175 uint64 + var x176 uint64 + x175, x176 = bits.Add64(x170, x167, uint64(uint1(x174))) + var x177 uint64 + var x178 uint64 + x177, x178 = bits.Add64(x168, x165, uint64(uint1(x176))) + var x180 uint64 + _, x180 = bits.Add64(x155, x171, uint64(0x0)) + var x181 uint64 + var x182 uint64 + x181, x182 = bits.Add64(x157, x173, uint64(uint1(x180))) + var x183 uint64 + var x184 uint64 + x183, x184 = bits.Add64(x159, x175, uint64(uint1(x182))) + var x185 uint64 + var x186 uint64 + x185, x186 = bits.Add64(x161, x177, uint64(uint1(x184))) + var x187 uint64 + var x188 uint64 + x187, x188 = bits.Add64( + ((uint64(uint1(x162)) + uint64(uint1(x140))) + (uint64(uint1(x154)) + x142)), + (uint64(uint1(x178)) + x166), + uint64(uint1(x186)), + ) + var x189 uint64 + var x190 uint64 + x189, x190 = bits.Sub64(x181, 0xbfd25e8cd0364141, uint64(0x0)) + var x191 uint64 + var x192 uint64 + x191, x192 = bits.Sub64(x183, 0xbaaedce6af48a03b, uint64(uint1(x190))) + var x193 uint64 + var x194 uint64 + x193, x194 = bits.Sub64(x185, 0xfffffffffffffffe, uint64(uint1(x192))) + var x195 uint64 + var x196 uint64 + x195, x196 = bits.Sub64(x187, 0xffffffffffffffff, uint64(uint1(x194))) + var x198 uint64 + _, x198 = bits.Sub64(uint64(uint1(x188)), uint64(0x0), uint64(uint1(x196))) + var x199 uint64 + cmovznzU64(&x199, uint1(x198), x189, x181) + var x200 uint64 + cmovznzU64(&x200, uint1(x198), x191, x183) + var x201 uint64 + cmovznzU64(&x201, uint1(x198), x193, x185) + var x202 uint64 + cmovznzU64(&x202, uint1(x198), x195, x187) + out1[0] = x199 + out1[1] = x200 + out1[2] = x201 + out1[3] = x202 +} + +// Nonzero outputs a single non-zero word if the input is non-zero and zero otherwise. 
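+// It ORs the four limbs together, so the output is zero exactly when every
+// limb is zero, giving callers a branch-free zero test; the inversion code
+// earlier in this diff pairs such a check with Selectznz in the same spirit.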
+// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// +// Postconditions: +// +// out1 = 0 ↔ eval (from_montgomery arg1) mod m = 0 +// +// Input Bounds: +// +// arg1: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +// +// Output Bounds: +// +// out1: [0x0 ~> 0xffffffffffffffff] +func Nonzero(out1 *uint64, arg1 *[4]uint64) { + x1 := (arg1[0] | (arg1[1] | (arg1[2] | arg1[3]))) + *out1 = x1 +} + +// Selectznz is a multi-limb conditional select. +// +// Postconditions: +// +// eval out1 = (if arg1 = 0 then eval arg2 else eval arg3) +// +// Input Bounds: +// +// arg1: [0x0 ~> 0x1] +// arg2: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +// arg3: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +// +// Output Bounds: +// +// out1: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +func Selectznz(out1 *[4]uint64, arg1 uint1, arg2 *[4]uint64, arg3 *[4]uint64) { + var x1 uint64 + cmovznzU64(&x1, arg1, arg2[0], arg3[0]) + var x2 uint64 + cmovznzU64(&x2, arg1, arg2[1], arg3[1]) + var x3 uint64 + cmovznzU64(&x3, arg1, arg2[2], arg3[2]) + var x4 uint64 + cmovznzU64(&x4, arg1, arg2[3], arg3[3]) + out1[0] = x1 + out1[1] = x2 + out1[2] = x3 + out1[3] = x4 +} + +// ToBytes serializes a field element NOT in the Montgomery domain to bytes in little-endian order. +// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// +// Postconditions: +// +// out1 = map (λ x, ⌊((eval arg1 mod m) mod 2^(8 * (x + 1))) / 2^(8 * x)⌋) [0..31] +// +// Input Bounds: +// +// arg1: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +// +// Output Bounds: +// +// out1: [[0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff]] +func ToBytes(out1 *[32]uint8, arg1 *[4]uint64) { + x1 := arg1[3] + x2 := arg1[2] + x3 := arg1[1] + x4 := arg1[0] + x5 := (uint8(x4) & 0xff) + x6 := (x4 >> 8) + x7 := (uint8(x6) & 0xff) + x8 := (x6 >> 8) + x9 := (uint8(x8) & 0xff) + x10 := (x8 >> 8) + x11 := (uint8(x10) & 0xff) + x12 := (x10 >> 8) + x13 := (uint8(x12) & 0xff) + x14 := (x12 >> 8) + x15 := (uint8(x14) & 0xff) + x16 := (x14 >> 8) + x17 := (uint8(x16) & 0xff) + x18 := uint8((x16 >> 8)) + x19 := (uint8(x3) & 0xff) + x20 := (x3 >> 8) + x21 := (uint8(x20) & 0xff) + x22 := (x20 >> 8) + x23 := (uint8(x22) & 0xff) + x24 := (x22 >> 8) + x25 := (uint8(x24) & 0xff) + x26 := (x24 >> 8) + x27 := (uint8(x26) & 0xff) + x28 := (x26 >> 8) + x29 := (uint8(x28) & 0xff) + x30 := (x28 >> 8) + x31 := (uint8(x30) & 0xff) + x32 := uint8((x30 >> 8)) + x33 := (uint8(x2) & 0xff) + x34 := (x2 >> 8) + x35 := (uint8(x34) & 0xff) + x36 := (x34 >> 8) + x37 := (uint8(x36) & 0xff) + x38 := (x36 >> 8) + x39 := (uint8(x38) & 0xff) + x40 := (x38 >> 8) + x41 := (uint8(x40) & 0xff) + x42 := (x40 >> 8) + x43 := (uint8(x42) & 0xff) + x44 := (x42 >> 8) + x45 := (uint8(x44) & 0xff) + x46 := uint8((x44 >> 8)) + x47 := (uint8(x1) & 0xff) + x48 
:= (x1 >> 8) + x49 := (uint8(x48) & 0xff) + x50 := (x48 >> 8) + x51 := (uint8(x50) & 0xff) + x52 := (x50 >> 8) + x53 := (uint8(x52) & 0xff) + x54 := (x52 >> 8) + x55 := (uint8(x54) & 0xff) + x56 := (x54 >> 8) + x57 := (uint8(x56) & 0xff) + x58 := (x56 >> 8) + x59 := (uint8(x58) & 0xff) + x60 := uint8((x58 >> 8)) + out1[0] = x5 + out1[1] = x7 + out1[2] = x9 + out1[3] = x11 + out1[4] = x13 + out1[5] = x15 + out1[6] = x17 + out1[7] = x18 + out1[8] = x19 + out1[9] = x21 + out1[10] = x23 + out1[11] = x25 + out1[12] = x27 + out1[13] = x29 + out1[14] = x31 + out1[15] = x32 + out1[16] = x33 + out1[17] = x35 + out1[18] = x37 + out1[19] = x39 + out1[20] = x41 + out1[21] = x43 + out1[22] = x45 + out1[23] = x46 + out1[24] = x47 + out1[25] = x49 + out1[26] = x51 + out1[27] = x53 + out1[28] = x55 + out1[29] = x57 + out1[30] = x59 + out1[31] = x60 +} + +// FromBytes deserializes a field element NOT in the Montgomery domain from bytes in little-endian order. +// +// Preconditions: +// +// 0 ≤ bytes_eval arg1 < m +// +// Postconditions: +// +// eval out1 mod m = bytes_eval arg1 mod m +// 0 ≤ eval out1 < m +// +// Input Bounds: +// +// arg1: [[0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff]] +// +// Output Bounds: +// +// out1: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +func FromBytes(out1 *[4]uint64, arg1 *[32]uint8) { + x1 := (uint64(arg1[31]) << 56) + x2 := (uint64(arg1[30]) << 48) + x3 := (uint64(arg1[29]) << 40) + x4 := (uint64(arg1[28]) << 32) + x5 := (uint64(arg1[27]) << 24) + x6 := (uint64(arg1[26]) << 16) + x7 := (uint64(arg1[25]) << 8) + x8 := arg1[24] + x9 := (uint64(arg1[23]) << 56) + x10 := (uint64(arg1[22]) << 48) + x11 := (uint64(arg1[21]) << 40) + x12 := (uint64(arg1[20]) << 32) + x13 := (uint64(arg1[19]) << 24) + x14 := (uint64(arg1[18]) << 16) + x15 := (uint64(arg1[17]) << 8) + x16 := arg1[16] + x17 := (uint64(arg1[15]) << 56) + x18 := (uint64(arg1[14]) << 48) + x19 := (uint64(arg1[13]) << 40) + x20 := (uint64(arg1[12]) << 32) + x21 := (uint64(arg1[11]) << 24) + x22 := (uint64(arg1[10]) << 16) + x23 := (uint64(arg1[9]) << 8) + x24 := arg1[8] + x25 := (uint64(arg1[7]) << 56) + x26 := (uint64(arg1[6]) << 48) + x27 := (uint64(arg1[5]) << 40) + x28 := (uint64(arg1[4]) << 32) + x29 := (uint64(arg1[3]) << 24) + x30 := (uint64(arg1[2]) << 16) + x31 := (uint64(arg1[1]) << 8) + x32 := arg1[0] + x33 := (x31 + uint64(x32)) + x34 := (x30 + x33) + x35 := (x29 + x34) + x36 := (x28 + x35) + x37 := (x27 + x36) + x38 := (x26 + x37) + x39 := (x25 + x38) + x40 := (x23 + uint64(x24)) + x41 := (x22 + x40) + x42 := (x21 + x41) + x43 := (x20 + x42) + x44 := (x19 + x43) + x45 := (x18 + x44) + x46 := (x17 + x45) + x47 := (x15 + uint64(x16)) + x48 := (x14 + x47) + x49 := (x13 + x48) + x50 := (x12 + x49) + x51 := (x11 + x50) + x52 := (x10 + x51) + x53 := (x9 + x52) + x54 := (x7 + uint64(x8)) + x55 := (x6 + x54) + x56 := (x5 + x55) + x57 := (x4 + x56) + x58 := (x3 + x57) + x59 := (x2 + x58) + x60 := (x1 + x59) + out1[0] = x39 + out1[1] = x46 + out1[2] = x53 + out1[3] = x60 +} + +// SetOne returns the field element one in 
the Montgomery domain. +// +// Postconditions: +// +// eval (from_montgomery out1) mod m = 1 mod m +// 0 ≤ eval out1 < m +func SetOne(out1 *MontgomeryDomainFieldElement) { + out1[0] = 0x402da1732fc9bebf + out1[1] = 0x4551231950b75fc4 + out1[2] = uint64(0x1) + out1[3] = uint64(0x0) +} + +// Msat returns the saturated representation of the prime modulus. +// +// Postconditions: +// +// twos_complement_eval out1 = m +// 0 ≤ eval out1 < m +// +// Output Bounds: +// +// out1: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +func Msat(out1 *[5]uint64) { + out1[0] = 0xbfd25e8cd0364141 + out1[1] = 0xbaaedce6af48a03b + out1[2] = 0xfffffffffffffffe + out1[3] = 0xffffffffffffffff + out1[4] = uint64(0x0) +} + +// Divstep computes a divstep. +// +// Preconditions: +// +// 0 ≤ eval arg4 < m +// 0 ≤ eval arg5 < m +// +// Postconditions: +// +// out1 = (if 0 < arg1 ∧ (twos_complement_eval arg3) is odd then 1 - arg1 else 1 + arg1) +// twos_complement_eval out2 = (if 0 < arg1 ∧ (twos_complement_eval arg3) is odd then twos_complement_eval arg3 else twos_complement_eval arg2) +// twos_complement_eval out3 = (if 0 < arg1 ∧ (twos_complement_eval arg3) is odd then ⌊(twos_complement_eval arg3 - twos_complement_eval arg2) / 2⌋ else ⌊(twos_complement_eval arg3 + (twos_complement_eval arg3 mod 2) * twos_complement_eval arg2) / 2⌋) +// eval (from_montgomery out4) mod m = (if 0 < arg1 ∧ (twos_complement_eval arg3) is odd then (2 * eval (from_montgomery arg5)) mod m else (2 * eval (from_montgomery arg4)) mod m) +// eval (from_montgomery out5) mod m = (if 0 < arg1 ∧ (twos_complement_eval arg3) is odd then (eval (from_montgomery arg4) - eval (from_montgomery arg4)) mod m else (eval (from_montgomery arg5) + (twos_complement_eval arg3 mod 2) * eval (from_montgomery arg4)) mod m) +// 0 ≤ eval out5 < m +// 0 ≤ eval out5 < m +// 0 ≤ eval out2 < m +// 0 ≤ eval out3 < m +// +// Input Bounds: +// +// arg1: [0x0 ~> 0xffffffffffffffff] +// arg2: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +// arg3: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +// arg4: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +// arg5: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +// +// Output Bounds: +// +// out1: [0x0 ~> 0xffffffffffffffff] +// out2: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +// out3: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +// out4: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +// out5: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +func Divstep( + out1 *uint64, + out2 *[5]uint64, + out3 *[5]uint64, + out4 *[4]uint64, + out5 *[4]uint64, + arg1 uint64, + arg2 *[5]uint64, + arg3 *[5]uint64, + arg4 *[4]uint64, + arg5 *[4]uint64, +) { + var x1 uint64 + x1, _ = bits.Add64((^arg1), uint64(0x1), uint64(0x0)) + x3 := (uint1((x1 >> 63)) & (uint1(arg3[0]) & 0x1)) + 
var x4 uint64 + x4, _ = bits.Add64((^arg1), uint64(0x1), uint64(0x0)) + var x6 uint64 + cmovznzU64(&x6, x3, arg1, x4) + var x7 uint64 + cmovznzU64(&x7, x3, arg2[0], arg3[0]) + var x8 uint64 + cmovznzU64(&x8, x3, arg2[1], arg3[1]) + var x9 uint64 + cmovznzU64(&x9, x3, arg2[2], arg3[2]) + var x10 uint64 + cmovznzU64(&x10, x3, arg2[3], arg3[3]) + var x11 uint64 + cmovznzU64(&x11, x3, arg2[4], arg3[4]) + var x12 uint64 + var x13 uint64 + x12, x13 = bits.Add64(uint64(0x1), (^arg2[0]), uint64(0x0)) + var x14 uint64 + var x15 uint64 + x14, x15 = bits.Add64(uint64(0x0), (^arg2[1]), uint64(uint1(x13))) + var x16 uint64 + var x17 uint64 + x16, x17 = bits.Add64(uint64(0x0), (^arg2[2]), uint64(uint1(x15))) + var x18 uint64 + var x19 uint64 + x18, x19 = bits.Add64(uint64(0x0), (^arg2[3]), uint64(uint1(x17))) + var x20 uint64 + x20, _ = bits.Add64(uint64(0x0), (^arg2[4]), uint64(uint1(x19))) + var x22 uint64 + cmovznzU64(&x22, x3, arg3[0], x12) + var x23 uint64 + cmovznzU64(&x23, x3, arg3[1], x14) + var x24 uint64 + cmovznzU64(&x24, x3, arg3[2], x16) + var x25 uint64 + cmovznzU64(&x25, x3, arg3[3], x18) + var x26 uint64 + cmovznzU64(&x26, x3, arg3[4], x20) + var x27 uint64 + cmovznzU64(&x27, x3, arg4[0], arg5[0]) + var x28 uint64 + cmovznzU64(&x28, x3, arg4[1], arg5[1]) + var x29 uint64 + cmovznzU64(&x29, x3, arg4[2], arg5[2]) + var x30 uint64 + cmovznzU64(&x30, x3, arg4[3], arg5[3]) + var x31 uint64 + var x32 uint64 + x31, x32 = bits.Add64(x27, x27, uint64(0x0)) + var x33 uint64 + var x34 uint64 + x33, x34 = bits.Add64(x28, x28, uint64(uint1(x32))) + var x35 uint64 + var x36 uint64 + x35, x36 = bits.Add64(x29, x29, uint64(uint1(x34))) + var x37 uint64 + var x38 uint64 + x37, x38 = bits.Add64(x30, x30, uint64(uint1(x36))) + var x39 uint64 + var x40 uint64 + x39, x40 = bits.Sub64(x31, 0xbfd25e8cd0364141, uint64(0x0)) + var x41 uint64 + var x42 uint64 + x41, x42 = bits.Sub64(x33, 0xbaaedce6af48a03b, uint64(uint1(x40))) + var x43 uint64 + var x44 uint64 + x43, x44 = bits.Sub64(x35, 0xfffffffffffffffe, uint64(uint1(x42))) + var x45 uint64 + var x46 uint64 + x45, x46 = bits.Sub64(x37, 0xffffffffffffffff, uint64(uint1(x44))) + var x48 uint64 + _, x48 = bits.Sub64(uint64(uint1(x38)), uint64(0x0), uint64(uint1(x46))) + x49 := arg4[3] + x50 := arg4[2] + x51 := arg4[1] + x52 := arg4[0] + var x53 uint64 + var x54 uint64 + x53, x54 = bits.Sub64(uint64(0x0), x52, uint64(0x0)) + var x55 uint64 + var x56 uint64 + x55, x56 = bits.Sub64(uint64(0x0), x51, uint64(uint1(x54))) + var x57 uint64 + var x58 uint64 + x57, x58 = bits.Sub64(uint64(0x0), x50, uint64(uint1(x56))) + var x59 uint64 + var x60 uint64 + x59, x60 = bits.Sub64(uint64(0x0), x49, uint64(uint1(x58))) + var x61 uint64 + cmovznzU64(&x61, uint1(x60), uint64(0x0), 0xffffffffffffffff) + var x62 uint64 + var x63 uint64 + x62, x63 = bits.Add64(x53, (x61 & 0xbfd25e8cd0364141), uint64(0x0)) + var x64 uint64 + var x65 uint64 + x64, x65 = bits.Add64(x55, (x61 & 0xbaaedce6af48a03b), uint64(uint1(x63))) + var x66 uint64 + var x67 uint64 + x66, x67 = bits.Add64(x57, (x61 & 0xfffffffffffffffe), uint64(uint1(x65))) + var x68 uint64 + x68, _ = bits.Add64(x59, x61, uint64(uint1(x67))) + var x70 uint64 + cmovznzU64(&x70, x3, arg5[0], x62) + var x71 uint64 + cmovznzU64(&x71, x3, arg5[1], x64) + var x72 uint64 + cmovznzU64(&x72, x3, arg5[2], x66) + var x73 uint64 + cmovznzU64(&x73, x3, arg5[3], x68) + x74 := (uint1(x22) & 0x1) + var x75 uint64 + cmovznzU64(&x75, x74, uint64(0x0), x7) + var x76 uint64 + cmovznzU64(&x76, x74, uint64(0x0), x8) + var x77 uint64 + cmovznzU64(&x77, 
x74, uint64(0x0), x9) + var x78 uint64 + cmovznzU64(&x78, x74, uint64(0x0), x10) + var x79 uint64 + cmovznzU64(&x79, x74, uint64(0x0), x11) + var x80 uint64 + var x81 uint64 + x80, x81 = bits.Add64(x22, x75, uint64(0x0)) + var x82 uint64 + var x83 uint64 + x82, x83 = bits.Add64(x23, x76, uint64(uint1(x81))) + var x84 uint64 + var x85 uint64 + x84, x85 = bits.Add64(x24, x77, uint64(uint1(x83))) + var x86 uint64 + var x87 uint64 + x86, x87 = bits.Add64(x25, x78, uint64(uint1(x85))) + var x88 uint64 + x88, _ = bits.Add64(x26, x79, uint64(uint1(x87))) + var x90 uint64 + cmovznzU64(&x90, x74, uint64(0x0), x27) + var x91 uint64 + cmovznzU64(&x91, x74, uint64(0x0), x28) + var x92 uint64 + cmovznzU64(&x92, x74, uint64(0x0), x29) + var x93 uint64 + cmovznzU64(&x93, x74, uint64(0x0), x30) + var x94 uint64 + var x95 uint64 + x94, x95 = bits.Add64(x70, x90, uint64(0x0)) + var x96 uint64 + var x97 uint64 + x96, x97 = bits.Add64(x71, x91, uint64(uint1(x95))) + var x98 uint64 + var x99 uint64 + x98, x99 = bits.Add64(x72, x92, uint64(uint1(x97))) + var x100 uint64 + var x101 uint64 + x100, x101 = bits.Add64(x73, x93, uint64(uint1(x99))) + var x102 uint64 + var x103 uint64 + x102, x103 = bits.Sub64(x94, 0xbfd25e8cd0364141, uint64(0x0)) + var x104 uint64 + var x105 uint64 + x104, x105 = bits.Sub64(x96, 0xbaaedce6af48a03b, uint64(uint1(x103))) + var x106 uint64 + var x107 uint64 + x106, x107 = bits.Sub64(x98, 0xfffffffffffffffe, uint64(uint1(x105))) + var x108 uint64 + var x109 uint64 + x108, x109 = bits.Sub64(x100, 0xffffffffffffffff, uint64(uint1(x107))) + var x111 uint64 + _, x111 = bits.Sub64(uint64(uint1(x101)), uint64(0x0), uint64(uint1(x109))) + var x112 uint64 + x112, _ = bits.Add64(x6, uint64(0x1), uint64(0x0)) + x114 := ((x80 >> 1) | ((x82 << 63) & 0xffffffffffffffff)) + x115 := ((x82 >> 1) | ((x84 << 63) & 0xffffffffffffffff)) + x116 := ((x84 >> 1) | ((x86 << 63) & 0xffffffffffffffff)) + x117 := ((x86 >> 1) | ((x88 << 63) & 0xffffffffffffffff)) + x118 := ((x88 & 0x8000000000000000) | (x88 >> 1)) + var x119 uint64 + cmovznzU64(&x119, uint1(x48), x39, x31) + var x120 uint64 + cmovznzU64(&x120, uint1(x48), x41, x33) + var x121 uint64 + cmovznzU64(&x121, uint1(x48), x43, x35) + var x122 uint64 + cmovznzU64(&x122, uint1(x48), x45, x37) + var x123 uint64 + cmovznzU64(&x123, uint1(x111), x102, x94) + var x124 uint64 + cmovznzU64(&x124, uint1(x111), x104, x96) + var x125 uint64 + cmovznzU64(&x125, uint1(x111), x106, x98) + var x126 uint64 + cmovznzU64(&x126, uint1(x111), x108, x100) + *out1 = x112 + out2[0] = x7 + out2[1] = x8 + out2[2] = x9 + out2[3] = x10 + out2[4] = x11 + out3[0] = x114 + out3[1] = x115 + out3[2] = x116 + out3[3] = x117 + out3[4] = x118 + out4[0] = x119 + out4[1] = x120 + out4[2] = x121 + out4[3] = x122 + out5[0] = x123 + out5[1] = x124 + out5[2] = x125 + out5[3] = x126 +} + +// DivstepPrecomp returns the precomputed value for Bernstein-Yang-inversion (in montgomery form). 
+// +// Postconditions: +// +// eval (from_montgomery out1) = ⌊(m - 1) / 2⌋^(if ⌊log2 m⌋ + 1 < 46 then ⌊(49 * (⌊log2 m⌋ + 1) + 80) / 17⌋ else ⌊(49 * (⌊log2 m⌋ + 1) + 57) / 17⌋) +// 0 ≤ eval out1 < m +// +// Output Bounds: +// +// out1: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +func DivstepPrecomp(out1 *[4]uint64) { + out1[0] = 0xd7431a4d2b9cb4e9 + out1[1] = 0xab67d35a32d9c503 + out1[2] = 0xadf6c7e5859ce35f + out1[3] = 0x615441451df6c379 +} diff --git a/core/curves/native/k256/point.go b/core/curves/native/k256/point.go new file mode 100644 index 0000000..1ef2225 --- /dev/null +++ b/core/curves/native/k256/point.go @@ -0,0 +1,475 @@ +package k256 + +import ( + "sync" + + "github.com/sonr-io/sonr/crypto/core/curves/native" + "github.com/sonr-io/sonr/crypto/core/curves/native/k256/fp" + "github.com/sonr-io/sonr/crypto/internal" +) + +var ( + k256PointInitonce sync.Once + k256PointParams native.EllipticPointParams + k256PointSswuInitOnce sync.Once + k256PointSswuParams native.SswuParams + k256PointIsogenyInitOnce sync.Once + k256PointIsogenyParams native.IsogenyParams +) + +func K256PointNew() *native.EllipticPoint { + return &native.EllipticPoint{ + X: fp.K256FpNew(), + Y: fp.K256FpNew(), + Z: fp.K256FpNew(), + Params: getK256PointParams(), + Arithmetic: &k256PointArithmetic{}, + } +} + +func k256PointParamsInit() { + k256PointParams = native.EllipticPointParams{ + A: fp.K256FpNew(), + B: fp.K256FpNew().SetUint64(7), + Gx: fp.K256FpNew().SetLimbs(&[native.FieldLimbs]uint64{ + 0x59f2815b16f81798, + 0x029bfcdb2dce28d9, + 0x55a06295ce870b07, + 0x79be667ef9dcbbac, + }), + Gy: fp.K256FpNew().SetLimbs(&[native.FieldLimbs]uint64{ + 0x9c47d08ffb10d4b8, + 0xfd17b448a6855419, + 0x5da4fbfc0e1108a8, + 0x483ada7726a3c465, + }), + BitSize: 256, + Name: "secp256k1", + } +} + +func getK256PointParams() *native.EllipticPointParams { + k256PointInitonce.Do(k256PointParamsInit) + return &k256PointParams +} + +func getK256PointSswuParams() *native.SswuParams { + k256PointSswuInitOnce.Do(k256PointSswuParamsInit) + return &k256PointSswuParams +} + +func k256PointSswuParamsInit() { + // Taken from https://datatracker.ietf.org/doc/html/draft-irtf-cfrg-hash-to-curve-11#section-8.7 + //params := btcec.S256().Params() + // + //// c1 = (q - 3) / 4 + //c1 := new(big.Int).Set(params.P) + //c1.Sub(c1, big.NewInt(3)) + //c1.Rsh(c1, 2) + // + //a, _ := new(big.Int).SetString("3f8731abdd661adca08a5558f0f5d272e953d363cb6f0e5d405447c01a444533", 16) + //b := big.NewInt(1771) + //z := big.NewInt(-11) + //z.Mod(z, params.P) + //// sqrt(-z^3) + //zTmp := new(big.Int).Exp(z, big.NewInt(3), nil) + //zTmp = zTmp.Neg(zTmp) + //zTmp.Mod(zTmp, params.P) + //c2 := new(big.Int).ModSqrt(zTmp, params.P) + // + //var tBytes [32]byte + //c1.FillBytes(tBytes[:]) + //newC1 := [native.FieldLimbs]uint64{ + // binary.BigEndian.Uint64(tBytes[24:32]), + // binary.BigEndian.Uint64(tBytes[16:24]), + // binary.BigEndian.Uint64(tBytes[8:16]), + // binary.BigEndian.Uint64(tBytes[:8]), + //} + //fp.K256FpNew().Arithmetic.ToMontgomery(&newC1, &newC1) + //c2.FillBytes(tBytes[:]) + //newC2 := [native.FieldLimbs]uint64{ + // binary.BigEndian.Uint64(tBytes[24:32]), + // binary.BigEndian.Uint64(tBytes[16:24]), + // binary.BigEndian.Uint64(tBytes[8:16]), + // binary.BigEndian.Uint64(tBytes[:8]), + //} + //fp.K256FpNew().Arithmetic.ToMontgomery(&newC2, &newC2) + //a.FillBytes(tBytes[:]) + //newA := [native.FieldLimbs]uint64{ + // binary.BigEndian.Uint64(tBytes[24:32]), + // 
binary.BigEndian.Uint64(tBytes[16:24]), + // binary.BigEndian.Uint64(tBytes[8:16]), + // binary.BigEndian.Uint64(tBytes[:8]), + //} + //fp.K256FpNew().Arithmetic.ToMontgomery(&newA, &newA) + //b.FillBytes(tBytes[:]) + //newB := [native.FieldLimbs]uint64{ + // binary.BigEndian.Uint64(tBytes[24:32]), + // binary.BigEndian.Uint64(tBytes[16:24]), + // binary.BigEndian.Uint64(tBytes[8:16]), + // binary.BigEndian.Uint64(tBytes[:8]), + //} + //fp.K256FpNew().Arithmetic.ToMontgomery(&newB, &newB) + //z.FillBytes(tBytes[:]) + //newZ := [native.FieldLimbs]uint64{ + // binary.BigEndian.Uint64(tBytes[24:32]), + // binary.BigEndian.Uint64(tBytes[16:24]), + // binary.BigEndian.Uint64(tBytes[8:16]), + // binary.BigEndian.Uint64(tBytes[:8]), + //} + //fp.K256FpNew().Arithmetic.ToMontgomery(&newZ, &newZ) + + k256PointSswuParams = native.SswuParams{ + // (q -3) // 4 + C1: [native.FieldLimbs]uint64{ + 0xffffffffbfffff0b, + 0xffffffffffffffff, + 0xffffffffffffffff, + 0x3fffffffffffffff, + }, + // sqrt(-z^3) + C2: [native.FieldLimbs]uint64{ + 0x5b57ba53a30d1520, + 0x908f7cef34a762eb, + 0x190b0ffe068460c8, + 0x98a9828e8f00ff62, + }, + // 0x3f8731abdd661adca08a5558f0f5d272e953d363cb6f0e5d405447c01a444533 + A: [native.FieldLimbs]uint64{ + 0xdb714ce7b18444a1, + 0x4458ce38a32a19a2, + 0xa0e58ae2837bfbf0, + 0x505aabc49336d959, + }, + // 1771 + B: [native.FieldLimbs]uint64{ + 0x000006eb001a66db, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + }, + // -11 + Z: [native.FieldLimbs]uint64{ + 0xfffffff3ffffd234, + 0xffffffffffffffff, + 0xffffffffffffffff, + 0xffffffffffffffff, + }, + } +} + +func k256PointIsogenyInit() { + k256PointIsogenyParams = native.IsogenyParams{ + XNum: [][native.FieldLimbs]uint64{ + { + 0x0000003b1c72a8b4, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + }, + { + 0xd5bd51a17b2edf46, + 0x2cc06f7c86b86bcd, + 0x50b37e74f3294a00, + 0xeb32314a9da73679, + }, + { + 0x48c18b1b0d2191bd, + 0x5a3f74c29bfccce3, + 0xbe55a02e5e8bd357, + 0x09bf218d11fff905, + }, + { + 0x000000001c71c789, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + }, + }, + XDen: [][native.FieldLimbs]uint64{ + { + 0x8af79c1ffdf1e7fa, + 0xb84bc22235735eb5, + 0x82ee5655a55ace04, + 0xce4b32dea0a2becb, + }, + { + 0x8ecde3f3762e1fa5, + 0x2c3b1ad77be333fd, + 0xb102a1a152ea6e12, + 0x57b82df5a1ffc133, + }, + { + 0x00000001000003d1, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + }, + }, + YNum: [][native.FieldLimbs]uint64{ + { + 0xffffffce425e12c3, + 0xffffffffffffffff, + 0xffffffffffffffff, + 0xffffffffffffffff, + }, + { + 0xba60d5fd6e56922e, + 0x4ec198c898a435f2, + 0x27e77a577b9764ab, + 0xb3b80a1197651d12, + }, + { + 0xa460c58d0690c6f6, + 0xad1fba614dfe6671, + 0xdf2ad0172f45e9ab, + 0x84df90c688fffc82, + }, + { + 0x00000000097b4283, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + }, + }, + YDen: [][native.FieldLimbs]uint64{ + { + 0xfffffd0afff4b6fb, + 0xffffffffffffffff, + 0xffffffffffffffff, + 0xffffffffffffffff, + }, + { + 0xa0e6d461f9d5bf90, + 0x28e34666a05a1c20, + 0x88cb0300f0106a0e, + 0x6ae1989be1e83c62, + }, + { + 0x5634d5edb1453160, + 0x4258a84339d4cdfc, + 0x8983f271fc5fa51b, + 0x039444f072ffa1cd, + }, + { + 0x00000001000003d1, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + }, + }, + } +} + +func getK256PointIsogenyParams() *native.IsogenyParams { + k256PointIsogenyInitOnce.Do(k256PointIsogenyInit) + return &k256PointIsogenyParams +} + +type k256PointArithmetic struct{} + +func (k k256PointArithmetic) Hash( + out 
*native.EllipticPoint, + hash *native.EllipticPointHasher, + msg, dst []byte, +) error { + var u []byte + sswuParams := getK256PointSswuParams() + isoParams := getK256PointIsogenyParams() + + switch hash.Type() { + case native.XMD: + u = native.ExpandMsgXmd(hash, msg, dst, 96) + case native.XOF: + u = native.ExpandMsgXof(hash, msg, dst, 96) + } + var buf [64]byte + copy(buf[:48], internal.ReverseScalarBytes(u[:48])) + u0 := fp.K256FpNew().SetBytesWide(&buf) + copy(buf[:48], internal.ReverseScalarBytes(u[48:])) + u1 := fp.K256FpNew().SetBytesWide(&buf) + + r0x, r0y := sswuParams.Osswu3mod4(u0) + r1x, r1y := sswuParams.Osswu3mod4(u1) + q0x, q0y := isoParams.Map(r0x, r0y) + q1x, q1y := isoParams.Map(r1x, r1y) + out.X = q0x + out.Y = q0y + out.Z.SetOne() + tv := &native.EllipticPoint{ + X: q1x, + Y: q1y, + Z: fp.K256FpNew().SetOne(), + } + k.Add(out, out, tv) + return nil +} + +func (k k256PointArithmetic) Double(out, arg *native.EllipticPoint) { + // Addition formula from Renes-Costello-Batina 2015 + // (https://eprint.iacr.org/2015/1060 Algorithm 9) + var yy, zz, xy2, bzz, bzz3, bzz9 [native.FieldLimbs]uint64 + var yyMBzz9, yyPBzz3, yyzz, yyzz8, t [native.FieldLimbs]uint64 + var x, y, z [native.FieldLimbs]uint64 + f := arg.X.Arithmetic + + f.Square(&yy, &arg.Y.Value) + f.Square(&zz, &arg.Z.Value) + f.Mul(&xy2, &arg.X.Value, &arg.Y.Value) + f.Add(&xy2, &xy2, &xy2) + f.Mul(&bzz, &zz, &arg.Params.B.Value) + f.Add(&bzz3, &bzz, &bzz) + f.Add(&bzz3, &bzz3, &bzz) + f.Add(&bzz9, &bzz3, &bzz3) + f.Add(&bzz9, &bzz9, &bzz3) + f.Neg(&yyMBzz9, &bzz9) + f.Add(&yyMBzz9, &yyMBzz9, &yy) + f.Add(&yyPBzz3, &yy, &bzz3) + f.Mul(&yyzz, &yy, &zz) + f.Add(&yyzz8, &yyzz, &yyzz) + f.Add(&yyzz8, &yyzz8, &yyzz8) + f.Add(&yyzz8, &yyzz8, &yyzz8) + f.Add(&t, &yyzz8, &yyzz8) + f.Add(&t, &t, &yyzz8) + f.Mul(&t, &t, &arg.Params.B.Value) + + f.Mul(&x, &xy2, &yyMBzz9) + + f.Mul(&y, &yyMBzz9, &yyPBzz3) + f.Add(&y, &y, &t) + + f.Mul(&z, &yy, &arg.Y.Value) + f.Mul(&z, &z, &arg.Z.Value) + f.Add(&z, &z, &z) + f.Add(&z, &z, &z) + f.Add(&z, &z, &z) + + out.X.Value = x + out.Y.Value = y + out.Z.Value = z +} + +func (k k256PointArithmetic) Add(out, arg1, arg2 *native.EllipticPoint) { + // Addition formula from Renes-Costello-Batina 2015 + // (https://eprint.iacr.org/2015/1060 Algorithm 7). 
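+	// Note (descriptive comment, not in the generated source): the field
+	// operations below never branch on secret data. The Selectznz calls at
+	// the end of this function are constant-time selects that return arg2
+	// when arg1 is the identity (Z = 0) and arg1 when arg2 is the identity,
+	// so either input may be the point at infinity.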
+ var xx, yy, zz, nXxYy, nYyZz, nXxZz [native.FieldLimbs]uint64 + var tv1, tv2, xyPairs, yzPairs, xzPairs [native.FieldLimbs]uint64 + var bzz, bzz3, yyMBzz3, yyPBzz3, byz [native.FieldLimbs]uint64 + var byz3, xx3, bxx9, x, y, z [native.FieldLimbs]uint64 + f := arg1.X.Arithmetic + + f.Mul(&xx, &arg1.X.Value, &arg2.X.Value) + f.Mul(&yy, &arg1.Y.Value, &arg2.Y.Value) + f.Mul(&zz, &arg1.Z.Value, &arg2.Z.Value) + + f.Add(&nXxYy, &xx, &yy) + f.Neg(&nXxYy, &nXxYy) + + f.Add(&nYyZz, &yy, &zz) + f.Neg(&nYyZz, &nYyZz) + + f.Add(&nXxZz, &xx, &zz) + f.Neg(&nXxZz, &nXxZz) + + f.Add(&tv1, &arg1.X.Value, &arg1.Y.Value) + f.Add(&tv2, &arg2.X.Value, &arg2.Y.Value) + f.Mul(&xyPairs, &tv1, &tv2) + f.Add(&xyPairs, &xyPairs, &nXxYy) + + f.Add(&tv1, &arg1.Y.Value, &arg1.Z.Value) + f.Add(&tv2, &arg2.Y.Value, &arg2.Z.Value) + f.Mul(&yzPairs, &tv1, &tv2) + f.Add(&yzPairs, &yzPairs, &nYyZz) + + f.Add(&tv1, &arg1.X.Value, &arg1.Z.Value) + f.Add(&tv2, &arg2.X.Value, &arg2.Z.Value) + f.Mul(&xzPairs, &tv1, &tv2) + f.Add(&xzPairs, &xzPairs, &nXxZz) + + f.Mul(&bzz, &zz, &arg1.Params.B.Value) + f.Add(&bzz3, &bzz, &bzz) + f.Add(&bzz3, &bzz3, &bzz) + + f.Neg(&yyMBzz3, &bzz3) + f.Add(&yyMBzz3, &yyMBzz3, &yy) + + f.Add(&yyPBzz3, &yy, &bzz3) + + f.Mul(&byz, &yzPairs, &arg1.Params.B.Value) + f.Add(&byz3, &byz, &byz) + f.Add(&byz3, &byz3, &byz) + + f.Add(&xx3, &xx, &xx) + f.Add(&xx3, &xx3, &xx) + + f.Add(&bxx9, &xx3, &xx3) + f.Add(&bxx9, &bxx9, &xx3) + f.Mul(&bxx9, &bxx9, &arg1.Params.B.Value) + + f.Mul(&tv1, &xyPairs, &yyMBzz3) + f.Mul(&tv2, &byz3, &xzPairs) + f.Neg(&tv2, &tv2) + f.Add(&x, &tv1, &tv2) + + f.Mul(&tv1, &yyPBzz3, &yyMBzz3) + f.Mul(&tv2, &bxx9, &xzPairs) + f.Add(&y, &tv1, &tv2) + + f.Mul(&tv1, &yzPairs, &yyPBzz3) + f.Mul(&tv2, &xx3, &xyPairs) + f.Add(&z, &tv1, &tv2) + + e1 := arg1.Z.IsZero() + e2 := arg2.Z.IsZero() + + // If arg1 is identity set it to arg2 + f.Selectznz(&z, &z, &arg2.Z.Value, e1) + f.Selectznz(&y, &y, &arg2.Y.Value, e1) + f.Selectznz(&x, &x, &arg2.X.Value, e1) + // If arg2 is identity set it to arg1 + f.Selectznz(&z, &z, &arg1.Z.Value, e2) + f.Selectznz(&y, &y, &arg1.Y.Value, e2) + f.Selectznz(&x, &x, &arg1.X.Value, e2) + + out.X.Value = x + out.Y.Value = y + out.Z.Value = z +} + +func (k k256PointArithmetic) IsOnCurve(arg *native.EllipticPoint) bool { + affine := K256PointNew() + k.ToAffine(affine, arg) + lhs := fp.K256FpNew().Square(affine.Y) + rhs := fp.K256FpNew() + k.RhsEq(rhs, affine.X) + return lhs.Equal(rhs) == 1 +} + +func (k k256PointArithmetic) ToAffine(out, arg *native.EllipticPoint) { + var wasInverted int + var zero, x, y, z [native.FieldLimbs]uint64 + f := arg.X.Arithmetic + + f.Invert(&wasInverted, &z, &arg.Z.Value) + f.Mul(&x, &arg.X.Value, &z) + f.Mul(&y, &arg.Y.Value, &z) + + out.Z.SetOne() + // If point at infinity this does nothing + f.Selectznz(&x, &zero, &x, wasInverted) + f.Selectznz(&y, &zero, &y, wasInverted) + f.Selectznz(&z, &zero, &out.Z.Value, wasInverted) + + out.X.Value = x + out.Y.Value = y + out.Z.Value = z + out.Params = arg.Params + out.Arithmetic = arg.Arithmetic +} + +func (k k256PointArithmetic) RhsEq(out, x *native.Field) { + // Elliptic curve equation for secp256k1 is: y^2 = x^3 + 7 + out.Square(x) + out.Mul(out, x) + out.Add(out, getK256PointParams().B) +} diff --git a/core/curves/native/k256/point_test.go b/core/curves/native/k256/point_test.go new file mode 100644 index 0000000..8a7f52d --- /dev/null +++ b/core/curves/native/k256/point_test.go @@ -0,0 +1,19 @@ +package k256_test + +import ( + "testing" + + "github.com/stretchr/testify/require" + + 
"github.com/sonr-io/sonr/crypto/core/curves/native" + "github.com/sonr-io/sonr/crypto/core/curves/native/k256" +) + +func TestK256PointArithmetic_Hash(t *testing.T) { + var b [32]byte + sc, err := k256.K256PointNew().Hash(b[:], native.EllipticPointHasherSha256()) + + require.NoError(t, err) + require.True(t, !sc.IsIdentity()) + require.True(t, sc.IsOnCurve()) +} diff --git a/core/curves/native/osswu.go b/core/curves/native/osswu.go new file mode 100755 index 0000000..2ea6743 --- /dev/null +++ b/core/curves/native/osswu.go @@ -0,0 +1,82 @@ +package native + +// SswuParams for computing the Simplified SWU mapping +// for hash to curve implementations +type SswuParams struct { + C1, C2, A, B, Z [FieldLimbs]uint64 +} + +// Osswu3mod4 computes the simplified map optmized for 3 mod 4 primes +// https://tools.ietf.org/html/draft-irtf-cfrg-hash-to-curve-11#appendix-G.2.1 +func (p *SswuParams) Osswu3mod4(u *Field) (x, y *Field) { + var tv1, tv2, tv3, tv4, xd, x1n, x2n, gxd, gx1, aNeg, zA, y1, y2 [FieldLimbs]uint64 + var wasInverted int + u.Arithmetic.Mul(&tv1, &u.Value, &u.Value) // tv1 = u^2 + u.Arithmetic.Mul(&tv3, &p.Z, &tv1) // tv3 = z * tv1 + u.Arithmetic.Square(&tv2, &tv3) // tv2 = tv3^2 + u.Arithmetic.Add(&xd, &tv2, &tv3) // xd = tv2 + tv3 + u.Arithmetic.Add(&x1n, &u.Params.R, &xd) // x1n = (xd + 1) + u.Arithmetic.Mul(&x1n, &x1n, &p.B) // x1n * B + u.Arithmetic.Neg(&aNeg, &p.A) + u.Arithmetic.Mul(&xd, &xd, &aNeg) // xd = -A * xd + + xdIsZero := (&Field{ + Value: xd, + }).IsZero() + u.Arithmetic.Mul(&zA, &p.Z, &p.A) + u.Arithmetic.Selectznz(&xd, &xd, &zA, xdIsZero) // xd = z * A if xd == 0 + + u.Arithmetic.Square(&tv2, &xd) // tv2 = xd^2 + u.Arithmetic.Mul(&gxd, &tv2, &xd) // gxd = tv2 * xd + u.Arithmetic.Mul(&tv2, &tv2, &p.A) // tv2 = A * tv2 + + u.Arithmetic.Square(&gx1, &x1n) // gx1 = x1n^2 + u.Arithmetic.Add(&gx1, &gx1, &tv2) // gx1 = gx1 + tv2 + u.Arithmetic.Mul(&gx1, &gx1, &x1n) // gx1 = gx1 * x1n + u.Arithmetic.Mul(&tv2, &gxd, &p.B) // tv2 = B * gxd + u.Arithmetic.Add(&gx1, &gx1, &tv2) // gx1 = gx1 + tv2 + + u.Arithmetic.Square(&tv4, &gxd) // tv4 = gxd^2 + u.Arithmetic.Mul(&tv2, &gx1, &gxd) // tv2 = gx1 * gxd + u.Arithmetic.Mul(&tv4, &tv4, &tv2) // tv4 = tv4 * tv2 + + Pow(&y1, &tv4, &p.C1, u.Params, u.Arithmetic) // y1 = tv4^C1 + u.Arithmetic.Mul(&y1, &y1, &tv2) // y1 = y1 * tv2 + u.Arithmetic.Mul(&x2n, &tv3, &x1n) // x2n = tv3 * x1n + + u.Arithmetic.Mul(&y2, &y1, &p.C2) // y2 = y1 * c2 + u.Arithmetic.Mul(&y2, &y2, &tv1) // y2 = y2 * tv1 + u.Arithmetic.Mul(&y2, &y2, &u.Value) // y2 = y2 * u + + u.Arithmetic.Square(&tv2, &y1) // tv2 = y1^2 + u.Arithmetic.Mul(&tv2, &tv2, &gxd) // tv2 = tv2 * gxd + + e2 := (&Field{Value: tv2}).Equal(&Field{Value: gx1}) + + x = new(Field).Set(u) + y = new(Field).Set(u) + + // If e2, x = x1, else x = x2 + u.Arithmetic.Selectznz(&x.Value, &x2n, &x1n, e2) + + // xn / xd + u.Arithmetic.Invert(&wasInverted, &tv1, &xd) + u.Arithmetic.Mul(&tv1, &x.Value, &tv1) + u.Arithmetic.Selectznz(&x.Value, &x.Value, &tv1, wasInverted) + + // If e2, y = y1, else y = y2 + u.Arithmetic.Selectznz(&y.Value, &y2, &y1, e2) + + uBytes := u.Bytes() + yBytes := y.Bytes() + + usign := uBytes[0] & 1 + ysign := yBytes[0] & 1 + + // Fix sign of y + if usign != ysign { + y.Neg(y) + } + + return x, y +} diff --git a/core/curves/native/p256/fp/fp.go b/core/curves/native/p256/fp/fp.go new file mode 100644 index 0000000..5559b39 --- /dev/null +++ b/core/curves/native/p256/fp/fp.go @@ -0,0 +1,206 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package fp + +import ( + "math/big" + "sync" + + "github.com/sonr-io/sonr/crypto/core/curves/native" +) + +var ( + p256FpInitonce sync.Once + p256FpParams native.FieldParams +) + +func P256FpNew() *native.Field { + return &native.Field{ + Value: [native.FieldLimbs]uint64{}, + Params: getP256FpParams(), + Arithmetic: p256FpArithmetic{}, + } +} + +func p256FpParamsInit() { + // See FIPS 186-3, section D.2.3 + p256FpParams = native.FieldParams{ + R: [native.FieldLimbs]uint64{ + 0x0000000000000001, + 0xffffffff00000000, + 0xffffffffffffffff, + 0x00000000fffffffe, + }, + R2: [native.FieldLimbs]uint64{ + 0x0000000000000003, + 0xfffffffbffffffff, + 0xfffffffffffffffe, + 0x00000004fffffffd, + }, + R3: [native.FieldLimbs]uint64{ + 0xfffffffd0000000a, + 0xffffffedfffffff7, + 0x00000005fffffffc, + 0x0000001800000001, + }, + Modulus: [native.FieldLimbs]uint64{ + 0xffffffffffffffff, + 0x00000000ffffffff, + 0x0000000000000000, + 0xffffffff00000001, + }, + BiModulus: new(big.Int).SetBytes([]byte{ + 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + }), + } +} + +func getP256FpParams() *native.FieldParams { + p256FpInitonce.Do(p256FpParamsInit) + return &p256FpParams +} + +// p256FpArithmetic is a struct with all the methods needed for working +// in mod q +type p256FpArithmetic struct{} + +// ToMontgomery converts this field to montgomery form +func (f p256FpArithmetic) ToMontgomery(out, arg *[native.FieldLimbs]uint64) { + ToMontgomery((*MontgomeryDomainFieldElement)(out), (*NonMontgomeryDomainFieldElement)(arg)) +} + +// FromMontgomery converts this field from montgomery form +func (f p256FpArithmetic) FromMontgomery(out, arg *[native.FieldLimbs]uint64) { + FromMontgomery((*NonMontgomeryDomainFieldElement)(out), (*MontgomeryDomainFieldElement)(arg)) +} + +// Neg performs modular negation +func (f p256FpArithmetic) Neg(out, arg *[native.FieldLimbs]uint64) { + Opp((*MontgomeryDomainFieldElement)(out), (*MontgomeryDomainFieldElement)(arg)) +} + +// Square performs modular square +func (f p256FpArithmetic) Square(out, arg *[native.FieldLimbs]uint64) { + Square((*MontgomeryDomainFieldElement)(out), (*MontgomeryDomainFieldElement)(arg)) +} + +// Mul performs modular multiplication +func (f p256FpArithmetic) Mul(out, arg1, arg2 *[native.FieldLimbs]uint64) { + Mul( + (*MontgomeryDomainFieldElement)(out), + (*MontgomeryDomainFieldElement)(arg1), + (*MontgomeryDomainFieldElement)(arg2), + ) +} + +// Add performs modular addition +func (f p256FpArithmetic) Add(out, arg1, arg2 *[native.FieldLimbs]uint64) { + Add( + (*MontgomeryDomainFieldElement)(out), + (*MontgomeryDomainFieldElement)(arg1), + (*MontgomeryDomainFieldElement)(arg2), + ) +} + +// Sub performs modular subtraction +func (f p256FpArithmetic) Sub(out, arg1, arg2 *[native.FieldLimbs]uint64) { + Sub( + (*MontgomeryDomainFieldElement)(out), + (*MontgomeryDomainFieldElement)(arg1), + (*MontgomeryDomainFieldElement)(arg2), + ) +} + +// Sqrt performs modular square root +func (f p256FpArithmetic) Sqrt(wasSquare *int, out, arg *[native.FieldLimbs]uint64) { + // Use p = 3 mod 4 by Euler's criterion means + // arg^((p+1)/4 mod p + var t, c [native.FieldLimbs]uint64 + c1 := [native.FieldLimbs]uint64{ + 0x0000_0000_0000_0000, + 0x0000_0000_4000_0000, + 0x4000_0000_0000_0000, + 0x3fff_ffff_c000_0000, + } + native.Pow(&t, arg, &c1, getP256FpParams(), f) + 
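+	// Descriptive comment (added): t now holds arg^((p+1)/4), the candidate
+	// square root for a 3 mod 4 prime. The Square/Equal check below sets
+	// wasSquare to 1 only when t*t == arg, and Selectznz then writes t into
+	// out only in that case, leaving out untouched otherwise.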
Square((*MontgomeryDomainFieldElement)(&c), (*MontgomeryDomainFieldElement)(&t)) + *wasSquare = (&native.Field{Value: c, Params: getP256FpParams(), Arithmetic: f}).Equal( + &native.Field{ + Value: *arg, Params: getP256FpParams(), Arithmetic: f, + }, + ) + Selectznz(out, uint1(*wasSquare), out, &t) +} + +// Invert performs modular inverse +func (f p256FpArithmetic) Invert(wasInverted *int, out, arg *[native.FieldLimbs]uint64) { + // Fermat's Little Theorem + // a ^ (p - 2) mod p + // + // The exponent pattern (from high to low) is: + // - 32 bits of value 1 + // - 31 bits of value 0 + // - 1 bit of value 1 + // - 96 bits of value 0 + // - 94 bits of value 1 + // - 1 bit of value 0 + // - 1 bit of value 1 + // To speed up the square-and-multiply algorithm, precompute a^(2^31-1). + // + // Courtesy of Thomas Pornin + // + var t, r [native.FieldLimbs]uint64 + copy(t[:], arg[:]) + + for i := 0; i < 30; i++ { + f.Square(&t, &t) + f.Mul(&t, &t, arg) + } + copy(r[:], t[:]) + for i := 224; i >= 0; i-- { + f.Square(&r, &r) + switch i { + case 0: + fallthrough + case 2: + fallthrough + case 192: + fallthrough + case 224: + f.Mul(&r, &r, arg) + case 3: + fallthrough + case 34: + fallthrough + case 65: + f.Mul(&r, &r, &t) + } + } + + *wasInverted = (&native.Field{ + Value: *arg, + Params: getP256FpParams(), + Arithmetic: f, + }).IsNonZero() + Selectznz(out, uint1(*wasInverted), out, &r) +} + +// FromBytes converts a little endian byte array into a field element +func (f p256FpArithmetic) FromBytes(out *[native.FieldLimbs]uint64, arg *[native.FieldBytes]byte) { + FromBytes(out, arg) +} + +// ToBytes converts a field element to a little endian byte array +func (f p256FpArithmetic) ToBytes(out *[native.FieldBytes]byte, arg *[native.FieldLimbs]uint64) { + ToBytes(out, arg) +} + +// Selectznz performs conditional select. +// selects arg1 if choice == 0 and arg2 if choice == 1 +func (f p256FpArithmetic) Selectznz(out, arg1, arg2 *[native.FieldLimbs]uint64, choice int) { + Selectznz(out, uint1(choice), arg1, arg2) +} diff --git a/core/curves/native/p256/fp/fp_test.go b/core/curves/native/p256/fp/fp_test.go new file mode 100644 index 0000000..7ccfb13 --- /dev/null +++ b/core/curves/native/p256/fp/fp_test.go @@ -0,0 +1,330 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package fp + +import ( + crand "crypto/rand" + "math/big" + "math/rand" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core/curves/native" + "github.com/sonr-io/sonr/crypto/internal" +) + +func TestFpSetOne(t *testing.T) { + fq := P256FpNew().SetOne() + require.NotNil(t, fq) + require.Equal(t, fq.Value, getP256FpParams().R) +} + +func TestFpSetUint64(t *testing.T) { + act := P256FpNew().SetUint64(1 << 60) + require.NotNil(t, act) + // Remember it will be in montgomery form + require.Equal(t, act.Value[0], uint64(0x100000000fffffff)) +} + +func TestFpAdd(t *testing.T) { + lhs := P256FpNew().SetOne() + rhs := P256FpNew().SetOne() + exp := P256FpNew().SetUint64(2) + res := P256FpNew().Add(lhs, rhs) + require.NotNil(t, res) + require.Equal(t, 1, res.Equal(exp)) + + // Fuzz test + for i := 0; i < 25; i++ { + // Divide by 4 to prevent overflow false errors + l := rand.Uint64() >> 2 + r := rand.Uint64() >> 2 + e := l + r + lhs.SetUint64(l) + rhs.SetUint64(r) + exp.SetUint64(e) + + a := P256FpNew().Add(lhs, rhs) + require.NotNil(t, a) + require.Equal(t, exp, a) + } +} + +func TestFpSub(t *testing.T) { + lhs := P256FpNew().SetOne() + rhs := P256FpNew().SetOne() + exp := P256FpNew().SetZero() + res := P256FpNew().Sub(lhs, rhs) + require.NotNil(t, res) + require.Equal(t, 1, res.Equal(exp)) + + // Fuzz test + for i := 0; i < 25; i++ { + // Divide by 4 to prevent overflow false errors + l := rand.Uint64() >> 2 + r := rand.Uint64() >> 2 + if l < r { + l, r = r, l + } + e := l - r + lhs.SetUint64(l) + rhs.SetUint64(r) + exp.SetUint64(e) + + a := P256FpNew().Sub(lhs, rhs) + require.NotNil(t, a) + require.Equal(t, exp, a) + } +} + +func TestFpMul(t *testing.T) { + lhs := P256FpNew().SetOne() + rhs := P256FpNew().SetOne() + exp := P256FpNew().SetOne() + res := P256FpNew().Mul(lhs, rhs) + require.NotNil(t, res) + require.Equal(t, 1, res.Equal(exp)) + + // Fuzz test + for i := 0; i < 25; i++ { + // Divide by 4 to prevent overflow false errors + l := rand.Uint32() + r := rand.Uint32() + e := uint64(l) * uint64(r) + lhs.SetUint64(uint64(l)) + rhs.SetUint64(uint64(r)) + exp.SetUint64(e) + + a := P256FpNew().Mul(lhs, rhs) + require.NotNil(t, a) + require.Equal(t, exp, a) + } +} + +func TestFpDouble(t *testing.T) { + a := P256FpNew().SetUint64(2) + e := P256FpNew().SetUint64(4) + require.Equal(t, e, P256FpNew().Double(a)) + + for i := 0; i < 25; i++ { + tv := rand.Uint32() + ttv := uint64(tv) * 2 + a = P256FpNew().SetUint64(uint64(tv)) + e = P256FpNew().SetUint64(ttv) + require.Equal(t, e, P256FpNew().Double(a)) + } +} + +func TestFpSquare(t *testing.T) { + a := P256FpNew().SetUint64(4) + e := P256FpNew().SetUint64(16) + require.Equal(t, e, a.Square(a)) + + for i := 0; i < 25; i++ { + j := rand.Uint32() + exp := uint64(j) * uint64(j) + e.SetUint64(exp) + a.SetUint64(uint64(j)) + require.Equal(t, e, a.Square(a)) + } +} + +func TestFpNeg(t *testing.T) { + g := P256FpNew().SetUint64(7) + a := P256FpNew().SetOne() + a.Neg(a) + e := P256FpNew().SetRaw(&[native.FieldLimbs]uint64{0xfffffffffffffffe, 0x00000001ffffffff, 0x0000000000000000, 0xfffffffe00000002}) + require.Equal(t, e, a) + a.Neg(g) + e = P256FpNew().SetRaw(&[native.FieldLimbs]uint64{0xfffffffffffffff8, 0x00000007ffffffff, 0x0000000000000000, 0xfffffff800000008}) + require.Equal(t, e, a) +} + +func TestFpExp(t *testing.T) { + e := P256FpNew().SetUint64(8) + a := P256FpNew().SetUint64(2) + by := P256FpNew().SetUint64(3) + require.Equal(t, e, a.Exp(a, by)) +} + +func 
TestFpSqrt(t *testing.T) { + t1 := P256FpNew().SetUint64(2) + t2 := P256FpNew().Neg(t1) + t3 := P256FpNew().Square(t1) + _, wasSquare := t3.Sqrt(t3) + + require.True(t, wasSquare) + require.Equal(t, 1, t1.Equal(t3)|t2.Equal(t3)) + t1.SetUint64(3) + _, wasSquare = P256FpNew().Sqrt(t1) + require.False(t, wasSquare) +} + +func TestFpInvert(t *testing.T) { + twoInv := P256FpNew().SetRaw(&[native.FieldLimbs]uint64{0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x8000000000000000}) + two := P256FpNew().SetUint64(2) + a, inverted := P256FpNew().Invert(two) + require.True(t, inverted) + require.Equal(t, a, twoInv) + + rootOfUnity := P256FpNew().SetLimbs(&[native.FieldLimbs]uint64{0x8619a9e760c01d0c, 0xa883c4fba37998df, 0x45607580b6eabd98, 0xf252b002544b2f99}) + rootOfUnityInv := P256FpNew().SetRaw(&[native.FieldLimbs]uint64{0x2609d8ab477f96d1, 0x6e5f128fb20a0f24, 0xe4d636874d99643b, 0x376080cc1f2a3735}) + a, inverted = P256FpNew().Invert(rootOfUnity) + require.True(t, inverted) + require.Equal(t, a, rootOfUnityInv) + + lhs := P256FpNew().SetUint64(9) + rhs := P256FpNew().SetUint64(3) + rhsInv, inverted := P256FpNew().Invert(rhs) + require.True(t, inverted) + require.Equal(t, rhs, P256FpNew().Mul(lhs, rhsInv)) + + rhs.SetZero() + _, inverted = P256FpNew().Invert(rhs) + require.False(t, inverted) +} + +func TestFpCMove(t *testing.T) { + t1 := P256FpNew().SetUint64(5) + t2 := P256FpNew().SetUint64(10) + require.Equal(t, t1, P256FpNew().CMove(t1, t2, 0)) + require.Equal(t, t2, P256FpNew().CMove(t1, t2, 1)) +} + +func TestFpBytes(t *testing.T) { + t1 := P256FpNew().SetUint64(99) + seq := t1.Bytes() + t2, err := P256FpNew().SetBytes(&seq) + require.NoError(t, err) + require.Equal(t, t1, t2) + + for i := 0; i < 25; i++ { + t1.SetUint64(rand.Uint64()) + seq = t1.Bytes() + _, err = t2.SetBytes(&seq) + require.NoError(t, err) + require.Equal(t, t1, t2) + } +} + +func TestFpCmp(t *testing.T) { + tests := []struct { + a *native.Field + b *native.Field + e int + }{ + { + a: P256FpNew().SetRaw(&[native.FieldLimbs]uint64{2731658267414164836, 14655288906067898431, 6537465423330262322, 8306191141697566219}), + b: P256FpNew().SetRaw(&[native.FieldLimbs]uint64{6472764012681988529, 10848812988401906064, 2961825807536828898, 4282183981941645679}), + e: 1, + }, + { + a: P256FpNew().SetRaw(&[native.FieldLimbs]uint64{8023004109510539223, 4652004072850285717, 1877219145646046927, 383214385093921911}), + b: P256FpNew().SetRaw(&[native.FieldLimbs]uint64{10099384440823804262, 16139476942229308465, 8636966320777393798, 5435928725024696785}), + e: -1, + }, + { + a: P256FpNew().SetRaw(&[native.FieldLimbs]uint64{3741840066202388211, 12165774400417314871, 16619312580230515379, 16195032234110087705}), + b: P256FpNew().SetRaw(&[native.FieldLimbs]uint64{3905865991286066744, 543690822309071825, 17963103015950210055, 3745476720756119742}), + e: 1, + }, + { + a: P256FpNew().SetRaw(&[native.FieldLimbs]uint64{16660853697936147788, 7799793619412111108, 13515141085171033220, 2641079731236069032}), + b: P256FpNew().SetRaw(&[native.FieldLimbs]uint64{17790588295388238399, 571847801379669440, 14537208974498222469, 12792570372087452754}), + e: -1, + }, + { + a: P256FpNew().SetRaw(&[native.FieldLimbs]uint64{3912839285384959186, 2701177075110484070, 6453856448115499033, 6475797457962597458}), + b: P256FpNew().SetRaw(&[native.FieldLimbs]uint64{1282566391665688512, 13503640416992806563, 2962240104675990153, 3374904770947067689}), + e: 1, + }, + { + a: P256FpNew().SetRaw(&[native.FieldLimbs]uint64{5716631803409360103, 
7859567470082614154, 12747956220853330146, 18434584096087315020}), + b: P256FpNew().SetRaw(&[native.FieldLimbs]uint64{16317076441459028418, 12854146980376319601, 2258436689269031143, 9531877130792223752}), + e: 1, + }, + { + a: P256FpNew().SetRaw(&[native.FieldLimbs]uint64{17955191469941083403, 10350326247207200880, 17263512235150705075, 12700328451238078022}), + b: P256FpNew().SetRaw(&[native.FieldLimbs]uint64{6767595547459644695, 7146403825494928147, 12269344038346710612, 9122477829383225603}), + e: 1, + }, + { + a: P256FpNew().SetRaw(&[native.FieldLimbs]uint64{17099388671847024438, 6426264987820696548, 10641143464957227405, 7709745403700754098}), + b: P256FpNew().SetRaw(&[native.FieldLimbs]uint64{10799154372990268556, 17178492485719929374, 5705777922258988797, 8051037767683567782}), + e: -1, + }, + { + a: P256FpNew().SetRaw(&[native.FieldLimbs]uint64{4567139260680454325, 1629385880182139061, 16607020832317899145, 1261011562621553200}), + b: P256FpNew().SetRaw(&[native.FieldLimbs]uint64{13487234491304534488, 17872642955936089265, 17651026784972590233, 9468934643333871559}), + e: -1, + }, + { + a: P256FpNew().SetRaw(&[native.FieldLimbs]uint64{18071070103467571798, 11787850505799426140, 10631355976141928593, 4867785203635092610}), + b: P256FpNew().SetRaw(&[native.FieldLimbs]uint64{12596443599426461624, 10176122686151524591, 17075755296887483439, 6726169532695070719}), + e: -1, + }, + } + + for _, test := range tests { + require.Equal(t, test.e, test.a.Cmp(test.b)) + require.Equal(t, -test.e, test.b.Cmp(test.a)) + require.Equal(t, 0, test.a.Cmp(test.a)) + require.Equal(t, 0, test.b.Cmp(test.b)) + } +} + +func TestFpBigInt(t *testing.T) { + t1 := P256FpNew().SetBigInt(big.NewInt(9999)) + t2 := P256FpNew().SetBigInt(t1.BigInt()) + require.Equal(t, t1, t2) + + e := P256FpNew().SetRaw(&[native.FieldLimbs]uint64{0x515151513f3f3f3e, 0xc9c9c9cb36363636, 0xb7b7b7b79c9c9c9c, 0xfffffffeaeaeaeaf}) + b := new( + big.Int, + ).SetBytes([]byte{9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9}) + t1.SetBigInt(b) + require.Equal(t, e, t1) + e.Value[0] = 0xaeaeaeaec0c0c0c1 + e.Value[1] = 0x36363635c9c9c9c9 + e.Value[2] = 0x4848484863636363 + e.Value[3] = 0x0000000051515151 + b.Neg(b) + t1.SetBigInt(b) + require.Equal(t, e, t1) +} + +func TestFpSetBytesWide(t *testing.T) { + e := P256FpNew().SetRaw(&[native.FieldLimbs]uint64{0xccdefd48c77805bc, 0xe935dc2db86364d6, 0xca8ee6e5870a020e, 0x4c94bf4467f3b5bf}) + + a := P256FpNew().SetBytesWide(&[64]byte{ + 0x69, 0x23, 0x5a, 0x0b, 0xce, 0x0c, 0xa8, 0x64, + 0x3c, 0x78, 0xbc, 0x01, 0x05, 0xef, 0xf2, 0x84, + 0xde, 0xbb, 0x6b, 0xc8, 0x63, 0x5e, 0x6e, 0x69, + 0x62, 0xcc, 0xc6, 0x2d, 0xf5, 0x72, 0x40, 0x92, + 0x28, 0x11, 0xd6, 0xc8, 0x07, 0xa5, 0x88, 0x82, + 0xfe, 0xe3, 0x97, 0xf6, 0x1e, 0xfb, 0x2e, 0x3b, + 0x27, 0x5f, 0x85, 0x06, 0x8d, 0x99, 0xa4, 0x75, + 0xc0, 0x2c, 0x71, 0x69, 0x9e, 0x58, 0xea, 0x52, + }) + require.Equal(t, e, a) +} + +func TestFpSetBytesWideBigInt(t *testing.T) { + params := getP256FpParams() + var tv2 [64]byte + for i := 0; i < 25; i++ { + _, _ = crand.Read(tv2[:]) + e := new(big.Int).SetBytes(tv2[:]) + e.Mod(e, params.BiModulus) + + tv := internal.ReverseScalarBytes(tv2[:]) + copy(tv2[:], tv) + a := P256FpNew().SetBytesWide(&tv2) + require.Equal(t, 0, e.Cmp(a.BigInt())) + } +} diff --git a/core/curves/native/p256/fp/p256_fp.go b/core/curves/native/p256/fp/p256_fp.go new file mode 100755 index 0000000..6931a06 --- /dev/null +++ 
b/core/curves/native/p256/fp/p256_fp.go @@ -0,0 +1,1463 @@ +// Code generated by Fiat Cryptography. DO NOT EDIT. +// +// Autogenerated: 'src/ExtractionOCaml/word_by_word_montgomery' --lang Go --no-wide-int --relax-primitive-carry-to-bitwidth 32,64 --cmovznz-by-mul --internal-static --package-case flatcase --public-function-case UpperCamelCase --private-function-case camelCase --public-type-case UpperCamelCase --private-type-case camelCase --no-prefix-fiat --doc-newline-in-typedef-bounds --doc-prepend-header 'Code generated by Fiat Cryptography. DO NOT EDIT.' --doc-text-before-function-name ” --doc-text-before-type-name ” --package-name p256 ” 64 '2^256 - 2^224 + 2^192 + 2^96 - 1' mul square add sub opp from_montgomery to_montgomery nonzero selectznz to_bytes from_bytes one +// +// curve description (via package name): p256 +// +// machine_wordsize = 64 (from "64") +// +// requested operations: mul, square, add, sub, opp, from_montgomery, to_montgomery, nonzero, selectznz, to_bytes, from_bytes, one, msat, divstep, divstep_precomp +// +// m = 0xffffffff00000001000000000000000000000000ffffffffffffffffffffffff (from "2^256 - 2^224 + 2^192 + 2^96 - 1") +// +// NOTE: In addition to the bounds specified above each function, all +// +// functions synthesized for this Montgomery arithmetic require the +// +// input to be strictly less than the prime modulus (m), and also +// +// require the input to be in the unique saturated representation. +// +// All functions also ensure that these two properties are true of +// +// return values. +// +// Computed values: +// +// eval z = z[0] + (z[1] << 64) + (z[2] << 128) + (z[3] << 192) +// +// bytes_eval z = z[0] + (z[1] << 8) + (z[2] << 16) + (z[3] << 24) + (z[4] << 32) + (z[5] << 40) + (z[6] << 48) + (z[7] << 56) + (z[8] << 64) + (z[9] << 72) + (z[10] << 80) + (z[11] << 88) + (z[12] << 96) + (z[13] << 104) + (z[14] << 112) + (z[15] << 120) + (z[16] << 128) + (z[17] << 136) + (z[18] << 144) + (z[19] << 152) + (z[20] << 160) + (z[21] << 168) + (z[22] << 176) + (z[23] << 184) + (z[24] << 192) + (z[25] << 200) + (z[26] << 208) + (z[27] << 216) + (z[28] << 224) + (z[29] << 232) + (z[30] << 240) + (z[31] << 248) +// +// twos_complement_eval z = let x1 := z[0] + (z[1] << 64) + (z[2] << 128) + (z[3] << 192) in +// +// if x1 & (2^256-1) < 2^255 then x1 & (2^256-1) else (x1 & (2^256-1)) - 2^256 +package fp + +import "math/bits" + +type uint1 uint64 // We use uint64 instead of a more narrow type for performance reasons; see https://github.com/mit-plv/fiat-crypto/pull/1006#issuecomment-892625927 +type int1 int64 // We use uint64 instead of a more narrow type for performance reasons; see https://github.com/mit-plv/fiat-crypto/pull/1006#issuecomment-892625927 + +// MontgomeryDomainFieldElement is a field element in the Montgomery domain. +// +// Bounds: +// +// [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +type MontgomeryDomainFieldElement [4]uint64 + +// NonMontgomeryDomainFieldElement is a field element NOT in the Montgomery domain. +// +// Bounds: +// +// [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +type NonMontgomeryDomainFieldElement [4]uint64 + +// cmovznzU64 is a single-word conditional move. 
+// +// Postconditions: +// +// out1 = (if arg1 = 0 then arg2 else arg3) +// +// Input Bounds: +// +// arg1: [0x0 ~> 0x1] +// arg2: [0x0 ~> 0xffffffffffffffff] +// arg3: [0x0 ~> 0xffffffffffffffff] +// +// Output Bounds: +// +// out1: [0x0 ~> 0xffffffffffffffff] +func cmovznzU64(out1 *uint64, arg1 uint1, arg2 uint64, arg3 uint64) { + x1 := (uint64(arg1) * 0xffffffffffffffff) + x2 := ((x1 & arg3) | ((^x1) & arg2)) + *out1 = x2 +} + +// Mul multiplies two field elements in the Montgomery domain. +// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// 0 ≤ eval arg2 < m +// +// Postconditions: +// +// eval (from_montgomery out1) mod m = (eval (from_montgomery arg1) * eval (from_montgomery arg2)) mod m +// 0 ≤ eval out1 < m +func Mul(out1 *MontgomeryDomainFieldElement, arg1 *MontgomeryDomainFieldElement, arg2 *MontgomeryDomainFieldElement) { + x1 := arg1[1] + x2 := arg1[2] + x3 := arg1[3] + x4 := arg1[0] + var x5 uint64 + var x6 uint64 + x6, x5 = bits.Mul64(x4, arg2[3]) + var x7 uint64 + var x8 uint64 + x8, x7 = bits.Mul64(x4, arg2[2]) + var x9 uint64 + var x10 uint64 + x10, x9 = bits.Mul64(x4, arg2[1]) + var x11 uint64 + var x12 uint64 + x12, x11 = bits.Mul64(x4, arg2[0]) + var x13 uint64 + var x14 uint64 + x13, x14 = bits.Add64(x12, x9, uint64(0x0)) + var x15 uint64 + var x16 uint64 + x15, x16 = bits.Add64(x10, x7, uint64(uint1(x14))) + var x17 uint64 + var x18 uint64 + x17, x18 = bits.Add64(x8, x5, uint64(uint1(x16))) + x19 := (uint64(uint1(x18)) + x6) + var x20 uint64 + var x21 uint64 + x21, x20 = bits.Mul64(x11, 0xffffffff00000001) + var x22 uint64 + var x23 uint64 + x23, x22 = bits.Mul64(x11, 0xffffffff) + var x24 uint64 + var x25 uint64 + x25, x24 = bits.Mul64(x11, 0xffffffffffffffff) + var x26 uint64 + var x27 uint64 + x26, x27 = bits.Add64(x25, x22, uint64(0x0)) + x28 := (uint64(uint1(x27)) + x23) + var x30 uint64 + _, x30 = bits.Add64(x11, x24, uint64(0x0)) + var x31 uint64 + var x32 uint64 + x31, x32 = bits.Add64(x13, x26, uint64(uint1(x30))) + var x33 uint64 + var x34 uint64 + x33, x34 = bits.Add64(x15, x28, uint64(uint1(x32))) + var x35 uint64 + var x36 uint64 + x35, x36 = bits.Add64(x17, x20, uint64(uint1(x34))) + var x37 uint64 + var x38 uint64 + x37, x38 = bits.Add64(x19, x21, uint64(uint1(x36))) + var x39 uint64 + var x40 uint64 + x40, x39 = bits.Mul64(x1, arg2[3]) + var x41 uint64 + var x42 uint64 + x42, x41 = bits.Mul64(x1, arg2[2]) + var x43 uint64 + var x44 uint64 + x44, x43 = bits.Mul64(x1, arg2[1]) + var x45 uint64 + var x46 uint64 + x46, x45 = bits.Mul64(x1, arg2[0]) + var x47 uint64 + var x48 uint64 + x47, x48 = bits.Add64(x46, x43, uint64(0x0)) + var x49 uint64 + var x50 uint64 + x49, x50 = bits.Add64(x44, x41, uint64(uint1(x48))) + var x51 uint64 + var x52 uint64 + x51, x52 = bits.Add64(x42, x39, uint64(uint1(x50))) + x53 := (uint64(uint1(x52)) + x40) + var x54 uint64 + var x55 uint64 + x54, x55 = bits.Add64(x31, x45, uint64(0x0)) + var x56 uint64 + var x57 uint64 + x56, x57 = bits.Add64(x33, x47, uint64(uint1(x55))) + var x58 uint64 + var x59 uint64 + x58, x59 = bits.Add64(x35, x49, uint64(uint1(x57))) + var x60 uint64 + var x61 uint64 + x60, x61 = bits.Add64(x37, x51, uint64(uint1(x59))) + var x62 uint64 + var x63 uint64 + x62, x63 = bits.Add64(uint64(uint1(x38)), x53, uint64(uint1(x61))) + var x64 uint64 + var x65 uint64 + x65, x64 = bits.Mul64(x54, 0xffffffff00000001) + var x66 uint64 + var x67 uint64 + x67, x66 = bits.Mul64(x54, 0xffffffff) + var x68 uint64 + var x69 uint64 + x69, x68 = bits.Mul64(x54, 0xffffffffffffffff) + var x70 uint64 + var x71 uint64 + x70, x71 
= bits.Add64(x69, x66, uint64(0x0)) + x72 := (uint64(uint1(x71)) + x67) + var x74 uint64 + _, x74 = bits.Add64(x54, x68, uint64(0x0)) + var x75 uint64 + var x76 uint64 + x75, x76 = bits.Add64(x56, x70, uint64(uint1(x74))) + var x77 uint64 + var x78 uint64 + x77, x78 = bits.Add64(x58, x72, uint64(uint1(x76))) + var x79 uint64 + var x80 uint64 + x79, x80 = bits.Add64(x60, x64, uint64(uint1(x78))) + var x81 uint64 + var x82 uint64 + x81, x82 = bits.Add64(x62, x65, uint64(uint1(x80))) + x83 := (uint64(uint1(x82)) + uint64(uint1(x63))) + var x84 uint64 + var x85 uint64 + x85, x84 = bits.Mul64(x2, arg2[3]) + var x86 uint64 + var x87 uint64 + x87, x86 = bits.Mul64(x2, arg2[2]) + var x88 uint64 + var x89 uint64 + x89, x88 = bits.Mul64(x2, arg2[1]) + var x90 uint64 + var x91 uint64 + x91, x90 = bits.Mul64(x2, arg2[0]) + var x92 uint64 + var x93 uint64 + x92, x93 = bits.Add64(x91, x88, uint64(0x0)) + var x94 uint64 + var x95 uint64 + x94, x95 = bits.Add64(x89, x86, uint64(uint1(x93))) + var x96 uint64 + var x97 uint64 + x96, x97 = bits.Add64(x87, x84, uint64(uint1(x95))) + x98 := (uint64(uint1(x97)) + x85) + var x99 uint64 + var x100 uint64 + x99, x100 = bits.Add64(x75, x90, uint64(0x0)) + var x101 uint64 + var x102 uint64 + x101, x102 = bits.Add64(x77, x92, uint64(uint1(x100))) + var x103 uint64 + var x104 uint64 + x103, x104 = bits.Add64(x79, x94, uint64(uint1(x102))) + var x105 uint64 + var x106 uint64 + x105, x106 = bits.Add64(x81, x96, uint64(uint1(x104))) + var x107 uint64 + var x108 uint64 + x107, x108 = bits.Add64(x83, x98, uint64(uint1(x106))) + var x109 uint64 + var x110 uint64 + x110, x109 = bits.Mul64(x99, 0xffffffff00000001) + var x111 uint64 + var x112 uint64 + x112, x111 = bits.Mul64(x99, 0xffffffff) + var x113 uint64 + var x114 uint64 + x114, x113 = bits.Mul64(x99, 0xffffffffffffffff) + var x115 uint64 + var x116 uint64 + x115, x116 = bits.Add64(x114, x111, uint64(0x0)) + x117 := (uint64(uint1(x116)) + x112) + var x119 uint64 + _, x119 = bits.Add64(x99, x113, uint64(0x0)) + var x120 uint64 + var x121 uint64 + x120, x121 = bits.Add64(x101, x115, uint64(uint1(x119))) + var x122 uint64 + var x123 uint64 + x122, x123 = bits.Add64(x103, x117, uint64(uint1(x121))) + var x124 uint64 + var x125 uint64 + x124, x125 = bits.Add64(x105, x109, uint64(uint1(x123))) + var x126 uint64 + var x127 uint64 + x126, x127 = bits.Add64(x107, x110, uint64(uint1(x125))) + x128 := (uint64(uint1(x127)) + uint64(uint1(x108))) + var x129 uint64 + var x130 uint64 + x130, x129 = bits.Mul64(x3, arg2[3]) + var x131 uint64 + var x132 uint64 + x132, x131 = bits.Mul64(x3, arg2[2]) + var x133 uint64 + var x134 uint64 + x134, x133 = bits.Mul64(x3, arg2[1]) + var x135 uint64 + var x136 uint64 + x136, x135 = bits.Mul64(x3, arg2[0]) + var x137 uint64 + var x138 uint64 + x137, x138 = bits.Add64(x136, x133, uint64(0x0)) + var x139 uint64 + var x140 uint64 + x139, x140 = bits.Add64(x134, x131, uint64(uint1(x138))) + var x141 uint64 + var x142 uint64 + x141, x142 = bits.Add64(x132, x129, uint64(uint1(x140))) + x143 := (uint64(uint1(x142)) + x130) + var x144 uint64 + var x145 uint64 + x144, x145 = bits.Add64(x120, x135, uint64(0x0)) + var x146 uint64 + var x147 uint64 + x146, x147 = bits.Add64(x122, x137, uint64(uint1(x145))) + var x148 uint64 + var x149 uint64 + x148, x149 = bits.Add64(x124, x139, uint64(uint1(x147))) + var x150 uint64 + var x151 uint64 + x150, x151 = bits.Add64(x126, x141, uint64(uint1(x149))) + var x152 uint64 + var x153 uint64 + x152, x153 = bits.Add64(x128, x143, uint64(uint1(x151))) + var x154 uint64 + var 
x155 uint64 + x155, x154 = bits.Mul64(x144, 0xffffffff00000001) + var x156 uint64 + var x157 uint64 + x157, x156 = bits.Mul64(x144, 0xffffffff) + var x158 uint64 + var x159 uint64 + x159, x158 = bits.Mul64(x144, 0xffffffffffffffff) + var x160 uint64 + var x161 uint64 + x160, x161 = bits.Add64(x159, x156, uint64(0x0)) + x162 := (uint64(uint1(x161)) + x157) + var x164 uint64 + _, x164 = bits.Add64(x144, x158, uint64(0x0)) + var x165 uint64 + var x166 uint64 + x165, x166 = bits.Add64(x146, x160, uint64(uint1(x164))) + var x167 uint64 + var x168 uint64 + x167, x168 = bits.Add64(x148, x162, uint64(uint1(x166))) + var x169 uint64 + var x170 uint64 + x169, x170 = bits.Add64(x150, x154, uint64(uint1(x168))) + var x171 uint64 + var x172 uint64 + x171, x172 = bits.Add64(x152, x155, uint64(uint1(x170))) + x173 := (uint64(uint1(x172)) + uint64(uint1(x153))) + var x174 uint64 + var x175 uint64 + x174, x175 = bits.Sub64(x165, 0xffffffffffffffff, uint64(0x0)) + var x176 uint64 + var x177 uint64 + x176, x177 = bits.Sub64(x167, 0xffffffff, uint64(uint1(x175))) + var x178 uint64 + var x179 uint64 + x178, x179 = bits.Sub64(x169, uint64(0x0), uint64(uint1(x177))) + var x180 uint64 + var x181 uint64 + x180, x181 = bits.Sub64(x171, 0xffffffff00000001, uint64(uint1(x179))) + var x183 uint64 + _, x183 = bits.Sub64(x173, uint64(0x0), uint64(uint1(x181))) + var x184 uint64 + cmovznzU64(&x184, uint1(x183), x174, x165) + var x185 uint64 + cmovznzU64(&x185, uint1(x183), x176, x167) + var x186 uint64 + cmovznzU64(&x186, uint1(x183), x178, x169) + var x187 uint64 + cmovznzU64(&x187, uint1(x183), x180, x171) + out1[0] = x184 + out1[1] = x185 + out1[2] = x186 + out1[3] = x187 +} + +// Square squares a field element in the Montgomery domain. +// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// +// Postconditions: +// +// eval (from_montgomery out1) mod m = (eval (from_montgomery arg1) * eval (from_montgomery arg1)) mod m +// 0 ≤ eval out1 < m +func Square(out1 *MontgomeryDomainFieldElement, arg1 *MontgomeryDomainFieldElement) { + x1 := arg1[1] + x2 := arg1[2] + x3 := arg1[3] + x4 := arg1[0] + var x5 uint64 + var x6 uint64 + x6, x5 = bits.Mul64(x4, arg1[3]) + var x7 uint64 + var x8 uint64 + x8, x7 = bits.Mul64(x4, arg1[2]) + var x9 uint64 + var x10 uint64 + x10, x9 = bits.Mul64(x4, arg1[1]) + var x11 uint64 + var x12 uint64 + x12, x11 = bits.Mul64(x4, arg1[0]) + var x13 uint64 + var x14 uint64 + x13, x14 = bits.Add64(x12, x9, uint64(0x0)) + var x15 uint64 + var x16 uint64 + x15, x16 = bits.Add64(x10, x7, uint64(uint1(x14))) + var x17 uint64 + var x18 uint64 + x17, x18 = bits.Add64(x8, x5, uint64(uint1(x16))) + x19 := (uint64(uint1(x18)) + x6) + var x20 uint64 + var x21 uint64 + x21, x20 = bits.Mul64(x11, 0xffffffff00000001) + var x22 uint64 + var x23 uint64 + x23, x22 = bits.Mul64(x11, 0xffffffff) + var x24 uint64 + var x25 uint64 + x25, x24 = bits.Mul64(x11, 0xffffffffffffffff) + var x26 uint64 + var x27 uint64 + x26, x27 = bits.Add64(x25, x22, uint64(0x0)) + x28 := (uint64(uint1(x27)) + x23) + var x30 uint64 + _, x30 = bits.Add64(x11, x24, uint64(0x0)) + var x31 uint64 + var x32 uint64 + x31, x32 = bits.Add64(x13, x26, uint64(uint1(x30))) + var x33 uint64 + var x34 uint64 + x33, x34 = bits.Add64(x15, x28, uint64(uint1(x32))) + var x35 uint64 + var x36 uint64 + x35, x36 = bits.Add64(x17, x20, uint64(uint1(x34))) + var x37 uint64 + var x38 uint64 + x37, x38 = bits.Add64(x19, x21, uint64(uint1(x36))) + var x39 uint64 + var x40 uint64 + x40, x39 = bits.Mul64(x1, arg1[3]) + var x41 uint64 + var x42 uint64 + x42, x41 = 
bits.Mul64(x1, arg1[2]) + var x43 uint64 + var x44 uint64 + x44, x43 = bits.Mul64(x1, arg1[1]) + var x45 uint64 + var x46 uint64 + x46, x45 = bits.Mul64(x1, arg1[0]) + var x47 uint64 + var x48 uint64 + x47, x48 = bits.Add64(x46, x43, uint64(0x0)) + var x49 uint64 + var x50 uint64 + x49, x50 = bits.Add64(x44, x41, uint64(uint1(x48))) + var x51 uint64 + var x52 uint64 + x51, x52 = bits.Add64(x42, x39, uint64(uint1(x50))) + x53 := (uint64(uint1(x52)) + x40) + var x54 uint64 + var x55 uint64 + x54, x55 = bits.Add64(x31, x45, uint64(0x0)) + var x56 uint64 + var x57 uint64 + x56, x57 = bits.Add64(x33, x47, uint64(uint1(x55))) + var x58 uint64 + var x59 uint64 + x58, x59 = bits.Add64(x35, x49, uint64(uint1(x57))) + var x60 uint64 + var x61 uint64 + x60, x61 = bits.Add64(x37, x51, uint64(uint1(x59))) + var x62 uint64 + var x63 uint64 + x62, x63 = bits.Add64(uint64(uint1(x38)), x53, uint64(uint1(x61))) + var x64 uint64 + var x65 uint64 + x65, x64 = bits.Mul64(x54, 0xffffffff00000001) + var x66 uint64 + var x67 uint64 + x67, x66 = bits.Mul64(x54, 0xffffffff) + var x68 uint64 + var x69 uint64 + x69, x68 = bits.Mul64(x54, 0xffffffffffffffff) + var x70 uint64 + var x71 uint64 + x70, x71 = bits.Add64(x69, x66, uint64(0x0)) + x72 := (uint64(uint1(x71)) + x67) + var x74 uint64 + _, x74 = bits.Add64(x54, x68, uint64(0x0)) + var x75 uint64 + var x76 uint64 + x75, x76 = bits.Add64(x56, x70, uint64(uint1(x74))) + var x77 uint64 + var x78 uint64 + x77, x78 = bits.Add64(x58, x72, uint64(uint1(x76))) + var x79 uint64 + var x80 uint64 + x79, x80 = bits.Add64(x60, x64, uint64(uint1(x78))) + var x81 uint64 + var x82 uint64 + x81, x82 = bits.Add64(x62, x65, uint64(uint1(x80))) + x83 := (uint64(uint1(x82)) + uint64(uint1(x63))) + var x84 uint64 + var x85 uint64 + x85, x84 = bits.Mul64(x2, arg1[3]) + var x86 uint64 + var x87 uint64 + x87, x86 = bits.Mul64(x2, arg1[2]) + var x88 uint64 + var x89 uint64 + x89, x88 = bits.Mul64(x2, arg1[1]) + var x90 uint64 + var x91 uint64 + x91, x90 = bits.Mul64(x2, arg1[0]) + var x92 uint64 + var x93 uint64 + x92, x93 = bits.Add64(x91, x88, uint64(0x0)) + var x94 uint64 + var x95 uint64 + x94, x95 = bits.Add64(x89, x86, uint64(uint1(x93))) + var x96 uint64 + var x97 uint64 + x96, x97 = bits.Add64(x87, x84, uint64(uint1(x95))) + x98 := (uint64(uint1(x97)) + x85) + var x99 uint64 + var x100 uint64 + x99, x100 = bits.Add64(x75, x90, uint64(0x0)) + var x101 uint64 + var x102 uint64 + x101, x102 = bits.Add64(x77, x92, uint64(uint1(x100))) + var x103 uint64 + var x104 uint64 + x103, x104 = bits.Add64(x79, x94, uint64(uint1(x102))) + var x105 uint64 + var x106 uint64 + x105, x106 = bits.Add64(x81, x96, uint64(uint1(x104))) + var x107 uint64 + var x108 uint64 + x107, x108 = bits.Add64(x83, x98, uint64(uint1(x106))) + var x109 uint64 + var x110 uint64 + x110, x109 = bits.Mul64(x99, 0xffffffff00000001) + var x111 uint64 + var x112 uint64 + x112, x111 = bits.Mul64(x99, 0xffffffff) + var x113 uint64 + var x114 uint64 + x114, x113 = bits.Mul64(x99, 0xffffffffffffffff) + var x115 uint64 + var x116 uint64 + x115, x116 = bits.Add64(x114, x111, uint64(0x0)) + x117 := (uint64(uint1(x116)) + x112) + var x119 uint64 + _, x119 = bits.Add64(x99, x113, uint64(0x0)) + var x120 uint64 + var x121 uint64 + x120, x121 = bits.Add64(x101, x115, uint64(uint1(x119))) + var x122 uint64 + var x123 uint64 + x122, x123 = bits.Add64(x103, x117, uint64(uint1(x121))) + var x124 uint64 + var x125 uint64 + x124, x125 = bits.Add64(x105, x109, uint64(uint1(x123))) + var x126 uint64 + var x127 uint64 + x126, x127 = 
bits.Add64(x107, x110, uint64(uint1(x125))) + x128 := (uint64(uint1(x127)) + uint64(uint1(x108))) + var x129 uint64 + var x130 uint64 + x130, x129 = bits.Mul64(x3, arg1[3]) + var x131 uint64 + var x132 uint64 + x132, x131 = bits.Mul64(x3, arg1[2]) + var x133 uint64 + var x134 uint64 + x134, x133 = bits.Mul64(x3, arg1[1]) + var x135 uint64 + var x136 uint64 + x136, x135 = bits.Mul64(x3, arg1[0]) + var x137 uint64 + var x138 uint64 + x137, x138 = bits.Add64(x136, x133, uint64(0x0)) + var x139 uint64 + var x140 uint64 + x139, x140 = bits.Add64(x134, x131, uint64(uint1(x138))) + var x141 uint64 + var x142 uint64 + x141, x142 = bits.Add64(x132, x129, uint64(uint1(x140))) + x143 := (uint64(uint1(x142)) + x130) + var x144 uint64 + var x145 uint64 + x144, x145 = bits.Add64(x120, x135, uint64(0x0)) + var x146 uint64 + var x147 uint64 + x146, x147 = bits.Add64(x122, x137, uint64(uint1(x145))) + var x148 uint64 + var x149 uint64 + x148, x149 = bits.Add64(x124, x139, uint64(uint1(x147))) + var x150 uint64 + var x151 uint64 + x150, x151 = bits.Add64(x126, x141, uint64(uint1(x149))) + var x152 uint64 + var x153 uint64 + x152, x153 = bits.Add64(x128, x143, uint64(uint1(x151))) + var x154 uint64 + var x155 uint64 + x155, x154 = bits.Mul64(x144, 0xffffffff00000001) + var x156 uint64 + var x157 uint64 + x157, x156 = bits.Mul64(x144, 0xffffffff) + var x158 uint64 + var x159 uint64 + x159, x158 = bits.Mul64(x144, 0xffffffffffffffff) + var x160 uint64 + var x161 uint64 + x160, x161 = bits.Add64(x159, x156, uint64(0x0)) + x162 := (uint64(uint1(x161)) + x157) + var x164 uint64 + _, x164 = bits.Add64(x144, x158, uint64(0x0)) + var x165 uint64 + var x166 uint64 + x165, x166 = bits.Add64(x146, x160, uint64(uint1(x164))) + var x167 uint64 + var x168 uint64 + x167, x168 = bits.Add64(x148, x162, uint64(uint1(x166))) + var x169 uint64 + var x170 uint64 + x169, x170 = bits.Add64(x150, x154, uint64(uint1(x168))) + var x171 uint64 + var x172 uint64 + x171, x172 = bits.Add64(x152, x155, uint64(uint1(x170))) + x173 := (uint64(uint1(x172)) + uint64(uint1(x153))) + var x174 uint64 + var x175 uint64 + x174, x175 = bits.Sub64(x165, 0xffffffffffffffff, uint64(0x0)) + var x176 uint64 + var x177 uint64 + x176, x177 = bits.Sub64(x167, 0xffffffff, uint64(uint1(x175))) + var x178 uint64 + var x179 uint64 + x178, x179 = bits.Sub64(x169, uint64(0x0), uint64(uint1(x177))) + var x180 uint64 + var x181 uint64 + x180, x181 = bits.Sub64(x171, 0xffffffff00000001, uint64(uint1(x179))) + var x183 uint64 + _, x183 = bits.Sub64(x173, uint64(0x0), uint64(uint1(x181))) + var x184 uint64 + cmovznzU64(&x184, uint1(x183), x174, x165) + var x185 uint64 + cmovznzU64(&x185, uint1(x183), x176, x167) + var x186 uint64 + cmovznzU64(&x186, uint1(x183), x178, x169) + var x187 uint64 + cmovznzU64(&x187, uint1(x183), x180, x171) + out1[0] = x184 + out1[1] = x185 + out1[2] = x186 + out1[3] = x187 +} + +// Add adds two field elements in the Montgomery domain. 
+// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// 0 ≤ eval arg2 < m +// +// Postconditions: +// +// eval (from_montgomery out1) mod m = (eval (from_montgomery arg1) + eval (from_montgomery arg2)) mod m +// 0 ≤ eval out1 < m +func Add(out1 *MontgomeryDomainFieldElement, arg1 *MontgomeryDomainFieldElement, arg2 *MontgomeryDomainFieldElement) { + var x1 uint64 + var x2 uint64 + x1, x2 = bits.Add64(arg1[0], arg2[0], uint64(0x0)) + var x3 uint64 + var x4 uint64 + x3, x4 = bits.Add64(arg1[1], arg2[1], uint64(uint1(x2))) + var x5 uint64 + var x6 uint64 + x5, x6 = bits.Add64(arg1[2], arg2[2], uint64(uint1(x4))) + var x7 uint64 + var x8 uint64 + x7, x8 = bits.Add64(arg1[3], arg2[3], uint64(uint1(x6))) + var x9 uint64 + var x10 uint64 + x9, x10 = bits.Sub64(x1, 0xffffffffffffffff, uint64(0x0)) + var x11 uint64 + var x12 uint64 + x11, x12 = bits.Sub64(x3, 0xffffffff, uint64(uint1(x10))) + var x13 uint64 + var x14 uint64 + x13, x14 = bits.Sub64(x5, uint64(0x0), uint64(uint1(x12))) + var x15 uint64 + var x16 uint64 + x15, x16 = bits.Sub64(x7, 0xffffffff00000001, uint64(uint1(x14))) + var x18 uint64 + _, x18 = bits.Sub64(uint64(uint1(x8)), uint64(0x0), uint64(uint1(x16))) + var x19 uint64 + cmovznzU64(&x19, uint1(x18), x9, x1) + var x20 uint64 + cmovznzU64(&x20, uint1(x18), x11, x3) + var x21 uint64 + cmovznzU64(&x21, uint1(x18), x13, x5) + var x22 uint64 + cmovznzU64(&x22, uint1(x18), x15, x7) + out1[0] = x19 + out1[1] = x20 + out1[2] = x21 + out1[3] = x22 +} + +// Sub subtracts two field elements in the Montgomery domain. +// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// 0 ≤ eval arg2 < m +// +// Postconditions: +// +// eval (from_montgomery out1) mod m = (eval (from_montgomery arg1) - eval (from_montgomery arg2)) mod m +// 0 ≤ eval out1 < m +func Sub(out1 *MontgomeryDomainFieldElement, arg1 *MontgomeryDomainFieldElement, arg2 *MontgomeryDomainFieldElement) { + var x1 uint64 + var x2 uint64 + x1, x2 = bits.Sub64(arg1[0], arg2[0], uint64(0x0)) + var x3 uint64 + var x4 uint64 + x3, x4 = bits.Sub64(arg1[1], arg2[1], uint64(uint1(x2))) + var x5 uint64 + var x6 uint64 + x5, x6 = bits.Sub64(arg1[2], arg2[2], uint64(uint1(x4))) + var x7 uint64 + var x8 uint64 + x7, x8 = bits.Sub64(arg1[3], arg2[3], uint64(uint1(x6))) + var x9 uint64 + cmovznzU64(&x9, uint1(x8), uint64(0x0), 0xffffffffffffffff) + var x10 uint64 + var x11 uint64 + x10, x11 = bits.Add64(x1, x9, uint64(0x0)) + var x12 uint64 + var x13 uint64 + x12, x13 = bits.Add64(x3, (x9 & 0xffffffff), uint64(uint1(x11))) + var x14 uint64 + var x15 uint64 + x14, x15 = bits.Add64(x5, uint64(0x0), uint64(uint1(x13))) + var x16 uint64 + x16, _ = bits.Add64(x7, (x9 & 0xffffffff00000001), uint64(uint1(x15))) + out1[0] = x10 + out1[1] = x12 + out1[2] = x14 + out1[3] = x16 +} + +// Opp negates a field element in the Montgomery domain. 
+// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// +// Postconditions: +// +// eval (from_montgomery out1) mod m = -eval (from_montgomery arg1) mod m +// 0 ≤ eval out1 < m +func Opp(out1 *MontgomeryDomainFieldElement, arg1 *MontgomeryDomainFieldElement) { + var x1 uint64 + var x2 uint64 + x1, x2 = bits.Sub64(uint64(0x0), arg1[0], uint64(0x0)) + var x3 uint64 + var x4 uint64 + x3, x4 = bits.Sub64(uint64(0x0), arg1[1], uint64(uint1(x2))) + var x5 uint64 + var x6 uint64 + x5, x6 = bits.Sub64(uint64(0x0), arg1[2], uint64(uint1(x4))) + var x7 uint64 + var x8 uint64 + x7, x8 = bits.Sub64(uint64(0x0), arg1[3], uint64(uint1(x6))) + var x9 uint64 + cmovznzU64(&x9, uint1(x8), uint64(0x0), 0xffffffffffffffff) + var x10 uint64 + var x11 uint64 + x10, x11 = bits.Add64(x1, x9, uint64(0x0)) + var x12 uint64 + var x13 uint64 + x12, x13 = bits.Add64(x3, (x9 & 0xffffffff), uint64(uint1(x11))) + var x14 uint64 + var x15 uint64 + x14, x15 = bits.Add64(x5, uint64(0x0), uint64(uint1(x13))) + var x16 uint64 + x16, _ = bits.Add64(x7, (x9 & 0xffffffff00000001), uint64(uint1(x15))) + out1[0] = x10 + out1[1] = x12 + out1[2] = x14 + out1[3] = x16 +} + +// FromMontgomery translates a field element out of the Montgomery domain. +// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// +// Postconditions: +// +// eval out1 mod m = (eval arg1 * ((2^64)⁻¹ mod m)^4) mod m +// 0 ≤ eval out1 < m +func FromMontgomery(out1 *NonMontgomeryDomainFieldElement, arg1 *MontgomeryDomainFieldElement) { + x1 := arg1[0] + var x2 uint64 + var x3 uint64 + x3, x2 = bits.Mul64(x1, 0xffffffff00000001) + var x4 uint64 + var x5 uint64 + x5, x4 = bits.Mul64(x1, 0xffffffff) + var x6 uint64 + var x7 uint64 + x7, x6 = bits.Mul64(x1, 0xffffffffffffffff) + var x8 uint64 + var x9 uint64 + x8, x9 = bits.Add64(x7, x4, uint64(0x0)) + var x11 uint64 + _, x11 = bits.Add64(x1, x6, uint64(0x0)) + var x12 uint64 + var x13 uint64 + x12, x13 = bits.Add64(uint64(0x0), x8, uint64(uint1(x11))) + var x14 uint64 + var x15 uint64 + x14, x15 = bits.Add64(x12, arg1[1], uint64(0x0)) + var x16 uint64 + var x17 uint64 + x17, x16 = bits.Mul64(x14, 0xffffffff00000001) + var x18 uint64 + var x19 uint64 + x19, x18 = bits.Mul64(x14, 0xffffffff) + var x20 uint64 + var x21 uint64 + x21, x20 = bits.Mul64(x14, 0xffffffffffffffff) + var x22 uint64 + var x23 uint64 + x22, x23 = bits.Add64(x21, x18, uint64(0x0)) + var x25 uint64 + _, x25 = bits.Add64(x14, x20, uint64(0x0)) + var x26 uint64 + var x27 uint64 + x26, x27 = bits.Add64((uint64(uint1(x15)) + (uint64(uint1(x13)) + (uint64(uint1(x9)) + x5))), x22, uint64(uint1(x25))) + var x28 uint64 + var x29 uint64 + x28, x29 = bits.Add64(x2, (uint64(uint1(x23)) + x19), uint64(uint1(x27))) + var x30 uint64 + var x31 uint64 + x30, x31 = bits.Add64(x3, x16, uint64(uint1(x29))) + var x32 uint64 + var x33 uint64 + x32, x33 = bits.Add64(x26, arg1[2], uint64(0x0)) + var x34 uint64 + var x35 uint64 + x34, x35 = bits.Add64(x28, uint64(0x0), uint64(uint1(x33))) + var x36 uint64 + var x37 uint64 + x36, x37 = bits.Add64(x30, uint64(0x0), uint64(uint1(x35))) + var x38 uint64 + var x39 uint64 + x39, x38 = bits.Mul64(x32, 0xffffffff00000001) + var x40 uint64 + var x41 uint64 + x41, x40 = bits.Mul64(x32, 0xffffffff) + var x42 uint64 + var x43 uint64 + x43, x42 = bits.Mul64(x32, 0xffffffffffffffff) + var x44 uint64 + var x45 uint64 + x44, x45 = bits.Add64(x43, x40, uint64(0x0)) + var x47 uint64 + _, x47 = bits.Add64(x32, x42, uint64(0x0)) + var x48 uint64 + var x49 uint64 + x48, x49 = bits.Add64(x34, x44, uint64(uint1(x47))) + var x50 uint64 + var x51 uint64 
+ x50, x51 = bits.Add64(x36, (uint64(uint1(x45)) + x41), uint64(uint1(x49))) + var x52 uint64 + var x53 uint64 + x52, x53 = bits.Add64((uint64(uint1(x37)) + (uint64(uint1(x31)) + x17)), x38, uint64(uint1(x51))) + var x54 uint64 + var x55 uint64 + x54, x55 = bits.Add64(x48, arg1[3], uint64(0x0)) + var x56 uint64 + var x57 uint64 + x56, x57 = bits.Add64(x50, uint64(0x0), uint64(uint1(x55))) + var x58 uint64 + var x59 uint64 + x58, x59 = bits.Add64(x52, uint64(0x0), uint64(uint1(x57))) + var x60 uint64 + var x61 uint64 + x61, x60 = bits.Mul64(x54, 0xffffffff00000001) + var x62 uint64 + var x63 uint64 + x63, x62 = bits.Mul64(x54, 0xffffffff) + var x64 uint64 + var x65 uint64 + x65, x64 = bits.Mul64(x54, 0xffffffffffffffff) + var x66 uint64 + var x67 uint64 + x66, x67 = bits.Add64(x65, x62, uint64(0x0)) + var x69 uint64 + _, x69 = bits.Add64(x54, x64, uint64(0x0)) + var x70 uint64 + var x71 uint64 + x70, x71 = bits.Add64(x56, x66, uint64(uint1(x69))) + var x72 uint64 + var x73 uint64 + x72, x73 = bits.Add64(x58, (uint64(uint1(x67)) + x63), uint64(uint1(x71))) + var x74 uint64 + var x75 uint64 + x74, x75 = bits.Add64((uint64(uint1(x59)) + (uint64(uint1(x53)) + x39)), x60, uint64(uint1(x73))) + x76 := (uint64(uint1(x75)) + x61) + var x77 uint64 + var x78 uint64 + x77, x78 = bits.Sub64(x70, 0xffffffffffffffff, uint64(0x0)) + var x79 uint64 + var x80 uint64 + x79, x80 = bits.Sub64(x72, 0xffffffff, uint64(uint1(x78))) + var x81 uint64 + var x82 uint64 + x81, x82 = bits.Sub64(x74, uint64(0x0), uint64(uint1(x80))) + var x83 uint64 + var x84 uint64 + x83, x84 = bits.Sub64(x76, 0xffffffff00000001, uint64(uint1(x82))) + var x86 uint64 + _, x86 = bits.Sub64(uint64(0x0), uint64(0x0), uint64(uint1(x84))) + var x87 uint64 + cmovznzU64(&x87, uint1(x86), x77, x70) + var x88 uint64 + cmovznzU64(&x88, uint1(x86), x79, x72) + var x89 uint64 + cmovznzU64(&x89, uint1(x86), x81, x74) + var x90 uint64 + cmovznzU64(&x90, uint1(x86), x83, x76) + out1[0] = x87 + out1[1] = x88 + out1[2] = x89 + out1[3] = x90 +} + +// ToMontgomery translates a field element into the Montgomery domain. 
+// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// +// Postconditions: +// +// eval (from_montgomery out1) mod m = eval arg1 mod m +// 0 ≤ eval out1 < m +func ToMontgomery(out1 *MontgomeryDomainFieldElement, arg1 *NonMontgomeryDomainFieldElement) { + x1 := arg1[1] + x2 := arg1[2] + x3 := arg1[3] + x4 := arg1[0] + var x5 uint64 + var x6 uint64 + x6, x5 = bits.Mul64(x4, 0x4fffffffd) + var x7 uint64 + var x8 uint64 + x8, x7 = bits.Mul64(x4, 0xfffffffffffffffe) + var x9 uint64 + var x10 uint64 + x10, x9 = bits.Mul64(x4, 0xfffffffbffffffff) + var x11 uint64 + var x12 uint64 + x12, x11 = bits.Mul64(x4, 0x3) + var x13 uint64 + var x14 uint64 + x13, x14 = bits.Add64(x12, x9, uint64(0x0)) + var x15 uint64 + var x16 uint64 + x15, x16 = bits.Add64(x10, x7, uint64(uint1(x14))) + var x17 uint64 + var x18 uint64 + x17, x18 = bits.Add64(x8, x5, uint64(uint1(x16))) + var x19 uint64 + var x20 uint64 + x20, x19 = bits.Mul64(x11, 0xffffffff00000001) + var x21 uint64 + var x22 uint64 + x22, x21 = bits.Mul64(x11, 0xffffffff) + var x23 uint64 + var x24 uint64 + x24, x23 = bits.Mul64(x11, 0xffffffffffffffff) + var x25 uint64 + var x26 uint64 + x25, x26 = bits.Add64(x24, x21, uint64(0x0)) + var x28 uint64 + _, x28 = bits.Add64(x11, x23, uint64(0x0)) + var x29 uint64 + var x30 uint64 + x29, x30 = bits.Add64(x13, x25, uint64(uint1(x28))) + var x31 uint64 + var x32 uint64 + x31, x32 = bits.Add64(x15, (uint64(uint1(x26)) + x22), uint64(uint1(x30))) + var x33 uint64 + var x34 uint64 + x33, x34 = bits.Add64(x17, x19, uint64(uint1(x32))) + var x35 uint64 + var x36 uint64 + x35, x36 = bits.Add64((uint64(uint1(x18)) + x6), x20, uint64(uint1(x34))) + var x37 uint64 + var x38 uint64 + x38, x37 = bits.Mul64(x1, 0x4fffffffd) + var x39 uint64 + var x40 uint64 + x40, x39 = bits.Mul64(x1, 0xfffffffffffffffe) + var x41 uint64 + var x42 uint64 + x42, x41 = bits.Mul64(x1, 0xfffffffbffffffff) + var x43 uint64 + var x44 uint64 + x44, x43 = bits.Mul64(x1, 0x3) + var x45 uint64 + var x46 uint64 + x45, x46 = bits.Add64(x44, x41, uint64(0x0)) + var x47 uint64 + var x48 uint64 + x47, x48 = bits.Add64(x42, x39, uint64(uint1(x46))) + var x49 uint64 + var x50 uint64 + x49, x50 = bits.Add64(x40, x37, uint64(uint1(x48))) + var x51 uint64 + var x52 uint64 + x51, x52 = bits.Add64(x29, x43, uint64(0x0)) + var x53 uint64 + var x54 uint64 + x53, x54 = bits.Add64(x31, x45, uint64(uint1(x52))) + var x55 uint64 + var x56 uint64 + x55, x56 = bits.Add64(x33, x47, uint64(uint1(x54))) + var x57 uint64 + var x58 uint64 + x57, x58 = bits.Add64(x35, x49, uint64(uint1(x56))) + var x59 uint64 + var x60 uint64 + x60, x59 = bits.Mul64(x51, 0xffffffff00000001) + var x61 uint64 + var x62 uint64 + x62, x61 = bits.Mul64(x51, 0xffffffff) + var x63 uint64 + var x64 uint64 + x64, x63 = bits.Mul64(x51, 0xffffffffffffffff) + var x65 uint64 + var x66 uint64 + x65, x66 = bits.Add64(x64, x61, uint64(0x0)) + var x68 uint64 + _, x68 = bits.Add64(x51, x63, uint64(0x0)) + var x69 uint64 + var x70 uint64 + x69, x70 = bits.Add64(x53, x65, uint64(uint1(x68))) + var x71 uint64 + var x72 uint64 + x71, x72 = bits.Add64(x55, (uint64(uint1(x66)) + x62), uint64(uint1(x70))) + var x73 uint64 + var x74 uint64 + x73, x74 = bits.Add64(x57, x59, uint64(uint1(x72))) + var x75 uint64 + var x76 uint64 + x75, x76 = bits.Add64(((uint64(uint1(x58)) + uint64(uint1(x36))) + (uint64(uint1(x50)) + x38)), x60, uint64(uint1(x74))) + var x77 uint64 + var x78 uint64 + x78, x77 = bits.Mul64(x2, 0x4fffffffd) + var x79 uint64 + var x80 uint64 + x80, x79 = bits.Mul64(x2, 0xfffffffffffffffe) + var x81 uint64 
+ var x82 uint64 + x82, x81 = bits.Mul64(x2, 0xfffffffbffffffff) + var x83 uint64 + var x84 uint64 + x84, x83 = bits.Mul64(x2, 0x3) + var x85 uint64 + var x86 uint64 + x85, x86 = bits.Add64(x84, x81, uint64(0x0)) + var x87 uint64 + var x88 uint64 + x87, x88 = bits.Add64(x82, x79, uint64(uint1(x86))) + var x89 uint64 + var x90 uint64 + x89, x90 = bits.Add64(x80, x77, uint64(uint1(x88))) + var x91 uint64 + var x92 uint64 + x91, x92 = bits.Add64(x69, x83, uint64(0x0)) + var x93 uint64 + var x94 uint64 + x93, x94 = bits.Add64(x71, x85, uint64(uint1(x92))) + var x95 uint64 + var x96 uint64 + x95, x96 = bits.Add64(x73, x87, uint64(uint1(x94))) + var x97 uint64 + var x98 uint64 + x97, x98 = bits.Add64(x75, x89, uint64(uint1(x96))) + var x99 uint64 + var x100 uint64 + x100, x99 = bits.Mul64(x91, 0xffffffff00000001) + var x101 uint64 + var x102 uint64 + x102, x101 = bits.Mul64(x91, 0xffffffff) + var x103 uint64 + var x104 uint64 + x104, x103 = bits.Mul64(x91, 0xffffffffffffffff) + var x105 uint64 + var x106 uint64 + x105, x106 = bits.Add64(x104, x101, uint64(0x0)) + var x108 uint64 + _, x108 = bits.Add64(x91, x103, uint64(0x0)) + var x109 uint64 + var x110 uint64 + x109, x110 = bits.Add64(x93, x105, uint64(uint1(x108))) + var x111 uint64 + var x112 uint64 + x111, x112 = bits.Add64(x95, (uint64(uint1(x106)) + x102), uint64(uint1(x110))) + var x113 uint64 + var x114 uint64 + x113, x114 = bits.Add64(x97, x99, uint64(uint1(x112))) + var x115 uint64 + var x116 uint64 + x115, x116 = bits.Add64(((uint64(uint1(x98)) + uint64(uint1(x76))) + (uint64(uint1(x90)) + x78)), x100, uint64(uint1(x114))) + var x117 uint64 + var x118 uint64 + x118, x117 = bits.Mul64(x3, 0x4fffffffd) + var x119 uint64 + var x120 uint64 + x120, x119 = bits.Mul64(x3, 0xfffffffffffffffe) + var x121 uint64 + var x122 uint64 + x122, x121 = bits.Mul64(x3, 0xfffffffbffffffff) + var x123 uint64 + var x124 uint64 + x124, x123 = bits.Mul64(x3, 0x3) + var x125 uint64 + var x126 uint64 + x125, x126 = bits.Add64(x124, x121, uint64(0x0)) + var x127 uint64 + var x128 uint64 + x127, x128 = bits.Add64(x122, x119, uint64(uint1(x126))) + var x129 uint64 + var x130 uint64 + x129, x130 = bits.Add64(x120, x117, uint64(uint1(x128))) + var x131 uint64 + var x132 uint64 + x131, x132 = bits.Add64(x109, x123, uint64(0x0)) + var x133 uint64 + var x134 uint64 + x133, x134 = bits.Add64(x111, x125, uint64(uint1(x132))) + var x135 uint64 + var x136 uint64 + x135, x136 = bits.Add64(x113, x127, uint64(uint1(x134))) + var x137 uint64 + var x138 uint64 + x137, x138 = bits.Add64(x115, x129, uint64(uint1(x136))) + var x139 uint64 + var x140 uint64 + x140, x139 = bits.Mul64(x131, 0xffffffff00000001) + var x141 uint64 + var x142 uint64 + x142, x141 = bits.Mul64(x131, 0xffffffff) + var x143 uint64 + var x144 uint64 + x144, x143 = bits.Mul64(x131, 0xffffffffffffffff) + var x145 uint64 + var x146 uint64 + x145, x146 = bits.Add64(x144, x141, uint64(0x0)) + var x148 uint64 + _, x148 = bits.Add64(x131, x143, uint64(0x0)) + var x149 uint64 + var x150 uint64 + x149, x150 = bits.Add64(x133, x145, uint64(uint1(x148))) + var x151 uint64 + var x152 uint64 + x151, x152 = bits.Add64(x135, (uint64(uint1(x146)) + x142), uint64(uint1(x150))) + var x153 uint64 + var x154 uint64 + x153, x154 = bits.Add64(x137, x139, uint64(uint1(x152))) + var x155 uint64 + var x156 uint64 + x155, x156 = bits.Add64(((uint64(uint1(x138)) + uint64(uint1(x116))) + (uint64(uint1(x130)) + x118)), x140, uint64(uint1(x154))) + var x157 uint64 + var x158 uint64 + x157, x158 = bits.Sub64(x149, 0xffffffffffffffff, 
uint64(0x0)) + var x159 uint64 + var x160 uint64 + x159, x160 = bits.Sub64(x151, 0xffffffff, uint64(uint1(x158))) + var x161 uint64 + var x162 uint64 + x161, x162 = bits.Sub64(x153, uint64(0x0), uint64(uint1(x160))) + var x163 uint64 + var x164 uint64 + x163, x164 = bits.Sub64(x155, 0xffffffff00000001, uint64(uint1(x162))) + var x166 uint64 + _, x166 = bits.Sub64(uint64(uint1(x156)), uint64(0x0), uint64(uint1(x164))) + var x167 uint64 + cmovznzU64(&x167, uint1(x166), x157, x149) + var x168 uint64 + cmovznzU64(&x168, uint1(x166), x159, x151) + var x169 uint64 + cmovznzU64(&x169, uint1(x166), x161, x153) + var x170 uint64 + cmovznzU64(&x170, uint1(x166), x163, x155) + out1[0] = x167 + out1[1] = x168 + out1[2] = x169 + out1[3] = x170 +} + +// Nonzero outputs a single non-zero word if the input is non-zero and zero otherwise. +// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// +// Postconditions: +// +// out1 = 0 ↔ eval (from_montgomery arg1) mod m = 0 +// +// Input Bounds: +// +// arg1: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +// +// Output Bounds: +// +// out1: [0x0 ~> 0xffffffffffffffff] +func Nonzero(out1 *uint64, arg1 *[4]uint64) { + x1 := (arg1[0] | (arg1[1] | (arg1[2] | arg1[3]))) + *out1 = x1 +} + +// Selectznz is a multi-limb conditional select. +// +// Postconditions: +// +// eval out1 = (if arg1 = 0 then eval arg2 else eval arg3) +// +// Input Bounds: +// +// arg1: [0x0 ~> 0x1] +// arg2: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +// arg3: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +// +// Output Bounds: +// +// out1: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +func Selectznz(out1 *[4]uint64, arg1 uint1, arg2 *[4]uint64, arg3 *[4]uint64) { + var x1 uint64 + cmovznzU64(&x1, arg1, arg2[0], arg3[0]) + var x2 uint64 + cmovznzU64(&x2, arg1, arg2[1], arg3[1]) + var x3 uint64 + cmovznzU64(&x3, arg1, arg2[2], arg3[2]) + var x4 uint64 + cmovznzU64(&x4, arg1, arg2[3], arg3[3]) + out1[0] = x1 + out1[1] = x2 + out1[2] = x3 + out1[3] = x4 +} + +// ToBytes serializes a field element NOT in the Montgomery domain to bytes in little-endian order. 
+// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// +// Postconditions: +// +// out1 = map (λ x, ⌊((eval arg1 mod m) mod 2^(8 * (x + 1))) / 2^(8 * x)⌋) [0..31] +// +// Input Bounds: +// +// arg1: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +// +// Output Bounds: +// +// out1: [[0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff]] +func ToBytes(out1 *[32]uint8, arg1 *[4]uint64) { + x1 := arg1[3] + x2 := arg1[2] + x3 := arg1[1] + x4 := arg1[0] + x5 := (uint8(x4) & 0xff) + x6 := (x4 >> 8) + x7 := (uint8(x6) & 0xff) + x8 := (x6 >> 8) + x9 := (uint8(x8) & 0xff) + x10 := (x8 >> 8) + x11 := (uint8(x10) & 0xff) + x12 := (x10 >> 8) + x13 := (uint8(x12) & 0xff) + x14 := (x12 >> 8) + x15 := (uint8(x14) & 0xff) + x16 := (x14 >> 8) + x17 := (uint8(x16) & 0xff) + x18 := uint8((x16 >> 8)) + x19 := (uint8(x3) & 0xff) + x20 := (x3 >> 8) + x21 := (uint8(x20) & 0xff) + x22 := (x20 >> 8) + x23 := (uint8(x22) & 0xff) + x24 := (x22 >> 8) + x25 := (uint8(x24) & 0xff) + x26 := (x24 >> 8) + x27 := (uint8(x26) & 0xff) + x28 := (x26 >> 8) + x29 := (uint8(x28) & 0xff) + x30 := (x28 >> 8) + x31 := (uint8(x30) & 0xff) + x32 := uint8((x30 >> 8)) + x33 := (uint8(x2) & 0xff) + x34 := (x2 >> 8) + x35 := (uint8(x34) & 0xff) + x36 := (x34 >> 8) + x37 := (uint8(x36) & 0xff) + x38 := (x36 >> 8) + x39 := (uint8(x38) & 0xff) + x40 := (x38 >> 8) + x41 := (uint8(x40) & 0xff) + x42 := (x40 >> 8) + x43 := (uint8(x42) & 0xff) + x44 := (x42 >> 8) + x45 := (uint8(x44) & 0xff) + x46 := uint8((x44 >> 8)) + x47 := (uint8(x1) & 0xff) + x48 := (x1 >> 8) + x49 := (uint8(x48) & 0xff) + x50 := (x48 >> 8) + x51 := (uint8(x50) & 0xff) + x52 := (x50 >> 8) + x53 := (uint8(x52) & 0xff) + x54 := (x52 >> 8) + x55 := (uint8(x54) & 0xff) + x56 := (x54 >> 8) + x57 := (uint8(x56) & 0xff) + x58 := (x56 >> 8) + x59 := (uint8(x58) & 0xff) + x60 := uint8((x58 >> 8)) + out1[0] = x5 + out1[1] = x7 + out1[2] = x9 + out1[3] = x11 + out1[4] = x13 + out1[5] = x15 + out1[6] = x17 + out1[7] = x18 + out1[8] = x19 + out1[9] = x21 + out1[10] = x23 + out1[11] = x25 + out1[12] = x27 + out1[13] = x29 + out1[14] = x31 + out1[15] = x32 + out1[16] = x33 + out1[17] = x35 + out1[18] = x37 + out1[19] = x39 + out1[20] = x41 + out1[21] = x43 + out1[22] = x45 + out1[23] = x46 + out1[24] = x47 + out1[25] = x49 + out1[26] = x51 + out1[27] = x53 + out1[28] = x55 + out1[29] = x57 + out1[30] = x59 + out1[31] = x60 +} + +// FromBytes deserializes a field element NOT in the Montgomery domain from bytes in little-endian order. 
+// +// Preconditions: +// +// 0 ≤ bytes_eval arg1 < m +// +// Postconditions: +// +// eval out1 mod m = bytes_eval arg1 mod m +// 0 ≤ eval out1 < m +// +// Input Bounds: +// +// arg1: [[0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff]] +// +// Output Bounds: +// +// out1: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +func FromBytes(out1 *[4]uint64, arg1 *[32]uint8) { + x1 := (uint64(arg1[31]) << 56) + x2 := (uint64(arg1[30]) << 48) + x3 := (uint64(arg1[29]) << 40) + x4 := (uint64(arg1[28]) << 32) + x5 := (uint64(arg1[27]) << 24) + x6 := (uint64(arg1[26]) << 16) + x7 := (uint64(arg1[25]) << 8) + x8 := arg1[24] + x9 := (uint64(arg1[23]) << 56) + x10 := (uint64(arg1[22]) << 48) + x11 := (uint64(arg1[21]) << 40) + x12 := (uint64(arg1[20]) << 32) + x13 := (uint64(arg1[19]) << 24) + x14 := (uint64(arg1[18]) << 16) + x15 := (uint64(arg1[17]) << 8) + x16 := arg1[16] + x17 := (uint64(arg1[15]) << 56) + x18 := (uint64(arg1[14]) << 48) + x19 := (uint64(arg1[13]) << 40) + x20 := (uint64(arg1[12]) << 32) + x21 := (uint64(arg1[11]) << 24) + x22 := (uint64(arg1[10]) << 16) + x23 := (uint64(arg1[9]) << 8) + x24 := arg1[8] + x25 := (uint64(arg1[7]) << 56) + x26 := (uint64(arg1[6]) << 48) + x27 := (uint64(arg1[5]) << 40) + x28 := (uint64(arg1[4]) << 32) + x29 := (uint64(arg1[3]) << 24) + x30 := (uint64(arg1[2]) << 16) + x31 := (uint64(arg1[1]) << 8) + x32 := arg1[0] + x33 := (x31 + uint64(x32)) + x34 := (x30 + x33) + x35 := (x29 + x34) + x36 := (x28 + x35) + x37 := (x27 + x36) + x38 := (x26 + x37) + x39 := (x25 + x38) + x40 := (x23 + uint64(x24)) + x41 := (x22 + x40) + x42 := (x21 + x41) + x43 := (x20 + x42) + x44 := (x19 + x43) + x45 := (x18 + x44) + x46 := (x17 + x45) + x47 := (x15 + uint64(x16)) + x48 := (x14 + x47) + x49 := (x13 + x48) + x50 := (x12 + x49) + x51 := (x11 + x50) + x52 := (x10 + x51) + x53 := (x9 + x52) + x54 := (x7 + uint64(x8)) + x55 := (x6 + x54) + x56 := (x5 + x55) + x57 := (x4 + x56) + x58 := (x3 + x57) + x59 := (x2 + x58) + x60 := (x1 + x59) + out1[0] = x39 + out1[1] = x46 + out1[2] = x53 + out1[3] = x60 +} + +// SetOne returns the field element one in the Montgomery domain. +// +// Postconditions: +// +// eval (from_montgomery out1) mod m = 1 mod m +// 0 ≤ eval out1 < m +func SetOne(out1 *MontgomeryDomainFieldElement) { + out1[0] = uint64(0x1) + out1[1] = 0xffffffff00000000 + out1[2] = 0xffffffffffffffff + out1[3] = 0xfffffffe +} diff --git a/core/curves/native/p256/fq/fq.go b/core/curves/native/p256/fq/fq.go new file mode 100644 index 0000000..7d95e94 --- /dev/null +++ b/core/curves/native/p256/fq/fq.go @@ -0,0 +1,510 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package fq + +import ( + "math/big" + "sync" + + "github.com/sonr-io/sonr/crypto/core/curves/native" +) + +var ( + p256FqInitonce sync.Once + p256FqParams native.FieldParams +) + +func P256FqNew() *native.Field { + return &native.Field{ + Value: [native.FieldLimbs]uint64{}, + Params: getP256FqParams(), + Arithmetic: p256FqArithmetic{}, + } +} + +func p256FqParamsInit() { + // See FIPS 186-3, section D.2.3 + p256FqParams = native.FieldParams{ + R: [native.FieldLimbs]uint64{ + 0x0c46353d039cdaaf, + 0x4319055258e8617b, + 0x0000000000000000, + 0x00000000ffffffff, + }, + R2: [native.FieldLimbs]uint64{ + 0x83244c95be79eea2, + 0x4699799c49bd6fa6, + 0x2845b2392b6bec59, + 0x66e12d94f3d95620, + }, + R3: [native.FieldLimbs]uint64{ + 0xac8ebec90b65a624, + 0x111f28ae0c0555c9, + 0x2543b9246ba5e93f, + 0x503a54e76407be65, + }, + Modulus: [native.FieldLimbs]uint64{ + 0xf3b9cac2fc632551, + 0xbce6faada7179e84, + 0xffffffffffffffff, + 0xffffffff00000000, + }, + BiModulus: new(big.Int).SetBytes([]byte{ + 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xbc, 0xe6, 0xfa, 0xad, 0xa7, 0x17, 0x9e, 0x84, 0xf3, 0xb9, 0xca, 0xc2, 0xfc, 0x63, 0x25, 0x51, + }), + } +} + +func getP256FqParams() *native.FieldParams { + p256FqInitonce.Do(p256FqParamsInit) + return &p256FqParams +} + +// p256FqArithmetic is a struct with all the methods needed for working +// in mod q +type p256FqArithmetic struct{} + +// ToMontgomery converts this field to montgomery form +func (f p256FqArithmetic) ToMontgomery(out, arg *[native.FieldLimbs]uint64) { + ToMontgomery((*MontgomeryDomainFieldElement)(out), (*NonMontgomeryDomainFieldElement)(arg)) +} + +// FromMontgomery converts this field from montgomery form +func (f p256FqArithmetic) FromMontgomery(out, arg *[native.FieldLimbs]uint64) { + FromMontgomery((*NonMontgomeryDomainFieldElement)(out), (*MontgomeryDomainFieldElement)(arg)) +} + +// Neg performs modular negation +func (f p256FqArithmetic) Neg(out, arg *[native.FieldLimbs]uint64) { + Opp((*MontgomeryDomainFieldElement)(out), (*MontgomeryDomainFieldElement)(arg)) +} + +// Square performs modular square +func (f p256FqArithmetic) Square(out, arg *[native.FieldLimbs]uint64) { + Square((*MontgomeryDomainFieldElement)(out), (*MontgomeryDomainFieldElement)(arg)) +} + +// Mul performs modular multiplication +func (f p256FqArithmetic) Mul(out, arg1, arg2 *[native.FieldLimbs]uint64) { + Mul( + (*MontgomeryDomainFieldElement)(out), + (*MontgomeryDomainFieldElement)(arg1), + (*MontgomeryDomainFieldElement)(arg2), + ) +} + +// Add performs modular addition +func (f p256FqArithmetic) Add(out, arg1, arg2 *[native.FieldLimbs]uint64) { + Add( + (*MontgomeryDomainFieldElement)(out), + (*MontgomeryDomainFieldElement)(arg1), + (*MontgomeryDomainFieldElement)(arg2), + ) +} + +// Sub performs modular subtraction +func (f p256FqArithmetic) Sub(out, arg1, arg2 *[native.FieldLimbs]uint64) { + Sub( + (*MontgomeryDomainFieldElement)(out), + (*MontgomeryDomainFieldElement)(arg1), + (*MontgomeryDomainFieldElement)(arg2), + ) +} + +// Sqrt performs modular square root +func (f p256FqArithmetic) Sqrt(wasSquare *int, out, arg *[native.FieldLimbs]uint64) { + // See sqrt_ts_ct at + // https://datatracker.ietf.org/doc/html/draft-irtf-cfrg-hash-to-curve-11#appendix-I.4 + // c1 := s + // c2 := (q - 1) / (2^c1) + // c2 := [4]uint64{ + // 0x4f3b9cac2fc63255, + // 0xfbce6faada7179e8, + // 0x0fffffffffffffff, + // 0x0ffffffff0000000, + // } + // c3 := (c2 - 1) / 2 + c3 := 
[native.FieldLimbs]uint64{ + 0x279dce5617e3192a, + 0xfde737d56d38bcf4, + 0x07ffffffffffffff, + 0x07fffffff8000000, + } + // c4 := generator + // c5 := new(Fq).pow(generator, c2) + c5 := [native.FieldLimbs]uint64{ + 0x1015708f7e368fe1, + 0x31c6c5456ecc4511, + 0x5281fe8998a19ea1, + 0x0279089e10c63fe8, + } + var z, t, b, c, tv [native.FieldLimbs]uint64 + + native.Pow(&z, arg, &c3, getP256FqParams(), f) + Square((*MontgomeryDomainFieldElement)(&t), (*MontgomeryDomainFieldElement)(&z)) + Mul( + (*MontgomeryDomainFieldElement)(&t), + (*MontgomeryDomainFieldElement)(&t), + (*MontgomeryDomainFieldElement)(arg), + ) + Mul( + (*MontgomeryDomainFieldElement)(&z), + (*MontgomeryDomainFieldElement)(&z), + (*MontgomeryDomainFieldElement)(arg), + ) + + copy(b[:], t[:]) + copy(c[:], c5[:]) + + for i := s; i >= 2; i-- { + for j := 1; j <= i-2; j++ { + Square((*MontgomeryDomainFieldElement)(&b), (*MontgomeryDomainFieldElement)(&b)) + } + // if b == 1 flag = 0 else flag = 1 + flag := -(&native.Field{ + Value: b, + Params: getP256FqParams(), + Arithmetic: f, + }).IsOne() + 1 + Mul( + (*MontgomeryDomainFieldElement)(&tv), + (*MontgomeryDomainFieldElement)(&z), + (*MontgomeryDomainFieldElement)(&c), + ) + Selectznz(&z, uint1(flag), &z, &tv) + Square((*MontgomeryDomainFieldElement)(&c), (*MontgomeryDomainFieldElement)(&c)) + Mul( + (*MontgomeryDomainFieldElement)(&tv), + (*MontgomeryDomainFieldElement)(&t), + (*MontgomeryDomainFieldElement)(&c), + ) + Selectznz(&t, uint1(flag), &t, &tv) + copy(b[:], t[:]) + } + Square((*MontgomeryDomainFieldElement)(&c), (*MontgomeryDomainFieldElement)(&z)) + *wasSquare = (&native.Field{ + Value: c, + Params: getP256FqParams(), + Arithmetic: f, + }).Equal(&native.Field{ + Value: *arg, + Params: getP256FqParams(), + Arithmetic: f, + }) + Selectznz(out, uint1(*wasSquare), out, &z) +} + +// Invert performs modular inverse +func (f p256FqArithmetic) Invert(wasInverted *int, out, arg *[native.FieldLimbs]uint64) { + // Using an addition chain from + // https://briansmith.org/ecc-inversion-addition-chains-01#p256_field_inversion + var x1, x10, x11, x101, x111, x1010, x1111, x10101, x101010, x101111 [native.FieldLimbs]uint64 + var x6, x8, x16, x32, tmp [native.FieldLimbs]uint64 + + copy(x1[:], arg[:]) + native.Pow2k(&x10, arg, 1, f) + Mul( + (*MontgomeryDomainFieldElement)(&x11), + (*MontgomeryDomainFieldElement)(&x10), + (*MontgomeryDomainFieldElement)(&x1), + ) + Mul( + (*MontgomeryDomainFieldElement)(&x101), + (*MontgomeryDomainFieldElement)(&x10), + (*MontgomeryDomainFieldElement)(&x11), + ) + Mul( + (*MontgomeryDomainFieldElement)(&x111), + (*MontgomeryDomainFieldElement)(&x10), + (*MontgomeryDomainFieldElement)(&x101), + ) + native.Pow2k(&x1010, &x101, 1, f) + Mul( + (*MontgomeryDomainFieldElement)(&x1111), + (*MontgomeryDomainFieldElement)(&x101), + (*MontgomeryDomainFieldElement)(&x1010), + ) + native.Pow2k(&x10101, &x1010, 1, f) + Mul( + (*MontgomeryDomainFieldElement)(&x10101), + (*MontgomeryDomainFieldElement)(&x10101), + (*MontgomeryDomainFieldElement)(&x1), + ) + native.Pow2k(&x101010, &x10101, 1, f) + Mul( + (*MontgomeryDomainFieldElement)(&x101111), + (*MontgomeryDomainFieldElement)(&x101), + (*MontgomeryDomainFieldElement)(&x101010), + ) + + Mul( + (*MontgomeryDomainFieldElement)(&x6), + (*MontgomeryDomainFieldElement)(&x10101), + (*MontgomeryDomainFieldElement)(&x101010), + ) + + native.Pow2k(&x8, &x6, 2, f) + Mul( + (*MontgomeryDomainFieldElement)(&x8), + (*MontgomeryDomainFieldElement)(&x8), + (*MontgomeryDomainFieldElement)(&x11), + ) + + native.Pow2k(&x16, &x8, 8, 
f) + Mul( + (*MontgomeryDomainFieldElement)(&x16), + (*MontgomeryDomainFieldElement)(&x16), + (*MontgomeryDomainFieldElement)(&x8), + ) + + native.Pow2k(&x32, &x16, 16, f) + Mul( + (*MontgomeryDomainFieldElement)(&x32), + (*MontgomeryDomainFieldElement)(&x32), + (*MontgomeryDomainFieldElement)(&x16), + ) + + native.Pow2k(&tmp, &x32, 64, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x32), + ) + + native.Pow2k(&tmp, &tmp, 32, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x32), + ) + + native.Pow2k(&tmp, &tmp, 6, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x101111), + ) + + native.Pow2k(&tmp, &tmp, 5, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x111), + ) + + native.Pow2k(&tmp, &tmp, 4, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x11), + ) + + native.Pow2k(&tmp, &tmp, 5, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x1111), + ) + + native.Pow2k(&tmp, &tmp, 5, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x10101), + ) + + native.Pow2k(&tmp, &tmp, 4, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x101), + ) + + native.Pow2k(&tmp, &tmp, 3, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x101), + ) + + native.Pow2k(&tmp, &tmp, 3, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x101), + ) + + native.Pow2k(&tmp, &tmp, 5, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x111), + ) + + native.Pow2k(&tmp, &tmp, 9, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x101111), + ) + + native.Pow2k(&tmp, &tmp, 6, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x1111), + ) + + native.Pow2k(&tmp, &tmp, 2, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x1), + ) + + native.Pow2k(&tmp, &tmp, 5, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x1), + ) + + native.Pow2k(&tmp, &tmp, 6, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x1111), + ) + + native.Pow2k(&tmp, &tmp, 5, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x111), + ) + + native.Pow2k(&tmp, &tmp, 4, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x111), + ) + + native.Pow2k(&tmp, &tmp, 5, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x111), + ) + + 
native.Pow2k(&tmp, &tmp, 5, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x101), + ) + + native.Pow2k(&tmp, &tmp, 3, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x11), + ) + + native.Pow2k(&tmp, &tmp, 10, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x101111), + ) + + native.Pow2k(&tmp, &tmp, 2, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x11), + ) + + native.Pow2k(&tmp, &tmp, 5, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x11), + ) + + native.Pow2k(&tmp, &tmp, 5, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x11), + ) + + native.Pow2k(&tmp, &tmp, 3, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x1), + ) + + native.Pow2k(&tmp, &tmp, 7, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x10101), + ) + + native.Pow2k(&tmp, &tmp, 6, f) + Mul( + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&tmp), + (*MontgomeryDomainFieldElement)(&x1111), + ) + + *wasInverted = (&native.Field{ + Value: *arg, + Params: getP256FqParams(), + Arithmetic: f, + }).IsNonZero() + Selectznz(out, uint1(*wasInverted), out, &tmp) +} + +// FromBytes converts a little endian byte array into a field element +func (f p256FqArithmetic) FromBytes(out *[native.FieldLimbs]uint64, arg *[native.FieldBytes]byte) { + FromBytes(out, arg) +} + +// ToBytes converts a field element to a little endian byte array +func (f p256FqArithmetic) ToBytes(out *[native.FieldBytes]byte, arg *[native.FieldLimbs]uint64) { + ToBytes(out, arg) +} + +// Selectznz performs conditional select. +// selects arg1 if choice == 0 and arg2 if choice == 1 +func (f p256FqArithmetic) Selectznz(out, arg1, arg2 *[native.FieldLimbs]uint64, choice int) { + Selectznz(out, uint1(choice), arg1, arg2) +} + +// generator = 7 mod q is a generator of the `q - 1` order multiplicative +// subgroup, or in other words a primitive element of the field. +// generator^t where t * 2^s + 1 = q +var generator = &[native.FieldLimbs]uint64{ + 0x55eb74ab1949fac9, + 0xd5af25406e5aaa5d, + 0x0000000000000001, + 0x00000006fffffff9, +} + +// s satisfies the equation 2^s * t = q - 1 with t odd. +var s = 4 + +// rootOfUnity +var rootOfUnity = &[native.FieldLimbs]uint64{ + 0x0592d7fbb41e6602, + 0x1546cad004378daf, + 0xba807ace842a3dfc, + 0xffc97f062a770992, +} diff --git a/core/curves/native/p256/fq/fq_test.go b/core/curves/native/p256/fq/fq_test.go new file mode 100644 index 0000000..4449ec8 --- /dev/null +++ b/core/curves/native/p256/fq/fq_test.go @@ -0,0 +1,330 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package fq + +import ( + crand "crypto/rand" + "math/big" + "math/rand" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core/curves/native" + "github.com/sonr-io/sonr/crypto/internal" +) + +func TestFqSetOne(t *testing.T) { + fq := P256FqNew().SetOne() + require.NotNil(t, fq) + require.Equal(t, fq.Value, getP256FqParams().R) +} + +func TestFqSetUint64(t *testing.T) { + act := P256FqNew().SetUint64(1 << 60) + require.NotNil(t, act) + // Remember it will be in montgomery form + require.Equal(t, act.Value[0], uint64(0xb3f3986dec632551)) +} + +func TestFqAdd(t *testing.T) { + lhs := P256FqNew().SetOne() + rhs := P256FqNew().SetOne() + exp := P256FqNew().SetUint64(2) + res := P256FqNew().Add(lhs, rhs) + require.NotNil(t, res) + require.Equal(t, 1, res.Equal(exp)) + + // Fuzz test + for i := 0; i < 25; i++ { + // Divide by 4 to prevent overflow false errors + l := rand.Uint64() >> 2 + r := rand.Uint64() >> 2 + e := l + r + lhs.SetUint64(l) + rhs.SetUint64(r) + exp.SetUint64(e) + + a := P256FqNew().Add(lhs, rhs) + require.NotNil(t, a) + require.Equal(t, exp, a) + } +} + +func TestFqSub(t *testing.T) { + lhs := P256FqNew().SetOne() + rhs := P256FqNew().SetOne() + exp := P256FqNew().SetZero() + res := P256FqNew().Sub(lhs, rhs) + require.NotNil(t, res) + require.Equal(t, 1, res.Equal(exp)) + + // Fuzz test + for i := 0; i < 25; i++ { + // Divide by 4 to prevent overflow false errors + l := rand.Uint64() >> 2 + r := rand.Uint64() >> 2 + if l < r { + l, r = r, l + } + e := l - r + lhs.SetUint64(l) + rhs.SetUint64(r) + exp.SetUint64(e) + + a := P256FqNew().Sub(lhs, rhs) + require.NotNil(t, a) + require.Equal(t, exp, a) + } +} + +func TestFqMul(t *testing.T) { + lhs := P256FqNew().SetOne() + rhs := P256FqNew().SetOne() + exp := P256FqNew().SetOne() + res := P256FqNew().Mul(lhs, rhs) + require.NotNil(t, res) + require.Equal(t, 1, res.Equal(exp)) + + // Fuzz test + for i := 0; i < 25; i++ { + // Divide by 4 to prevent overflow false errors + l := rand.Uint32() + r := rand.Uint32() + e := uint64(l) * uint64(r) + lhs.SetUint64(uint64(l)) + rhs.SetUint64(uint64(r)) + exp.SetUint64(e) + + a := P256FqNew().Mul(lhs, rhs) + require.NotNil(t, a) + require.Equal(t, exp, a) + } +} + +func TestFqDouble(t *testing.T) { + a := P256FqNew().SetUint64(2) + e := P256FqNew().SetUint64(4) + require.Equal(t, e, P256FqNew().Double(a)) + + for i := 0; i < 25; i++ { + tv := rand.Uint32() + ttv := uint64(tv) * 2 + a = P256FqNew().SetUint64(uint64(tv)) + e = P256FqNew().SetUint64(ttv) + require.Equal(t, e, P256FqNew().Double(a)) + } +} + +func TestFqSquare(t *testing.T) { + a := P256FqNew().SetUint64(4) + e := P256FqNew().SetUint64(16) + require.Equal(t, e, a.Square(a)) + + for i := 0; i < 25; i++ { + j := rand.Uint32() + exp := uint64(j) * uint64(j) + e.SetUint64(exp) + a.SetUint64(uint64(j)) + require.Equal(t, e, a.Square(a)) + } +} + +func TestFqNeg(t *testing.T) { + g := P256FqNew().SetRaw(generator) + a := P256FqNew().SetOne() + a.Neg(a) + e := P256FqNew().SetRaw(&[native.FieldLimbs]uint64{0xe7739585f8c64aa2, 0x79cdf55b4e2f3d09, 0xffffffffffffffff, 0xfffffffe00000001}) + require.Equal(t, e, a) + a.Neg(g) + e = P256FqNew().SetRaw(&[native.FieldLimbs]uint64{0x9dce5617e3192a88, 0xe737d56d38bcf427, 0xfffffffffffffffd, 0xfffffff800000007}) + require.Equal(t, e, a) +} + +func TestFqExp(t *testing.T) { + e := P256FqNew().SetUint64(8) + a := P256FqNew().SetUint64(2) + by := P256FqNew().SetUint64(3) + require.Equal(t, e, a.Exp(a, by)) +} + 
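Editorial note, not part of the committed diff: the tests above and below exercise the public scalar-field API introduced in fq.go — P256FqNew, SetUint64, Mul, Invert, SetOne, Equal — which the higher-level packages build on. A minimal usage sketch relying only on those calls as they appear in this package; the helper name demoFqInverse is hypothetical:

// demoFqInverse is a sketch: it checks that a nonzero scalar times its
// modular inverse equals one. native.Field keeps its Value in Montgomery
// form internally, so no explicit domain conversion is needed here.
func demoFqInverse() bool {
	a := P256FqNew().SetUint64(12345)
	aInv, ok := P256FqNew().Invert(a) // ok is false only when a is zero
	if !ok {
		return false
	}
	prod := P256FqNew().Mul(a, aInv)
	return prod.Equal(P256FqNew().SetOne()) == 1 // Equal returns 1 on equality
}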
+func TestFqSqrt(t *testing.T) { + t1 := P256FqNew().SetUint64(2) + t2 := P256FqNew().Neg(t1) + t3 := P256FqNew().Square(t1) + _, wasSquare := t3.Sqrt(t3) + + require.True(t, wasSquare) + require.Equal(t, 1, t1.Equal(t3)|t2.Equal(t3)) + t1.SetUint64(7) + _, wasSquare = t3.Sqrt(t1) + require.False(t, wasSquare) +} + +func TestFqInvert(t *testing.T) { + twoInv := P256FqNew().SetRaw(&[native.FieldLimbs]uint64{0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x8000000000000000}) + two := P256FqNew().SetUint64(2) + a, inverted := P256FqNew().Invert(two) + require.True(t, inverted) + require.Equal(t, a, twoInv) + + rootOfUnityInv := P256FqNew().SetRaw(&[native.FieldLimbs]uint64{0xcfbf53b9618acf96, 0xf17e5c39df7bd05b, 0xc7acb1f83e3ad9ad, 0x4659a42b394ff7df}) + rootOU := P256FqNew().SetRaw(rootOfUnity) + a, inverted = P256FqNew().Invert(rootOU) + require.True(t, inverted) + require.Equal(t, a, rootOfUnityInv) + + lhs := P256FqNew().SetUint64(9) + rhs := P256FqNew().SetUint64(3) + rhsInv, inverted := P256FqNew().Invert(rhs) + require.True(t, inverted) + require.Equal(t, rhs, P256FqNew().Mul(lhs, rhsInv)) + + rhs.SetZero() + _, inverted = P256FqNew().Invert(rhs) + require.False(t, inverted) +} + +func TestFqCMove(t *testing.T) { + t1 := P256FqNew().SetUint64(5) + t2 := P256FqNew().SetUint64(10) + require.Equal(t, t1, P256FqNew().CMove(t1, t2, 0)) + require.Equal(t, t2, P256FqNew().CMove(t1, t2, 1)) +} + +func TestFqBytes(t *testing.T) { + t1 := P256FqNew().SetUint64(99) + seq := t1.Bytes() + t2, err := P256FqNew().SetBytes(&seq) + require.NoError(t, err) + require.Equal(t, t1, t2) + + for i := 0; i < 25; i++ { + t1.SetUint64(rand.Uint64()) + seq = t1.Bytes() + _, err = t2.SetBytes(&seq) + require.NoError(t, err) + require.Equal(t, t1, t2) + } +} + +func TestFqCmp(t *testing.T) { + tests := []struct { + a *native.Field + b *native.Field + e int + }{ + { + a: P256FqNew().SetRaw(&[native.FieldLimbs]uint64{2731658267414164836, 14655288906067898431, 6537465423330262322, 8306191141697566219}), + b: P256FqNew().SetRaw(&[native.FieldLimbs]uint64{6472764012681988529, 10848812988401906064, 2961825807536828898, 4282183981941645679}), + e: 1, + }, + { + a: P256FqNew().SetRaw(&[native.FieldLimbs]uint64{8023004109510539223, 4652004072850285717, 1877219145646046927, 383214385093921911}), + b: P256FqNew().SetRaw(&[native.FieldLimbs]uint64{10099384440823804262, 16139476942229308465, 8636966320777393798, 5435928725024696785}), + e: -1, + }, + { + a: P256FqNew().SetRaw(&[native.FieldLimbs]uint64{3741840066202388211, 12165774400417314871, 16619312580230515379, 16195032234110087705}), + b: P256FqNew().SetRaw(&[native.FieldLimbs]uint64{3905865991286066744, 543690822309071825, 17963103015950210055, 3745476720756119742}), + e: 1, + }, + { + a: P256FqNew().SetRaw(&[native.FieldLimbs]uint64{16660853697936147788, 7799793619412111108, 13515141085171033220, 2641079731236069032}), + b: P256FqNew().SetRaw(&[native.FieldLimbs]uint64{17790588295388238399, 571847801379669440, 14537208974498222469, 12792570372087452754}), + e: -1, + }, + { + a: P256FqNew().SetRaw(&[native.FieldLimbs]uint64{3912839285384959186, 2701177075110484070, 6453856448115499033, 6475797457962597458}), + b: P256FqNew().SetRaw(&[native.FieldLimbs]uint64{1282566391665688512, 13503640416992806563, 2962240104675990153, 3374904770947067689}), + e: 1, + }, + { + a: P256FqNew().SetRaw(&[native.FieldLimbs]uint64{5716631803409360103, 7859567470082614154, 12747956220853330146, 18434584096087315020}), + b: 
P256FqNew().SetRaw(&[native.FieldLimbs]uint64{16317076441459028418, 12854146980376319601, 2258436689269031143, 9531877130792223752}), + e: 1, + }, + { + a: P256FqNew().SetRaw(&[native.FieldLimbs]uint64{17955191469941083403, 10350326247207200880, 17263512235150705075, 12700328451238078022}), + b: P256FqNew().SetRaw(&[native.FieldLimbs]uint64{6767595547459644695, 7146403825494928147, 12269344038346710612, 9122477829383225603}), + e: 1, + }, + { + a: P256FqNew().SetRaw(&[native.FieldLimbs]uint64{17099388671847024438, 6426264987820696548, 10641143464957227405, 7709745403700754098}), + b: P256FqNew().SetRaw(&[native.FieldLimbs]uint64{10799154372990268556, 17178492485719929374, 5705777922258988797, 8051037767683567782}), + e: -1, + }, + { + a: P256FqNew().SetRaw(&[native.FieldLimbs]uint64{4567139260680454325, 1629385880182139061, 16607020832317899145, 1261011562621553200}), + b: P256FqNew().SetRaw(&[native.FieldLimbs]uint64{13487234491304534488, 17872642955936089265, 17651026784972590233, 9468934643333871559}), + e: -1, + }, + { + a: P256FqNew().SetRaw(&[native.FieldLimbs]uint64{18071070103467571798, 11787850505799426140, 10631355976141928593, 4867785203635092610}), + b: P256FqNew().SetRaw(&[native.FieldLimbs]uint64{12596443599426461624, 10176122686151524591, 17075755296887483439, 6726169532695070719}), + e: -1, + }, + } + + for _, test := range tests { + require.Equal(t, test.e, test.a.Cmp(test.b)) + require.Equal(t, -test.e, test.b.Cmp(test.a)) + require.Equal(t, 0, test.a.Cmp(test.a)) + require.Equal(t, 0, test.b.Cmp(test.b)) + } +} + +func TestFqBigInt(t *testing.T) { + t1 := P256FqNew().SetBigInt(big.NewInt(9999)) + t2 := P256FqNew().SetBigInt(t1.BigInt()) + require.Equal(t, t1, t2) + + e := P256FqNew().SetRaw(&[native.FieldLimbs]uint64{0x21dcaaadf1cb6aa0, 0x568de5f5990a98d7, 0x354f43b0d837fac5, 0x3e02532cb23f481a}) + b := new( + big.Int, + ).SetBytes([]byte{9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9}) + t1.SetBigInt(b) + require.Equal(t, e, t1) + e.Value[0] = 0xd1dd20150a97bab1 + e.Value[1] = 0x665914b80e0d05ad + e.Value[2] = 0xcab0bc4f27c8053a + e.Value[3] = 0xc1fdacd24dc0b7e6 + b.Neg(b) + t1.SetBigInt(b) + require.Equal(t, e, t1) +} + +func TestFqSetBytesWide(t *testing.T) { + e := P256FqNew().SetRaw(&[native.FieldLimbs]uint64{0xe2b8d4b0e576c8fa, 0x9d2b215f85d3bdf7, 0xf6070a872442640c, 0xcf15d1e49c990b88}) + + a := P256FqNew().SetBytesWide(&[64]byte{ + 0x69, 0x23, 0x5a, 0x0b, 0xce, 0x0c, 0xa8, 0x64, + 0x3c, 0x78, 0xbc, 0x01, 0x05, 0xef, 0xf2, 0x84, + 0xde, 0xbb, 0x6b, 0xc8, 0x63, 0x5e, 0x6e, 0x69, + 0x62, 0xcc, 0xc6, 0x2d, 0xf5, 0x72, 0x40, 0x92, + 0x28, 0x11, 0xd6, 0xc8, 0x07, 0xa5, 0x88, 0x82, + 0xfe, 0xe3, 0x97, 0xf6, 0x1e, 0xfb, 0x2e, 0x3b, + 0x27, 0x5f, 0x85, 0x06, 0x8d, 0x99, 0xa4, 0x75, + 0xc0, 0x2c, 0x71, 0x69, 0x9e, 0x58, 0xea, 0x52, + }) + require.Equal(t, e, a) +} + +func TestFpSetBytesWideBigInt(t *testing.T) { + params := getP256FqParams() + var tv2 [64]byte + for i := 0; i < 25; i++ { + _, _ = crand.Read(tv2[:]) + e := new(big.Int).SetBytes(tv2[:]) + e.Mod(e, params.BiModulus) + + tv := internal.ReverseScalarBytes(tv2[:]) + copy(tv2[:], tv) + a := P256FqNew().SetBytesWide(&tv2) + require.Equal(t, 0, e.Cmp(a.BigInt())) + } +} diff --git a/core/curves/native/p256/fq/p256_fq.go b/core/curves/native/p256/fq/p256_fq.go new file mode 100755 index 0000000..728e58b --- /dev/null +++ b/core/curves/native/p256/fq/p256_fq.go @@ -0,0 +1,1651 @@ +// Code generated by Fiat Cryptography. 
DO NOT EDIT. +// +// Autogenerated: 'src/ExtractionOCaml/word_by_word_montgomery' --lang Go --no-wide-int --relax-primitive-carry-to-bitwidth 32,64 --cmovznz-by-mul --internal-static --package-case flatcase --public-function-case UpperCamelCase --private-function-case camelCase --public-type-case UpperCamelCase --private-type-case camelCase --no-prefix-fiat --doc-newline-in-typedef-bounds --doc-prepend-header 'Code generated by Fiat Cryptography. DO NOT EDIT.' --doc-text-before-function-name ” --doc-text-before-type-name ” --package-name fq ” 64 '2^256 - 2^224 + 2^192 - 89188191075325690597107910205041859247' mul square add sub opp from_montgomery to_montgomery nonzero selectznz to_bytes from_bytes one +// +// curve description (via package name): fq +// +// machine_wordsize = 64 (from "64") +// +// requested operations: mul, square, add, sub, opp, from_montgomery, to_montgomery, nonzero, selectznz, to_bytes, from_bytes, one +// +// m = 0xffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632551 (from "2^256 - 2^224 + 2^192 - 89188191075325690597107910205041859247") +// +// NOTE: In addition to the bounds specified above each function, all +// +// functions synthesized for this Montgomery arithmetic require the +// +// input to be strictly less than the prime modulus (m), and also +// +// require the input to be in the unique saturated representation. +// +// All functions also ensure that these two properties are true of +// +// return values. +// +// Computed values: +// +// eval z = z[0] + (z[1] << 64) + (z[2] << 128) + (z[3] << 192) +// +// bytes_eval z = z[0] + (z[1] << 8) + (z[2] << 16) + (z[3] << 24) + (z[4] << 32) + (z[5] << 40) + (z[6] << 48) + (z[7] << 56) + (z[8] << 64) + (z[9] << 72) + (z[10] << 80) + (z[11] << 88) + (z[12] << 96) + (z[13] << 104) + (z[14] << 112) + (z[15] << 120) + (z[16] << 128) + (z[17] << 136) + (z[18] << 144) + (z[19] << 152) + (z[20] << 160) + (z[21] << 168) + (z[22] << 176) + (z[23] << 184) + (z[24] << 192) + (z[25] << 200) + (z[26] << 208) + (z[27] << 216) + (z[28] << 224) + (z[29] << 232) + (z[30] << 240) + (z[31] << 248) +// +// twos_complement_eval z = let x1 := z[0] + (z[1] << 64) + (z[2] << 128) + (z[3] << 192) in +// +// if x1 & (2^256-1) < 2^255 then x1 & (2^256-1) else (x1 & (2^256-1)) - 2^256 +package fq + +import "math/bits" + +type uint1 uint64 // We use uint64 instead of a more narrow type for performance reasons; see https://github.com/mit-plv/fiat-crypto/pull/1006#issuecomment-892625927 +type int1 int64 // We use uint64 instead of a more narrow type for performance reasons; see https://github.com/mit-plv/fiat-crypto/pull/1006#issuecomment-892625927 + +// MontgomeryDomainFieldElement is a field element in the Montgomery domain. +// +// Bounds: +// +// [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +type MontgomeryDomainFieldElement [4]uint64 + +// NonMontgomeryDomainFieldElement is a field element NOT in the Montgomery domain. +// +// Bounds: +// +// [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +type NonMontgomeryDomainFieldElement [4]uint64 + +// cmovznzU64 is a single-word conditional move. 
+// +// Postconditions: +// +// out1 = (if arg1 = 0 then arg2 else arg3) +// +// Input Bounds: +// +// arg1: [0x0 ~> 0x1] +// arg2: [0x0 ~> 0xffffffffffffffff] +// arg3: [0x0 ~> 0xffffffffffffffff] +// +// Output Bounds: +// +// out1: [0x0 ~> 0xffffffffffffffff] +func cmovznzU64(out1 *uint64, arg1 uint1, arg2 uint64, arg3 uint64) { + x1 := (uint64(arg1) * 0xffffffffffffffff) + x2 := ((x1 & arg3) | ((^x1) & arg2)) + *out1 = x2 +} + +// Mul multiplies two field elements in the Montgomery domain. +// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// 0 ≤ eval arg2 < m +// +// Postconditions: +// +// eval (from_montgomery out1) mod m = (eval (from_montgomery arg1) * eval (from_montgomery arg2)) mod m +// 0 ≤ eval out1 < m +func Mul(out1 *MontgomeryDomainFieldElement, arg1 *MontgomeryDomainFieldElement, arg2 *MontgomeryDomainFieldElement) { + x1 := arg1[1] + x2 := arg1[2] + x3 := arg1[3] + x4 := arg1[0] + var x5 uint64 + var x6 uint64 + x6, x5 = bits.Mul64(x4, arg2[3]) + var x7 uint64 + var x8 uint64 + x8, x7 = bits.Mul64(x4, arg2[2]) + var x9 uint64 + var x10 uint64 + x10, x9 = bits.Mul64(x4, arg2[1]) + var x11 uint64 + var x12 uint64 + x12, x11 = bits.Mul64(x4, arg2[0]) + var x13 uint64 + var x14 uint64 + x13, x14 = bits.Add64(x12, x9, uint64(0x0)) + var x15 uint64 + var x16 uint64 + x15, x16 = bits.Add64(x10, x7, uint64(uint1(x14))) + var x17 uint64 + var x18 uint64 + x17, x18 = bits.Add64(x8, x5, uint64(uint1(x16))) + x19 := (uint64(uint1(x18)) + x6) + var x20 uint64 + _, x20 = bits.Mul64(x11, 0xccd1c8aaee00bc4f) + var x22 uint64 + var x23 uint64 + x23, x22 = bits.Mul64(x20, 0xffffffff00000000) + var x24 uint64 + var x25 uint64 + x25, x24 = bits.Mul64(x20, 0xffffffffffffffff) + var x26 uint64 + var x27 uint64 + x27, x26 = bits.Mul64(x20, 0xbce6faada7179e84) + var x28 uint64 + var x29 uint64 + x29, x28 = bits.Mul64(x20, 0xf3b9cac2fc632551) + var x30 uint64 + var x31 uint64 + x30, x31 = bits.Add64(x29, x26, uint64(0x0)) + var x32 uint64 + var x33 uint64 + x32, x33 = bits.Add64(x27, x24, uint64(uint1(x31))) + var x34 uint64 + var x35 uint64 + x34, x35 = bits.Add64(x25, x22, uint64(uint1(x33))) + x36 := (uint64(uint1(x35)) + x23) + var x38 uint64 + _, x38 = bits.Add64(x11, x28, uint64(0x0)) + var x39 uint64 + var x40 uint64 + x39, x40 = bits.Add64(x13, x30, uint64(uint1(x38))) + var x41 uint64 + var x42 uint64 + x41, x42 = bits.Add64(x15, x32, uint64(uint1(x40))) + var x43 uint64 + var x44 uint64 + x43, x44 = bits.Add64(x17, x34, uint64(uint1(x42))) + var x45 uint64 + var x46 uint64 + x45, x46 = bits.Add64(x19, x36, uint64(uint1(x44))) + var x47 uint64 + var x48 uint64 + x48, x47 = bits.Mul64(x1, arg2[3]) + var x49 uint64 + var x50 uint64 + x50, x49 = bits.Mul64(x1, arg2[2]) + var x51 uint64 + var x52 uint64 + x52, x51 = bits.Mul64(x1, arg2[1]) + var x53 uint64 + var x54 uint64 + x54, x53 = bits.Mul64(x1, arg2[0]) + var x55 uint64 + var x56 uint64 + x55, x56 = bits.Add64(x54, x51, uint64(0x0)) + var x57 uint64 + var x58 uint64 + x57, x58 = bits.Add64(x52, x49, uint64(uint1(x56))) + var x59 uint64 + var x60 uint64 + x59, x60 = bits.Add64(x50, x47, uint64(uint1(x58))) + x61 := (uint64(uint1(x60)) + x48) + var x62 uint64 + var x63 uint64 + x62, x63 = bits.Add64(x39, x53, uint64(0x0)) + var x64 uint64 + var x65 uint64 + x64, x65 = bits.Add64(x41, x55, uint64(uint1(x63))) + var x66 uint64 + var x67 uint64 + x66, x67 = bits.Add64(x43, x57, uint64(uint1(x65))) + var x68 uint64 + var x69 uint64 + x68, x69 = bits.Add64(x45, x59, uint64(uint1(x67))) + var x70 uint64 + var x71 uint64 + x70, x71 = 
bits.Add64(uint64(uint1(x46)), x61, uint64(uint1(x69))) + var x72 uint64 + _, x72 = bits.Mul64(x62, 0xccd1c8aaee00bc4f) + var x74 uint64 + var x75 uint64 + x75, x74 = bits.Mul64(x72, 0xffffffff00000000) + var x76 uint64 + var x77 uint64 + x77, x76 = bits.Mul64(x72, 0xffffffffffffffff) + var x78 uint64 + var x79 uint64 + x79, x78 = bits.Mul64(x72, 0xbce6faada7179e84) + var x80 uint64 + var x81 uint64 + x81, x80 = bits.Mul64(x72, 0xf3b9cac2fc632551) + var x82 uint64 + var x83 uint64 + x82, x83 = bits.Add64(x81, x78, uint64(0x0)) + var x84 uint64 + var x85 uint64 + x84, x85 = bits.Add64(x79, x76, uint64(uint1(x83))) + var x86 uint64 + var x87 uint64 + x86, x87 = bits.Add64(x77, x74, uint64(uint1(x85))) + x88 := (uint64(uint1(x87)) + x75) + var x90 uint64 + _, x90 = bits.Add64(x62, x80, uint64(0x0)) + var x91 uint64 + var x92 uint64 + x91, x92 = bits.Add64(x64, x82, uint64(uint1(x90))) + var x93 uint64 + var x94 uint64 + x93, x94 = bits.Add64(x66, x84, uint64(uint1(x92))) + var x95 uint64 + var x96 uint64 + x95, x96 = bits.Add64(x68, x86, uint64(uint1(x94))) + var x97 uint64 + var x98 uint64 + x97, x98 = bits.Add64(x70, x88, uint64(uint1(x96))) + x99 := (uint64(uint1(x98)) + uint64(uint1(x71))) + var x100 uint64 + var x101 uint64 + x101, x100 = bits.Mul64(x2, arg2[3]) + var x102 uint64 + var x103 uint64 + x103, x102 = bits.Mul64(x2, arg2[2]) + var x104 uint64 + var x105 uint64 + x105, x104 = bits.Mul64(x2, arg2[1]) + var x106 uint64 + var x107 uint64 + x107, x106 = bits.Mul64(x2, arg2[0]) + var x108 uint64 + var x109 uint64 + x108, x109 = bits.Add64(x107, x104, uint64(0x0)) + var x110 uint64 + var x111 uint64 + x110, x111 = bits.Add64(x105, x102, uint64(uint1(x109))) + var x112 uint64 + var x113 uint64 + x112, x113 = bits.Add64(x103, x100, uint64(uint1(x111))) + x114 := (uint64(uint1(x113)) + x101) + var x115 uint64 + var x116 uint64 + x115, x116 = bits.Add64(x91, x106, uint64(0x0)) + var x117 uint64 + var x118 uint64 + x117, x118 = bits.Add64(x93, x108, uint64(uint1(x116))) + var x119 uint64 + var x120 uint64 + x119, x120 = bits.Add64(x95, x110, uint64(uint1(x118))) + var x121 uint64 + var x122 uint64 + x121, x122 = bits.Add64(x97, x112, uint64(uint1(x120))) + var x123 uint64 + var x124 uint64 + x123, x124 = bits.Add64(x99, x114, uint64(uint1(x122))) + var x125 uint64 + _, x125 = bits.Mul64(x115, 0xccd1c8aaee00bc4f) + var x127 uint64 + var x128 uint64 + x128, x127 = bits.Mul64(x125, 0xffffffff00000000) + var x129 uint64 + var x130 uint64 + x130, x129 = bits.Mul64(x125, 0xffffffffffffffff) + var x131 uint64 + var x132 uint64 + x132, x131 = bits.Mul64(x125, 0xbce6faada7179e84) + var x133 uint64 + var x134 uint64 + x134, x133 = bits.Mul64(x125, 0xf3b9cac2fc632551) + var x135 uint64 + var x136 uint64 + x135, x136 = bits.Add64(x134, x131, uint64(0x0)) + var x137 uint64 + var x138 uint64 + x137, x138 = bits.Add64(x132, x129, uint64(uint1(x136))) + var x139 uint64 + var x140 uint64 + x139, x140 = bits.Add64(x130, x127, uint64(uint1(x138))) + x141 := (uint64(uint1(x140)) + x128) + var x143 uint64 + _, x143 = bits.Add64(x115, x133, uint64(0x0)) + var x144 uint64 + var x145 uint64 + x144, x145 = bits.Add64(x117, x135, uint64(uint1(x143))) + var x146 uint64 + var x147 uint64 + x146, x147 = bits.Add64(x119, x137, uint64(uint1(x145))) + var x148 uint64 + var x149 uint64 + x148, x149 = bits.Add64(x121, x139, uint64(uint1(x147))) + var x150 uint64 + var x151 uint64 + x150, x151 = bits.Add64(x123, x141, uint64(uint1(x149))) + x152 := (uint64(uint1(x151)) + uint64(uint1(x124))) + var x153 uint64 + var x154 
uint64 + x154, x153 = bits.Mul64(x3, arg2[3]) + var x155 uint64 + var x156 uint64 + x156, x155 = bits.Mul64(x3, arg2[2]) + var x157 uint64 + var x158 uint64 + x158, x157 = bits.Mul64(x3, arg2[1]) + var x159 uint64 + var x160 uint64 + x160, x159 = bits.Mul64(x3, arg2[0]) + var x161 uint64 + var x162 uint64 + x161, x162 = bits.Add64(x160, x157, uint64(0x0)) + var x163 uint64 + var x164 uint64 + x163, x164 = bits.Add64(x158, x155, uint64(uint1(x162))) + var x165 uint64 + var x166 uint64 + x165, x166 = bits.Add64(x156, x153, uint64(uint1(x164))) + x167 := (uint64(uint1(x166)) + x154) + var x168 uint64 + var x169 uint64 + x168, x169 = bits.Add64(x144, x159, uint64(0x0)) + var x170 uint64 + var x171 uint64 + x170, x171 = bits.Add64(x146, x161, uint64(uint1(x169))) + var x172 uint64 + var x173 uint64 + x172, x173 = bits.Add64(x148, x163, uint64(uint1(x171))) + var x174 uint64 + var x175 uint64 + x174, x175 = bits.Add64(x150, x165, uint64(uint1(x173))) + var x176 uint64 + var x177 uint64 + x176, x177 = bits.Add64(x152, x167, uint64(uint1(x175))) + var x178 uint64 + _, x178 = bits.Mul64(x168, 0xccd1c8aaee00bc4f) + var x180 uint64 + var x181 uint64 + x181, x180 = bits.Mul64(x178, 0xffffffff00000000) + var x182 uint64 + var x183 uint64 + x183, x182 = bits.Mul64(x178, 0xffffffffffffffff) + var x184 uint64 + var x185 uint64 + x185, x184 = bits.Mul64(x178, 0xbce6faada7179e84) + var x186 uint64 + var x187 uint64 + x187, x186 = bits.Mul64(x178, 0xf3b9cac2fc632551) + var x188 uint64 + var x189 uint64 + x188, x189 = bits.Add64(x187, x184, uint64(0x0)) + var x190 uint64 + var x191 uint64 + x190, x191 = bits.Add64(x185, x182, uint64(uint1(x189))) + var x192 uint64 + var x193 uint64 + x192, x193 = bits.Add64(x183, x180, uint64(uint1(x191))) + x194 := (uint64(uint1(x193)) + x181) + var x196 uint64 + _, x196 = bits.Add64(x168, x186, uint64(0x0)) + var x197 uint64 + var x198 uint64 + x197, x198 = bits.Add64(x170, x188, uint64(uint1(x196))) + var x199 uint64 + var x200 uint64 + x199, x200 = bits.Add64(x172, x190, uint64(uint1(x198))) + var x201 uint64 + var x202 uint64 + x201, x202 = bits.Add64(x174, x192, uint64(uint1(x200))) + var x203 uint64 + var x204 uint64 + x203, x204 = bits.Add64(x176, x194, uint64(uint1(x202))) + x205 := (uint64(uint1(x204)) + uint64(uint1(x177))) + var x206 uint64 + var x207 uint64 + x206, x207 = bits.Sub64(x197, 0xf3b9cac2fc632551, uint64(0x0)) + var x208 uint64 + var x209 uint64 + x208, x209 = bits.Sub64(x199, 0xbce6faada7179e84, uint64(uint1(x207))) + var x210 uint64 + var x211 uint64 + x210, x211 = bits.Sub64(x201, 0xffffffffffffffff, uint64(uint1(x209))) + var x212 uint64 + var x213 uint64 + x212, x213 = bits.Sub64(x203, 0xffffffff00000000, uint64(uint1(x211))) + var x215 uint64 + _, x215 = bits.Sub64(x205, uint64(0x0), uint64(uint1(x213))) + var x216 uint64 + cmovznzU64(&x216, uint1(x215), x206, x197) + var x217 uint64 + cmovznzU64(&x217, uint1(x215), x208, x199) + var x218 uint64 + cmovznzU64(&x218, uint1(x215), x210, x201) + var x219 uint64 + cmovznzU64(&x219, uint1(x215), x212, x203) + out1[0] = x216 + out1[1] = x217 + out1[2] = x218 + out1[3] = x219 +} + +// Square squares a field element in the Montgomery domain. 
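Mul's contract above (and Square's, introduced in the preceding sentence) is easiest to read through the header's eval formula: the four limbs encode a 256-bit integer, and multiplying two Montgomery-form inputs yields the Montgomery form of their product mod m. The following is an editorial sketch, not part of this patch, that checks the postcondition through the exported helpers; the fq import path is assumed by analogy with the fp path used elsewhere in the diff.

```go
package main

import (
	"fmt"
	"math/big"

	"github.com/sonr-io/sonr/crypto/core/curves/native/p256/fq" // assumed path
)

// evalLimbs mirrors the header's "eval z" formula:
// z[0] + (z[1] << 64) + (z[2] << 128) + (z[3] << 192).
func evalLimbs(z [4]uint64) *big.Int {
	v := new(big.Int)
	for i := 3; i >= 0; i-- {
		v.Lsh(v, 64)
		v.Or(v, new(big.Int).SetUint64(z[i]))
	}
	return v
}

func main() {
	// m = 2^256 - 2^224 + 2^192 - 89188191075325690597107910205041859247
	m, _ := new(big.Int).SetString(
		"ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632551", 16)

	var a, b fq.NonMontgomeryDomainFieldElement
	a[0], b[0] = 7, 9

	var aM, bM, cM fq.MontgomeryDomainFieldElement
	fq.ToMontgomery(&aM, &a)
	fq.ToMontgomery(&bM, &b)
	fq.Mul(&cM, &aM, &bM)

	var c fq.NonMontgomeryDomainFieldElement
	fq.FromMontgomery(&c, &cM)

	want := new(big.Int).Mul(evalLimbs([4]uint64(a)), evalLimbs([4]uint64(b)))
	want.Mod(want, m)
	fmt.Println(evalLimbs([4]uint64(c)).Cmp(want) == 0) // true
}
```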
+// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// +// Postconditions: +// +// eval (from_montgomery out1) mod m = (eval (from_montgomery arg1) * eval (from_montgomery arg1)) mod m +// 0 ≤ eval out1 < m +func Square(out1 *MontgomeryDomainFieldElement, arg1 *MontgomeryDomainFieldElement) { + x1 := arg1[1] + x2 := arg1[2] + x3 := arg1[3] + x4 := arg1[0] + var x5 uint64 + var x6 uint64 + x6, x5 = bits.Mul64(x4, arg1[3]) + var x7 uint64 + var x8 uint64 + x8, x7 = bits.Mul64(x4, arg1[2]) + var x9 uint64 + var x10 uint64 + x10, x9 = bits.Mul64(x4, arg1[1]) + var x11 uint64 + var x12 uint64 + x12, x11 = bits.Mul64(x4, arg1[0]) + var x13 uint64 + var x14 uint64 + x13, x14 = bits.Add64(x12, x9, uint64(0x0)) + var x15 uint64 + var x16 uint64 + x15, x16 = bits.Add64(x10, x7, uint64(uint1(x14))) + var x17 uint64 + var x18 uint64 + x17, x18 = bits.Add64(x8, x5, uint64(uint1(x16))) + x19 := (uint64(uint1(x18)) + x6) + var x20 uint64 + _, x20 = bits.Mul64(x11, 0xccd1c8aaee00bc4f) + var x22 uint64 + var x23 uint64 + x23, x22 = bits.Mul64(x20, 0xffffffff00000000) + var x24 uint64 + var x25 uint64 + x25, x24 = bits.Mul64(x20, 0xffffffffffffffff) + var x26 uint64 + var x27 uint64 + x27, x26 = bits.Mul64(x20, 0xbce6faada7179e84) + var x28 uint64 + var x29 uint64 + x29, x28 = bits.Mul64(x20, 0xf3b9cac2fc632551) + var x30 uint64 + var x31 uint64 + x30, x31 = bits.Add64(x29, x26, uint64(0x0)) + var x32 uint64 + var x33 uint64 + x32, x33 = bits.Add64(x27, x24, uint64(uint1(x31))) + var x34 uint64 + var x35 uint64 + x34, x35 = bits.Add64(x25, x22, uint64(uint1(x33))) + x36 := (uint64(uint1(x35)) + x23) + var x38 uint64 + _, x38 = bits.Add64(x11, x28, uint64(0x0)) + var x39 uint64 + var x40 uint64 + x39, x40 = bits.Add64(x13, x30, uint64(uint1(x38))) + var x41 uint64 + var x42 uint64 + x41, x42 = bits.Add64(x15, x32, uint64(uint1(x40))) + var x43 uint64 + var x44 uint64 + x43, x44 = bits.Add64(x17, x34, uint64(uint1(x42))) + var x45 uint64 + var x46 uint64 + x45, x46 = bits.Add64(x19, x36, uint64(uint1(x44))) + var x47 uint64 + var x48 uint64 + x48, x47 = bits.Mul64(x1, arg1[3]) + var x49 uint64 + var x50 uint64 + x50, x49 = bits.Mul64(x1, arg1[2]) + var x51 uint64 + var x52 uint64 + x52, x51 = bits.Mul64(x1, arg1[1]) + var x53 uint64 + var x54 uint64 + x54, x53 = bits.Mul64(x1, arg1[0]) + var x55 uint64 + var x56 uint64 + x55, x56 = bits.Add64(x54, x51, uint64(0x0)) + var x57 uint64 + var x58 uint64 + x57, x58 = bits.Add64(x52, x49, uint64(uint1(x56))) + var x59 uint64 + var x60 uint64 + x59, x60 = bits.Add64(x50, x47, uint64(uint1(x58))) + x61 := (uint64(uint1(x60)) + x48) + var x62 uint64 + var x63 uint64 + x62, x63 = bits.Add64(x39, x53, uint64(0x0)) + var x64 uint64 + var x65 uint64 + x64, x65 = bits.Add64(x41, x55, uint64(uint1(x63))) + var x66 uint64 + var x67 uint64 + x66, x67 = bits.Add64(x43, x57, uint64(uint1(x65))) + var x68 uint64 + var x69 uint64 + x68, x69 = bits.Add64(x45, x59, uint64(uint1(x67))) + var x70 uint64 + var x71 uint64 + x70, x71 = bits.Add64(uint64(uint1(x46)), x61, uint64(uint1(x69))) + var x72 uint64 + _, x72 = bits.Mul64(x62, 0xccd1c8aaee00bc4f) + var x74 uint64 + var x75 uint64 + x75, x74 = bits.Mul64(x72, 0xffffffff00000000) + var x76 uint64 + var x77 uint64 + x77, x76 = bits.Mul64(x72, 0xffffffffffffffff) + var x78 uint64 + var x79 uint64 + x79, x78 = bits.Mul64(x72, 0xbce6faada7179e84) + var x80 uint64 + var x81 uint64 + x81, x80 = bits.Mul64(x72, 0xf3b9cac2fc632551) + var x82 uint64 + var x83 uint64 + x82, x83 = bits.Add64(x81, x78, uint64(0x0)) + var x84 uint64 + var x85 
uint64 + x84, x85 = bits.Add64(x79, x76, uint64(uint1(x83))) + var x86 uint64 + var x87 uint64 + x86, x87 = bits.Add64(x77, x74, uint64(uint1(x85))) + x88 := (uint64(uint1(x87)) + x75) + var x90 uint64 + _, x90 = bits.Add64(x62, x80, uint64(0x0)) + var x91 uint64 + var x92 uint64 + x91, x92 = bits.Add64(x64, x82, uint64(uint1(x90))) + var x93 uint64 + var x94 uint64 + x93, x94 = bits.Add64(x66, x84, uint64(uint1(x92))) + var x95 uint64 + var x96 uint64 + x95, x96 = bits.Add64(x68, x86, uint64(uint1(x94))) + var x97 uint64 + var x98 uint64 + x97, x98 = bits.Add64(x70, x88, uint64(uint1(x96))) + x99 := (uint64(uint1(x98)) + uint64(uint1(x71))) + var x100 uint64 + var x101 uint64 + x101, x100 = bits.Mul64(x2, arg1[3]) + var x102 uint64 + var x103 uint64 + x103, x102 = bits.Mul64(x2, arg1[2]) + var x104 uint64 + var x105 uint64 + x105, x104 = bits.Mul64(x2, arg1[1]) + var x106 uint64 + var x107 uint64 + x107, x106 = bits.Mul64(x2, arg1[0]) + var x108 uint64 + var x109 uint64 + x108, x109 = bits.Add64(x107, x104, uint64(0x0)) + var x110 uint64 + var x111 uint64 + x110, x111 = bits.Add64(x105, x102, uint64(uint1(x109))) + var x112 uint64 + var x113 uint64 + x112, x113 = bits.Add64(x103, x100, uint64(uint1(x111))) + x114 := (uint64(uint1(x113)) + x101) + var x115 uint64 + var x116 uint64 + x115, x116 = bits.Add64(x91, x106, uint64(0x0)) + var x117 uint64 + var x118 uint64 + x117, x118 = bits.Add64(x93, x108, uint64(uint1(x116))) + var x119 uint64 + var x120 uint64 + x119, x120 = bits.Add64(x95, x110, uint64(uint1(x118))) + var x121 uint64 + var x122 uint64 + x121, x122 = bits.Add64(x97, x112, uint64(uint1(x120))) + var x123 uint64 + var x124 uint64 + x123, x124 = bits.Add64(x99, x114, uint64(uint1(x122))) + var x125 uint64 + _, x125 = bits.Mul64(x115, 0xccd1c8aaee00bc4f) + var x127 uint64 + var x128 uint64 + x128, x127 = bits.Mul64(x125, 0xffffffff00000000) + var x129 uint64 + var x130 uint64 + x130, x129 = bits.Mul64(x125, 0xffffffffffffffff) + var x131 uint64 + var x132 uint64 + x132, x131 = bits.Mul64(x125, 0xbce6faada7179e84) + var x133 uint64 + var x134 uint64 + x134, x133 = bits.Mul64(x125, 0xf3b9cac2fc632551) + var x135 uint64 + var x136 uint64 + x135, x136 = bits.Add64(x134, x131, uint64(0x0)) + var x137 uint64 + var x138 uint64 + x137, x138 = bits.Add64(x132, x129, uint64(uint1(x136))) + var x139 uint64 + var x140 uint64 + x139, x140 = bits.Add64(x130, x127, uint64(uint1(x138))) + x141 := (uint64(uint1(x140)) + x128) + var x143 uint64 + _, x143 = bits.Add64(x115, x133, uint64(0x0)) + var x144 uint64 + var x145 uint64 + x144, x145 = bits.Add64(x117, x135, uint64(uint1(x143))) + var x146 uint64 + var x147 uint64 + x146, x147 = bits.Add64(x119, x137, uint64(uint1(x145))) + var x148 uint64 + var x149 uint64 + x148, x149 = bits.Add64(x121, x139, uint64(uint1(x147))) + var x150 uint64 + var x151 uint64 + x150, x151 = bits.Add64(x123, x141, uint64(uint1(x149))) + x152 := (uint64(uint1(x151)) + uint64(uint1(x124))) + var x153 uint64 + var x154 uint64 + x154, x153 = bits.Mul64(x3, arg1[3]) + var x155 uint64 + var x156 uint64 + x156, x155 = bits.Mul64(x3, arg1[2]) + var x157 uint64 + var x158 uint64 + x158, x157 = bits.Mul64(x3, arg1[1]) + var x159 uint64 + var x160 uint64 + x160, x159 = bits.Mul64(x3, arg1[0]) + var x161 uint64 + var x162 uint64 + x161, x162 = bits.Add64(x160, x157, uint64(0x0)) + var x163 uint64 + var x164 uint64 + x163, x164 = bits.Add64(x158, x155, uint64(uint1(x162))) + var x165 uint64 + var x166 uint64 + x165, x166 = bits.Add64(x156, x153, uint64(uint1(x164))) + x167 := 
(uint64(uint1(x166)) + x154) + var x168 uint64 + var x169 uint64 + x168, x169 = bits.Add64(x144, x159, uint64(0x0)) + var x170 uint64 + var x171 uint64 + x170, x171 = bits.Add64(x146, x161, uint64(uint1(x169))) + var x172 uint64 + var x173 uint64 + x172, x173 = bits.Add64(x148, x163, uint64(uint1(x171))) + var x174 uint64 + var x175 uint64 + x174, x175 = bits.Add64(x150, x165, uint64(uint1(x173))) + var x176 uint64 + var x177 uint64 + x176, x177 = bits.Add64(x152, x167, uint64(uint1(x175))) + var x178 uint64 + _, x178 = bits.Mul64(x168, 0xccd1c8aaee00bc4f) + var x180 uint64 + var x181 uint64 + x181, x180 = bits.Mul64(x178, 0xffffffff00000000) + var x182 uint64 + var x183 uint64 + x183, x182 = bits.Mul64(x178, 0xffffffffffffffff) + var x184 uint64 + var x185 uint64 + x185, x184 = bits.Mul64(x178, 0xbce6faada7179e84) + var x186 uint64 + var x187 uint64 + x187, x186 = bits.Mul64(x178, 0xf3b9cac2fc632551) + var x188 uint64 + var x189 uint64 + x188, x189 = bits.Add64(x187, x184, uint64(0x0)) + var x190 uint64 + var x191 uint64 + x190, x191 = bits.Add64(x185, x182, uint64(uint1(x189))) + var x192 uint64 + var x193 uint64 + x192, x193 = bits.Add64(x183, x180, uint64(uint1(x191))) + x194 := (uint64(uint1(x193)) + x181) + var x196 uint64 + _, x196 = bits.Add64(x168, x186, uint64(0x0)) + var x197 uint64 + var x198 uint64 + x197, x198 = bits.Add64(x170, x188, uint64(uint1(x196))) + var x199 uint64 + var x200 uint64 + x199, x200 = bits.Add64(x172, x190, uint64(uint1(x198))) + var x201 uint64 + var x202 uint64 + x201, x202 = bits.Add64(x174, x192, uint64(uint1(x200))) + var x203 uint64 + var x204 uint64 + x203, x204 = bits.Add64(x176, x194, uint64(uint1(x202))) + x205 := (uint64(uint1(x204)) + uint64(uint1(x177))) + var x206 uint64 + var x207 uint64 + x206, x207 = bits.Sub64(x197, 0xf3b9cac2fc632551, uint64(0x0)) + var x208 uint64 + var x209 uint64 + x208, x209 = bits.Sub64(x199, 0xbce6faada7179e84, uint64(uint1(x207))) + var x210 uint64 + var x211 uint64 + x210, x211 = bits.Sub64(x201, 0xffffffffffffffff, uint64(uint1(x209))) + var x212 uint64 + var x213 uint64 + x212, x213 = bits.Sub64(x203, 0xffffffff00000000, uint64(uint1(x211))) + var x215 uint64 + _, x215 = bits.Sub64(x205, uint64(0x0), uint64(uint1(x213))) + var x216 uint64 + cmovznzU64(&x216, uint1(x215), x206, x197) + var x217 uint64 + cmovznzU64(&x217, uint1(x215), x208, x199) + var x218 uint64 + cmovznzU64(&x218, uint1(x215), x210, x201) + var x219 uint64 + cmovznzU64(&x219, uint1(x215), x212, x203) + out1[0] = x216 + out1[1] = x217 + out1[2] = x218 + out1[3] = x219 +} + +// Add adds two field elements in the Montgomery domain. 
+// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// 0 ≤ eval arg2 < m +// +// Postconditions: +// +// eval (from_montgomery out1) mod m = (eval (from_montgomery arg1) + eval (from_montgomery arg2)) mod m +// 0 ≤ eval out1 < m +func Add(out1 *MontgomeryDomainFieldElement, arg1 *MontgomeryDomainFieldElement, arg2 *MontgomeryDomainFieldElement) { + var x1 uint64 + var x2 uint64 + x1, x2 = bits.Add64(arg1[0], arg2[0], uint64(0x0)) + var x3 uint64 + var x4 uint64 + x3, x4 = bits.Add64(arg1[1], arg2[1], uint64(uint1(x2))) + var x5 uint64 + var x6 uint64 + x5, x6 = bits.Add64(arg1[2], arg2[2], uint64(uint1(x4))) + var x7 uint64 + var x8 uint64 + x7, x8 = bits.Add64(arg1[3], arg2[3], uint64(uint1(x6))) + var x9 uint64 + var x10 uint64 + x9, x10 = bits.Sub64(x1, 0xf3b9cac2fc632551, uint64(0x0)) + var x11 uint64 + var x12 uint64 + x11, x12 = bits.Sub64(x3, 0xbce6faada7179e84, uint64(uint1(x10))) + var x13 uint64 + var x14 uint64 + x13, x14 = bits.Sub64(x5, 0xffffffffffffffff, uint64(uint1(x12))) + var x15 uint64 + var x16 uint64 + x15, x16 = bits.Sub64(x7, 0xffffffff00000000, uint64(uint1(x14))) + var x18 uint64 + _, x18 = bits.Sub64(uint64(uint1(x8)), uint64(0x0), uint64(uint1(x16))) + var x19 uint64 + cmovznzU64(&x19, uint1(x18), x9, x1) + var x20 uint64 + cmovznzU64(&x20, uint1(x18), x11, x3) + var x21 uint64 + cmovznzU64(&x21, uint1(x18), x13, x5) + var x22 uint64 + cmovznzU64(&x22, uint1(x18), x15, x7) + out1[0] = x19 + out1[1] = x20 + out1[2] = x21 + out1[3] = x22 +} + +// Sub subtracts two field elements in the Montgomery domain. +// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// 0 ≤ eval arg2 < m +// +// Postconditions: +// +// eval (from_montgomery out1) mod m = (eval (from_montgomery arg1) - eval (from_montgomery arg2)) mod m +// 0 ≤ eval out1 < m +func Sub(out1 *MontgomeryDomainFieldElement, arg1 *MontgomeryDomainFieldElement, arg2 *MontgomeryDomainFieldElement) { + var x1 uint64 + var x2 uint64 + x1, x2 = bits.Sub64(arg1[0], arg2[0], uint64(0x0)) + var x3 uint64 + var x4 uint64 + x3, x4 = bits.Sub64(arg1[1], arg2[1], uint64(uint1(x2))) + var x5 uint64 + var x6 uint64 + x5, x6 = bits.Sub64(arg1[2], arg2[2], uint64(uint1(x4))) + var x7 uint64 + var x8 uint64 + x7, x8 = bits.Sub64(arg1[3], arg2[3], uint64(uint1(x6))) + var x9 uint64 + cmovznzU64(&x9, uint1(x8), uint64(0x0), 0xffffffffffffffff) + var x10 uint64 + var x11 uint64 + x10, x11 = bits.Add64(x1, (x9 & 0xf3b9cac2fc632551), uint64(0x0)) + var x12 uint64 + var x13 uint64 + x12, x13 = bits.Add64(x3, (x9 & 0xbce6faada7179e84), uint64(uint1(x11))) + var x14 uint64 + var x15 uint64 + x14, x15 = bits.Add64(x5, x9, uint64(uint1(x13))) + var x16 uint64 + x16, _ = bits.Add64(x7, (x9 & 0xffffffff00000000), uint64(uint1(x15))) + out1[0] = x10 + out1[1] = x12 + out1[2] = x14 + out1[3] = x16 +} + +// Opp negates a field element in the Montgomery domain. 
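Sub above (and Opp, introduced in the preceding sentence) corrects a limb-wise borrow without branching: subtract with borrow, then add the modulus back masked by the final borrow bit, so no secret-dependent branch is taken. Condensed to a single machine word, the pattern looks like the sketch below; subMod64 is a made-up name for illustration and is not in the patch. Mul, Square and Add finish with the mirror image of this step: subtract the modulus unconditionally, then keep the subtracted value, via cmovznzU64, only when no borrow occurred.

```go
package sketch

import "math/bits"

// subMod64 computes (a - b) mod m for a, b < m, mirroring the borrow-masked
// correction used by Sub/Opp: subtract, then add m back only if we borrowed.
func subMod64(a, b, m uint64) uint64 {
	d, borrow := bits.Sub64(a, b, 0)
	mask := uint64(0) - borrow // all zeros if no borrow, all ones if we borrowed
	d, _ = bits.Add64(d, m&mask, 0)
	return d
}
```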
+// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// +// Postconditions: +// +// eval (from_montgomery out1) mod m = -eval (from_montgomery arg1) mod m +// 0 ≤ eval out1 < m +func Opp(out1 *MontgomeryDomainFieldElement, arg1 *MontgomeryDomainFieldElement) { + var x1 uint64 + var x2 uint64 + x1, x2 = bits.Sub64(uint64(0x0), arg1[0], uint64(0x0)) + var x3 uint64 + var x4 uint64 + x3, x4 = bits.Sub64(uint64(0x0), arg1[1], uint64(uint1(x2))) + var x5 uint64 + var x6 uint64 + x5, x6 = bits.Sub64(uint64(0x0), arg1[2], uint64(uint1(x4))) + var x7 uint64 + var x8 uint64 + x7, x8 = bits.Sub64(uint64(0x0), arg1[3], uint64(uint1(x6))) + var x9 uint64 + cmovznzU64(&x9, uint1(x8), uint64(0x0), 0xffffffffffffffff) + var x10 uint64 + var x11 uint64 + x10, x11 = bits.Add64(x1, (x9 & 0xf3b9cac2fc632551), uint64(0x0)) + var x12 uint64 + var x13 uint64 + x12, x13 = bits.Add64(x3, (x9 & 0xbce6faada7179e84), uint64(uint1(x11))) + var x14 uint64 + var x15 uint64 + x14, x15 = bits.Add64(x5, x9, uint64(uint1(x13))) + var x16 uint64 + x16, _ = bits.Add64(x7, (x9 & 0xffffffff00000000), uint64(uint1(x15))) + out1[0] = x10 + out1[1] = x12 + out1[2] = x14 + out1[3] = x16 +} + +// FromMontgomery translates a field element out of the Montgomery domain. +// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// +// Postconditions: +// +// eval out1 mod m = (eval arg1 * ((2^64)⁻¹ mod m)^4) mod m +// 0 ≤ eval out1 < m +func FromMontgomery(out1 *NonMontgomeryDomainFieldElement, arg1 *MontgomeryDomainFieldElement) { + x1 := arg1[0] + var x2 uint64 + _, x2 = bits.Mul64(x1, 0xccd1c8aaee00bc4f) + var x4 uint64 + var x5 uint64 + x5, x4 = bits.Mul64(x2, 0xffffffff00000000) + var x6 uint64 + var x7 uint64 + x7, x6 = bits.Mul64(x2, 0xffffffffffffffff) + var x8 uint64 + var x9 uint64 + x9, x8 = bits.Mul64(x2, 0xbce6faada7179e84) + var x10 uint64 + var x11 uint64 + x11, x10 = bits.Mul64(x2, 0xf3b9cac2fc632551) + var x12 uint64 + var x13 uint64 + x12, x13 = bits.Add64(x11, x8, uint64(0x0)) + var x14 uint64 + var x15 uint64 + x14, x15 = bits.Add64(x9, x6, uint64(uint1(x13))) + var x16 uint64 + var x17 uint64 + x16, x17 = bits.Add64(x7, x4, uint64(uint1(x15))) + var x19 uint64 + _, x19 = bits.Add64(x1, x10, uint64(0x0)) + var x20 uint64 + var x21 uint64 + x20, x21 = bits.Add64(uint64(0x0), x12, uint64(uint1(x19))) + var x22 uint64 + var x23 uint64 + x22, x23 = bits.Add64(uint64(0x0), x14, uint64(uint1(x21))) + var x24 uint64 + var x25 uint64 + x24, x25 = bits.Add64(uint64(0x0), x16, uint64(uint1(x23))) + var x26 uint64 + var x27 uint64 + x26, x27 = bits.Add64(x20, arg1[1], uint64(0x0)) + var x28 uint64 + var x29 uint64 + x28, x29 = bits.Add64(x22, uint64(0x0), uint64(uint1(x27))) + var x30 uint64 + var x31 uint64 + x30, x31 = bits.Add64(x24, uint64(0x0), uint64(uint1(x29))) + var x32 uint64 + _, x32 = bits.Mul64(x26, 0xccd1c8aaee00bc4f) + var x34 uint64 + var x35 uint64 + x35, x34 = bits.Mul64(x32, 0xffffffff00000000) + var x36 uint64 + var x37 uint64 + x37, x36 = bits.Mul64(x32, 0xffffffffffffffff) + var x38 uint64 + var x39 uint64 + x39, x38 = bits.Mul64(x32, 0xbce6faada7179e84) + var x40 uint64 + var x41 uint64 + x41, x40 = bits.Mul64(x32, 0xf3b9cac2fc632551) + var x42 uint64 + var x43 uint64 + x42, x43 = bits.Add64(x41, x38, uint64(0x0)) + var x44 uint64 + var x45 uint64 + x44, x45 = bits.Add64(x39, x36, uint64(uint1(x43))) + var x46 uint64 + var x47 uint64 + x46, x47 = bits.Add64(x37, x34, uint64(uint1(x45))) + var x49 uint64 + _, x49 = bits.Add64(x26, x40, uint64(0x0)) + var x50 uint64 + var x51 uint64 + x50, x51 = bits.Add64(x28, x42, 
uint64(uint1(x49))) + var x52 uint64 + var x53 uint64 + x52, x53 = bits.Add64(x30, x44, uint64(uint1(x51))) + var x54 uint64 + var x55 uint64 + x54, x55 = bits.Add64((uint64(uint1(x31)) + (uint64(uint1(x25)) + (uint64(uint1(x17)) + x5))), x46, uint64(uint1(x53))) + var x56 uint64 + var x57 uint64 + x56, x57 = bits.Add64(x50, arg1[2], uint64(0x0)) + var x58 uint64 + var x59 uint64 + x58, x59 = bits.Add64(x52, uint64(0x0), uint64(uint1(x57))) + var x60 uint64 + var x61 uint64 + x60, x61 = bits.Add64(x54, uint64(0x0), uint64(uint1(x59))) + var x62 uint64 + _, x62 = bits.Mul64(x56, 0xccd1c8aaee00bc4f) + var x64 uint64 + var x65 uint64 + x65, x64 = bits.Mul64(x62, 0xffffffff00000000) + var x66 uint64 + var x67 uint64 + x67, x66 = bits.Mul64(x62, 0xffffffffffffffff) + var x68 uint64 + var x69 uint64 + x69, x68 = bits.Mul64(x62, 0xbce6faada7179e84) + var x70 uint64 + var x71 uint64 + x71, x70 = bits.Mul64(x62, 0xf3b9cac2fc632551) + var x72 uint64 + var x73 uint64 + x72, x73 = bits.Add64(x71, x68, uint64(0x0)) + var x74 uint64 + var x75 uint64 + x74, x75 = bits.Add64(x69, x66, uint64(uint1(x73))) + var x76 uint64 + var x77 uint64 + x76, x77 = bits.Add64(x67, x64, uint64(uint1(x75))) + var x79 uint64 + _, x79 = bits.Add64(x56, x70, uint64(0x0)) + var x80 uint64 + var x81 uint64 + x80, x81 = bits.Add64(x58, x72, uint64(uint1(x79))) + var x82 uint64 + var x83 uint64 + x82, x83 = bits.Add64(x60, x74, uint64(uint1(x81))) + var x84 uint64 + var x85 uint64 + x84, x85 = bits.Add64((uint64(uint1(x61)) + (uint64(uint1(x55)) + (uint64(uint1(x47)) + x35))), x76, uint64(uint1(x83))) + var x86 uint64 + var x87 uint64 + x86, x87 = bits.Add64(x80, arg1[3], uint64(0x0)) + var x88 uint64 + var x89 uint64 + x88, x89 = bits.Add64(x82, uint64(0x0), uint64(uint1(x87))) + var x90 uint64 + var x91 uint64 + x90, x91 = bits.Add64(x84, uint64(0x0), uint64(uint1(x89))) + var x92 uint64 + _, x92 = bits.Mul64(x86, 0xccd1c8aaee00bc4f) + var x94 uint64 + var x95 uint64 + x95, x94 = bits.Mul64(x92, 0xffffffff00000000) + var x96 uint64 + var x97 uint64 + x97, x96 = bits.Mul64(x92, 0xffffffffffffffff) + var x98 uint64 + var x99 uint64 + x99, x98 = bits.Mul64(x92, 0xbce6faada7179e84) + var x100 uint64 + var x101 uint64 + x101, x100 = bits.Mul64(x92, 0xf3b9cac2fc632551) + var x102 uint64 + var x103 uint64 + x102, x103 = bits.Add64(x101, x98, uint64(0x0)) + var x104 uint64 + var x105 uint64 + x104, x105 = bits.Add64(x99, x96, uint64(uint1(x103))) + var x106 uint64 + var x107 uint64 + x106, x107 = bits.Add64(x97, x94, uint64(uint1(x105))) + var x109 uint64 + _, x109 = bits.Add64(x86, x100, uint64(0x0)) + var x110 uint64 + var x111 uint64 + x110, x111 = bits.Add64(x88, x102, uint64(uint1(x109))) + var x112 uint64 + var x113 uint64 + x112, x113 = bits.Add64(x90, x104, uint64(uint1(x111))) + var x114 uint64 + var x115 uint64 + x114, x115 = bits.Add64((uint64(uint1(x91)) + (uint64(uint1(x85)) + (uint64(uint1(x77)) + x65))), x106, uint64(uint1(x113))) + x116 := (uint64(uint1(x115)) + (uint64(uint1(x107)) + x95)) + var x117 uint64 + var x118 uint64 + x117, x118 = bits.Sub64(x110, 0xf3b9cac2fc632551, uint64(0x0)) + var x119 uint64 + var x120 uint64 + x119, x120 = bits.Sub64(x112, 0xbce6faada7179e84, uint64(uint1(x118))) + var x121 uint64 + var x122 uint64 + x121, x122 = bits.Sub64(x114, 0xffffffffffffffff, uint64(uint1(x120))) + var x123 uint64 + var x124 uint64 + x123, x124 = bits.Sub64(x116, 0xffffffff00000000, uint64(uint1(x122))) + var x126 uint64 + _, x126 = bits.Sub64(uint64(0x0), uint64(0x0), uint64(uint1(x124))) + var x127 uint64 + 
cmovznzU64(&x127, uint1(x126), x117, x110) + var x128 uint64 + cmovznzU64(&x128, uint1(x126), x119, x112) + var x129 uint64 + cmovznzU64(&x129, uint1(x126), x121, x114) + var x130 uint64 + cmovznzU64(&x130, uint1(x126), x123, x116) + out1[0] = x127 + out1[1] = x128 + out1[2] = x129 + out1[3] = x130 +} + +// ToMontgomery translates a field element into the Montgomery domain. +// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// +// Postconditions: +// +// eval (from_montgomery out1) mod m = eval arg1 mod m +// 0 ≤ eval out1 < m +func ToMontgomery(out1 *MontgomeryDomainFieldElement, arg1 *NonMontgomeryDomainFieldElement) { + x1 := arg1[1] + x2 := arg1[2] + x3 := arg1[3] + x4 := arg1[0] + var x5 uint64 + var x6 uint64 + x6, x5 = bits.Mul64(x4, 0x66e12d94f3d95620) + var x7 uint64 + var x8 uint64 + x8, x7 = bits.Mul64(x4, 0x2845b2392b6bec59) + var x9 uint64 + var x10 uint64 + x10, x9 = bits.Mul64(x4, 0x4699799c49bd6fa6) + var x11 uint64 + var x12 uint64 + x12, x11 = bits.Mul64(x4, 0x83244c95be79eea2) + var x13 uint64 + var x14 uint64 + x13, x14 = bits.Add64(x12, x9, uint64(0x0)) + var x15 uint64 + var x16 uint64 + x15, x16 = bits.Add64(x10, x7, uint64(uint1(x14))) + var x17 uint64 + var x18 uint64 + x17, x18 = bits.Add64(x8, x5, uint64(uint1(x16))) + var x19 uint64 + _, x19 = bits.Mul64(x11, 0xccd1c8aaee00bc4f) + var x21 uint64 + var x22 uint64 + x22, x21 = bits.Mul64(x19, 0xffffffff00000000) + var x23 uint64 + var x24 uint64 + x24, x23 = bits.Mul64(x19, 0xffffffffffffffff) + var x25 uint64 + var x26 uint64 + x26, x25 = bits.Mul64(x19, 0xbce6faada7179e84) + var x27 uint64 + var x28 uint64 + x28, x27 = bits.Mul64(x19, 0xf3b9cac2fc632551) + var x29 uint64 + var x30 uint64 + x29, x30 = bits.Add64(x28, x25, uint64(0x0)) + var x31 uint64 + var x32 uint64 + x31, x32 = bits.Add64(x26, x23, uint64(uint1(x30))) + var x33 uint64 + var x34 uint64 + x33, x34 = bits.Add64(x24, x21, uint64(uint1(x32))) + var x36 uint64 + _, x36 = bits.Add64(x11, x27, uint64(0x0)) + var x37 uint64 + var x38 uint64 + x37, x38 = bits.Add64(x13, x29, uint64(uint1(x36))) + var x39 uint64 + var x40 uint64 + x39, x40 = bits.Add64(x15, x31, uint64(uint1(x38))) + var x41 uint64 + var x42 uint64 + x41, x42 = bits.Add64(x17, x33, uint64(uint1(x40))) + var x43 uint64 + var x44 uint64 + x43, x44 = bits.Add64((uint64(uint1(x18)) + x6), (uint64(uint1(x34)) + x22), uint64(uint1(x42))) + var x45 uint64 + var x46 uint64 + x46, x45 = bits.Mul64(x1, 0x66e12d94f3d95620) + var x47 uint64 + var x48 uint64 + x48, x47 = bits.Mul64(x1, 0x2845b2392b6bec59) + var x49 uint64 + var x50 uint64 + x50, x49 = bits.Mul64(x1, 0x4699799c49bd6fa6) + var x51 uint64 + var x52 uint64 + x52, x51 = bits.Mul64(x1, 0x83244c95be79eea2) + var x53 uint64 + var x54 uint64 + x53, x54 = bits.Add64(x52, x49, uint64(0x0)) + var x55 uint64 + var x56 uint64 + x55, x56 = bits.Add64(x50, x47, uint64(uint1(x54))) + var x57 uint64 + var x58 uint64 + x57, x58 = bits.Add64(x48, x45, uint64(uint1(x56))) + var x59 uint64 + var x60 uint64 + x59, x60 = bits.Add64(x37, x51, uint64(0x0)) + var x61 uint64 + var x62 uint64 + x61, x62 = bits.Add64(x39, x53, uint64(uint1(x60))) + var x63 uint64 + var x64 uint64 + x63, x64 = bits.Add64(x41, x55, uint64(uint1(x62))) + var x65 uint64 + var x66 uint64 + x65, x66 = bits.Add64(x43, x57, uint64(uint1(x64))) + var x67 uint64 + _, x67 = bits.Mul64(x59, 0xccd1c8aaee00bc4f) + var x69 uint64 + var x70 uint64 + x70, x69 = bits.Mul64(x67, 0xffffffff00000000) + var x71 uint64 + var x72 uint64 + x72, x71 = bits.Mul64(x67, 0xffffffffffffffff) + var x73 uint64 + 
var x74 uint64 + x74, x73 = bits.Mul64(x67, 0xbce6faada7179e84) + var x75 uint64 + var x76 uint64 + x76, x75 = bits.Mul64(x67, 0xf3b9cac2fc632551) + var x77 uint64 + var x78 uint64 + x77, x78 = bits.Add64(x76, x73, uint64(0x0)) + var x79 uint64 + var x80 uint64 + x79, x80 = bits.Add64(x74, x71, uint64(uint1(x78))) + var x81 uint64 + var x82 uint64 + x81, x82 = bits.Add64(x72, x69, uint64(uint1(x80))) + var x84 uint64 + _, x84 = bits.Add64(x59, x75, uint64(0x0)) + var x85 uint64 + var x86 uint64 + x85, x86 = bits.Add64(x61, x77, uint64(uint1(x84))) + var x87 uint64 + var x88 uint64 + x87, x88 = bits.Add64(x63, x79, uint64(uint1(x86))) + var x89 uint64 + var x90 uint64 + x89, x90 = bits.Add64(x65, x81, uint64(uint1(x88))) + var x91 uint64 + var x92 uint64 + x91, x92 = bits.Add64(((uint64(uint1(x66)) + uint64(uint1(x44))) + (uint64(uint1(x58)) + x46)), (uint64(uint1(x82)) + x70), uint64(uint1(x90))) + var x93 uint64 + var x94 uint64 + x94, x93 = bits.Mul64(x2, 0x66e12d94f3d95620) + var x95 uint64 + var x96 uint64 + x96, x95 = bits.Mul64(x2, 0x2845b2392b6bec59) + var x97 uint64 + var x98 uint64 + x98, x97 = bits.Mul64(x2, 0x4699799c49bd6fa6) + var x99 uint64 + var x100 uint64 + x100, x99 = bits.Mul64(x2, 0x83244c95be79eea2) + var x101 uint64 + var x102 uint64 + x101, x102 = bits.Add64(x100, x97, uint64(0x0)) + var x103 uint64 + var x104 uint64 + x103, x104 = bits.Add64(x98, x95, uint64(uint1(x102))) + var x105 uint64 + var x106 uint64 + x105, x106 = bits.Add64(x96, x93, uint64(uint1(x104))) + var x107 uint64 + var x108 uint64 + x107, x108 = bits.Add64(x85, x99, uint64(0x0)) + var x109 uint64 + var x110 uint64 + x109, x110 = bits.Add64(x87, x101, uint64(uint1(x108))) + var x111 uint64 + var x112 uint64 + x111, x112 = bits.Add64(x89, x103, uint64(uint1(x110))) + var x113 uint64 + var x114 uint64 + x113, x114 = bits.Add64(x91, x105, uint64(uint1(x112))) + var x115 uint64 + _, x115 = bits.Mul64(x107, 0xccd1c8aaee00bc4f) + var x117 uint64 + var x118 uint64 + x118, x117 = bits.Mul64(x115, 0xffffffff00000000) + var x119 uint64 + var x120 uint64 + x120, x119 = bits.Mul64(x115, 0xffffffffffffffff) + var x121 uint64 + var x122 uint64 + x122, x121 = bits.Mul64(x115, 0xbce6faada7179e84) + var x123 uint64 + var x124 uint64 + x124, x123 = bits.Mul64(x115, 0xf3b9cac2fc632551) + var x125 uint64 + var x126 uint64 + x125, x126 = bits.Add64(x124, x121, uint64(0x0)) + var x127 uint64 + var x128 uint64 + x127, x128 = bits.Add64(x122, x119, uint64(uint1(x126))) + var x129 uint64 + var x130 uint64 + x129, x130 = bits.Add64(x120, x117, uint64(uint1(x128))) + var x132 uint64 + _, x132 = bits.Add64(x107, x123, uint64(0x0)) + var x133 uint64 + var x134 uint64 + x133, x134 = bits.Add64(x109, x125, uint64(uint1(x132))) + var x135 uint64 + var x136 uint64 + x135, x136 = bits.Add64(x111, x127, uint64(uint1(x134))) + var x137 uint64 + var x138 uint64 + x137, x138 = bits.Add64(x113, x129, uint64(uint1(x136))) + var x139 uint64 + var x140 uint64 + x139, x140 = bits.Add64(((uint64(uint1(x114)) + uint64(uint1(x92))) + (uint64(uint1(x106)) + x94)), (uint64(uint1(x130)) + x118), uint64(uint1(x138))) + var x141 uint64 + var x142 uint64 + x142, x141 = bits.Mul64(x3, 0x66e12d94f3d95620) + var x143 uint64 + var x144 uint64 + x144, x143 = bits.Mul64(x3, 0x2845b2392b6bec59) + var x145 uint64 + var x146 uint64 + x146, x145 = bits.Mul64(x3, 0x4699799c49bd6fa6) + var x147 uint64 + var x148 uint64 + x148, x147 = bits.Mul64(x3, 0x83244c95be79eea2) + var x149 uint64 + var x150 uint64 + x149, x150 = bits.Add64(x148, x145, uint64(0x0)) + var 
x151 uint64 + var x152 uint64 + x151, x152 = bits.Add64(x146, x143, uint64(uint1(x150))) + var x153 uint64 + var x154 uint64 + x153, x154 = bits.Add64(x144, x141, uint64(uint1(x152))) + var x155 uint64 + var x156 uint64 + x155, x156 = bits.Add64(x133, x147, uint64(0x0)) + var x157 uint64 + var x158 uint64 + x157, x158 = bits.Add64(x135, x149, uint64(uint1(x156))) + var x159 uint64 + var x160 uint64 + x159, x160 = bits.Add64(x137, x151, uint64(uint1(x158))) + var x161 uint64 + var x162 uint64 + x161, x162 = bits.Add64(x139, x153, uint64(uint1(x160))) + var x163 uint64 + _, x163 = bits.Mul64(x155, 0xccd1c8aaee00bc4f) + var x165 uint64 + var x166 uint64 + x166, x165 = bits.Mul64(x163, 0xffffffff00000000) + var x167 uint64 + var x168 uint64 + x168, x167 = bits.Mul64(x163, 0xffffffffffffffff) + var x169 uint64 + var x170 uint64 + x170, x169 = bits.Mul64(x163, 0xbce6faada7179e84) + var x171 uint64 + var x172 uint64 + x172, x171 = bits.Mul64(x163, 0xf3b9cac2fc632551) + var x173 uint64 + var x174 uint64 + x173, x174 = bits.Add64(x172, x169, uint64(0x0)) + var x175 uint64 + var x176 uint64 + x175, x176 = bits.Add64(x170, x167, uint64(uint1(x174))) + var x177 uint64 + var x178 uint64 + x177, x178 = bits.Add64(x168, x165, uint64(uint1(x176))) + var x180 uint64 + _, x180 = bits.Add64(x155, x171, uint64(0x0)) + var x181 uint64 + var x182 uint64 + x181, x182 = bits.Add64(x157, x173, uint64(uint1(x180))) + var x183 uint64 + var x184 uint64 + x183, x184 = bits.Add64(x159, x175, uint64(uint1(x182))) + var x185 uint64 + var x186 uint64 + x185, x186 = bits.Add64(x161, x177, uint64(uint1(x184))) + var x187 uint64 + var x188 uint64 + x187, x188 = bits.Add64(((uint64(uint1(x162)) + uint64(uint1(x140))) + (uint64(uint1(x154)) + x142)), (uint64(uint1(x178)) + x166), uint64(uint1(x186))) + var x189 uint64 + var x190 uint64 + x189, x190 = bits.Sub64(x181, 0xf3b9cac2fc632551, uint64(0x0)) + var x191 uint64 + var x192 uint64 + x191, x192 = bits.Sub64(x183, 0xbce6faada7179e84, uint64(uint1(x190))) + var x193 uint64 + var x194 uint64 + x193, x194 = bits.Sub64(x185, 0xffffffffffffffff, uint64(uint1(x192))) + var x195 uint64 + var x196 uint64 + x195, x196 = bits.Sub64(x187, 0xffffffff00000000, uint64(uint1(x194))) + var x198 uint64 + _, x198 = bits.Sub64(uint64(uint1(x188)), uint64(0x0), uint64(uint1(x196))) + var x199 uint64 + cmovznzU64(&x199, uint1(x198), x189, x181) + var x200 uint64 + cmovznzU64(&x200, uint1(x198), x191, x183) + var x201 uint64 + cmovznzU64(&x201, uint1(x198), x193, x185) + var x202 uint64 + cmovznzU64(&x202, uint1(x198), x195, x187) + out1[0] = x199 + out1[1] = x200 + out1[2] = x201 + out1[3] = x202 +} + +// Nonzero outputs a single non-zero word if the input is non-zero and zero otherwise. +// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// +// Postconditions: +// +// out1 = 0 ↔ eval (from_montgomery arg1) mod m = 0 +// +// Input Bounds: +// +// arg1: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +// +// Output Bounds: +// +// out1: [0x0 ~> 0xffffffffffffffff] +func Nonzero(out1 *uint64, arg1 *[4]uint64) { + x1 := (arg1[0] | (arg1[1] | (arg1[2] | arg1[3]))) + *out1 = x1 +} + +// Selectznz is a multi-limb conditional select. 
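Selectznz, whose contract follows, is simply cmovznzU64 applied limb by limb, and the conditional move itself is the usual multiply-by-flag mask with no data-dependent branch. A one-word sketch for illustration (cmov is a hypothetical name, not part of the generated file):

```go
package sketch

// cmov mirrors cmovznzU64: returns a when flag == 0 and b when flag == 1,
// computed branch-free via an all-zeros/all-ones mask. flag must be 0 or 1.
func cmov(flag, a, b uint64) uint64 {
	mask := flag * 0xffffffffffffffff // 0 -> all zeros, 1 -> all ones
	return (mask & b) | (^mask & a)
}
```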
+// +// Postconditions: +// +// eval out1 = (if arg1 = 0 then eval arg2 else eval arg3) +// +// Input Bounds: +// +// arg1: [0x0 ~> 0x1] +// arg2: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +// arg3: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +// +// Output Bounds: +// +// out1: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +func Selectznz(out1 *[4]uint64, arg1 uint1, arg2 *[4]uint64, arg3 *[4]uint64) { + var x1 uint64 + cmovznzU64(&x1, arg1, arg2[0], arg3[0]) + var x2 uint64 + cmovznzU64(&x2, arg1, arg2[1], arg3[1]) + var x3 uint64 + cmovznzU64(&x3, arg1, arg2[2], arg3[2]) + var x4 uint64 + cmovznzU64(&x4, arg1, arg2[3], arg3[3]) + out1[0] = x1 + out1[1] = x2 + out1[2] = x3 + out1[3] = x4 +} + +// ToBytes serializes a field element NOT in the Montgomery domain to bytes in little-endian order. +// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// +// Postconditions: +// +// out1 = map (λ x, ⌊((eval arg1 mod m) mod 2^(8 * (x + 1))) / 2^(8 * x)⌋) [0..31] +// +// Input Bounds: +// +// arg1: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +// +// Output Bounds: +// +// out1: [[0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff]] +func ToBytes(out1 *[32]uint8, arg1 *[4]uint64) { + x1 := arg1[3] + x2 := arg1[2] + x3 := arg1[1] + x4 := arg1[0] + x5 := (uint8(x4) & 0xff) + x6 := (x4 >> 8) + x7 := (uint8(x6) & 0xff) + x8 := (x6 >> 8) + x9 := (uint8(x8) & 0xff) + x10 := (x8 >> 8) + x11 := (uint8(x10) & 0xff) + x12 := (x10 >> 8) + x13 := (uint8(x12) & 0xff) + x14 := (x12 >> 8) + x15 := (uint8(x14) & 0xff) + x16 := (x14 >> 8) + x17 := (uint8(x16) & 0xff) + x18 := uint8((x16 >> 8)) + x19 := (uint8(x3) & 0xff) + x20 := (x3 >> 8) + x21 := (uint8(x20) & 0xff) + x22 := (x20 >> 8) + x23 := (uint8(x22) & 0xff) + x24 := (x22 >> 8) + x25 := (uint8(x24) & 0xff) + x26 := (x24 >> 8) + x27 := (uint8(x26) & 0xff) + x28 := (x26 >> 8) + x29 := (uint8(x28) & 0xff) + x30 := (x28 >> 8) + x31 := (uint8(x30) & 0xff) + x32 := uint8((x30 >> 8)) + x33 := (uint8(x2) & 0xff) + x34 := (x2 >> 8) + x35 := (uint8(x34) & 0xff) + x36 := (x34 >> 8) + x37 := (uint8(x36) & 0xff) + x38 := (x36 >> 8) + x39 := (uint8(x38) & 0xff) + x40 := (x38 >> 8) + x41 := (uint8(x40) & 0xff) + x42 := (x40 >> 8) + x43 := (uint8(x42) & 0xff) + x44 := (x42 >> 8) + x45 := (uint8(x44) & 0xff) + x46 := uint8((x44 >> 8)) + x47 := (uint8(x1) & 0xff) + x48 := (x1 >> 8) + x49 := (uint8(x48) & 0xff) + x50 := (x48 >> 8) + x51 := (uint8(x50) & 0xff) + x52 := (x50 >> 8) + x53 := (uint8(x52) & 0xff) + x54 := (x52 >> 8) + x55 := (uint8(x54) & 0xff) + x56 := (x54 >> 8) + x57 := (uint8(x56) & 0xff) + x58 := (x56 >> 8) + x59 := (uint8(x58) & 0xff) + x60 := uint8((x58 >> 8)) + out1[0] = x5 + out1[1] = x7 + out1[2] = x9 + out1[3] = x11 + out1[4] = x13 + out1[5] = x15 + out1[6] = x17 + out1[7] = x18 + out1[8] = x19 + out1[9] = x21 + out1[10] = x23 + out1[11] = x25 + out1[12] 
= x27 + out1[13] = x29 + out1[14] = x31 + out1[15] = x32 + out1[16] = x33 + out1[17] = x35 + out1[18] = x37 + out1[19] = x39 + out1[20] = x41 + out1[21] = x43 + out1[22] = x45 + out1[23] = x46 + out1[24] = x47 + out1[25] = x49 + out1[26] = x51 + out1[27] = x53 + out1[28] = x55 + out1[29] = x57 + out1[30] = x59 + out1[31] = x60 +} + +// FromBytes deserializes a field element NOT in the Montgomery domain from bytes in little-endian order. +// +// Preconditions: +// +// 0 ≤ bytes_eval arg1 < m +// +// Postconditions: +// +// eval out1 mod m = bytes_eval arg1 mod m +// 0 ≤ eval out1 < m +// +// Input Bounds: +// +// arg1: [[0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff]] +// +// Output Bounds: +// +// out1: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +func FromBytes(out1 *[4]uint64, arg1 *[32]uint8) { + x1 := (uint64(arg1[31]) << 56) + x2 := (uint64(arg1[30]) << 48) + x3 := (uint64(arg1[29]) << 40) + x4 := (uint64(arg1[28]) << 32) + x5 := (uint64(arg1[27]) << 24) + x6 := (uint64(arg1[26]) << 16) + x7 := (uint64(arg1[25]) << 8) + x8 := arg1[24] + x9 := (uint64(arg1[23]) << 56) + x10 := (uint64(arg1[22]) << 48) + x11 := (uint64(arg1[21]) << 40) + x12 := (uint64(arg1[20]) << 32) + x13 := (uint64(arg1[19]) << 24) + x14 := (uint64(arg1[18]) << 16) + x15 := (uint64(arg1[17]) << 8) + x16 := arg1[16] + x17 := (uint64(arg1[15]) << 56) + x18 := (uint64(arg1[14]) << 48) + x19 := (uint64(arg1[13]) << 40) + x20 := (uint64(arg1[12]) << 32) + x21 := (uint64(arg1[11]) << 24) + x22 := (uint64(arg1[10]) << 16) + x23 := (uint64(arg1[9]) << 8) + x24 := arg1[8] + x25 := (uint64(arg1[7]) << 56) + x26 := (uint64(arg1[6]) << 48) + x27 := (uint64(arg1[5]) << 40) + x28 := (uint64(arg1[4]) << 32) + x29 := (uint64(arg1[3]) << 24) + x30 := (uint64(arg1[2]) << 16) + x31 := (uint64(arg1[1]) << 8) + x32 := arg1[0] + x33 := (x31 + uint64(x32)) + x34 := (x30 + x33) + x35 := (x29 + x34) + x36 := (x28 + x35) + x37 := (x27 + x36) + x38 := (x26 + x37) + x39 := (x25 + x38) + x40 := (x23 + uint64(x24)) + x41 := (x22 + x40) + x42 := (x21 + x41) + x43 := (x20 + x42) + x44 := (x19 + x43) + x45 := (x18 + x44) + x46 := (x17 + x45) + x47 := (x15 + uint64(x16)) + x48 := (x14 + x47) + x49 := (x13 + x48) + x50 := (x12 + x49) + x51 := (x11 + x50) + x52 := (x10 + x51) + x53 := (x9 + x52) + x54 := (x7 + uint64(x8)) + x55 := (x6 + x54) + x56 := (x5 + x55) + x57 := (x4 + x56) + x58 := (x3 + x57) + x59 := (x2 + x58) + x60 := (x1 + x59) + out1[0] = x39 + out1[1] = x46 + out1[2] = x53 + out1[3] = x60 +} + +// SetOne returns the field element one in the Montgomery domain. 
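ToBytes and FromBytes above operate on the plain, non-Montgomery limbs and use little-endian byte order, so the low byte of limb 0 is the first byte of the output. A round-trip sketch, assuming the same fq import path as before and not part of the patch:

```go
package sketch

import (
	"fmt"

	"github.com/sonr-io/sonr/crypto/core/curves/native/p256/fq" // assumed path
)

func demoRoundTrip() {
	// The low byte of limb 0 lands in buf[0]; FromBytes inverts ToBytes exactly.
	x := [4]uint64{0x0102030405060708, 0, 0, 0}
	var buf [32]uint8
	fq.ToBytes(&buf, &x)
	fmt.Printf("%x %x\n", buf[0], buf[7]) // 8 1

	var y [4]uint64
	fq.FromBytes(&y, &buf)
	fmt.Println(y == x) // true
}
```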
+// +// Postconditions: +// +// eval (from_montgomery out1) mod m = 1 mod m +// 0 ≤ eval out1 < m +func SetOne(out1 *MontgomeryDomainFieldElement) { + out1[0] = 0xc46353d039cdaaf + out1[1] = 0x4319055258e8617b + out1[2] = uint64(0x0) + out1[3] = 0xffffffff +} diff --git a/core/curves/native/p256/point.go b/core/curves/native/p256/point.go new file mode 100644 index 0000000..e8e14a4 --- /dev/null +++ b/core/curves/native/p256/point.go @@ -0,0 +1,350 @@ +package p256 + +import ( + "sync" + + "github.com/sonr-io/sonr/crypto/core/curves/native" + "github.com/sonr-io/sonr/crypto/core/curves/native/p256/fp" + "github.com/sonr-io/sonr/crypto/internal" +) + +var ( + p256PointInitonce sync.Once + p256PointParams native.EllipticPointParams + p256PointSswuInitOnce sync.Once + p256PointSswuParams native.SswuParams +) + +func P256PointNew() *native.EllipticPoint { + return &native.EllipticPoint{ + X: fp.P256FpNew(), + Y: fp.P256FpNew(), + Z: fp.P256FpNew(), + Params: getP256PointParams(), + Arithmetic: &p256PointArithmetic{}, + } +} + +func p256PointParamsInit() { + // How these values were derived + // left for informational purposes + // params := elliptic.P256().Params() + // a := big.NewInt(-3) + // a.Mod(a, params.P) + // capA := fp.P256FpNew().SetBigInt(a) + // capB := fp.P256FpNew().SetBigInt(params.B) + // gx := fp.P256FpNew().SetBigInt(params.Gx) + // gy := fp.P256FpNew().SetBigInt(params.Gy) + + p256PointParams = native.EllipticPointParams{ + A: fp.P256FpNew(). + SetRaw(&[native.FieldLimbs]uint64{0xfffffffffffffffc, 0x00000003ffffffff, 0x0000000000000000, 0xfffffffc00000004}), + B: fp.P256FpNew(). + SetRaw(&[native.FieldLimbs]uint64{0xd89cdf6229c4bddf, 0xacf005cd78843090, 0xe5a220abf7212ed6, 0xdc30061d04874834}), + Gx: fp.P256FpNew(). + SetRaw(&[native.FieldLimbs]uint64{0x79e730d418a9143c, 0x75ba95fc5fedb601, 0x79fb732b77622510, 0x18905f76a53755c6}), + Gy: fp.P256FpNew(). 
+ SetRaw(&[native.FieldLimbs]uint64{0xddf25357ce95560a, 0x8b4ab8e4ba19e45c, 0xd2e88688dd21f325, 0x8571ff1825885d85}), + BitSize: 256, + Name: "P256", + } +} + +func getP256PointParams() *native.EllipticPointParams { + p256PointInitonce.Do(p256PointParamsInit) + return &p256PointParams +} + +func getP256PointSswuParams() *native.SswuParams { + p256PointSswuInitOnce.Do(p256PointSswuParamsInit) + return &p256PointSswuParams +} + +func p256PointSswuParamsInit() { + // How these values were derived + // left for informational purposes + //params := elliptic.P256().Params() + // + //// c1 = (q - 3) / 4 + //c1 := new(big.Int).Set(params.P) + //c1.Sub(c1, big.NewInt(3)) + //c1.Rsh(c1, 2) + // + //a := big.NewInt(-3) + //a.Mod(a, params.P) + //b := new(big.Int).Set(params.B) + //z := big.NewInt(-10) + //z.Mod(z, params.P) + //// sqrt(-Z^3) + //zTmp := new(big.Int).Exp(z, big.NewInt(3), nil) + //zTmp = zTmp.Neg(zTmp) + //zTmp.Mod(zTmp, params.P) + //c2 := new(big.Int).ModSqrt(zTmp, params.P) + // + //var capC1Bytes [32]byte + //c1.FillBytes(capC1Bytes[:]) + //capC1 := fp.P256FpNew().SetRaw(&[native.FieldLimbs]uint64{ + // binary.BigEndian.Uint64(capC1Bytes[24:]), + // binary.BigEndian.Uint64(capC1Bytes[16:24]), + // binary.BigEndian.Uint64(capC1Bytes[8:16]), + // binary.BigEndian.Uint64(capC1Bytes[:8]), + //}) + //capC2 := fp.P256FpNew().SetBigInt(c2) + //capA := fp.P256FpNew().SetBigInt(a) + //capB := fp.P256FpNew().SetBigInt(b) + //capZ := fp.P256FpNew().SetBigInt(z) + + p256PointSswuParams = native.SswuParams{ + C1: [native.FieldLimbs]uint64{ + 0xffffffffffffffff, + 0x000000003fffffff, + 0x4000000000000000, + 0x3fffffffc0000000, + }, + C2: [native.FieldLimbs]uint64{ + 0x53e43951f64fdbe7, + 0xb2806c63966a1a66, + 0x1ac5d59c3298bf50, + 0xa3323851ba997e27, + }, + A: [native.FieldLimbs]uint64{ + 0xfffffffffffffffc, + 0x00000003ffffffff, + 0x0000000000000000, + 0xfffffffc00000004, + }, + B: [native.FieldLimbs]uint64{ + 0xd89cdf6229c4bddf, + 0xacf005cd78843090, + 0xe5a220abf7212ed6, + 0xdc30061d04874834, + }, + Z: [native.FieldLimbs]uint64{ + 0xfffffffffffffff5, + 0x0000000affffffff, + 0x0000000000000000, + 0xfffffff50000000b, + }, + } +} + +type p256PointArithmetic struct{} + +func (k p256PointArithmetic) Hash( + out *native.EllipticPoint, + hash *native.EllipticPointHasher, + msg, dst []byte, +) error { + var u []byte + sswuParams := getP256PointSswuParams() + + switch hash.Type() { + case native.XMD: + u = native.ExpandMsgXmd(hash, msg, dst, 96) + case native.XOF: + u = native.ExpandMsgXof(hash, msg, dst, 96) + } + var buf [64]byte + copy(buf[:48], internal.ReverseScalarBytes(u[:48])) + u0 := fp.P256FpNew().SetBytesWide(&buf) + copy(buf[:48], internal.ReverseScalarBytes(u[48:])) + u1 := fp.P256FpNew().SetBytesWide(&buf) + + q0x, q0y := sswuParams.Osswu3mod4(u0) + q1x, q1y := sswuParams.Osswu3mod4(u1) + out.X = q0x + out.Y = q0y + out.Z.SetOne() + tv := &native.EllipticPoint{ + X: q1x, + Y: q1y, + Z: fp.P256FpNew().SetOne(), + } + k.Add(out, out, tv) + return nil +} + +func (k p256PointArithmetic) Double(out, arg *native.EllipticPoint) { + // Addition formula from Renes-Costello-Batina 2015 + // (https://eprint.iacr.org/2015/1060 Algorithm 6) + var xx, yy, zz, xy2, yz2, xz2, bzz, bzz3 [native.FieldLimbs]uint64 + var yyMBzz3, yyPBzz3, yFrag, xFrag, zz3 [native.FieldLimbs]uint64 + var bxz2, bxz6, xx3Mzz3, x, y, z [native.FieldLimbs]uint64 + b := getP256PointParams().B.Value + f := arg.X.Arithmetic + + f.Square(&xx, &arg.X.Value) + f.Square(&yy, &arg.Y.Value) + f.Square(&zz, &arg.Z.Value) + + f.Mul(&xy2, 
&arg.X.Value, &arg.Y.Value) + f.Add(&xy2, &xy2, &xy2) + + f.Mul(&yz2, &arg.Y.Value, &arg.Z.Value) + f.Add(&yz2, &yz2, &yz2) + + f.Mul(&xz2, &arg.X.Value, &arg.Z.Value) + f.Add(&xz2, &xz2, &xz2) + + f.Mul(&bzz, &b, &zz) + f.Sub(&bzz, &bzz, &xz2) + + f.Add(&bzz3, &bzz, &bzz) + f.Add(&bzz3, &bzz3, &bzz) + + f.Sub(&yyMBzz3, &yy, &bzz3) + f.Add(&yyPBzz3, &yy, &bzz3) + f.Mul(&yFrag, &yyPBzz3, &yyMBzz3) + f.Mul(&xFrag, &yyMBzz3, &xy2) + + f.Add(&zz3, &zz, &zz) + f.Add(&zz3, &zz3, &zz) + + f.Mul(&bxz2, &b, &xz2) + f.Sub(&bxz2, &bxz2, &zz3) + f.Sub(&bxz2, &bxz2, &xx) + + f.Add(&bxz6, &bxz2, &bxz2) + f.Add(&bxz6, &bxz6, &bxz2) + + f.Add(&xx3Mzz3, &xx, &xx) + f.Add(&xx3Mzz3, &xx3Mzz3, &xx) + f.Sub(&xx3Mzz3, &xx3Mzz3, &zz3) + + f.Mul(&x, &bxz6, &yz2) + f.Sub(&x, &xFrag, &x) + + f.Mul(&y, &xx3Mzz3, &bxz6) + f.Add(&y, &yFrag, &y) + + f.Mul(&z, &yz2, &yy) + f.Add(&z, &z, &z) + f.Add(&z, &z, &z) + + out.X.Value = x + out.Y.Value = y + out.Z.Value = z +} + +func (k p256PointArithmetic) Add(out, arg1, arg2 *native.EllipticPoint) { + // Addition formula from Renes-Costello-Batina 2015 + // (https://eprint.iacr.org/2015/1060 Algorithm 4). + var xx, yy, zz, zz3, bxz, bxz3 [native.FieldLimbs]uint64 + var tv1, xyPairs, yzPairs, xzPairs [native.FieldLimbs]uint64 + var bzz, bzz3, yyMBzz3, yyPBzz3 [native.FieldLimbs]uint64 + var xx3Mzz3, x, y, z [native.FieldLimbs]uint64 + f := arg1.X.Arithmetic + b := getP256PointParams().B.Value + + f.Mul(&xx, &arg1.X.Value, &arg2.X.Value) + f.Mul(&yy, &arg1.Y.Value, &arg2.Y.Value) + f.Mul(&zz, &arg1.Z.Value, &arg2.Z.Value) + + f.Add(&tv1, &arg2.X.Value, &arg2.Y.Value) + f.Add(&xyPairs, &arg1.X.Value, &arg1.Y.Value) + f.Mul(&xyPairs, &xyPairs, &tv1) + f.Sub(&xyPairs, &xyPairs, &xx) + f.Sub(&xyPairs, &xyPairs, &yy) + + f.Add(&tv1, &arg2.Y.Value, &arg2.Z.Value) + f.Add(&yzPairs, &arg1.Y.Value, &arg1.Z.Value) + f.Mul(&yzPairs, &yzPairs, &tv1) + f.Sub(&yzPairs, &yzPairs, &yy) + f.Sub(&yzPairs, &yzPairs, &zz) + + f.Add(&tv1, &arg2.X.Value, &arg2.Z.Value) + f.Add(&xzPairs, &arg1.X.Value, &arg1.Z.Value) + f.Mul(&xzPairs, &xzPairs, &tv1) + f.Sub(&xzPairs, &xzPairs, &xx) + f.Sub(&xzPairs, &xzPairs, &zz) + + f.Mul(&bzz, &b, &zz) + f.Sub(&bzz, &xzPairs, &bzz) + + f.Add(&bzz3, &bzz, &bzz) + f.Add(&bzz3, &bzz3, &bzz) + + f.Sub(&yyMBzz3, &yy, &bzz3) + f.Add(&yyPBzz3, &yy, &bzz3) + + f.Add(&zz3, &zz, &zz) + f.Add(&zz3, &zz3, &zz) + + f.Mul(&bxz, &b, &xzPairs) + f.Sub(&bxz, &bxz, &zz3) + f.Sub(&bxz, &bxz, &xx) + + f.Add(&bxz3, &bxz, &bxz) + f.Add(&bxz3, &bxz3, &bxz) + + f.Add(&xx3Mzz3, &xx, &xx) + f.Add(&xx3Mzz3, &xx3Mzz3, &xx) + f.Sub(&xx3Mzz3, &xx3Mzz3, &zz3) + + f.Mul(&tv1, &yzPairs, &bxz3) + f.Mul(&x, &yyPBzz3, &xyPairs) + f.Sub(&x, &x, &tv1) + + f.Mul(&tv1, &xx3Mzz3, &bxz3) + f.Mul(&y, &yyPBzz3, &yyMBzz3) + f.Add(&y, &y, &tv1) + + f.Mul(&tv1, &xyPairs, &xx3Mzz3) + f.Mul(&z, &yyMBzz3, &yzPairs) + f.Add(&z, &z, &tv1) + + e1 := arg1.Z.IsZero() + e2 := arg2.Z.IsZero() + + // If arg1 is identity set it to arg2 + f.Selectznz(&z, &z, &arg2.Z.Value, e1) + f.Selectznz(&y, &y, &arg2.Y.Value, e1) + f.Selectznz(&x, &x, &arg2.X.Value, e1) + // If arg2 is identity set it to arg1 + f.Selectznz(&z, &z, &arg1.Z.Value, e2) + f.Selectznz(&y, &y, &arg1.Y.Value, e2) + f.Selectznz(&x, &x, &arg1.X.Value, e2) + + out.X.Value = x + out.Y.Value = y + out.Z.Value = z +} + +func (k p256PointArithmetic) IsOnCurve(arg *native.EllipticPoint) bool { + affine := P256PointNew() + k.ToAffine(affine, arg) + lhs := fp.P256FpNew().Square(affine.Y) + rhs := fp.P256FpNew() + k.RhsEq(rhs, affine.X) + return lhs.Equal(rhs) == 1 +} 
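IsOnCurve maps the point to affine coordinates and compares y² with RhsEq(x) = x³ + ax + b, where a = -3 for P-256 (RhsEq is defined just after ToAffine below). As an independent cross-check of that relation, here is a short sketch against the standard library's P-256 parameters; it is illustrative only and not part of the patch.

```go
package main

import (
	"crypto/elliptic"
	"fmt"
	"math/big"
)

func main() {
	// Check Gy^2 ≡ Gx^3 - 3*Gx + B (mod P) for the standard P-256 generator.
	p := elliptic.P256().Params()

	lhs := new(big.Int).Mul(p.Gy, p.Gy)
	lhs.Mod(lhs, p.P)

	rhs := new(big.Int).Exp(p.Gx, big.NewInt(3), p.P)
	rhs.Add(rhs, new(big.Int).Mul(big.NewInt(-3), p.Gx))
	rhs.Add(rhs, p.B)
	rhs.Mod(rhs, p.P)

	fmt.Println(lhs.Cmp(rhs) == 0) // true
}
```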
+
+func (k p256PointArithmetic) ToAffine(out, arg *native.EllipticPoint) {
+ var wasInverted int
+ var zero, x, y, z [native.FieldLimbs]uint64
+ f := arg.X.Arithmetic
+
+ f.Invert(&wasInverted, &z, &arg.Z.Value)
+ f.Mul(&x, &arg.X.Value, &z)
+ f.Mul(&y, &arg.Y.Value, &z)
+
+ out.Z.SetOne()
+ // If point at infinity this does nothing
+ f.Selectznz(&x, &zero, &x, wasInverted)
+ f.Selectznz(&y, &zero, &y, wasInverted)
+ f.Selectznz(&z, &zero, &out.Z.Value, wasInverted)
+
+ out.X.Value = x
+ out.Y.Value = y
+ out.Z.Value = z
+ out.Params = arg.Params
+ out.Arithmetic = arg.Arithmetic
+}
+
+func (k p256PointArithmetic) RhsEq(out, x *native.Field) {
+ // Elliptic curve equation for p256 is: y^2 = x^3 + ax + b
+ out.Square(x)
+ out.Mul(out, x)
+ out.Add(out, getP256PointParams().B)
+ out.Add(out, fp.P256FpNew().Mul(getP256PointParams().A, x))
+}
diff --git a/core/curves/native/p256/point_test.go b/core/curves/native/p256/point_test.go
new file mode 100644
index 0000000..da54531
--- /dev/null
+++ b/core/curves/native/p256/point_test.go
@@ -0,0 +1,38 @@
+package p256_test
+
+import (
+ "fmt"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+
+ "github.com/sonr-io/sonr/crypto/core/curves"
+ "github.com/sonr-io/sonr/crypto/core/curves/native"
+ "github.com/sonr-io/sonr/crypto/core/curves/native/p256"
+ "github.com/sonr-io/sonr/crypto/core/curves/native/p256/fp"
+)
+
+func TestP256PointArithmetic_Double(t *testing.T) {
+ g := p256.P256PointNew().Generator()
+ pt1 := p256.P256PointNew().Double(g)
+ pt2 := p256.P256PointNew().Add(g, g)
+ pt3 := p256.P256PointNew().Mul(g, fp.P256FpNew().SetUint64(2))
+
+ e1 := pt1.Equal(pt2)
+ e2 := pt1.Equal(pt3)
+ e3 := pt2.Equal(pt3)
+ require.Equal(t, 1, e1)
+ require.Equal(t, 1, e2)
+ require.Equal(t, 1, e3)
+}
+
+func TestP256PointArithmetic_Hash(t *testing.T) {
+ var b [32]byte
+ sc, err := p256.P256PointNew().Hash(b[:], native.EllipticPointHasherSha256())
+ sc1 := curves.P256().NewIdentityPoint().Hash(b[:])
+ fmt.Printf("%v\n", sc1)
+
+ require.NoError(t, err)
+ require.True(t, !sc.IsIdentity())
+ require.True(t, sc.IsOnCurve())
+}
diff --git a/core/curves/native/pasta/README.md b/core/curves/native/pasta/README.md
new file mode 100755
index 0000000..e0785f2
--- /dev/null
+++ b/core/curves/native/pasta/README.md
@@ -0,0 +1,10 @@
+---
+aliases: [README]
+tags: []
+title: README
+linter-yaml-title-alias: README
+date created: Wednesday, April 17th 2024, 4:11:40 pm
+date modified: Thursday, April 18th 2024, 8:19:25 am
+---
+
+## Pallas and Vesta (Pasta) Curves
diff --git a/core/curves/native/pasta/fp/fp.go b/core/curves/native/pasta/fp/fp.go
new file mode 100644
index 0000000..2a025e8
--- /dev/null
+++ b/core/curves/native/pasta/fp/fp.go
@@ -0,0 +1,375 @@
+//
+// Copyright Coinbase, Inc. All Rights Reserved.
+//
+// SPDX-License-Identifier: Apache-2.0
+//
+
+package fp
+
+import (
+ "encoding/binary"
+ "fmt"
+ "math/big"
+
+ "github.com/sonr-io/sonr/crypto/internal"
+)
+
+type Fp fiat_pasta_fp_montgomery_domain_field_element
+
+// r = 2^256 mod p
+var r = &Fp{0x34786d38fffffffd, 0x992c350be41914ad, 0xffffffffffffffff, 0x3fffffffffffffff}
+
+// r2 = 2^512 mod p
+var r2 = &Fp{0x8c78ecb30000000f, 0xd7d30dbd8b0de0e7, 0x7797a99bc3c95d18, 0x096d41af7b9cb714}
+
+// r3 = 2^768 mod p
+var r3 = &Fp{0xf185a5993a9e10f9, 0xf6a68f3b6ac5b1d1, 0xdf8d1014353fd42c, 0x2ae309222d2d9910}
+
+// generator = 5 mod p is a generator of the `p - 1` order multiplicative
+// subgroup, or in other words a primitive element of the field.
+var generator = &Fp{0xa1a55e68ffffffed, 0x74c2a54b4f4982f3, 0xfffffffffffffffd, 0x3fffffffffffffff} + +var s = 32 + +// modulus representation +// p = 0x40000000000000000000000000000000224698fc094cf91b992d30ed00000001 +var modulus = &Fp{0x992d30ed00000001, 0x224698fc094cf91b, 0x0000000000000000, 0x4000000000000000} + +var biModulus = new(big.Int).SetBytes([]byte{ + 0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x22, 0x46, 0x98, 0xfc, 0x09, 0x4c, 0xf9, 0x1b, + 0x99, 0x2d, 0x30, 0xed, 0x00, 0x00, 0x00, 0x01, +}) + +// Cmp returns -1 if fp < rhs +// 0 if fp == rhs +// 1 if fp > rhs +func (fp *Fp) Cmp(rhs *Fp) int { + gt := 0 + lt := 0 + for i := len(fp) - 1; i >= 0; i-- { + gt |= int((rhs[i]-fp[i])>>63) &^ lt + lt |= int((fp[i]-rhs[i])>>63) &^ gt + } + return gt - lt +} + +// Equal returns true if fp == rhs +func (fp *Fp) Equal(rhs *Fp) bool { + t := fp[0] ^ rhs[0] + t |= fp[1] ^ rhs[1] + t |= fp[2] ^ rhs[2] + t |= fp[3] ^ rhs[3] + return t == 0 +} + +// IsZero returns true if fp == 0 +func (fp *Fp) IsZero() bool { + t := fp[0] + t |= fp[1] + t |= fp[2] + t |= fp[3] + return t == 0 +} + +// IsOne returns true if fp == R +func (fp *Fp) IsOne() bool { + return fp.Equal(r) +} + +func (fp *Fp) IsOdd() bool { + tv := new(fiat_pasta_fp_non_montgomery_domain_field_element) + fiat_pasta_fp_from_montgomery(tv, (*fiat_pasta_fp_montgomery_domain_field_element)(fp)) + return tv[0]&0x01 == 0x01 +} + +// Set fp == rhs +func (fp *Fp) Set(rhs *Fp) *Fp { + fp[0] = rhs[0] + fp[1] = rhs[1] + fp[2] = rhs[2] + fp[3] = rhs[3] + return fp +} + +// SetUint64 sets fp == rhs +func (fp *Fp) SetUint64(rhs uint64) *Fp { + r := &fiat_pasta_fp_non_montgomery_domain_field_element{rhs, 0, 0, 0} + fiat_pasta_fp_to_montgomery((*fiat_pasta_fp_montgomery_domain_field_element)(fp), r) + return fp +} + +func (fp *Fp) SetBool(rhs bool) *Fp { + if rhs { + fp.SetOne() + } else { + fp.SetZero() + } + return fp +} + +// SetOne fp == R +func (fp *Fp) SetOne() *Fp { + return fp.Set(r) +} + +// SetZero fp == 0 +func (fp *Fp) SetZero() *Fp { + fp[0] = 0 + fp[1] = 0 + fp[2] = 0 + fp[3] = 0 + return fp +} + +// SetBytesWide takes 64 bytes as input and treats them as a 512-bit number. +// Attributed to https://github.com/zcash/pasta_curves/blob/main/src/fields/fp.rs#L255 +// We reduce an arbitrary 512-bit number by decomposing it into two 256-bit digits +// with the higher bits multiplied by 2^256. Thus, we perform two reductions +// +// 1. the lower bits are multiplied by R^2, as normal +// 2. the upper bits are multiplied by R^2 * 2^256 = R^3 +// +// and computing their sum in the field. It remains to see that arbitrary 256-bit +// numbers can be placed into Montgomery form safely using the reduction. The +// reduction works so long as the product is less than R=2^256 multiplied by +// the modulus. This holds because for any `c` smaller than the modulus, we have +// that (2^256 - 1)*c is an acceptable product for the reduction. Therefore, the +// reduction always works so long as `c` is in the field; in this case it is either the +// constant `r2` or `r3`. 
+func (fp *Fp) SetBytesWide(input *[64]byte) *Fp { + d0 := fiat_pasta_fp_montgomery_domain_field_element{ + binary.LittleEndian.Uint64(input[:8]), + binary.LittleEndian.Uint64(input[8:16]), + binary.LittleEndian.Uint64(input[16:24]), + binary.LittleEndian.Uint64(input[24:32]), + } + d1 := fiat_pasta_fp_montgomery_domain_field_element{ + binary.LittleEndian.Uint64(input[32:40]), + binary.LittleEndian.Uint64(input[40:48]), + binary.LittleEndian.Uint64(input[48:56]), + binary.LittleEndian.Uint64(input[56:64]), + } + // Convert to Montgomery form + tv1 := new(fiat_pasta_fp_montgomery_domain_field_element) + tv2 := new(fiat_pasta_fp_montgomery_domain_field_element) + // d0 * r2 + d1 * r3 + fiat_pasta_fp_mul(tv1, &d0, (*fiat_pasta_fp_montgomery_domain_field_element)(r2)) + fiat_pasta_fp_mul(tv2, &d1, (*fiat_pasta_fp_montgomery_domain_field_element)(r3)) + fiat_pasta_fp_add((*fiat_pasta_fp_montgomery_domain_field_element)(fp), tv1, tv2) + return fp +} + +// SetBytes attempts to convert a little endian byte representation +// of a scalar into a `Fp`, failing if input is not canonical +func (fp *Fp) SetBytes(input *[32]byte) (*Fp, error) { + d0 := &Fp{ + binary.LittleEndian.Uint64(input[:8]), + binary.LittleEndian.Uint64(input[8:16]), + binary.LittleEndian.Uint64(input[16:24]), + binary.LittleEndian.Uint64(input[24:32]), + } + if d0.Cmp(modulus) != -1 { + return nil, fmt.Errorf("invalid byte sequence") + } + fiat_pasta_fp_from_bytes((*[4]uint64)(fp), input) + fiat_pasta_fp_to_montgomery( + (*fiat_pasta_fp_montgomery_domain_field_element)(fp), + (*fiat_pasta_fp_non_montgomery_domain_field_element)(fp), + ) + return fp, nil +} + +// SetBigInt initializes an element from big.Int +// The value is reduced by the modulus +func (fp *Fp) SetBigInt(bi *big.Int) *Fp { + var buffer [32]byte + r := new(big.Int).Set(bi) + r.Mod(r, biModulus) + r.FillBytes(buffer[:]) + copy(buffer[:], internal.ReverseScalarBytes(buffer[:])) + _, _ = fp.SetBytes(&buffer) + return fp +} + +// SetRaw converts a raw array into a field element +func (fp *Fp) SetRaw(array *[4]uint64) *Fp { + fiat_pasta_fp_to_montgomery( + (*fiat_pasta_fp_montgomery_domain_field_element)(fp), + (*fiat_pasta_fp_non_montgomery_domain_field_element)(array), + ) + return fp +} + +// Bytes converts this element into a byte representation +// in little endian byte order +func (fp *Fp) Bytes() [32]byte { + var output [32]byte + tv := new(fiat_pasta_fp_non_montgomery_domain_field_element) + fiat_pasta_fp_from_montgomery(tv, (*fiat_pasta_fp_montgomery_domain_field_element)(fp)) + fiat_pasta_fp_to_bytes(&output, (*[4]uint64)(tv)) + return output +} + +// BigInt converts this element into the big.Int struct +func (fp *Fp) BigInt() *big.Int { + buffer := fp.Bytes() + return new(big.Int).SetBytes(internal.ReverseScalarBytes(buffer[:])) +} + +// Double this element +func (fp *Fp) Double(elem *Fp) *Fp { + delem := (*fiat_pasta_fp_montgomery_domain_field_element)(elem) + fiat_pasta_fp_add((*fiat_pasta_fp_montgomery_domain_field_element)(fp), delem, delem) + return fp +} + +// Square this element +func (fp *Fp) Square(elem *Fp) *Fp { + delem := (*fiat_pasta_fp_montgomery_domain_field_element)(elem) + fiat_pasta_fp_square((*fiat_pasta_fp_montgomery_domain_field_element)(fp), delem) + return fp +} + +// Sqrt this element, if it exists. If true, then value +// is a square root. 
If false, value is a QNR +func (fp *Fp) Sqrt(elem *Fp) (*Fp, bool) { + return fp.tonelliShanks(elem) +} + +// See sqrt_ts_ct at +// https://datatracker.ietf.org/doc/html/draft-irtf-cfrg-hash-to-curve-11#appendix-I.4 +func (fp *Fp) tonelliShanks(elem *Fp) (*Fp, bool) { + // c1 := 32 + // c2 := (q - 1) / (2^c1) + // c2 := [4]uint64{ + // 0x094cf91b992d30ed, + // 0x00000000224698fc, + // 0x0000000000000000, + // 0x0000000040000000, + // } + // c3 := (c2 - 1) / 2 + c3 := [4]uint64{ + 0x04a67c8dcc969876, + 0x0000000011234c7e, + 0x0000000000000000, + 0x0000000020000000, + } + // c4 := generator + // c5 := new(Fp).pow(&generator, c2) + c5 := &Fp{ + 0xa28db849bad6dbf0, + 0x9083cd03d3b539df, + 0xfba6b9ca9dc8448e, + 0x3ec928747b89c6da, + } + + z := new(Fp).pow(elem, c3) + t := new(Fp).Square(z) + t.Mul(t, elem) + + z.Mul(z, elem) + + b := new(Fp).Set(t) + c := new(Fp).Set(c5) + flags := map[bool]int{ + true: 1, + false: 0, + } + + for i := s; i >= 2; i-- { + for j := 1; j <= i-2; j++ { + b.Square(b) + } + z.CMove(z, new(Fp).Mul(z, c), flags[!b.IsOne()]) + c.Square(c) + t.CMove(t, new(Fp).Mul(t, c), flags[!b.IsOne()]) + b.Set(t) + } + wasSquare := c.Square(z).Equal(elem) + return fp.Set(z), wasSquare +} + +// Invert this element i.e. compute the multiplicative inverse +// return false, zero if this element is zero +func (fp *Fp) Invert(elem *Fp) (*Fp, bool) { + // computes elem^(p - 2) mod p + exp := [4]uint64{ + 0x992d30ecffffffff, + 0x224698fc094cf91b, + 0x0000000000000000, + 0x4000000000000000, + } + return fp.pow(elem, exp), !elem.IsZero() +} + +// Mul returns the result from multiplying this element by rhs +func (fp *Fp) Mul(lhs, rhs *Fp) *Fp { + dlhs := (*fiat_pasta_fp_montgomery_domain_field_element)(lhs) + drhs := (*fiat_pasta_fp_montgomery_domain_field_element)(rhs) + fiat_pasta_fp_mul((*fiat_pasta_fp_montgomery_domain_field_element)(fp), dlhs, drhs) + return fp +} + +// Sub returns the result from subtracting rhs from this element +func (fp *Fp) Sub(lhs, rhs *Fp) *Fp { + dlhs := (*fiat_pasta_fp_montgomery_domain_field_element)(lhs) + drhs := (*fiat_pasta_fp_montgomery_domain_field_element)(rhs) + fiat_pasta_fp_sub((*fiat_pasta_fp_montgomery_domain_field_element)(fp), dlhs, drhs) + return fp +} + +// Add returns the result from adding rhs to this element +func (fp *Fp) Add(lhs, rhs *Fp) *Fp { + dlhs := (*fiat_pasta_fp_montgomery_domain_field_element)(lhs) + drhs := (*fiat_pasta_fp_montgomery_domain_field_element)(rhs) + fiat_pasta_fp_add((*fiat_pasta_fp_montgomery_domain_field_element)(fp), dlhs, drhs) + return fp +} + +// Neg returns negation of this element +func (fp *Fp) Neg(elem *Fp) *Fp { + delem := (*fiat_pasta_fp_montgomery_domain_field_element)(elem) + fiat_pasta_fp_opp((*fiat_pasta_fp_montgomery_domain_field_element)(fp), delem) + return fp +} + +// Exp exponentiates this element by exp +func (fp *Fp) Exp(base, exp *Fp) *Fp { + // convert exponent to integer form + tv := &fiat_pasta_fp_non_montgomery_domain_field_element{} + fiat_pasta_fp_from_montgomery(tv, (*fiat_pasta_fp_montgomery_domain_field_element)(exp)) + + e := (*[4]uint64)(tv) + return fp.pow(base, *e) +} + +func (fp *Fp) pow(base *Fp, exp [4]uint64) *Fp { + res := new(Fp).SetOne() + tmp := new(Fp) + + for i := len(exp) - 1; i >= 0; i-- { + for j := 63; j >= 0; j-- { + res.Square(res) + tmp.Mul(res, base) + res.CMove(res, tmp, int(exp[i]>>j)&1) + } + } + return fp.Set(res) +} + +// CMove selects lhs if choice == 0 and rhs if choice == 1 +func (fp *Fp) CMove(lhs, rhs *Fp, choice int) *Fp { + dlhs := (*[4]uint64)(lhs) + 
drhs := (*[4]uint64)(rhs) + fiat_pasta_fp_selectznz((*[4]uint64)(fp), fiat_pasta_fp_uint1(choice), dlhs, drhs) + return fp +} + +// ToRaw converts this element into the a [4]uint64 +func (fp *Fp) ToRaw() [4]uint64 { + res := &fiat_pasta_fp_non_montgomery_domain_field_element{} + fiat_pasta_fp_from_montgomery(res, (*fiat_pasta_fp_montgomery_domain_field_element)(fp)) + return *(*[4]uint64)(res) +} diff --git a/core/curves/native/pasta/fp/fp_test.go b/core/curves/native/pasta/fp/fp_test.go new file mode 100755 index 0000000..b882850 --- /dev/null +++ b/core/curves/native/pasta/fp/fp_test.go @@ -0,0 +1,273 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package fp + +import ( + "math/big" + "math/rand" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestFpSetOne(t *testing.T) { + fp := new(Fp).SetOne() + require.NotNil(t, fp) + require.True(t, fp.Equal(r)) +} + +func TestFpSetUint64(t *testing.T) { + act := new(Fp).SetUint64(1 << 60) + require.NotNil(t, act) + // Remember it will be in montgomery form + require.Equal(t, int(act[0]), 0x592d30ed00000001) +} + +func TestFpAdd(t *testing.T) { + lhs := new(Fp).SetOne() + rhs := new(Fp).SetOne() + exp := new(Fp).SetUint64(2) + res := new(Fp).Add(lhs, rhs) + require.NotNil(t, res) + require.True(t, res.Equal(exp)) + + // Fuzz test + for i := 0; i < 25; i++ { + // Divide by 4 to prevent overflow false errors + l := rand.Uint64() >> 2 + r := rand.Uint64() >> 2 + e := l + r + lhs.SetUint64(l) + rhs.SetUint64(r) + exp.SetUint64(e) + + a := new(Fp).Add(lhs, rhs) + require.NotNil(t, a) + require.Equal(t, exp, a) + } +} + +func TestFpSub(t *testing.T) { + lhs := new(Fp).SetOne() + rhs := new(Fp).SetOne() + exp := new(Fp).SetZero() + res := new(Fp).Sub(lhs, rhs) + require.NotNil(t, res) + require.True(t, res.Equal(exp)) + + // Fuzz test + for i := 0; i < 25; i++ { + // Divide by 4 to prevent overflow false errors + l := rand.Uint64() >> 2 + r := rand.Uint64() >> 2 + if l < r { + l, r = r, l + } + e := l - r + lhs.SetUint64(l) + rhs.SetUint64(r) + exp.SetUint64(e) + + a := new(Fp).Sub(lhs, rhs) + require.NotNil(t, a) + require.Equal(t, exp, a) + } +} + +func TestFpMul(t *testing.T) { + lhs := new(Fp).SetOne() + rhs := new(Fp).SetOne() + exp := new(Fp).SetOne() + res := new(Fp).Mul(lhs, rhs) + require.NotNil(t, res) + require.True(t, res.Equal(exp)) + + // Fuzz test + for i := 0; i < 25; i++ { + // Divide by 4 to prevent overflow false errors + l := rand.Uint32() + r := rand.Uint32() + e := uint64(l) * uint64(r) + lhs.SetUint64(uint64(l)) + rhs.SetUint64(uint64(r)) + exp.SetUint64(e) + + a := new(Fp).Mul(lhs, rhs) + require.NotNil(t, a) + require.Equal(t, exp, a) + } +} + +func TestFpDouble(t *testing.T) { + a := new(Fp).SetUint64(2) + e := new(Fp).SetUint64(4) + require.Equal(t, e, new(Fp).Double(a)) + + for i := 0; i < 25; i++ { + tv := rand.Uint32() + ttv := uint64(tv) * 2 + a = new(Fp).SetUint64(uint64(tv)) + e = new(Fp).SetUint64(ttv) + require.Equal(t, e, new(Fp).Double(a)) + } +} + +func TestFpSquare(t *testing.T) { + a := new(Fp).SetUint64(4) + e := new(Fp).SetUint64(16) + require.Equal(t, e, a.Square(a)) + + for i := 0; i < 25; i++ { + j := rand.Uint32() + exp := uint64(j) * uint64(j) + e.SetUint64(exp) + a.SetUint64(uint64(j)) + require.Equal(t, e, a.Square(a)) + } +} + +func TestFpNeg(t *testing.T) { + a := new(Fp).SetOne() + a.Neg(a) + e := &Fp{7256640077462241284, 9879318615658062958, 0, 0} + require.Equal(t, e, a) + a.Neg(generator) + e = &Fp{0xf787d28400000014, 
0xad83f3b0ba037627, 0x2, 0x0} + require.Equal(t, e, a) +} + +func TestFpExp(t *testing.T) { + e := new(Fp).SetUint64(8) + a := new(Fp).SetUint64(2) + by := new(Fp).SetUint64(3) + require.Equal(t, e, a.Exp(a, by)) +} + +func TestFpSqrt(t *testing.T) { + t1 := new(Fp).SetUint64(2) + t2 := new(Fp).Neg(t1) + t3 := new(Fp).Square(t1) + _, wasSquare := t3.Sqrt(t3) + require.True(t, wasSquare) + require.True(t, t1.Equal(t3) || t2.Equal(t3)) + t1.SetUint64(5) + _, wasSquare = new(Fp).Sqrt(t1) + require.False(t, wasSquare) +} + +func TestFpInvert(t *testing.T) { + twoInv := &Fp{0xcc96987680000001, 0x11234c7e04a67c8d, 0x0000000000000000, 0x2000000000000000} + fiat_pasta_fp_to_montgomery( + (*fiat_pasta_fp_montgomery_domain_field_element)(twoInv), + (*fiat_pasta_fp_non_montgomery_domain_field_element)(twoInv), + ) + two := new(Fp).SetUint64(2) + a, inverted := new(Fp).Invert(two) + require.True(t, inverted) + require.Equal(t, a, twoInv) + + rootOfUnity := &Fp{ + 0xbdad6fabd87ea32f, + 0xea322bf2b7bb7584, + 0x362120830561f81a, + 0x2bce74deac30ebda, + } + fiat_pasta_fp_to_montgomery( + (*fiat_pasta_fp_montgomery_domain_field_element)(rootOfUnity), + (*fiat_pasta_fp_non_montgomery_domain_field_element)(rootOfUnity), + ) + rootOfUnityInv := &Fp{ + 0xf0b87c7db2ce91f6, + 0x84a0a1d8859f066f, + 0xb4ed8e647196dad1, + 0x2cd5282c53116b5c, + } + fiat_pasta_fp_to_montgomery( + (*fiat_pasta_fp_montgomery_domain_field_element)(rootOfUnityInv), + (*fiat_pasta_fp_non_montgomery_domain_field_element)(rootOfUnityInv), + ) + a, inverted = new(Fp).Invert(rootOfUnity) + require.True(t, inverted) + require.Equal(t, a, rootOfUnityInv) + + lhs := new(Fp).SetUint64(9) + rhs := new(Fp).SetUint64(3) + rhsInv, inverted := new(Fp).Invert(rhs) + require.True(t, inverted) + require.Equal(t, rhs, new(Fp).Mul(lhs, rhsInv)) + + rhs.SetZero() + _, inverted = new(Fp).Invert(rhs) + require.False(t, inverted) +} + +func TestFpCMove(t *testing.T) { + t1 := new(Fp).SetUint64(5) + t2 := new(Fp).SetUint64(10) + require.Equal(t, t1, new(Fp).CMove(t1, t2, 0)) + require.Equal(t, t2, new(Fp).CMove(t1, t2, 1)) +} + +func TestFpBytes(t *testing.T) { + t1 := new(Fp).SetUint64(99) + seq := t1.Bytes() + t2, err := new(Fp).SetBytes(&seq) + require.NoError(t, err) + require.Equal(t, t1, t2) + + for i := 0; i < 25; i++ { + t1.SetUint64(rand.Uint64()) + seq = t1.Bytes() + _, err = t2.SetBytes(&seq) + require.NoError(t, err) + require.Equal(t, t1, t2) + } +} + +func TestFpBigInt(t *testing.T) { + t1 := new(Fp).SetBigInt(big.NewInt(9999)) + t2 := new(Fp).SetBigInt(t1.BigInt()) + require.Equal(t, t1, t2) + + e := &Fp{0x8c6bc70550c87761, 0xce2c6c48e7063731, 0xf1275fd1e4607cd6, 0x3e6762e63501edbd} + b := new( + big.Int, + ).SetBytes([]byte{9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9}) + t1.SetBigInt(b) + require.Equal(t, e, t1) + e[0] = 0xcc169e7af3788a0 + e[1] = 0x541a2cb32246c1ea + e[2] = 0xed8a02e1b9f8329 + e[3] = 0x1989d19cafe1242 + b.Neg(b) + t1.SetBigInt(b) + require.Equal(t, e, t1) +} + +func TestFpSetBool(t *testing.T) { + require.Equal(t, new(Fp).SetOne(), new(Fp).SetBool(true)) + require.Equal(t, new(Fp).SetZero(), new(Fp).SetBool(false)) +} + +func TestFpSetBytesWide(t *testing.T) { + e := &Fp{0x3daec14d565241d9, 0x0b7af45b6073944b, 0xea5b8bd611a5bd4c, 0x150160330625db3d} + fiat_pasta_fp_to_montgomery( + (*fiat_pasta_fp_montgomery_domain_field_element)(e), + (*fiat_pasta_fp_non_montgomery_domain_field_element)(e), + ) + a := 
new(Fp).SetBytesWide(&[64]byte{ + 0xa1, 0x78, 0x76, 0x29, 0x41, 0x56, 0x15, 0xee, + 0x65, 0xbe, 0xfd, 0xdb, 0x6b, 0x15, 0x3e, 0xd8, + 0xb5, 0xa0, 0x8b, 0xc6, 0x34, 0xd8, 0xcc, 0xd9, + 0x58, 0x27, 0x27, 0x12, 0xe3, 0xed, 0x08, 0xf5, + 0x89, 0x8e, 0x22, 0xf8, 0xcb, 0xf7, 0x8d, 0x03, + 0x41, 0x4b, 0xc7, 0xa3, 0xe4, 0xa1, 0x05, 0x35, + 0xb3, 0x2d, 0xb8, 0x5e, 0x77, 0x6f, 0xa4, 0xbf, + 0x1d, 0x47, 0x2f, 0x26, 0x7e, 0xe2, 0xeb, 0x26, + }) + require.Equal(t, e, a) +} diff --git a/core/curves/native/pasta/fp/pasta_fp.go b/core/curves/native/pasta/fp/pasta_fp.go new file mode 100755 index 0000000..6735d8c --- /dev/null +++ b/core/curves/native/pasta/fp/pasta_fp.go @@ -0,0 +1,1518 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +// Autogenerated: './src/ExtractionOCaml/word_by_word_montgomery' --lang Go pasta_fp 64 '2^254 + 45560315531419706090280762371685220353' +// +// curve description: pasta_fp +// +// machine_wordsize = 64 (from "64") +// +// requested operations: (all) +// +// m = 0x40000000000000000000000000000000224698fc094cf91b992d30ed00000001 (from "2^254 + 45560315531419706090280762371685220353") +// +// +// +// NOTE: In addition to the bounds specified above each function, all +// +// functions synthesized for this Montgomery arithmetic require the +// +// input to be strictly less than the prime modulus (m), and also +// +// require the input to be in the unique saturated representation. +// +// All functions also ensure that these two properties are true of +// +// return values. +// +// +// +// Computed values: +// +// eval z = z[0] + (z[1] << 64) + (z[2] << 128) + (z[3] << 192) +// +// bytes_eval z = z[0] + (z[1] << 8) + (z[2] << 16) + (z[3] << 24) + (z[4] << 32) + (z[5] << 40) + (z[6] << 48) + (z[7] << 56) + (z[8] << 64) + (z[9] << 72) + (z[10] << 80) + (z[11] << 88) + (z[12] << 96) + (z[13] << 104) + (z[14] << 112) + (z[15] << 120) + (z[16] << 128) + (z[17] << 136) + (z[18] << 144) + (z[19] << 152) + (z[20] << 160) + (z[21] << 168) + (z[22] << 176) + (z[23] << 184) + (z[24] << 192) + (z[25] << 200) + (z[26] << 208) + (z[27] << 216) + (z[28] << 224) + (z[29] << 232) + (z[30] << 240) + (z[31] << 248) +// +// twos_complement_eval z = let x1 := z[0] + (z[1] << 64) + (z[2] << 128) + (z[3] << 192) in +// +// if x1 & (2^256-1) < 2^255 then x1 & (2^256-1) else (x1 & (2^256-1)) - 2^256 + +package fp + +import "math/bits" + +type ( + fiat_pasta_fp_uint1 uint64 // We use uint64 instead of a more narrow type for performance reasons; see https://github.com/mit-plv/fiat-crypto/pull/1006#issuecomment-892625927 + fiat_pasta_fp_int1 int64 // We use uint64 instead of a more narrow type for performance reasons; see https://github.com/mit-plv/fiat-crypto/pull/1006#issuecomment-892625927 +) + +// The type fiat_pasta_fp_montgomery_domain_field_element is a field element in the Montgomery domain. +// +// Bounds: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +type fiat_pasta_fp_montgomery_domain_field_element [4]uint64 + +// The type fiat_pasta_fp_non_montgomery_domain_field_element is a field element NOT in the Montgomery domain. 
+// +// Bounds: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +type fiat_pasta_fp_non_montgomery_domain_field_element [4]uint64 + +// The function fiat_pasta_fp_addcarryx_u64 is a thin wrapper around bits.Add64 that uses fiat_pasta_fp_uint1 rather than uint64 +func fiat_pasta_fp_addcarryx_u64( + x uint64, + y uint64, + carry fiat_pasta_fp_uint1, +) (uint64, fiat_pasta_fp_uint1) { + sum, carryOut := bits.Add64(x, y, uint64(carry)) + return sum, fiat_pasta_fp_uint1(carryOut) +} + +// The function fiat_pasta_fp_subborrowx_u64 is a thin wrapper around bits.Sub64 that uses fiat_pasta_fp_uint1 rather than uint64 +func fiat_pasta_fp_subborrowx_u64( + x uint64, + y uint64, + carry fiat_pasta_fp_uint1, +) (uint64, fiat_pasta_fp_uint1) { + sum, carryOut := bits.Sub64(x, y, uint64(carry)) + return sum, fiat_pasta_fp_uint1(carryOut) +} + +// The function fiat_pasta_fp_cmovznz_u64 is a single-word conditional move. +// +// Postconditions: +// +// out1 = (if arg1 = 0 then arg2 else arg3) +// +// Input Bounds: +// +// arg1: [0x0 ~> 0x1] +// arg2: [0x0 ~> 0xffffffffffffffff] +// arg3: [0x0 ~> 0xffffffffffffffff] +// +// Output Bounds: +// +// out1: [0x0 ~> 0xffffffffffffffff] +func fiat_pasta_fp_cmovznz_u64(out1 *uint64, arg1 fiat_pasta_fp_uint1, arg2 uint64, arg3 uint64) { + x1 := arg1 + x2 := (uint64((fiat_pasta_fp_int1(0x0) - fiat_pasta_fp_int1(x1))) & 0xffffffffffffffff) + x3 := ((x2 & arg3) | ((^x2) & arg2)) + *out1 = x3 +} + +// The function fiat_pasta_fp_mul multiplies two field elements in the Montgomery domain. +// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// 0 ≤ eval arg2 < m +// +// Postconditions: +// +// eval (from_montgomery out1) mod m = (eval (from_montgomery arg1) * eval (from_montgomery arg2)) mod m +// 0 ≤ eval out1 < m +func fiat_pasta_fp_mul( + out1 *fiat_pasta_fp_montgomery_domain_field_element, + arg1 *fiat_pasta_fp_montgomery_domain_field_element, + arg2 *fiat_pasta_fp_montgomery_domain_field_element, +) { + x1 := arg1[1] + x2 := arg1[2] + x3 := arg1[3] + x4 := arg1[0] + var x5 uint64 + var x6 uint64 + x6, x5 = bits.Mul64(x4, arg2[3]) + var x7 uint64 + var x8 uint64 + x8, x7 = bits.Mul64(x4, arg2[2]) + var x9 uint64 + var x10 uint64 + x10, x9 = bits.Mul64(x4, arg2[1]) + var x11 uint64 + var x12 uint64 + x12, x11 = bits.Mul64(x4, arg2[0]) + var x13 uint64 + var x14 fiat_pasta_fp_uint1 + x13, x14 = fiat_pasta_fp_addcarryx_u64(x12, x9, 0x0) + var x15 uint64 + var x16 fiat_pasta_fp_uint1 + x15, x16 = fiat_pasta_fp_addcarryx_u64(x10, x7, x14) + var x17 uint64 + var x18 fiat_pasta_fp_uint1 + x17, x18 = fiat_pasta_fp_addcarryx_u64(x8, x5, x16) + x19 := (uint64(x18) + x6) + var x20 uint64 + _, x20 = bits.Mul64(x11, 0x992d30ecffffffff) + var x22 uint64 + var x23 uint64 + x23, x22 = bits.Mul64(x20, 0x4000000000000000) + var x24 uint64 + var x25 uint64 + x25, x24 = bits.Mul64(x20, 0x224698fc094cf91b) + var x26 uint64 + var x27 uint64 + x27, x26 = bits.Mul64(x20, 0x992d30ed00000001) + var x28 uint64 + var x29 fiat_pasta_fp_uint1 + x28, x29 = fiat_pasta_fp_addcarryx_u64(x27, x24, 0x0) + x30 := (uint64(x29) + x25) + var x32 fiat_pasta_fp_uint1 + _, x32 = fiat_pasta_fp_addcarryx_u64(x11, x26, 0x0) + var x33 uint64 + var x34 fiat_pasta_fp_uint1 + x33, x34 = fiat_pasta_fp_addcarryx_u64(x13, x28, x32) + var x35 uint64 + var x36 fiat_pasta_fp_uint1 + x35, x36 = fiat_pasta_fp_addcarryx_u64(x15, x30, x34) + var x37 uint64 + var x38 fiat_pasta_fp_uint1 + x37, x38 = fiat_pasta_fp_addcarryx_u64(x17, x22, x36) + var x39 uint64 + var 
x40 fiat_pasta_fp_uint1 + x39, x40 = fiat_pasta_fp_addcarryx_u64(x19, x23, x38) + var x41 uint64 + var x42 uint64 + x42, x41 = bits.Mul64(x1, arg2[3]) + var x43 uint64 + var x44 uint64 + x44, x43 = bits.Mul64(x1, arg2[2]) + var x45 uint64 + var x46 uint64 + x46, x45 = bits.Mul64(x1, arg2[1]) + var x47 uint64 + var x48 uint64 + x48, x47 = bits.Mul64(x1, arg2[0]) + var x49 uint64 + var x50 fiat_pasta_fp_uint1 + x49, x50 = fiat_pasta_fp_addcarryx_u64(x48, x45, 0x0) + var x51 uint64 + var x52 fiat_pasta_fp_uint1 + x51, x52 = fiat_pasta_fp_addcarryx_u64(x46, x43, x50) + var x53 uint64 + var x54 fiat_pasta_fp_uint1 + x53, x54 = fiat_pasta_fp_addcarryx_u64(x44, x41, x52) + x55 := (uint64(x54) + x42) + var x56 uint64 + var x57 fiat_pasta_fp_uint1 + x56, x57 = fiat_pasta_fp_addcarryx_u64(x33, x47, 0x0) + var x58 uint64 + var x59 fiat_pasta_fp_uint1 + x58, x59 = fiat_pasta_fp_addcarryx_u64(x35, x49, x57) + var x60 uint64 + var x61 fiat_pasta_fp_uint1 + x60, x61 = fiat_pasta_fp_addcarryx_u64(x37, x51, x59) + var x62 uint64 + var x63 fiat_pasta_fp_uint1 + x62, x63 = fiat_pasta_fp_addcarryx_u64(x39, x53, x61) + var x64 uint64 + var x65 fiat_pasta_fp_uint1 + x64, x65 = fiat_pasta_fp_addcarryx_u64(uint64(x40), x55, x63) + var x66 uint64 + _, x66 = bits.Mul64(x56, 0x992d30ecffffffff) + var x68 uint64 + var x69 uint64 + x69, x68 = bits.Mul64(x66, 0x4000000000000000) + var x70 uint64 + var x71 uint64 + x71, x70 = bits.Mul64(x66, 0x224698fc094cf91b) + var x72 uint64 + var x73 uint64 + x73, x72 = bits.Mul64(x66, 0x992d30ed00000001) + var x74 uint64 + var x75 fiat_pasta_fp_uint1 + x74, x75 = fiat_pasta_fp_addcarryx_u64(x73, x70, 0x0) + x76 := (uint64(x75) + x71) + var x78 fiat_pasta_fp_uint1 + _, x78 = fiat_pasta_fp_addcarryx_u64(x56, x72, 0x0) + var x79 uint64 + var x80 fiat_pasta_fp_uint1 + x79, x80 = fiat_pasta_fp_addcarryx_u64(x58, x74, x78) + var x81 uint64 + var x82 fiat_pasta_fp_uint1 + x81, x82 = fiat_pasta_fp_addcarryx_u64(x60, x76, x80) + var x83 uint64 + var x84 fiat_pasta_fp_uint1 + x83, x84 = fiat_pasta_fp_addcarryx_u64(x62, x68, x82) + var x85 uint64 + var x86 fiat_pasta_fp_uint1 + x85, x86 = fiat_pasta_fp_addcarryx_u64(x64, x69, x84) + x87 := (uint64(x86) + uint64(x65)) + var x88 uint64 + var x89 uint64 + x89, x88 = bits.Mul64(x2, arg2[3]) + var x90 uint64 + var x91 uint64 + x91, x90 = bits.Mul64(x2, arg2[2]) + var x92 uint64 + var x93 uint64 + x93, x92 = bits.Mul64(x2, arg2[1]) + var x94 uint64 + var x95 uint64 + x95, x94 = bits.Mul64(x2, arg2[0]) + var x96 uint64 + var x97 fiat_pasta_fp_uint1 + x96, x97 = fiat_pasta_fp_addcarryx_u64(x95, x92, 0x0) + var x98 uint64 + var x99 fiat_pasta_fp_uint1 + x98, x99 = fiat_pasta_fp_addcarryx_u64(x93, x90, x97) + var x100 uint64 + var x101 fiat_pasta_fp_uint1 + x100, x101 = fiat_pasta_fp_addcarryx_u64(x91, x88, x99) + x102 := (uint64(x101) + x89) + var x103 uint64 + var x104 fiat_pasta_fp_uint1 + x103, x104 = fiat_pasta_fp_addcarryx_u64(x79, x94, 0x0) + var x105 uint64 + var x106 fiat_pasta_fp_uint1 + x105, x106 = fiat_pasta_fp_addcarryx_u64(x81, x96, x104) + var x107 uint64 + var x108 fiat_pasta_fp_uint1 + x107, x108 = fiat_pasta_fp_addcarryx_u64(x83, x98, x106) + var x109 uint64 + var x110 fiat_pasta_fp_uint1 + x109, x110 = fiat_pasta_fp_addcarryx_u64(x85, x100, x108) + var x111 uint64 + var x112 fiat_pasta_fp_uint1 + x111, x112 = fiat_pasta_fp_addcarryx_u64(x87, x102, x110) + var x113 uint64 + _, x113 = bits.Mul64(x103, 0x992d30ecffffffff) + var x115 uint64 + var x116 uint64 + x116, x115 = bits.Mul64(x113, 0x4000000000000000) + var x117 uint64 + var 
x118 uint64 + x118, x117 = bits.Mul64(x113, 0x224698fc094cf91b) + var x119 uint64 + var x120 uint64 + x120, x119 = bits.Mul64(x113, 0x992d30ed00000001) + var x121 uint64 + var x122 fiat_pasta_fp_uint1 + x121, x122 = fiat_pasta_fp_addcarryx_u64(x120, x117, 0x0) + x123 := (uint64(x122) + x118) + var x125 fiat_pasta_fp_uint1 + _, x125 = fiat_pasta_fp_addcarryx_u64(x103, x119, 0x0) + var x126 uint64 + var x127 fiat_pasta_fp_uint1 + x126, x127 = fiat_pasta_fp_addcarryx_u64(x105, x121, x125) + var x128 uint64 + var x129 fiat_pasta_fp_uint1 + x128, x129 = fiat_pasta_fp_addcarryx_u64(x107, x123, x127) + var x130 uint64 + var x131 fiat_pasta_fp_uint1 + x130, x131 = fiat_pasta_fp_addcarryx_u64(x109, x115, x129) + var x132 uint64 + var x133 fiat_pasta_fp_uint1 + x132, x133 = fiat_pasta_fp_addcarryx_u64(x111, x116, x131) + x134 := (uint64(x133) + uint64(x112)) + var x135 uint64 + var x136 uint64 + x136, x135 = bits.Mul64(x3, arg2[3]) + var x137 uint64 + var x138 uint64 + x138, x137 = bits.Mul64(x3, arg2[2]) + var x139 uint64 + var x140 uint64 + x140, x139 = bits.Mul64(x3, arg2[1]) + var x141 uint64 + var x142 uint64 + x142, x141 = bits.Mul64(x3, arg2[0]) + var x143 uint64 + var x144 fiat_pasta_fp_uint1 + x143, x144 = fiat_pasta_fp_addcarryx_u64(x142, x139, 0x0) + var x145 uint64 + var x146 fiat_pasta_fp_uint1 + x145, x146 = fiat_pasta_fp_addcarryx_u64(x140, x137, x144) + var x147 uint64 + var x148 fiat_pasta_fp_uint1 + x147, x148 = fiat_pasta_fp_addcarryx_u64(x138, x135, x146) + x149 := (uint64(x148) + x136) + var x150 uint64 + var x151 fiat_pasta_fp_uint1 + x150, x151 = fiat_pasta_fp_addcarryx_u64(x126, x141, 0x0) + var x152 uint64 + var x153 fiat_pasta_fp_uint1 + x152, x153 = fiat_pasta_fp_addcarryx_u64(x128, x143, x151) + var x154 uint64 + var x155 fiat_pasta_fp_uint1 + x154, x155 = fiat_pasta_fp_addcarryx_u64(x130, x145, x153) + var x156 uint64 + var x157 fiat_pasta_fp_uint1 + x156, x157 = fiat_pasta_fp_addcarryx_u64(x132, x147, x155) + var x158 uint64 + var x159 fiat_pasta_fp_uint1 + x158, x159 = fiat_pasta_fp_addcarryx_u64(x134, x149, x157) + var x160 uint64 + _, x160 = bits.Mul64(x150, 0x992d30ecffffffff) + var x162 uint64 + var x163 uint64 + x163, x162 = bits.Mul64(x160, 0x4000000000000000) + var x164 uint64 + var x165 uint64 + x165, x164 = bits.Mul64(x160, 0x224698fc094cf91b) + var x166 uint64 + var x167 uint64 + x167, x166 = bits.Mul64(x160, 0x992d30ed00000001) + var x168 uint64 + var x169 fiat_pasta_fp_uint1 + x168, x169 = fiat_pasta_fp_addcarryx_u64(x167, x164, 0x0) + x170 := (uint64(x169) + x165) + var x172 fiat_pasta_fp_uint1 + _, x172 = fiat_pasta_fp_addcarryx_u64(x150, x166, 0x0) + var x173 uint64 + var x174 fiat_pasta_fp_uint1 + x173, x174 = fiat_pasta_fp_addcarryx_u64(x152, x168, x172) + var x175 uint64 + var x176 fiat_pasta_fp_uint1 + x175, x176 = fiat_pasta_fp_addcarryx_u64(x154, x170, x174) + var x177 uint64 + var x178 fiat_pasta_fp_uint1 + x177, x178 = fiat_pasta_fp_addcarryx_u64(x156, x162, x176) + var x179 uint64 + var x180 fiat_pasta_fp_uint1 + x179, x180 = fiat_pasta_fp_addcarryx_u64(x158, x163, x178) + x181 := (uint64(x180) + uint64(x159)) + var x182 uint64 + var x183 fiat_pasta_fp_uint1 + x182, x183 = fiat_pasta_fp_subborrowx_u64(x173, 0x992d30ed00000001, 0x0) + var x184 uint64 + var x185 fiat_pasta_fp_uint1 + x184, x185 = fiat_pasta_fp_subborrowx_u64(x175, 0x224698fc094cf91b, x183) + var x186 uint64 + var x187 fiat_pasta_fp_uint1 + x186, x187 = fiat_pasta_fp_subborrowx_u64(x177, uint64(0x0), x185) + var x188 uint64 + var x189 fiat_pasta_fp_uint1 + x188, x189 = 
fiat_pasta_fp_subborrowx_u64(x179, 0x4000000000000000, x187) + var x191 fiat_pasta_fp_uint1 + _, x191 = fiat_pasta_fp_subborrowx_u64(x181, uint64(0x0), x189) + var x192 uint64 + fiat_pasta_fp_cmovznz_u64(&x192, x191, x182, x173) + var x193 uint64 + fiat_pasta_fp_cmovznz_u64(&x193, x191, x184, x175) + var x194 uint64 + fiat_pasta_fp_cmovznz_u64(&x194, x191, x186, x177) + var x195 uint64 + fiat_pasta_fp_cmovznz_u64(&x195, x191, x188, x179) + out1[0] = x192 + out1[1] = x193 + out1[2] = x194 + out1[3] = x195 +} + +// The function fiat_pasta_fp_square squares a field element in the Montgomery domain. +// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// +// Postconditions: +// +// eval (from_montgomery out1) mod m = (eval (from_montgomery arg1) * eval (from_montgomery arg1)) mod m +// 0 ≤ eval out1 < m +func fiat_pasta_fp_square( + out1 *fiat_pasta_fp_montgomery_domain_field_element, + arg1 *fiat_pasta_fp_montgomery_domain_field_element, +) { + x1 := arg1[1] + x2 := arg1[2] + x3 := arg1[3] + x4 := arg1[0] + var x5 uint64 + var x6 uint64 + x6, x5 = bits.Mul64(x4, arg1[3]) + var x7 uint64 + var x8 uint64 + x8, x7 = bits.Mul64(x4, arg1[2]) + var x9 uint64 + var x10 uint64 + x10, x9 = bits.Mul64(x4, arg1[1]) + var x11 uint64 + var x12 uint64 + x12, x11 = bits.Mul64(x4, arg1[0]) + var x13 uint64 + var x14 fiat_pasta_fp_uint1 + x13, x14 = fiat_pasta_fp_addcarryx_u64(x12, x9, 0x0) + var x15 uint64 + var x16 fiat_pasta_fp_uint1 + x15, x16 = fiat_pasta_fp_addcarryx_u64(x10, x7, x14) + var x17 uint64 + var x18 fiat_pasta_fp_uint1 + x17, x18 = fiat_pasta_fp_addcarryx_u64(x8, x5, x16) + x19 := (uint64(x18) + x6) + var x20 uint64 + _, x20 = bits.Mul64(x11, 0x992d30ecffffffff) + var x22 uint64 + var x23 uint64 + x23, x22 = bits.Mul64(x20, 0x4000000000000000) + var x24 uint64 + var x25 uint64 + x25, x24 = bits.Mul64(x20, 0x224698fc094cf91b) + var x26 uint64 + var x27 uint64 + x27, x26 = bits.Mul64(x20, 0x992d30ed00000001) + var x28 uint64 + var x29 fiat_pasta_fp_uint1 + x28, x29 = fiat_pasta_fp_addcarryx_u64(x27, x24, 0x0) + x30 := (uint64(x29) + x25) + var x32 fiat_pasta_fp_uint1 + _, x32 = fiat_pasta_fp_addcarryx_u64(x11, x26, 0x0) + var x33 uint64 + var x34 fiat_pasta_fp_uint1 + x33, x34 = fiat_pasta_fp_addcarryx_u64(x13, x28, x32) + var x35 uint64 + var x36 fiat_pasta_fp_uint1 + x35, x36 = fiat_pasta_fp_addcarryx_u64(x15, x30, x34) + var x37 uint64 + var x38 fiat_pasta_fp_uint1 + x37, x38 = fiat_pasta_fp_addcarryx_u64(x17, x22, x36) + var x39 uint64 + var x40 fiat_pasta_fp_uint1 + x39, x40 = fiat_pasta_fp_addcarryx_u64(x19, x23, x38) + var x41 uint64 + var x42 uint64 + x42, x41 = bits.Mul64(x1, arg1[3]) + var x43 uint64 + var x44 uint64 + x44, x43 = bits.Mul64(x1, arg1[2]) + var x45 uint64 + var x46 uint64 + x46, x45 = bits.Mul64(x1, arg1[1]) + var x47 uint64 + var x48 uint64 + x48, x47 = bits.Mul64(x1, arg1[0]) + var x49 uint64 + var x50 fiat_pasta_fp_uint1 + x49, x50 = fiat_pasta_fp_addcarryx_u64(x48, x45, 0x0) + var x51 uint64 + var x52 fiat_pasta_fp_uint1 + x51, x52 = fiat_pasta_fp_addcarryx_u64(x46, x43, x50) + var x53 uint64 + var x54 fiat_pasta_fp_uint1 + x53, x54 = fiat_pasta_fp_addcarryx_u64(x44, x41, x52) + x55 := (uint64(x54) + x42) + var x56 uint64 + var x57 fiat_pasta_fp_uint1 + x56, x57 = fiat_pasta_fp_addcarryx_u64(x33, x47, 0x0) + var x58 uint64 + var x59 fiat_pasta_fp_uint1 + x58, x59 = fiat_pasta_fp_addcarryx_u64(x35, x49, x57) + var x60 uint64 + var x61 fiat_pasta_fp_uint1 + x60, x61 = fiat_pasta_fp_addcarryx_u64(x37, x51, x59) + var x62 uint64 + var x63 fiat_pasta_fp_uint1 + x62, x63 = 
fiat_pasta_fp_addcarryx_u64(x39, x53, x61) + var x64 uint64 + var x65 fiat_pasta_fp_uint1 + x64, x65 = fiat_pasta_fp_addcarryx_u64(uint64(x40), x55, x63) + var x66 uint64 + _, x66 = bits.Mul64(x56, 0x992d30ecffffffff) + var x68 uint64 + var x69 uint64 + x69, x68 = bits.Mul64(x66, 0x4000000000000000) + var x70 uint64 + var x71 uint64 + x71, x70 = bits.Mul64(x66, 0x224698fc094cf91b) + var x72 uint64 + var x73 uint64 + x73, x72 = bits.Mul64(x66, 0x992d30ed00000001) + var x74 uint64 + var x75 fiat_pasta_fp_uint1 + x74, x75 = fiat_pasta_fp_addcarryx_u64(x73, x70, 0x0) + x76 := (uint64(x75) + x71) + var x78 fiat_pasta_fp_uint1 + _, x78 = fiat_pasta_fp_addcarryx_u64(x56, x72, 0x0) + var x79 uint64 + var x80 fiat_pasta_fp_uint1 + x79, x80 = fiat_pasta_fp_addcarryx_u64(x58, x74, x78) + var x81 uint64 + var x82 fiat_pasta_fp_uint1 + x81, x82 = fiat_pasta_fp_addcarryx_u64(x60, x76, x80) + var x83 uint64 + var x84 fiat_pasta_fp_uint1 + x83, x84 = fiat_pasta_fp_addcarryx_u64(x62, x68, x82) + var x85 uint64 + var x86 fiat_pasta_fp_uint1 + x85, x86 = fiat_pasta_fp_addcarryx_u64(x64, x69, x84) + x87 := (uint64(x86) + uint64(x65)) + var x88 uint64 + var x89 uint64 + x89, x88 = bits.Mul64(x2, arg1[3]) + var x90 uint64 + var x91 uint64 + x91, x90 = bits.Mul64(x2, arg1[2]) + var x92 uint64 + var x93 uint64 + x93, x92 = bits.Mul64(x2, arg1[1]) + var x94 uint64 + var x95 uint64 + x95, x94 = bits.Mul64(x2, arg1[0]) + var x96 uint64 + var x97 fiat_pasta_fp_uint1 + x96, x97 = fiat_pasta_fp_addcarryx_u64(x95, x92, 0x0) + var x98 uint64 + var x99 fiat_pasta_fp_uint1 + x98, x99 = fiat_pasta_fp_addcarryx_u64(x93, x90, x97) + var x100 uint64 + var x101 fiat_pasta_fp_uint1 + x100, x101 = fiat_pasta_fp_addcarryx_u64(x91, x88, x99) + x102 := (uint64(x101) + x89) + var x103 uint64 + var x104 fiat_pasta_fp_uint1 + x103, x104 = fiat_pasta_fp_addcarryx_u64(x79, x94, 0x0) + var x105 uint64 + var x106 fiat_pasta_fp_uint1 + x105, x106 = fiat_pasta_fp_addcarryx_u64(x81, x96, x104) + var x107 uint64 + var x108 fiat_pasta_fp_uint1 + x107, x108 = fiat_pasta_fp_addcarryx_u64(x83, x98, x106) + var x109 uint64 + var x110 fiat_pasta_fp_uint1 + x109, x110 = fiat_pasta_fp_addcarryx_u64(x85, x100, x108) + var x111 uint64 + var x112 fiat_pasta_fp_uint1 + x111, x112 = fiat_pasta_fp_addcarryx_u64(x87, x102, x110) + var x113 uint64 + _, x113 = bits.Mul64(x103, 0x992d30ecffffffff) + var x115 uint64 + var x116 uint64 + x116, x115 = bits.Mul64(x113, 0x4000000000000000) + var x117 uint64 + var x118 uint64 + x118, x117 = bits.Mul64(x113, 0x224698fc094cf91b) + var x119 uint64 + var x120 uint64 + x120, x119 = bits.Mul64(x113, 0x992d30ed00000001) + var x121 uint64 + var x122 fiat_pasta_fp_uint1 + x121, x122 = fiat_pasta_fp_addcarryx_u64(x120, x117, 0x0) + x123 := (uint64(x122) + x118) + var x125 fiat_pasta_fp_uint1 + _, x125 = fiat_pasta_fp_addcarryx_u64(x103, x119, 0x0) + var x126 uint64 + var x127 fiat_pasta_fp_uint1 + x126, x127 = fiat_pasta_fp_addcarryx_u64(x105, x121, x125) + var x128 uint64 + var x129 fiat_pasta_fp_uint1 + x128, x129 = fiat_pasta_fp_addcarryx_u64(x107, x123, x127) + var x130 uint64 + var x131 fiat_pasta_fp_uint1 + x130, x131 = fiat_pasta_fp_addcarryx_u64(x109, x115, x129) + var x132 uint64 + var x133 fiat_pasta_fp_uint1 + x132, x133 = fiat_pasta_fp_addcarryx_u64(x111, x116, x131) + x134 := (uint64(x133) + uint64(x112)) + var x135 uint64 + var x136 uint64 + x136, x135 = bits.Mul64(x3, arg1[3]) + var x137 uint64 + var x138 uint64 + x138, x137 = bits.Mul64(x3, arg1[2]) + var x139 uint64 + var x140 uint64 + x140, x139 = 
bits.Mul64(x3, arg1[1]) + var x141 uint64 + var x142 uint64 + x142, x141 = bits.Mul64(x3, arg1[0]) + var x143 uint64 + var x144 fiat_pasta_fp_uint1 + x143, x144 = fiat_pasta_fp_addcarryx_u64(x142, x139, 0x0) + var x145 uint64 + var x146 fiat_pasta_fp_uint1 + x145, x146 = fiat_pasta_fp_addcarryx_u64(x140, x137, x144) + var x147 uint64 + var x148 fiat_pasta_fp_uint1 + x147, x148 = fiat_pasta_fp_addcarryx_u64(x138, x135, x146) + x149 := (uint64(x148) + x136) + var x150 uint64 + var x151 fiat_pasta_fp_uint1 + x150, x151 = fiat_pasta_fp_addcarryx_u64(x126, x141, 0x0) + var x152 uint64 + var x153 fiat_pasta_fp_uint1 + x152, x153 = fiat_pasta_fp_addcarryx_u64(x128, x143, x151) + var x154 uint64 + var x155 fiat_pasta_fp_uint1 + x154, x155 = fiat_pasta_fp_addcarryx_u64(x130, x145, x153) + var x156 uint64 + var x157 fiat_pasta_fp_uint1 + x156, x157 = fiat_pasta_fp_addcarryx_u64(x132, x147, x155) + var x158 uint64 + var x159 fiat_pasta_fp_uint1 + x158, x159 = fiat_pasta_fp_addcarryx_u64(x134, x149, x157) + var x160 uint64 + _, x160 = bits.Mul64(x150, 0x992d30ecffffffff) + var x162 uint64 + var x163 uint64 + x163, x162 = bits.Mul64(x160, 0x4000000000000000) + var x164 uint64 + var x165 uint64 + x165, x164 = bits.Mul64(x160, 0x224698fc094cf91b) + var x166 uint64 + var x167 uint64 + x167, x166 = bits.Mul64(x160, 0x992d30ed00000001) + var x168 uint64 + var x169 fiat_pasta_fp_uint1 + x168, x169 = fiat_pasta_fp_addcarryx_u64(x167, x164, 0x0) + x170 := (uint64(x169) + x165) + var x172 fiat_pasta_fp_uint1 + _, x172 = fiat_pasta_fp_addcarryx_u64(x150, x166, 0x0) + var x173 uint64 + var x174 fiat_pasta_fp_uint1 + x173, x174 = fiat_pasta_fp_addcarryx_u64(x152, x168, x172) + var x175 uint64 + var x176 fiat_pasta_fp_uint1 + x175, x176 = fiat_pasta_fp_addcarryx_u64(x154, x170, x174) + var x177 uint64 + var x178 fiat_pasta_fp_uint1 + x177, x178 = fiat_pasta_fp_addcarryx_u64(x156, x162, x176) + var x179 uint64 + var x180 fiat_pasta_fp_uint1 + x179, x180 = fiat_pasta_fp_addcarryx_u64(x158, x163, x178) + x181 := (uint64(x180) + uint64(x159)) + var x182 uint64 + var x183 fiat_pasta_fp_uint1 + x182, x183 = fiat_pasta_fp_subborrowx_u64(x173, 0x992d30ed00000001, 0x0) + var x184 uint64 + var x185 fiat_pasta_fp_uint1 + x184, x185 = fiat_pasta_fp_subborrowx_u64(x175, 0x224698fc094cf91b, x183) + var x186 uint64 + var x187 fiat_pasta_fp_uint1 + x186, x187 = fiat_pasta_fp_subborrowx_u64(x177, uint64(0x0), x185) + var x188 uint64 + var x189 fiat_pasta_fp_uint1 + x188, x189 = fiat_pasta_fp_subborrowx_u64(x179, 0x4000000000000000, x187) + var x191 fiat_pasta_fp_uint1 + _, x191 = fiat_pasta_fp_subborrowx_u64(x181, uint64(0x0), x189) + var x192 uint64 + fiat_pasta_fp_cmovznz_u64(&x192, x191, x182, x173) + var x193 uint64 + fiat_pasta_fp_cmovznz_u64(&x193, x191, x184, x175) + var x194 uint64 + fiat_pasta_fp_cmovznz_u64(&x194, x191, x186, x177) + var x195 uint64 + fiat_pasta_fp_cmovznz_u64(&x195, x191, x188, x179) + out1[0] = x192 + out1[1] = x193 + out1[2] = x194 + out1[3] = x195 +} + +// The function fiat_pasta_fp_add adds two field elements in the Montgomery domain. 
+// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// 0 ≤ eval arg2 < m +// +// Postconditions: +// +// eval (from_montgomery out1) mod m = (eval (from_montgomery arg1) + eval (from_montgomery arg2)) mod m +// 0 ≤ eval out1 < m +func fiat_pasta_fp_add( + out1 *fiat_pasta_fp_montgomery_domain_field_element, + arg1 *fiat_pasta_fp_montgomery_domain_field_element, + arg2 *fiat_pasta_fp_montgomery_domain_field_element, +) { + var x1 uint64 + var x2 fiat_pasta_fp_uint1 + x1, x2 = fiat_pasta_fp_addcarryx_u64(arg1[0], arg2[0], 0x0) + var x3 uint64 + var x4 fiat_pasta_fp_uint1 + x3, x4 = fiat_pasta_fp_addcarryx_u64(arg1[1], arg2[1], x2) + var x5 uint64 + var x6 fiat_pasta_fp_uint1 + x5, x6 = fiat_pasta_fp_addcarryx_u64(arg1[2], arg2[2], x4) + var x7 uint64 + var x8 fiat_pasta_fp_uint1 + x7, x8 = fiat_pasta_fp_addcarryx_u64(arg1[3], arg2[3], x6) + var x9 uint64 + var x10 fiat_pasta_fp_uint1 + x9, x10 = fiat_pasta_fp_subborrowx_u64(x1, 0x992d30ed00000001, 0x0) + var x11 uint64 + var x12 fiat_pasta_fp_uint1 + x11, x12 = fiat_pasta_fp_subborrowx_u64(x3, 0x224698fc094cf91b, x10) + var x13 uint64 + var x14 fiat_pasta_fp_uint1 + x13, x14 = fiat_pasta_fp_subborrowx_u64(x5, uint64(0x0), x12) + var x15 uint64 + var x16 fiat_pasta_fp_uint1 + x15, x16 = fiat_pasta_fp_subborrowx_u64(x7, 0x4000000000000000, x14) + var x18 fiat_pasta_fp_uint1 + _, x18 = fiat_pasta_fp_subborrowx_u64(uint64(x8), uint64(0x0), x16) + var x19 uint64 + fiat_pasta_fp_cmovznz_u64(&x19, x18, x9, x1) + var x20 uint64 + fiat_pasta_fp_cmovznz_u64(&x20, x18, x11, x3) + var x21 uint64 + fiat_pasta_fp_cmovznz_u64(&x21, x18, x13, x5) + var x22 uint64 + fiat_pasta_fp_cmovznz_u64(&x22, x18, x15, x7) + out1[0] = x19 + out1[1] = x20 + out1[2] = x21 + out1[3] = x22 +} + +// The function fiat_pasta_fp_sub subtracts two field elements in the Montgomery domain. +// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// 0 ≤ eval arg2 < m +// +// Postconditions: +// +// eval (from_montgomery out1) mod m = (eval (from_montgomery arg1) - eval (from_montgomery arg2)) mod m +// 0 ≤ eval out1 < m +func fiat_pasta_fp_sub( + out1 *fiat_pasta_fp_montgomery_domain_field_element, + arg1 *fiat_pasta_fp_montgomery_domain_field_element, + arg2 *fiat_pasta_fp_montgomery_domain_field_element, +) { + var x1 uint64 + var x2 fiat_pasta_fp_uint1 + x1, x2 = fiat_pasta_fp_subborrowx_u64(arg1[0], arg2[0], 0x0) + var x3 uint64 + var x4 fiat_pasta_fp_uint1 + x3, x4 = fiat_pasta_fp_subborrowx_u64(arg1[1], arg2[1], x2) + var x5 uint64 + var x6 fiat_pasta_fp_uint1 + x5, x6 = fiat_pasta_fp_subborrowx_u64(arg1[2], arg2[2], x4) + var x7 uint64 + var x8 fiat_pasta_fp_uint1 + x7, x8 = fiat_pasta_fp_subborrowx_u64(arg1[3], arg2[3], x6) + var x9 uint64 + fiat_pasta_fp_cmovznz_u64(&x9, x8, uint64(0x0), 0xffffffffffffffff) + var x10 uint64 + var x11 fiat_pasta_fp_uint1 + x10, x11 = fiat_pasta_fp_addcarryx_u64(x1, (x9 & 0x992d30ed00000001), 0x0) + var x12 uint64 + var x13 fiat_pasta_fp_uint1 + x12, x13 = fiat_pasta_fp_addcarryx_u64(x3, (x9 & 0x224698fc094cf91b), x11) + var x14 uint64 + var x15 fiat_pasta_fp_uint1 + x14, x15 = fiat_pasta_fp_addcarryx_u64(x5, uint64(0x0), x13) + var x16 uint64 + x16, _ = fiat_pasta_fp_addcarryx_u64(x7, (x9 & 0x4000000000000000), x15) + out1[0] = x10 + out1[1] = x12 + out1[2] = x14 + out1[3] = x16 +} + +// The function fiat_pasta_fp_opp negates a field element in the Montgomery domain. 
+// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// +// Postconditions: +// +// eval (from_montgomery out1) mod m = -eval (from_montgomery arg1) mod m +// 0 ≤ eval out1 < m +func fiat_pasta_fp_opp( + out1 *fiat_pasta_fp_montgomery_domain_field_element, + arg1 *fiat_pasta_fp_montgomery_domain_field_element, +) { + var x1 uint64 + var x2 fiat_pasta_fp_uint1 + x1, x2 = fiat_pasta_fp_subborrowx_u64(uint64(0x0), arg1[0], 0x0) + var x3 uint64 + var x4 fiat_pasta_fp_uint1 + x3, x4 = fiat_pasta_fp_subborrowx_u64(uint64(0x0), arg1[1], x2) + var x5 uint64 + var x6 fiat_pasta_fp_uint1 + x5, x6 = fiat_pasta_fp_subborrowx_u64(uint64(0x0), arg1[2], x4) + var x7 uint64 + var x8 fiat_pasta_fp_uint1 + x7, x8 = fiat_pasta_fp_subborrowx_u64(uint64(0x0), arg1[3], x6) + var x9 uint64 + fiat_pasta_fp_cmovznz_u64(&x9, x8, uint64(0x0), 0xffffffffffffffff) + var x10 uint64 + var x11 fiat_pasta_fp_uint1 + x10, x11 = fiat_pasta_fp_addcarryx_u64(x1, (x9 & 0x992d30ed00000001), 0x0) + var x12 uint64 + var x13 fiat_pasta_fp_uint1 + x12, x13 = fiat_pasta_fp_addcarryx_u64(x3, (x9 & 0x224698fc094cf91b), x11) + var x14 uint64 + var x15 fiat_pasta_fp_uint1 + x14, x15 = fiat_pasta_fp_addcarryx_u64(x5, uint64(0x0), x13) + var x16 uint64 + x16, _ = fiat_pasta_fp_addcarryx_u64(x7, (x9 & 0x4000000000000000), x15) + out1[0] = x10 + out1[1] = x12 + out1[2] = x14 + out1[3] = x16 +} + +// The function fiat_pasta_fp_from_montgomery translates a field element out of the Montgomery domain. +// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// +// Postconditions: +// +// eval out1 mod m = (eval arg1 * ((2^64)⁻¹ mod m)^4) mod m +// 0 ≤ eval out1 < m +func fiat_pasta_fp_from_montgomery( + out1 *fiat_pasta_fp_non_montgomery_domain_field_element, + arg1 *fiat_pasta_fp_montgomery_domain_field_element, +) { + x1 := arg1[0] + var x2 uint64 + _, x2 = bits.Mul64(x1, 0x992d30ecffffffff) + var x4 uint64 + var x5 uint64 + x5, x4 = bits.Mul64(x2, 0x4000000000000000) + var x6 uint64 + var x7 uint64 + x7, x6 = bits.Mul64(x2, 0x224698fc094cf91b) + var x8 uint64 + var x9 uint64 + x9, x8 = bits.Mul64(x2, 0x992d30ed00000001) + var x10 uint64 + var x11 fiat_pasta_fp_uint1 + x10, x11 = fiat_pasta_fp_addcarryx_u64(x9, x6, 0x0) + var x13 fiat_pasta_fp_uint1 + _, x13 = fiat_pasta_fp_addcarryx_u64(x1, x8, 0x0) + var x14 uint64 + var x15 fiat_pasta_fp_uint1 + x14, x15 = fiat_pasta_fp_addcarryx_u64(uint64(0x0), x10, x13) + var x16 uint64 + var x17 fiat_pasta_fp_uint1 + x16, x17 = fiat_pasta_fp_addcarryx_u64(x14, arg1[1], 0x0) + var x18 uint64 + _, x18 = bits.Mul64(x16, 0x992d30ecffffffff) + var x20 uint64 + var x21 uint64 + x21, x20 = bits.Mul64(x18, 0x4000000000000000) + var x22 uint64 + var x23 uint64 + x23, x22 = bits.Mul64(x18, 0x224698fc094cf91b) + var x24 uint64 + var x25 uint64 + x25, x24 = bits.Mul64(x18, 0x992d30ed00000001) + var x26 uint64 + var x27 fiat_pasta_fp_uint1 + x26, x27 = fiat_pasta_fp_addcarryx_u64(x25, x22, 0x0) + var x29 fiat_pasta_fp_uint1 + _, x29 = fiat_pasta_fp_addcarryx_u64(x16, x24, 0x0) + var x30 uint64 + var x31 fiat_pasta_fp_uint1 + x30, x31 = fiat_pasta_fp_addcarryx_u64( + (uint64(x17) + (uint64(x15) + (uint64(x11) + x7))), + x26, + x29, + ) + var x32 uint64 + var x33 fiat_pasta_fp_uint1 + x32, x33 = fiat_pasta_fp_addcarryx_u64(x4, (uint64(x27) + x23), x31) + var x34 uint64 + var x35 fiat_pasta_fp_uint1 + x34, x35 = fiat_pasta_fp_addcarryx_u64(x5, x20, x33) + var x36 uint64 + var x37 fiat_pasta_fp_uint1 + x36, x37 = fiat_pasta_fp_addcarryx_u64(x30, arg1[2], 0x0) + var x38 uint64 + var x39 fiat_pasta_fp_uint1 + x38, x39 = 
fiat_pasta_fp_addcarryx_u64(x32, uint64(0x0), x37) + var x40 uint64 + var x41 fiat_pasta_fp_uint1 + x40, x41 = fiat_pasta_fp_addcarryx_u64(x34, uint64(0x0), x39) + var x42 uint64 + _, x42 = bits.Mul64(x36, 0x992d30ecffffffff) + var x44 uint64 + var x45 uint64 + x45, x44 = bits.Mul64(x42, 0x4000000000000000) + var x46 uint64 + var x47 uint64 + x47, x46 = bits.Mul64(x42, 0x224698fc094cf91b) + var x48 uint64 + var x49 uint64 + x49, x48 = bits.Mul64(x42, 0x992d30ed00000001) + var x50 uint64 + var x51 fiat_pasta_fp_uint1 + x50, x51 = fiat_pasta_fp_addcarryx_u64(x49, x46, 0x0) + var x53 fiat_pasta_fp_uint1 + _, x53 = fiat_pasta_fp_addcarryx_u64(x36, x48, 0x0) + var x54 uint64 + var x55 fiat_pasta_fp_uint1 + x54, x55 = fiat_pasta_fp_addcarryx_u64(x38, x50, x53) + var x56 uint64 + var x57 fiat_pasta_fp_uint1 + x56, x57 = fiat_pasta_fp_addcarryx_u64(x40, (uint64(x51) + x47), x55) + var x58 uint64 + var x59 fiat_pasta_fp_uint1 + x58, x59 = fiat_pasta_fp_addcarryx_u64((uint64(x41) + (uint64(x35) + x21)), x44, x57) + var x60 uint64 + var x61 fiat_pasta_fp_uint1 + x60, x61 = fiat_pasta_fp_addcarryx_u64(x54, arg1[3], 0x0) + var x62 uint64 + var x63 fiat_pasta_fp_uint1 + x62, x63 = fiat_pasta_fp_addcarryx_u64(x56, uint64(0x0), x61) + var x64 uint64 + var x65 fiat_pasta_fp_uint1 + x64, x65 = fiat_pasta_fp_addcarryx_u64(x58, uint64(0x0), x63) + var x66 uint64 + _, x66 = bits.Mul64(x60, 0x992d30ecffffffff) + var x68 uint64 + var x69 uint64 + x69, x68 = bits.Mul64(x66, 0x4000000000000000) + var x70 uint64 + var x71 uint64 + x71, x70 = bits.Mul64(x66, 0x224698fc094cf91b) + var x72 uint64 + var x73 uint64 + x73, x72 = bits.Mul64(x66, 0x992d30ed00000001) + var x74 uint64 + var x75 fiat_pasta_fp_uint1 + x74, x75 = fiat_pasta_fp_addcarryx_u64(x73, x70, 0x0) + var x77 fiat_pasta_fp_uint1 + _, x77 = fiat_pasta_fp_addcarryx_u64(x60, x72, 0x0) + var x78 uint64 + var x79 fiat_pasta_fp_uint1 + x78, x79 = fiat_pasta_fp_addcarryx_u64(x62, x74, x77) + var x80 uint64 + var x81 fiat_pasta_fp_uint1 + x80, x81 = fiat_pasta_fp_addcarryx_u64(x64, (uint64(x75) + x71), x79) + var x82 uint64 + var x83 fiat_pasta_fp_uint1 + x82, x83 = fiat_pasta_fp_addcarryx_u64((uint64(x65) + (uint64(x59) + x45)), x68, x81) + x84 := (uint64(x83) + x69) + var x85 uint64 + var x86 fiat_pasta_fp_uint1 + x85, x86 = fiat_pasta_fp_subborrowx_u64(x78, 0x992d30ed00000001, 0x0) + var x87 uint64 + var x88 fiat_pasta_fp_uint1 + x87, x88 = fiat_pasta_fp_subborrowx_u64(x80, 0x224698fc094cf91b, x86) + var x89 uint64 + var x90 fiat_pasta_fp_uint1 + x89, x90 = fiat_pasta_fp_subborrowx_u64(x82, uint64(0x0), x88) + var x91 uint64 + var x92 fiat_pasta_fp_uint1 + x91, x92 = fiat_pasta_fp_subborrowx_u64(x84, 0x4000000000000000, x90) + var x94 fiat_pasta_fp_uint1 + _, x94 = fiat_pasta_fp_subborrowx_u64(uint64(0x0), uint64(0x0), x92) + var x95 uint64 + fiat_pasta_fp_cmovznz_u64(&x95, x94, x85, x78) + var x96 uint64 + fiat_pasta_fp_cmovznz_u64(&x96, x94, x87, x80) + var x97 uint64 + fiat_pasta_fp_cmovznz_u64(&x97, x94, x89, x82) + var x98 uint64 + fiat_pasta_fp_cmovznz_u64(&x98, x94, x91, x84) + out1[0] = x95 + out1[1] = x96 + out1[2] = x97 + out1[3] = x98 +} + +// The function fiat_pasta_fp_to_montgomery translates a field element into the Montgomery domain. 
+// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// +// Postconditions: +// +// eval (from_montgomery out1) mod m = eval arg1 mod m +// 0 ≤ eval out1 < m +func fiat_pasta_fp_to_montgomery( + out1 *fiat_pasta_fp_montgomery_domain_field_element, + arg1 *fiat_pasta_fp_non_montgomery_domain_field_element, +) { + x1 := arg1[1] + x2 := arg1[2] + x3 := arg1[3] + x4 := arg1[0] + var x5 uint64 + var x6 uint64 + x6, x5 = bits.Mul64(x4, 0x96d41af7b9cb714) + var x7 uint64 + var x8 uint64 + x8, x7 = bits.Mul64(x4, 0x7797a99bc3c95d18) + var x9 uint64 + var x10 uint64 + x10, x9 = bits.Mul64(x4, 0xd7d30dbd8b0de0e7) + var x11 uint64 + var x12 uint64 + x12, x11 = bits.Mul64(x4, 0x8c78ecb30000000f) + var x13 uint64 + var x14 fiat_pasta_fp_uint1 + x13, x14 = fiat_pasta_fp_addcarryx_u64(x12, x9, 0x0) + var x15 uint64 + var x16 fiat_pasta_fp_uint1 + x15, x16 = fiat_pasta_fp_addcarryx_u64(x10, x7, x14) + var x17 uint64 + var x18 fiat_pasta_fp_uint1 + x17, x18 = fiat_pasta_fp_addcarryx_u64(x8, x5, x16) + var x19 uint64 + _, x19 = bits.Mul64(x11, 0x992d30ecffffffff) + var x21 uint64 + var x22 uint64 + x22, x21 = bits.Mul64(x19, 0x4000000000000000) + var x23 uint64 + var x24 uint64 + x24, x23 = bits.Mul64(x19, 0x224698fc094cf91b) + var x25 uint64 + var x26 uint64 + x26, x25 = bits.Mul64(x19, 0x992d30ed00000001) + var x27 uint64 + var x28 fiat_pasta_fp_uint1 + x27, x28 = fiat_pasta_fp_addcarryx_u64(x26, x23, 0x0) + var x30 fiat_pasta_fp_uint1 + _, x30 = fiat_pasta_fp_addcarryx_u64(x11, x25, 0x0) + var x31 uint64 + var x32 fiat_pasta_fp_uint1 + x31, x32 = fiat_pasta_fp_addcarryx_u64(x13, x27, x30) + var x33 uint64 + var x34 fiat_pasta_fp_uint1 + x33, x34 = fiat_pasta_fp_addcarryx_u64(x15, (uint64(x28) + x24), x32) + var x35 uint64 + var x36 fiat_pasta_fp_uint1 + x35, x36 = fiat_pasta_fp_addcarryx_u64(x17, x21, x34) + var x37 uint64 + var x38 uint64 + x38, x37 = bits.Mul64(x1, 0x96d41af7b9cb714) + var x39 uint64 + var x40 uint64 + x40, x39 = bits.Mul64(x1, 0x7797a99bc3c95d18) + var x41 uint64 + var x42 uint64 + x42, x41 = bits.Mul64(x1, 0xd7d30dbd8b0de0e7) + var x43 uint64 + var x44 uint64 + x44, x43 = bits.Mul64(x1, 0x8c78ecb30000000f) + var x45 uint64 + var x46 fiat_pasta_fp_uint1 + x45, x46 = fiat_pasta_fp_addcarryx_u64(x44, x41, 0x0) + var x47 uint64 + var x48 fiat_pasta_fp_uint1 + x47, x48 = fiat_pasta_fp_addcarryx_u64(x42, x39, x46) + var x49 uint64 + var x50 fiat_pasta_fp_uint1 + x49, x50 = fiat_pasta_fp_addcarryx_u64(x40, x37, x48) + var x51 uint64 + var x52 fiat_pasta_fp_uint1 + x51, x52 = fiat_pasta_fp_addcarryx_u64(x31, x43, 0x0) + var x53 uint64 + var x54 fiat_pasta_fp_uint1 + x53, x54 = fiat_pasta_fp_addcarryx_u64(x33, x45, x52) + var x55 uint64 + var x56 fiat_pasta_fp_uint1 + x55, x56 = fiat_pasta_fp_addcarryx_u64(x35, x47, x54) + var x57 uint64 + var x58 fiat_pasta_fp_uint1 + x57, x58 = fiat_pasta_fp_addcarryx_u64(((uint64(x36) + (uint64(x18) + x6)) + x22), x49, x56) + var x59 uint64 + _, x59 = bits.Mul64(x51, 0x992d30ecffffffff) + var x61 uint64 + var x62 uint64 + x62, x61 = bits.Mul64(x59, 0x4000000000000000) + var x63 uint64 + var x64 uint64 + x64, x63 = bits.Mul64(x59, 0x224698fc094cf91b) + var x65 uint64 + var x66 uint64 + x66, x65 = bits.Mul64(x59, 0x992d30ed00000001) + var x67 uint64 + var x68 fiat_pasta_fp_uint1 + x67, x68 = fiat_pasta_fp_addcarryx_u64(x66, x63, 0x0) + var x70 fiat_pasta_fp_uint1 + _, x70 = fiat_pasta_fp_addcarryx_u64(x51, x65, 0x0) + var x71 uint64 + var x72 fiat_pasta_fp_uint1 + x71, x72 = fiat_pasta_fp_addcarryx_u64(x53, x67, x70) + var x73 uint64 + var x74 
fiat_pasta_fp_uint1 + x73, x74 = fiat_pasta_fp_addcarryx_u64(x55, (uint64(x68) + x64), x72) + var x75 uint64 + var x76 fiat_pasta_fp_uint1 + x75, x76 = fiat_pasta_fp_addcarryx_u64(x57, x61, x74) + var x77 uint64 + var x78 uint64 + x78, x77 = bits.Mul64(x2, 0x96d41af7b9cb714) + var x79 uint64 + var x80 uint64 + x80, x79 = bits.Mul64(x2, 0x7797a99bc3c95d18) + var x81 uint64 + var x82 uint64 + x82, x81 = bits.Mul64(x2, 0xd7d30dbd8b0de0e7) + var x83 uint64 + var x84 uint64 + x84, x83 = bits.Mul64(x2, 0x8c78ecb30000000f) + var x85 uint64 + var x86 fiat_pasta_fp_uint1 + x85, x86 = fiat_pasta_fp_addcarryx_u64(x84, x81, 0x0) + var x87 uint64 + var x88 fiat_pasta_fp_uint1 + x87, x88 = fiat_pasta_fp_addcarryx_u64(x82, x79, x86) + var x89 uint64 + var x90 fiat_pasta_fp_uint1 + x89, x90 = fiat_pasta_fp_addcarryx_u64(x80, x77, x88) + var x91 uint64 + var x92 fiat_pasta_fp_uint1 + x91, x92 = fiat_pasta_fp_addcarryx_u64(x71, x83, 0x0) + var x93 uint64 + var x94 fiat_pasta_fp_uint1 + x93, x94 = fiat_pasta_fp_addcarryx_u64(x73, x85, x92) + var x95 uint64 + var x96 fiat_pasta_fp_uint1 + x95, x96 = fiat_pasta_fp_addcarryx_u64(x75, x87, x94) + var x97 uint64 + var x98 fiat_pasta_fp_uint1 + x97, x98 = fiat_pasta_fp_addcarryx_u64( + ((uint64(x76) + (uint64(x58) + (uint64(x50) + x38))) + x62), + x89, + x96, + ) + var x99 uint64 + _, x99 = bits.Mul64(x91, 0x992d30ecffffffff) + var x101 uint64 + var x102 uint64 + x102, x101 = bits.Mul64(x99, 0x4000000000000000) + var x103 uint64 + var x104 uint64 + x104, x103 = bits.Mul64(x99, 0x224698fc094cf91b) + var x105 uint64 + var x106 uint64 + x106, x105 = bits.Mul64(x99, 0x992d30ed00000001) + var x107 uint64 + var x108 fiat_pasta_fp_uint1 + x107, x108 = fiat_pasta_fp_addcarryx_u64(x106, x103, 0x0) + var x110 fiat_pasta_fp_uint1 + _, x110 = fiat_pasta_fp_addcarryx_u64(x91, x105, 0x0) + var x111 uint64 + var x112 fiat_pasta_fp_uint1 + x111, x112 = fiat_pasta_fp_addcarryx_u64(x93, x107, x110) + var x113 uint64 + var x114 fiat_pasta_fp_uint1 + x113, x114 = fiat_pasta_fp_addcarryx_u64(x95, (uint64(x108) + x104), x112) + var x115 uint64 + var x116 fiat_pasta_fp_uint1 + x115, x116 = fiat_pasta_fp_addcarryx_u64(x97, x101, x114) + var x117 uint64 + var x118 uint64 + x118, x117 = bits.Mul64(x3, 0x96d41af7b9cb714) + var x119 uint64 + var x120 uint64 + x120, x119 = bits.Mul64(x3, 0x7797a99bc3c95d18) + var x121 uint64 + var x122 uint64 + x122, x121 = bits.Mul64(x3, 0xd7d30dbd8b0de0e7) + var x123 uint64 + var x124 uint64 + x124, x123 = bits.Mul64(x3, 0x8c78ecb30000000f) + var x125 uint64 + var x126 fiat_pasta_fp_uint1 + x125, x126 = fiat_pasta_fp_addcarryx_u64(x124, x121, 0x0) + var x127 uint64 + var x128 fiat_pasta_fp_uint1 + x127, x128 = fiat_pasta_fp_addcarryx_u64(x122, x119, x126) + var x129 uint64 + var x130 fiat_pasta_fp_uint1 + x129, x130 = fiat_pasta_fp_addcarryx_u64(x120, x117, x128) + var x131 uint64 + var x132 fiat_pasta_fp_uint1 + x131, x132 = fiat_pasta_fp_addcarryx_u64(x111, x123, 0x0) + var x133 uint64 + var x134 fiat_pasta_fp_uint1 + x133, x134 = fiat_pasta_fp_addcarryx_u64(x113, x125, x132) + var x135 uint64 + var x136 fiat_pasta_fp_uint1 + x135, x136 = fiat_pasta_fp_addcarryx_u64(x115, x127, x134) + var x137 uint64 + var x138 fiat_pasta_fp_uint1 + x137, x138 = fiat_pasta_fp_addcarryx_u64( + ((uint64(x116) + (uint64(x98) + (uint64(x90) + x78))) + x102), + x129, + x136, + ) + var x139 uint64 + _, x139 = bits.Mul64(x131, 0x992d30ecffffffff) + var x141 uint64 + var x142 uint64 + x142, x141 = bits.Mul64(x139, 0x4000000000000000) + var x143 uint64 + var x144 uint64 + x144, 
x143 = bits.Mul64(x139, 0x224698fc094cf91b) + var x145 uint64 + var x146 uint64 + x146, x145 = bits.Mul64(x139, 0x992d30ed00000001) + var x147 uint64 + var x148 fiat_pasta_fp_uint1 + x147, x148 = fiat_pasta_fp_addcarryx_u64(x146, x143, 0x0) + var x150 fiat_pasta_fp_uint1 + _, x150 = fiat_pasta_fp_addcarryx_u64(x131, x145, 0x0) + var x151 uint64 + var x152 fiat_pasta_fp_uint1 + x151, x152 = fiat_pasta_fp_addcarryx_u64(x133, x147, x150) + var x153 uint64 + var x154 fiat_pasta_fp_uint1 + x153, x154 = fiat_pasta_fp_addcarryx_u64(x135, (uint64(x148) + x144), x152) + var x155 uint64 + var x156 fiat_pasta_fp_uint1 + x155, x156 = fiat_pasta_fp_addcarryx_u64(x137, x141, x154) + x157 := ((uint64(x156) + (uint64(x138) + (uint64(x130) + x118))) + x142) + var x158 uint64 + var x159 fiat_pasta_fp_uint1 + x158, x159 = fiat_pasta_fp_subborrowx_u64(x151, 0x992d30ed00000001, 0x0) + var x160 uint64 + var x161 fiat_pasta_fp_uint1 + x160, x161 = fiat_pasta_fp_subborrowx_u64(x153, 0x224698fc094cf91b, x159) + var x162 uint64 + var x163 fiat_pasta_fp_uint1 + x162, x163 = fiat_pasta_fp_subborrowx_u64(x155, uint64(0x0), x161) + var x164 uint64 + var x165 fiat_pasta_fp_uint1 + x164, x165 = fiat_pasta_fp_subborrowx_u64(x157, 0x4000000000000000, x163) + var x167 fiat_pasta_fp_uint1 + _, x167 = fiat_pasta_fp_subborrowx_u64(uint64(0x0), uint64(0x0), x165) + var x168 uint64 + fiat_pasta_fp_cmovznz_u64(&x168, x167, x158, x151) + var x169 uint64 + fiat_pasta_fp_cmovznz_u64(&x169, x167, x160, x153) + var x170 uint64 + fiat_pasta_fp_cmovznz_u64(&x170, x167, x162, x155) + var x171 uint64 + fiat_pasta_fp_cmovznz_u64(&x171, x167, x164, x157) + out1[0] = x168 + out1[1] = x169 + out1[2] = x170 + out1[3] = x171 +} + +// The function fiat_pasta_fp_selectznz is a multi-limb conditional select. +// +// Postconditions: +// +// eval out1 = (if arg1 = 0 then eval arg2 else eval arg3) +// +// Input Bounds: +// +// arg1: [0x0 ~> 0x1] +// arg2: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +// arg3: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +// +// Output Bounds: +// +// out1: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +func fiat_pasta_fp_selectznz( + out1 *[4]uint64, + arg1 fiat_pasta_fp_uint1, + arg2 *[4]uint64, + arg3 *[4]uint64, +) { + var x1 uint64 + fiat_pasta_fp_cmovznz_u64(&x1, arg1, arg2[0], arg3[0]) + var x2 uint64 + fiat_pasta_fp_cmovznz_u64(&x2, arg1, arg2[1], arg3[1]) + var x3 uint64 + fiat_pasta_fp_cmovznz_u64(&x3, arg1, arg2[2], arg3[2]) + var x4 uint64 + fiat_pasta_fp_cmovznz_u64(&x4, arg1, arg2[3], arg3[3]) + out1[0] = x1 + out1[1] = x2 + out1[2] = x3 + out1[3] = x4 +} + +// The function fiat_pasta_fp_to_bytes serializes a field element NOT in the Montgomery domain to bytes in little-endian order. 
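+//
+// Note that the input here is the plain little-endian limb representation, not the
+// Montgomery form: a Montgomery-domain value must first be passed through
+// fiat_pasta_fp_from_montgomery. out1[0] is the least significant byte of arg1[0].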
+// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// +// Postconditions: +// +// out1 = map (λ x, ⌊((eval arg1 mod m) mod 2^(8 * (x + 1))) / 2^(8 * x)⌋) [0..31] +// +// Input Bounds: +// +// arg1: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0x7fffffffffffffff]] +// +// Output Bounds: +// +// out1: [[0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0x7f]] +func fiat_pasta_fp_to_bytes(out1 *[32]uint8, arg1 *[4]uint64) { + x1 := arg1[3] + x2 := arg1[2] + x3 := arg1[1] + x4 := arg1[0] + x5 := (uint8(x4) & 0xff) + x6 := (x4 >> 8) + x7 := (uint8(x6) & 0xff) + x8 := (x6 >> 8) + x9 := (uint8(x8) & 0xff) + x10 := (x8 >> 8) + x11 := (uint8(x10) & 0xff) + x12 := (x10 >> 8) + x13 := (uint8(x12) & 0xff) + x14 := (x12 >> 8) + x15 := (uint8(x14) & 0xff) + x16 := (x14 >> 8) + x17 := (uint8(x16) & 0xff) + x18 := uint8((x16 >> 8)) + x19 := (uint8(x3) & 0xff) + x20 := (x3 >> 8) + x21 := (uint8(x20) & 0xff) + x22 := (x20 >> 8) + x23 := (uint8(x22) & 0xff) + x24 := (x22 >> 8) + x25 := (uint8(x24) & 0xff) + x26 := (x24 >> 8) + x27 := (uint8(x26) & 0xff) + x28 := (x26 >> 8) + x29 := (uint8(x28) & 0xff) + x30 := (x28 >> 8) + x31 := (uint8(x30) & 0xff) + x32 := uint8((x30 >> 8)) + x33 := (uint8(x2) & 0xff) + x34 := (x2 >> 8) + x35 := (uint8(x34) & 0xff) + x36 := (x34 >> 8) + x37 := (uint8(x36) & 0xff) + x38 := (x36 >> 8) + x39 := (uint8(x38) & 0xff) + x40 := (x38 >> 8) + x41 := (uint8(x40) & 0xff) + x42 := (x40 >> 8) + x43 := (uint8(x42) & 0xff) + x44 := (x42 >> 8) + x45 := (uint8(x44) & 0xff) + x46 := uint8((x44 >> 8)) + x47 := (uint8(x1) & 0xff) + x48 := (x1 >> 8) + x49 := (uint8(x48) & 0xff) + x50 := (x48 >> 8) + x51 := (uint8(x50) & 0xff) + x52 := (x50 >> 8) + x53 := (uint8(x52) & 0xff) + x54 := (x52 >> 8) + x55 := (uint8(x54) & 0xff) + x56 := (x54 >> 8) + x57 := (uint8(x56) & 0xff) + x58 := (x56 >> 8) + x59 := (uint8(x58) & 0xff) + x60 := uint8((x58 >> 8)) + out1[0] = x5 + out1[1] = x7 + out1[2] = x9 + out1[3] = x11 + out1[4] = x13 + out1[5] = x15 + out1[6] = x17 + out1[7] = x18 + out1[8] = x19 + out1[9] = x21 + out1[10] = x23 + out1[11] = x25 + out1[12] = x27 + out1[13] = x29 + out1[14] = x31 + out1[15] = x32 + out1[16] = x33 + out1[17] = x35 + out1[18] = x37 + out1[19] = x39 + out1[20] = x41 + out1[21] = x43 + out1[22] = x45 + out1[23] = x46 + out1[24] = x47 + out1[25] = x49 + out1[26] = x51 + out1[27] = x53 + out1[28] = x55 + out1[29] = x57 + out1[30] = x59 + out1[31] = x60 +} + +// The function fiat_pasta_fp_from_bytes deserializes a field element NOT in the Montgomery domain from bytes in little-endian order. 
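+//
+// No modular reduction is performed here: the precondition requires the encoded
+// value to already be below m, which is why the most significant input byte is
+// bounded by 0x7f (m is just above 2^254, so every canonical encoding fits in 255 bits).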
+// +// Preconditions: +// +// 0 ≤ bytes_eval arg1 < m +// +// Postconditions: +// +// eval out1 mod m = bytes_eval arg1 mod m +// 0 ≤ eval out1 < m +// +// Input Bounds: +// +// arg1: [[0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0x7f]] +// +// Output Bounds: +// +// out1: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0x7fffffffffffffff]] +func fiat_pasta_fp_from_bytes(out1 *[4]uint64, arg1 *[32]uint8) { + x1 := (uint64(arg1[31]) << 56) + x2 := (uint64(arg1[30]) << 48) + x3 := (uint64(arg1[29]) << 40) + x4 := (uint64(arg1[28]) << 32) + x5 := (uint64(arg1[27]) << 24) + x6 := (uint64(arg1[26]) << 16) + x7 := (uint64(arg1[25]) << 8) + x8 := arg1[24] + x9 := (uint64(arg1[23]) << 56) + x10 := (uint64(arg1[22]) << 48) + x11 := (uint64(arg1[21]) << 40) + x12 := (uint64(arg1[20]) << 32) + x13 := (uint64(arg1[19]) << 24) + x14 := (uint64(arg1[18]) << 16) + x15 := (uint64(arg1[17]) << 8) + x16 := arg1[16] + x17 := (uint64(arg1[15]) << 56) + x18 := (uint64(arg1[14]) << 48) + x19 := (uint64(arg1[13]) << 40) + x20 := (uint64(arg1[12]) << 32) + x21 := (uint64(arg1[11]) << 24) + x22 := (uint64(arg1[10]) << 16) + x23 := (uint64(arg1[9]) << 8) + x24 := arg1[8] + x25 := (uint64(arg1[7]) << 56) + x26 := (uint64(arg1[6]) << 48) + x27 := (uint64(arg1[5]) << 40) + x28 := (uint64(arg1[4]) << 32) + x29 := (uint64(arg1[3]) << 24) + x30 := (uint64(arg1[2]) << 16) + x31 := (uint64(arg1[1]) << 8) + x32 := arg1[0] + x33 := (x31 + uint64(x32)) + x34 := (x30 + x33) + x35 := (x29 + x34) + x36 := (x28 + x35) + x37 := (x27 + x36) + x38 := (x26 + x37) + x39 := (x25 + x38) + x40 := (x23 + uint64(x24)) + x41 := (x22 + x40) + x42 := (x21 + x41) + x43 := (x20 + x42) + x44 := (x19 + x43) + x45 := (x18 + x44) + x46 := (x17 + x45) + x47 := (x15 + uint64(x16)) + x48 := (x14 + x47) + x49 := (x13 + x48) + x50 := (x12 + x49) + x51 := (x11 + x50) + x52 := (x10 + x51) + x53 := (x9 + x52) + x54 := (x7 + uint64(x8)) + x55 := (x6 + x54) + x56 := (x5 + x55) + x57 := (x4 + x56) + x58 := (x3 + x57) + x59 := (x2 + x58) + x60 := (x1 + x59) + out1[0] = x39 + out1[1] = x46 + out1[2] = x53 + out1[3] = x60 +} diff --git a/core/curves/native/pasta/fq/fq.go b/core/curves/native/pasta/fq/fq.go new file mode 100644 index 0000000..2cbf01f --- /dev/null +++ b/core/curves/native/pasta/fq/fq.go @@ -0,0 +1,369 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package fq + +import ( + "encoding/binary" + "fmt" + "math/big" + + "github.com/sonr-io/sonr/crypto/internal" +) + +type Fq fiat_pasta_fq_montgomery_domain_field_element + +// r = 2^256 mod p +var r = &Fq{0x5b2b3e9cfffffffd, 0x992c350be3420567, 0xffffffffffffffff, 0x3fffffffffffffff} + +// r2 = 2^512 mod p +var r2 = &Fq{0xfc9678ff0000000f, 0x67bb433d891a16e3, 0x7fae231004ccf590, 0x096d41af7ccfdaa9} + +// r3 = 2^768 mod p +var r3 = &Fq{0x008b421c249dae4c, 0xe13bda50dba41326, 0x88fececb8e15cb63, 0x07dd97a06e6792c8} + +// generator = 5 mod p is a generator of the `p - 1` order multiplicative +// subgroup, or in other words a primitive element of the field. 
+var generator = &Fq{0x96bc8c8cffffffed, 0x74c2a54b49f7778e, 0xfffffffffffffffd, 0x3fffffffffffffff} + +var s = 32 + +// modulus representation +// p = 0x40000000000000000000000000000000224698fc0994a8dd8c46eb2100000001 +var modulus = &Fq{0x8c46eb2100000001, 0x224698fc0994a8dd, 0x0000000000000000, 0x4000000000000000} + +var BiModulus = new(big.Int).SetBytes([]byte{ + 0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x22, 0x46, 0x98, 0xfc, 0x09, 0x94, 0xa8, 0xdd, + 0x8c, 0x46, 0xeb, 0x21, 0x00, 0x00, 0x00, 0x01, +}) + +// Cmp returns -1 if fp < rhs +// 0 if fp == rhs +// 1 if fp > rhs +func (fq *Fq) Cmp(rhs *Fq) int { + gt := 0 + lt := 0 + for i := len(fq) - 1; i >= 0; i-- { + gt |= int((rhs[i]-fq[i])>>63) &^ lt + lt |= int((fq[i]-rhs[i])>>63) &^ gt + } + return gt - lt +} + +// Equal returns true if fp == rhs +func (fq *Fq) Equal(rhs *Fq) bool { + t := fq[0] ^ rhs[0] + t |= fq[1] ^ rhs[1] + t |= fq[2] ^ rhs[2] + t |= fq[3] ^ rhs[3] + return t == 0 +} + +// IsZero returns true if fp == 0 +func (fq *Fq) IsZero() bool { + t := fq[0] + t |= fq[1] + t |= fq[2] + t |= fq[3] + return t == 0 +} + +// IsOne returns true if fp == r +func (fq *Fq) IsOne() bool { + return fq.Equal(r) +} + +// Set fp == rhs +func (fq *Fq) Set(rhs *Fq) *Fq { + fq[0] = rhs[0] + fq[1] = rhs[1] + fq[2] = rhs[2] + fq[3] = rhs[3] + return fq +} + +// SetUint64 sets fp == rhs +func (fq *Fq) SetUint64(rhs uint64) *Fq { + r := &fiat_pasta_fq_non_montgomery_domain_field_element{rhs, 0, 0, 0} + fiat_pasta_fq_to_montgomery((*fiat_pasta_fq_montgomery_domain_field_element)(fq), r) + return fq +} + +func (fq *Fq) SetBool(rhs bool) *Fq { + if rhs { + fq.SetOne() + } else { + fq.SetZero() + } + return fq +} + +// SetOne fp == r +func (fq *Fq) SetOne() *Fq { + return fq.Set(r) +} + +// SetZero fp == 0 +func (fq *Fq) SetZero() *Fq { + fq[0] = 0 + fq[1] = 0 + fq[2] = 0 + fq[3] = 0 + return fq +} + +// SetBytesWide takes 64 bytes as input and treats them as a 512-bit number. +// Attributed to https://github.com/zcash/pasta_curves/blob/main/src/fields/fq.rs#L255 +// We reduce an arbitrary 512-bit number by decomposing it into two 256-bit digits +// with the higher bits multiplied by 2^256. Thus, we perform two reductions +// +// 1. the lower bits are multiplied by r^2, as normal +// 2. the upper bits are multiplied by r^2 * 2^256 = r^3 +// +// and computing their sum in the field. It remains to see that arbitrary 256-bit +// numbers can be placed into Montgomery form safely using the reduction. The +// reduction works so long as the product is less than r=2^256 multiplied by +// the modulus. This holds because for any `c` smaller than the modulus, we have +// that (2^256 - 1)*c is an acceptable product for the reduction. Therefore, the +// reduction always works so long as `c` is in the field; in this case it is either the +// constant `r2` or `r3`. 
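+//
+// In symbols, with R = 2^256 and fiat_pasta_fq_mul(a, b) = a * b * R^-1 mod q:
+//
+//	mul(d0, r2) = d0 * 2^512 * R^-1 = d0 * R            (d0 in Montgomery form)
+//	mul(d1, r3) = d1 * 2^768 * R^-1 = (d1 * 2^256) * R
+//
+// so their sum is the Montgomery representation of d0 + d1*2^256, i.e. of the full
+// 512-bit input reduced modulo q.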
+func (fq *Fq) SetBytesWide(input *[64]byte) *Fq { + d0 := fiat_pasta_fq_montgomery_domain_field_element{ + binary.LittleEndian.Uint64(input[:8]), + binary.LittleEndian.Uint64(input[8:16]), + binary.LittleEndian.Uint64(input[16:24]), + binary.LittleEndian.Uint64(input[24:32]), + } + d1 := fiat_pasta_fq_montgomery_domain_field_element{ + binary.LittleEndian.Uint64(input[32:40]), + binary.LittleEndian.Uint64(input[40:48]), + binary.LittleEndian.Uint64(input[48:56]), + binary.LittleEndian.Uint64(input[56:64]), + } + // Convert to Montgomery form + tv1 := &fiat_pasta_fq_montgomery_domain_field_element{} + tv2 := &fiat_pasta_fq_montgomery_domain_field_element{} + // d0 * r2 + d1 * r3 + fiat_pasta_fq_mul(tv1, &d0, (*fiat_pasta_fq_montgomery_domain_field_element)(r2)) + fiat_pasta_fq_mul(tv2, &d1, (*fiat_pasta_fq_montgomery_domain_field_element)(r3)) + fiat_pasta_fq_add((*fiat_pasta_fq_montgomery_domain_field_element)(fq), tv1, tv2) + return fq +} + +// SetBytes attempts to convert a little endian byte representation +// of a scalar into a `Fq`, failing if input is not canonical +func (fq *Fq) SetBytes(input *[32]byte) (*Fq, error) { + d0 := &Fq{ + binary.LittleEndian.Uint64(input[:8]), + binary.LittleEndian.Uint64(input[8:16]), + binary.LittleEndian.Uint64(input[16:24]), + binary.LittleEndian.Uint64(input[24:32]), + } + if d0.Cmp(modulus) != -1 { + return nil, fmt.Errorf("invalid byte sequence") + } + fiat_pasta_fq_from_bytes((*[4]uint64)(fq), input) + fiat_pasta_fq_to_montgomery( + (*fiat_pasta_fq_montgomery_domain_field_element)(fq), + (*fiat_pasta_fq_non_montgomery_domain_field_element)(fq), + ) + return fq, nil +} + +// SetBigInt initializes an element from big.Int +// The value is reduced by the modulus +func (fq *Fq) SetBigInt(bi *big.Int) *Fq { + var buffer [32]byte + r := new(big.Int).Set(bi) + r.Mod(r, BiModulus) + r.FillBytes(buffer[:]) + copy(buffer[:], internal.ReverseScalarBytes(buffer[:])) + _, _ = fq.SetBytes(&buffer) + return fq +} + +// SetRaw converts a raw array into a field element +func (fq *Fq) SetRaw(array *[4]uint64) *Fq { + fiat_pasta_fq_to_montgomery( + (*fiat_pasta_fq_montgomery_domain_field_element)(fq), + (*fiat_pasta_fq_non_montgomery_domain_field_element)(array), + ) + return fq +} + +// Bytes converts this element into a byte representation +// in little endian byte order +func (fq *Fq) Bytes() [32]byte { + var output [32]byte + tv := &fiat_pasta_fq_non_montgomery_domain_field_element{} + fiat_pasta_fq_from_montgomery(tv, (*fiat_pasta_fq_montgomery_domain_field_element)(fq)) + fiat_pasta_fq_to_bytes(&output, (*[4]uint64)(tv)) + return output +} + +// BigInt converts this element into the big.Int struct +func (fq *Fq) BigInt() *big.Int { + buffer := fq.Bytes() + return new(big.Int).SetBytes(internal.ReverseScalarBytes(buffer[:])) +} + +// Double this element +func (fq *Fq) Double(elem *Fq) *Fq { + delem := (*fiat_pasta_fq_montgomery_domain_field_element)(elem) + fiat_pasta_fq_add((*fiat_pasta_fq_montgomery_domain_field_element)(fq), delem, delem) + return fq +} + +// Square this element +func (fq *Fq) Square(elem *Fq) *Fq { + delem := (*fiat_pasta_fq_montgomery_domain_field_element)(elem) + fiat_pasta_fq_square((*fiat_pasta_fq_montgomery_domain_field_element)(fq), delem) + return fq +} + +// Sqrt this element, if it exists. If true, then value +// is a square root. 
If false, value is a QNR +func (fq *Fq) Sqrt(elem *Fq) (*Fq, bool) { + return fq.tonelliShanks(elem) +} + +// See sqrt_ts_ct at +// https://datatracker.ietf.org/doc/html/draft-irtf-cfrg-hash-to-curve-11#appendix-I.4 +func (fq *Fq) tonelliShanks(elem *Fq) (*Fq, bool) { + // c1 := 32 + // c2 := (q - 1) / (2^c1) + //c2 := [4]uint64{ + // 0x0994a8dd8c46eb21, + // 0x00000000224698fc, + // 0x0000000000000000, + // 0x0000000040000000, + //} + // c3 := (c2 - 1) / 2 + c3 := [4]uint64{ + 0x04ca546ec6237590, + 0x0000000011234c7e, + 0x0000000000000000, + 0x0000000020000000, + } + // c4 := generator + // c5 := new(Fq).pow(&generator, c2) + c5 := &Fq{ + 0x218077428c9942de, + 0xcc49578921b60494, + 0xac2e5d27b2efbee2, + 0xb79fa897f2db056, + } + + z := new(Fq).pow(elem, c3) + t := new(Fq).Square(z) + t.Mul(t, elem) + + z.Mul(z, elem) + + b := new(Fq).Set(t) + c := new(Fq).Set(c5) + flags := map[bool]int{ + true: 1, + false: 0, + } + + for i := s; i >= 2; i-- { + for j := 1; j <= i-2; j++ { + b.Square(b) + } + z.CMove(z, new(Fq).Mul(z, c), flags[!b.IsOne()]) + c.Square(c) + t.CMove(t, new(Fq).Mul(t, c), flags[!b.IsOne()]) + b.Set(t) + } + wasSquare := c.Square(z).Equal(elem) + return fq.Set(z), wasSquare +} + +// Invert this element i.e. compute the multiplicative inverse +// return false, zero if this element is zero +func (fq *Fq) Invert(elem *Fq) (*Fq, bool) { + // computes elem^(p - 2) mod p + exp := [4]uint64{ + 0x8c46eb20ffffffff, + 0x224698fc0994a8dd, + 0x0000000000000000, + 0x4000000000000000, + } + return fq.pow(elem, exp), !elem.IsZero() +} + +// Mul returns the result from multiplying this element by rhs +func (fq *Fq) Mul(lhs, rhs *Fq) *Fq { + dlhs := (*fiat_pasta_fq_montgomery_domain_field_element)(lhs) + drhs := (*fiat_pasta_fq_montgomery_domain_field_element)(rhs) + fiat_pasta_fq_mul((*fiat_pasta_fq_montgomery_domain_field_element)(fq), dlhs, drhs) + return fq +} + +// Sub returns the result from subtracting rhs from this element +func (fq *Fq) Sub(lhs, rhs *Fq) *Fq { + dlhs := (*fiat_pasta_fq_montgomery_domain_field_element)(lhs) + drhs := (*fiat_pasta_fq_montgomery_domain_field_element)(rhs) + fiat_pasta_fq_sub((*fiat_pasta_fq_montgomery_domain_field_element)(fq), dlhs, drhs) + return fq +} + +// Add returns the result from adding rhs to this element +func (fq *Fq) Add(lhs, rhs *Fq) *Fq { + dlhs := (*fiat_pasta_fq_montgomery_domain_field_element)(lhs) + drhs := (*fiat_pasta_fq_montgomery_domain_field_element)(rhs) + fiat_pasta_fq_add((*fiat_pasta_fq_montgomery_domain_field_element)(fq), dlhs, drhs) + return fq +} + +// Neg returns negation of this element +func (fq *Fq) Neg(elem *Fq) *Fq { + delem := (*fiat_pasta_fq_montgomery_domain_field_element)(elem) + fiat_pasta_fq_opp((*fiat_pasta_fq_montgomery_domain_field_element)(fq), delem) + return fq +} + +// Exp exponentiates this element by exp +func (fq *Fq) Exp(base, exp *Fq) *Fq { + // convert exponent to integer form + tv := &fiat_pasta_fq_non_montgomery_domain_field_element{} + fiat_pasta_fq_from_montgomery(tv, (*fiat_pasta_fq_montgomery_domain_field_element)(exp)) + + e := (*[4]uint64)(tv) + return fq.pow(base, *e) +} + +func (fq *Fq) pow(base *Fq, exp [4]uint64) *Fq { + res := new(Fq).SetOne() + tmp := new(Fq) + + for i := len(exp) - 1; i >= 0; i-- { + for j := 63; j >= 0; j-- { + res.Square(res) + tmp.Mul(res, base) + res.CMove(res, tmp, int(exp[i]>>j)&1) + } + } + return fq.Set(res) +} + +// CMove selects lhs if choice == 0 and rhs if choice == 1 +func (fq *Fq) CMove(lhs, rhs *Fq, choice int) *Fq { + dlhs := (*[4]uint64)(lhs) + drhs 
:= (*[4]uint64)(rhs) + fiat_pasta_fq_selectznz((*[4]uint64)(fq), fiat_pasta_fq_uint1(choice), dlhs, drhs) + return fq +} + +// ToRaw converts this element into the a [4]uint64 +func (fq *Fq) ToRaw() [4]uint64 { + res := &fiat_pasta_fq_non_montgomery_domain_field_element{} + fiat_pasta_fq_from_montgomery(res, (*fiat_pasta_fq_montgomery_domain_field_element)(fq)) + return *(*[4]uint64)(res) +} diff --git a/core/curves/native/pasta/fq/fq_test.go b/core/curves/native/pasta/fq/fq_test.go new file mode 100755 index 0000000..c203ddf --- /dev/null +++ b/core/curves/native/pasta/fq/fq_test.go @@ -0,0 +1,273 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package fq + +import ( + "math/big" + "math/rand" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestFqSetOne(t *testing.T) { + fq := new(Fq).SetOne() + require.NotNil(t, fq) + require.True(t, fq.Equal(r)) +} + +func TestFqSetUint64(t *testing.T) { + act := new(Fq).SetUint64(1 << 60) + require.NotNil(t, act) + // Remember it will be in montgomery form + require.Equal(t, int(act[0]), 0x4c46eb2100000001) +} + +func TestFqAdd(t *testing.T) { + lhs := new(Fq).SetOne() + rhs := new(Fq).SetOne() + exp := new(Fq).SetUint64(2) + res := new(Fq).Add(lhs, rhs) + require.NotNil(t, res) + require.True(t, res.Equal(exp)) + + // Fuzz test + for i := 0; i < 25; i++ { + // Divide by 4 to prevent overflow false errors + l := rand.Uint64() >> 2 + r := rand.Uint64() >> 2 + e := l + r + lhs.SetUint64(l) + rhs.SetUint64(r) + exp.SetUint64(e) + + a := new(Fq).Add(lhs, rhs) + require.NotNil(t, a) + require.Equal(t, exp, a) + } +} + +func TestFqSub(t *testing.T) { + lhs := new(Fq).SetOne() + rhs := new(Fq).SetOne() + exp := new(Fq).SetZero() + res := new(Fq).Sub(lhs, rhs) + require.NotNil(t, res) + require.True(t, res.Equal(exp)) + + // Fuzz test + for i := 0; i < 25; i++ { + // Divide by 4 to prevent overflow false errors + l := rand.Uint64() >> 2 + r := rand.Uint64() >> 2 + if l < r { + l, r = r, l + } + e := l - r + lhs.SetUint64(l) + rhs.SetUint64(r) + exp.SetUint64(e) + + a := new(Fq).Sub(lhs, rhs) + require.NotNil(t, a) + require.Equal(t, exp, a) + } +} + +func TestFqMul(t *testing.T) { + lhs := new(Fq).SetOne() + rhs := new(Fq).SetOne() + exp := new(Fq).SetOne() + res := new(Fq).Mul(lhs, rhs) + require.NotNil(t, res) + require.True(t, res.Equal(exp)) + + // Fuzz test + for i := 0; i < 25; i++ { + // Divide by 4 to prevent overflow false errors + l := rand.Uint32() + r := rand.Uint32() + e := uint64(l) * uint64(r) + lhs.SetUint64(uint64(l)) + rhs.SetUint64(uint64(r)) + exp.SetUint64(e) + + a := new(Fq).Mul(lhs, rhs) + require.NotNil(t, a) + require.Equal(t, exp, a) + } +} + +func TestFqDouble(t *testing.T) { + a := new(Fq).SetUint64(2) + e := new(Fq).SetUint64(4) + require.Equal(t, e, new(Fq).Double(a)) + + for i := 0; i < 25; i++ { + tv := rand.Uint32() + ttv := uint64(tv) * 2 + a = new(Fq).SetUint64(uint64(tv)) + e = new(Fq).SetUint64(ttv) + require.Equal(t, e, new(Fq).Double(a)) + } +} + +func TestFqSquare(t *testing.T) { + a := new(Fq).SetUint64(4) + e := new(Fq).SetUint64(16) + require.Equal(t, e, a.Square(a)) + + for i := 0; i < 25; i++ { + j := rand.Uint32() + exp := uint64(j) * uint64(j) + e.SetUint64(exp) + a.SetUint64(uint64(j)) + require.Equal(t, e, a.Square(a)) + } +} + +func TestFqNeg(t *testing.T) { + a := new(Fq).SetOne() + a.Neg(a) + e := &Fq{0x311bac8400000004, 0x891a63f02652a376, 0, 0} + require.Equal(t, e, a) + a.Neg(generator) + e = &Fq{0xf58a5e9400000014, 
0xad83f3b0bf9d314e, 0x2, 0x0} + require.Equal(t, e, a) +} + +func TestFqExp(t *testing.T) { + e := new(Fq).SetUint64(8) + a := new(Fq).SetUint64(2) + by := new(Fq).SetUint64(3) + require.Equal(t, e, a.Exp(a, by)) +} + +func TestFqSqrt(t *testing.T) { + t1 := new(Fq).SetUint64(2) + t2 := new(Fq).Neg(t1) + t3 := new(Fq).Square(t1) + _, wasSquare := t3.Sqrt(t3) + require.True(t, wasSquare) + require.True(t, t1.Equal(t3) || t2.Equal(t3)) + t1.SetUint64(5) + _, wasSquare = new(Fq).Sqrt(t1) + require.False(t, wasSquare) +} + +func TestFqInvert(t *testing.T) { + twoInv := &Fq{0xc623759080000001, 0x11234c7e04ca546e, 0x0000000000000000, 0x2000000000000000} + fiat_pasta_fq_to_montgomery( + (*fiat_pasta_fq_montgomery_domain_field_element)(twoInv), + (*fiat_pasta_fq_non_montgomery_domain_field_element)(twoInv), + ) + two := new(Fq).SetUint64(2) + a, inverted := new(Fq).Invert(two) + require.True(t, inverted) + require.Equal(t, a, twoInv) + + rootOfUnity := &Fq{ + 0xa70e2c1102b6d05f, + 0x9bb97ea3c106f049, + 0x9e5c4dfd492ae26e, + 0x2de6a9b8746d3f58, + } + fiat_pasta_fq_to_montgomery( + (*fiat_pasta_fq_montgomery_domain_field_element)(rootOfUnity), + (*fiat_pasta_fq_non_montgomery_domain_field_element)(rootOfUnity), + ) + rootOfUnityInv := &Fq{ + 0x57eecda0a84b6836, + 0x4ad38b9084b8a80c, + 0xf4c8f353124086c1, + 0x2235e1a7415bf936, + } + fiat_pasta_fq_to_montgomery( + (*fiat_pasta_fq_montgomery_domain_field_element)(rootOfUnityInv), + (*fiat_pasta_fq_non_montgomery_domain_field_element)(rootOfUnityInv), + ) + a, inverted = new(Fq).Invert(rootOfUnity) + require.True(t, inverted) + require.Equal(t, a, rootOfUnityInv) + + lhs := new(Fq).SetUint64(9) + rhs := new(Fq).SetUint64(3) + rhsInv, inverted := new(Fq).Invert(rhs) + require.True(t, inverted) + require.Equal(t, rhs, new(Fq).Mul(lhs, rhsInv)) + + rhs.SetZero() + _, inverted = new(Fq).Invert(rhs) + require.False(t, inverted) +} + +func TestFqCMove(t *testing.T) { + t1 := new(Fq).SetUint64(5) + t2 := new(Fq).SetUint64(10) + require.Equal(t, t1, new(Fq).CMove(t1, t2, 0)) + require.Equal(t, t2, new(Fq).CMove(t1, t2, 1)) +} + +func TestFqBytes(t *testing.T) { + t1 := new(Fq).SetUint64(99) + seq := t1.Bytes() + t2, err := new(Fq).SetBytes(&seq) + require.NoError(t, err) + require.Equal(t, t1, t2) + + for i := 0; i < 25; i++ { + t1.SetUint64(rand.Uint64()) + seq = t1.Bytes() + _, err = t2.SetBytes(&seq) + require.NoError(t, err) + require.Equal(t, t1, t2) + } +} + +func TestFqBigInt(t *testing.T) { + t1 := new(Fq).SetBigInt(big.NewInt(9999)) + t2 := new(Fq).SetBigInt(t1.BigInt()) + require.Equal(t, t1, t2) + + e := &Fq{0x7bb1416dea3d6ae3, 0x62f9108a340aa525, 0x303b3f30fcaa477f, 0x11c9ef5422d80a4d} + b := new( + big.Int, + ).SetBytes([]byte{9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9}) + t1.SetBigInt(b) + require.Equal(t, e, t1) + e[0] = 0x1095a9b315c2951e + e[1] = 0xbf4d8871d58a03b8 + e[2] = 0xcfc4c0cf0355b880 + e[3] = 0x2e3610abdd27f5b2 + b.Neg(b) + t1.SetBigInt(b) + require.Equal(t, e, t1) +} + +func TestFqSetBool(t *testing.T) { + require.Equal(t, new(Fq).SetOne(), new(Fq).SetBool(true)) + require.Equal(t, new(Fq).SetZero(), new(Fq).SetBool(false)) +} + +func TestFqSetBytesWide(t *testing.T) { + e := &Fq{0xe22bd0d1b22cc43e, 0x6b84e5b52490a7c8, 0x264262941ac9e229, 0x27dcfdf361ce4254} + fiat_pasta_fq_to_montgomery( + (*fiat_pasta_fq_montgomery_domain_field_element)(e), + (*fiat_pasta_fq_non_montgomery_domain_field_element)(e), + ) + a := 
new(Fq).SetBytesWide(&[64]byte{ + 0x69, 0x23, 0x5a, 0x0b, 0xce, 0x0c, 0xa8, 0x64, + 0x3c, 0x78, 0xbc, 0x01, 0x05, 0xef, 0xf2, 0x84, + 0xde, 0xbb, 0x6b, 0xc8, 0x63, 0x5e, 0x6e, 0x69, + 0x62, 0xcc, 0xc6, 0x2d, 0xf5, 0x72, 0x40, 0x92, + 0x28, 0x11, 0xd6, 0xc8, 0x07, 0xa5, 0x88, 0x82, + 0xfe, 0xe3, 0x97, 0xf6, 0x1e, 0xfb, 0x2e, 0x3b, + 0x27, 0x5f, 0x85, 0x06, 0x8d, 0x99, 0xa4, 0x75, + 0xc0, 0x2c, 0x71, 0x69, 0x9e, 0x58, 0xea, 0x52, + }) + require.Equal(t, e, a) +} diff --git a/core/curves/native/pasta/fq/pasta_fq.go b/core/curves/native/pasta/fq/pasta_fq.go new file mode 100755 index 0000000..ad6b214 --- /dev/null +++ b/core/curves/native/pasta/fq/pasta_fq.go @@ -0,0 +1,1518 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +// Autogenerated: './src/ExtractionOCaml/word_by_word_montgomery' --lang Go pasta_fq 64 '2^254 + 45560315531506369815346746415080538113' +// +// curve description: pasta_fq +// +// machine_wordsize = 64 (from "64") +// +// requested operations: (all) +// +// m = 0x40000000000000000000000000000000224698fc0994a8dd8c46eb2100000001 (from "2^254 + 45560315531506369815346746415080538113") +// +// +// +// NOTE: In addition to the bounds specified above each function, all +// +// functions synthesized for this Montgomery arithmetic require the +// +// input to be strictly less than the prime modulus (m), and also +// +// require the input to be in the unique saturated representation. +// +// All functions also ensure that these two properties are true of +// +// return values. +// +// +// +// Computed values: +// +// eval z = z[0] + (z[1] << 64) + (z[2] << 128) + (z[3] << 192) +// +// bytes_eval z = z[0] + (z[1] << 8) + (z[2] << 16) + (z[3] << 24) + (z[4] << 32) + (z[5] << 40) + (z[6] << 48) + (z[7] << 56) + (z[8] << 64) + (z[9] << 72) + (z[10] << 80) + (z[11] << 88) + (z[12] << 96) + (z[13] << 104) + (z[14] << 112) + (z[15] << 120) + (z[16] << 128) + (z[17] << 136) + (z[18] << 144) + (z[19] << 152) + (z[20] << 160) + (z[21] << 168) + (z[22] << 176) + (z[23] << 184) + (z[24] << 192) + (z[25] << 200) + (z[26] << 208) + (z[27] << 216) + (z[28] << 224) + (z[29] << 232) + (z[30] << 240) + (z[31] << 248) +// +// twos_complement_eval z = let x1 := z[0] + (z[1] << 64) + (z[2] << 128) + (z[3] << 192) in +// +// if x1 & (2^256-1) < 2^255 then x1 & (2^256-1) else (x1 & (2^256-1)) - 2^256 + +package fq + +import "math/bits" + +type ( + fiat_pasta_fq_uint1 uint64 // We use uint64 instead of a more narrow type for performance reasons; see https://github.com/mit-plv/fiat-crypto/pull/1006#issuecomment-892625927 + fiat_pasta_fq_int1 int64 // We use uint64 instead of a more narrow type for performance reasons; see https://github.com/mit-plv/fiat-crypto/pull/1006#issuecomment-892625927 +) + +// The type fiat_pasta_fq_montgomery_domain_field_element is a field element in the Montgomery domain. +// +// Bounds: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +type fiat_pasta_fq_montgomery_domain_field_element [4]uint64 + +// The type fiat_pasta_fq_non_montgomery_domain_field_element is a field element NOT in the Montgomery domain. 
+// +// Bounds: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +type fiat_pasta_fq_non_montgomery_domain_field_element [4]uint64 + +// The function fiat_pasta_fq_addcarryx_u64 is a thin wrapper around bits.Add64 that uses fiat_pasta_fq_uint1 rather than uint64 +func fiat_pasta_fq_addcarryx_u64( + x uint64, + y uint64, + carry fiat_pasta_fq_uint1, +) (uint64, fiat_pasta_fq_uint1) { + sum, carryOut := bits.Add64(x, y, uint64(carry)) + return sum, fiat_pasta_fq_uint1(carryOut) +} + +// The function fiat_pasta_fq_subborrowx_u64 is a thin wrapper around bits.Sub64 that uses fiat_pasta_fq_uint1 rather than uint64 +func fiat_pasta_fq_subborrowx_u64( + x uint64, + y uint64, + carry fiat_pasta_fq_uint1, +) (uint64, fiat_pasta_fq_uint1) { + sum, carryOut := bits.Sub64(x, y, uint64(carry)) + return sum, fiat_pasta_fq_uint1(carryOut) +} + +// The function fiat_pasta_fq_cmovznz_u64 is a single-word conditional move. +// +// Postconditions: +// +// out1 = (if arg1 = 0 then arg2 else arg3) +// +// Input Bounds: +// +// arg1: [0x0 ~> 0x1] +// arg2: [0x0 ~> 0xffffffffffffffff] +// arg3: [0x0 ~> 0xffffffffffffffff] +// +// Output Bounds: +// +// out1: [0x0 ~> 0xffffffffffffffff] +func fiat_pasta_fq_cmovznz_u64(out1 *uint64, arg1 fiat_pasta_fq_uint1, arg2 uint64, arg3 uint64) { + x1 := arg1 + x2 := (uint64((fiat_pasta_fq_int1(0x0) - fiat_pasta_fq_int1(x1))) & 0xffffffffffffffff) + x3 := ((x2 & arg3) | ((^x2) & arg2)) + *out1 = x3 +} + +// The function fiat_pasta_fq_mul multiplies two field elements in the Montgomery domain. +// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// 0 ≤ eval arg2 < m +// +// Postconditions: +// +// eval (from_montgomery out1) mod m = (eval (from_montgomery arg1) * eval (from_montgomery arg2)) mod m +// 0 ≤ eval out1 < m +func fiat_pasta_fq_mul( + out1 *fiat_pasta_fq_montgomery_domain_field_element, + arg1 *fiat_pasta_fq_montgomery_domain_field_element, + arg2 *fiat_pasta_fq_montgomery_domain_field_element, +) { + x1 := arg1[1] + x2 := arg1[2] + x3 := arg1[3] + x4 := arg1[0] + var x5 uint64 + var x6 uint64 + x6, x5 = bits.Mul64(x4, arg2[3]) + var x7 uint64 + var x8 uint64 + x8, x7 = bits.Mul64(x4, arg2[2]) + var x9 uint64 + var x10 uint64 + x10, x9 = bits.Mul64(x4, arg2[1]) + var x11 uint64 + var x12 uint64 + x12, x11 = bits.Mul64(x4, arg2[0]) + var x13 uint64 + var x14 fiat_pasta_fq_uint1 + x13, x14 = fiat_pasta_fq_addcarryx_u64(x12, x9, 0x0) + var x15 uint64 + var x16 fiat_pasta_fq_uint1 + x15, x16 = fiat_pasta_fq_addcarryx_u64(x10, x7, x14) + var x17 uint64 + var x18 fiat_pasta_fq_uint1 + x17, x18 = fiat_pasta_fq_addcarryx_u64(x8, x5, x16) + x19 := (uint64(x18) + x6) + var x20 uint64 + _, x20 = bits.Mul64(x11, 0x8c46eb20ffffffff) + var x22 uint64 + var x23 uint64 + x23, x22 = bits.Mul64(x20, 0x4000000000000000) + var x24 uint64 + var x25 uint64 + x25, x24 = bits.Mul64(x20, 0x224698fc0994a8dd) + var x26 uint64 + var x27 uint64 + x27, x26 = bits.Mul64(x20, 0x8c46eb2100000001) + var x28 uint64 + var x29 fiat_pasta_fq_uint1 + x28, x29 = fiat_pasta_fq_addcarryx_u64(x27, x24, 0x0) + x30 := (uint64(x29) + x25) + var x32 fiat_pasta_fq_uint1 + _, x32 = fiat_pasta_fq_addcarryx_u64(x11, x26, 0x0) + var x33 uint64 + var x34 fiat_pasta_fq_uint1 + x33, x34 = fiat_pasta_fq_addcarryx_u64(x13, x28, x32) + var x35 uint64 + var x36 fiat_pasta_fq_uint1 + x35, x36 = fiat_pasta_fq_addcarryx_u64(x15, x30, x34) + var x37 uint64 + var x38 fiat_pasta_fq_uint1 + x37, x38 = fiat_pasta_fq_addcarryx_u64(x17, x22, x36) + var x39 uint64 + var 
x40 fiat_pasta_fq_uint1 + x39, x40 = fiat_pasta_fq_addcarryx_u64(x19, x23, x38) + var x41 uint64 + var x42 uint64 + x42, x41 = bits.Mul64(x1, arg2[3]) + var x43 uint64 + var x44 uint64 + x44, x43 = bits.Mul64(x1, arg2[2]) + var x45 uint64 + var x46 uint64 + x46, x45 = bits.Mul64(x1, arg2[1]) + var x47 uint64 + var x48 uint64 + x48, x47 = bits.Mul64(x1, arg2[0]) + var x49 uint64 + var x50 fiat_pasta_fq_uint1 + x49, x50 = fiat_pasta_fq_addcarryx_u64(x48, x45, 0x0) + var x51 uint64 + var x52 fiat_pasta_fq_uint1 + x51, x52 = fiat_pasta_fq_addcarryx_u64(x46, x43, x50) + var x53 uint64 + var x54 fiat_pasta_fq_uint1 + x53, x54 = fiat_pasta_fq_addcarryx_u64(x44, x41, x52) + x55 := (uint64(x54) + x42) + var x56 uint64 + var x57 fiat_pasta_fq_uint1 + x56, x57 = fiat_pasta_fq_addcarryx_u64(x33, x47, 0x0) + var x58 uint64 + var x59 fiat_pasta_fq_uint1 + x58, x59 = fiat_pasta_fq_addcarryx_u64(x35, x49, x57) + var x60 uint64 + var x61 fiat_pasta_fq_uint1 + x60, x61 = fiat_pasta_fq_addcarryx_u64(x37, x51, x59) + var x62 uint64 + var x63 fiat_pasta_fq_uint1 + x62, x63 = fiat_pasta_fq_addcarryx_u64(x39, x53, x61) + var x64 uint64 + var x65 fiat_pasta_fq_uint1 + x64, x65 = fiat_pasta_fq_addcarryx_u64(uint64(x40), x55, x63) + var x66 uint64 + _, x66 = bits.Mul64(x56, 0x8c46eb20ffffffff) + var x68 uint64 + var x69 uint64 + x69, x68 = bits.Mul64(x66, 0x4000000000000000) + var x70 uint64 + var x71 uint64 + x71, x70 = bits.Mul64(x66, 0x224698fc0994a8dd) + var x72 uint64 + var x73 uint64 + x73, x72 = bits.Mul64(x66, 0x8c46eb2100000001) + var x74 uint64 + var x75 fiat_pasta_fq_uint1 + x74, x75 = fiat_pasta_fq_addcarryx_u64(x73, x70, 0x0) + x76 := (uint64(x75) + x71) + var x78 fiat_pasta_fq_uint1 + _, x78 = fiat_pasta_fq_addcarryx_u64(x56, x72, 0x0) + var x79 uint64 + var x80 fiat_pasta_fq_uint1 + x79, x80 = fiat_pasta_fq_addcarryx_u64(x58, x74, x78) + var x81 uint64 + var x82 fiat_pasta_fq_uint1 + x81, x82 = fiat_pasta_fq_addcarryx_u64(x60, x76, x80) + var x83 uint64 + var x84 fiat_pasta_fq_uint1 + x83, x84 = fiat_pasta_fq_addcarryx_u64(x62, x68, x82) + var x85 uint64 + var x86 fiat_pasta_fq_uint1 + x85, x86 = fiat_pasta_fq_addcarryx_u64(x64, x69, x84) + x87 := (uint64(x86) + uint64(x65)) + var x88 uint64 + var x89 uint64 + x89, x88 = bits.Mul64(x2, arg2[3]) + var x90 uint64 + var x91 uint64 + x91, x90 = bits.Mul64(x2, arg2[2]) + var x92 uint64 + var x93 uint64 + x93, x92 = bits.Mul64(x2, arg2[1]) + var x94 uint64 + var x95 uint64 + x95, x94 = bits.Mul64(x2, arg2[0]) + var x96 uint64 + var x97 fiat_pasta_fq_uint1 + x96, x97 = fiat_pasta_fq_addcarryx_u64(x95, x92, 0x0) + var x98 uint64 + var x99 fiat_pasta_fq_uint1 + x98, x99 = fiat_pasta_fq_addcarryx_u64(x93, x90, x97) + var x100 uint64 + var x101 fiat_pasta_fq_uint1 + x100, x101 = fiat_pasta_fq_addcarryx_u64(x91, x88, x99) + x102 := (uint64(x101) + x89) + var x103 uint64 + var x104 fiat_pasta_fq_uint1 + x103, x104 = fiat_pasta_fq_addcarryx_u64(x79, x94, 0x0) + var x105 uint64 + var x106 fiat_pasta_fq_uint1 + x105, x106 = fiat_pasta_fq_addcarryx_u64(x81, x96, x104) + var x107 uint64 + var x108 fiat_pasta_fq_uint1 + x107, x108 = fiat_pasta_fq_addcarryx_u64(x83, x98, x106) + var x109 uint64 + var x110 fiat_pasta_fq_uint1 + x109, x110 = fiat_pasta_fq_addcarryx_u64(x85, x100, x108) + var x111 uint64 + var x112 fiat_pasta_fq_uint1 + x111, x112 = fiat_pasta_fq_addcarryx_u64(x87, x102, x110) + var x113 uint64 + _, x113 = bits.Mul64(x103, 0x8c46eb20ffffffff) + var x115 uint64 + var x116 uint64 + x116, x115 = bits.Mul64(x113, 0x4000000000000000) + var x117 uint64 + var 
x118 uint64 + x118, x117 = bits.Mul64(x113, 0x224698fc0994a8dd) + var x119 uint64 + var x120 uint64 + x120, x119 = bits.Mul64(x113, 0x8c46eb2100000001) + var x121 uint64 + var x122 fiat_pasta_fq_uint1 + x121, x122 = fiat_pasta_fq_addcarryx_u64(x120, x117, 0x0) + x123 := (uint64(x122) + x118) + var x125 fiat_pasta_fq_uint1 + _, x125 = fiat_pasta_fq_addcarryx_u64(x103, x119, 0x0) + var x126 uint64 + var x127 fiat_pasta_fq_uint1 + x126, x127 = fiat_pasta_fq_addcarryx_u64(x105, x121, x125) + var x128 uint64 + var x129 fiat_pasta_fq_uint1 + x128, x129 = fiat_pasta_fq_addcarryx_u64(x107, x123, x127) + var x130 uint64 + var x131 fiat_pasta_fq_uint1 + x130, x131 = fiat_pasta_fq_addcarryx_u64(x109, x115, x129) + var x132 uint64 + var x133 fiat_pasta_fq_uint1 + x132, x133 = fiat_pasta_fq_addcarryx_u64(x111, x116, x131) + x134 := (uint64(x133) + uint64(x112)) + var x135 uint64 + var x136 uint64 + x136, x135 = bits.Mul64(x3, arg2[3]) + var x137 uint64 + var x138 uint64 + x138, x137 = bits.Mul64(x3, arg2[2]) + var x139 uint64 + var x140 uint64 + x140, x139 = bits.Mul64(x3, arg2[1]) + var x141 uint64 + var x142 uint64 + x142, x141 = bits.Mul64(x3, arg2[0]) + var x143 uint64 + var x144 fiat_pasta_fq_uint1 + x143, x144 = fiat_pasta_fq_addcarryx_u64(x142, x139, 0x0) + var x145 uint64 + var x146 fiat_pasta_fq_uint1 + x145, x146 = fiat_pasta_fq_addcarryx_u64(x140, x137, x144) + var x147 uint64 + var x148 fiat_pasta_fq_uint1 + x147, x148 = fiat_pasta_fq_addcarryx_u64(x138, x135, x146) + x149 := (uint64(x148) + x136) + var x150 uint64 + var x151 fiat_pasta_fq_uint1 + x150, x151 = fiat_pasta_fq_addcarryx_u64(x126, x141, 0x0) + var x152 uint64 + var x153 fiat_pasta_fq_uint1 + x152, x153 = fiat_pasta_fq_addcarryx_u64(x128, x143, x151) + var x154 uint64 + var x155 fiat_pasta_fq_uint1 + x154, x155 = fiat_pasta_fq_addcarryx_u64(x130, x145, x153) + var x156 uint64 + var x157 fiat_pasta_fq_uint1 + x156, x157 = fiat_pasta_fq_addcarryx_u64(x132, x147, x155) + var x158 uint64 + var x159 fiat_pasta_fq_uint1 + x158, x159 = fiat_pasta_fq_addcarryx_u64(x134, x149, x157) + var x160 uint64 + _, x160 = bits.Mul64(x150, 0x8c46eb20ffffffff) + var x162 uint64 + var x163 uint64 + x163, x162 = bits.Mul64(x160, 0x4000000000000000) + var x164 uint64 + var x165 uint64 + x165, x164 = bits.Mul64(x160, 0x224698fc0994a8dd) + var x166 uint64 + var x167 uint64 + x167, x166 = bits.Mul64(x160, 0x8c46eb2100000001) + var x168 uint64 + var x169 fiat_pasta_fq_uint1 + x168, x169 = fiat_pasta_fq_addcarryx_u64(x167, x164, 0x0) + x170 := (uint64(x169) + x165) + var x172 fiat_pasta_fq_uint1 + _, x172 = fiat_pasta_fq_addcarryx_u64(x150, x166, 0x0) + var x173 uint64 + var x174 fiat_pasta_fq_uint1 + x173, x174 = fiat_pasta_fq_addcarryx_u64(x152, x168, x172) + var x175 uint64 + var x176 fiat_pasta_fq_uint1 + x175, x176 = fiat_pasta_fq_addcarryx_u64(x154, x170, x174) + var x177 uint64 + var x178 fiat_pasta_fq_uint1 + x177, x178 = fiat_pasta_fq_addcarryx_u64(x156, x162, x176) + var x179 uint64 + var x180 fiat_pasta_fq_uint1 + x179, x180 = fiat_pasta_fq_addcarryx_u64(x158, x163, x178) + x181 := (uint64(x180) + uint64(x159)) + var x182 uint64 + var x183 fiat_pasta_fq_uint1 + x182, x183 = fiat_pasta_fq_subborrowx_u64(x173, 0x8c46eb2100000001, 0x0) + var x184 uint64 + var x185 fiat_pasta_fq_uint1 + x184, x185 = fiat_pasta_fq_subborrowx_u64(x175, 0x224698fc0994a8dd, x183) + var x186 uint64 + var x187 fiat_pasta_fq_uint1 + x186, x187 = fiat_pasta_fq_subborrowx_u64(x177, uint64(0x0), x185) + var x188 uint64 + var x189 fiat_pasta_fq_uint1 + x188, x189 = 
fiat_pasta_fq_subborrowx_u64(x179, 0x4000000000000000, x187) + var x191 fiat_pasta_fq_uint1 + _, x191 = fiat_pasta_fq_subborrowx_u64(x181, uint64(0x0), x189) + var x192 uint64 + fiat_pasta_fq_cmovznz_u64(&x192, x191, x182, x173) + var x193 uint64 + fiat_pasta_fq_cmovznz_u64(&x193, x191, x184, x175) + var x194 uint64 + fiat_pasta_fq_cmovznz_u64(&x194, x191, x186, x177) + var x195 uint64 + fiat_pasta_fq_cmovznz_u64(&x195, x191, x188, x179) + out1[0] = x192 + out1[1] = x193 + out1[2] = x194 + out1[3] = x195 +} + +// The function fiat_pasta_fq_square squares a field element in the Montgomery domain. +// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// +// Postconditions: +// +// eval (from_montgomery out1) mod m = (eval (from_montgomery arg1) * eval (from_montgomery arg1)) mod m +// 0 ≤ eval out1 < m +func fiat_pasta_fq_square( + out1 *fiat_pasta_fq_montgomery_domain_field_element, + arg1 *fiat_pasta_fq_montgomery_domain_field_element, +) { + x1 := arg1[1] + x2 := arg1[2] + x3 := arg1[3] + x4 := arg1[0] + var x5 uint64 + var x6 uint64 + x6, x5 = bits.Mul64(x4, arg1[3]) + var x7 uint64 + var x8 uint64 + x8, x7 = bits.Mul64(x4, arg1[2]) + var x9 uint64 + var x10 uint64 + x10, x9 = bits.Mul64(x4, arg1[1]) + var x11 uint64 + var x12 uint64 + x12, x11 = bits.Mul64(x4, arg1[0]) + var x13 uint64 + var x14 fiat_pasta_fq_uint1 + x13, x14 = fiat_pasta_fq_addcarryx_u64(x12, x9, 0x0) + var x15 uint64 + var x16 fiat_pasta_fq_uint1 + x15, x16 = fiat_pasta_fq_addcarryx_u64(x10, x7, x14) + var x17 uint64 + var x18 fiat_pasta_fq_uint1 + x17, x18 = fiat_pasta_fq_addcarryx_u64(x8, x5, x16) + x19 := (uint64(x18) + x6) + var x20 uint64 + _, x20 = bits.Mul64(x11, 0x8c46eb20ffffffff) + var x22 uint64 + var x23 uint64 + x23, x22 = bits.Mul64(x20, 0x4000000000000000) + var x24 uint64 + var x25 uint64 + x25, x24 = bits.Mul64(x20, 0x224698fc0994a8dd) + var x26 uint64 + var x27 uint64 + x27, x26 = bits.Mul64(x20, 0x8c46eb2100000001) + var x28 uint64 + var x29 fiat_pasta_fq_uint1 + x28, x29 = fiat_pasta_fq_addcarryx_u64(x27, x24, 0x0) + x30 := (uint64(x29) + x25) + var x32 fiat_pasta_fq_uint1 + _, x32 = fiat_pasta_fq_addcarryx_u64(x11, x26, 0x0) + var x33 uint64 + var x34 fiat_pasta_fq_uint1 + x33, x34 = fiat_pasta_fq_addcarryx_u64(x13, x28, x32) + var x35 uint64 + var x36 fiat_pasta_fq_uint1 + x35, x36 = fiat_pasta_fq_addcarryx_u64(x15, x30, x34) + var x37 uint64 + var x38 fiat_pasta_fq_uint1 + x37, x38 = fiat_pasta_fq_addcarryx_u64(x17, x22, x36) + var x39 uint64 + var x40 fiat_pasta_fq_uint1 + x39, x40 = fiat_pasta_fq_addcarryx_u64(x19, x23, x38) + var x41 uint64 + var x42 uint64 + x42, x41 = bits.Mul64(x1, arg1[3]) + var x43 uint64 + var x44 uint64 + x44, x43 = bits.Mul64(x1, arg1[2]) + var x45 uint64 + var x46 uint64 + x46, x45 = bits.Mul64(x1, arg1[1]) + var x47 uint64 + var x48 uint64 + x48, x47 = bits.Mul64(x1, arg1[0]) + var x49 uint64 + var x50 fiat_pasta_fq_uint1 + x49, x50 = fiat_pasta_fq_addcarryx_u64(x48, x45, 0x0) + var x51 uint64 + var x52 fiat_pasta_fq_uint1 + x51, x52 = fiat_pasta_fq_addcarryx_u64(x46, x43, x50) + var x53 uint64 + var x54 fiat_pasta_fq_uint1 + x53, x54 = fiat_pasta_fq_addcarryx_u64(x44, x41, x52) + x55 := (uint64(x54) + x42) + var x56 uint64 + var x57 fiat_pasta_fq_uint1 + x56, x57 = fiat_pasta_fq_addcarryx_u64(x33, x47, 0x0) + var x58 uint64 + var x59 fiat_pasta_fq_uint1 + x58, x59 = fiat_pasta_fq_addcarryx_u64(x35, x49, x57) + var x60 uint64 + var x61 fiat_pasta_fq_uint1 + x60, x61 = fiat_pasta_fq_addcarryx_u64(x37, x51, x59) + var x62 uint64 + var x63 fiat_pasta_fq_uint1 + x62, x63 = 
fiat_pasta_fq_addcarryx_u64(x39, x53, x61) + var x64 uint64 + var x65 fiat_pasta_fq_uint1 + x64, x65 = fiat_pasta_fq_addcarryx_u64(uint64(x40), x55, x63) + var x66 uint64 + _, x66 = bits.Mul64(x56, 0x8c46eb20ffffffff) + var x68 uint64 + var x69 uint64 + x69, x68 = bits.Mul64(x66, 0x4000000000000000) + var x70 uint64 + var x71 uint64 + x71, x70 = bits.Mul64(x66, 0x224698fc0994a8dd) + var x72 uint64 + var x73 uint64 + x73, x72 = bits.Mul64(x66, 0x8c46eb2100000001) + var x74 uint64 + var x75 fiat_pasta_fq_uint1 + x74, x75 = fiat_pasta_fq_addcarryx_u64(x73, x70, 0x0) + x76 := (uint64(x75) + x71) + var x78 fiat_pasta_fq_uint1 + _, x78 = fiat_pasta_fq_addcarryx_u64(x56, x72, 0x0) + var x79 uint64 + var x80 fiat_pasta_fq_uint1 + x79, x80 = fiat_pasta_fq_addcarryx_u64(x58, x74, x78) + var x81 uint64 + var x82 fiat_pasta_fq_uint1 + x81, x82 = fiat_pasta_fq_addcarryx_u64(x60, x76, x80) + var x83 uint64 + var x84 fiat_pasta_fq_uint1 + x83, x84 = fiat_pasta_fq_addcarryx_u64(x62, x68, x82) + var x85 uint64 + var x86 fiat_pasta_fq_uint1 + x85, x86 = fiat_pasta_fq_addcarryx_u64(x64, x69, x84) + x87 := (uint64(x86) + uint64(x65)) + var x88 uint64 + var x89 uint64 + x89, x88 = bits.Mul64(x2, arg1[3]) + var x90 uint64 + var x91 uint64 + x91, x90 = bits.Mul64(x2, arg1[2]) + var x92 uint64 + var x93 uint64 + x93, x92 = bits.Mul64(x2, arg1[1]) + var x94 uint64 + var x95 uint64 + x95, x94 = bits.Mul64(x2, arg1[0]) + var x96 uint64 + var x97 fiat_pasta_fq_uint1 + x96, x97 = fiat_pasta_fq_addcarryx_u64(x95, x92, 0x0) + var x98 uint64 + var x99 fiat_pasta_fq_uint1 + x98, x99 = fiat_pasta_fq_addcarryx_u64(x93, x90, x97) + var x100 uint64 + var x101 fiat_pasta_fq_uint1 + x100, x101 = fiat_pasta_fq_addcarryx_u64(x91, x88, x99) + x102 := (uint64(x101) + x89) + var x103 uint64 + var x104 fiat_pasta_fq_uint1 + x103, x104 = fiat_pasta_fq_addcarryx_u64(x79, x94, 0x0) + var x105 uint64 + var x106 fiat_pasta_fq_uint1 + x105, x106 = fiat_pasta_fq_addcarryx_u64(x81, x96, x104) + var x107 uint64 + var x108 fiat_pasta_fq_uint1 + x107, x108 = fiat_pasta_fq_addcarryx_u64(x83, x98, x106) + var x109 uint64 + var x110 fiat_pasta_fq_uint1 + x109, x110 = fiat_pasta_fq_addcarryx_u64(x85, x100, x108) + var x111 uint64 + var x112 fiat_pasta_fq_uint1 + x111, x112 = fiat_pasta_fq_addcarryx_u64(x87, x102, x110) + var x113 uint64 + _, x113 = bits.Mul64(x103, 0x8c46eb20ffffffff) + var x115 uint64 + var x116 uint64 + x116, x115 = bits.Mul64(x113, 0x4000000000000000) + var x117 uint64 + var x118 uint64 + x118, x117 = bits.Mul64(x113, 0x224698fc0994a8dd) + var x119 uint64 + var x120 uint64 + x120, x119 = bits.Mul64(x113, 0x8c46eb2100000001) + var x121 uint64 + var x122 fiat_pasta_fq_uint1 + x121, x122 = fiat_pasta_fq_addcarryx_u64(x120, x117, 0x0) + x123 := (uint64(x122) + x118) + var x125 fiat_pasta_fq_uint1 + _, x125 = fiat_pasta_fq_addcarryx_u64(x103, x119, 0x0) + var x126 uint64 + var x127 fiat_pasta_fq_uint1 + x126, x127 = fiat_pasta_fq_addcarryx_u64(x105, x121, x125) + var x128 uint64 + var x129 fiat_pasta_fq_uint1 + x128, x129 = fiat_pasta_fq_addcarryx_u64(x107, x123, x127) + var x130 uint64 + var x131 fiat_pasta_fq_uint1 + x130, x131 = fiat_pasta_fq_addcarryx_u64(x109, x115, x129) + var x132 uint64 + var x133 fiat_pasta_fq_uint1 + x132, x133 = fiat_pasta_fq_addcarryx_u64(x111, x116, x131) + x134 := (uint64(x133) + uint64(x112)) + var x135 uint64 + var x136 uint64 + x136, x135 = bits.Mul64(x3, arg1[3]) + var x137 uint64 + var x138 uint64 + x138, x137 = bits.Mul64(x3, arg1[2]) + var x139 uint64 + var x140 uint64 + x140, x139 = 
bits.Mul64(x3, arg1[1]) + var x141 uint64 + var x142 uint64 + x142, x141 = bits.Mul64(x3, arg1[0]) + var x143 uint64 + var x144 fiat_pasta_fq_uint1 + x143, x144 = fiat_pasta_fq_addcarryx_u64(x142, x139, 0x0) + var x145 uint64 + var x146 fiat_pasta_fq_uint1 + x145, x146 = fiat_pasta_fq_addcarryx_u64(x140, x137, x144) + var x147 uint64 + var x148 fiat_pasta_fq_uint1 + x147, x148 = fiat_pasta_fq_addcarryx_u64(x138, x135, x146) + x149 := (uint64(x148) + x136) + var x150 uint64 + var x151 fiat_pasta_fq_uint1 + x150, x151 = fiat_pasta_fq_addcarryx_u64(x126, x141, 0x0) + var x152 uint64 + var x153 fiat_pasta_fq_uint1 + x152, x153 = fiat_pasta_fq_addcarryx_u64(x128, x143, x151) + var x154 uint64 + var x155 fiat_pasta_fq_uint1 + x154, x155 = fiat_pasta_fq_addcarryx_u64(x130, x145, x153) + var x156 uint64 + var x157 fiat_pasta_fq_uint1 + x156, x157 = fiat_pasta_fq_addcarryx_u64(x132, x147, x155) + var x158 uint64 + var x159 fiat_pasta_fq_uint1 + x158, x159 = fiat_pasta_fq_addcarryx_u64(x134, x149, x157) + var x160 uint64 + _, x160 = bits.Mul64(x150, 0x8c46eb20ffffffff) + var x162 uint64 + var x163 uint64 + x163, x162 = bits.Mul64(x160, 0x4000000000000000) + var x164 uint64 + var x165 uint64 + x165, x164 = bits.Mul64(x160, 0x224698fc0994a8dd) + var x166 uint64 + var x167 uint64 + x167, x166 = bits.Mul64(x160, 0x8c46eb2100000001) + var x168 uint64 + var x169 fiat_pasta_fq_uint1 + x168, x169 = fiat_pasta_fq_addcarryx_u64(x167, x164, 0x0) + x170 := (uint64(x169) + x165) + var x172 fiat_pasta_fq_uint1 + _, x172 = fiat_pasta_fq_addcarryx_u64(x150, x166, 0x0) + var x173 uint64 + var x174 fiat_pasta_fq_uint1 + x173, x174 = fiat_pasta_fq_addcarryx_u64(x152, x168, x172) + var x175 uint64 + var x176 fiat_pasta_fq_uint1 + x175, x176 = fiat_pasta_fq_addcarryx_u64(x154, x170, x174) + var x177 uint64 + var x178 fiat_pasta_fq_uint1 + x177, x178 = fiat_pasta_fq_addcarryx_u64(x156, x162, x176) + var x179 uint64 + var x180 fiat_pasta_fq_uint1 + x179, x180 = fiat_pasta_fq_addcarryx_u64(x158, x163, x178) + x181 := (uint64(x180) + uint64(x159)) + var x182 uint64 + var x183 fiat_pasta_fq_uint1 + x182, x183 = fiat_pasta_fq_subborrowx_u64(x173, 0x8c46eb2100000001, 0x0) + var x184 uint64 + var x185 fiat_pasta_fq_uint1 + x184, x185 = fiat_pasta_fq_subborrowx_u64(x175, 0x224698fc0994a8dd, x183) + var x186 uint64 + var x187 fiat_pasta_fq_uint1 + x186, x187 = fiat_pasta_fq_subborrowx_u64(x177, uint64(0x0), x185) + var x188 uint64 + var x189 fiat_pasta_fq_uint1 + x188, x189 = fiat_pasta_fq_subborrowx_u64(x179, 0x4000000000000000, x187) + var x191 fiat_pasta_fq_uint1 + _, x191 = fiat_pasta_fq_subborrowx_u64(x181, uint64(0x0), x189) + var x192 uint64 + fiat_pasta_fq_cmovznz_u64(&x192, x191, x182, x173) + var x193 uint64 + fiat_pasta_fq_cmovznz_u64(&x193, x191, x184, x175) + var x194 uint64 + fiat_pasta_fq_cmovznz_u64(&x194, x191, x186, x177) + var x195 uint64 + fiat_pasta_fq_cmovznz_u64(&x195, x191, x188, x179) + out1[0] = x192 + out1[1] = x193 + out1[2] = x194 + out1[3] = x195 +} + +// The function fiat_pasta_fq_add adds two field elements in the Montgomery domain. 
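+//
+// The sum is computed with a full 256-bit add-with-carry chain, after which the
+// modulus is conditionally subtracted: the subborrowx/cmovznz sequence at the end
+// selects between the raw sum and the reduced sum without branching.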
+// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// 0 ≤ eval arg2 < m +// +// Postconditions: +// +// eval (from_montgomery out1) mod m = (eval (from_montgomery arg1) + eval (from_montgomery arg2)) mod m +// 0 ≤ eval out1 < m +func fiat_pasta_fq_add( + out1 *fiat_pasta_fq_montgomery_domain_field_element, + arg1 *fiat_pasta_fq_montgomery_domain_field_element, + arg2 *fiat_pasta_fq_montgomery_domain_field_element, +) { + var x1 uint64 + var x2 fiat_pasta_fq_uint1 + x1, x2 = fiat_pasta_fq_addcarryx_u64(arg1[0], arg2[0], 0x0) + var x3 uint64 + var x4 fiat_pasta_fq_uint1 + x3, x4 = fiat_pasta_fq_addcarryx_u64(arg1[1], arg2[1], x2) + var x5 uint64 + var x6 fiat_pasta_fq_uint1 + x5, x6 = fiat_pasta_fq_addcarryx_u64(arg1[2], arg2[2], x4) + var x7 uint64 + var x8 fiat_pasta_fq_uint1 + x7, x8 = fiat_pasta_fq_addcarryx_u64(arg1[3], arg2[3], x6) + var x9 uint64 + var x10 fiat_pasta_fq_uint1 + x9, x10 = fiat_pasta_fq_subborrowx_u64(x1, 0x8c46eb2100000001, 0x0) + var x11 uint64 + var x12 fiat_pasta_fq_uint1 + x11, x12 = fiat_pasta_fq_subborrowx_u64(x3, 0x224698fc0994a8dd, x10) + var x13 uint64 + var x14 fiat_pasta_fq_uint1 + x13, x14 = fiat_pasta_fq_subborrowx_u64(x5, uint64(0x0), x12) + var x15 uint64 + var x16 fiat_pasta_fq_uint1 + x15, x16 = fiat_pasta_fq_subborrowx_u64(x7, 0x4000000000000000, x14) + var x18 fiat_pasta_fq_uint1 + _, x18 = fiat_pasta_fq_subborrowx_u64(uint64(x8), uint64(0x0), x16) + var x19 uint64 + fiat_pasta_fq_cmovznz_u64(&x19, x18, x9, x1) + var x20 uint64 + fiat_pasta_fq_cmovznz_u64(&x20, x18, x11, x3) + var x21 uint64 + fiat_pasta_fq_cmovznz_u64(&x21, x18, x13, x5) + var x22 uint64 + fiat_pasta_fq_cmovznz_u64(&x22, x18, x15, x7) + out1[0] = x19 + out1[1] = x20 + out1[2] = x21 + out1[3] = x22 +} + +// The function fiat_pasta_fq_sub subtracts two field elements in the Montgomery domain. +// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// 0 ≤ eval arg2 < m +// +// Postconditions: +// +// eval (from_montgomery out1) mod m = (eval (from_montgomery arg1) - eval (from_montgomery arg2)) mod m +// 0 ≤ eval out1 < m +func fiat_pasta_fq_sub( + out1 *fiat_pasta_fq_montgomery_domain_field_element, + arg1 *fiat_pasta_fq_montgomery_domain_field_element, + arg2 *fiat_pasta_fq_montgomery_domain_field_element, +) { + var x1 uint64 + var x2 fiat_pasta_fq_uint1 + x1, x2 = fiat_pasta_fq_subborrowx_u64(arg1[0], arg2[0], 0x0) + var x3 uint64 + var x4 fiat_pasta_fq_uint1 + x3, x4 = fiat_pasta_fq_subborrowx_u64(arg1[1], arg2[1], x2) + var x5 uint64 + var x6 fiat_pasta_fq_uint1 + x5, x6 = fiat_pasta_fq_subborrowx_u64(arg1[2], arg2[2], x4) + var x7 uint64 + var x8 fiat_pasta_fq_uint1 + x7, x8 = fiat_pasta_fq_subborrowx_u64(arg1[3], arg2[3], x6) + var x9 uint64 + fiat_pasta_fq_cmovznz_u64(&x9, x8, uint64(0x0), 0xffffffffffffffff) + var x10 uint64 + var x11 fiat_pasta_fq_uint1 + x10, x11 = fiat_pasta_fq_addcarryx_u64(x1, (x9 & 0x8c46eb2100000001), 0x0) + var x12 uint64 + var x13 fiat_pasta_fq_uint1 + x12, x13 = fiat_pasta_fq_addcarryx_u64(x3, (x9 & 0x224698fc0994a8dd), x11) + var x14 uint64 + var x15 fiat_pasta_fq_uint1 + x14, x15 = fiat_pasta_fq_addcarryx_u64(x5, uint64(0x0), x13) + var x16 uint64 + x16, _ = fiat_pasta_fq_addcarryx_u64(x7, (x9 & 0x4000000000000000), x15) + out1[0] = x10 + out1[1] = x12 + out1[2] = x14 + out1[3] = x16 +} + +// The function fiat_pasta_fq_opp negates a field element in the Montgomery domain. 
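+//
+// The negation is computed as 0 - arg1 with a borrow chain; the final borrow is
+// expanded into an all-ones mask via cmovznz and used to add m back, so zero maps
+// to zero and any non-zero element maps to m - arg1.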
+// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// +// Postconditions: +// +// eval (from_montgomery out1) mod m = -eval (from_montgomery arg1) mod m +// 0 ≤ eval out1 < m +func fiat_pasta_fq_opp( + out1 *fiat_pasta_fq_montgomery_domain_field_element, + arg1 *fiat_pasta_fq_montgomery_domain_field_element, +) { + var x1 uint64 + var x2 fiat_pasta_fq_uint1 + x1, x2 = fiat_pasta_fq_subborrowx_u64(uint64(0x0), arg1[0], 0x0) + var x3 uint64 + var x4 fiat_pasta_fq_uint1 + x3, x4 = fiat_pasta_fq_subborrowx_u64(uint64(0x0), arg1[1], x2) + var x5 uint64 + var x6 fiat_pasta_fq_uint1 + x5, x6 = fiat_pasta_fq_subborrowx_u64(uint64(0x0), arg1[2], x4) + var x7 uint64 + var x8 fiat_pasta_fq_uint1 + x7, x8 = fiat_pasta_fq_subborrowx_u64(uint64(0x0), arg1[3], x6) + var x9 uint64 + fiat_pasta_fq_cmovznz_u64(&x9, x8, uint64(0x0), 0xffffffffffffffff) + var x10 uint64 + var x11 fiat_pasta_fq_uint1 + x10, x11 = fiat_pasta_fq_addcarryx_u64(x1, (x9 & 0x8c46eb2100000001), 0x0) + var x12 uint64 + var x13 fiat_pasta_fq_uint1 + x12, x13 = fiat_pasta_fq_addcarryx_u64(x3, (x9 & 0x224698fc0994a8dd), x11) + var x14 uint64 + var x15 fiat_pasta_fq_uint1 + x14, x15 = fiat_pasta_fq_addcarryx_u64(x5, uint64(0x0), x13) + var x16 uint64 + x16, _ = fiat_pasta_fq_addcarryx_u64(x7, (x9 & 0x4000000000000000), x15) + out1[0] = x10 + out1[1] = x12 + out1[2] = x14 + out1[3] = x16 +} + +// The function fiat_pasta_fq_from_montgomery translates a field element out of the Montgomery domain. +// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// +// Postconditions: +// +// eval out1 mod m = (eval arg1 * ((2^64)⁻¹ mod m)^4) mod m +// 0 ≤ eval out1 < m +func fiat_pasta_fq_from_montgomery( + out1 *fiat_pasta_fq_non_montgomery_domain_field_element, + arg1 *fiat_pasta_fq_montgomery_domain_field_element, +) { + x1 := arg1[0] + var x2 uint64 + _, x2 = bits.Mul64(x1, 0x8c46eb20ffffffff) + var x4 uint64 + var x5 uint64 + x5, x4 = bits.Mul64(x2, 0x4000000000000000) + var x6 uint64 + var x7 uint64 + x7, x6 = bits.Mul64(x2, 0x224698fc0994a8dd) + var x8 uint64 + var x9 uint64 + x9, x8 = bits.Mul64(x2, 0x8c46eb2100000001) + var x10 uint64 + var x11 fiat_pasta_fq_uint1 + x10, x11 = fiat_pasta_fq_addcarryx_u64(x9, x6, 0x0) + var x13 fiat_pasta_fq_uint1 + _, x13 = fiat_pasta_fq_addcarryx_u64(x1, x8, 0x0) + var x14 uint64 + var x15 fiat_pasta_fq_uint1 + x14, x15 = fiat_pasta_fq_addcarryx_u64(uint64(0x0), x10, x13) + var x16 uint64 + var x17 fiat_pasta_fq_uint1 + x16, x17 = fiat_pasta_fq_addcarryx_u64(x14, arg1[1], 0x0) + var x18 uint64 + _, x18 = bits.Mul64(x16, 0x8c46eb20ffffffff) + var x20 uint64 + var x21 uint64 + x21, x20 = bits.Mul64(x18, 0x4000000000000000) + var x22 uint64 + var x23 uint64 + x23, x22 = bits.Mul64(x18, 0x224698fc0994a8dd) + var x24 uint64 + var x25 uint64 + x25, x24 = bits.Mul64(x18, 0x8c46eb2100000001) + var x26 uint64 + var x27 fiat_pasta_fq_uint1 + x26, x27 = fiat_pasta_fq_addcarryx_u64(x25, x22, 0x0) + var x29 fiat_pasta_fq_uint1 + _, x29 = fiat_pasta_fq_addcarryx_u64(x16, x24, 0x0) + var x30 uint64 + var x31 fiat_pasta_fq_uint1 + x30, x31 = fiat_pasta_fq_addcarryx_u64( + (uint64(x17) + (uint64(x15) + (uint64(x11) + x7))), + x26, + x29, + ) + var x32 uint64 + var x33 fiat_pasta_fq_uint1 + x32, x33 = fiat_pasta_fq_addcarryx_u64(x4, (uint64(x27) + x23), x31) + var x34 uint64 + var x35 fiat_pasta_fq_uint1 + x34, x35 = fiat_pasta_fq_addcarryx_u64(x5, x20, x33) + var x36 uint64 + var x37 fiat_pasta_fq_uint1 + x36, x37 = fiat_pasta_fq_addcarryx_u64(x30, arg1[2], 0x0) + var x38 uint64 + var x39 fiat_pasta_fq_uint1 + x38, x39 = 
fiat_pasta_fq_addcarryx_u64(x32, uint64(0x0), x37) + var x40 uint64 + var x41 fiat_pasta_fq_uint1 + x40, x41 = fiat_pasta_fq_addcarryx_u64(x34, uint64(0x0), x39) + var x42 uint64 + _, x42 = bits.Mul64(x36, 0x8c46eb20ffffffff) + var x44 uint64 + var x45 uint64 + x45, x44 = bits.Mul64(x42, 0x4000000000000000) + var x46 uint64 + var x47 uint64 + x47, x46 = bits.Mul64(x42, 0x224698fc0994a8dd) + var x48 uint64 + var x49 uint64 + x49, x48 = bits.Mul64(x42, 0x8c46eb2100000001) + var x50 uint64 + var x51 fiat_pasta_fq_uint1 + x50, x51 = fiat_pasta_fq_addcarryx_u64(x49, x46, 0x0) + var x53 fiat_pasta_fq_uint1 + _, x53 = fiat_pasta_fq_addcarryx_u64(x36, x48, 0x0) + var x54 uint64 + var x55 fiat_pasta_fq_uint1 + x54, x55 = fiat_pasta_fq_addcarryx_u64(x38, x50, x53) + var x56 uint64 + var x57 fiat_pasta_fq_uint1 + x56, x57 = fiat_pasta_fq_addcarryx_u64(x40, (uint64(x51) + x47), x55) + var x58 uint64 + var x59 fiat_pasta_fq_uint1 + x58, x59 = fiat_pasta_fq_addcarryx_u64((uint64(x41) + (uint64(x35) + x21)), x44, x57) + var x60 uint64 + var x61 fiat_pasta_fq_uint1 + x60, x61 = fiat_pasta_fq_addcarryx_u64(x54, arg1[3], 0x0) + var x62 uint64 + var x63 fiat_pasta_fq_uint1 + x62, x63 = fiat_pasta_fq_addcarryx_u64(x56, uint64(0x0), x61) + var x64 uint64 + var x65 fiat_pasta_fq_uint1 + x64, x65 = fiat_pasta_fq_addcarryx_u64(x58, uint64(0x0), x63) + var x66 uint64 + _, x66 = bits.Mul64(x60, 0x8c46eb20ffffffff) + var x68 uint64 + var x69 uint64 + x69, x68 = bits.Mul64(x66, 0x4000000000000000) + var x70 uint64 + var x71 uint64 + x71, x70 = bits.Mul64(x66, 0x224698fc0994a8dd) + var x72 uint64 + var x73 uint64 + x73, x72 = bits.Mul64(x66, 0x8c46eb2100000001) + var x74 uint64 + var x75 fiat_pasta_fq_uint1 + x74, x75 = fiat_pasta_fq_addcarryx_u64(x73, x70, 0x0) + var x77 fiat_pasta_fq_uint1 + _, x77 = fiat_pasta_fq_addcarryx_u64(x60, x72, 0x0) + var x78 uint64 + var x79 fiat_pasta_fq_uint1 + x78, x79 = fiat_pasta_fq_addcarryx_u64(x62, x74, x77) + var x80 uint64 + var x81 fiat_pasta_fq_uint1 + x80, x81 = fiat_pasta_fq_addcarryx_u64(x64, (uint64(x75) + x71), x79) + var x82 uint64 + var x83 fiat_pasta_fq_uint1 + x82, x83 = fiat_pasta_fq_addcarryx_u64((uint64(x65) + (uint64(x59) + x45)), x68, x81) + x84 := (uint64(x83) + x69) + var x85 uint64 + var x86 fiat_pasta_fq_uint1 + x85, x86 = fiat_pasta_fq_subborrowx_u64(x78, 0x8c46eb2100000001, 0x0) + var x87 uint64 + var x88 fiat_pasta_fq_uint1 + x87, x88 = fiat_pasta_fq_subborrowx_u64(x80, 0x224698fc0994a8dd, x86) + var x89 uint64 + var x90 fiat_pasta_fq_uint1 + x89, x90 = fiat_pasta_fq_subborrowx_u64(x82, uint64(0x0), x88) + var x91 uint64 + var x92 fiat_pasta_fq_uint1 + x91, x92 = fiat_pasta_fq_subborrowx_u64(x84, 0x4000000000000000, x90) + var x94 fiat_pasta_fq_uint1 + _, x94 = fiat_pasta_fq_subborrowx_u64(uint64(0x0), uint64(0x0), x92) + var x95 uint64 + fiat_pasta_fq_cmovznz_u64(&x95, x94, x85, x78) + var x96 uint64 + fiat_pasta_fq_cmovznz_u64(&x96, x94, x87, x80) + var x97 uint64 + fiat_pasta_fq_cmovznz_u64(&x97, x94, x89, x82) + var x98 uint64 + fiat_pasta_fq_cmovznz_u64(&x98, x94, x91, x84) + out1[0] = x95 + out1[1] = x96 + out1[2] = x97 + out1[3] = x98 +} + +// The function fiat_pasta_fq_to_montgomery translates a field element into the Montgomery domain. 
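Per its postcondition, fiat_pasta_fq_from_montgomery above multiplies by ((2^64)^-1)^4 = R^-1 mod m with R = 2^256, while fiat_pasta_fq_to_montgomery below multiplies by R, so composing the two is the identity on canonical values. A minimal round-trip sketch under the same package and type assumptions as the earlier example:

package pasta

import "testing"

func TestFqMontgomeryRoundTrip(t *testing.T) {
	n := fiat_pasta_fq_non_montgomery_domain_field_element{42, 0, 0, 0}

	var m fiat_pasta_fq_montgomery_domain_field_element
	fiat_pasta_fq_to_montgomery(&m, &n) // m = 42·R mod q

	var back fiat_pasta_fq_non_montgomery_domain_field_element
	fiat_pasta_fq_from_montgomery(&back, &m) // back = m·R⁻¹ mod q = 42

	if back != n {
		t.Fatalf("Montgomery round trip failed: got %v, want %v", back, n)
	}
}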
+// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// +// Postconditions: +// +// eval (from_montgomery out1) mod m = eval arg1 mod m +// 0 ≤ eval out1 < m +func fiat_pasta_fq_to_montgomery( + out1 *fiat_pasta_fq_montgomery_domain_field_element, + arg1 *fiat_pasta_fq_non_montgomery_domain_field_element, +) { + x1 := arg1[1] + x2 := arg1[2] + x3 := arg1[3] + x4 := arg1[0] + var x5 uint64 + var x6 uint64 + x6, x5 = bits.Mul64(x4, 0x96d41af7ccfdaa9) + var x7 uint64 + var x8 uint64 + x8, x7 = bits.Mul64(x4, 0x7fae231004ccf590) + var x9 uint64 + var x10 uint64 + x10, x9 = bits.Mul64(x4, 0x67bb433d891a16e3) + var x11 uint64 + var x12 uint64 + x12, x11 = bits.Mul64(x4, 0xfc9678ff0000000f) + var x13 uint64 + var x14 fiat_pasta_fq_uint1 + x13, x14 = fiat_pasta_fq_addcarryx_u64(x12, x9, 0x0) + var x15 uint64 + var x16 fiat_pasta_fq_uint1 + x15, x16 = fiat_pasta_fq_addcarryx_u64(x10, x7, x14) + var x17 uint64 + var x18 fiat_pasta_fq_uint1 + x17, x18 = fiat_pasta_fq_addcarryx_u64(x8, x5, x16) + var x19 uint64 + _, x19 = bits.Mul64(x11, 0x8c46eb20ffffffff) + var x21 uint64 + var x22 uint64 + x22, x21 = bits.Mul64(x19, 0x4000000000000000) + var x23 uint64 + var x24 uint64 + x24, x23 = bits.Mul64(x19, 0x224698fc0994a8dd) + var x25 uint64 + var x26 uint64 + x26, x25 = bits.Mul64(x19, 0x8c46eb2100000001) + var x27 uint64 + var x28 fiat_pasta_fq_uint1 + x27, x28 = fiat_pasta_fq_addcarryx_u64(x26, x23, 0x0) + var x30 fiat_pasta_fq_uint1 + _, x30 = fiat_pasta_fq_addcarryx_u64(x11, x25, 0x0) + var x31 uint64 + var x32 fiat_pasta_fq_uint1 + x31, x32 = fiat_pasta_fq_addcarryx_u64(x13, x27, x30) + var x33 uint64 + var x34 fiat_pasta_fq_uint1 + x33, x34 = fiat_pasta_fq_addcarryx_u64(x15, (uint64(x28) + x24), x32) + var x35 uint64 + var x36 fiat_pasta_fq_uint1 + x35, x36 = fiat_pasta_fq_addcarryx_u64(x17, x21, x34) + var x37 uint64 + var x38 uint64 + x38, x37 = bits.Mul64(x1, 0x96d41af7ccfdaa9) + var x39 uint64 + var x40 uint64 + x40, x39 = bits.Mul64(x1, 0x7fae231004ccf590) + var x41 uint64 + var x42 uint64 + x42, x41 = bits.Mul64(x1, 0x67bb433d891a16e3) + var x43 uint64 + var x44 uint64 + x44, x43 = bits.Mul64(x1, 0xfc9678ff0000000f) + var x45 uint64 + var x46 fiat_pasta_fq_uint1 + x45, x46 = fiat_pasta_fq_addcarryx_u64(x44, x41, 0x0) + var x47 uint64 + var x48 fiat_pasta_fq_uint1 + x47, x48 = fiat_pasta_fq_addcarryx_u64(x42, x39, x46) + var x49 uint64 + var x50 fiat_pasta_fq_uint1 + x49, x50 = fiat_pasta_fq_addcarryx_u64(x40, x37, x48) + var x51 uint64 + var x52 fiat_pasta_fq_uint1 + x51, x52 = fiat_pasta_fq_addcarryx_u64(x31, x43, 0x0) + var x53 uint64 + var x54 fiat_pasta_fq_uint1 + x53, x54 = fiat_pasta_fq_addcarryx_u64(x33, x45, x52) + var x55 uint64 + var x56 fiat_pasta_fq_uint1 + x55, x56 = fiat_pasta_fq_addcarryx_u64(x35, x47, x54) + var x57 uint64 + var x58 fiat_pasta_fq_uint1 + x57, x58 = fiat_pasta_fq_addcarryx_u64(((uint64(x36) + (uint64(x18) + x6)) + x22), x49, x56) + var x59 uint64 + _, x59 = bits.Mul64(x51, 0x8c46eb20ffffffff) + var x61 uint64 + var x62 uint64 + x62, x61 = bits.Mul64(x59, 0x4000000000000000) + var x63 uint64 + var x64 uint64 + x64, x63 = bits.Mul64(x59, 0x224698fc0994a8dd) + var x65 uint64 + var x66 uint64 + x66, x65 = bits.Mul64(x59, 0x8c46eb2100000001) + var x67 uint64 + var x68 fiat_pasta_fq_uint1 + x67, x68 = fiat_pasta_fq_addcarryx_u64(x66, x63, 0x0) + var x70 fiat_pasta_fq_uint1 + _, x70 = fiat_pasta_fq_addcarryx_u64(x51, x65, 0x0) + var x71 uint64 + var x72 fiat_pasta_fq_uint1 + x71, x72 = fiat_pasta_fq_addcarryx_u64(x53, x67, x70) + var x73 uint64 + var x74 
fiat_pasta_fq_uint1 + x73, x74 = fiat_pasta_fq_addcarryx_u64(x55, (uint64(x68) + x64), x72) + var x75 uint64 + var x76 fiat_pasta_fq_uint1 + x75, x76 = fiat_pasta_fq_addcarryx_u64(x57, x61, x74) + var x77 uint64 + var x78 uint64 + x78, x77 = bits.Mul64(x2, 0x96d41af7ccfdaa9) + var x79 uint64 + var x80 uint64 + x80, x79 = bits.Mul64(x2, 0x7fae231004ccf590) + var x81 uint64 + var x82 uint64 + x82, x81 = bits.Mul64(x2, 0x67bb433d891a16e3) + var x83 uint64 + var x84 uint64 + x84, x83 = bits.Mul64(x2, 0xfc9678ff0000000f) + var x85 uint64 + var x86 fiat_pasta_fq_uint1 + x85, x86 = fiat_pasta_fq_addcarryx_u64(x84, x81, 0x0) + var x87 uint64 + var x88 fiat_pasta_fq_uint1 + x87, x88 = fiat_pasta_fq_addcarryx_u64(x82, x79, x86) + var x89 uint64 + var x90 fiat_pasta_fq_uint1 + x89, x90 = fiat_pasta_fq_addcarryx_u64(x80, x77, x88) + var x91 uint64 + var x92 fiat_pasta_fq_uint1 + x91, x92 = fiat_pasta_fq_addcarryx_u64(x71, x83, 0x0) + var x93 uint64 + var x94 fiat_pasta_fq_uint1 + x93, x94 = fiat_pasta_fq_addcarryx_u64(x73, x85, x92) + var x95 uint64 + var x96 fiat_pasta_fq_uint1 + x95, x96 = fiat_pasta_fq_addcarryx_u64(x75, x87, x94) + var x97 uint64 + var x98 fiat_pasta_fq_uint1 + x97, x98 = fiat_pasta_fq_addcarryx_u64( + ((uint64(x76) + (uint64(x58) + (uint64(x50) + x38))) + x62), + x89, + x96, + ) + var x99 uint64 + _, x99 = bits.Mul64(x91, 0x8c46eb20ffffffff) + var x101 uint64 + var x102 uint64 + x102, x101 = bits.Mul64(x99, 0x4000000000000000) + var x103 uint64 + var x104 uint64 + x104, x103 = bits.Mul64(x99, 0x224698fc0994a8dd) + var x105 uint64 + var x106 uint64 + x106, x105 = bits.Mul64(x99, 0x8c46eb2100000001) + var x107 uint64 + var x108 fiat_pasta_fq_uint1 + x107, x108 = fiat_pasta_fq_addcarryx_u64(x106, x103, 0x0) + var x110 fiat_pasta_fq_uint1 + _, x110 = fiat_pasta_fq_addcarryx_u64(x91, x105, 0x0) + var x111 uint64 + var x112 fiat_pasta_fq_uint1 + x111, x112 = fiat_pasta_fq_addcarryx_u64(x93, x107, x110) + var x113 uint64 + var x114 fiat_pasta_fq_uint1 + x113, x114 = fiat_pasta_fq_addcarryx_u64(x95, (uint64(x108) + x104), x112) + var x115 uint64 + var x116 fiat_pasta_fq_uint1 + x115, x116 = fiat_pasta_fq_addcarryx_u64(x97, x101, x114) + var x117 uint64 + var x118 uint64 + x118, x117 = bits.Mul64(x3, 0x96d41af7ccfdaa9) + var x119 uint64 + var x120 uint64 + x120, x119 = bits.Mul64(x3, 0x7fae231004ccf590) + var x121 uint64 + var x122 uint64 + x122, x121 = bits.Mul64(x3, 0x67bb433d891a16e3) + var x123 uint64 + var x124 uint64 + x124, x123 = bits.Mul64(x3, 0xfc9678ff0000000f) + var x125 uint64 + var x126 fiat_pasta_fq_uint1 + x125, x126 = fiat_pasta_fq_addcarryx_u64(x124, x121, 0x0) + var x127 uint64 + var x128 fiat_pasta_fq_uint1 + x127, x128 = fiat_pasta_fq_addcarryx_u64(x122, x119, x126) + var x129 uint64 + var x130 fiat_pasta_fq_uint1 + x129, x130 = fiat_pasta_fq_addcarryx_u64(x120, x117, x128) + var x131 uint64 + var x132 fiat_pasta_fq_uint1 + x131, x132 = fiat_pasta_fq_addcarryx_u64(x111, x123, 0x0) + var x133 uint64 + var x134 fiat_pasta_fq_uint1 + x133, x134 = fiat_pasta_fq_addcarryx_u64(x113, x125, x132) + var x135 uint64 + var x136 fiat_pasta_fq_uint1 + x135, x136 = fiat_pasta_fq_addcarryx_u64(x115, x127, x134) + var x137 uint64 + var x138 fiat_pasta_fq_uint1 + x137, x138 = fiat_pasta_fq_addcarryx_u64( + ((uint64(x116) + (uint64(x98) + (uint64(x90) + x78))) + x102), + x129, + x136, + ) + var x139 uint64 + _, x139 = bits.Mul64(x131, 0x8c46eb20ffffffff) + var x141 uint64 + var x142 uint64 + x142, x141 = bits.Mul64(x139, 0x4000000000000000) + var x143 uint64 + var x144 uint64 + x144, 
x143 = bits.Mul64(x139, 0x224698fc0994a8dd) + var x145 uint64 + var x146 uint64 + x146, x145 = bits.Mul64(x139, 0x8c46eb2100000001) + var x147 uint64 + var x148 fiat_pasta_fq_uint1 + x147, x148 = fiat_pasta_fq_addcarryx_u64(x146, x143, 0x0) + var x150 fiat_pasta_fq_uint1 + _, x150 = fiat_pasta_fq_addcarryx_u64(x131, x145, 0x0) + var x151 uint64 + var x152 fiat_pasta_fq_uint1 + x151, x152 = fiat_pasta_fq_addcarryx_u64(x133, x147, x150) + var x153 uint64 + var x154 fiat_pasta_fq_uint1 + x153, x154 = fiat_pasta_fq_addcarryx_u64(x135, (uint64(x148) + x144), x152) + var x155 uint64 + var x156 fiat_pasta_fq_uint1 + x155, x156 = fiat_pasta_fq_addcarryx_u64(x137, x141, x154) + x157 := ((uint64(x156) + (uint64(x138) + (uint64(x130) + x118))) + x142) + var x158 uint64 + var x159 fiat_pasta_fq_uint1 + x158, x159 = fiat_pasta_fq_subborrowx_u64(x151, 0x8c46eb2100000001, 0x0) + var x160 uint64 + var x161 fiat_pasta_fq_uint1 + x160, x161 = fiat_pasta_fq_subborrowx_u64(x153, 0x224698fc0994a8dd, x159) + var x162 uint64 + var x163 fiat_pasta_fq_uint1 + x162, x163 = fiat_pasta_fq_subborrowx_u64(x155, uint64(0x0), x161) + var x164 uint64 + var x165 fiat_pasta_fq_uint1 + x164, x165 = fiat_pasta_fq_subborrowx_u64(x157, 0x4000000000000000, x163) + var x167 fiat_pasta_fq_uint1 + _, x167 = fiat_pasta_fq_subborrowx_u64(uint64(0x0), uint64(0x0), x165) + var x168 uint64 + fiat_pasta_fq_cmovznz_u64(&x168, x167, x158, x151) + var x169 uint64 + fiat_pasta_fq_cmovznz_u64(&x169, x167, x160, x153) + var x170 uint64 + fiat_pasta_fq_cmovznz_u64(&x170, x167, x162, x155) + var x171 uint64 + fiat_pasta_fq_cmovznz_u64(&x171, x167, x164, x157) + out1[0] = x168 + out1[1] = x169 + out1[2] = x170 + out1[3] = x171 +} + +// The function fiat_pasta_fq_selectznz is a multi-limb conditional select. +// +// Postconditions: +// +// eval out1 = (if arg1 = 0 then eval arg2 else eval arg3) +// +// Input Bounds: +// +// arg1: [0x0 ~> 0x1] +// arg2: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +// arg3: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +// +// Output Bounds: +// +// out1: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff]] +func fiat_pasta_fq_selectznz( + out1 *[4]uint64, + arg1 fiat_pasta_fq_uint1, + arg2 *[4]uint64, + arg3 *[4]uint64, +) { + var x1 uint64 + fiat_pasta_fq_cmovznz_u64(&x1, arg1, arg2[0], arg3[0]) + var x2 uint64 + fiat_pasta_fq_cmovznz_u64(&x2, arg1, arg2[1], arg3[1]) + var x3 uint64 + fiat_pasta_fq_cmovznz_u64(&x3, arg1, arg2[2], arg3[2]) + var x4 uint64 + fiat_pasta_fq_cmovznz_u64(&x4, arg1, arg2[3], arg3[3]) + out1[0] = x1 + out1[1] = x2 + out1[2] = x3 + out1[3] = x4 +} + +// The function fiat_pasta_fq_to_bytes serializes a field element NOT in the Montgomery domain to bytes in little-endian order. 
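fiat_pasta_fq_selectznz above is the limb-wise constant-time select used by higher-level callers: per its postcondition it yields arg2 when the flag is 0 and arg3 when it is 1, with no branch on the flag. A short usage sketch under the same package assumption:

package pasta

import "testing"

func TestFqSelectznz(t *testing.T) {
	a := [4]uint64{1, 1, 1, 1}
	b := [4]uint64{2, 2, 2, 2}

	var out [4]uint64
	fiat_pasta_fq_selectznz(&out, 0, &a, &b) // flag 0 selects arg2
	if out != a {
		t.Fatal("expected the first operand")
	}
	fiat_pasta_fq_selectznz(&out, 1, &a, &b) // flag 1 selects arg3
	if out != b {
		t.Fatal("expected the second operand")
	}
}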
+// +// Preconditions: +// +// 0 ≤ eval arg1 < m +// +// Postconditions: +// +// out1 = map (λ x, ⌊((eval arg1 mod m) mod 2^(8 * (x + 1))) / 2^(8 * x)⌋) [0..31] +// +// Input Bounds: +// +// arg1: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0x7fffffffffffffff]] +// +// Output Bounds: +// +// out1: [[0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0x7f]] +func fiat_pasta_fq_to_bytes(out1 *[32]uint8, arg1 *[4]uint64) { + x1 := arg1[3] + x2 := arg1[2] + x3 := arg1[1] + x4 := arg1[0] + x5 := (uint8(x4) & 0xff) + x6 := (x4 >> 8) + x7 := (uint8(x6) & 0xff) + x8 := (x6 >> 8) + x9 := (uint8(x8) & 0xff) + x10 := (x8 >> 8) + x11 := (uint8(x10) & 0xff) + x12 := (x10 >> 8) + x13 := (uint8(x12) & 0xff) + x14 := (x12 >> 8) + x15 := (uint8(x14) & 0xff) + x16 := (x14 >> 8) + x17 := (uint8(x16) & 0xff) + x18 := uint8((x16 >> 8)) + x19 := (uint8(x3) & 0xff) + x20 := (x3 >> 8) + x21 := (uint8(x20) & 0xff) + x22 := (x20 >> 8) + x23 := (uint8(x22) & 0xff) + x24 := (x22 >> 8) + x25 := (uint8(x24) & 0xff) + x26 := (x24 >> 8) + x27 := (uint8(x26) & 0xff) + x28 := (x26 >> 8) + x29 := (uint8(x28) & 0xff) + x30 := (x28 >> 8) + x31 := (uint8(x30) & 0xff) + x32 := uint8((x30 >> 8)) + x33 := (uint8(x2) & 0xff) + x34 := (x2 >> 8) + x35 := (uint8(x34) & 0xff) + x36 := (x34 >> 8) + x37 := (uint8(x36) & 0xff) + x38 := (x36 >> 8) + x39 := (uint8(x38) & 0xff) + x40 := (x38 >> 8) + x41 := (uint8(x40) & 0xff) + x42 := (x40 >> 8) + x43 := (uint8(x42) & 0xff) + x44 := (x42 >> 8) + x45 := (uint8(x44) & 0xff) + x46 := uint8((x44 >> 8)) + x47 := (uint8(x1) & 0xff) + x48 := (x1 >> 8) + x49 := (uint8(x48) & 0xff) + x50 := (x48 >> 8) + x51 := (uint8(x50) & 0xff) + x52 := (x50 >> 8) + x53 := (uint8(x52) & 0xff) + x54 := (x52 >> 8) + x55 := (uint8(x54) & 0xff) + x56 := (x54 >> 8) + x57 := (uint8(x56) & 0xff) + x58 := (x56 >> 8) + x59 := (uint8(x58) & 0xff) + x60 := uint8((x58 >> 8)) + out1[0] = x5 + out1[1] = x7 + out1[2] = x9 + out1[3] = x11 + out1[4] = x13 + out1[5] = x15 + out1[6] = x17 + out1[7] = x18 + out1[8] = x19 + out1[9] = x21 + out1[10] = x23 + out1[11] = x25 + out1[12] = x27 + out1[13] = x29 + out1[14] = x31 + out1[15] = x32 + out1[16] = x33 + out1[17] = x35 + out1[18] = x37 + out1[19] = x39 + out1[20] = x41 + out1[21] = x43 + out1[22] = x45 + out1[23] = x46 + out1[24] = x47 + out1[25] = x49 + out1[26] = x51 + out1[27] = x53 + out1[28] = x55 + out1[29] = x57 + out1[30] = x59 + out1[31] = x60 +} + +// The function fiat_pasta_fq_from_bytes deserializes a field element NOT in the Montgomery domain from bytes in little-endian order. 
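fiat_pasta_fq_to_bytes above and fiat_pasta_fq_from_bytes below are plain little-endian limb (de)serialization; neither enters nor leaves the Montgomery domain. A round-trip sketch that also cross-checks the encoding against encoding/binary, under the same package assumption:

package pasta

import (
	"encoding/binary"
	"testing"
)

func TestFqBytesRoundTrip(t *testing.T) {
	limbs := [4]uint64{0xdeadbeef, 1, 2, 3}

	var enc [32]uint8
	fiat_pasta_fq_to_bytes(&enc, &limbs)

	// Cross-check against the obvious little-endian packing of each limb.
	var want [32]uint8
	for i, limb := range limbs {
		binary.LittleEndian.PutUint64(want[8*i:8*i+8], limb)
	}
	if enc != want {
		t.Fatalf("unexpected encoding: %x != %x", enc, want)
	}

	var back [4]uint64
	fiat_pasta_fq_from_bytes(&back, &enc)
	if back != limbs {
		t.Fatalf("bytes round trip failed: got %v, want %v", back, limbs)
	}
}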
+// +// Preconditions: +// +// 0 ≤ bytes_eval arg1 < m +// +// Postconditions: +// +// eval out1 mod m = bytes_eval arg1 mod m +// 0 ≤ eval out1 < m +// +// Input Bounds: +// +// arg1: [[0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0xff], [0x0 ~> 0x7f]] +// +// Output Bounds: +// +// out1: [[0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0xffffffffffffffff], [0x0 ~> 0x7fffffffffffffff]] +func fiat_pasta_fq_from_bytes(out1 *[4]uint64, arg1 *[32]uint8) { + x1 := (uint64(arg1[31]) << 56) + x2 := (uint64(arg1[30]) << 48) + x3 := (uint64(arg1[29]) << 40) + x4 := (uint64(arg1[28]) << 32) + x5 := (uint64(arg1[27]) << 24) + x6 := (uint64(arg1[26]) << 16) + x7 := (uint64(arg1[25]) << 8) + x8 := arg1[24] + x9 := (uint64(arg1[23]) << 56) + x10 := (uint64(arg1[22]) << 48) + x11 := (uint64(arg1[21]) << 40) + x12 := (uint64(arg1[20]) << 32) + x13 := (uint64(arg1[19]) << 24) + x14 := (uint64(arg1[18]) << 16) + x15 := (uint64(arg1[17]) << 8) + x16 := arg1[16] + x17 := (uint64(arg1[15]) << 56) + x18 := (uint64(arg1[14]) << 48) + x19 := (uint64(arg1[13]) << 40) + x20 := (uint64(arg1[12]) << 32) + x21 := (uint64(arg1[11]) << 24) + x22 := (uint64(arg1[10]) << 16) + x23 := (uint64(arg1[9]) << 8) + x24 := arg1[8] + x25 := (uint64(arg1[7]) << 56) + x26 := (uint64(arg1[6]) << 48) + x27 := (uint64(arg1[5]) << 40) + x28 := (uint64(arg1[4]) << 32) + x29 := (uint64(arg1[3]) << 24) + x30 := (uint64(arg1[2]) << 16) + x31 := (uint64(arg1[1]) << 8) + x32 := arg1[0] + x33 := (x31 + uint64(x32)) + x34 := (x30 + x33) + x35 := (x29 + x34) + x36 := (x28 + x35) + x37 := (x27 + x36) + x38 := (x26 + x37) + x39 := (x25 + x38) + x40 := (x23 + uint64(x24)) + x41 := (x22 + x40) + x42 := (x21 + x41) + x43 := (x20 + x42) + x44 := (x19 + x43) + x45 := (x18 + x44) + x46 := (x17 + x45) + x47 := (x15 + uint64(x16)) + x48 := (x14 + x47) + x49 := (x13 + x48) + x50 := (x12 + x49) + x51 := (x11 + x50) + x52 := (x10 + x51) + x53 := (x9 + x52) + x54 := (x7 + uint64(x8)) + x55 := (x6 + x54) + x56 := (x5 + x55) + x57 := (x4 + x56) + x58 := (x3 + x57) + x59 := (x2 + x58) + x60 := (x1 + x59) + out1[0] = x39 + out1[1] = x46 + out1[2] = x53 + out1[3] = x60 +} diff --git a/core/curves/native/pasta/pallas.go b/core/curves/native/pasta/pallas.go new file mode 100755 index 0000000..d90bc3d --- /dev/null +++ b/core/curves/native/pasta/pallas.go @@ -0,0 +1,7 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package pasta diff --git a/core/curves/native/point.go b/core/curves/native/point.go new file mode 100755 index 0000000..1718df6 --- /dev/null +++ b/core/curves/native/point.go @@ -0,0 +1,471 @@ +package native + +import ( + "crypto/sha256" + "crypto/sha512" + "fmt" + "hash" + "io" + "math/big" + + "github.com/pkg/errors" + "golang.org/x/crypto/blake2b" + "golang.org/x/crypto/sha3" +) + +// EllipticPointHashType is to indicate which expand operation is used +// for hash to curve operations +type EllipticPointHashType uint + +// EllipticPointHashName is to indicate the hash function is used +// for hash to curve operations +type EllipticPointHashName uint + +const ( + // XMD - use ExpandMsgXmd + XMD EllipticPointHashType = iota + // XOF - use ExpandMsgXof + XOF +) + +const ( + SHA256 EllipticPointHashName = iota + SHA512 + SHA3_256 + SHA3_384 + SHA3_512 + BLAKE2B + SHAKE128 + SHAKE256 +) + +// EllipticPoint represents a Weierstrauss elliptic curve point +type EllipticPoint struct { + X *Field + Y *Field + Z *Field + Params *EllipticPointParams + Arithmetic EllipticPointArithmetic +} + +// EllipticPointParams are the Weierstrauss curve parameters +// such as the name, the coefficients the generator point, +// and the prime bit size +type EllipticPointParams struct { + Name string + A *Field + B *Field + Gx *Field + Gy *Field + BitSize int +} + +// EllipticPointHasher is the type of hashing methods for +// hashing byte sequences to curve point. +type EllipticPointHasher struct { + name EllipticPointHashName + hashType EllipticPointHashType + xmd hash.Hash + xof sha3.ShakeHash +} + +// Name returns the hash name for this hasher +func (e *EllipticPointHasher) Name() string { + return e.name.String() +} + +// Type returns the hash type for this hasher +func (e *EllipticPointHasher) Type() EllipticPointHashType { + return e.hashType +} + +// Xmd returns the hash method for ExpandMsgXmd +func (e *EllipticPointHasher) Xmd() hash.Hash { + return e.xmd +} + +// Xof returns the hash method for ExpandMsgXof +func (e *EllipticPointHasher) Xof() sha3.ShakeHash { + return e.xof +} + +// EllipticPointHasherSha256 creates a point hasher that uses Sha256 +func EllipticPointHasherSha256() *EllipticPointHasher { + return &EllipticPointHasher{ + name: SHA256, + hashType: XMD, + xmd: sha256.New(), + } +} + +// EllipticPointHasherSha512 creates a point hasher that uses Sha512 +func EllipticPointHasherSha512() *EllipticPointHasher { + return &EllipticPointHasher{ + name: SHA512, + hashType: XMD, + xmd: sha512.New(), + } +} + +// EllipticPointHasherSha3256 creates a point hasher that uses Sha3256 +func EllipticPointHasherSha3256() *EllipticPointHasher { + return &EllipticPointHasher{ + name: SHA3_256, + hashType: XMD, + xmd: sha3.New256(), + } +} + +// EllipticPointHasherSha3384 creates a point hasher that uses Sha3384 +func EllipticPointHasherSha3384() *EllipticPointHasher { + return &EllipticPointHasher{ + name: SHA3_384, + hashType: XMD, + xmd: sha3.New384(), + } +} + +// EllipticPointHasherSha3512 creates a point hasher that uses Sha3512 +func EllipticPointHasherSha3512() *EllipticPointHasher { + return &EllipticPointHasher{ + name: SHA3_512, + hashType: XMD, + xmd: sha3.New512(), + } +} + +// EllipticPointHasherBlake2b creates a point hasher that uses Blake2b +func EllipticPointHasherBlake2b() *EllipticPointHasher { + h, _ := blake2b.New(64, []byte{}) + return &EllipticPointHasher{ + name: BLAKE2B, + hashType: XMD, + xmd: h, + } +} + +// 
EllipticPointHasherShake128 creates a point hasher that uses Shake128 +func EllipticPointHasherShake128() *EllipticPointHasher { + return &EllipticPointHasher{ + name: SHAKE128, + hashType: XOF, + xof: sha3.NewShake128(), + } +} + +// EllipticPointHasherShake256 creates a point hasher that uses Shake256 +func EllipticPointHasherShake256() *EllipticPointHasher { + return &EllipticPointHasher{ + name: SHAKE256, + hashType: XOF, + xof: sha3.NewShake256(), + } +} + +// EllipticPointArithmetic are the methods that specific curves +// need to implement for higher abstractions to wrap the point +type EllipticPointArithmetic interface { + // Hash a byte sequence to the curve using the specified hasher + // and dst and store the result in out + Hash(out *EllipticPoint, hasher *EllipticPointHasher, bytes, dst []byte) error + // Double arg and store the result in out + Double(out, arg *EllipticPoint) + // Add arg1 with arg2 and store the result in out + Add(out, arg1, arg2 *EllipticPoint) + // IsOnCurve tests arg if it represents a valid point on the curve + IsOnCurve(arg *EllipticPoint) bool + // ToAffine converts arg to affine coordinates storing the result in out + ToAffine(out, arg *EllipticPoint) + // RhsEq computes the right-hand side of the ecc equation + RhsEq(out, x *Field) +} + +func (t EllipticPointHashType) String() string { + switch t { + case XMD: + return "XMD" + case XOF: + return "XOF" + } + return "unknown" +} + +func (n EllipticPointHashName) String() string { + switch n { + case SHA256: + return "SHA-256" + case SHA512: + return "SHA-512" + case SHA3_256: + return "SHA3-256" + case SHA3_384: + return "SHA3-384" + case SHA3_512: + return "SHA3-512" + case BLAKE2B: + return "BLAKE2b" + case SHAKE128: + return "SHAKE-128" + case SHAKE256: + return "SHAKE-256" + } + return "unknown" +} + +// Random creates a random point on the curve +// from the specified reader +func (p *EllipticPoint) Random(reader io.Reader) (*EllipticPoint, error) { + var seed [WideFieldBytes]byte + n, err := reader.Read(seed[:]) + if err != nil { + return nil, errors.Wrap(err, "random could not read from stream") + } + if n != WideFieldBytes { + return nil, fmt.Errorf("insufficient bytes read %d when %d are needed", n, WideFieldBytes) + } + dst := []byte(fmt.Sprintf("%s_XMD:SHA-256_SSWU_RO_", p.Params.Name)) + err = p.Arithmetic.Hash(p, EllipticPointHasherSha256(), seed[:], dst) + if err != nil { + return nil, errors.Wrap(err, "ecc hash failed") + } + return p, nil +} + +// Hash uses the hasher to map bytes to a valid point +func (p *EllipticPoint) Hash(bytes []byte, hasher *EllipticPointHasher) (*EllipticPoint, error) { + dst := []byte(fmt.Sprintf("%s_%s:%s_SSWU_RO_", p.Params.Name, hasher.hashType, hasher.name)) + err := p.Arithmetic.Hash(p, hasher, bytes, dst) + if err != nil { + return nil, errors.Wrap(err, "hash failed") + } + return p, nil +} + +// Identity returns the identity point +func (p *EllipticPoint) Identity() *EllipticPoint { + p.X.SetZero() + p.Y.SetZero() + p.Z.SetZero() + return p +} + +// Generator returns the base point for the curve +func (p *EllipticPoint) Generator() *EllipticPoint { + p.X.Set(p.Params.Gx) + p.Y.Set(p.Params.Gy) + p.Z.SetOne() + return p +} + +// IsIdentity returns true if this point is at infinity +func (p *EllipticPoint) IsIdentity() bool { + return p.Z.IsZero() == 1 +} + +// Double this point +func (p *EllipticPoint) Double(point *EllipticPoint) *EllipticPoint { + p.Set(point) + p.Arithmetic.Double(p, point) + return p +} + +// Neg negates this point +func (p 
*EllipticPoint) Neg(point *EllipticPoint) *EllipticPoint { + p.Set(point) + p.Y.Neg(p.Y) + return p +} + +// Add adds the two points +func (p *EllipticPoint) Add(lhs, rhs *EllipticPoint) *EllipticPoint { + p.Set(lhs) + p.Arithmetic.Add(p, lhs, rhs) + return p +} + +// Sub subtracts the two points +func (p *EllipticPoint) Sub(lhs, rhs *EllipticPoint) *EllipticPoint { + p.Set(lhs) + p.Arithmetic.Add(p, lhs, new(EllipticPoint).Neg(rhs)) + return p +} + +// Mul multiplies this point by the input scalar +func (p *EllipticPoint) Mul(point *EllipticPoint, scalar *Field) *EllipticPoint { + bytes := scalar.Bytes() + precomputed := [16]*EllipticPoint{} + precomputed[0] = new(EllipticPoint).Set(point).Identity() + precomputed[1] = new(EllipticPoint).Set(point) + for i := 2; i < 16; i += 2 { + precomputed[i] = new(EllipticPoint).Set(point).Double(precomputed[i>>1]) + precomputed[i+1] = new(EllipticPoint).Set(point).Add(precomputed[i], point) + } + p.Identity() + for i := 0; i < 256; i += 4 { + // Brouwer / windowing method. window size of 4. + for j := 0; j < 4; j++ { + p.Double(p) + } + window := bytes[32-1-i>>3] >> (4 - i&0x04) & 0x0F + p.Add(p, precomputed[window]) + } + return p +} + +// Equal returns 1 if the two points are equal 0 otherwise. +func (p *EllipticPoint) Equal(rhs *EllipticPoint) int { + var x1, x2, y1, y2 Field + + x1.Arithmetic = p.X.Arithmetic + x2.Arithmetic = p.X.Arithmetic + y1.Arithmetic = p.Y.Arithmetic + y2.Arithmetic = p.Y.Arithmetic + + x1.Mul(p.X, rhs.Z) + x2.Mul(rhs.X, p.Z) + + y1.Mul(p.Y, rhs.Z) + y2.Mul(rhs.Y, p.Z) + + e1 := p.Z.IsZero() + e2 := rhs.Z.IsZero() + + // Both at infinity or coordinates are the same + return (e1 & e2) | (^e1 & ^e2)&x1.Equal(&x2)&y1.Equal(&y2) +} + +// Set copies clone into p +func (p *EllipticPoint) Set(clone *EllipticPoint) *EllipticPoint { + p.X = new(Field).Set(clone.X) + p.Y = new(Field).Set(clone.Y) + p.Z = new(Field).Set(clone.Z) + p.Params = clone.Params + p.Arithmetic = clone.Arithmetic + return p +} + +// BigInt returns the x and y as big.Ints in affine +func (p *EllipticPoint) BigInt() (x, y *big.Int) { + t := new(EllipticPoint).Set(p) + p.Arithmetic.ToAffine(t, p) + x = t.X.BigInt() + y = t.Y.BigInt() + return x, y +} + +// SetBigInt creates a point from affine x, y +// and returns the point if it is on the curve +func (p *EllipticPoint) SetBigInt(x, y *big.Int) (*EllipticPoint, error) { + xx := &Field{ + Params: p.Params.Gx.Params, + Arithmetic: p.Params.Gx.Arithmetic, + } + xx.SetBigInt(x) + yy := &Field{ + Params: p.Params.Gx.Params, + Arithmetic: p.Params.Gx.Arithmetic, + } + yy.SetBigInt(y) + pp := new(EllipticPoint).Set(p) + + zero := new(Field).Set(xx).SetZero() + one := new(Field).Set(xx).SetOne() + isIdentity := xx.IsZero() & yy.IsZero() + pp.X = xx.CMove(xx, zero, isIdentity) + pp.Y = yy.CMove(yy, zero, isIdentity) + pp.Z = one.CMove(one, zero, isIdentity) + if !p.Arithmetic.IsOnCurve(pp) && isIdentity == 0 { + return nil, fmt.Errorf("invalid coordinates") + } + return p.Set(pp), nil +} + +// GetX returns the affine X coordinate +func (p *EllipticPoint) GetX() *Field { + t := new(EllipticPoint).Set(p) + p.Arithmetic.ToAffine(t, p) + return t.X +} + +// GetY returns the affine Y coordinate +func (p *EllipticPoint) GetY() *Field { + t := new(EllipticPoint).Set(p) + p.Arithmetic.ToAffine(t, p) + return t.Y +} + +// IsOnCurve determines if this point represents a valid curve point +func (p *EllipticPoint) IsOnCurve() bool { + return p.Arithmetic.IsOnCurve(p) +} + +// ToAffine converts the point into affine coordinates +func 
(p *EllipticPoint) ToAffine(clone *EllipticPoint) *EllipticPoint { + p.Arithmetic.ToAffine(p, clone) + return p +} + +// SumOfProducts computes the multi-exponentiation for the specified +// points and scalars and stores the result in `p`. +// Returns an error if the lengths of the arguments is not equal. +func (p *EllipticPoint) SumOfProducts( + points []*EllipticPoint, + scalars []*Field, +) (*EllipticPoint, error) { + const Upper = 256 + const W = 4 + const Windows = Upper / W // careful--use ceiling division in case this doesn't divide evenly + if len(points) != len(scalars) { + return nil, fmt.Errorf("length mismatch") + } + + bucketSize := 1 << W + windows := make([]*EllipticPoint, Windows) + bytes := make([][32]byte, len(scalars)) + buckets := make([]*EllipticPoint, bucketSize) + + for i, scalar := range scalars { + bytes[i] = scalar.Bytes() + } + for i := range windows { + windows[i] = new(EllipticPoint).Set(p).Identity() + } + + for i := 0; i < bucketSize; i++ { + buckets[i] = new(EllipticPoint).Set(p).Identity() + } + + sum := new(EllipticPoint).Set(p) + + for j := 0; j < len(windows); j++ { + for i := 0; i < bucketSize; i++ { + buckets[i].Identity() + } + + for i := 0; i < len(scalars); i++ { + // j*W to get the nibble + // >> 3 to convert to byte, / 8 + // (W * j & W) gets the nibble, mod W + // 1 << W - 1 to get the offset + index := bytes[i][j*W>>3] >> (W * j & W) & (1<<W - 1) + buckets[index].Add(buckets[index], points[i]) + } + + sum.Identity() + + for i := bucketSize - 1; i > 0; i-- { + sum.Add(sum, buckets[i]) + windows[j].Add(windows[j], sum) + } + } + + p.Identity() + for i := len(windows) - 1; i >= 0; i-- { + for j := 0; j < W; j++ { + p.Double(p) + } + + p.Add(p, windows[i]) + } + return p, nil +} diff --git a/core/curves/p256_bench_test.go b/core/curves/p256_bench_test.go new file mode 100644 index 0000000..2202653 --- /dev/null +++ b/core/curves/p256_bench_test.go @@ -0,0 +1,756 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
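SumOfProducts above is a 4-bit windowed, Pippenger-style bucket multi-scalar multiplication: for each window the points are accumulated into 16 buckets keyed by the corresponding scalar nibble, the buckets are folded with a running suffix sum so that bucket k contributes k times, and the per-window results are combined with four doublings per window. A self-contained toy over plain integers that mirrors the same bucket-and-fold structure (illustration only, not part of this diff):

package main

import "fmt"

// sumOfProducts returns Σ scalars[i]·points[i] for small non-negative
// scalars (< 256 in this toy), using the same 4-bit bucket/fold idea.
func sumOfProducts(points, scalars []int) int {
	const w = 4
	const windows = 8 / w // scalars are < 2^8 here
	acc := 0
	for j := windows - 1; j >= 0; j-- {
		var buckets [1 << w]int
		for i, s := range scalars {
			nibble := (s >> (w * j)) & (1<<w - 1)
			buckets[nibble] += points[i]
		}
		// Suffix-sum fold: bucket k ends up contributing k times.
		sum, windowTotal := 0, 0
		for k := len(buckets) - 1; k > 0; k-- {
			sum += buckets[k]
			windowTotal += sum
		}
		acc = acc<<w + windowTotal // w doublings, then add this window
	}
	return acc
}

func main() {
	fmt.Println(sumOfProducts([]int{3, 5}, []int{20, 7})) // 3·20 + 5·7 = 95
}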
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package curves + +import ( + "crypto/elliptic" + crand "crypto/rand" + "crypto/sha256" + "crypto/subtle" + "fmt" + "io" + "math/big" + "testing" + + "github.com/sonr-io/sonr/crypto/core" +) + +func BenchmarkP256(b *testing.B) { + // 1000 points + + b.Run("1000 point hash - p256", func(b *testing.B) { + b.StopTimer() + points := make([][]byte, 1000) + for i := range points { + t := make([]byte, 32) + _, _ = crand.Read(t) + points[i] = t + } + acc := new(BenchPointP256).Identity() + b.StartTimer() + for _, pt := range points { + acc = acc.Hash(pt) + } + }) + + b.Run("1000 point hash - ct p256", func(b *testing.B) { + b.StopTimer() + points := make([][]byte, 1000) + for i := range points { + t := make([]byte, 32) + _, _ = crand.Read(t) + points[i] = t + } + acc := new(PointP256).Identity() + b.StartTimer() + for _, pt := range points { + acc = acc.Hash(pt) + } + }) + + b.Run("1000 point add - p256", func(b *testing.B) { + b.StopTimer() + points := make([]*BenchPointP256, 1000) + for i := range points { + points[i] = points[i].Random(crand.Reader).(*BenchPointP256) + } + acc := new(BenchPointP256).Identity() + b.StartTimer() + for _, pt := range points { + acc = acc.Add(pt) + } + }) + b.Run("1000 point add - ct p256", func(b *testing.B) { + b.StopTimer() + curve := P256() + points := make([]*PointP256, 1000) + for i := range points { + points[i] = curve.NewIdentityPoint().Random(crand.Reader).(*PointP256) + } + acc := curve.NewIdentityPoint() + b.StartTimer() + for _, pt := range points { + acc = acc.Add(pt) + } + }) + b.Run("1000 point double - p256", func(b *testing.B) { + b.StopTimer() + acc := new(BenchPointP256).Generator() + b.StartTimer() + for i := 0; i < 1000; i++ { + acc = acc.Double() + } + }) + b.Run("1000 point double - ct p256", func(b *testing.B) { + b.StopTimer() + acc := new(PointP256).Generator() + b.StartTimer() + for i := 0; i < 1000; i++ { + acc = acc.Double() + } + }) + b.Run("1000 point multiply - p256", func(b *testing.B) { + b.StopTimer() + scalars := make([]*BenchScalarP256, 1000) + for i := range scalars { + s := new(BenchScalarP256).Random(crand.Reader) + scalars[i] = s.(*BenchScalarP256) + } + acc := new(BenchPointP256).Generator().Mul(new(BenchScalarP256).New(2)) + b.StartTimer() + for _, sc := range scalars { + acc = acc.Mul(sc) + } + }) + b.Run("1000 point multiply - ct p256", func(b *testing.B) { + b.StopTimer() + scalars := make([]*ScalarP256, 1000) + for i := range scalars { + s := new(ScalarP256).Random(crand.Reader) + scalars[i] = s.(*ScalarP256) + } + acc := new(PointP256).Generator() + b.StartTimer() + for _, sc := range scalars { + acc = acc.Mul(sc) + } + }) + b.Run("1000 scalar invert - p256", func(b *testing.B) { + b.StopTimer() + scalars := make([]*BenchScalarP256, 1000) + for i := range scalars { + s := new(BenchScalarP256).Random(crand.Reader) + scalars[i] = s.(*BenchScalarP256) + } + b.StartTimer() + for _, sc := range scalars { + _, _ = sc.Invert() + } + }) + b.Run("1000 scalar invert - ct p256", func(b *testing.B) { + b.StopTimer() + scalars := make([]*ScalarP256, 1000) + for i := range scalars { + s := new(ScalarP256).Random(crand.Reader) + scalars[i] = s.(*ScalarP256) + } + b.StartTimer() + for _, sc := range scalars { + _, _ = sc.Invert() + } + }) + b.Run("1000 scalar sqrt - p256", func(b *testing.B) { + b.StopTimer() + scalars := make([]*BenchScalarP256, 1000) + for i := range scalars { + s := new(BenchScalarP256).Random(crand.Reader) + scalars[i] = s.(*BenchScalarP256) + } + b.StartTimer() + 
for _, sc := range scalars { + _, _ = sc.Sqrt() + } + }) + b.Run("1000 scalar sqrt - ct p256", func(b *testing.B) { + b.StopTimer() + scalars := make([]*ScalarP256, 1000) + for i := range scalars { + s := new(ScalarP256).Random(crand.Reader) + scalars[i] = s.(*ScalarP256) + } + b.StartTimer() + for _, sc := range scalars { + _, _ = sc.Sqrt() + } + }) +} + +type BenchScalarP256 struct { + value *big.Int +} + +type BenchPointP256 struct { + x, y *big.Int +} + +func (s *BenchScalarP256) Random(reader io.Reader) Scalar { + if reader == nil { + return nil + } + var seed [64]byte + _, _ = reader.Read(seed[:]) + return s.Hash(seed[:]) +} + +func (s *BenchScalarP256) Hash(bytes []byte) Scalar { + xmd, err := expandMsgXmd(sha256.New(), bytes, []byte("P256_XMD:SHA-256_SSWU_RO_"), 48) + if err != nil { + return nil + } + v := new(big.Int).SetBytes(xmd) + return &BenchScalarP256{ + value: v.Mod(v, elliptic.P256().Params().N), + } +} + +func (s *BenchScalarP256) Zero() Scalar { + return &BenchScalarP256{ + value: big.NewInt(0), + } +} + +func (s *BenchScalarP256) One() Scalar { + return &BenchScalarP256{ + value: big.NewInt(1), + } +} + +func (s *BenchScalarP256) IsZero() bool { + return subtle.ConstantTimeCompare(s.value.Bytes(), []byte{}) == 1 +} + +func (s *BenchScalarP256) IsOne() bool { + return subtle.ConstantTimeCompare(s.value.Bytes(), []byte{1}) == 1 +} + +func (s *BenchScalarP256) IsOdd() bool { + return s.value.Bit(0) == 1 +} + +func (s *BenchScalarP256) IsEven() bool { + return s.value.Bit(0) == 0 +} + +func (s *BenchScalarP256) New(value int) Scalar { + v := big.NewInt(int64(value)) + if value < 0 { + v.Mod(v, elliptic.P256().Params().N) + } + return &BenchScalarP256{ + value: v, + } +} + +func (s *BenchScalarP256) Cmp(rhs Scalar) int { + r, ok := rhs.(*BenchScalarP256) + if ok { + return s.value.Cmp(r.value) + } else { + return -2 + } +} + +func (s *BenchScalarP256) Square() Scalar { + return &BenchScalarP256{ + value: new(big.Int).Exp(s.value, big.NewInt(2), elliptic.P256().Params().N), + } +} + +func (s *BenchScalarP256) Double() Scalar { + v := new(big.Int).Add(s.value, s.value) + return &BenchScalarP256{ + value: v.Mod(v, elliptic.P256().Params().N), + } +} + +func (s *BenchScalarP256) Invert() (Scalar, error) { + return &BenchScalarP256{ + value: new(big.Int).ModInverse(s.value, elliptic.P256().Params().N), + }, nil +} + +func (s *BenchScalarP256) Sqrt() (Scalar, error) { + return &BenchScalarP256{ + value: new(big.Int).ModSqrt(s.value, elliptic.P256().Params().N), + }, nil +} + +func (s *BenchScalarP256) Cube() Scalar { + return &BenchScalarP256{ + value: new(big.Int).Exp(s.value, big.NewInt(3), elliptic.P256().Params().N), + } +} + +func (s *BenchScalarP256) Add(rhs Scalar) Scalar { + r, ok := rhs.(*BenchScalarP256) + if ok { + v := new(big.Int).Add(s.value, r.value) + return &BenchScalarP256{ + value: v.Mod(v, elliptic.P256().Params().N), + } + } else { + return nil + } +} + +func (s *BenchScalarP256) Sub(rhs Scalar) Scalar { + r, ok := rhs.(*BenchScalarP256) + if ok { + v := new(big.Int).Sub(s.value, r.value) + return &BenchScalarP256{ + value: v.Mod(v, elliptic.P256().Params().N), + } + } else { + return nil + } +} + +func (s *BenchScalarP256) Mul(rhs Scalar) Scalar { + r, ok := rhs.(*BenchScalarP256) + if ok { + v := new(big.Int).Mul(s.value, r.value) + return &BenchScalarP256{ + value: v.Mod(v, elliptic.P256().Params().N), + } + } else { + return nil + } +} + +func (s *BenchScalarP256) MulAdd(y, z Scalar) Scalar { + return s.Mul(y).Add(z) +} + +func (s *BenchScalarP256) 
Div(rhs Scalar) Scalar { + n := elliptic.P256().Params().N + r, ok := rhs.(*BenchScalarP256) + if ok { + v := new(big.Int).ModInverse(r.value, n) + v.Mul(v, s.value) + return &BenchScalarP256{ + value: v.Mod(v, n), + } + } else { + return nil + } +} + +func (s *BenchScalarP256) Neg() Scalar { + z := new(big.Int).Neg(s.value) + return &BenchScalarP256{ + value: z.Mod(z, elliptic.P256().Params().N), + } +} + +func (s *BenchScalarP256) SetBigInt(v *big.Int) (Scalar, error) { + if v == nil { + return nil, fmt.Errorf("invalid value") + } + t := new(big.Int).Mod(v, elliptic.P256().Params().N) + if t.Cmp(v) != 0 { + return nil, fmt.Errorf("invalid value") + } + return &BenchScalarP256{ + value: t, + }, nil +} + +func (s *BenchScalarP256) BigInt() *big.Int { + return new(big.Int).Set(s.value) +} + +func (s *BenchScalarP256) Bytes() []byte { + var out [32]byte + return s.value.FillBytes(out[:]) +} + +func (s *BenchScalarP256) SetBytes(bytes []byte) (Scalar, error) { + value := new(big.Int).SetBytes(bytes) + t := new(big.Int).Mod(value, elliptic.P256().Params().N) + if t.Cmp(value) != 0 { + return nil, fmt.Errorf("invalid byte sequence") + } + return &BenchScalarP256{ + value: t, + }, nil +} + +func (s *BenchScalarP256) SetBytesWide(bytes []byte) (Scalar, error) { + if len(bytes) < 32 || len(bytes) > 128 { + return nil, fmt.Errorf("invalid byte sequence") + } + value := new(big.Int).SetBytes(bytes) + value.Mod(value, elliptic.P256().Params().N) + return &BenchScalarP256{ + value, + }, nil +} + +func (s *BenchScalarP256) Point() Point { + return new(BenchPointP256).Identity() +} + +func (s *BenchScalarP256) Clone() Scalar { + return &BenchScalarP256{ + value: new(big.Int).Set(s.value), + } +} + +func (s *BenchScalarP256) MarshalBinary() ([]byte, error) { + return scalarMarshalBinary(s) +} + +func (s *BenchScalarP256) UnmarshalBinary(input []byte) error { + sc, err := scalarUnmarshalBinary(input) + if err != nil { + return err + } + ss, ok := sc.(*BenchScalarP256) + if !ok { + return fmt.Errorf("invalid scalar") + } + s.value = ss.value + return nil +} + +func (s *BenchScalarP256) MarshalText() ([]byte, error) { + return scalarMarshalText(s) +} + +func (s *BenchScalarP256) UnmarshalText(input []byte) error { + sc, err := scalarUnmarshalText(input) + if err != nil { + return err + } + ss, ok := sc.(*BenchScalarP256) + if !ok { + return fmt.Errorf("invalid scalar") + } + s.value = ss.value + return nil +} + +func (s *BenchScalarP256) MarshalJSON() ([]byte, error) { + return scalarMarshalJson(s) +} + +func (s *BenchScalarP256) UnmarshalJSON(input []byte) error { + sc, err := scalarUnmarshalJson(input) + if err != nil { + return err + } + S, ok := sc.(*BenchScalarP256) + if !ok { + return fmt.Errorf("invalid type") + } + s.value = S.value + return nil +} + +func (p *BenchPointP256) Random(reader io.Reader) Point { + var seed [64]byte + _, _ = reader.Read(seed[:]) + return p.Hash(seed[:]) +} + +func (p *BenchPointP256) Hash(bytes []byte) Point { + curve := elliptic.P256() + + domain := []byte("P256_XMD:SHA-256_SSWU_RO_") + uniformBytes, _ := expandMsgXmd(sha256.New(), bytes, domain, 96) + + u0 := new(big.Int).SetBytes(uniformBytes[:48]) + u1 := new(big.Int).SetBytes(uniformBytes[48:]) + + u0.Mod(u0, curve.Params().P) + u1.Mod(u1, curve.Params().P) + + ssParams := p256SswuParams() + q0x, q0y := osswu3mod4(u0, ssParams) + q1x, q1y := osswu3mod4(u1, ssParams) + + x, y := curve.Add(q0x, q0y, q1x, q1y) + + return &BenchPointP256{ + x, y, + } +} + +func (p *BenchPointP256) Identity() Point { + return 
&BenchPointP256{ + x: big.NewInt(0), y: big.NewInt(0), + } +} + +func (p *BenchPointP256) Generator() Point { + curve := elliptic.P256().Params() + return &BenchPointP256{ + x: new(big.Int).Set(curve.Gx), + y: new(big.Int).Set(curve.Gy), + } +} + +func (p *BenchPointP256) IsIdentity() bool { + x := core.ConstantTimeEqByte(p.x, core.Zero) + y := core.ConstantTimeEqByte(p.y, core.Zero) + return (x & y) == 1 +} + +func (p *BenchPointP256) IsNegative() bool { + return p.y.Bit(0) == 1 +} + +func (p *BenchPointP256) IsOnCurve() bool { + return elliptic.P256().IsOnCurve(p.x, p.y) +} + +func (p *BenchPointP256) Double() Point { + curve := elliptic.P256() + x, y := curve.Double(p.x, p.y) + return &BenchPointP256{x, y} +} + +func (p *BenchPointP256) Scalar() Scalar { + return new(BenchScalarP256).Zero() +} + +func (p *BenchPointP256) Neg() Point { + y := new(big.Int).Sub(elliptic.P256().Params().P, p.y) + y.Mod(y, elliptic.P256().Params().P) + return &BenchPointP256{x: p.x, y: y} +} + +func (p *BenchPointP256) Add(rhs Point) Point { + if rhs == nil { + return nil + } + r, ok := rhs.(*BenchPointP256) + if ok { + x, y := elliptic.P256().Add(p.x, p.y, r.x, r.y) + return &BenchPointP256{x, y} + } else { + return nil + } +} + +func (p *BenchPointP256) Sub(rhs Point) Point { + if rhs == nil { + return nil + } + r, ok := rhs.Neg().(*BenchPointP256) + if ok { + x, y := elliptic.P256().Add(p.x, p.y, r.x, r.y) + return &BenchPointP256{x, y} + } else { + return nil + } +} + +func (p *BenchPointP256) Mul(rhs Scalar) Point { + if rhs == nil { + return nil + } + r, ok := rhs.(*BenchScalarP256) + if ok { + x, y := elliptic.P256().ScalarMult(p.x, p.y, r.value.Bytes()) + return &BenchPointP256{x, y} + } else { + return nil + } +} + +func (p *BenchPointP256) Equal(rhs Point) bool { + r, ok := rhs.(*BenchPointP256) + if ok { + x := core.ConstantTimeEqByte(p.x, r.x) + y := core.ConstantTimeEqByte(p.y, r.y) + return (x & y) == 1 + } else { + return false + } +} + +func (p *BenchPointP256) Set(x, y *big.Int) (Point, error) { + // check is identity or on curve + xx := subtle.ConstantTimeCompare(x.Bytes(), []byte{}) + yy := subtle.ConstantTimeCompare(y.Bytes(), []byte{}) + // Checks are constant time + onCurve := elliptic.P256().IsOnCurve(x, y) + if !onCurve && (xx&yy) != 1 { + return nil, fmt.Errorf("invalid coordinates") + } + x = new(big.Int).Set(x) + y = new(big.Int).Set(y) + return &BenchPointP256{x, y}, nil +} + +func (p *BenchPointP256) ToAffineCompressed() []byte { + var x [33]byte + x[0] = byte(2) + x[0] |= byte(p.y.Bit(0)) + p.x.FillBytes(x[1:]) + return x[:] +} + +func (p *BenchPointP256) ToAffineUncompressed() []byte { + var out [65]byte + out[0] = byte(4) + p.x.FillBytes(out[1:33]) + p.y.FillBytes(out[33:]) + return out[:] +} + +func (p *BenchPointP256) FromAffineCompressed(bytes []byte) (Point, error) { + if len(bytes) != 33 { + return nil, fmt.Errorf("invalid byte sequence") + } + sign := int(bytes[0]) + if sign != 2 && sign != 3 { + return nil, fmt.Errorf("invalid sign byte") + } + sign &= 0x1 + + x := new(big.Int).SetBytes(bytes[1:]) + rhs := rhsP256(x, elliptic.P256().Params()) + // test that rhs is quadratic residue + // if not, then this Point is at infinity + y := new(big.Int).ModSqrt(rhs, elliptic.P256().Params().P) + if y != nil { + // fix the sign + if int(y.Bit(0)) != sign { + y.Neg(y) + y.Mod(y, elliptic.P256().Params().P) + } + } else { + x = new(big.Int) + y = new(big.Int) + } + return &BenchPointP256{ + x, y, + }, nil +} + +func (p *BenchPointP256) FromAffineUncompressed(bytes []byte) (Point, 
error) { + if len(bytes) != 65 { + return nil, fmt.Errorf("invalid byte sequence") + } + if bytes[0] != 4 { + return nil, fmt.Errorf("invalid sign byte") + } + x := new(big.Int).SetBytes(bytes[1:33]) + y := new(big.Int).SetBytes(bytes[33:]) + return &BenchPointP256{x, y}, nil +} + +func (p *BenchPointP256) CurveName() string { + return elliptic.P256().Params().Name +} + +func (p *BenchPointP256) SumOfProducts(points []Point, scalars []Scalar) Point { + nScalars := make([]*big.Int, len(scalars)) + for i, sc := range scalars { + s, ok := sc.(*BenchScalarP256) + if !ok { + return nil + } + nScalars[i] = s.value + } + return sumOfProductsPippenger(points, nScalars) +} + +func (p *BenchPointP256) X() *big.Int { + return new(big.Int).Set(p.x) +} + +func (p *BenchPointP256) Y() *big.Int { + return new(big.Int).Set(p.y) +} + +func (p *BenchPointP256) Params() *elliptic.CurveParams { + return elliptic.P256().Params() +} + +func (p *BenchPointP256) MarshalBinary() ([]byte, error) { + return pointMarshalBinary(p) +} + +func (p *BenchPointP256) UnmarshalBinary(input []byte) error { + pt, err := pointUnmarshalBinary(input) + if err != nil { + return err + } + ppt, ok := pt.(*BenchPointP256) + if !ok { + return fmt.Errorf("invalid point") + } + p.x = ppt.x + p.y = ppt.y + return nil +} + +func (p *BenchPointP256) MarshalText() ([]byte, error) { + return pointMarshalText(p) +} + +func (p *BenchPointP256) UnmarshalText(input []byte) error { + pt, err := pointUnmarshalText(input) + if err != nil { + return err + } + ppt, ok := pt.(*BenchPointP256) + if !ok { + return fmt.Errorf("invalid point") + } + p.x = ppt.x + p.y = ppt.y + return nil +} + +func (p *BenchPointP256) MarshalJSON() ([]byte, error) { + return pointMarshalJSON(p) +} + +func (p *BenchPointP256) UnmarshalJSON(input []byte) error { + pt, err := pointUnmarshalJSON(input) + if err != nil { + return err + } + P, ok := pt.(*BenchPointP256) + if !ok { + return fmt.Errorf("invalid type") + } + p.x = P.x + p.y = P.y + return nil +} + +// From https://datatracker.ietf.org/doc/html/draft-irtf-cfrg-hash-to-curve-11#section-8.2 +func p256SswuParams() *sswuParams { + params := elliptic.P256().Params() + + // c1 = (q - 3) / 4 + c1 := new(big.Int).Set(params.P) + c1.Sub(c1, big.NewInt(3)) + c1.Rsh(c1, 2) + + a := big.NewInt(-3) + a.Mod(a, params.P) + b := new(big.Int).Set(params.B) + z := big.NewInt(-10) + z.Mod(z, params.P) + // sqrt(-Z^3) + zTmp := new(big.Int).Exp(z, big.NewInt(3), nil) + zTmp = zTmp.Neg(zTmp) + zTmp.Mod(zTmp, params.P) + c2 := new(big.Int).ModSqrt(zTmp, params.P) + + return &sswuParams{ + params, c1, c2, a, b, z, + } +} + +// rhs of the curve equation +func rhsP256(x *big.Int, params *elliptic.CurveParams) *big.Int { + f := NewField(params.P) + r := f.NewElement(x) + r2 := r.Mul(r) + + // x^3-3x+B + a := r.Mul(f.NewElement(big.NewInt(3))) + r = r2.Mul(r) + return r.Add(a.Neg()).Add(f.NewElement(params.B)).Value +} diff --git a/core/curves/p256_curve.go b/core/curves/p256_curve.go new file mode 100644 index 0000000..17d2c0a --- /dev/null +++ b/core/curves/p256_curve.go @@ -0,0 +1,670 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
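The benchmarks above run each operation twice: once on the big.Int-backed Bench* reference types (variable-time math/big and crypto/elliptic arithmetic) and once on the constant-time ScalarP256/PointP256 wrappers defined in p256_curve.go below; note that each b.Run body walks a fixed 1000-element workload rather than looping over b.N. A sketch of an additional benchmark in the same style, exercising the compressed-point round trip of the constant-time type, assuming the Point interface exposes the ToAffineCompressed/FromAffineCompressed methods implemented below (the benchmark itself is hypothetical):

package curves

import (
	crand "crypto/rand"
	"testing"
)

func BenchmarkP256CompressedRoundTrip(b *testing.B) {
	// Random point on P-256, then its 33-byte compressed encoding.
	p := new(PointP256).Generator().Mul(new(ScalarP256).Random(crand.Reader))
	enc := p.ToAffineCompressed()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		if _, err := new(PointP256).FromAffineCompressed(enc); err != nil {
			b.Fatal(err)
		}
	}
}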
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package curves + +import ( + "crypto/elliptic" + "fmt" + "io" + "math/big" + "sync" + + "github.com/sonr-io/sonr/crypto/core/curves/native" + p256n "github.com/sonr-io/sonr/crypto/core/curves/native/p256" + "github.com/sonr-io/sonr/crypto/core/curves/native/p256/fp" + "github.com/sonr-io/sonr/crypto/core/curves/native/p256/fq" + "github.com/sonr-io/sonr/crypto/internal" +) + +var ( + oldP256InitOnce sync.Once + oldP256 NistP256 +) + +type NistP256 struct { + *elliptic.CurveParams +} + +func oldP256InitAll() { + curve := elliptic.P256() + oldP256.CurveParams = curve.Params() + oldP256.P = curve.Params().P + oldP256.N = curve.Params().N + oldP256.Gx = curve.Params().Gx + oldP256.Gy = curve.Params().Gy + oldP256.B = curve.Params().B + oldP256.BitSize = curve.Params().BitSize + oldP256.Name = curve.Params().Name +} + +func NistP256Curve() *NistP256 { + oldP256InitOnce.Do(oldP256InitAll) + return &oldP256 +} + +func (curve *NistP256) Params() *elliptic.CurveParams { + return curve.CurveParams +} + +func (curve *NistP256) IsOnCurve(x, y *big.Int) bool { + _, err := p256n.P256PointNew().SetBigInt(x, y) + return err == nil +} + +func (curve *NistP256) Add(x1, y1, x2, y2 *big.Int) (*big.Int, *big.Int) { + p1, err := p256n.P256PointNew().SetBigInt(x1, y1) + if err != nil { + return nil, nil + } + p2, err := p256n.P256PointNew().SetBigInt(x2, y2) + if err != nil { + return nil, nil + } + return p1.Add(p1, p2).BigInt() +} + +func (curve *NistP256) Double(x1, y1 *big.Int) (*big.Int, *big.Int) { + p1, err := p256n.P256PointNew().SetBigInt(x1, y1) + if err != nil { + return nil, nil + } + return p1.Double(p1).BigInt() +} + +func (curve *NistP256) ScalarMul(Bx, By *big.Int, k []byte) (*big.Int, *big.Int) { + p1, err := p256n.P256PointNew().SetBigInt(Bx, By) + if err != nil { + return nil, nil + } + var bytes [32]byte + copy(bytes[:], internal.ReverseScalarBytes(k)) + s, err := fq.P256FqNew().SetBytes(&bytes) + if err != nil { + return nil, nil + } + return p1.Mul(p1, s).BigInt() +} + +func (curve *NistP256) ScalarBaseMult(k []byte) (*big.Int, *big.Int) { + var bytes [32]byte + copy(bytes[:], internal.ReverseScalarBytes(k)) + s, err := fq.P256FqNew().SetBytes(&bytes) + if err != nil { + return nil, nil + } + p1 := p256n.P256PointNew().Generator() + return p1.Mul(p1, s).BigInt() +} + +type ScalarP256 struct { + value *native.Field +} + +type PointP256 struct { + value *native.EllipticPoint +} + +func (s *ScalarP256) Random(reader io.Reader) Scalar { + if reader == nil { + return nil + } + var seed [64]byte + _, _ = reader.Read(seed[:]) + return s.Hash(seed[:]) +} + +func (s *ScalarP256) Hash(bytes []byte) Scalar { + dst := []byte("P256_XMD:SHA-256_SSWU_RO_") + xmd := native.ExpandMsgXmd(native.EllipticPointHasherSha256(), bytes, dst, 48) + var t [64]byte + copy(t[:48], internal.ReverseScalarBytes(xmd)) + + return &ScalarP256{ + value: fq.P256FqNew().SetBytesWide(&t), + } +} + +func (s *ScalarP256) Zero() Scalar { + return &ScalarP256{ + value: fq.P256FqNew().SetZero(), + } +} + +func (s *ScalarP256) One() Scalar { + return &ScalarP256{ + value: fq.P256FqNew().SetOne(), + } +} + +func (s *ScalarP256) IsZero() bool { + return s.value.IsZero() == 1 +} + +func (s *ScalarP256) IsOne() bool { + return s.value.IsOne() == 1 +} + +func (s *ScalarP256) IsOdd() bool { + return s.value.Bytes()[0]&1 == 1 +} + +func (s *ScalarP256) IsEven() bool { + return s.value.Bytes()[0]&1 == 0 +} + +func (s *ScalarP256) New(value int) Scalar { + t := fq.P256FqNew() + v := 
big.NewInt(int64(value)) + if value < 0 { + v.Mod(v, t.Params.BiModulus) + } + return &ScalarP256{ + value: t.SetBigInt(v), + } +} + +func (s *ScalarP256) Cmp(rhs Scalar) int { + r, ok := rhs.(*ScalarP256) + if ok { + return s.value.Cmp(r.value) + } else { + return -2 + } +} + +func (s *ScalarP256) Square() Scalar { + return &ScalarP256{ + value: fq.P256FqNew().Square(s.value), + } +} + +func (s *ScalarP256) Double() Scalar { + return &ScalarP256{ + value: fq.P256FqNew().Double(s.value), + } +} + +func (s *ScalarP256) Invert() (Scalar, error) { + value, wasInverted := fq.P256FqNew().Invert(s.value) + if !wasInverted { + return nil, fmt.Errorf("inverse doesn't exist") + } + return &ScalarP256{ + value, + }, nil +} + +func (s *ScalarP256) Sqrt() (Scalar, error) { + value, wasSquare := fq.P256FqNew().Sqrt(s.value) + if !wasSquare { + return nil, fmt.Errorf("not a square") + } + return &ScalarP256{ + value, + }, nil +} + +func (s *ScalarP256) Cube() Scalar { + value := fq.P256FqNew().Mul(s.value, s.value) + value.Mul(value, s.value) + return &ScalarP256{ + value, + } +} + +func (s *ScalarP256) Add(rhs Scalar) Scalar { + r, ok := rhs.(*ScalarP256) + if ok { + return &ScalarP256{ + value: fq.P256FqNew().Add(s.value, r.value), + } + } else { + return nil + } +} + +func (s *ScalarP256) Sub(rhs Scalar) Scalar { + r, ok := rhs.(*ScalarP256) + if ok { + return &ScalarP256{ + value: fq.P256FqNew().Sub(s.value, r.value), + } + } else { + return nil + } +} + +func (s *ScalarP256) Mul(rhs Scalar) Scalar { + r, ok := rhs.(*ScalarP256) + if ok { + return &ScalarP256{ + value: fq.P256FqNew().Mul(s.value, r.value), + } + } else { + return nil + } +} + +func (s *ScalarP256) MulAdd(y, z Scalar) Scalar { + return s.Mul(y).Add(z) +} + +func (s *ScalarP256) Div(rhs Scalar) Scalar { + r, ok := rhs.(*ScalarP256) + if ok { + v, wasInverted := fq.P256FqNew().Invert(r.value) + if !wasInverted { + return nil + } + v.Mul(v, s.value) + return &ScalarP256{value: v} + } else { + return nil + } +} + +func (s *ScalarP256) Neg() Scalar { + return &ScalarP256{ + value: fq.P256FqNew().Neg(s.value), + } +} + +func (s *ScalarP256) SetBigInt(v *big.Int) (Scalar, error) { + if v == nil { + return nil, fmt.Errorf("'v' cannot be nil") + } + value := fq.P256FqNew().SetBigInt(v) + return &ScalarP256{ + value, + }, nil +} + +func (s *ScalarP256) BigInt() *big.Int { + return s.value.BigInt() +} + +func (s *ScalarP256) Bytes() []byte { + t := s.value.Bytes() + return internal.ReverseScalarBytes(t[:]) +} + +func (s *ScalarP256) SetBytes(bytes []byte) (Scalar, error) { + if len(bytes) != 32 { + return nil, fmt.Errorf("invalid length") + } + var seq [32]byte + copy(seq[:], internal.ReverseScalarBytes(bytes)) + value, err := fq.P256FqNew().SetBytes(&seq) + if err != nil { + return nil, err + } + return &ScalarP256{ + value, + }, nil +} + +func (s *ScalarP256) SetBytesWide(bytes []byte) (Scalar, error) { + if len(bytes) != 64 { + return nil, fmt.Errorf("invalid length") + } + var seq [64]byte + copy(seq[:], bytes) + return &ScalarP256{ + value: fq.P256FqNew().SetBytesWide(&seq), + }, nil +} + +func (s *ScalarP256) Point() Point { + return new(PointP256).Identity() +} + +func (s *ScalarP256) Clone() Scalar { + return &ScalarP256{ + value: fq.P256FqNew().Set(s.value), + } +} + +func (s *ScalarP256) MarshalBinary() ([]byte, error) { + return scalarMarshalBinary(s) +} + +func (s *ScalarP256) UnmarshalBinary(input []byte) error { + sc, err := scalarUnmarshalBinary(input) + if err != nil { + return err + } + ss, ok := sc.(*ScalarP256) + if !ok { + 
return fmt.Errorf("invalid scalar") + } + s.value = ss.value + return nil +} + +func (s *ScalarP256) MarshalText() ([]byte, error) { + return scalarMarshalText(s) +} + +func (s *ScalarP256) UnmarshalText(input []byte) error { + sc, err := scalarUnmarshalText(input) + if err != nil { + return err + } + ss, ok := sc.(*ScalarP256) + if !ok { + return fmt.Errorf("invalid scalar") + } + s.value = ss.value + return nil +} + +func (s *ScalarP256) MarshalJSON() ([]byte, error) { + return scalarMarshalJson(s) +} + +func (s *ScalarP256) UnmarshalJSON(input []byte) error { + sc, err := scalarUnmarshalJson(input) + if err != nil { + return err + } + S, ok := sc.(*ScalarP256) + if !ok { + return fmt.Errorf("invalid type") + } + s.value = S.value + return nil +} + +func (p *PointP256) Random(reader io.Reader) Point { + var seed [64]byte + _, _ = reader.Read(seed[:]) + return p.Hash(seed[:]) +} + +func (p *PointP256) Hash(bytes []byte) Point { + value, err := p256n.P256PointNew().Hash(bytes, native.EllipticPointHasherSha256()) + // Note: Interface constraint prevents returning error directly + // Returning nil on error for now, caller should check for nil + if err != nil { + // Log error for debugging if logger is available + return nil + } + + return &PointP256{value} +} + +func (p *PointP256) Identity() Point { + return &PointP256{ + value: p256n.P256PointNew().Identity(), + } +} + +func (p *PointP256) Generator() Point { + return &PointP256{ + value: p256n.P256PointNew().Generator(), + } +} + +func (p *PointP256) IsIdentity() bool { + return p.value.IsIdentity() +} + +func (p *PointP256) IsNegative() bool { + return p.value.GetY().Value[0]&1 == 1 +} + +func (p *PointP256) IsOnCurve() bool { + return p.value.IsOnCurve() +} + +func (p *PointP256) Double() Point { + value := p256n.P256PointNew().Double(p.value) + return &PointP256{value} +} + +func (p *PointP256) Scalar() Scalar { + return new(ScalarP256).Zero() +} + +func (p *PointP256) Neg() Point { + value := p256n.P256PointNew().Neg(p.value) + return &PointP256{value} +} + +func (p *PointP256) Add(rhs Point) Point { + if rhs == nil { + return nil + } + r, ok := rhs.(*PointP256) + if ok { + value := p256n.P256PointNew().Add(p.value, r.value) + return &PointP256{value} + } else { + return nil + } +} + +func (p *PointP256) Sub(rhs Point) Point { + if rhs == nil { + return nil + } + r, ok := rhs.(*PointP256) + if ok { + value := p256n.P256PointNew().Sub(p.value, r.value) + return &PointP256{value} + } else { + return nil + } +} + +func (p *PointP256) Mul(rhs Scalar) Point { + if rhs == nil { + return nil + } + r, ok := rhs.(*ScalarP256) + if ok { + value := p256n.P256PointNew().Mul(p.value, r.value) + return &PointP256{value} + } else { + return nil + } +} + +func (p *PointP256) Equal(rhs Point) bool { + r, ok := rhs.(*PointP256) + if ok { + return p.value.Equal(r.value) == 1 + } else { + return false + } +} + +func (p *PointP256) Set(x, y *big.Int) (Point, error) { + value, err := p256n.P256PointNew().SetBigInt(x, y) + if err != nil { + return nil, err + } + return &PointP256{value}, nil +} + +func (p *PointP256) ToAffineCompressed() []byte { + var x [33]byte + x[0] = byte(2) + + t := p256n.P256PointNew().ToAffine(p.value) + + x[0] |= t.Y.Bytes()[0] & 1 + + xBytes := t.X.Bytes() + copy(x[1:], internal.ReverseScalarBytes(xBytes[:])) + return x[:] +} + +func (p *PointP256) ToAffineUncompressed() []byte { + var out [65]byte + out[0] = byte(4) + t := p256n.P256PointNew().ToAffine(p.value) + arr := t.X.Bytes() + copy(out[1:33], 
internal.ReverseScalarBytes(arr[:])) + arr = t.Y.Bytes() + copy(out[33:], internal.ReverseScalarBytes(arr[:])) + return out[:] +} + +func (p *PointP256) FromAffineCompressed(bytes []byte) (Point, error) { + var raw [native.FieldBytes]byte + if len(bytes) != 33 { + return nil, fmt.Errorf("invalid byte sequence") + } + sign := int(bytes[0]) + if sign != 2 && sign != 3 { + return nil, fmt.Errorf("invalid sign byte") + } + sign &= 0x1 + + copy(raw[:], internal.ReverseScalarBytes(bytes[1:])) + x, err := fp.P256FpNew().SetBytes(&raw) + if err != nil { + return nil, err + } + + value := p256n.P256PointNew().Identity() + rhs := fp.P256FpNew() + p.value.Arithmetic.RhsEq(rhs, x) + // test that rhs is quadratic residue + // if not, then this Point is at infinity + y, wasQr := fp.P256FpNew().Sqrt(rhs) + if wasQr { + // fix the sign + sigY := int(y.Bytes()[0] & 1) + if sigY != sign { + y.Neg(y) + } + value.X = x + value.Y = y + value.Z.SetOne() + } + return &PointP256{value}, nil +} + +func (p *PointP256) FromAffineUncompressed(bytes []byte) (Point, error) { + var arr [native.FieldBytes]byte + if len(bytes) != 65 { + return nil, fmt.Errorf("invalid byte sequence") + } + if bytes[0] != 4 { + return nil, fmt.Errorf("invalid sign byte") + } + + copy(arr[:], internal.ReverseScalarBytes(bytes[1:33])) + x, err := fp.P256FpNew().SetBytes(&arr) + if err != nil { + return nil, err + } + copy(arr[:], internal.ReverseScalarBytes(bytes[33:])) + y, err := fp.P256FpNew().SetBytes(&arr) + if err != nil { + return nil, err + } + value := p256n.P256PointNew() + value.X = x + value.Y = y + value.Z.SetOne() + return &PointP256{value}, nil +} + +func (p *PointP256) CurveName() string { + return elliptic.P256().Params().Name +} + +func (p *PointP256) SumOfProducts(points []Point, scalars []Scalar) Point { + nPoints := make([]*native.EllipticPoint, len(points)) + nScalars := make([]*native.Field, len(scalars)) + for i, pt := range points { + ptv, ok := pt.(*PointP256) + if !ok { + return nil + } + nPoints[i] = ptv.value + } + for i, sc := range scalars { + s, ok := sc.(*ScalarP256) + if !ok { + return nil + } + nScalars[i] = s.value + } + value := p256n.P256PointNew() + _, err := value.SumOfProducts(nPoints, nScalars) + if err != nil { + return nil + } + return &PointP256{value} +} + +func (p *PointP256) X() *native.Field { + return p.value.GetX() +} + +func (p *PointP256) Y() *native.Field { + return p.value.GetY() +} + +func (p *PointP256) Params() *elliptic.CurveParams { + return elliptic.P256().Params() +} + +func (p *PointP256) MarshalBinary() ([]byte, error) { + return pointMarshalBinary(p) +} + +func (p *PointP256) UnmarshalBinary(input []byte) error { + pt, err := pointUnmarshalBinary(input) + if err != nil { + return err + } + ppt, ok := pt.(*PointP256) + if !ok { + return fmt.Errorf("invalid point") + } + p.value = ppt.value + return nil +} + +func (p *PointP256) MarshalText() ([]byte, error) { + return pointMarshalText(p) +} + +func (p *PointP256) UnmarshalText(input []byte) error { + pt, err := pointUnmarshalText(input) + if err != nil { + return err + } + ppt, ok := pt.(*PointP256) + if !ok { + return fmt.Errorf("invalid point") + } + p.value = ppt.value + return nil +} + +func (p *PointP256) MarshalJSON() ([]byte, error) { + return pointMarshalJSON(p) +} + +func (p *PointP256) UnmarshalJSON(input []byte) error { + pt, err := pointUnmarshalJSON(input) + if err != nil { + return err + } + P, ok := pt.(*PointP256) + if !ok { + return fmt.Errorf("invalid type") + } + p.value = P.value + return nil +} diff --git 
a/core/curves/p256_curve_test.go b/core/curves/p256_curve_test.go new file mode 100755 index 0000000..962ec20 --- /dev/null +++ b/core/curves/p256_curve_test.go @@ -0,0 +1,556 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package curves + +import ( + "crypto/elliptic" + crand "crypto/rand" + "math/big" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestScalarP256Random(t *testing.T) { + p256 := P256() + sc := p256.Scalar.Random(testRng()) + s, ok := sc.(*ScalarP256) + require.True(t, ok) + expected := bhex("02ca487e56f8cb1ad9666027b2282d1159792d39a5e05f0bc696f85de5acc6d4") + require.Equal(t, s.value.BigInt(), expected) + // Try 10 random values + for i := 0; i < 10; i++ { + sc := p256.Scalar.Random(crand.Reader) + _, ok := sc.(*ScalarP256) + require.True(t, ok) + require.True(t, !sc.IsZero()) + } +} + +func TestScalarP256Hash(t *testing.T) { + var b [32]byte + p256 := P256() + sc := p256.Scalar.Hash(b[:]) + s, ok := sc.(*ScalarP256) + require.True(t, ok) + expected := bhex("43ca74a23022b221e60c8499781afff2a2776ec23362712df90a3080b5557f90") + require.Equal(t, s.value.BigInt(), expected) +} + +func TestScalarP256Zero(t *testing.T) { + p256 := P256() + sc := p256.Scalar.Zero() + require.True(t, sc.IsZero()) + require.True(t, sc.IsEven()) +} + +func TestScalarP256One(t *testing.T) { + p256 := P256() + sc := p256.Scalar.One() + require.True(t, sc.IsOne()) + require.True(t, sc.IsOdd()) +} + +func TestScalarP256New(t *testing.T) { + p256 := P256() + three := p256.Scalar.New(3) + require.True(t, three.IsOdd()) + four := p256.Scalar.New(4) + require.True(t, four.IsEven()) + neg1 := p256.Scalar.New(-1) + require.True(t, neg1.IsEven()) + neg2 := p256.Scalar.New(-2) + require.True(t, neg2.IsOdd()) +} + +func TestScalarP256Square(t *testing.T) { + p256 := P256() + three := p256.Scalar.New(3) + nine := p256.Scalar.New(9) + require.Equal(t, three.Square().Cmp(nine), 0) +} + +func TestScalarP256Cube(t *testing.T) { + p256 := P256() + three := p256.Scalar.New(3) + twentySeven := p256.Scalar.New(27) + require.Equal(t, three.Cube().Cmp(twentySeven), 0) +} + +func TestScalarP256Double(t *testing.T) { + p256 := P256() + three := p256.Scalar.New(3) + six := p256.Scalar.New(6) + require.Equal(t, three.Double().Cmp(six), 0) +} + +func TestScalarP256Neg(t *testing.T) { + p256 := P256() + one := p256.Scalar.One() + neg1 := p256.Scalar.New(-1) + require.Equal(t, one.Neg().Cmp(neg1), 0) + lotsOfThrees := p256.Scalar.New(333333) + expected := p256.Scalar.New(-333333) + require.Equal(t, lotsOfThrees.Neg().Cmp(expected), 0) +} + +func TestScalarP256Invert(t *testing.T) { + p256 := P256() + nine := p256.Scalar.New(9) + actual, _ := nine.Invert() + sa, _ := actual.(*ScalarP256) + bn := bhex("8e38e38daaaaaaab38e38e38e38e38e368f2197ceb0d1f2d6af570a536e1bf66") + expected, err := p256.Scalar.SetBigInt(bn) + require.NoError(t, err) + require.Equal(t, sa.Cmp(expected), 0) +} + +func TestScalarP256Sqrt(t *testing.T) { + p256 := P256() + nine := p256.Scalar.New(9) + actual, err := nine.Sqrt() + sa, _ := actual.(*ScalarP256) + expected := p256.Scalar.New(3) + require.NoError(t, err) + require.Equal(t, sa.Cmp(expected), 0) +} + +func TestScalarP256Add(t *testing.T) { + p256 := P256() + nine := p256.Scalar.New(9) + six := p256.Scalar.New(6) + fifteen := nine.Add(six) + require.NotNil(t, fifteen) + expected := p256.Scalar.New(15) + require.Equal(t, expected.Cmp(fifteen), 0) + n := new(big.Int).Set(elliptic.P256().Params().N) + n.Sub(n, big.NewInt(3)) + + 
upper, err := p256.Scalar.SetBigInt(n) + require.NoError(t, err) + actual := upper.Add(nine) + require.NotNil(t, actual) + require.Equal(t, actual.Cmp(six), 0) +} + +func TestScalarP256Sub(t *testing.T) { + p256 := P256() + nine := p256.Scalar.New(9) + six := p256.Scalar.New(6) + n := new(big.Int).Set(elliptic.P256().Params().N) + n.Sub(n, big.NewInt(3)) + + expected, err := p256.Scalar.SetBigInt(n) + require.NoError(t, err) + actual := six.Sub(nine) + require.Equal(t, expected.Cmp(actual), 0) + + actual = nine.Sub(six) + require.Equal(t, actual.Cmp(p256.Scalar.New(3)), 0) +} + +func TestScalarP256Mul(t *testing.T) { + p256 := P256() + nine := p256.Scalar.New(9) + six := p256.Scalar.New(6) + actual := nine.Mul(six) + require.Equal(t, actual.Cmp(p256.Scalar.New(54)), 0) + n := new(big.Int).Set(elliptic.P256().Params().N) + n.Sub(n, big.NewInt(1)) + upper, err := p256.Scalar.SetBigInt(n) + require.NoError(t, err) + require.Equal(t, upper.Mul(upper).Cmp(p256.Scalar.New(1)), 0) +} + +func TestScalarP256Div(t *testing.T) { + p256 := P256() + nine := p256.Scalar.New(9) + actual := nine.Div(nine) + require.Equal(t, actual.Cmp(p256.Scalar.New(1)), 0) + require.Equal(t, p256.Scalar.New(54).Div(nine).Cmp(p256.Scalar.New(6)), 0) +} + +func TestScalarP256Serialize(t *testing.T) { + p256 := P256() + sc := p256.Scalar.New(255) + sequence := sc.Bytes() + require.Equal(t, len(sequence), 32) + require.Equal( + t, + sequence, + []byte{ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0xff, + }, + ) + ret, err := p256.Scalar.SetBytes(sequence) + require.NoError(t, err) + require.Equal(t, ret.Cmp(sc), 0) + + // Try 10 random values + for i := 0; i < 10; i++ { + sc = p256.Scalar.Random(crand.Reader) + sequence = sc.Bytes() + require.Equal(t, len(sequence), 32) + ret, err = p256.Scalar.SetBytes(sequence) + require.NoError(t, err) + require.Equal(t, ret.Cmp(sc), 0) + } +} + +func TestScalarP256Nil(t *testing.T) { + p256 := P256() + one := p256.Scalar.New(1) + require.Nil(t, one.Add(nil)) + require.Nil(t, one.Sub(nil)) + require.Nil(t, one.Mul(nil)) + require.Nil(t, one.Div(nil)) + require.Nil(t, p256.Scalar.Random(nil)) + require.Equal(t, one.Cmp(nil), -2) + _, err := p256.Scalar.SetBigInt(nil) + require.Error(t, err) +} + +func TestPointP256Random(t *testing.T) { + p256 := P256() + sc := p256.Point.Random(testRng()) + s, ok := sc.(*PointP256) + require.True(t, ok) + expectedX, _ := new( + big.Int, + ).SetString("7d31a079d75687cd0dd1118996f726c3e4d52806a5124d23c1faeee9fadb2201", 16) + expectedY, _ := new( + big.Int, + ).SetString("da62629181a0e2ec6943c263bbe81f53d87cb94d0039a707309f415f04d47bab", 16) + require.Equal(t, s.X().BigInt(), expectedX) + require.Equal(t, s.Y().BigInt(), expectedY) + // Try 10 random values + for i := 0; i < 10; i++ { + sc := p256.Point.Random(crand.Reader) + _, ok := sc.(*PointP256) + require.True(t, ok) + require.True(t, !sc.IsIdentity()) + } +} + +func TestPointP256Hash(t *testing.T) { + var b [32]byte + p256 := P256() + sc := p256.Point.Hash(b[:]) + s, ok := sc.(*PointP256) + require.True(t, ok) + expectedX, _ := new( + big.Int, + ).SetString("10f1699c24cf20f5322b92bfab4fdf14814ee1472ffc6b1ef2ec36be3051925d", 16) + expectedY, _ := new( + big.Int, + ).SetString("99efe055f13c3e5e3fc65be12043a89d0482b806c7a9945c276b7aee887c1de0", 16) + require.Equal(t, s.X().BigInt(), expectedX) + require.Equal(t, s.Y().BigInt(), expectedY) +} + +func TestPointP256Identity(t *testing.T) { + p256 
:= P256() + sc := p256.Point.Identity() + require.True(t, sc.IsIdentity()) + require.Equal( + t, + sc.ToAffineCompressed(), + []byte{ + 2, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + }, + ) +} + +func TestPointP256Generator(t *testing.T) { + p256 := P256() + sc := p256.Point.Generator() + s, ok := sc.(*PointP256) + require.True(t, ok) + require.Equal(t, s.X().BigInt(), elliptic.P256().Params().Gx) + require.Equal(t, s.Y().BigInt(), elliptic.P256().Params().Gy) +} + +func TestPointP256Set(t *testing.T) { + p256 := P256() + iden, err := p256.Point.Set(big.NewInt(0), big.NewInt(0)) + require.NoError(t, err) + require.True(t, iden.IsIdentity()) + _, err = p256.Point.Set(elliptic.P256().Params().Gx, elliptic.P256().Params().Gy) + require.NoError(t, err) +} + +func TestPointP256Double(t *testing.T) { + p256 := P256() + g := p256.Point.Generator() + g2 := g.Double() + require.True(t, g2.Equal(g.Mul(p256.Scalar.New(2)))) + i := p256.Point.Identity() + require.True(t, i.Double().Equal(i)) +} + +func TestPointP256Neg(t *testing.T) { + p256 := P256() + g := p256.Point.Generator().Neg() + require.True(t, g.Neg().Equal(p256.Point.Generator())) + require.True(t, p256.Point.Identity().Neg().Equal(p256.Point.Identity())) +} + +func TestPointP256Add(t *testing.T) { + p256 := P256() + pt := p256.Point.Generator() + require.True(t, pt.Add(pt).Equal(pt.Double())) + require.True(t, pt.Mul(p256.Scalar.New(3)).Equal(pt.Add(pt).Add(pt))) +} + +func TestPointP256Sub(t *testing.T) { + p256 := P256() + g := p256.Point.Generator() + pt := p256.Point.Generator().Mul(p256.Scalar.New(4)) + require.True(t, pt.Sub(g).Sub(g).Sub(g).Equal(g)) + require.True(t, pt.Sub(g).Sub(g).Sub(g).Sub(g).IsIdentity()) +} + +func TestPointP256Mul(t *testing.T) { + p256 := P256() + g := p256.Point.Generator() + pt := p256.Point.Generator().Mul(p256.Scalar.New(4)) + require.True(t, g.Double().Double().Equal(pt)) +} + +func TestPointP256Serialize(t *testing.T) { + p256 := P256() + ss := p256.Scalar.Random(testRng()) + g := p256.Point.Generator() + + ppt := g.Mul(ss) + require.Equal( + t, + ppt.ToAffineCompressed(), + []byte{ + 0x3, + 0x1b, + 0xfa, + 0x44, + 0xfb, + 0x6, + 0xa4, + 0x6d, + 0xe1, + 0x38, + 0x72, + 0x39, + 0x9a, + 0x69, + 0xd4, + 0x71, + 0x38, + 0x3f, + 0x8b, + 0x13, + 0x39, + 0x60, + 0xdc, + 0x61, + 0x91, + 0x22, + 0x70, + 0x58, + 0x7c, + 0xdf, + 0x82, + 0x33, + 0xba, + }, + ) + require.Equal( + t, + ppt.ToAffineUncompressed(), + []byte{ + 0x4, + 0x1b, + 0xfa, + 0x44, + 0xfb, + 0x6, + 0xa4, + 0x6d, + 0xe1, + 0x38, + 0x72, + 0x39, + 0x9a, + 0x69, + 0xd4, + 0x71, + 0x38, + 0x3f, + 0x8b, + 0x13, + 0x39, + 0x60, + 0xdc, + 0x61, + 0x91, + 0x22, + 0x70, + 0x58, + 0x7c, + 0xdf, + 0x82, + 0x33, + 0xba, + 0x48, + 0xdf, + 0x58, + 0xac, + 0x70, + 0xf3, + 0x9b, + 0x9d, + 0x5d, + 0x84, + 0x6e, + 0x2d, + 0x75, + 0xc9, + 0x6, + 0x1e, + 0xd9, + 0x62, + 0x8b, + 0x15, + 0x0, + 0x65, + 0x69, + 0x79, + 0xfb, + 0x42, + 0xc, + 0x35, + 0xce, + 0x2e, + 0x8b, + 0xfd, + }, + ) + retP, err := ppt.FromAffineCompressed(ppt.ToAffineCompressed()) + require.NoError(t, err) + require.True(t, ppt.Equal(retP)) + retP, err = ppt.FromAffineUncompressed(ppt.ToAffineUncompressed()) + require.NoError(t, err) + require.True(t, ppt.Equal(retP)) + + // smoke test + for i := 0; i < 25; i++ { + s := p256.Scalar.Random(crand.Reader) + pt := g.Mul(s) + cmprs := pt.ToAffineCompressed() + require.Equal(t, len(cmprs), 33) + retC, err := 
pt.FromAffineCompressed(cmprs) + require.NoError(t, err) + require.True(t, pt.Equal(retC)) + + un := pt.ToAffineUncompressed() + require.Equal(t, len(un), 65) + retU, err := pt.FromAffineUncompressed(un) + require.NoError(t, err) + require.True(t, pt.Equal(retU)) + } +} + +func TestPointP256Nil(t *testing.T) { + p256 := P256() + one := p256.Point.Generator() + require.Nil(t, one.Add(nil)) + require.Nil(t, one.Sub(nil)) + require.Nil(t, one.Mul(nil)) + require.Nil(t, p256.Scalar.Random(nil)) + require.False(t, one.Equal(nil)) + _, err := p256.Scalar.SetBigInt(nil) + require.Error(t, err) +} + +func TestPointP256SumOfProducts(t *testing.T) { + lhs := new(PointP256).Generator().Mul(new(ScalarP256).New(50)) + points := make([]Point, 5) + for i := range points { + points[i] = new(PointP256).Generator() + } + scalars := []Scalar{ + new(ScalarP256).New(8), + new(ScalarP256).New(9), + new(ScalarP256).New(10), + new(ScalarP256).New(11), + new(ScalarP256).New(12), + } + rhs := lhs.SumOfProducts(points, scalars) + require.NotNil(t, rhs) + require.True(t, lhs.Equal(rhs)) +} diff --git a/core/curves/pallas_curve.go b/core/curves/pallas_curve.go new file mode 100644 index 0000000..d300043 --- /dev/null +++ b/core/curves/pallas_curve.go @@ -0,0 +1,1216 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package curves + +import ( + "crypto/elliptic" + crand "crypto/rand" + "crypto/subtle" + "errors" + "fmt" + "io" + "math/big" + "sync" + + "golang.org/x/crypto/blake2b" + + "github.com/sonr-io/sonr/crypto/core/curves/native/pasta/fp" + "github.com/sonr-io/sonr/crypto/core/curves/native/pasta/fq" +) + +var ( + b = new(fp.Fp).SetUint64(5) + three = &fp.Fp{ + 0x6b0ee5d0fffffff5, + 0x86f76d2b99b14bd0, + 0xfffffffffffffffe, + 0x3fffffffffffffff, + } + eight = &fp.Fp{ + 0x7387134cffffffe1, + 0xd973797adfadd5a8, + 0xfffffffffffffffb, + 0x3fffffffffffffff, + } + bool2int = map[bool]int{ + true: 1, + false: 0, + } +) + +var isomapper = [13]*fp.Fp{ + new( + fp.Fp, + ).SetRaw(&[4]uint64{0x775f6034aaaaaaab, 0x4081775473d8375b, 0xe38e38e38e38e38e, 0x0e38e38e38e38e38}), + new( + fp.Fp, + ).SetRaw(&[4]uint64{0x8cf863b02814fb76, 0x0f93b82ee4b99495, 0x267c7ffa51cf412a, 0x3509afd51872d88e}), + new( + fp.Fp, + ).SetRaw(&[4]uint64{0x0eb64faef37ea4f7, 0x380af066cfeb6d69, 0x98c7d7ac3d98fd13, 0x17329b9ec5253753}), + new( + fp.Fp, + ).SetRaw(&[4]uint64{0xeebec06955555580, 0x8102eea8e7b06eb6, 0xc71c71c71c71c71c, 0x1c71c71c71c71c71}), + new( + fp.Fp, + ).SetRaw(&[4]uint64{0xc47f2ab668bcd71f, 0x9c434ac1c96b6980, 0x5a607fcce0494a79, 0x1d572e7ddc099cff}), + new( + fp.Fp, + ).SetRaw(&[4]uint64{0x2aa3af1eae5b6604, 0xb4abf9fb9a1fc81c, 0x1d13bf2a7f22b105, 0x325669becaecd5d1}), + new( + fp.Fp, + ).SetRaw(&[4]uint64{0x5ad985b5e38e38e4, 0x7642b01ad461bad2, 0x4bda12f684bda12f, 0x1a12f684bda12f68}), + new( + fp.Fp, + ).SetRaw(&[4]uint64{0xc67c31d8140a7dbb, 0x07c9dc17725cca4a, 0x133e3ffd28e7a095, 0x1a84d7ea8c396c47}), + new( + fp.Fp, + ).SetRaw(&[4]uint64{0x02e2be87d225b234, 0x1765e924f7459378, 0x303216cce1db9ff1, 0x3fb98ff0d2ddcadd}), + new( + fp.Fp, + ).SetRaw(&[4]uint64{0x93e53ab371c71c4f, 0x0ac03e8e134eb3e4, 0x7b425ed097b425ed, 0x025ed097b425ed09}), + new( + fp.Fp, + ).SetRaw(&[4]uint64{0x5a28279b1d1b42ae, 0x5941a3a4a97aa1b3, 0x0790bfb3506defb6, 0x0c02c5bcca0e6b7f}), + new( + fp.Fp, + ).SetRaw(&[4]uint64{0x4d90ab820b12320a, 0xd976bbfabbc5661d, 0x573b3d7f7d681310, 0x17033d3c60c68173}), + new( + fp.Fp, + ).SetRaw(&[4]uint64{0x992d30ecfffffde5, 0x224698fc094cf91b, 0x0000000000000000, 
0x4000000000000000}), +} + +var ( + isoa = new( + fp.Fp, + ).SetRaw(&[4]uint64{0x92bb4b0b657a014b, 0xb74134581a27a59f, 0x49be2d7258370742, 0x18354a2eb0ea8c9c}) + isob = new(fp.Fp).SetRaw(&[4]uint64{1265, 0, 0, 0}) + z = new( + fp.Fp, + ).SetRaw(&[4]uint64{0x992d30ecfffffff4, 0x224698fc094cf91b, 0x0000000000000000, 0x4000000000000000}) +) + +var ( + oldPallasInitonce sync.Once + oldPallas PallasCurve +) + +type PallasCurve struct { + *elliptic.CurveParams +} + +func oldPallasInitAll() { + oldPallas.CurveParams = new(elliptic.CurveParams) + oldPallas.P = new(big.Int).SetBytes([]byte{ + 0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x22, 0x46, 0x98, 0xfc, 0x09, 0x4c, 0xf9, 0x1b, + 0x99, 0x2d, 0x30, 0xed, 0x00, 0x00, 0x00, 0x01, + }) + oldPallas.N = new(big.Int).SetBytes([]byte{ + 0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x22, 0x46, 0x98, 0xfc, 0x09, 0x94, 0xa8, 0xdd, + 0x8c, 0x46, 0xeb, 0x21, 0x00, 0x00, 0x00, 0x01, + }) + g := new(Ep).Generator() + oldPallas.Gx = g.x.BigInt() + oldPallas.Gy = g.y.BigInt() + oldPallas.B = big.NewInt(5) + oldPallas.BitSize = 255 + oldPallas.Name = PallasName +} + +func Pallas() *PallasCurve { + oldPallasInitonce.Do(oldPallasInitAll) + return &oldPallas +} + +func (curve *PallasCurve) Params() *elliptic.CurveParams { + return curve.CurveParams +} + +func (curve *PallasCurve) IsOnCurve(x, y *big.Int) bool { + p := new(Ep) + p.x = new(fp.Fp).SetBigInt(x) + p.y = new(fp.Fp).SetBigInt(y) + p.z = new(fp.Fp).SetOne() + + return p.IsOnCurve() +} + +func (curve *PallasCurve) Add(x1, y1, x2, y2 *big.Int) (*big.Int, *big.Int) { + p := new(Ep) + p.x = new(fp.Fp).SetBigInt(x1) + p.y = new(fp.Fp).SetBigInt(y1) + p.z = new(fp.Fp).SetOne() + if p.x.IsZero() && p.y.IsZero() { + p.z.SetZero() + } + + q := new(Ep) + q.x = new(fp.Fp).SetBigInt(x2) + q.y = new(fp.Fp).SetBigInt(y2) + q.z = new(fp.Fp).SetOne() + if q.x.IsZero() && q.y.IsZero() { + q.z.SetZero() + } + p.Add(p, q) + p.toAffine() + return p.x.BigInt(), p.y.BigInt() +} + +func (curve *PallasCurve) Double(x1, y1 *big.Int) (*big.Int, *big.Int) { + p := new(Ep) + p.x = new(fp.Fp).SetBigInt(x1) + p.y = new(fp.Fp).SetBigInt(y1) + p.z = new(fp.Fp).SetOne() + if p.x.IsZero() && p.y.IsZero() { + p.z.SetZero() + } + p.Double(p) + p.toAffine() + return p.x.BigInt(), p.y.BigInt() +} + +func (curve *PallasCurve) ScalarMult(Bx, By *big.Int, k []byte) (*big.Int, *big.Int) { + p := new(Ep) + p.x = new(fp.Fp).SetBigInt(Bx) + p.y = new(fp.Fp).SetBigInt(By) + p.z = new(fp.Fp).SetOne() + if p.x.IsZero() && p.y.IsZero() { + p.z.SetZero() + } + var t [32]byte + copy(t[:], k) + ss := new(big.Int).SetBytes(k) + sc := new(fq.Fq).SetBigInt(ss) + p.Mul(p, sc) + p.toAffine() + return p.x.BigInt(), p.y.BigInt() +} + +func (curve *PallasCurve) ScalarBaseMult(k []byte) (*big.Int, *big.Int) { + p := new(Ep).Generator() + var t [32]byte + copy(t[:], k) + ss := new(big.Int).SetBytes(k) + sc := new(fq.Fq).SetBigInt(ss) + p.Mul(p, sc) + p.toAffine() + return p.x.BigInt(), p.y.BigInt() +} + +// PallasScalar - Old interface +type PallasScalar struct{} + +func NewPallasScalar() *PallasScalar { + return &PallasScalar{} +} + +func (k PallasScalar) Add(x, y *big.Int) *big.Int { + r := new(big.Int).Add(x, y) + return r.Mod(r, fq.BiModulus) +} + +func (k PallasScalar) Sub(x, y *big.Int) *big.Int { + r := new(big.Int).Sub(x, y) + return r.Mod(r, fq.BiModulus) +} + +func (k PallasScalar) Neg(x *big.Int) *big.Int { + r := new(big.Int).Neg(x) + return 
r.Mod(r, fq.BiModulus) +} + +func (k PallasScalar) Mul(x, y *big.Int) *big.Int { + r := new(big.Int).Mul(x, y) + return r.Mod(r, fq.BiModulus) +} + +func (k PallasScalar) Div(x, y *big.Int) *big.Int { + r := new(big.Int).ModInverse(y, fq.BiModulus) + r.Mul(r, x) + return r.Mod(r, fq.BiModulus) +} + +func (k PallasScalar) Hash(input []byte) *big.Int { + return new(ScalarPallas).Hash(input).(*ScalarPallas).value.BigInt() +} + +func (k PallasScalar) Bytes(x *big.Int) []byte { + return x.Bytes() +} + +func (k PallasScalar) Random() (*big.Int, error) { + s, ok := new(ScalarPallas).Random(crand.Reader).(*ScalarPallas) + if !ok { + return nil, errors.New("incorrect type conversion") + } + return s.value.BigInt(), nil +} + +func (k PallasScalar) IsValid(x *big.Int) bool { + return x.Cmp(fq.BiModulus) == -1 +} + +// ScalarPallas - New interface +type ScalarPallas struct { + value *fq.Fq +} + +func (s *ScalarPallas) Random(reader io.Reader) Scalar { + if reader == nil { + return nil + } + var seed [64]byte + _, _ = reader.Read(seed[:]) + return s.Hash(seed[:]) +} + +func (s *ScalarPallas) Hash(bytes []byte) Scalar { + h, _ := blake2b.New(64, []byte{}) + xmd, err := expandMsgXmd(h, bytes, []byte("pallas_XMD:BLAKE2b_SSWU_RO_"), 64) + if err != nil { + return nil + } + var t [64]byte + copy(t[:], xmd) + return &ScalarPallas{ + value: new(fq.Fq).SetBytesWide(&t), + } +} + +func (s *ScalarPallas) Zero() Scalar { + return &ScalarPallas{ + value: new(fq.Fq).SetZero(), + } +} + +func (s *ScalarPallas) One() Scalar { + return &ScalarPallas{ + value: new(fq.Fq).SetOne(), + } +} + +func (s *ScalarPallas) IsZero() bool { + return s.value.IsZero() +} + +func (s *ScalarPallas) IsOne() bool { + return s.value.IsOne() +} + +func (s *ScalarPallas) IsOdd() bool { + return (s.value[0] & 1) == 1 +} + +func (s *ScalarPallas) IsEven() bool { + return (s.value[0] & 1) == 0 +} + +func (s *ScalarPallas) New(value int) Scalar { + v := big.NewInt(int64(value)) + return &ScalarPallas{ + value: new(fq.Fq).SetBigInt(v), + } +} + +func (s *ScalarPallas) Cmp(rhs Scalar) int { + r, ok := rhs.(*ScalarPallas) + if ok { + return s.value.Cmp(r.value) + } else { + return -2 + } +} + +func (s *ScalarPallas) Square() Scalar { + return &ScalarPallas{ + value: new(fq.Fq).Square(s.value), + } +} + +func (s *ScalarPallas) Double() Scalar { + return &ScalarPallas{ + value: new(fq.Fq).Double(s.value), + } +} + +func (s *ScalarPallas) Invert() (Scalar, error) { + value, wasInverted := new(fq.Fq).Invert(s.value) + if !wasInverted { + return nil, fmt.Errorf("inverse doesn't exist") + } + return &ScalarPallas{ + value, + }, nil +} + +func (s *ScalarPallas) Sqrt() (Scalar, error) { + value, wasSquare := new(fq.Fq).Sqrt(s.value) + if !wasSquare { + return nil, fmt.Errorf("not a square") + } + return &ScalarPallas{ + value, + }, nil +} + +func (s *ScalarPallas) Cube() Scalar { + value := new(fq.Fq).Mul(s.value, s.value) + value.Mul(value, s.value) + return &ScalarPallas{ + value, + } +} + +func (s *ScalarPallas) Add(rhs Scalar) Scalar { + r, ok := rhs.(*ScalarPallas) + if ok { + return &ScalarPallas{ + value: new(fq.Fq).Add(s.value, r.value), + } + } else { + return nil + } +} + +func (s *ScalarPallas) Sub(rhs Scalar) Scalar { + r, ok := rhs.(*ScalarPallas) + if ok { + return &ScalarPallas{ + value: new(fq.Fq).Sub(s.value, r.value), + } + } else { + return nil + } +} + +func (s *ScalarPallas) Mul(rhs Scalar) Scalar { + r, ok := rhs.(*ScalarPallas) + if ok { + return &ScalarPallas{ + value: new(fq.Fq).Mul(s.value, r.value), + } + } else { + return nil 
+ } +} + +func (s *ScalarPallas) MulAdd(y, z Scalar) Scalar { + return s.Mul(y).Add(z) +} + +func (s *ScalarPallas) Div(rhs Scalar) Scalar { + r, ok := rhs.(*ScalarPallas) + if ok { + v, wasInverted := new(fq.Fq).Invert(r.value) + if !wasInverted { + return nil + } + v.Mul(v, s.value) + return &ScalarPallas{value: v} + } else { + return nil + } +} + +func (s *ScalarPallas) Neg() Scalar { + return &ScalarPallas{ + value: new(fq.Fq).Neg(s.value), + } +} + +func (s *ScalarPallas) SetBigInt(v *big.Int) (Scalar, error) { + return &ScalarPallas{ + value: new(fq.Fq).SetBigInt(v), + }, nil +} + +func (s *ScalarPallas) BigInt() *big.Int { + return s.value.BigInt() +} + +func (s *ScalarPallas) Bytes() []byte { + t := s.value.Bytes() + return t[:] +} + +func (s *ScalarPallas) SetBytes(bytes []byte) (Scalar, error) { + if len(bytes) != 32 { + return nil, fmt.Errorf("invalid length") + } + var seq [32]byte + copy(seq[:], bytes) + value, err := new(fq.Fq).SetBytes(&seq) + if err != nil { + return nil, err + } + return &ScalarPallas{ + value, + }, nil +} + +func (s *ScalarPallas) SetBytesWide(bytes []byte) (Scalar, error) { + if len(bytes) != 64 { + return nil, fmt.Errorf("invalid length") + } + var seq [64]byte + copy(seq[:], bytes) + return &ScalarPallas{ + value: new(fq.Fq).SetBytesWide(&seq), + }, nil +} + +func (s *ScalarPallas) Point() Point { + return new(PointPallas).Identity() +} + +func (s *ScalarPallas) Clone() Scalar { + return &ScalarPallas{ + value: new(fq.Fq).Set(s.value), + } +} + +func (s *ScalarPallas) GetFq() *fq.Fq { + return new(fq.Fq).Set(s.value) +} + +func (s *ScalarPallas) SetFq(fq *fq.Fq) *ScalarPallas { + s.value = fq + return s +} + +func (s *ScalarPallas) MarshalBinary() ([]byte, error) { + return scalarMarshalBinary(s) +} + +func (s *ScalarPallas) UnmarshalBinary(input []byte) error { + sc, err := scalarUnmarshalBinary(input) + if err != nil { + return err + } + ss, ok := sc.(*ScalarPallas) + if !ok { + return fmt.Errorf("invalid scalar") + } + s.value = ss.value + return nil +} + +func (s *ScalarPallas) MarshalText() ([]byte, error) { + return scalarMarshalText(s) +} + +func (s *ScalarPallas) UnmarshalText(input []byte) error { + sc, err := scalarUnmarshalText(input) + if err != nil { + return err + } + ss, ok := sc.(*ScalarPallas) + if !ok { + return fmt.Errorf("invalid scalar") + } + s.value = ss.value + return nil +} + +func (s *ScalarPallas) MarshalJSON() ([]byte, error) { + return scalarMarshalJson(s) +} + +func (s *ScalarPallas) UnmarshalJSON(input []byte) error { + sc, err := scalarUnmarshalJson(input) + if err != nil { + return err + } + S, ok := sc.(*ScalarPallas) + if !ok { + return fmt.Errorf("invalid type") + } + s.value = S.value + return nil +} + +type PointPallas struct { + value *Ep +} + +func (p *PointPallas) Random(reader io.Reader) Point { + return &PointPallas{new(Ep).Random(reader)} +} + +func (p *PointPallas) Hash(bytes []byte) Point { + return &PointPallas{new(Ep).Hash(bytes)} +} + +func (p *PointPallas) Identity() Point { + return &PointPallas{new(Ep).Identity()} +} + +func (p *PointPallas) Generator() Point { + return &PointPallas{new(Ep).Generator()} +} + +func (p *PointPallas) IsIdentity() bool { + return p.value.IsIdentity() +} + +func (p *PointPallas) IsNegative() bool { + return p.value.Y().IsOdd() +} + +func (p *PointPallas) IsOnCurve() bool { + return p.value.IsOnCurve() +} + +func (p *PointPallas) Double() Point { + return &PointPallas{new(Ep).Double(p.value)} +} + +func (p *PointPallas) Scalar() Scalar { + return 
&ScalarPallas{new(fq.Fq).SetZero()} +} + +func (p *PointPallas) Neg() Point { + return &PointPallas{new(Ep).Neg(p.value)} +} + +func (p *PointPallas) Add(rhs Point) Point { + r, ok := rhs.(*PointPallas) + if !ok { + return nil + } + return &PointPallas{new(Ep).Add(p.value, r.value)} +} + +func (p *PointPallas) Sub(rhs Point) Point { + r, ok := rhs.(*PointPallas) + if !ok { + return nil + } + return &PointPallas{new(Ep).Sub(p.value, r.value)} +} + +func (p *PointPallas) Mul(rhs Scalar) Point { + s, ok := rhs.(*ScalarPallas) + if !ok { + return nil + } + return &PointPallas{new(Ep).Mul(p.value, s.value)} +} + +func (p *PointPallas) Equal(rhs Point) bool { + r, ok := rhs.(*PointPallas) + if !ok { + return false + } + return p.value.Equal(r.value) +} + +func (p *PointPallas) Set(x, y *big.Int) (Point, error) { + xx := subtle.ConstantTimeCompare(x.Bytes(), []byte{}) + yy := subtle.ConstantTimeCompare(y.Bytes(), []byte{}) + xElem := new(fp.Fp).SetBigInt(x) + var data [32]byte + if yy == 1 { + if xx == 1 { + return &PointPallas{new(Ep).Identity()}, nil + } + data = xElem.Bytes() + return p.FromAffineCompressed(data[:]) + } + yElem := new(fp.Fp).SetBigInt(y) + value := &Ep{xElem, yElem, new(fp.Fp).SetOne()} + if !value.IsOnCurve() { + return nil, fmt.Errorf("point is not on the curve") + } + return &PointPallas{value}, nil +} + +func (p *PointPallas) ToAffineCompressed() []byte { + return p.value.ToAffineCompressed() +} + +func (p *PointPallas) ToAffineUncompressed() []byte { + return p.value.ToAffineUncompressed() +} + +func (p *PointPallas) FromAffineCompressed(bytes []byte) (Point, error) { + value, err := new(Ep).FromAffineCompressed(bytes) + if err != nil { + return nil, err + } + return &PointPallas{value}, nil +} + +func (p *PointPallas) FromAffineUncompressed(bytes []byte) (Point, error) { + value, err := new(Ep).FromAffineUncompressed(bytes) + if err != nil { + return nil, err + } + return &PointPallas{value}, nil +} + +func (p *PointPallas) CurveName() string { + return PallasName +} + +func (p *PointPallas) SumOfProducts(points []Point, scalars []Scalar) Point { + eps := make([]*Ep, len(points)) + for i, pt := range points { + ps, ok := pt.(*PointPallas) + if !ok { + return nil + } + eps[i] = ps.value + } + value := p.value.SumOfProducts(eps, scalars) + return &PointPallas{value} +} + +func (p *PointPallas) MarshalBinary() ([]byte, error) { + return pointMarshalBinary(p) +} + +func (p *PointPallas) UnmarshalBinary(input []byte) error { + pt, err := pointUnmarshalBinary(input) + if err != nil { + return err + } + ppt, ok := pt.(*PointPallas) + if !ok { + return fmt.Errorf("invalid point") + } + p.value = ppt.value + return nil +} + +func (p *PointPallas) MarshalText() ([]byte, error) { + return pointMarshalText(p) +} + +func (p *PointPallas) UnmarshalText(input []byte) error { + pt, err := pointUnmarshalText(input) + if err != nil { + return err + } + ppt, ok := pt.(*PointPallas) + if !ok { + return fmt.Errorf("invalid point") + } + p.value = ppt.value + return nil +} + +func (p *PointPallas) MarshalJSON() ([]byte, error) { + return pointMarshalJSON(p) +} + +func (p *PointPallas) UnmarshalJSON(input []byte) error { + pt, err := pointUnmarshalJSON(input) + if err != nil { + return err + } + P, ok := pt.(*PointPallas) + if !ok { + return fmt.Errorf("invalid type") + } + p.value = P.value + return nil +} + +func (p *PointPallas) X() *fp.Fp { + return p.value.X() +} + +func (p *PointPallas) Y() *fp.Fp { + return p.value.Y() +} + +func (p *PointPallas) GetEp() *Ep { + return 
new(Ep).Set(p.value) +} + +type Ep struct { + x *fp.Fp + y *fp.Fp + z *fp.Fp +} + +func (p *Ep) Random(reader io.Reader) *Ep { + var seed [64]byte + _, _ = reader.Read(seed[:]) + return p.Hash(seed[:]) +} + +func (p *Ep) Hash(bytes []byte) *Ep { + if bytes == nil { + bytes = []byte{} + } + h, _ := blake2b.New(64, []byte{}) + u, _ := expandMsgXmd(h, bytes, []byte("pallas_XMD:BLAKE2b_SSWU_RO_"), 128) + var buf [64]byte + copy(buf[:], u[:64]) + u0 := new(fp.Fp).SetBytesWide(&buf) + copy(buf[:], u[64:]) + u1 := new(fp.Fp).SetBytesWide(&buf) + + q0 := mapSswu(u0) + q1 := mapSswu(u1) + r1 := isoMap(q0) + r2 := isoMap(q1) + return p.Identity().Add(r1, r2) +} + +func (p *Ep) Identity() *Ep { + p.x = new(fp.Fp).SetZero() + p.y = new(fp.Fp).SetZero() + p.z = new(fp.Fp).SetZero() + return p +} + +func (p *Ep) Generator() *Ep { + p.x = new(fp.Fp).SetOne() + p.y = &fp.Fp{0x2f474795455d409d, 0xb443b9b74b8255d9, 0x270c412f2c9a5d66, 0x8e00f71ba43dd6b} + p.z = new(fp.Fp).SetOne() + return p +} + +func (p *Ep) IsIdentity() bool { + return p.z.IsZero() +} + +func (p *Ep) Double(other *Ep) *Ep { + if other.IsIdentity() { + p.Set(other) + return p + } + r := new(Ep) + // essentially paraphrased https://github.com/MinaProtocol/c-reference-signer/blob/master/crypto.c#L306-L337 + a := new(fp.Fp).Square(other.x) + b := new(fp.Fp).Square(other.y) + c := new(fp.Fp).Square(b) + r.x = new(fp.Fp).Add(other.x, b) + r.y = new(fp.Fp).Square(r.x) + r.z = new(fp.Fp).Sub(r.y, a) + r.x.Sub(r.z, c) + d := new(fp.Fp).Double(r.x) + e := new(fp.Fp).Mul(three, a) + f := new(fp.Fp).Square(e) + r.y.Double(d) + r.x.Sub(f, r.y) + r.y.Sub(d, r.x) + f.Mul(eight, c) + r.z.Mul(e, r.y) + r.y.Sub(r.z, f) + f.Mul(other.y, other.z) + r.z.Double(f) + p.Set(r) + return p +} + +func (p *Ep) Neg(other *Ep) *Ep { + p.x = new(fp.Fp).Set(other.x) + p.y = new(fp.Fp).Neg(other.y) + p.z = new(fp.Fp).Set(other.z) + return p +} + +func (p *Ep) Add(lhs *Ep, rhs *Ep) *Ep { + if lhs.IsIdentity() { + return p.Set(rhs) + } + if rhs.IsIdentity() { + return p.Set(lhs) + } + z1z1 := new(fp.Fp).Square(lhs.z) + z2z2 := new(fp.Fp).Square(rhs.z) + u1 := new(fp.Fp).Mul(lhs.x, z2z2) + u2 := new(fp.Fp).Mul(rhs.x, z1z1) + s1 := new(fp.Fp).Mul(lhs.y, z2z2) + s1.Mul(s1, rhs.z) + s2 := new(fp.Fp).Mul(rhs.y, z1z1) + s2.Mul(s2, lhs.z) + + if u1.Equal(u2) { + if s1.Equal(s2) { + return p.Double(lhs) + } else { + return p.Identity() + } + } else { + h := new(fp.Fp).Sub(u2, u1) + i := new(fp.Fp).Double(h) + i.Square(i) + j := new(fp.Fp).Mul(i, h) + r := new(fp.Fp).Sub(s2, s1) + r.Double(r) + v := new(fp.Fp).Mul(u1, i) + x3 := new(fp.Fp).Square(r) + x3.Sub(x3, j) + x3.Sub(x3, new(fp.Fp).Double(v)) + s1.Mul(s1, j) + s1.Double(s1) + y3 := new(fp.Fp).Mul(r, new(fp.Fp).Sub(v, x3)) + y3.Sub(y3, s1) + z3 := new(fp.Fp).Add(lhs.z, rhs.z) + z3.Square(z3) + z3.Sub(z3, z1z1) + z3.Sub(z3, z2z2) + z3.Mul(z3, h) + p.x = new(fp.Fp).Set(x3) + p.y = new(fp.Fp).Set(y3) + p.z = new(fp.Fp).Set(z3) + + return p + } +} + +func (p *Ep) Sub(lhs, rhs *Ep) *Ep { + return p.Add(lhs, new(Ep).Neg(rhs)) +} + +func (p *Ep) Mul(point *Ep, scalar *fq.Fq) *Ep { + bytes := scalar.Bytes() + precomputed := [16]*Ep{} + precomputed[0] = new(Ep).Identity() + precomputed[1] = new(Ep).Set(point) + for i := 2; i < 16; i += 2 { + precomputed[i] = new(Ep).Double(precomputed[i>>1]) + precomputed[i+1] = new(Ep).Add(precomputed[i], point) + } + p.Identity() + for i := 0; i < 256; i += 4 { + // Brouwer / windowing method. window size of 4. 
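+ // Each pass consumes one 4-bit window of the scalar, working from the
+ // most significant nibble down: four doublings shift the accumulator,
+ // then the precomputed multiple selected by that nibble is added.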
+ for j := 0; j < 4; j++ { + p.Double(p) + } + window := bytes[32-1-i>>3] >> (4 - i&0x04) & 0x0F + p.Add(p, precomputed[window]) + } + return p +} + +func (p *Ep) Equal(other *Ep) bool { + // warning: requires converting both to affine + // could save slightly by modifying one so that its z-value equals the other + // this would save one inversion and a handful of multiplications + // but this is more subtle and error-prone, so going to just convert both to affine. + lhs := new(Ep).Set(p) + rhs := new(Ep).Set(other) + lhs.toAffine() + rhs.toAffine() + return lhs.x.Equal(rhs.x) && lhs.y.Equal(rhs.y) +} + +func (p *Ep) Set(other *Ep) *Ep { + // check is identity or on curve + p.x = new(fp.Fp).Set(other.x) + p.y = new(fp.Fp).Set(other.y) + p.z = new(fp.Fp).Set(other.z) + return p +} + +func (p *Ep) toAffine() *Ep { + // mutates `p` in-place to convert it to "affine" form. + if p.IsIdentity() { + // warning: control flow / not constant-time + p.x.SetZero() + p.y.SetZero() + p.z.SetOne() + return p + } + zInv3, _ := new(fp.Fp).Invert(p.z) // z is necessarily nonzero + zInv2 := new(fp.Fp).Square(zInv3) + zInv3.Mul(zInv3, zInv2) + p.x.Mul(p.x, zInv2) + p.y.Mul(p.y, zInv3) + p.z.SetOne() + return p +} + +func (p *Ep) ToAffineCompressed() []byte { + // Use ZCash encoding where infinity is all zeros + // and the top bit represents the sign of y and the + // remainder represent the x-coordinate + var inf [32]byte + p1 := new(Ep).Set(p) + p1.toAffine() + x := p1.x.Bytes() + x[31] |= (p1.y.Bytes()[0] & 1) << 7 + subtle.ConstantTimeCopy(bool2int[p1.IsIdentity()], x[:], inf[:]) + return x[:] +} + +func (p *Ep) ToAffineUncompressed() []byte { + p1 := new(Ep).Set(p) + p1.toAffine() + x := p1.x.Bytes() + y := p1.y.Bytes() + return append(x[:], y[:]...) +} + +func (p *Ep) FromAffineCompressed(bytes []byte) (*Ep, error) { + if len(bytes) != 32 { + return nil, fmt.Errorf("invalid byte sequence") + } + + var input [32]byte + copy(input[:], bytes) + sign := (input[31] >> 7) & 1 + input[31] &= 0x7F + + x := new(fp.Fp) + if _, err := x.SetBytes(&input); err != nil { + return nil, err + } + rhs := rhsPallas(x) + if _, square := rhs.Sqrt(rhs); !square { + return nil, fmt.Errorf("rhs of given x-coordinate is not a square") + } + if rhs.Bytes()[0]&1 != sign { + rhs.Neg(rhs) + } + p.x = x + p.y = rhs + p.z = new(fp.Fp).SetOne() + if !p.IsOnCurve() { + return nil, fmt.Errorf("invalid point") + } + return p, nil +} + +func (p *Ep) FromAffineUncompressed(bytes []byte) (*Ep, error) { + if len(bytes) != 64 { + return nil, fmt.Errorf("invalid length") + } + p.z = new(fp.Fp).SetOne() + p.x = new(fp.Fp) + p.y = new(fp.Fp) + var x, y [32]byte + copy(x[:], bytes[:32]) + copy(y[:], bytes[32:]) + if _, err := p.x.SetBytes(&x); err != nil { + return nil, err + } + if _, err := p.y.SetBytes(&y); err != nil { + return nil, err + } + if !p.IsOnCurve() { + return nil, fmt.Errorf("invalid point") + } + return p, nil +} + +// rhs of the curve equation +func rhsPallas(x *fp.Fp) *fp.Fp { + x2 := new(fp.Fp).Square(x) + x3 := new(fp.Fp).Mul(x, x2) + return new(fp.Fp).Add(x3, b) +} + +func (p Ep) CurveName() string { + return "pallas" +} + +func (p Ep) SumOfProducts(points []*Ep, scalars []Scalar) *Ep { + nScalars := make([]*big.Int, len(scalars)) + for i, s := range scalars { + sc, ok := s.(*ScalarPallas) + if !ok { + return nil + } + nScalars[i] = sc.value.BigInt() + } + return sumOfProductsPippengerPallas(points, nScalars) +} + +func (p *Ep) X() *fp.Fp { + t := new(Ep).Set(p) + t.toAffine() + return new(fp.Fp).Set(t.x) +} + +func (p *Ep) 
Y() *fp.Fp { + t := new(Ep).Set(p) + t.toAffine() + return new(fp.Fp).Set(t.y) +} + +func (p *Ep) IsOnCurve() bool { + // y^2 = x^3 + axz^4 + bz^6 + // a = 0 + // b = 5 + z2 := new(fp.Fp).Square(p.z) + z4 := new(fp.Fp).Square(z2) + z6 := new(fp.Fp).Mul(z2, z4) + x2 := new(fp.Fp).Square(p.x) + x3 := new(fp.Fp).Mul(x2, p.x) + + lhs := new(fp.Fp).Square(p.y) + rhs := new(fp.Fp).SetUint64(5) + rhs.Mul(rhs, z6) + rhs.Add(rhs, x3) + return p.z.IsZero() || lhs.Equal(rhs) +} + +func (p *Ep) CMove(lhs, rhs *Ep, condition int) *Ep { + p.x = new(fp.Fp).CMove(lhs.x, rhs.x, condition) + p.y = new(fp.Fp).CMove(lhs.y, rhs.y, condition) + p.z = new(fp.Fp).CMove(lhs.z, rhs.z, condition) + return p +} + +func sumOfProductsPippengerPallas(points []*Ep, scalars []*big.Int) *Ep { + if len(points) != len(scalars) { + return nil + } + + const w = 6 + + bucketSize := (1 << w) - 1 + windows := make([]*Ep, 255/w+1) + for i := range windows { + windows[i] = new(Ep).Identity() + } + bucket := make([]*Ep, bucketSize) + + for j := 0; j < len(windows); j++ { + for i := 0; i < bucketSize; i++ { + bucket[i] = new(Ep).Identity() + } + + for i := 0; i < len(scalars); i++ { + index := bucketSize & int(new(big.Int).Rsh(scalars[i], uint(w*j)).Int64()) + if index != 0 { + bucket[index-1].Add(bucket[index-1], points[i]) + } + } + + acc, sum := new(Ep).Identity(), new(Ep).Identity() + + for i := bucketSize - 1; i >= 0; i-- { + sum.Add(sum, bucket[i]) + acc.Add(acc, sum) + } + windows[j] = acc + } + + acc := new(Ep).Identity() + for i := len(windows) - 1; i >= 0; i-- { + for j := 0; j < w; j++ { + acc.Double(acc) + } + acc.Add(acc, windows[i]) + } + return acc +} + +// Implements a degree 3 isogeny map. +// The input and output are in Jacobian coordinates, using the method +// in "Avoiding inversions" [WB2019, section 4.3]. 
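+// The result is returned in Jacobian form with Z = divX*divY, chosen so that
+// X/Z^2 = numX/divX and Y/Z^3 = numY/divY, i.e. no field inversion is needed.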
+func isoMap(p *Ep) *Ep { + var z [4]*fp.Fp + z[0] = new(fp.Fp).Square(p.z) // z^2 + z[1] = new(fp.Fp).Mul(z[0], p.z) // z^3 + z[2] = new(fp.Fp).Square(z[0]) // z^4 + z[3] = new(fp.Fp).Square(z[1]) // z^6 + + // ((iso[0] * x + iso[1] * z^2) * x + iso[2] * z^4) * x + iso[3] * z^6 + numX := new(fp.Fp).Set(isomapper[0]) + numX.Mul(numX, p.x) + numX.Add(numX, new(fp.Fp).Mul(isomapper[1], z[0])) + numX.Mul(numX, p.x) + numX.Add(numX, new(fp.Fp).Mul(isomapper[2], z[2])) + numX.Mul(numX, p.x) + numX.Add(numX, new(fp.Fp).Mul(isomapper[3], z[3])) + + // (z^2 * x + iso[4] * z^4) * x + iso[5] * z^6 + divX := new(fp.Fp).Set(z[0]) + divX.Mul(divX, p.x) + divX.Add(divX, new(fp.Fp).Mul(isomapper[4], z[2])) + divX.Mul(divX, p.x) + divX.Add(divX, new(fp.Fp).Mul(isomapper[5], z[3])) + + // (((iso[6] * x + iso[7] * z2) * x + iso[8] * z4) * x + iso[9] * z6) * y + numY := new(fp.Fp).Set(isomapper[6]) + numY.Mul(numY, p.x) + numY.Add(numY, new(fp.Fp).Mul(isomapper[7], z[0])) + numY.Mul(numY, p.x) + numY.Add(numY, new(fp.Fp).Mul(isomapper[8], z[2])) + numY.Mul(numY, p.x) + numY.Add(numY, new(fp.Fp).Mul(isomapper[9], z[3])) + numY.Mul(numY, p.y) + + // (((x + iso[10] * z2) * x + iso[11] * z4) * x + iso[12] * z6) * z3 + divY := new(fp.Fp).Set(p.x) + divY.Add(divY, new(fp.Fp).Mul(isomapper[10], z[0])) + divY.Mul(divY, p.x) + divY.Add(divY, new(fp.Fp).Mul(isomapper[11], z[2])) + divY.Mul(divY, p.x) + divY.Add(divY, new(fp.Fp).Mul(isomapper[12], z[3])) + divY.Mul(divY, z[1]) + + z0 := new(fp.Fp).Mul(divX, divY) + x := new(fp.Fp).Mul(numX, divY) + x.Mul(x, z0) + y := new(fp.Fp).Mul(numY, divX) + y.Mul(y, new(fp.Fp).Square(z0)) + + return &Ep{ + x, y, z0, + } +} + +func mapSswu(u *fp.Fp) *Ep { + // c1 := new(fp.Fp).Neg(isoa) + // c1.Invert(c1) + // c1.Mul(isob, c1) + c1 := &fp.Fp{ + 0x1ee770ce078456ec, + 0x48cfd64c2ce76be0, + 0x43d5774c0ab79e2f, + 0x23368d2bdce28cf3, + } + // c2 := new(fp.Fp).Neg(z) + // c2.Invert(c2) + c2 := &fp.Fp{ + 0x03df915f89d89d8a, + 0x8f1e8db09ef82653, + 0xd89d89d89d89d89d, + 0x1d89d89d89d89d89, + } + + u2 := new(fp.Fp).Square(u) + tv1 := new(fp.Fp).Mul(z, u2) + tv2 := new(fp.Fp).Square(tv1) + x1 := new(fp.Fp).Add(tv1, tv2) + x1.Invert(x1) + e1 := bool2int[x1.IsZero()] + x1.Add(x1, new(fp.Fp).SetOne()) + x1.CMove(x1, c2, e1) + x1.Mul(x1, c1) + gx1 := new(fp.Fp).Square(x1) + gx1.Add(gx1, isoa) + gx1.Mul(gx1, x1) + gx1.Add(gx1, isob) + x2 := new(fp.Fp).Mul(tv1, x1) + tv2.Mul(tv1, tv2) + gx2 := new(fp.Fp).Mul(gx1, tv2) + gx1Sqrt, e2 := new(fp.Fp).Sqrt(gx1) + x := new(fp.Fp).CMove(x2, x1, bool2int[e2]) + gx2Sqrt, _ := new(fp.Fp).Sqrt(gx2) + y := new(fp.Fp).CMove(gx2Sqrt, gx1Sqrt, bool2int[e2]) + e3 := u.IsOdd() == y.IsOdd() + y.CMove(new(fp.Fp).Neg(y), y, bool2int[e3]) + + return &Ep{ + x: x, y: y, z: new(fp.Fp).SetOne(), + } +} diff --git a/core/curves/pallas_curve_test.go b/core/curves/pallas_curve_test.go new file mode 100644 index 0000000..76545fe --- /dev/null +++ b/core/curves/pallas_curve_test.go @@ -0,0 +1,244 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package curves + +import ( + crand "crypto/rand" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core/curves/native/pasta/fp" + "github.com/sonr-io/sonr/crypto/core/curves/native/pasta/fq" +) + +func TestPointPallasAddDoubleMul(t *testing.T) { + g := new(Ep).Generator() + id := new(Ep).Identity() + require.Equal(t, g.Add(g, id), g) + + g2 := new(Ep).Add(g, g) + require.True(t, new(Ep).Double(g).Equal(g2)) + require.Equal(t, new(Ep).Double(g), new(Ep).Add(g, g)) + g3 := new(Ep).Add(g, g2) + require.True(t, g3.Equal(new(Ep).Mul(g, new(fq.Fq).SetUint64(3)))) + + g4 := new(Ep).Add(g3, g) + require.True(t, g4.Equal(new(Ep).Double(g2))) + require.True(t, g4.Equal(new(Ep).Mul(g, new(fq.Fq).SetUint64(4)))) +} + +func TestPointPallasHash(t *testing.T) { + h0 := new(Ep).Hash(nil) + require.True(t, h0.IsOnCurve()) + h1 := new(Ep).Hash([]byte{}) + require.True(t, h1.IsOnCurve()) + require.True(t, h0.Equal(h1)) + h2 := new(Ep).Hash([]byte{1}) + require.True(t, h2.IsOnCurve()) +} + +func TestPointPallasNeg(t *testing.T) { + g := new(Ep).Generator() + g.Neg(g) + require.True(t, g.Neg(g).Equal(new(Ep).Generator())) + id := new(Ep).Identity() + require.True(t, new(Ep).Neg(id).Equal(id)) +} + +func TestPointPallasRandom(t *testing.T) { + a := new(Ep).Random(testRng()) + require.NotNil(t, a.x) + require.NotNil(t, a.y) + require.NotNil(t, a.z) + require.True(t, a.IsOnCurve()) + e := &Ep{ + x: &fp.Fp{ + 0x7263083d01d4859c, + 0x65a03323b5a3d204, + 0xe71d73222b136668, + 0x1d1b1bcf1256b539, + }, + y: &fp.Fp{ + 0x8cc2516ffe23e1bb, + 0x5418f941eeaca812, + 0x16c9af658a846f29, + 0x11c572091c418668, + }, + z: &fp.Fp{ + 0xa879589adb77a88e, + 0x5444a531a19f2406, + 0x637ff77c51dda524, + 0x0369e90d219ce821, + }, + } + require.True(t, a.Equal(e)) +} + +func TestPointPallasSerialize(t *testing.T) { + ss := new(ScalarPallas).Random(testRng()).(*ScalarPallas) + g := new(Ep).Generator() + + ppt := new(Ep).Mul(g, ss.value) + require.Equal( + t, + ppt.ToAffineCompressed(), + []byte{ + 0x1c, + 0x6d, + 0x47, + 0x1f, + 0x4a, + 0x81, + 0xcd, + 0x8, + 0x4e, + 0xb3, + 0x17, + 0x9a, + 0xcd, + 0x17, + 0xe2, + 0x9a, + 0x24, + 0x69, + 0xb, + 0x4e, + 0x69, + 0x5f, + 0x35, + 0x1a, + 0x92, + 0x12, + 0x95, + 0xc9, + 0xe6, + 0xd3, + 0x7a, + 0x0, + }, + ) + require.Equal( + t, + ppt.ToAffineUncompressed(), + []byte{ + 0x1c, + 0x6d, + 0x47, + 0x1f, + 0x4a, + 0x81, + 0xcd, + 0x8, + 0x4e, + 0xb3, + 0x17, + 0x9a, + 0xcd, + 0x17, + 0xe2, + 0x9a, + 0x24, + 0x69, + 0xb, + 0x4e, + 0x69, + 0x5f, + 0x35, + 0x1a, + 0x92, + 0x12, + 0x95, + 0xc9, + 0xe6, + 0xd3, + 0x7a, + 0x0, + 0x80, + 0x5c, + 0xa1, + 0x56, + 0x6d, + 0x1b, + 0x87, + 0x5f, + 0xb0, + 0x2e, + 0xae, + 0x85, + 0x4e, + 0x86, + 0xa9, + 0xcd, + 0xde, + 0x37, + 0x6a, + 0xc8, + 0x4a, + 0x80, + 0xf6, + 0x43, + 0xaa, + 0xe6, + 0x2c, + 0x2d, + 0x15, + 0xdb, + 0xda, + 0x29, + }, + ) + retP, err := new(Ep).FromAffineCompressed(ppt.ToAffineCompressed()) + require.NoError(t, err) + require.True(t, ppt.Equal(retP)) + retP, err = new(Ep).FromAffineUncompressed(ppt.ToAffineUncompressed()) + require.NoError(t, err) + require.True(t, ppt.Equal(retP)) + + // smoke test + for i := 0; i < 25; i++ { + s := new(ScalarPallas).Random(crand.Reader).(*ScalarPallas) + pt := new(Ep).Mul(g, s.value) + cmprs := pt.ToAffineCompressed() + require.Equal(t, len(cmprs), 32) + retC, err := new(Ep).FromAffineCompressed(cmprs) + require.NoError(t, err) + require.True(t, pt.Equal(retC)) + + un := pt.ToAffineUncompressed() + require.Equal(t, 
len(un), 64) + retU, err := new(Ep).FromAffineUncompressed(un) + require.NoError(t, err) + require.True(t, pt.Equal(retU)) + } +} + +func TestPointPallasCMove(t *testing.T) { + a := new(Ep).Random(crand.Reader) + b := new(Ep).Random(crand.Reader) + require.True(t, new(Ep).CMove(a, b, 1).Equal(b)) + require.True(t, new(Ep).CMove(a, b, 0).Equal(a)) +} + +func TestPointPallasSumOfProducts(t *testing.T) { + lhs := new(Ep).Generator() + lhs.Mul(lhs, new(fq.Fq).SetUint64(50)) + points := make([]*Ep, 5) + for i := range points { + points[i] = new(Ep).Generator() + } + scalars := []Scalar{ + new(ScalarPallas).New(8), + new(ScalarPallas).New(9), + new(ScalarPallas).New(10), + new(ScalarPallas).New(11), + new(ScalarPallas).New(12), + } + rhs := lhs.SumOfProducts(points, scalars) + require.NotNil(t, rhs) + require.True(t, lhs.Equal(rhs)) +} diff --git a/core/curves/secp256k1/secp256k1.go b/core/curves/secp256k1/secp256k1.go new file mode 100644 index 0000000..add8f6d --- /dev/null +++ b/core/curves/secp256k1/secp256k1.go @@ -0,0 +1,327 @@ +// Copyright 2010 The Go Authors. All rights reserved. +// Copyright 2011 ThePiachu. All rights reserved. +// Copyright 2015 Jeffrey Wilcke, Felix Lange, Gustav Simonsson. All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// * The name of ThePiachu may not be used to endorse or promote products +// derived from this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +package secp256k1 + +import ( + "crypto/elliptic" + "math/big" +) + +const ( + // number of bits in a big.Word + wordBits = 32 << (uint64(^big.Word(0)) >> 63) + // number of bytes in a big.Word + wordBytes = wordBits / 8 +) + +// ScalarMult returns k*(Bx,By) where k is a number in big-endian form. 
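+// It is a plain left-to-right double-and-add over the bits of k, carried out
+// in Jacobian coordinates, and is not constant time.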
+func (bitCurve *BitCurve) ScalarMult(Bx, By *big.Int, k []byte) (*big.Int, *big.Int) { + Bz := new(big.Int).SetInt64(1) + x, y, z := new(big.Int), new(big.Int), new(big.Int) + + for _, byte := range k { + for bitNum := 0; bitNum < 8; bitNum++ { + x, y, z = bitCurve.doubleJacobian(x, y, z) + if byte&0x80 == 0x80 { + x, y, z = bitCurve.addJacobian(Bx, By, Bz, x, y, z) + } + byte <<= 1 + } + } + + return bitCurve.affineFromJacobian(x, y, z) +} + +// readBits encodes the absolute value of bigint as big-endian bytes. Callers +// must ensure that buf has enough space. If buf is too short the result will +// be incomplete. +func readBits(bigint *big.Int, buf []byte) { + i := len(buf) + for _, d := range bigint.Bits() { + for j := 0; j < wordBytes && i > 0; j++ { + i-- + buf[i] = byte(d) + d >>= 8 + } + } +} + +// This code is from https://github.com/ThePiachu/GoBit and implements +// several Koblitz elliptic curves over prime fields. +// +// The curve methods, internally, on Jacobian coordinates. For a given +// (x, y) position on the curve, the Jacobian coordinates are (x1, y1, +// z1) where x = x1/z1² and y = y1/z1³. The greatest speedups come +// when the whole calculation can be performed within the transform +// (as in ScalarMult and ScalarBaseMult). But even for Add and Double, +// it's faster to apply and reverse the transform than to operate in +// affine coordinates. + +// A BitCurve represents a Koblitz Curve with a=0. +// See http://www.hyperelliptic.org/EFD/g1p/auto-shortw.html +type BitCurve struct { + P *big.Int // the order of the underlying field + N *big.Int // the order of the base point + B *big.Int // the constant of the BitCurve equation + Gx, Gy *big.Int // (x,y) of the base point + BitSize int // the size of the underlying field +} + +func (bitCurve *BitCurve) Params() *elliptic.CurveParams { + return &elliptic.CurveParams{ + P: bitCurve.P, + N: bitCurve.N, + B: bitCurve.B, + Gx: bitCurve.Gx, + Gy: bitCurve.Gy, + BitSize: bitCurve.BitSize, + } +} + +// IsOnCurve returns true if the given (x,y) lies on the BitCurve. +func (bitCurve *BitCurve) IsOnCurve(x, y *big.Int) bool { + // y² = x³ + b + y2 := new(big.Int).Mul(y, y) // y² + y2.Mod(y2, bitCurve.P) // y²%P + + x3 := new(big.Int).Mul(x, x) // x² + x3.Mul(x3, x) // x³ + + x3.Add(x3, bitCurve.B) // x³+B + x3.Mod(x3, bitCurve.P) //(x³+B)%P + + return x3.Cmp(y2) == 0 +} + +// affineFromJacobian reverses the Jacobian transform. See the comment at the +// top of the file. +func (bitCurve *BitCurve) affineFromJacobian(x, y, z *big.Int) (xOut, yOut *big.Int) { + if z.Sign() == 0 { + return new(big.Int), new(big.Int) + } + + zinv := new(big.Int).ModInverse(z, bitCurve.P) + zinvsq := new(big.Int).Mul(zinv, zinv) + + xOut = new(big.Int).Mul(x, zinvsq) + xOut.Mod(xOut, bitCurve.P) + zinvsq.Mul(zinvsq, zinv) + yOut = new(big.Int).Mul(y, zinvsq) + yOut.Mod(yOut, bitCurve.P) + return xOut, yOut +} + +// Add returns the sum of (x1,y1) and (x2,y2) +func (bitCurve *BitCurve) Add(x1, y1, x2, y2 *big.Int) (*big.Int, *big.Int) { + // If one point is at infinity, return the other point. + // Adding the point at infinity to any point will preserve the other point. 
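+ // The point at infinity is represented as (0, 0), which is also what
+ // affineFromJacobian returns for the Jacobian point at infinity.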
+ if x1.Sign() == 0 && y1.Sign() == 0 { + return x2, y2 + } + if x2.Sign() == 0 && y2.Sign() == 0 { + return x1, y1 + } + z := new(big.Int).SetInt64(1) + if x1.Cmp(x2) == 0 && y1.Cmp(y2) == 0 { + return bitCurve.affineFromJacobian(bitCurve.doubleJacobian(x1, y1, z)) + } + return bitCurve.affineFromJacobian(bitCurve.addJacobian(x1, y1, z, x2, y2, z)) +} + +// addJacobian takes two points in Jacobian coordinates, (x1, y1, z1) and +// (x2, y2, z2) and returns their sum, also in Jacobian form. +func (bitCurve *BitCurve) addJacobian( + x1, y1, z1, x2, y2, z2 *big.Int, +) (*big.Int, *big.Int, *big.Int) { + // See http://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian-0.html#addition-add-2007-bl + z1z1 := new(big.Int).Mul(z1, z1) + z1z1.Mod(z1z1, bitCurve.P) + z2z2 := new(big.Int).Mul(z2, z2) + z2z2.Mod(z2z2, bitCurve.P) + + u1 := new(big.Int).Mul(x1, z2z2) + u1.Mod(u1, bitCurve.P) + u2 := new(big.Int).Mul(x2, z1z1) + u2.Mod(u2, bitCurve.P) + h := new(big.Int).Sub(u2, u1) + if h.Sign() == -1 { + h.Add(h, bitCurve.P) + } + i := new(big.Int).Lsh(h, 1) + i.Mul(i, i) + j := new(big.Int).Mul(h, i) + + s1 := new(big.Int).Mul(y1, z2) + s1.Mul(s1, z2z2) + s1.Mod(s1, bitCurve.P) + s2 := new(big.Int).Mul(y2, z1) + s2.Mul(s2, z1z1) + s2.Mod(s2, bitCurve.P) + r := new(big.Int).Sub(s2, s1) + if r.Sign() == -1 { + r.Add(r, bitCurve.P) + } + r.Lsh(r, 1) + v := new(big.Int).Mul(u1, i) + + x3 := new(big.Int).Set(r) + x3.Mul(x3, x3) + x3.Sub(x3, j) + x3.Sub(x3, v) + x3.Sub(x3, v) + x3.Mod(x3, bitCurve.P) + + y3 := new(big.Int).Set(r) + v.Sub(v, x3) + y3.Mul(y3, v) + s1.Mul(s1, j) + s1.Lsh(s1, 1) + y3.Sub(y3, s1) + y3.Mod(y3, bitCurve.P) + + z3 := new(big.Int).Add(z1, z2) + z3.Mul(z3, z3) + z3.Sub(z3, z1z1) + if z3.Sign() == -1 { + z3.Add(z3, bitCurve.P) + } + z3.Sub(z3, z2z2) + if z3.Sign() == -1 { + z3.Add(z3, bitCurve.P) + } + z3.Mul(z3, h) + z3.Mod(z3, bitCurve.P) + + return x3, y3, z3 +} + +// Double returns 2*(x,y) +func (bitCurve *BitCurve) Double(x1, y1 *big.Int) (*big.Int, *big.Int) { + z1 := new(big.Int).SetInt64(1) + return bitCurve.affineFromJacobian(bitCurve.doubleJacobian(x1, y1, z1)) +} + +// doubleJacobian takes a point in Jacobian coordinates, (x, y, z), and +// returns its double, also in Jacobian form. +func (bitCurve *BitCurve) doubleJacobian(x, y, z *big.Int) (*big.Int, *big.Int, *big.Int) { + // See http://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian-0.html#doubling-dbl-2009-l + + a := new(big.Int).Mul(x, x) // X1² + b := new(big.Int).Mul(y, y) // Y1² + c := new(big.Int).Mul(b, b) // B² + + d := new(big.Int).Add(x, b) // X1+B + d.Mul(d, d) //(X1+B)² + d.Sub(d, a) //(X1+B)²-A + d.Sub(d, c) //(X1+B)²-A-C + d.Mul(d, big.NewInt(2)) // 2*((X1+B)²-A-C) + + e := new(big.Int).Mul(big.NewInt(3), a) // 3*A + f := new(big.Int).Mul(e, e) // E² + + x3 := new(big.Int).Mul(big.NewInt(2), d) // 2*D + x3.Sub(f, x3) // F-2*D + x3.Mod(x3, bitCurve.P) + + y3 := new(big.Int).Sub(d, x3) // D-X3 + y3.Mul(e, y3) // E*(D-X3) + y3.Sub(y3, new(big.Int).Mul(big.NewInt(8), c)) // E*(D-X3)-8*C + y3.Mod(y3, bitCurve.P) + + z3 := new(big.Int).Mul(y, z) // Y1*Z1 + z3.Mul(big.NewInt(2), z3) // 3*Y1*Z1 + z3.Mod(z3, bitCurve.P) + + return x3, y3, z3 +} + +// ScalarBaseMult returns k*G, where G is the base point of the group and k is +// an integer in big-endian form. +func (bitCurve *BitCurve) ScalarBaseMult(k []byte) (*big.Int, *big.Int) { + return bitCurve.ScalarMult(bitCurve.Gx, bitCurve.Gy, k) +} + +// Marshal converts a point into the form specified in section 4.3.6 of ANSI +// X9.62. 
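Concretely, the uncompressed form produced below is a 0x04 tag byte followed by the X and Y coordinates, each left-padded to ceil(BitSize/8) bytes, so a secp256k1 point serialises to 65 bytes. A minimal round-trip sketch of the Marshal/Unmarshal pair; the import path is an assumption inferred from the module layout and may differ.

package main

import (
	"fmt"

	"github.com/sonr-io/sonr/crypto/core/curves/secp256k1" // assumed import path
)

func main() {
	curve := secp256k1.S256()

	// Encode the base point: 1 tag byte + 32-byte X + 32-byte Y.
	enc := curve.Marshal(curve.Gx, curve.Gy)
	fmt.Println(len(enc), enc[0]) // 65 4

	// Decode and confirm the round trip recovers the same coordinates.
	x, y := curve.Unmarshal(enc)
	fmt.Println(x.Cmp(curve.Gx) == 0 && y.Cmp(curve.Gy) == 0) // true
}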
+func (bitCurve *BitCurve) Marshal(x, y *big.Int) []byte { + byteLen := (bitCurve.BitSize + 7) >> 3 + ret := make([]byte, 1+2*byteLen) + ret[0] = 4 // uncompressed point flag + readBits(x, ret[1:1+byteLen]) + readBits(y, ret[1+byteLen:]) + return ret +} + +// Unmarshal converts a point, serialised by Marshal, into an x, y pair. On +// error, x = nil. +func (bitCurve *BitCurve) Unmarshal(data []byte) (x, y *big.Int) { + byteLen := (bitCurve.BitSize + 7) >> 3 + if len(data) != 1+2*byteLen { + return x, y + } + if data[0] != 4 { // uncompressed form + return x, y + } + x = new(big.Int).SetBytes(data[1 : 1+byteLen]) + y = new(big.Int).SetBytes(data[1+byteLen:]) + return x, y +} + +var theCurve = new(BitCurve) + +func init() { + // See SEC 2 section 2.7.1 + // curve parameters taken from: + // http://www.secg.org/sec2-v2.pdf + theCurve.P, _ = new( + big.Int, + ).SetString("0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F", 0) + theCurve.N, _ = new( + big.Int, + ).SetString("0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141", 0) + theCurve.B, _ = new( + big.Int, + ).SetString("0x0000000000000000000000000000000000000000000000000000000000000007", 0) + theCurve.Gx, _ = new( + big.Int, + ).SetString("0x79BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798", 0) + theCurve.Gy, _ = new( + big.Int, + ).SetString("0x483ADA7726A3C4655DA4FBFC0E1108A8FD17B448A68554199C47D08FFB10D4B8", 0) + theCurve.BitSize = 256 +} + +// S256 returns a BitCurve which implements secp256k1. +func S256() *BitCurve { + return theCurve +} diff --git a/core/curves/sp256_curve.go b/core/curves/sp256_curve.go new file mode 100644 index 0000000..e7831f0 --- /dev/null +++ b/core/curves/sp256_curve.go @@ -0,0 +1,11 @@ +package curves + +import ( + "crypto/elliptic" + + "github.com/dustinxie/ecc" +) + +func SP256() elliptic.Curve { + return ecc.P256k1() +} diff --git a/core/hash.go b/core/hash.go new file mode 100644 index 0000000..5a975df --- /dev/null +++ b/core/hash.go @@ -0,0 +1,339 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package core + +import ( + "bytes" + "crypto/elliptic" + "crypto/sha256" + "crypto/sha512" + "fmt" + "hash" + "math" + "math/big" + + "github.com/btcsuite/btcd/btcec/v2" + "golang.org/x/crypto/hkdf" + "golang.org/x/crypto/sha3" + + "github.com/sonr-io/sonr/crypto/internal" +) + +type HashField struct { + // F_p^k + Order *big.Int // p^k + Characteristic *big.Int // p + ExtensionDegree *big.Int // k +} + +type Params struct { + F *HashField + SecurityParameter int + Hash func() hash.Hash + L int +} + +// Helper functions reserved for future use + +// getCurveFromName returns the appropriate elliptic curve for a given name +// nolint:unused,deadcode +func getCurveFromName(name string) elliptic.Curve { + switch name { + case "P-256", "secp256r1": + return elliptic.P256() + case "P-384", "secp384r1": + return elliptic.P384() + case "P-521", "secp521r1": + return elliptic.P521() + default: + return nil + } +} + +// validateCurveParams validates curve parameters for security +// nolint:unused,deadcode +func validateCurveParams(curve elliptic.Curve) error { + if curve == nil { + return fmt.Errorf("curve is nil") + } + params := curve.Params() + if params == nil { + return fmt.Errorf("curve parameters are nil") + } + if params.P == nil || params.N == nil { + return fmt.Errorf("invalid curve parameters: P or N is nil") + } + // Ensure the curve order is large enough for security + if params.N.BitLen() < 224 { + return fmt.Errorf("curve order too small for security: %d bits", params.N.BitLen()) + } + return nil +} + +func getParams(curve elliptic.Curve) (*Params, error) { + switch curve.Params().Name { + case btcec.S256().Name, elliptic.P256().Params().Name: + return &Params{ + F: &HashField{ + Order: curve.Params().P, + Characteristic: curve.Params().P, + ExtensionDegree: new(big.Int).SetInt64(1), + }, + SecurityParameter: 128, + Hash: sha256.New, + L: 48, + }, nil + case elliptic.P384().Params().Name, "P-384", "secp384r1": + // P-384 curve with 192-bit security level + return &Params{ + F: &HashField{ + Order: curve.Params().P, + Characteristic: curve.Params().P, + ExtensionDegree: new(big.Int).SetInt64(1), + }, + SecurityParameter: 192, + Hash: sha3.New384, // Using SHA3-384 for P-384 + L: 72, // 192-bit security requires 72 bytes + }, nil + case elliptic.P521().Params().Name, "P-521", "secp521r1": + // P-521 curve with 256-bit security level + return &Params{ + F: &HashField{ + Order: curve.Params().P, + Characteristic: curve.Params().P, + ExtensionDegree: new(big.Int).SetInt64(1), + }, + SecurityParameter: 256, + Hash: sha512.New, // Using SHA-512 for P-521 + L: 98, // 256-bit security with P-521's 521-bit field + }, nil + case "Bls12381G1": + return &Params{ + F: &HashField{ + Order: curve.Params().P, + Characteristic: curve.Params().P, + ExtensionDegree: new(big.Int).SetInt64(1), + }, + SecurityParameter: 128, + Hash: sha256.New, + L: 48, + }, nil + case "ed25519": + return &Params{ + F: &HashField{ + Order: curve.Params().P, + Characteristic: curve.Params().P, + ExtensionDegree: new(big.Int).SetInt64(1), + }, + SecurityParameter: 128, + Hash: sha256.New, + L: 48, + }, nil + default: + return nil, fmt.Errorf("unsupported curve: %s", curve.Params().Name) + } +} + +func I2OSP(b, n int) []byte { + os := new(big.Int).SetInt64(int64(b)).Bytes() + if n > len(os) { + var buf bytes.Buffer + buf.Write(make([]byte, n-len(os))) + buf.Write(os) + return buf.Bytes() + } + return os[:n] +} + +func OS2IP(os []byte) *big.Int { + return 
new(big.Int).SetBytes(os) +} + +func hashThis(f func() hash.Hash, this []byte) ([]byte, error) { + h := f() + w, err := h.Write(this) + if w != len(this) { + return nil, fmt.Errorf("bytes written to hash doesn't match expected") + } else if err != nil { + return nil, err + } + v := h.Sum(nil) + return v, nil +} + +func concat(xs ...[]byte) []byte { + var result []byte + for _, x := range xs { + result = append(result, x...) + } + return result +} + +func xor(b1, b2 []byte) []byte { + // b1 and b2 must be same length + result := make([]byte, len(b1)) + for i := range b1 { + result[i] = b1[i] ^ b2[i] + } + + return result +} + +func ExpandMessageXmd(f func() hash.Hash, msg, DST []byte, lenInBytes int) ([]byte, error) { + // https://tools.ietf.org/html/draft-irtf-cfrg-hash-to-curve-10#section-5.4.1 + + // step 1 + ell := int(math.Ceil(float64(lenInBytes) / float64(f().Size()))) + + // step 2 + if ell > 255 { + return nil, fmt.Errorf("ell > 255") + } + + // step 3 + dstPrime := append(DST, I2OSP(len(DST), 1)...) + + // step 4 + zPad := I2OSP(0, f().BlockSize()) + + // step 5 & 6 + msgPrime := concat(zPad, msg, I2OSP(lenInBytes, 2), I2OSP(0, 1), dstPrime) + + var err error + + b := make([][]byte, ell+1) + + // step 7 + b[0], err = hashThis(f, msgPrime) + if err != nil { + return nil, err + } + + // step 8 + b[1], err = hashThis(f, concat(b[0], I2OSP(1, 1), dstPrime)) + if err != nil { + return nil, err + } + + // step 9 + for i := 2; i <= ell; i++ { + // step 10 + b[i], err = hashThis(f, concat(xor(b[0], b[i-1]), I2OSP(i, 1), dstPrime)) + if err != nil { + return nil, err + } + } + // step 11 + uniformBytes := concat(b[1:]...) + + // step 12 + return uniformBytes[:lenInBytes], nil +} + +func hashToField(msg []byte, count int, curve elliptic.Curve) ([][]*big.Int, error) { + // https://tools.ietf.org/html/draft-irtf-cfrg-hash-to-curve-10#section-5.3 + + parameters, err := getParams(curve) + if err != nil { + return nil, err + } + + f := parameters.Hash + + DST := []byte("Coinbase_tECDSA") + + m := int(parameters.F.ExtensionDegree.Int64()) + L := parameters.L + + // step 1 + lenInBytes := count * m * L + + // step 2 + uniformBytes, err := ExpandMessageXmd(f, msg, DST, lenInBytes) + if err != nil { + return nil, err + } + + u := make([][]*big.Int, count) + + // step 3 + for i := 0; i < count; i++ { + e := make([]*big.Int, m) + // step 4 + for j := 0; j < m; j++ { + // step 5 + elmOffset := L * (j + i*m) + // step 6 + tv := uniformBytes[elmOffset : elmOffset+L] + // step 7 + e[j] = new(big.Int).Mod(OS2IP(tv), parameters.F.Characteristic) + + } + // step 8 + u[i] = e + } + // step 9 + return u, nil +} + +func Hash(msg []byte, curve elliptic.Curve) (*big.Int, error) { + u, err := hashToField(msg, 1, curve) + if err != nil { + return nil, err + } + return u[0][0], nil +} + +// fiatShamir computes the HKDF over many values +// iteratively such that each value is hashed separately +// and based on preceding values +// +// The first value is computed as okm_0 = KDF(f || value) where +// f is a byte slice of 32 0xFF +// salt is zero-filled byte slice with length equal to the hash output length +// info is the protocol name +// okm is the 32 byte output +// +// The each subsequent iteration is computed by as okm_i = KDF(f_i || value || okm_{i-1}) +// where f_i = 2^b - 1 - i such that there are 0xFF bytes prior to the value. +// f_1 changes the first byte to 0xFE, f_2 to 0xFD. The previous okm is appended to the value +// to provide cryptographic domain separation. 
+// See https://signal.org/docs/specifications/x3dh/#cryptographic-notation +// and https://signal.org/docs/specifications/xeddsa/#hash-functions +// for more details. +// This uses the KDF function similar to X3DH for each `value` +// But changes the key just like XEdDSA where the prefix bytes change by a single bit +func FiatShamir(values ...*big.Int) ([]byte, error) { + // Don't accept any nil arguments + if AnyNil(values...) { + return nil, internal.ErrNilArguments + } + + info := []byte("Coinbase tECDSA 1.0") + salt := make([]byte, 32) + okm := make([]byte, 32) + f := bytes.Repeat([]byte{0xFF}, 32) + + for _, b := range values { + ikm := append(f, b.Bytes()...) + ikm = append(ikm, okm...) + kdf := hkdf.New(sha256.New, ikm, salt, info) + n, err := kdf.Read(okm) + if err != nil { + return nil, err + } + if n != len(okm) { + return nil, fmt.Errorf( + "unable to read expected number of bytes want=%v got=%v", + len(okm), + n, + ) + } + internal.ByteSub(f) + } + return okm, nil +} diff --git a/core/hash_test.go b/core/hash_test.go new file mode 100755 index 0000000..77b8963 --- /dev/null +++ b/core/hash_test.go @@ -0,0 +1,323 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package core + +import ( + "crypto/sha256" + "crypto/sha512" + "encoding/hex" + "fmt" + "hash" + "math/big" + "strconv" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestExpandMessageXmd(t *testing.T) { + // https://tools.ietf.org/html/draft-irtf-cfrg-hash-to-curve-10#appendix-K.1 + tests := []struct { + f func() hash.Hash + msg []byte + lenInBytesHex string + expectedHex string + }{ + { + sha256.New, + []byte(""), + "20", + "f659819a6473c1835b25ea59e3d38914c98b374f0970b7e4c92181df928fca88", + }, + { + sha256.New, + []byte("abc"), + "20", + "1c38f7c211ef233367b2420d04798fa4698080a8901021a795a1151775fe4da7", + }, + { + sha256.New, + []byte("abcdef0123456789"), + "20", + "8f7e7b66791f0da0dbb5ec7c22ec637f79758c0a48170bfb7c4611bd304ece89", + }, + { + sha256.New, + []byte( + "q128_qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq", + ), + "20", + "72d5aa5ec810370d1f0013c0df2f1d65699494ee2a39f72e1716b1b964e1c642", + }, + { + sha256.New, + []byte( + "a512_aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + ), + "20", + "3b8e704fc48336aca4c2a12195b720882f2162a4b7b13a9c350db46f429b771b", + }, + { + sha256.New, + []byte(""), + "80", + "8bcffd1a3cae24cf9cd7ab85628fd111bb17e3739d3b53f89580d217aa79526f1708354a76a402d3569d6a9d19ef3de4d0b991e4f54b9f20dcde9b95a66824cbdf6c1a963a1913d43fd7ac443a02fc5d9d8d77e2071b86ab114a9f34150954a7531da568a1ea8c760861c0cde2005afc2c114042ee7b5848f5303f0611cf297f", + }, + { + sha256.New, + []byte("abc"), + "80", + "fe994ec51bdaa821598047b3121c149b364b178606d5e72bfbb713933acc29c186f316baecf7ea22212f2496ef3f785a27e84a40d8b299cec56032763eceeff4c61bd1fe65ed81decafff4a31d0198619c0aa0c6c51fca15520789925e813dcfd318b542f8799441271f4db9ee3b8092a7a2e8d5b75b73e28fb1ab6b4573c192", + }, + { + sha256.New, + 
[]byte("abcdef0123456789"), + "80", + "c9ec7941811b1e19ce98e21db28d22259354d4d0643e301175e2f474e030d32694e9dd5520dde93f3600d8edad94e5c364903088a7228cc9eff685d7eaac50d5a5a8229d083b51de4ccc3733917f4b9535a819b445814890b7029b5de805bf62b33a4dc7e24acdf2c924e9fe50d55a6b832c8c84c7f82474b34e48c6d43867be", + }, + { + sha256.New, + []byte( + "q128_qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq", + ), + "80", + "48e256ddba722053ba462b2b93351fc966026e6d6db493189798181c5f3feea377b5a6f1d8368d7453faef715f9aecb078cd402cbd548c0e179c4ed1e4c7e5b048e0a39d31817b5b24f50db58bb3720fe96ba53db947842120a068816ac05c159bb5266c63658b4f000cbf87b1209a225def8ef1dca917bcda79a1e42acd8069", + }, + { + sha256.New, + []byte( + "a512_aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + ), + "80", + "396962db47f749ec3b5042ce2452b619607f27fd3939ece2746a7614fb83a1d097f554df3927b084e55de92c7871430d6b95c2a13896d8a33bc48587b1f66d21b128a1a8240d5b0c26dfe795a1a842a0807bb148b77c2ef82ed4b6c9f7fcb732e7f94466c8b51e52bf378fba044a31f5cb44583a892f5969dcd73b3fa128816e", + }, + + { + sha512.New, + []byte(""), + "20", + "2eaa1f7b5715f4736e6a5dbe288257abf1faa028680c1d938cd62ac699ead642", + }, + { + sha512.New, + []byte("abc"), + "20", + "0eeda81f69376c80c0f8986496f22f21124cb3c562cf1dc608d2c13005553b0f", + }, + { + sha512.New, + []byte("abcdef0123456789"), + "20", + "2e375fc05e05e80dbf3083796fde2911789d9e8847e1fcebf4ca4b36e239b338", + }, + { + sha512.New, + []byte( + "q128_qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq", + ), + "20", + "c37f9095fe7fe4f01c03c3540c1229e6ac8583b07510085920f62ec66acc0197", + }, + { + sha512.New, + []byte( + "a512_aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + ), + "20", + "af57a7f56e9ed2aa88c6eab45c8c6e7638ae02da7c92cc04f6648c874ebd560e", + }, + { + sha512.New, + []byte(""), + "80", + "0687ce02eba5eb3faf1c3c539d1f04babd3c0f420edae244eeb2253b6c6d6865145c31458e824b4e87ca61c3442dc7c8c9872b0b7250aa33e0668ccebbd2b386de658ca11a1dcceb51368721ae6dcd2d4bc86eaebc4e0d11fa02ad053289c9b28a03da6c942b2e12c14e88dbde3b0ba619d6214f47212b628f3e1b537b66efcf", + }, + { + sha512.New, + []byte("abc"), + "80", + "779ae4fd8a92f365e4df96b9fde97b40486bb005c1a2096c86f55f3d92875d89045fbdbc4a0e9f2d3e1e6bcd870b2d7131d868225b6fe72881a81cc5166b5285393f71d2e68bb0ac603479959370d06bdbe5f0d8bfd9af9494d1e4029bd68ab35a561341dd3f866b3ef0c95c1fdfaab384ce24a23427803dda1db0c7d8d5344a", + }, + { + sha512.New, + []byte("abcdef0123456789"), + "80", + 
"f0953d28846a50e9f88b7ae35b643fc43733c9618751b569a73960c655c068db7b9f044ad5a40d49d91c62302eaa26163c12abfa982e2b5d753049e000adf7630ae117aeb1fb9b61fc724431ac68b369e12a9481b4294384c3c890d576a79264787bc8076e7cdabe50c044130e480501046920ff090c1a091c88391502f0fbac", + }, + { + sha512.New, + []byte( + "q128_qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq", + ), + "80", + "64d3e59f0bc3c5e653011c914b419ba8310390a9585311fddb26791d26663bd71971c347e1b5e88ba9274d2445ed9dcf48eea9528d807b7952924159b7c27caa4f25a2ea94df9508e70a7012dfce0e8021b37e59ea21b80aa9af7f1a1f2efa4fbe523c4266ce7d342acaacd438e452c501c131156b4945515e9008d2b155c258", + }, + { + sha512.New, + []byte( + "a512_aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + ), + "80", + "01524feea5b22f6509f6b1e805c97df94faf4d821b01aadeebc89e9daaed0733b4544e50852fd3e019d58eaad6d267a134c8bc2c08bc46c10bfeff3ee03110bcd8a0d695d75a34092bd8b677bdd369a13325549abab54f4ac907b712bdd3567f38c4554c51902b735b81f43a7ef6f938c7690d107c052c7e7b795ac635b3200a", + }, + } + + for _, test := range tests { + t.Run(fmt.Sprintf("msg: %s", test.msg), func(t *testing.T) { + DST := []byte("QUUX-V01-CS02-with-expander") + lenInBytes, err := strconv.ParseInt(test.lenInBytesHex, 16, 64) + require.NoError(t, err) + expected, err := hex.DecodeString(test.expectedHex) + require.NoError(t, err) + actual, err := ExpandMessageXmd(test.f, test.msg, DST, int(lenInBytes)) + require.NoError(t, err) + require.Equal(t, actual, expected) + }) + } +} + +func TestFiatShamirDeterministic(t *testing.T) { + a := big.NewInt(1) + hash, err := FiatShamir(a) + require.Nil(t, err) + require.Equal( + t, + hash, + []byte{ + 0x3d, + 0x95, + 0xc0, + 0x9f, + 0x41, + 0x33, + 0x25, + 0xbb, + 0x10, + 0x77, + 0x2d, + 0x83, + 0x1d, + 0x3e, + 0x67, + 0x98, + 0xce, + 0x7b, + 0xde, + 0xd5, + 0x7f, + 0x9, + 0x7e, + 0xfc, + 0x77, + 0x23, + 0xfc, + 0x49, + 0x36, + 0x82, + 0xdd, + 0x6a, + }, + ) + + a = big.NewInt(0xaaa) + hash, err = FiatShamir(a) + require.Nil(t, err) + require.Equal( + t, + hash, + []byte{ + 0x61, + 0xa9, + 0x7f, + 0x6, + 0x74, + 0xec, + 0x47, + 0xf5, + 0xac, + 0x30, + 0xa0, + 0x4c, + 0x34, + 0xdb, + 0x51, + 0x97, + 0x60, + 0x7e, + 0xb2, + 0x4a, + 0x97, + 0x9, + 0xa5, + 0xb9, + 0x1c, + 0x89, + 0x30, + 0x39, + 0xb7, + 0x29, + 0xe6, + 0x30, + }, + ) +} + +func TestFiatShamirEqual(t *testing.T) { + pi, err := FiatShamir(One) + require.Nil(t, err) + pi_, err := FiatShamir(One) + require.Nil(t, err) + require.Equal(t, pi, pi_) +} + +func TestFiatShamirNotEqual(t *testing.T) { + pi, err := FiatShamir(One) + require.Nil(t, err) + pi_, err := FiatShamir(Two) + require.Nil(t, err) + require.NotEqual(t, pi, pi_) +} + +func TestFiatShamirOrderDependent(t *testing.T) { + a := big.NewInt(1) + b := big.NewInt(100) + + pi, err := FiatShamir(a, b) + require.Nil(t, err) + pi_, err := FiatShamir(a, b) + require.Nil(t, err) + require.Equal(t, pi, pi_) + + q, _ := FiatShamir(b, a) + require.NotEqual(t, pi, q) +} + +func TestFiatShamirExtensionAttackResistance(t *testing.T) { + a := 
big.NewInt(0x00FF) + b := big.NewInt(0xFF00) + + c := big.NewInt(0x00) + d := big.NewInt(0xFFFF00) + + pi, err := FiatShamir(a, b) + require.Nil(t, err) + pi_, err := FiatShamir(c, d) + require.Nil(t, err) + require.NotEqual(t, pi, pi_) + + a = big.NewInt(0x0000) + b = big.NewInt(0xFFFF) + + c = big.NewInt(0x0000F) + d = big.NewInt(0xFFF) + + q, err := FiatShamir(a, b) + require.Nil(t, err) + q_, err := FiatShamir(c, d) + require.Nil(t, err) + require.NotEqual(t, q, q_) +} diff --git a/core/mod.go b/core/mod.go new file mode 100644 index 0000000..0b4f4cb --- /dev/null +++ b/core/mod.go @@ -0,0 +1,178 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// +// Package core contains convenience functions for modular arithmetic. + +// Package core contains a set of primitives, including but not limited to various +// elliptic curves, hashes, and commitment schemes. These primitives are used internally +// and can also be used independently on their own externally. +package core + +import ( + crand "crypto/rand" + "crypto/subtle" + "fmt" + "math/big" + + "github.com/sonr-io/sonr/crypto/internal" +) + +var ( + // Zero is additive identity in the set of integers + Zero = big.NewInt(0) + + // One is the multiplicative identity in the set of integers + One = big.NewInt(1) + + // Two is the odd prime + Two = big.NewInt(2) +) + +// ConstantTimeEqByte determines if a, b have identical byte serialization +// and signs. It uses the crypto/subtle package to get a constant time comparison +// over byte representations. Return value is a byte which may be +// useful in bitwise operations. Returns 0x1 if the two values have the +// identical sign and byte representation; 0x0 otherwise. +func ConstantTimeEqByte(a, b *big.Int) byte { + if a == nil && a == b { + return 1 + } + if a == nil || b == nil { + return 0 + } + // Determine if the byte representations are the same + var sameBytes byte + if subtle.ConstantTimeCompare(a.Bytes(), b.Bytes()) == 1 { + sameBytes = 1 + } else { + sameBytes = 0 + } + + // Determine if the signs are the same + var sameSign byte + if a.Sign() == b.Sign() { + sameSign = 1 + } else { + sameSign = 0 + } + + // Report the conjunction + return sameBytes & sameSign +} + +// ConstantTimeEq determines if a, b have identical byte serialization +// and uses the crypto/subtle package to get a constant time comparison +// over byte representations. 
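Because ConstantTimeEqByte returns a byte rather than a bool, several comparisons can be combined with bitwise AND without branching on secret-dependent data. A small sketch, assuming the package is importable as github.com/sonr-io/sonr/crypto/core (inferred from the import paths used elsewhere in this module):

package main

import (
	"fmt"
	"math/big"

	"github.com/sonr-io/sonr/crypto/core" // assumed import path
)

func main() {
	a, b := big.NewInt(42), big.NewInt(42)
	c, d := big.NewInt(7), big.NewInt(-7)

	// 1 only if every pair matches in both bytes and sign; no data-dependent branches.
	ok := core.ConstantTimeEqByte(a, b) & core.ConstantTimeEqByte(c, d)
	fmt.Println(ok) // 0, because 7 and -7 differ in sign
}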
+func ConstantTimeEq(a, b *big.Int) bool { + return ConstantTimeEqByte(a, b) == 1 +} + +// In determines ring membership before modular reduction: x ∈ Z_m +// returns nil if 0 ≤ x < m +func In(x, m *big.Int) error { + if AnyNil(x, m) { + return internal.ErrNilArguments + } + // subtle doesn't support constant time big.Int compare + // just use big.Cmp for now + // x ∈ Z_m ⇔ 0 ≤ x < m + if x.Cmp(Zero) != -1 && x.Cmp(m) == -1 { + return nil + } + return internal.ErrZmMembership +} + +// Add (modular addition): z = x+y (modulo m) +func Add(x, y, m *big.Int) (*big.Int, error) { + if AnyNil(x, y) { + return nil, internal.ErrNilArguments + } + z := new(big.Int).Add(x, y) + // Compute the residue if one is specified, otherwise + // we leave the value as an unbound integer + if m != nil { + z.Mod(z, m) + } + return z, nil +} + +// Mul (modular multiplication): z = x*y (modulo m) +func Mul(x, y, m *big.Int) (*big.Int, error) { + if AnyNil(x, y) { + return nil, internal.ErrNilArguments + } + z := new(big.Int).Mul(x, y) + + // Compute the residue if one is specified, otherwise + // we leave the value as an unbound integer + if m != nil { + z.Mod(z, m) + } + return z, nil +} + +// Exp (modular exponentiation): z = x^y (modulo m) +func Exp(x, y, m *big.Int) (*big.Int, error) { + if AnyNil(x, y) { + return nil, internal.ErrNilArguments + } + // This wrapper looks silly, but it makes the calling code read more consistently. + return new(big.Int).Exp(x, y, m), nil +} + +// Neg (modular negation): z = -x (modulo m) +func Neg(x, m *big.Int) (*big.Int, error) { + if AnyNil(x, m) { + return nil, internal.ErrNilArguments + } + z := new(big.Int).Neg(x) + z.Mod(z, m) + return z, nil +} + +// Inv (modular inverse): returns y such that xy = 1 (modulo m). +func Inv(x, m *big.Int) (*big.Int, error) { + if AnyNil(x, m) { + return nil, internal.ErrNilArguments + } + z := new(big.Int).ModInverse(x, m) + if z == nil { + return nil, fmt.Errorf("cannot compute the multiplicative inverse") + } + return z, nil +} + +// Rand generates a cryptographically secure random integer in the range: 1 < r < m. +func Rand(m *big.Int) (*big.Int, error) { + if m == nil { + return nil, internal.ErrNilArguments + } + + // Select a random element, but not zero or one + // The reason is the random element may be used as a Scalar or an exponent. + // An exponent of 1 is generally acceptable because the generator can't be + // 1. If a Scalar is combined with another Scalar like in fiat-shamir, it + // offers no hiding properties when multiplied. + for { + result, err := crand.Int(crand.Reader, m) + if err != nil { + return nil, err + } + + if result.Cmp(One) == 1 { // result > 1 + return result, nil + } + } +} + +// AnyNil determines if any of values are nil +func AnyNil(values ...*big.Int) bool { + for _, x := range values { + if x == nil { + return true + } + } + return false +} diff --git a/core/mod_test.go b/core/mod_test.go new file mode 100644 index 0000000..e9e3e67 --- /dev/null +++ b/core/mod_test.go @@ -0,0 +1,589 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package core + +import ( + "math/big" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/internal" +) + +var ( + four = big.NewInt(4) + + // Large numbers for testing -- computing with independent tooling + // x,y 100-digit numbers + x, _ = new( + big.Int, + ).SetString("7146643783615963513942641287213372249533955323510461217840179896547799100626220786140425637990097431", 10) + y, _ = new( + big.Int, + ).SetString("1747698065194620177681258504464368264357359841192790848951902311522815739310792522712583635858354245", 10) + sumxy, _ = new( + big.Int, + ).SetString("8894341848810583691623899791677740513891315164703252066792082208070614839937013308853009273848451676", 10) + xy, _ = new( + big.Int, + ).SetString("12490175513260779219420155073726764321605372267033815716483640700978475653623775696463227582174703069158832890348206546318843052423532258178885792744599932977235221784868792263260215861775082862444595", 10) + + // 101-digit modulus + m, _ = new( + big.Int, + ).SetString("85832751158419329546684678412285185885848111422509523329716452068504806021136687603399722116388773253", 10) + + // 99-digit modulus + n, _ = new( + big.Int, + ).SetString("604464499356780653111583485887412477603580949137220100557796699530113283915988830359783807274682723", 10) +) + +func TestConstantTimeEqByteSound(t *testing.T) { + hundoDigit := internal.B10( + "3593421565679030456559622742114065111786271367498220644136232358421457354322411370928949366452183472", + ) + tests := []struct { + name string + a, b *big.Int + expected byte + }{ + {"positive: 5", internal.B10("5"), internal.B10("5"), 1}, + {"positive: 100", internal.B10("100"), internal.B10("100"), 1}, + {"positive: -1204", internal.B10("-1204"), internal.B10("-1204"), 1}, + {"positive: 100 digits", hundoDigit, hundoDigit, 1}, + {"positive: 0", internal.B10("0"), internal.B10("0"), 1}, + {"positive: 0/-0", internal.B10("0"), internal.B10("-0"), 1}, + {"positive: -0/-0", internal.B10("-0"), internal.B10("-0"), 1}, + + {"negative: 5/-5", internal.B10("5"), internal.B10("-5"), 0}, + {"negative: 5/500", internal.B10("5"), internal.B10("500"), 0}, + {"negative: 100/100 digit", internal.B10("100"), hundoDigit, 0}, + {"negative: -1204/-5", internal.B10("-1204"), internal.B10("-15"), 0}, + {"negative: 0/-5 digits", internal.B10("0"), internal.B10("-5"), 0}, + } + // Run all the tests! 
+ for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + actual := ConstantTimeEqByte(test.a, test.b) + require.Equal(t, test.expected, actual) + }) + } +} + +func TestConstantTimeEqSound(t *testing.T) { + hundoDigit := internal.B10( + "3593421565679030456559622742114065111786271367498220644136232358421457354322411370928949366452183472", + ) + tests := []struct { + name string + a, b *big.Int + expected bool + }{ + {"positive: 5", internal.B10("5"), internal.B10("5"), true}, + {"positive: 100", internal.B10("100"), internal.B10("100"), true}, + {"positive: -1204", internal.B10("-1204"), internal.B10("-1204"), true}, + {"positive: 100 digits", hundoDigit, hundoDigit, true}, + {"positive: 0", internal.B10("0"), internal.B10("0"), true}, + {"positive: 0/-0", internal.B10("0"), internal.B10("-0"), true}, + {"positive: -0/-0", internal.B10("-0"), internal.B10("-0"), true}, + + {"negative: 5/-5", internal.B10("5"), internal.B10("-5"), false}, + {"negative: 5/500", internal.B10("5"), internal.B10("500"), false}, + {"negative: 100/100 digit", internal.B10("100"), hundoDigit, false}, + {"negative: -1204/-5", internal.B10("-1204"), internal.B10("-15"), false}, + {"negative: 0/-5 digits", internal.B10("0"), internal.B10("-5"), false}, + } + // Run all the tests! + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + actual := ConstantTimeEq(test.a, test.b) + require.Equal(t, test.expected, actual) + }) + } +} + +// Ring membership tests +func TestIn(t *testing.T) { + // Some large numbers for testing + x, _ := new( + big.Int, + ).SetString("21888242871839275222246405745257275088696311157297823662689037894645226208583", 10) + y, _ := new( + big.Int, + ).SetString("32168432167132168106409840321684604654063138460840123871234181628904319728058", 10) + N := new(big.Int).Mul(x, y) // N = xy + NN := new(big.Int).Mul(N, N) // N^2 = N*N = x^2y^2 + errMember := internal.ErrZmMembership + + tests := []struct { + x *big.Int + m *big.Int + expected error + }{ + // + // Completist test for: Z_4 + // + // Too small: -x ∉ Z_4, \forall \x \in \N + {big.NewInt(-4), four, errMember}, + {big.NewInt(-3), four, errMember}, + {big.NewInt(-2), four, errMember}, + {big.NewInt(-1), four, errMember}, + + // Just right: {0,1,2,3} = Z_4 + {big.NewInt(0), four, nil}, + {big.NewInt(1), four, nil}, + {big.NewInt(2), four, nil}, + {big.NewInt(3), four, nil}, + + // Too big: {4,5,6,7} ∉ Z_4 + {big.NewInt(4), four, errMember}, + {big.NewInt(5), four, errMember}, + {big.NewInt(6), four, errMember}, + {big.NewInt(7), four, errMember}, + + // + // Large numbers + // + // x,y,N < N^2 + {x, NN, nil}, + {y, NN, nil}, + {N, NN, nil}, + + // N+x,N+y,2N < N^2 ⇒ x ∈ Z_N^2 + {big.NewInt(0).Add(N, x), NN, nil}, + {big.NewInt(0).Add(N, y), NN, nil}, + {big.NewInt(0).Add(N, N), NN, nil}, + + // Nx,Ny < N^2 ⇒ x ∈ Z_N^2 + {big.NewInt(0).Mul(N, x), NN, nil}, + {big.NewInt(0).Mul(N, y), NN, nil}, + + // -x,-y,-N ∉ Z_N^2 + {big.NewInt(0).Neg(x), NN, errMember}, + {big.NewInt(0).Neg(y), NN, errMember}, + {big.NewInt(0).Neg(N), NN, errMember}, + + // N^2 ∉ Z_N^2 + {NN, NN, errMember}, + } + + // All the tests! 
+ for _, test := range tests { + actual := In(test.x, test.m) + require.Equal(t, test.expected, actual) + } +} + +// Tests for modular addition with known answers +func TestAdd(t *testing.T) { + // Pre-compute some values + sumXyModn, err := Add(x, y, n) + require.Nil(t, err) + + tests := []struct { + x, y, m, expected *big.Int // inputs: x,y,m + }{ + // Small number tests + {big.NewInt(-1), big.NewInt(1), four, big.NewInt(0)}, + {big.NewInt(2), big.NewInt(1), four, big.NewInt(3)}, + {big.NewInt(0), big.NewInt(2), four, big.NewInt(2)}, + {big.NewInt(2), big.NewInt(4), four, big.NewInt(2)}, + {big.NewInt(15), big.NewInt(15), four, big.NewInt(2)}, + + // Large number tests + {x, y, m, sumxy}, + {y, x, m, sumxy}, + + // Large number Zero tests + {Zero, x, m, x}, + {x, Zero, m, x}, + {Zero, y, m, y}, + {y, Zero, m, y}, + + // Commutative + {x, y, m, sumxy}, + {y, x, m, sumxy}, + {x, y, n, sumXyModn}, + {y, x, n, sumXyModn}, + {sumXyModn, Zero, n, sumXyModn}, + {Zero, sumXyModn, n, sumXyModn}, + } + // All the tests! + for _, test := range tests { + actual, err := Add(test.x, test.y, test.m) + require.NoError(t, err) + require.Zero(t, actual.Cmp(test.expected)) + } +} + +// Tests for modular addition according to known invariants +func TestAddInvariants(t *testing.T) { + inputs := []*big.Int{x, y, Zero, One, new(big.Int).Neg(x), new(big.Int).Neg(y)} + moduli := []*big.Int{m, n, big.NewInt(10001)} + + // Run all combinations of the inputs/moduli + for _, x := range inputs { + for _, y := range inputs { + for _, m := range moduli { + + // Addition is commutative + z0, err := Add(x, y, m) + require.NoError(t, err) + z1, err := Add(y, x, m) + require.NoError(t, err) + require.Equal(t, z0, z1) + + // Addition is transitive: x+x+y == y+x+x == x+y+x + a0, _ := Add(x, x, m) + a0, _ = Add(a0, y, m) + + a1, _ := Add(y, x, m) + a1, _ = Add(a1, x, m) + + a2, _ := Add(x, y, m) + a2, _ = Add(a2, x, m) + + require.Equal(t, a0, a1) + require.Equal(t, a1, a2) + } + } + } +} + +// Tests modular multiplication with known answers +func TestMul(t *testing.T) { + // Pre-compute some values + xyModm := new(big.Int).Mod(xy, m) + + tests := []struct { + x, y, m, expected *big.Int // inputs: x,y,m + }{ + // Small number tests + {big.NewInt(-1), big.NewInt(1), four, big.NewInt(3)}, + {big.NewInt(2), big.NewInt(1), four, big.NewInt(2)}, + {big.NewInt(0), big.NewInt(2), four, big.NewInt(0)}, + {big.NewInt(2), big.NewInt(4), four, big.NewInt(0)}, + {big.NewInt(15), big.NewInt(15), four, big.NewInt(1)}, + + // Large number tests + {x, y, m, xyModm}, + {y, x, m, xyModm}, + + // Large number Zero tests + {Zero, x, m, Zero}, + {x, Zero, m, Zero}, + {Zero, y, n, Zero}, + } + // All the tests! 
+ for _, test := range tests { + z, err := Mul(test.x, test.y, test.m) + require.NoError(t, err) + require.Zero(t, z.Cmp(test.expected)) + } +} + +// Tests for modular multiplication according to known invariants +func TestMulInvariants(t *testing.T) { + inputs := []*big.Int{x, y, Zero, One, new(big.Int).Neg(x), new(big.Int).Neg(y)} + moduli := []*big.Int{m, n, big.NewInt(10001)} + + // Run all combinations of the inputs/moduli + for _, x := range inputs { + for _, y := range inputs { + for _, m := range moduli { + + // Mul is commutative + a, err := Mul(x, y, m) + require.NoError(t, err) + aʹ, err := Mul(y, x, m) + require.NoError(t, err) + require.Equal(t, a, aʹ) + + // Mul is transitive: (xx)y == (xy)x + z, _ := Mul(x, x, m) + z, _ = Mul(z, y, m) + + zʹ, _ := Mul(x, y, m) + zʹ, _ = Mul(zʹ, x, m) + require.Equal(t, z, zʹ) + } + } + } +} + +// Tests modular negation with known answers +func TestNeg(t *testing.T) { + tests := []struct { + x, m, e *big.Int + }{ + {big.NewInt(1), big.NewInt(7), big.NewInt(6)}, + {big.NewInt(2), big.NewInt(7), big.NewInt(5)}, + {big.NewInt(3), big.NewInt(7), big.NewInt(4)}, + {big.NewInt(4), big.NewInt(7), big.NewInt(3)}, + {big.NewInt(5), big.NewInt(7), big.NewInt(2)}, + {big.NewInt(6), big.NewInt(7), big.NewInt(1)}, + + {big.NewInt(-1), big.NewInt(7), big.NewInt(1)}, + {big.NewInt(-2), big.NewInt(7), big.NewInt(2)}, + {big.NewInt(-3), big.NewInt(7), big.NewInt(3)}, + {big.NewInt(-4), big.NewInt(7), big.NewInt(4)}, + {big.NewInt(-5), big.NewInt(7), big.NewInt(5)}, + {big.NewInt(-6), big.NewInt(7), big.NewInt(6)}, + + {big.NewInt(8), big.NewInt(7), big.NewInt(6)}, + {big.NewInt(9), big.NewInt(7), big.NewInt(5)}, + {big.NewInt(10), big.NewInt(7), big.NewInt(4)}, + {big.NewInt(11), big.NewInt(7), big.NewInt(3)}, + {big.NewInt(12), big.NewInt(7), big.NewInt(2)}, + {big.NewInt(13), big.NewInt(7), big.NewInt(1)}, + } + + for _, test := range tests { + r, err := Neg(test.x, test.m) + require.NoError(t, err) + if r.Cmp(test.e) != 0 { + t.Errorf("TestNeg failed. Expected %v, got: %v ", test.e, r) + } + } +} + +func TestNegInvariants(t *testing.T) { + tests := []struct { + x, m, e *big.Int + }{ + {big.NewInt(0), big.NewInt(7), big.NewInt(0)}, + {big.NewInt(7), big.NewInt(7), big.NewInt(0)}, + {big.NewInt(-7), big.NewInt(7), big.NewInt(0)}, + } + + for _, test := range tests { + r, err := Neg(test.x, test.m) + require.NoError(t, err) + if r.Cmp(test.e) != 0 { + t.Errorf("TestNeg failed. 
Expected %v, got: %v ", test.e, r) + } + } +} + +// Simple test for distinct Rand output +func TestRandDistinct(t *testing.T) { + // Each value should be distinct + a, _ := Rand(n) + b, _ := Rand(n) + c, _ := Rand(n) + + // ❄️❄️❄️ + require.NotEqual(t, a, b) + require.NotEqual(t, a, c) + require.NotEqual(t, b, c) +} + +// Rand values should be O(log2(m)) bits +func TestRandIsExpectedLength(t *testing.T) { + trials := 1000 + max := big.NewInt(-1) + + // Generate many nonces, keep the max + for i := 0; i < trials; i++ { + r, err := Rand(m) + require.NoError(t, err) + + // Nonces should be < m + if r.Cmp(m) != -1 { + t.Errorf("nonce too large, require %v < %v", r, m) + } + + if r.Cmp(max) == 1 { + max = r + } + } + + // With high probability, the max nonce should be very close N + lowerBound := new(big.Int).Rsh(m, 1) + if max.Cmp(lowerBound) == -1 { + t.Errorf("Expected max nonce: %v > %v", max, lowerBound) + } +} + +// Randomly selected nonces with a large modulus will be unique with overwhelming probability +func TestRandDistinctWithLargeModulus(t *testing.T) { + const iterations = 1000 + testUnique(t, iterations, func() *big.Int { + r, _ := Rand(m) + return r + }) +} + +// Calls sampleFunc() n times and asserts that the lower 64B of each output are unique. +func testUnique(t *testing.T, iterations int, sampleFunc func() *big.Int) { + // For simplicity, we test only the lower 64B of each nonce. This is sufficient + // to prove uniqueness and go-lang doesn't hash slices (no slices in maps) + const size = 256 / 8 + seen := make(map[[size]byte]bool) + var x [size]byte + + // Check the pre-computed commitments for uniquness + for i := 0; i < iterations; i++ { + // Retrieve a sample + sample := sampleFunc() + require.NotNil(t, sample) + + // Copy the bytes from slice>array + copy(x[:], sample.Bytes()) + + // Ensure each sample is unique + if seen[x] { + t.Errorf("duplicate sample found: %v", x) + } + seen[x] = true + } +} + +// Ensure Rand never returns 0 or 1. 
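That guarantee matters to callers: because Rand rejection-samples until the draw is strictly greater than one, the result can always be used as an invertible blinding factor or exponent. A minimal sketch, again assuming the github.com/sonr-io/sonr/crypto/core import path:

package main

import (
	"fmt"
	"math/big"

	"github.com/sonr-io/sonr/crypto/core" // assumed import path
)

func main() {
	m := big.NewInt(1031)  // small prime modulus, purely for illustration
	r, err := core.Rand(m) // uniform in (1, m); never 0 or 1
	if err != nil {
		panic(err)
	}
	rInv, err := core.Inv(r, m) // an inverse exists because m is prime and r > 1
	if err != nil {
		panic(err)
	}
	prod, _ := core.Mul(r, rInv, m)
	fmt.Println(prod) // 1
}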
+func TestRandNotZeroNotOne(t *testing.T) { + // Test for non-zero only useful when iterations >> |Z_m| + const iterations = 1000 + m := big.NewInt(5) + + for i := 0; i < iterations; i++ { + r, err := Rand(m) + require.NoError(t, err) + // Not 0 or 1 + require.NotEqual(t, r, Zero) + require.NotEqual(t, r, One) + } +} + +func TestRand_NilModulusErrors(t *testing.T) { + r, err := Rand(nil) + require.Nil(t, r) + require.Contains(t, err.Error(), internal.ErrNilArguments.Error()) +} + +// Double-inverse is the identity function in fields +func TestInvRoundTrip(t *testing.T) { + m := internal.B10("1031") // Prime-order modulus + + for _, a := range []*big.Int{ + internal.B10("500"), + internal.B10("-500"), + internal.B10("1"), + internal.B10("1030"), + } { + // Our expected value is the modular reduction of the test value + expected := a.Mod(a, m) + + // Invert and check + aInv, err := Inv(a, m) + require.NoError(t, err, "a=%v", a) + require.NotNil(t, aInv) + + // Invert again and check + a_, err := Inv(aInv, m) + if err != nil { + require.Equal(t, expected, a_) + } + } +} + +// Tests values for which there is no inverse in the given field +func TestInvNotFound(t *testing.T) { + m := internal.B10("1024") // m = 2^10 + // 0 and even numbers will not have inverse in this ring + + for _, a := range []*big.Int{ + internal.B10("500"), + internal.B10("-500"), + internal.B10("0"), + internal.B10("1024"), + internal.B10("512"), + internal.B10("300000000"), + } { + // Invert and check + aInv, err := Inv(a, m) + require.Error(t, err, "a=%v", a) + require.Nil(t, aInv) + } +} + +func TestExpKnownAnswer(t *testing.T) { + p := internal.B10("1031") // prime-order field + pMinus1 := internal.B10("1030") + tests := []struct { + name string + x, e, m *big.Int + expected *big.Int + }{ + {"fermat's little thm: 500", internal.B10("500"), p, p, internal.B10("500")}, + {"fermat's little thm (p-1): 500", internal.B10("500"), pMinus1, p, One}, + {"fermat's little thm (p-1): 5000", internal.B10("5000"), pMinus1, p, One}, + {"399^0 = 1", internal.B10("399"), Zero, p, One}, + {"673^1 = 673", internal.B10("673"), One, p, internal.B10("673")}, + } + + // Run all the tests! 
+ for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + actual, err := Exp(test.x, test.e, test.m) + if err != nil { + require.Equal(t, test.expected, actual) + } + }) + } +} + +// A product of two 1024b safe primes +var N1024 = internal.B10( + "22657252520748253292205422817162431301953923432914829530688424232913850279325496327198502914522231560238552529734156383924448818535517634061008476071362010781638360092704508943571866960229942049437914690556866055765377519627454975682400206932320319743805083072214857842762721537739950074695623974079312071498296625705376593890814889314744719469735809152488403143751157723139035869185892099006348653635981206799193781030834368833947197930944812082594326193527332208252230115672713914945889734620959932802893197325106135662762752470236627025599443912886530954179753873735786171937758916890000958846322096261981191349917", +) + +// A product of two 256b safe primes +var N256 = internal.B10( + "10815068324662993508164204692909269429257853772524581783499643160896147777579932560873002543907262462663453338979819981987639157192530671167315407970757417", +) + +func Benchmark_rand1024(b *testing.B) { + if testing.Short() { + b.Skip("skipping test in short mode.") + } + + for i := 0; i < b.N; i++ { + Rand(N1024) // nolint + } +} + +func BenchmarkRand1024(b *testing.B) { + if testing.Short() { + b.Skip("skipping test in short mode.") + } + + for i := 0; i < b.N; i++ { + Rand(N1024) // nolint + } +} + +func Benchmark_rand256(b *testing.B) { + if testing.Short() { + b.Skip("skipping test in short mode.") + } + + for i := 0; i < b.N; i++ { + Rand(N256) // nolint + } +} + +func BenchmarkRandStar256(b *testing.B) { + if testing.Short() { + b.Skip("skipping test in short mode.") + } + + for i := 0; i < b.N; i++ { + Rand(N256) // nolint + } +} diff --git a/core/primes.go b/core/primes.go new file mode 100755 index 0000000..24ae766 --- /dev/null +++ b/core/primes.go @@ -0,0 +1,42 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package core + +import ( + "crypto/rand" + "fmt" + "math" + "math/big" +) + +// GenerateSafePrime creates a prime number `p` +// where (`p`-1)/2 is also prime with at least `bits` +func GenerateSafePrime(bits uint) (*big.Int, error) { + if bits < 3 { + return nil, fmt.Errorf("safe prime size must be at least 3-bits") + } + + var p *big.Int + var err error + checks := int(math.Max(float64(bits)/16, 8)) + for { + // rand.Prime throws an error if bits < 2 + // -1 so the Sophie-Germain prime is 1023 bits + // and the Safe prime is 1024 + p, err = rand.Prime(rand.Reader, int(bits)-1) + if err != nil { + return nil, err + } + p.Add(p.Lsh(p, 1), One) + + if p.ProbablyPrime(checks) { + break + } + } + + return p, nil +} diff --git a/core/protocol/protocol.go b/core/protocol/protocol.go new file mode 100644 index 0000000..ba45476 --- /dev/null +++ b/core/protocol/protocol.go @@ -0,0 +1,114 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package protocol + +import ( + "encoding/base64" + "encoding/json" + "fmt" +) + +var ( + ErrNotInitialized = fmt.Errorf("object has not been initialized") + ErrProtocolFinished = fmt.Errorf("the protocol has finished") +) + +const ( + // Dkls18Dkg specifies the DKG protocol of the DKLs18 potocol. + Dkls18Dkg = "DKLs18-DKG" + + // Dkls18Sign specifies the DKG protocol of the DKLs18 potocol. + Dkls18Sign = "DKLs18-Sign" + + // Dkls18Refresh specifies the DKG protocol of the DKLs18 potocol. 
+ Dkls18Refresh = "DKLs18-Refresh" + + // versions will increment in 100 intervals, to leave room for adding other versions in between them if it is + // ever needed in the future. + + // Version0 is version 0! + Version0 = 100 + + // Version1 is version 2! + Version1 = 200 +) + +// Message provides serializers and deserializer for the inputs and outputs of each step of the protocol. +// Moreover, it adds some metadata and versioning around the serialized data. +type Message struct { + Payloads map[string][]byte `json:"payloads"` + Metadata map[string]string `json:"metadata"` + Protocol string `json:"protocol"` + Version uint `json:"version"` +} + +// EncodeMessage encodes the message to a string. +func EncodeMessage(m *Message) (string, error) { + bz, err := json.Marshal(m) + if err != nil { + return "", err + } + return base64.StdEncoding.EncodeToString(bz), nil +} + +// DecodeMessage decodes the message from a string. +func DecodeMessage(s string) (*Message, error) { + bz, err := base64.StdEncoding.DecodeString(s) + if err != nil { + return nil, err + } + var m Message + if err := json.Unmarshal(bz, &m); err != nil { + return nil, err + } + return &m, nil +} + +// MarshalJSON marshals the message to JSON. +func (m *Message) MarshalJSON() ([]byte, error) { + type Alias Message // Use type alias to avoid infinite recursion + return json.Marshal(&struct { + *Alias + }{ + Alias: (*Alias)(m), + }) +} + +// UnmarshalJSON unmarshals the message from JSON. +func (m *Message) UnmarshalJSON(data []byte) error { + var obj map[string]any + if err := json.Unmarshal(data, &obj); err != nil { + return err + } + m.Payloads = make(map[string][]byte) + m.Metadata = make(map[string]string) + for k, v := range obj { + switch k { + case "payloads": + m.Payloads = v.(map[string][]byte) + case "metadata": + m.Metadata = v.(map[string]string) + case "protocol": + m.Protocol = v.(string) + case "version": + m.Version = uint(v.(float64)) + } + } + return nil +} + +// Iterator an interface for the DKLs18 protocols that follows the iterator pattern. +type Iterator interface { + // Next runs the next round of the protocol. + // Returns `ErrProtocolFinished` when protocol has completed. + Next(input *Message) (*Message, error) + + // Result returns the final result, if any, of the completed protocol. + // Returns nil if the protocol has not yet terminated. + // Returns an error if an error was encountered during protocol execution. + Result(version uint) (*Message, error) +} diff --git a/daed/aes_siv.go b/daed/aes_siv.go new file mode 100644 index 0000000..fa8a063 --- /dev/null +++ b/daed/aes_siv.go @@ -0,0 +1,246 @@ +package daed + +import ( + "crypto/aes" + "crypto/cipher" + "crypto/subtle" + "errors" + "fmt" + "math" + // Placeholder for internal crypto/cipher allowlist, please ignore. + // Placeholder for internal crypto/subtle allowlist, please ignore. +) + +// AESSIV is an implementation of AES-SIV-CMAC as defined in +// https://tools.ietf.org/html/rfc5297. +// +// AESSIV implements a deterministic encryption with associated data (i.e. the +// DeterministicAEAD interface). Hence the implementation below is restricted +// to one AD component. +// +// Security Note: +// +// Chatterjee, Menezes and Sarkar analyze AES-SIV in Section 5.1 of +// https://www.math.uwaterloo.ca/~ajmeneze/publications/tightness.pdf +// +// Their analysis shows that AES-SIV is susceptible to an attack in +// a multi-user setting. 
Concretely, if an attacker knows the encryption +// of a message m encrypted and authenticated with k different keys, +// then it is possible to find one of the MAC keys in time 2^b / k +// where b is the size of the MAC key. A consequence of this attack +// is that 128-bit MAC keys give unsufficient security. +// Since 192-bit AES keys are not supported by tink for voodoo reasons +// and RFC 5297 only supports same size encryption and MAC keys this +// implies that keys must be 64 bytes (2*256 bits) long. +type AESSIV struct { + Cipher cipher.Block + K1 []byte + K2 []byte + CmacK1 []byte + CmacK2 []byte +} + +const ( + // AESSIVKeySize is the key size in bytes. + AESSIVKeySize = 64 + + intSize = 32 << (^uint(0) >> 63) // 32 or 64 + maxInt = 1<<(intSize-1) - 1 +) + +// NewAESSIV returns an AESSIV instance. +func NewAESSIV(key []byte) (*AESSIV, error) { + if len(key) != AESSIVKeySize { + return nil, fmt.Errorf("aes_siv: invalid key size %d", len(key)) + } + + k1 := key[:32] + k2 := key[32:] + c, err := aes.NewCipher(k1) + if err != nil { + return nil, fmt.Errorf("aes_siv: aes.NewCipher(%s) failed, %v", k1, err) + } + + block := make([]byte, aes.BlockSize) + c.Encrypt(block, block) + multiplyByX(block) + cmacK1 := make([]byte, aes.BlockSize) + copy(cmacK1, block) + multiplyByX(block) + cmacK2 := make([]byte, aes.BlockSize) + copy(cmacK2, block) + + return &AESSIV{ + K1: k1, + K2: k2, + CmacK1: cmacK1, + CmacK2: cmacK2, + Cipher: c, + }, nil +} + +// multiplyByX multiplies an element in GF(2^128) by its generator. +// +// This function is incorrectly named "doubling" in section 2.3 of RFC 5297. +func multiplyByX(block []byte) { + carry := int(block[0] >> 7) + for i := 0; i < aes.BlockSize-1; i++ { + block[i] = (block[i] << 1) | (block[i+1] >> 7) + } + + block[aes.BlockSize-1] = (block[aes.BlockSize-1] << 1) ^ byte( + subtle.ConstantTimeSelect(carry, 0x87, 0x00), + ) +} + +// EncryptDeterministically deterministically encrypts plaintext with associatedData. +func (asc *AESSIV) EncryptDeterministically(plaintext, associatedData []byte) ([]byte, error) { + if len(plaintext) > maxInt-aes.BlockSize { + return nil, fmt.Errorf("aes_siv: plaintext too long") + } + siv := make([]byte, aes.BlockSize) + asc.s2v(plaintext, associatedData, siv) + + ct := make([]byte, len(plaintext)+aes.BlockSize) + copy(ct[:aes.BlockSize], siv) + if err := asc.ctrCrypt(siv, plaintext, ct[aes.BlockSize:]); err != nil { + return nil, err + } + + return ct, nil +} + +// DecryptDeterministically deterministically decrypts ciphertext with associatedData. +func (asc *AESSIV) DecryptDeterministically(ciphertext, associatedData []byte) ([]byte, error) { + if len(ciphertext) < aes.BlockSize { + return nil, errors.New("aes_siv: ciphertext is too short") + } + + pt := make([]byte, len(ciphertext)-aes.BlockSize) + siv := ciphertext[:aes.BlockSize] + asc.ctrCrypt(siv, ciphertext[aes.BlockSize:], pt) + s2v := make([]byte, aes.BlockSize) + asc.s2v(pt, associatedData, s2v) + + diff := byte(0) + for i := 0; i < aes.BlockSize; i++ { + diff |= siv[i] ^ s2v[i] + } + if diff != 0 { + return nil, errors.New("aes_siv: invalid ciphertext") + } + + return pt, nil +} + +// ctrCrypt encrypts (or decrypts) the bytes in in using an SIV and writes the +// result to out. +func (asc *AESSIV) ctrCrypt(siv, in, out []byte) error { + // siv might be used outside of ctrCrypt(), so making a copy of it. 
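	// RFC 5297 specifies that the 31st and 63rd bits of the SIV (counting from the
	// least-significant bit) are cleared before it is used as the CTR IV; the two
	// mask operations below clear exactly those bits.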
+ iv := make([]byte, aes.BlockSize) + copy(iv, siv) + iv[8] &= 0x7f + iv[12] &= 0x7f + + c, err := aes.NewCipher(asc.K2) + if err != nil { + return fmt.Errorf("aes_siv: aes.NewCipher(%s) failed, %v", asc.K2, err) + } + + steam := cipher.NewCTR(c, iv) + steam.XORKeyStream(out, in) + return nil +} + +// s2v is a Pseudo-Random Function (PRF) construction: +// https://tools.ietf.org/html/rfc5297. +func (asc *AESSIV) s2v(msg, ad, siv []byte) { + block := make([]byte, aes.BlockSize) + asc.cmac(block, block) + multiplyByX(block) + + adMac := make([]byte, aes.BlockSize) + asc.cmac(ad, adMac) + xorBlock(adMac, block) + + if len(msg) >= aes.BlockSize { + asc.cmacLong(msg, block, siv) + } else { + multiplyByX(block) + for i := 0; i < len(msg); i++ { + block[i] ^= msg[i] + } + block[len(msg)] ^= 0x80 + asc.cmac(block, siv) + } +} + +// cmacLong computes CMAC(XorEnd(data, last)), where XorEnd xors the bytes in +// last to the last bytes in data. +// +// The size of the data must be at least 16 bytes. +func (asc *AESSIV) cmacLong(data, last, mac []byte) { + block := make([]byte, aes.BlockSize) + copy(block, data[:aes.BlockSize]) + + idx := aes.BlockSize + for aes.BlockSize <= len(data)-idx { + asc.Cipher.Encrypt(block, block) + xorBlock(data[idx:idx+aes.BlockSize], block) + idx += aes.BlockSize + } + + remaining := len(data) - idx + for i := 0; i < aes.BlockSize-remaining; i++ { + block[remaining+i] ^= last[i] + } + if remaining == 0 { + xorBlock(asc.CmacK1, block) + } else { + asc.Cipher.Encrypt(block, block) + for i := 0; i < remaining; i++ { + block[i] ^= last[aes.BlockSize-remaining+i] + block[i] ^= data[idx+i] + } + block[remaining] ^= 0x80 + xorBlock(asc.CmacK2, block) + } + + asc.Cipher.Encrypt(mac, block) +} + +// cmac computes a CMAC of some data. +func (asc *AESSIV) cmac(data, mac []byte) { + numBs := int(math.Ceil(float64(len(data)) / aes.BlockSize)) + if numBs == 0 { + numBs = 1 + } + lastBSize := len(data) - (numBs-1)*aes.BlockSize + + block := make([]byte, aes.BlockSize) + idx := 0 + for i := 0; i < numBs-1; i++ { + xorBlock(data[idx:idx+aes.BlockSize], block) + asc.Cipher.Encrypt(block, block) + idx += aes.BlockSize + } + for j := 0; j < lastBSize; j++ { + block[j] ^= data[idx+j] + } + + if lastBSize == aes.BlockSize { + xorBlock(asc.CmacK1, block) + } else { + block[lastBSize] ^= 0x80 + xorBlock(asc.CmacK2, block) + } + + asc.Cipher.Encrypt(mac, block) +} + +// xorBlock sets block[i] = x[i] ^ block[i]. 
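Taken together, these pieces give a deterministic AEAD: the same 64-byte key, plaintext, and associated data always yield the same ciphertext, and decryption rejects any modification. A short usage sketch (the import path matches the one used by the tests below):

package main

import (
	"bytes"
	"crypto/rand"
	"fmt"

	"github.com/sonr-io/sonr/crypto/daed"
)

func main() {
	key := make([]byte, daed.AESSIVKeySize) // 64 bytes: half for S2V/CMAC, half for CTR
	if _, err := rand.Read(key); err != nil {
		panic(err)
	}
	siv, err := daed.NewAESSIV(key)
	if err != nil {
		panic(err)
	}

	msg, ad := []byte("hello"), []byte("header")
	ct1, _ := siv.EncryptDeterministically(msg, ad)
	ct2, _ := siv.EncryptDeterministically(msg, ad)
	fmt.Println(bytes.Equal(ct1, ct2)) // true: determinism is the point

	pt, err := siv.DecryptDeterministically(ct1, ad)
	fmt.Println(err == nil && bytes.Equal(pt, msg)) // true
}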
+func xorBlock(x, block []byte) { + for i := 0; i < aes.BlockSize; i++ { + block[i] ^= x[i] + } +} diff --git a/daed/aes_siv_test.go b/daed/aes_siv_test.go new file mode 100644 index 0000000..19e615d --- /dev/null +++ b/daed/aes_siv_test.go @@ -0,0 +1,303 @@ +package daed_test + +import ( + "bytes" + "encoding/hex" + "encoding/json" + "os" + "path/filepath" + "testing" + + subtle "github.com/sonr-io/sonr/crypto/daed" + "github.com/sonr-io/sonr/crypto/subtle/random" +) + +type testData struct { + Algorithm string + GeneratorVersion string + TestGroups []*testGroup + NumberOfTests uint32 +} + +type testGroup struct { + Type string + Tests []*testCase + KeySize uint32 +} + +type testCase struct { + Key string + Aad string + Msg string + Ct string + Result string + TcID uint32 +} + +func TestAESSIV_EncryptDecrypt(t *testing.T) { + keyStr := "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f" + + "00112233445566778899aabbccddeefff0f1f2f3f4f5f6f7f8f9fafbfcfdfeff" + key, _ := hex.DecodeString(keyStr) + msg := []byte("Some data to encrypt.") + aad := []byte("Additional data") + + a, err := subtle.NewAESSIV(key) + if err != nil { + t.Errorf("NewAESSIV(key) = _, %v, want _, nil", err) + } + + ct, err := a.EncryptDeterministically(msg, aad) + if err != nil { + t.Errorf("Unexpected encryption error: %v", err) + } + + if pt, err := a.DecryptDeterministically(ct, aad); err != nil { + t.Errorf("Unexpected decryption error: %v", err) + } else if !bytes.Equal(pt, msg) { + t.Errorf("Mismatched plaintexts: got %v, want %v", pt, msg) + } +} + +func TestAESSIV_EmptyPlaintext(t *testing.T) { + keyStr := "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f" + + "00112233445566778899aabbccddeefff0f1f2f3f4f5f6f7f8f9fafbfcfdfeff" + key, _ := hex.DecodeString(keyStr) + aad := []byte("Additional data") + + a, err := subtle.NewAESSIV(key) + if err != nil { + t.Errorf("NewAESSIV(key) = _, %v, want _, nil", err) + } + + ct, err := a.EncryptDeterministically(nil, aad) + if err != nil { + t.Errorf("Unexpected encryption error: %v", err) + } + if pt, err := a.DecryptDeterministically(ct, aad); err != nil { + t.Errorf("Unexpected decryption error: %v", err) + } else if !bytes.Equal(pt, []byte{}) { + t.Errorf("Mismatched plaintexts: got %v, want []", pt) + } + + ct, err = a.EncryptDeterministically([]byte{}, aad) + if err != nil { + t.Errorf("Unexpected encryption error: %v", err) + } + if pt, err := a.DecryptDeterministically(ct, aad); err != nil { + t.Errorf("Unexpected decryption error: %v", err) + } else if !bytes.Equal(pt, []byte{}) { + t.Errorf("Mismatched plaintexts: got %v, want []", pt) + } +} + +func TestAESSIV_EmptyAdditionalData(t *testing.T) { + keyStr := "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f" + + "00112233445566778899aabbccddeefff0f1f2f3f4f5f6f7f8f9fafbfcfdfeff" + key, _ := hex.DecodeString(keyStr) + + a, err := subtle.NewAESSIV(key) + if err != nil { + t.Errorf("NewAESSIV(key) = _, %v, want _, nil", err) + } + + ct, err := a.EncryptDeterministically(nil, nil) + if err != nil { + t.Errorf("Unexpected encryption error: %v", err) + } + + if pt, err := a.DecryptDeterministically(ct, nil); err != nil { + t.Errorf("Unexpected decryption error: %v", err) + } else if !bytes.Equal(pt, []byte{}) { + t.Errorf("Mismatched plaintexts: got %v, want []", pt) + } + + if pt, err := a.DecryptDeterministically(ct, []byte{}); err != nil { + t.Errorf("Unexpected decryption error: %v", err) + } else if !bytes.Equal(pt, []byte{}) { + t.Errorf("Mismatched plaintexts: got %v, 
want []", pt) + } +} + +func TestAESSIV_KeySizes(t *testing.T) { + keyStr := "198371900187498172316311acf81d238ff7619873a61983d619c87b63a1987f" + + "987131819803719b847126381cd763871638aa71638176328761287361231321" + + "812731321de508761437195ff231765aa4913219873ac6918639816312130011" + + "abc900bba11400187984719827431246bbab1231eb4145215ff7141436616beb" + + "9817298148712fed3aab61000ff123313e" + key, _ := hex.DecodeString(keyStr) + + for i := 0; i < len(key); i++ { + _, err := subtle.NewAESSIV(key[:i]) + if i == subtle.AESSIVKeySize && err != nil { + t.Errorf("Rejected valid key size: %v, %v", i, err) + } + if i != subtle.AESSIVKeySize && err == nil { + t.Errorf("Allowed invalid key size: %v", i) + } + } +} + +func TestAESSIV_MessageSizes(t *testing.T) { + keyStr := "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f" + + "00112233445566778899aabbccddeefff0f1f2f3f4f5f6f7f8f9fafbfcfdfeff" + key, _ := hex.DecodeString(keyStr) + aad := []byte("Additional data") + + a, err := subtle.NewAESSIV(key) + if err != nil { + t.Errorf("NewAESSIV(key) = _, %v, want _, nil", err) + } + + for i := uint32(0); i < 1024; i++ { + msg := random.GetRandomBytes(i) + ct, err := a.EncryptDeterministically(msg, aad) + if err != nil { + t.Errorf("Unexpected encryption error: %v", err) + } + if pt, err := a.DecryptDeterministically(ct, aad); err != nil { + t.Errorf("Unexpected decryption error: %v", err) + } else if !bytes.Equal(pt, msg) { + t.Errorf("Mismatched plaintexts: got %v, want %v", pt, msg) + } + } + + for i := uint32(1024); i < 100000; i += 5000 { + msg := random.GetRandomBytes(i) + ct, err := a.EncryptDeterministically(msg, aad) + if err != nil { + t.Errorf("Unexpected encryption error: %v", err) + } + if pt, err := a.DecryptDeterministically(ct, aad); err != nil { + t.Errorf("Unexpected decryption error: %v", err) + } else if !bytes.Equal(pt, msg) { + t.Errorf("Mismatched plaintexts: got %v, want %v", pt, msg) + } + } +} + +func TestAESSIV_AdditionalDataSizes(t *testing.T) { + keyStr := "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f" + + "00112233445566778899aabbccddeefff0f1f2f3f4f5f6f7f8f9fafbfcfdfeff" + key, _ := hex.DecodeString(keyStr) + msg := []byte("Some data to encrypt.") + + a, err := subtle.NewAESSIV(key) + if err != nil { + t.Errorf("NewAESSIV(key) = _, %v, want _, nil", err) + } + + for i := uint32(0); i < 1024; i++ { + aad := random.GetRandomBytes(i) + ct, err := a.EncryptDeterministically(msg, aad) + if err != nil { + t.Errorf("Unexpected encryption error: %v", err) + } + if pt, err := a.DecryptDeterministically(ct, aad); err != nil { + t.Errorf("Unexpected decryption error: %v", err) + } else if !bytes.Equal(pt, msg) { + t.Errorf("Mismatched plaintexts: got %v, want %v", pt, msg) + } + } +} + +func TestAESSIV_CiphertextModifications(t *testing.T) { + keyStr := "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f" + + "00112233445566778899aabbccddeefff0f1f2f3f4f5f6f7f8f9fafbfcfdfeff" + key, _ := hex.DecodeString(keyStr) + aad := []byte("Additional data") + + a, err := subtle.NewAESSIV(key) + if err != nil { + t.Errorf("NewAESSIV(key) = _, %v, want _, nil", err) + } + + for i := uint32(0); i < 50; i++ { + msg := random.GetRandomBytes(i) + ct, err := a.EncryptDeterministically(msg, aad) + if err != nil { + t.Errorf("Unexpected encryption error: %v", err) + } + for j := 0; j < len(ct); j++ { + for b := uint32(0); b < 8; b++ { + ct[j] ^= 1 << b + if _, err := a.DecryptDeterministically(ct, aad); err == nil { + t.Errorf("Modified ciphertext 
decrypted: byte %d, bit %d", j, b) + } + ct[j] ^= 1 << b + } + } + } +} + +func TestAESSIV_WycheproofVectors(t *testing.T) { + srcDir, ok := os.LookupEnv("TEST_SRCDIR") + if !ok { + t.Skip("TEST_SRCDIR not set") + } + f, err := os.Open(filepath.Join(srcDir, "wycheproof/testvectors/aes_siv_cmac_test.json")) + if err != nil { + t.Fatalf("Cannot open file: %s", err) + } + parser := json.NewDecoder(f) + data := new(testData) + if err := parser.Decode(data); err != nil { + t.Fatalf("Cannot decode test data: %s", err) + } + + for _, g := range data.TestGroups { + if g.KeySize/8 != subtle.AESSIVKeySize { + continue + } + + for _, tc := range g.Tests { + key, err := hex.DecodeString(tc.Key) + if err != nil { + t.Errorf("#%d, cannot decode key: %s", tc.TcID, err) + } + aad, err := hex.DecodeString(tc.Aad) + if err != nil { + t.Errorf("#%d, cannot decode aad: %s", tc.TcID, err) + } + msg, err := hex.DecodeString(tc.Msg) + if err != nil { + t.Errorf("#%d, cannot decode msg: %s", tc.TcID, err) + } + ct, err := hex.DecodeString(tc.Ct) + if err != nil { + t.Errorf("#%d, cannot decode ct: %s", tc.TcID, err) + } + + a, err := subtle.NewAESSIV(key) + if err != nil { + t.Errorf("NewAESSIV(key) = _, %v, want _, nil", err) + continue + } + + // EncryptDeterministically should always succeed since msg and aad are valid inputs. + gotCt, err := a.EncryptDeterministically(msg, aad) + if err != nil { + t.Errorf("#%d, unexpected encryption error: %v", tc.TcID, err) + } else { + if tc.Result == "valid" && !bytes.Equal(gotCt, ct) { + t.Errorf("#%d, incorrect encryption: got %v, want %v", tc.TcID, gotCt, ct) + } + if tc.Result == "invalid" && bytes.Equal(gotCt, ct) { + t.Errorf("#%d, invalid encryption: got %v, want %v", tc.TcID, gotCt, ct) + } + } + + pt, err := a.DecryptDeterministically(ct, aad) + if tc.Result == "valid" { + if err != nil { + t.Errorf("#%d, unexpected decryption error: %v", tc.TcID, err) + } else if !bytes.Equal(pt, msg) { + t.Errorf("#%d, incorrect decryption: got %v, want %v", tc.TcID, pt, msg) + } + } else { + if err == nil { + t.Errorf("#%d, decryption error expected: got nil", tc.TcID) + } + } + } + } +} diff --git a/dkg/frost/README.md b/dkg/frost/README.md new file mode 100755 index 0000000..a85c425 --- /dev/null +++ b/dkg/frost/README.md @@ -0,0 +1,13 @@ +--- +aliases: [README] +tags: [] +title: README +linter-yaml-title-alias: README +date created: Wednesday, April 17th 2024, 4:11:40 pm +date modified: Thursday, April 18th 2024, 8:19:25 am +--- + +## FROST: Flexible Round-Optimized Schnorr Threshold Signatures + +This package is an implementation of the DKG part of +[FROST: Flexible Round-Optimized Schnorr Threshold Signatures](https://eprint.iacr.org/2020/852.pdf) diff --git a/dkg/frost/dkg_round1.go b/dkg/frost/dkg_round1.go new file mode 100644 index 0000000..91819fc --- /dev/null +++ b/dkg/frost/dkg_round1.go @@ -0,0 +1,153 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package frost + +import ( + "bytes" + crand "crypto/rand" + "encoding/gob" + "fmt" + "reflect" + + "github.com/pkg/errors" + + "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/internal" + "github.com/sonr-io/sonr/crypto/sharing" +) + +// Round1Bcast are values that are broadcast to all other participants +// after round1 completes +type Round1Bcast struct { + Verifiers *sharing.FeldmanVerifier + Wi, Ci curves.Scalar +} + +type Round1Result struct { + Broadcast *Round1Bcast + P2P *sharing.ShamirShare +} + +func (result *Round1Result) Encode() ([]byte, error) { + gob.Register(result.Broadcast.Verifiers.Commitments[0]) // just the point for now + gob.Register(result.Broadcast.Ci) + buf := &bytes.Buffer{} + enc := gob.NewEncoder(buf) + if err := enc.Encode(result); err != nil { + return nil, errors.Wrap(err, "couldn't encode round 1 broadcast") + } + return buf.Bytes(), nil +} + +func (result *Round1Result) Decode(input []byte) error { + buf := bytes.NewBuffer(input) + dec := gob.NewDecoder(buf) + if err := dec.Decode(result); err != nil { + return errors.Wrap(err, "couldn't encode round 1 broadcast") + } + return nil +} + +// Round1P2PSend are values that are P2PSend to all other participants +// after round1 completes +type Round1P2PSend = map[uint32]*sharing.ShamirShare + +// Round1 implements dkg round 1 of FROST +func (dp *DkgParticipant) Round1(secret []byte) (*Round1Bcast, Round1P2PSend, error) { + // Make sure dkg participant is not empty + if dp == nil || dp.Curve == nil { + return nil, nil, internal.ErrNilArguments + } + + // Make sure round number is correct + if dp.round != 1 { + return nil, nil, internal.ErrInvalidRound + } + + // Check number of participants + if uint32(len(dp.otherParticipantShares)+1) > dp.feldman.Limit || + uint32(len(dp.otherParticipantShares)+1) < dp.feldman.Threshold { + return nil, nil, fmt.Errorf( + "length of dp.otherParticipantShares + 1 should be equal to feldman limit", + ) + } + + // If secret is nil, sample a new one + // If not, check secret is valid + var s curves.Scalar + var err error + if secret == nil { + s = dp.Curve.Scalar.Random(crand.Reader) + } else { + s, err = dp.Curve.Scalar.SetBytes(secret) + if err != nil { + return nil, nil, err + } + if s.IsZero() { + return nil, nil, internal.ErrZeroValue + } + } + + // Step 1 - (Aj0,...Ajt), (xi1,...,xin) <- FeldmanShare(s) + // We should validate types of Feldman curve scalar and participant's curve scalar. + if reflect.TypeOf(dp.feldman.Curve.Scalar) != reflect.TypeOf(dp.Curve.Scalar) { + return nil, nil, fmt.Errorf( + "feldman scalar should have the same type as the dkg participant scalar", + ) + } + verifiers, shares, err := dp.feldman.Split(s, crand.Reader) + if err != nil { + return nil, nil, err + } + + // Store Verifiers and shares + dp.verifiers = verifiers + dp.secretShares = shares + + // Step 2 - Sample ki <- Z_q + ki := dp.Curve.Scalar.Random(crand.Reader) + + // Step 3 - Compute Ri = ki*G + Ri := dp.Curve.ScalarBaseMult(ki) + + // Step 4 - Compute Ci = H(i, CTX, g^{a_(i,0)}, R_i), where CTX is fixed context string + var msg []byte + // Append participant id + msg = append(msg, byte(dp.Id)) + // Append CTX + msg = append(msg, dp.ctx) + // Append a_{i,0}*G + msg = append(msg, verifiers.Commitments[0].ToAffineCompressed()...) + // Append Ri + msg = append(msg, Ri.ToAffineCompressed()...) + // Hash the message and get Ci + ci := dp.Curve.Scalar.Hash(msg) + + // Step 5 - Compute Wi = ki+a_{i,0}*c_i mod q. 
Note that a_{i,0} is the secret. + // Note: We have to compute scalar in the following way when using ed25519 curve, rather than scalar := dp.Scalar.Mul(s, Ci) + // there is an invalid encoding error when we compute scalar as above. + wi := s.MulAdd(ci, ki) + + // Step 6 - Broadcast (Ci, Wi, Ci) to other participants + round1Bcast := &Round1Bcast{ + verifiers, + wi, + ci, + } + + // Step 7 - P2PSend f_i(j) to each participant Pj and keep (i, f_j(i)) for himself + p2pSend := make(Round1P2PSend, len(dp.otherParticipantShares)) + for id := range dp.otherParticipantShares { + p2pSend[id] = shares[id-1] + } + + // Update internal state + dp.round = 2 + + // return + return round1Bcast, p2pSend, nil +} diff --git a/dkg/frost/dkg_round2.go b/dkg/frost/dkg_round2.go new file mode 100644 index 0000000..381c3d2 --- /dev/null +++ b/dkg/frost/dkg_round2.go @@ -0,0 +1,162 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package frost + +import ( + "fmt" + + "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/internal" + "github.com/sonr-io/sonr/crypto/sharing" +) + +// Round2Bcast are values that are broadcast to all other participants +// after round2 completes +type Round2Bcast struct { + VerificationKey curves.Point + VkShare curves.Point +} + +// Round2 implements dkg round 2 of FROST +func (dp *DkgParticipant) Round2( + bcast map[uint32]*Round1Bcast, + p2psend map[uint32]*sharing.ShamirShare, +) (*Round2Bcast, error) { + // Make sure dkg participant is not empty + if dp == nil || dp.Curve == nil { + return nil, internal.ErrNilArguments + } + + // Check dkg participant has the correct dkg round number + if dp.round != 2 { + return nil, internal.ErrInvalidRound + } + + // Check the input is valid + if bcast == nil || p2psend == nil || len(p2psend) == 0 { + return nil, internal.ErrNilArguments + } + + // Check length of bcast and p2psend + if uint32(len(bcast)) > dp.feldman.Limit || uint32(len(bcast)) < dp.feldman.Threshold-1 { + return nil, fmt.Errorf("invalid broadcast length") + } + + if uint32(len(p2psend)) > dp.feldman.Limit-1 || uint32(len(p2psend)) < dp.feldman.Threshold-1 { + return nil, fmt.Errorf("invalid p2pSend length") + } + + // We should validate Wi and Ci values in Round1Bcast + for id := range bcast { + // ci should be within the range 1 to q-1, q is the group order. + if bcast[id].Ci.IsZero() { + return nil, fmt.Errorf("ci should not be zero from participant %d", id) + } + } + // Validate each received commitment is on curve + for id := range bcast { + for _, com := range bcast[id].Verifiers.Commitments { + if !com.IsOnCurve() || com.IsIdentity() { + return nil, fmt.Errorf("some commitment is not on curve from participant %d", id) + } + } + } + + var err error + + // Step 2 - for j in 1,...,n + for id := range bcast { + + // Step 3 - if j == i, continue + if id == dp.Id { + continue + } + + // Step 4 - Check equation c_j = H(j, CTX, A_{j,0}, g^{w_j}*A_{j,0}^{-c_j} + // Get Aj0 + Aj0 := bcast[id].Verifiers.Commitments[0] + // Compute g^{w_j} + prod1 := dp.Curve.ScalarBaseMult(bcast[id].Wi) + // Compute A_{j,0}^{-c_j} + prod2 := Aj0.Mul(bcast[id].Ci.Neg()) + + // We need to check Aj0 and prod2 are points on the same curve. 
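+ // Rejecting identity and off-curve points up front keeps malformed A_{j,0}
+ // values out of the Schnorr-style hash check performed below.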
+ if !Aj0.IsOnCurve() || Aj0.IsIdentity() || !prod2.IsOnCurve() || prod2.IsIdentity() || + Aj0.CurveName() != prod2.CurveName() { + return nil, fmt.Errorf("invalid Aj0 or prod2 which is not on the same curve") + } + if prod2 == nil { + return nil, fmt.Errorf("invalid should not be nil") + } + + prod := prod1.Add(prod2) + var msg []byte + // Append participant id + msg = append(msg, byte(id)) + // Append CTX + msg = append(msg, dp.ctx) + // Append Aj0 + msg = append(msg, Aj0.ToAffineCompressed()...) + // Append prod + msg = append(msg, prod.ToAffineCompressed()...) + // Hash the message and get cj + cj := dp.Curve.Scalar.Hash(msg) + // Check equation + if cj.Cmp(bcast[id].Ci) != 0 { + return nil, fmt.Errorf("hash check fails for participant with id %d", id) + } + + // Step 5 - FeldmanVerify + fji := p2psend[id] + if err = bcast[id].Verifiers.Verify(fji); err != nil { + return nil, fmt.Errorf("feldman verify fails for participant with id %d", id) + } + } + + sk, err := dp.Curve.Scalar.SetBytes(dp.secretShares[dp.Id-1].Value) + if err != nil { + return nil, err + } + vk := dp.verifiers.Commitments[0] + // Step 6 - Compute signing key share ski = \sum_{j=1}^n xji + for id := range bcast { + if id == dp.Id { + continue + } + t2, err := dp.Curve.Scalar.SetBytes(p2psend[id].Value) + if err != nil { + return nil, err + } + sk = sk.Add(t2) + } + + // Step 8 - Compute verification key vk = sum(A_{j,0}), j = 1,...,n + for id := range bcast { + if id == dp.Id { + continue + } + vk = vk.Add(bcast[id].Verifiers.Commitments[0]) + } + + // Store signing key share + dp.SkShare = sk + + // Step 7 - Compute verification key share vki = ski*G and store + dp.VkShare = dp.Curve.ScalarBaseMult(sk) + + // Store verification key + dp.VerificationKey = vk + + // Update round number + dp.round = 3 + + // Broadcast + return &Round2Bcast{ + vk, + dp.VkShare, + }, nil +} diff --git a/dkg/frost/dkg_rounds_test.go b/dkg/frost/dkg_rounds_test.go new file mode 100644 index 0000000..6f6861e --- /dev/null +++ b/dkg/frost/dkg_rounds_test.go @@ -0,0 +1,206 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package frost + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/sharing" +) + +var ( + testCurve = curves.ED25519() + Ctx = "string to prevent replay attack" +) + +// Test dkg round1 works for 2 participants +func TestDkgRound1Works(t *testing.T) { + p1, err := NewDkgParticipant(1, 2, Ctx, testCurve, 2) + require.NoError(t, err) + bcast, p2psend, err := p1.Round1(nil) + require.NoError(t, err) + require.NotNil(t, bcast) + require.NotNil(t, p2psend) + require.NotNil(t, p1.ctx) + require.Equal(t, len(p2psend), 1) + require.Equal(t, p1.round, 2) + _, ok := p2psend[2] + require.True(t, ok) +} + +func TestDkgRound1RepeatCall(t *testing.T) { + p1, err := NewDkgParticipant(1, 2, Ctx, testCurve, 2) + require.NoError(t, err) + _, _, err = p1.Round1(nil) + require.NoError(t, err) + _, _, err = p1.Round1(nil) + require.Error(t, err) +} + +func TestDkgRound1BadSecret(t *testing.T) { + p1, err := NewDkgParticipant(1, 2, Ctx, testCurve, 2) + require.NoError(t, err) + // secret == 0 + secret := []byte{0} + _, _, err = p1.Round1(secret) + require.Error(t, err) + // secret too big + secret = []byte{ + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + } + _, _, err = p1.Round1(secret) + require.Error(t, err) +} + +func PrepareRound2Input( + t *testing.T, +) (*DkgParticipant, *DkgParticipant, *Round1Bcast, *Round1Bcast, Round1P2PSend, Round1P2PSend) { + // Prepare round 1 output of 2 participants + p1, err := NewDkgParticipant(1, 2, Ctx, testCurve, 2) + require.NoError(t, err) + require.Equal(t, p1.otherParticipantShares[2].Id, uint32(2)) + p2, err := NewDkgParticipant(2, 2, Ctx, testCurve, 1) + require.NoError(t, err) + require.Equal(t, p2.otherParticipantShares[1].Id, uint32(1)) + bcast1, p2psend1, _ := p1.Round1(nil) + bcast2, p2psend2, _ := p2.Round1(nil) + return p1, p2, bcast1, bcast2, p2psend1, p2psend2 +} + +// Test FROST DKG round 2 works +func TestDkgRound2Works(t *testing.T) { + // Prepare Dkg Round1 output + p1, _, bcast1, bcast2, _, p2psend2 := PrepareRound2Input(t) + // Actual Test + require.NotNil(t, bcast1) + require.NotNil(t, bcast2) + require.NotNil(t, p2psend2[1]) + bcast := make(map[uint32]*Round1Bcast) + p2p := make(map[uint32]*sharing.ShamirShare) + bcast[1] = bcast1 + bcast[2] = bcast2 + p2p[2] = p2psend2[1] + round2Out, err := p1.Round2(bcast, p2p) + require.NoError(t, err) + require.NotNil(t, round2Out) + require.NotNil(t, p1.SkShare) + require.NotNil(t, p1.VkShare) + require.NotNil(t, p1.VerificationKey) + require.NotNil(t, p1.otherParticipantShares) +} + +// Test FROST DKG round 2 repeat call +func TestDkgRound2RepeatCall(t *testing.T) { + // Prepare round 1 output + p1, _, bcast1, bcast2, _, p2psend2 := PrepareRound2Input(t) + // Actual Test + require.NotNil(t, bcast1) + require.NotNil(t, bcast2) + require.NotNil(t, p2psend2[1]) + bcast := make(map[uint32]*Round1Bcast) + p2p := make(map[uint32]*sharing.ShamirShare) + bcast[1] = bcast1 + bcast[2] = bcast2 + p2p[2] = p2psend2[1] + _, err := p1.Round2(bcast, p2p) + require.NoError(t, err) + _, err = p1.Round2(bcast, p2p) + require.Error(t, err) +} + +// Test FROST Dkg Round 2 Bad Input +func TestDkgRound2BadInput(t *testing.T) { + // Prepare Dkg Round 1 output + p1, _, _, _, _, _ := PrepareRound2Input(t) + bcast := make(map[uint32]*Round1Bcast) + p2p := 
make(map[uint32]*sharing.ShamirShare) + + // Test empty bcast and p2p + _, err := p1.Round2(bcast, p2p) + require.Error(t, err) + + // Test nil bcast and p2p + p1, _, _, _, _, _ = PrepareRound2Input(t) + _, err = p1.Round2(nil, nil) + require.Error(t, err) + + // Test tampered input bcast and p2p + p1, _, bcast1, bcast2, _, p2psend2 := PrepareRound2Input(t) + bcast = make(map[uint32]*Round1Bcast) + p2p = make(map[uint32]*sharing.ShamirShare) + + // Tamper p2psend2 by doubling the value + tmp, _ := testCurve.Scalar.SetBytes(p2psend2[1].Value) + p2psend2[1].Value = tmp.Double().Bytes() + bcast[1] = bcast1 + bcast[2] = bcast2 + p2p[2] = p2psend2[1] + _, err = p1.Round2(bcast, p2p) + require.Error(t, err) +} + +// Test full round works +func TestFullDkgRoundsWorks(t *testing.T) { + // Initiate two participants and running round 1 + p1, p2, bcast1, bcast2, p2psend1, p2psend2 := PrepareRound2Input(t) + bcast := make(map[uint32]*Round1Bcast) + p2p1 := make(map[uint32]*sharing.ShamirShare) + p2p2 := make(map[uint32]*sharing.ShamirShare) + bcast[1] = bcast1 + bcast[2] = bcast2 + p2p1[2] = p2psend2[1] + p2p2[1] = p2psend1[2] + + // Running round 2 + round2Out1, _ := p1.Round2(bcast, p2p1) + round2Out2, _ := p2.Round2(bcast, p2p2) + require.Equal(t, round2Out1.VerificationKey, round2Out2.VerificationKey) + s, _ := sharing.NewShamir(2, 2, testCurve) + sk, err := s.Combine(&sharing.ShamirShare{Id: p1.Id, Value: p1.SkShare.Bytes()}, + &sharing.ShamirShare{Id: p2.Id, Value: p2.SkShare.Bytes()}) + require.NoError(t, err) + + vk := testCurve.ScalarBaseMult(sk) + require.True(t, vk.Equal(p1.VerificationKey)) +} diff --git a/dkg/frost/participant.go b/dkg/frost/participant.go new file mode 100644 index 0000000..8cc09ff --- /dev/null +++ b/dkg/frost/participant.go @@ -0,0 +1,70 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +// Package frost is an implementation of the DKG part of https://eprint.iacr.org/2020/852.pdf +package frost + +import ( + "strconv" + + "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/internal" + "github.com/sonr-io/sonr/crypto/sharing" +) + +type DkgParticipant struct { + round int + Curve *curves.Curve + otherParticipantShares map[uint32]*dkgParticipantData + Id uint32 + SkShare curves.Scalar + VerificationKey curves.Point + VkShare curves.Point + feldman *sharing.Feldman + verifiers *sharing.FeldmanVerifier + secretShares []*sharing.ShamirShare + ctx byte +} + +type dkgParticipantData struct { + Id uint32 + Share *sharing.ShamirShare + Verifiers *sharing.FeldmanVerifier +} + +func NewDkgParticipant( + id, threshold uint32, + ctx string, + curve *curves.Curve, + otherParticipants ...uint32, +) (*DkgParticipant, error) { + if curve == nil || len(otherParticipants) == 0 { + return nil, internal.ErrNilArguments + } + limit := uint32(len(otherParticipants)) + 1 + feldman, err := sharing.NewFeldman(threshold, limit, curve) + if err != nil { + return nil, err + } + otherParticipantShares := make(map[uint32]*dkgParticipantData, len(otherParticipants)) + for _, id := range otherParticipants { + otherParticipantShares[id] = &dkgParticipantData{ + Id: id, + } + } + + // SetBigInt the common fixed string + ctxV, _ := strconv.Atoi(ctx) + + return &DkgParticipant{ + Id: id, + round: 1, + Curve: curve, + feldman: feldman, + otherParticipantShares: otherParticipantShares, + ctx: byte(ctxV), + }, nil +} diff --git a/dkg/gennaro/README.md b/dkg/gennaro/README.md new file mode 100755 index 0000000..0bc82ea --- /dev/null +++ b/dkg/gennaro/README.md @@ -0,0 +1,13 @@ +--- +aliases: [README] +tags: [] +title: README +linter-yaml-title-alias: README +date created: Wednesday, April 17th 2024, 4:11:40 pm +date modified: Thursday, April 18th 2024, 8:19:25 am +--- + +## One Round Threshold ECDSA with Identifiable Abort (GG20) + +This package is an implementation of the DKG part of +[One Round Threshold ECDSA with Identifiable Abort](https://eprint.iacr.org/2020/540.pdf). diff --git a/dkg/gennaro/participant.go b/dkg/gennaro/participant.go new file mode 100644 index 0000000..dd4420d --- /dev/null +++ b/dkg/gennaro/participant.go @@ -0,0 +1,102 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +// Package gennaro is an implementation of the DKG part of https://eprint.iacr.org/2020/540.pdf +package gennaro + +import ( + "crypto/elliptic" + "fmt" + + "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/internal" + v1 "github.com/sonr-io/sonr/crypto/sharing/v1" +) + +// Participant is a DKG player that contains information needed to perform DKG rounds +// and yield a secret key share and public key when finished +type Participant struct { + round int + curve elliptic.Curve + scalar curves.EcScalar + otherParticipantShares map[uint32]*dkgParticipantData + id uint32 + skShare *curves.Element + verificationKey *v1.ShareVerifier + feldman *v1.Feldman + pedersen *v1.Pedersen + pedersenResult *v1.PedersenResult +} + +// NewParticipant creates a participant ready to perform a DKG +// `id` is the integer value identifier for this participant +// `threshold` is the minimum bound for the secret sharing scheme +// `generator` is the blinding factor generator used by pedersen's verifiable secret sharing +// `otherParticipants` is the integer value identifiers for the other participants +// `id` and `otherParticipants` must be the set of integers 1,2,....,n +func NewParticipant( + id, threshold uint32, + generator *curves.EcPoint, + scalar curves.EcScalar, + otherParticipants ...uint32, +) (*Participant, error) { + if generator == nil || len(otherParticipants) == 0 { + return nil, internal.ErrNilArguments + } + err := validIds(append(otherParticipants, id)) + if err != nil { + return nil, err + } + + limit := uint32(len(otherParticipants)) + 1 + feldman, err := v1.NewFeldman(threshold, limit, generator.Curve) + if err != nil { + return nil, err + } + pedersen, err := v1.NewPedersen(threshold, limit, generator) + if err != nil { + return nil, err + } + + otherParticipantShares := make(map[uint32]*dkgParticipantData, len(otherParticipants)) + for _, id := range otherParticipants { + otherParticipantShares[id] = &dkgParticipantData{ + Id: id, + } + } + + return &Participant{ + id: id, + round: 1, + curve: generator.Curve, + scalar: scalar, + feldman: feldman, + pedersen: pedersen, + otherParticipantShares: otherParticipantShares, + }, nil +} + +// Determines if the SSIDs are exactly the values 1..n. +func validIds(ids []uint32) error { + // Index + idMap := make(map[uint32]bool, len(ids)) + for _, id := range ids { + idMap[id] = true + } + // Check + for i := 1; i <= len(ids); i++ { + if ok := idMap[uint32(i)]; !ok { + return fmt.Errorf("the ID list %v is invalid; values must be 1,2,..,n", ids) + } + } + return nil +} + +type dkgParticipantData struct { + Id uint32 + Share *v1.ShamirShare + Verifiers []*v1.ShareVerifier +} diff --git a/dkg/gennaro/participant_test.go b/dkg/gennaro/participant_test.go new file mode 100644 index 0000000..f1b3070 --- /dev/null +++ b/dkg/gennaro/participant_test.go @@ -0,0 +1,48 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package gennaro + +import ( + "math/big" + "testing" + + "github.com/btcsuite/btcd/btcec/v2" + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/internal" +) + +var testGenerator, _ = curves.NewScalarBaseMult(btcec.S256(), big.NewInt(3333)) + +func TestNewParticipantWorks(t *testing.T) { + p, err := NewParticipant(1, 2, testGenerator, curves.NewK256Scalar(), 2) + require.NoError(t, err) + require.NotNil(t, p) + require.Equal(t, p.id, uint32(1)) + require.Equal(t, p.round, 1) + require.Equal(t, p.curve, btcec.S256()) + require.NotNil(t, p.pedersen) + require.NotNil(t, p.feldman) + require.Nil(t, p.pedersenResult) + require.NotNil(t, p.otherParticipantShares) + require.NotNil(t, p.scalar) + _, ok := p.otherParticipantShares[2] + require.True(t, ok) +} + +func TestNewParticipantBadInputs(t *testing.T) { + _, err := NewParticipant(0, 0, nil, nil) + require.Error(t, err) + require.Equal(t, err, internal.ErrNilArguments) + _, err = NewParticipant(1, 2, nil, nil) + require.Error(t, err) + require.Equal(t, err, internal.ErrNilArguments) + _, err = NewParticipant(1, 2, testGenerator, nil) + require.Error(t, err) + require.Equal(t, err, internal.ErrNilArguments) +} diff --git a/dkg/gennaro/round1.go b/dkg/gennaro/round1.go new file mode 100644 index 0000000..2ec405d --- /dev/null +++ b/dkg/gennaro/round1.go @@ -0,0 +1,79 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package gennaro + +import ( + "fmt" + "math/big" + + "github.com/sonr-io/sonr/crypto/core" + "github.com/sonr-io/sonr/crypto/internal" + v1 "github.com/sonr-io/sonr/crypto/sharing/v1" +) + +// Round1Bcast are the values that are broadcast to all other participants +// after round1 completes +type Round1Bcast = []*v1.ShareVerifier + +// Round1P2PSend are the values that are sent to individual participants based +// on the id +type Round1P2PSend = map[uint32]*Round1P2PSendPacket + +// Round1P2PSendPacket are the shares generated from the secret for a specific participant +type Round1P2PSendPacket struct { + SecretShare *v1.ShamirShare + BlindingShare *v1.ShamirShare +} + +// Round1 computes the first round for the DKG +// `secret` can be nil +// NOTE: if `secret` is nil, a new secret is generated which creates a new key +// if `secret` is set, then this performs key resharing aka proactive secret sharing update +func (dp *Participant) Round1(secret []byte) (Round1Bcast, Round1P2PSend, error) { + if dp.round != 1 { + return nil, nil, internal.ErrInvalidRound + } + + if secret == nil { + // 1. x $← Zq∗ + s, err := dp.scalar.Random() + if err != nil { + return nil, nil, err + } + secret = s.Bytes() + } else { + s := new(big.Int).SetBytes(secret) + if !dp.scalar.IsValid(s) { + return nil, nil, fmt.Errorf("invalid secret value") + } + if s.Cmp(core.Zero) == 0 { + return nil, nil, internal.ErrZeroValue + } + } + + var err error + // 2. {X1,...,Xt},{R1,...,Rt},{x1,...,xn},{r1,...,rn}= PedersenFeldmanShare(E,Q,x,t,{p1,...,pn}) + dp.pedersenResult, err = dp.pedersen.Split(secret) + if err != nil { + return nil, nil, err + } + + // 4. 
P2PSend x_j,r_j to participant p_j in {p_1,...,p_n}_{i != j} + p2pSend := make(Round1P2PSend, len(dp.otherParticipantShares)) + for id := range dp.otherParticipantShares { + p2pSend[id] = &Round1P2PSendPacket{ + SecretShare: dp.pedersenResult.SecretShares[id-1], + BlindingShare: dp.pedersenResult.BlindingShares[id-1], + } + } + + // Update internal state + dp.round = 2 + + // 3. EchoBroadcast {X_1,...,X_t} to all other participants. + return dp.pedersenResult.BlindedVerifiers, p2pSend, nil +} diff --git a/dkg/gennaro/round2.go b/dkg/gennaro/round2.go new file mode 100644 index 0000000..7be1d94 --- /dev/null +++ b/dkg/gennaro/round2.go @@ -0,0 +1,93 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package gennaro + +import ( + "fmt" + + "github.com/sonr-io/sonr/crypto/internal" + v1 "github.com/sonr-io/sonr/crypto/sharing/v1" +) + +type Round2Bcast = []*v1.ShareVerifier + +// Round2 computes the second round for Gennaro DKG +// Algorithm 3 - Gennaro DKG Round 2 +// bcast contains all Round1 broadcast from other participants to this participant +// p2p contains all Round1 P2P send message from other participants to this participant +func (dp *Participant) Round2( + bcast map[uint32]Round1Bcast, + p2p map[uint32]*Round1P2PSendPacket, +) (Round2Bcast, error) { + // Check participant is not empty + if dp == nil || dp.curve == nil { + return nil, internal.ErrNilArguments + } + + // Check participant has the correct dkg round number + if dp.round != 2 { + return nil, internal.ErrInvalidRound + } + + // Check the input is valid + if bcast == nil || p2p == nil || len(bcast) == 0 || len(p2p) == 0 { + return nil, internal.ErrNilArguments + } + + // 1. set sk = x_{ii} + sk := dp.pedersenResult.SecretShares[dp.id-1].Value + + // 2. for j in 1,...,n + for id := range bcast { + + // 3. if i = j continue + if id == dp.id { + continue + } + + // Ensure a valid p2p entry exists + if p2p[id] == nil { + return nil, fmt.Errorf("missing p2p packet for id=%v", id) + } + + // 4. If PedersenVerify(E, Q, x_ji, r_ji, {X_ji,...,X_jt}) = false, abort + xji := p2p[id].SecretShare + rji := p2p[id].BlindingShare + bvs := bcast[id] + if ok, err := dp.pedersen.Verify(xji, rji, bvs); !ok { + if err != nil { + return nil, err + } else { + return nil, fmt.Errorf("invalid share for participant id=%v", id) + } + } + + // Store other participants' shares xji for usage in round 3 + dp.otherParticipantShares[id].Share = p2p[id].SecretShare + + // 5. sk = (sk+xji) mod q + // NOTE: we use the EcScalar class to add instead of + // just using big.Int Add and Mod + // because Ed25519 will fail with the big.Int Add and Mod + // and Ed25519 uses different little endian vs big endian + // in big.Int + t1 := sk.BigInt() + t2 := xji.Value.BigInt() + r := dp.scalar.Add(t1, t2) + + sk = sk.Field().NewElement(r) + } + + // Update internal state + dp.round = 3 + + // 7. Store ski as participant i's secret key share + dp.skShare = sk + + // 6. EchoBroadcast {R_1,...,R_t} to all other participants. + return dp.pedersenResult.Verifiers, nil +} diff --git a/dkg/gennaro/round3.go b/dkg/gennaro/round3.go new file mode 100644 index 0000000..458ce6f --- /dev/null +++ b/dkg/gennaro/round3.go @@ -0,0 +1,89 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package gennaro + +import ( + "fmt" + + "github.com/sonr-io/sonr/crypto/internal" + v1 "github.com/sonr-io/sonr/crypto/sharing/v1" +) + +// Round3Bcast contains values that will be broadcast to other participants. +type Round3Bcast = v1.ShareVerifier + +// Round3 computes the third round for Gennaro DKG +// Algorithm 4 - Gennaro DKG Round 3 +// bcast contains all Round2 broadcast from other participants to this participant. +func (dp *Participant) Round3(bcast map[uint32]Round2Bcast) (*Round3Bcast, *v1.ShamirShare, error) { + // Check participant is not empty + if dp == nil || dp.curve == nil { + return nil, nil, internal.ErrNilArguments + } + + // Check participant has the correct dkg round number + if dp.round != 3 { + return nil, nil, internal.ErrInvalidRound + } + + // Check the input is valid + if len(bcast) == 0 { + return nil, nil, internal.ErrNilArguments + } + + // 1. SetBigInt Pk = R_i1 + Pk := dp.pedersenResult.Verifiers[0] + + // 2. for j in 1,...,n + for id := range bcast { + // 3. if i = j continue + if id == dp.id { + continue + } + + // 4. If FeldmanVerify(E, xji, {R_j1,...,R_jt}) = false; abort + xji := dp.otherParticipantShares[id].Share + vs := bcast[id] + if ok, err := dp.feldman.Verify(xji, vs); !ok { + if err != nil { + return nil, nil, err + } else { + return nil, nil, fmt.Errorf("invalid share for participant #{id}") + } + } + + // Store the feldman verifiers for round 4 + dp.otherParticipantShares[id].Verifiers = vs + + // 5. Pk = Pk+R_j1 + temp, err := Pk.Add(bcast[id][0]) + if err != nil { + return nil, nil, fmt.Errorf("error in computing Pk+R_j1") + } + Pk = temp + } + + // This is a sanity check to make sure nothing went wrong + // when computing the public key + if !Pk.IsOnCurve() || Pk.IsIdentity() { + return nil, nil, fmt.Errorf("invalid public key") + } + + // 6. Store Pk as the public verification key + dp.verificationKey = Pk + + // Update internal state + dp.round = 4 + + skShare := v1.ShamirShare{ + Identifier: dp.id, + Value: dp.skShare, + } + + // Output Pk as the public verification key + return Pk, &skShare, nil +} diff --git a/dkg/gennaro/round4.go b/dkg/gennaro/round4.go new file mode 100644 index 0000000..bfb11fd --- /dev/null +++ b/dkg/gennaro/round4.go @@ -0,0 +1,76 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package gennaro + +import ( + "math/big" + + "github.com/sonr-io/sonr/crypto/core" + "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/internal" + v1 "github.com/sonr-io/sonr/crypto/sharing/v1" +) + +// Round4 computes the public shares used by tECDSA during signing +// that are converted to additive shares once the signing participants +// are known. This function is idempotent +func (dp *Participant) Round4() (map[uint32]*curves.EcPoint, error) { + // Check participant is not empty + if dp == nil || dp.curve == nil { + return nil, internal.ErrNilArguments + } + + // Check participant has the correct dkg round number + if dp.round != 4 { + return nil, internal.ErrInvalidRound + } + + n := len(dp.otherParticipantShares) + 1 //+1 to include self + // Wj's + publicShares := make(map[uint32]*curves.EcPoint, n) + + // 1. R = {{R1,...,Rt},{Rij,...,Rit}i!=j} + r := make(map[uint32][]*v1.ShareVerifier, n) + r[dp.id] = dp.pedersenResult.Verifiers + for j := range dp.otherParticipantShares { + r[j] = dp.otherParticipantShares[j].Verifiers + } + + // 2. for j in 1,...,n + for j, v := range r { + // 3. 
Wj = Pk + publicShares[j] = &curves.EcPoint{ + Curve: dp.verificationKey.Curve, + X: new(big.Int).Set(dp.verificationKey.X), + Y: new(big.Int).Set(dp.verificationKey.Y), + } + + // 4. for k in 1,...,t + for k := 0; k < len(dp.pedersenResult.Verifiers); k++ { + // 5. ck = pj * k mod q + pj := big.NewInt(int64(j)) + ck, err := core.Mul(pj, big.NewInt(int64(k+1)), dp.curve.Params().N) + if err != nil { + return nil, err + } + + // 6a. t = ck * Rj + t, err := v[k].ScalarMult(ck) + if err != nil { + return nil, err + } + + // 6b. Wj = Wj + t + publicShares[j], err = publicShares[j].Add(t) + if err != nil { + return nil, err + } + } + } + + return publicShares, nil +} diff --git a/dkg/gennaro/rounds_test.go b/dkg/gennaro/rounds_test.go new file mode 100644 index 0000000..04ce3e8 --- /dev/null +++ b/dkg/gennaro/rounds_test.go @@ -0,0 +1,419 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package gennaro + +import ( + "fmt" + "testing" + + "github.com/btcsuite/btcd/btcec/v2" + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core/curves" + v1 "github.com/sonr-io/sonr/crypto/sharing/v1" +) + +func TestParticipantRound1Works(t *testing.T) { + p1, err := NewParticipant(1, 2, testGenerator, curves.NewK256Scalar(), 2) + require.NoError(t, err) + bcast, p2psend, err := p1.Round1(nil) + require.NoError(t, err) + require.NotNil(t, bcast) + require.NotNil(t, p2psend) + require.Equal(t, len(p2psend), 1) + require.Equal(t, len(bcast), 2) + require.NotNil(t, p1.pedersenResult) + require.Equal(t, p1.round, 2) + _, ok := p2psend[2] + require.True(t, ok) +} + +func TestParticipantRound1RepeatCall(t *testing.T) { + p1, err := NewParticipant(1, 2, testGenerator, curves.NewK256Scalar(), 2) + require.NoError(t, err) + _, _, err = p1.Round1(nil) + require.NoError(t, err) + _, _, err = p1.Round1(nil) + require.Error(t, err) +} + +func TestParticipantRound1BadSecret(t *testing.T) { + p1, err := NewParticipant(1, 2, testGenerator, curves.NewK256Scalar(), 2) + require.NoError(t, err) + // secret == 0 + secret := []byte{0} + _, _, err = p1.Round1(secret) + require.Error(t, err) + // secret too big + secret = []byte{ + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + } + _, _, err = p1.Round1(secret) + require.Error(t, err) +} + +func PrepareRound2Input( + t *testing.T, +) (*Participant, *Participant, Round1Bcast, Round1Bcast, Round1P2PSend) { + // Prepare round 1 output of 2 participants + p1, err := NewParticipant(1, 2, testGenerator, curves.NewK256Scalar(), 2) + require.NoError(t, err) + require.Equal(t, p1.otherParticipantShares[2].Id, uint32(2)) + p2, err := NewParticipant(2, 2, testGenerator, curves.NewK256Scalar(), 1) + require.NoError(t, err) + require.Equal(t, p2.otherParticipantShares[1].Id, uint32(1)) + bcast1, _, _ := p1.Round1(nil) + bcast2, p2psend2, _ := p2.Round1(nil) + return p1, p2, bcast1, bcast2, p2psend2 +} + +// Test Gennaro DKG round2 works +func TestParticipantRound2Works(t *testing.T) { + // Prepare Dkg Round 1 output + p1, _, bcast1, bcast2, p2psend2 := PrepareRound2Input(t) + // Actual Test + require.NotNil(t, bcast1) + require.NotNil(t, bcast2) + require.NotNil(t, p2psend2[1]) + bcast := make(map[uint32]Round1Bcast) + p2p := make(map[uint32]*Round1P2PSendPacket) + bcast[1] = bcast1 + bcast[2] = bcast2 + p2p[2] = p2psend2[1] + round2Out, err := p1.Round2(bcast, p2p) + require.NoError(t, 
err) + require.NotNil(t, round2Out) + require.Equal(t, len(round2Out), 2) + require.NotNil(t, p1.skShare) + require.Equal(t, p1.round, 3) + require.NotNil(t, p1.otherParticipantShares) +} + +// Test Gennaro DKG round 2 repeat call +func TestParticipantRound2RepeatCall(t *testing.T) { + // Prepare Dkg Round 1 output + p1, _, bcast1, bcast2, p2psend2 := PrepareRound2Input(t) + // Actual Test + require.NotNil(t, bcast1) + require.NotNil(t, bcast2) + require.NotNil(t, p2psend2[1]) + bcast := make(map[uint32]Round1Bcast) + p2p := make(map[uint32]*Round1P2PSendPacket) + bcast[1] = bcast1 + bcast[2] = bcast2 + p2p[2] = p2psend2[1] + _, err := p1.Round2(bcast, p2p) + require.NoError(t, err) + _, err = p1.Round2(bcast, p2p) + require.Error(t, err) +} + +// Test Gennaro Dkg Round 2 Bad Input +func TestParticipantRound2BadInput(t *testing.T) { + // Prepare Dkg Round 1 output + p1, _, _, _, _ := PrepareRound2Input(t) + bcast := make(map[uint32]Round1Bcast) + p2p := make(map[uint32]*Round1P2PSendPacket) + + // Test empty bcast and p2p + _, err := p1.Round2(bcast, p2p) + require.Error(t, err) + + // Test nil bcast and p2p + p1, _, _, _, _ = PrepareRound2Input(t) + _, err = p1.Round2(nil, nil) + require.Error(t, err) + + // Test tampered input bcast and p2p + p1, _, bcast1, bcast2, p2psend2 := PrepareRound2Input(t) + bcast = make(map[uint32]Round1Bcast) + p2p = make(map[uint32]*Round1P2PSendPacket) + + // Tamper bcast1 and p2psend2 by doubling their value + bcast1[1].Y = bcast1[1].Y.Add(bcast1[1].Y, bcast1[1].Y) + p2psend2[1].SecretShare.Value = p2psend2[1].SecretShare.Value.Add(p2psend2[1].SecretShare.Value) + bcast[1] = bcast1 + bcast[2] = bcast2 + p2p[2] = p2psend2[1] + _, err = p1.Round2(bcast, p2p) + require.Error(t, err) +} + +func PrepareRound3Input(t *testing.T) (*Participant, *Participant, map[uint32]Round2Bcast) { + p1, _ := NewParticipant(1, 2, testGenerator, curves.NewK256Scalar(), 2) + p2, _ := NewParticipant(2, 2, testGenerator, curves.NewK256Scalar(), 1) + bcast1, p2psend1, _ := p1.Round1(nil) + bcast2, p2psend2, _ := p2.Round1(nil) + bcast := make(map[uint32]Round1Bcast) + p2p1 := make(map[uint32]*Round1P2PSendPacket) + p2p2 := make(map[uint32]*Round1P2PSendPacket) + bcast[1] = bcast1 + bcast[2] = bcast2 + p2p1[2] = p2psend2[1] + p2p2[1] = p2psend1[2] + round2Out1, _ := p1.Round2(bcast, p2p1) + round2Out2, _ := p2.Round2(bcast, p2p2) + round3Input := make(map[uint32]Round2Bcast) + round3Input[1] = round2Out1 + round3Input[2] = round2Out2 + return p1, p2, round3Input +} + +// Test Gennaro Dkg Round 3 Works +func TestParticipantRound3Works(t *testing.T) { + // Prepare Gennaro Dkg Round 3 Input + p1, p2, round3Input := PrepareRound3Input(t) + + // Actual Test + round3Out1, _, err := p1.Round3(round3Input) + require.NoError(t, err) + require.NotNil(t, round3Out1) + round3Out2, _, err := p2.Round3(round3Input) + require.NoError(t, err) + require.NotNil(t, round3Out2) + require.Equal(t, p1.round, 4) + require.Equal(t, p2.round, 4) + require.Equal(t, p1.verificationKey, p2.verificationKey) + + // Test if shares recombine properly + s, _ := v1.NewShamir(2, 2, curves.NewField(btcec.S256().N)) + sk, err := s.Combine(&v1.ShamirShare{Identifier: p1.id, Value: p1.skShare}, + &v1.ShamirShare{Identifier: p2.id, Value: p2.skShare}) + require.NoError(t, err) + + // Test verification keys are G * sk + x, y := btcec.S256().ScalarBaseMult(sk) + tmp := &curves.EcPoint{ + Curve: btcec.S256(), + X: x, + Y: y, + } + require.True(t, tmp.Equals(p1.verificationKey)) + require.True(t, tmp.Equals(p2.verificationKey)) 
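+ // G*sk matching both participants' verification keys confirms that the
+ // combined Shamir shares reconstruct the secret behind the jointly computed key.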
+} + +// Test Gennaro Dkg Round3 Repeat Call +func TestParticipantRound3RepeatCall(t *testing.T) { + // Prepare Round 3 Input + p1, _, round3Input := PrepareRound3Input(t) + + // Actual Test + _, _, err := p1.Round3(round3Input) + require.NoError(t, err) + _, _, err = p1.Round3(round3Input) + require.Error(t, err) +} + +// Test Gennaro DKG Round 3 Bad Input +func TestParticipantRound3BadInput(t *testing.T) { + // Test empty round 3 input + p1, _, _ := PrepareRound3Input(t) + emptyInput := make(map[uint32]Round2Bcast) + _, _, err := p1.Round3(emptyInput) + require.Error(t, err) + + // Test nil round 3 input + p1, _, _ = PrepareRound3Input(t) + _, _, err = p1.Round3(nil) + require.Error(t, err) + + // Test tampered round 3 input + p1, _, round3Input := PrepareRound3Input(t) + // Tamper participant2's broadcast + round3Input[2][0], _ = round3Input[2][0].Add(round3Input[2][1]) + _, _, err = p1.Round3(round3Input) + require.Error(t, err) +} + +// Test Gennaro Dkg Round 4 Works +func TestParticipantRound4Works(t *testing.T) { + // Prepare Gennaro Dkg Round 3 Input + p1, p2, round3Input := PrepareRound3Input(t) + round3Out1, _, err := p1.Round3(round3Input) + require.NoError(t, err) + require.NotNil(t, round3Out1) + round3Out2, _, err := p2.Round3(round3Input) + require.NoError(t, err) + require.NotNil(t, round3Out2) + + // Actual test + publicShares1, err := p1.Round4() + require.NoError(t, err) + require.NotNil(t, publicShares1) + publicShares2, err := p2.Round4() + require.NoError(t, err) + require.NotNil(t, publicShares2) + + require.Equal(t, publicShares1, publicShares2) +} + +// Test Gennaro Dkg Round 4 Works +func TestParticipantRound4RepeatCall(t *testing.T) { + // Prepare Gennaro Dkg Round 3 Input + p1, p2, round3Input := PrepareRound3Input(t) + round3Out1, _, err := p1.Round3(round3Input) + require.NoError(t, err) + require.NotNil(t, round3Out1) + round3Out2, _, err := p2.Round3(round3Input) + require.NoError(t, err) + require.NotNil(t, round3Out2) + + // Actual test + publicShares1, err := p1.Round4() + require.NoError(t, err) + require.NotNil(t, publicShares1) + publicShares2, err := p1.Round4() + require.NoError(t, err) + require.NotNil(t, publicShares2) + + require.Equal(t, publicShares1, publicShares2) +} + +// Test all Gennaro DKG rounds +func TestAllGennaroDkgRounds(t *testing.T) { + // Initiate two participants + p1, _ := NewParticipant(1, 2, testGenerator, curves.NewK256Scalar(), 2) + p2, _ := NewParticipant(2, 2, testGenerator, curves.NewK256Scalar(), 1) + + // Running round 1 + bcast1, p2psend1, _ := p1.Round1(nil) + bcast2, p2psend2, _ := p2.Round1(nil) + bcast := make(map[uint32]Round1Bcast) + p2p1 := make(map[uint32]*Round1P2PSendPacket) + p2p2 := make(map[uint32]*Round1P2PSendPacket) + bcast[1] = bcast1 + bcast[2] = bcast2 + p2p1[2] = p2psend2[1] + p2p2[1] = p2psend1[2] + + // Running round 2 + round2Out1, _ := p1.Round2(bcast, p2p1) + round2Out2, _ := p2.Round2(bcast, p2p2) + round3Input := make(map[uint32]Round2Bcast) + round3Input[1] = round2Out1 + round3Input[2] = round2Out2 + + // Running round 3 + round3Out1, _, _ := p1.Round3(round3Input) + round3Out2, _, _ := p2.Round3(round3Input) + require.NotNil(t, round3Out1) + require.NotNil(t, round3Out2) + + // Running round 4 + publicShares1, _ := p1.Round4() + publicShares2, _ := p2.Round4() + + // Test output of all rounds + require.Equal(t, publicShares1, publicShares2) + s, _ := v1.NewShamir(2, 2, curves.NewField(btcec.S256().N)) + sk, err := s.Combine(&v1.ShamirShare{Identifier: p1.id, Value: p1.skShare}, + 
&v1.ShamirShare{Identifier: p2.id, Value: p2.skShare}) + require.NoError(t, err) + + x, y := btcec.S256().ScalarBaseMult(sk) + tmp := &curves.EcPoint{ + Curve: btcec.S256(), + X: x, + Y: y, + } + require.True(t, tmp.Equals(p1.verificationKey)) + require.True(t, tmp.Equals(p2.verificationKey)) +} + +// Ensure correct functioning when input is missing +func TestParticipant2BadInput(t *testing.T) { + // + // Setup + // + p1, _ := NewParticipant(1, 2, testGenerator, curves.NewK256Scalar(), 2) + p2, _ := NewParticipant(2, 2, testGenerator, curves.NewK256Scalar(), 1) + bcast1, _, _ := p1.Round1(nil) + bcast2, p2psend2, _ := p2.Round1(nil) + bcast := make(map[uint32]Round1Bcast) + p2p1 := make(map[uint32]*Round1P2PSendPacket) + bcast[1] = bcast1 + bcast[2] = bcast2 + // Exclude p2p 2>1 + p2p1[4] = p2psend2[1] + + // Run round 2 + _, err := p1.Round2(bcast, p2p1) + require.Error(t, err) +} + +func TestValidIDs(t *testing.T) { + err := fmt.Errorf("") + tests := []struct { + name string + in []uint32 + expected error + }{ + {"positive-1,2", []uint32{1, 2}, nil}, + {"positive-2,1", []uint32{2, 1}, nil}, + {"positive-1-10", []uint32{1, 2, 3, 4, 5, 6, 7, 8, 9, 10}, nil}, + {"positive-1-10-random", []uint32{10, 9, 6, 5, 3, 2, 8, 7, 1, 4}, nil}, + {"negative-1,3", []uint32{1, 3}, err}, + {"negative-1-10-missing-5", []uint32{10, 9, 6, 3, 2, 8, 7, 1, 4}, err}, + } + // Run all the tests! + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + err := validIds(test.in) + if test.expected == nil { + require.NoError(t, err) + } else { + require.Error(t, err) + } + }) + } +} + +// Test newParticipant with arbitrary IDs +func TestParticipantArbitraryIds(t *testing.T) { + _, err := NewParticipant(3, 2, testGenerator, curves.NewK256Scalar(), 4) + require.Error(t, err) + _, err = NewParticipant(0, 2, testGenerator, curves.NewK256Scalar(), 1) + require.Error(t, err) + _, err = NewParticipant(2, 2, testGenerator, curves.NewK256Scalar(), 2, 3, 5) + require.Error(t, err) + _, err = NewParticipant(1, 2, testGenerator, curves.NewK256Scalar(), 4) + require.Error(t, err) +} diff --git a/dkg/gennaro2p/README.md b/dkg/gennaro2p/README.md new file mode 100755 index 0000000..3652657 --- /dev/null +++ b/dkg/gennaro2p/README.md @@ -0,0 +1,12 @@ +--- +aliases: [README] +tags: [] +title: README +linter-yaml-title-alias: README +date created: Wednesday, April 17th 2024, 4:11:40 pm +date modified: Thursday, April 18th 2024, 8:19:25 am +--- + +## Two-party GG20 + +This package wraps dkg/genarro and specializes it for the 2-party case. diff --git a/dkg/gennaro2p/genarro2p.go b/dkg/gennaro2p/genarro2p.go new file mode 100644 index 0000000..dec8bca --- /dev/null +++ b/dkg/gennaro2p/genarro2p.go @@ -0,0 +1,151 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +// Package gennaro2p wraps dkg/genarro and specializes it for the 2-party case. Simpler API, no +// distinction between broadcast and peer messages, and only counterparty messages are +// used as round inputs since self-inputs are always ignored. 
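+//
+// A minimal sketch of the two-party flow (mirroring the dkg() helper in
+// genarro2p_test.go; error handling omitted):
+//
+//	blind, _ := newBlind(curveScalar, curve)
+//	client, _ := NewParticipant(1, 2, blind, curveScalar, curve)
+//	server, _ := NewParticipant(2, 1, blind, curveScalar, curve)
+//	clientR1, _ := client.Round1(nil)
+//	serverR1, _ := server.Round1(nil)
+//	clientR2, _ := client.Round2(serverR1)
+//	serverR2, _ := server.Round2(clientR1)
+//	clientResult, _ := client.Finalize(serverR2)
+//	serverResult, _ := server.Finalize(clientR2)
+//	// clientResult.PublicKey and serverResult.PublicKey are equal.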
+package gennaro2p + +import ( + "crypto/elliptic" + "fmt" + + "github.com/pkg/errors" + + "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/dkg/gennaro" + v1 "github.com/sonr-io/sonr/crypto/sharing/v1" +) + +const threshold = 2 + +// Participant is a DKG player that contains information needed to perform DKG rounds +// and yield a secret key share and public key when finished +type Participant struct { + id uint32 + counterPartyId uint32 + embedded *gennaro.Participant + blind *curves.EcPoint +} + +type Round1Message struct { + Verifiers []*v1.ShareVerifier + SecretShare *v1.ShamirShare + BlindingShare *v1.ShamirShare + Blind *curves.EcPoint +} + +type Round2Message struct { + Verifiers []*v1.ShareVerifier +} + +type DkgResult struct { + PublicKey *curves.EcPoint + SecretShare *v1.ShamirShare + PublicShares map[uint32]*curves.EcPoint +} + +// NewParticipant creates a participant ready to perform a DKG +// blind must be a generator and must be synchronized between counterparties. +// The first participant can set it to `nil` and a secure blinding factor will be +// generated. +func NewParticipant(id, counterPartyId uint32, blind *curves.EcPoint, + scalar curves.EcScalar, curve elliptic.Curve, +) (*Participant, error) { + // Generate blinding value, if required + var err error + if blind == nil { + blind, err = newBlind(scalar, curve) + if err != nil { + return nil, errors.Wrap(err, "generating fresh blinding generator") + } + } + p, err := gennaro.NewParticipant(id, threshold, blind, scalar, counterPartyId) + if err != nil { + return nil, errors.Wrap(err, "created genarro.Participant") + } + return &Participant{id, counterPartyId, p, blind}, nil +} + +// Creates a random blinding factor (as a generator) required for pedersen's VSS +func newBlind(curveScalar curves.EcScalar, curve elliptic.Curve) (*curves.EcPoint, error) { + rScalar, err := curveScalar.Random() + if err != nil { + return nil, errors.Wrap(err, "generating blinding scalar") + } + return curves.NewScalarBaseMult(curve, rScalar) +} + +// Runs DKG round 1. If `secret` is nil, shares of a new, random signing key are generated. +// Otherwise, the existing secret shares will be refreshed but the privkey and pubkey +// will remain unchanged. +func (p *Participant) Round1(secret []byte) (*Round1Message, error) { + // Run round 1 + bcast, p2p, err := p.embedded.Round1(secret) + if err != nil { + return nil, errors.Wrap(err, "calling embedded.Round1()") + } + + // Ensure the map has the expected entry so there's no SIGSEGV when we + // repackage it + if p2p[p.counterPartyId] == nil { + return nil, fmt.Errorf("round1 response for p2p[%v] is nil", p.counterPartyId) + } + + // Package response + return &Round1Message{ + bcast, + p2p[p.counterPartyId].SecretShare, + p2p[p.counterPartyId].BlindingShare, + p.blind, + }, nil +} + +// Runs DKG round 2 using the counterparty's output from round 1. +func (p *Participant) Round2(msg *Round1Message) (*Round2Message, error) { + // Run round 2 + bcast, err := p.embedded.Round2( + map[uint32]gennaro.Round1Bcast{ + p.counterPartyId: msg.Verifiers, + }, + map[uint32]*gennaro.Round1P2PSendPacket{ + p.counterPartyId: { + SecretShare: msg.SecretShare, + BlindingShare: msg.BlindingShare, + }, + }) + if err != nil { + return nil, errors.Wrap(err, "calling embedded.Round2()") + } + + // Package response + return &Round2Message{bcast}, nil +} + +// Completes the DKG using the counterparty's output from round 2. 
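+// Finalize runs gennaro rounds 3 and 4 internally and returns the joint public
+// key, this participant's secret share, and the per-participant public shares.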
+func (p *Participant) Finalize(msg *Round2Message) (*DkgResult, error) { + // Run round 3 + pk, share, err := p.embedded.Round3( + map[uint32]gennaro.Round2Bcast{ + p.counterPartyId: msg.Verifiers, + }) + if err != nil { + return nil, errors.Wrap(err, "calling embedded.Round3()") + } + + // Compute public shares + pubShares, err := p.embedded.Round4() + if err != nil { + return nil, errors.Wrap(err, "calling embedded.Roun4()") + } + + // Package response + return &DkgResult{ + PublicKey: pk, + SecretShare: share, + PublicShares: pubShares, + }, nil +} diff --git a/dkg/gennaro2p/genarro2p_test.go b/dkg/gennaro2p/genarro2p_test.go new file mode 100644 index 0000000..ee68a49 --- /dev/null +++ b/dkg/gennaro2p/genarro2p_test.go @@ -0,0 +1,181 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package gennaro2p + +import ( + "crypto/elliptic" + "fmt" + "reflect" + "testing" + + "github.com/btcsuite/btcd/btcec/v2" + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core/curves" + v1 "github.com/sonr-io/sonr/crypto/sharing/v1" +) + +var ( + curveScalar = curves.NewK256Scalar() + curve = btcec.S256() +) + +const ( + clientId = 1 + serverId = 2 +) + +// Benchmark full DKG including blind selection and setup +func BenchmarkDkg(b *testing.B) { + if testing.Short() { + b.Skip("skipping test in short mode.") + } + + for i := 0; i < b.N; i++ { + _, _, err := dkg() + require.NoError(b, err) + } +} + +// Run a DKG and reports the client/server results +func dkg() (*DkgResult, *DkgResult, error) { + // Create client/server + blind, _ := newBlind(curveScalar, curve) + + client, err := NewParticipant(clientId, serverId, blind, curveScalar, curve) + if err != nil { + return nil, nil, err + } + + server, err := NewParticipant(serverId, clientId, blind, curveScalar, curve) + if err != nil { + return nil, nil, err + } + + // R1 + clientR1, err := client.Round1(nil) + if err != nil { + return nil, nil, err + } + + serverR1, err := server.Round1(nil) + if err != nil { + return nil, nil, err + } + + // R2 + clientR2, err := client.Round2(serverR1) + if err != nil { + return nil, nil, err + } + + serverR2, err := server.Round2(clientR1) + if err != nil { + return nil, nil, err + } + + // Finalize + clientResult, err := client.Finalize(serverR2) + if err != nil { + return nil, nil, err + } + + serverResult, err := server.Finalize(clientR2) + if err != nil { + return nil, nil, err + } + + return clientResult, serverResult, nil +} + +// Run a full DKG and verify the absence of errors and valid results +func TestDkg(t *testing.T) { + // Setup and ensure no errors + clientResult, serverResult, err := dkg() + require.NoError(t, err) + require.NotNil(t, clientResult) + require.NotNil(t, serverResult) + + // Now run tests + t.Run("produce the same public key", func(t *testing.T) { + require.Equal(t, clientResult.PublicKey, serverResult.PublicKey) + }) + t.Run("produce identical public shares", func(t *testing.T) { + require.True(t, reflect.DeepEqual(clientResult.PublicShares, serverResult.PublicShares)) + }) + t.Run("produce distinct secret shares", func(t *testing.T) { + require.NotEqual(t, clientResult.SecretShare, serverResult.SecretShare) + }) + t.Run("produce distinct secret shares", func(t *testing.T) { + require.NotEqual(t, clientResult.SecretShare, serverResult.SecretShare) + }) + t.Run("shares sum to expected public key", func(t *testing.T) { + pubkey, err := reconstructPubkey( + clientResult.SecretShare, + serverResult.SecretShare, + 
curve) + require.NoError(t, err) + require.Equal(t, serverResult.PublicKey, pubkey) + }) +} + +// Reconstruct the pubkey from 2 shares +func reconstructPubkey(s1, s2 *v1.ShamirShare, curve elliptic.Curve) (*curves.EcPoint, error) { + s, err := v1.NewShamir(2, 2, s1.Value.Field()) + if err != nil { + return nil, err + } + + sk, err := s.Combine(s1, s2) + if err != nil { + return nil, err + } + + x, y := curve.ScalarBaseMult(sk) + return &curves.EcPoint{ + Curve: curve, + X: x, + Y: y, + }, nil +} + +// Test blind generator helper function produces a value on the expected curve +func TestNewBlindOnCurve(t *testing.T) { + const n = 1024 + for i := 0; i < n; i++ { + b, err := newBlind(curveScalar, curve) + require.NoError(t, err) + require.NotNil(t, b) + + // Valid point? + require.True(t, b.IsOnCurve() && b.IsValid()) + require.True(t, b.IsValid()) + require.False(t, b.IsIdentity()) + require.False(t, b.IsBasePoint()) + } +} + +func TestNewBlindProvidesDistinctPoints(t *testing.T) { + const n = 1024 + seen := make(map[string]bool, n) + // seen := make(map[core.EcPoint]bool, n) + + for i := 0; i < n; i++ { + b, err := newBlind(curveScalar, curve) + require.NoError(t, err) + + // serialize so the point is hashable + txt := fmt.Sprintf("%#v", b) + + // We shouldn't see the same point twice + ok := seen[txt] + require.False(t, ok) + + // store + seen[txt] = true + } +} diff --git a/ecdsa/canonical.go b/ecdsa/canonical.go new file mode 100644 index 0000000..e4bd94d --- /dev/null +++ b/ecdsa/canonical.go @@ -0,0 +1,193 @@ +// Package ecdsa provides ECDSA signature canonicalization +package ecdsa + +import ( + "crypto/ecdsa" + "crypto/elliptic" + "fmt" + "math/big" +) + +// CanonicalizeSignature ensures ECDSA signature is in canonical form +// This prevents signature malleability attacks where (r, s) and (r, -s mod N) are both valid +func CanonicalizeSignature(r, s *big.Int, curve elliptic.Curve) (*big.Int, *big.Int, error) { + if r == nil || s == nil { + return nil, nil, fmt.Errorf("r and s cannot be nil") + } + + if curve == nil { + return nil, nil, fmt.Errorf("curve cannot be nil") + } + + N := curve.Params().N + if N == nil { + return nil, nil, fmt.Errorf("invalid curve parameters") + } + + // Create copies to avoid modifying originals + rCopy := new(big.Int).Set(r) + sCopy := new(big.Int).Set(s) + + // Check if r is in valid range [1, N-1] + if rCopy.Sign() <= 0 || rCopy.Cmp(N) >= 0 { + return nil, nil, fmt.Errorf("r is not in valid range [1, N-1]") + } + + // Check if s is in valid range [1, N-1] + if sCopy.Sign() <= 0 || sCopy.Cmp(N) >= 0 { + return nil, nil, fmt.Errorf("s is not in valid range [1, N-1]") + } + + // Ensure s is canonical (s <= N/2) + halfN := new(big.Int).Div(N, big.NewInt(2)) + if sCopy.Cmp(halfN) > 0 { + // Use N - s to get canonical form + sCopy.Sub(N, sCopy) + } + + return rCopy, sCopy, nil +} + +// IsSignatureCanonical checks if an ECDSA signature is in canonical form +func IsSignatureCanonical(r, s *big.Int, curve elliptic.Curve) bool { + if r == nil || s == nil || curve == nil { + return false + } + + N := curve.Params().N + if N == nil { + return false + } + + // Check r is in valid range [1, N-1] + if r.Sign() <= 0 || r.Cmp(N) >= 0 { + return false + } + + // Check s is in valid range [1, N/2] + halfN := new(big.Int).Div(N, big.NewInt(2)) + if s.Sign() <= 0 || s.Cmp(halfN) > 0 { + return false + } + + return true +} + +// ValidateAndCanonicalizeSignature validates and canonicalizes an ECDSA signature +func ValidateAndCanonicalizeSignature(pub *ecdsa.PublicKey, hash 
[]byte, r, s *big.Int) (*big.Int, *big.Int, error) { + if pub == nil { + return nil, nil, fmt.Errorf("public key cannot be nil") + } + + if len(hash) == 0 { + return nil, nil, fmt.Errorf("hash cannot be empty") + } + + // Canonicalize the signature + rCanon, sCanon, err := CanonicalizeSignature(r, s, pub.Curve) + if err != nil { + return nil, nil, fmt.Errorf("failed to canonicalize signature: %w", err) + } + + // Verify the canonical signature + if !ecdsa.Verify(pub, hash, rCanon, sCanon) { + // If canonical signature doesn't verify, try the original + // This handles the case where the signature was already canonical but negated + if !ecdsa.Verify(pub, hash, r, s) { + return nil, nil, fmt.Errorf("signature verification failed") + } + // Original verified, return it canonicalized + return CanonicalizeSignature(r, s, pub.Curve) + } + + return rCanon, sCanon, nil +} + +// RejectNonCanonical rejects non-canonical signatures outright +// This is stricter than canonicalization and prevents accepting malleable signatures +func RejectNonCanonical(r, s *big.Int, curve elliptic.Curve) error { + if r == nil || s == nil { + return fmt.Errorf("r and s cannot be nil") + } + + if curve == nil { + return fmt.Errorf("curve cannot be nil") + } + + if !IsSignatureCanonical(r, s, curve) { + return fmt.Errorf("signature is not in canonical form") + } + + return nil +} + +// NormalizeSignature normalizes an ECDSA signature to ensure consistent representation +// This is useful for signature aggregation and comparison +func NormalizeSignature(r, s *big.Int, curve elliptic.Curve) (*big.Int, *big.Int, error) { + // First canonicalize + rNorm, sNorm, err := CanonicalizeSignature(r, s, curve) + if err != nil { + return nil, nil, err + } + + // Additional normalization can be added here if needed + // For example, ensuring consistent byte representation + + return rNorm, sNorm, nil +} + +// CompareSignatures compares two ECDSA signatures for equality after canonicalization +func CompareSignatures(r1, s1, r2, s2 *big.Int, curve elliptic.Curve) (bool, error) { + // Canonicalize both signatures + r1Canon, s1Canon, err := CanonicalizeSignature(r1, s1, curve) + if err != nil { + return false, fmt.Errorf("failed to canonicalize first signature: %w", err) + } + + r2Canon, s2Canon, err := CanonicalizeSignature(r2, s2, curve) + if err != nil { + return false, fmt.Errorf("failed to canonicalize second signature: %w", err) + } + + // Compare canonical forms + return r1Canon.Cmp(r2Canon) == 0 && s1Canon.Cmp(s2Canon) == 0, nil +} + +// SignatureBytes converts signature to bytes in canonical form +// Returns 64 bytes for P-256 (32 bytes for r, 32 bytes for s) +func SignatureBytes(r, s *big.Int, curve elliptic.Curve) ([]byte, error) { + // Canonicalize first + rCanon, sCanon, err := CanonicalizeSignature(r, s, curve) + if err != nil { + return nil, err + } + + // Get the byte size for the curve + byteSize := (curve.Params().BitSize + 7) / 8 + + // Convert to bytes with proper padding + rBytes := rCanon.Bytes() + sBytes := sCanon.Bytes() + + // Pad if necessary + signature := make([]byte, 2*byteSize) + copy(signature[byteSize-len(rBytes):byteSize], rBytes) + copy(signature[2*byteSize-len(sBytes):], sBytes) + + return signature, nil +} + +// SignatureFromBytes reconstructs signature from bytes and ensures it's canonical +func SignatureFromBytes(sig []byte, curve elliptic.Curve) (*big.Int, *big.Int, error) { + byteSize := (curve.Params().BitSize + 7) / 8 + + if len(sig) != 2*byteSize { + return nil, nil, fmt.Errorf("invalid signature 
length: expected %d, got %d", 2*byteSize, len(sig)) + } + + r := new(big.Int).SetBytes(sig[:byteSize]) + s := new(big.Int).SetBytes(sig[byteSize:]) + + // Ensure canonical form + return CanonicalizeSignature(r, s, curve) +} diff --git a/ecdsa/canonical_test.go b/ecdsa/canonical_test.go new file mode 100644 index 0000000..5931e03 --- /dev/null +++ b/ecdsa/canonical_test.go @@ -0,0 +1,276 @@ +package ecdsa + +import ( + "crypto/ecdsa" + "crypto/elliptic" + "crypto/rand" + "crypto/sha256" + "math/big" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestCanonicalizeSignature(t *testing.T) { + curve := elliptic.P256() + N := curve.Params().N + halfN := new(big.Int).Div(N, big.NewInt(2)) + + // Test canonical signature (s <= N/2) + r := big.NewInt(12345) + s := new(big.Int).Sub(halfN, big.NewInt(1)) // s = N/2 - 1 + + rCanon, sCanon, err := CanonicalizeSignature(r, s, curve) + require.NoError(t, err) + assert.Equal(t, r, rCanon) + assert.Equal(t, s, sCanon) + + // Test non-canonical signature (s > N/2) + sNonCanon := new(big.Int).Add(halfN, big.NewInt(1)) // s = N/2 + 1 + + rCanon, sCanon, err = CanonicalizeSignature(r, sNonCanon, curve) + require.NoError(t, err) + assert.Equal(t, r, rCanon) + + // sCanon should be N - sNonCanon + expected := new(big.Int).Sub(N, sNonCanon) + assert.Equal(t, expected, sCanon) +} + +func TestIsSignatureCanonical(t *testing.T) { + curve := elliptic.P256() + N := curve.Params().N + halfN := new(big.Int).Div(N, big.NewInt(2)) + + // Test canonical signature + r := big.NewInt(12345) + s := halfN // s = N/2 (boundary case, still canonical) + + assert.True(t, IsSignatureCanonical(r, s, curve)) + + // Test non-canonical signature + sNonCanon := new(big.Int).Add(halfN, big.NewInt(1)) + assert.False(t, IsSignatureCanonical(r, sNonCanon, curve)) + + // Test invalid r (r = 0) + assert.False(t, IsSignatureCanonical(big.NewInt(0), s, curve)) + + // Test invalid r (r >= N) + assert.False(t, IsSignatureCanonical(N, s, curve)) + + // Test invalid s (s = 0) + assert.False(t, IsSignatureCanonical(r, big.NewInt(0), curve)) + + // Test nil inputs + assert.False(t, IsSignatureCanonical(nil, s, curve)) + assert.False(t, IsSignatureCanonical(r, nil, curve)) + assert.False(t, IsSignatureCanonical(r, s, nil)) +} + +func TestValidateAndCanonicalizeSignature(t *testing.T) { + priv, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + message := []byte("test message") + hash := sha256.Sum256(message) + + // Sign with standard ECDSA + r, s, err := ecdsa.Sign(rand.Reader, priv, hash[:]) + require.NoError(t, err) + + // Validate and canonicalize + rCanon, sCanon, err := ValidateAndCanonicalizeSignature(&priv.PublicKey, hash[:], r, s) + require.NoError(t, err) + + // Verify canonical signature + valid := ecdsa.Verify(&priv.PublicKey, hash[:], rCanon, sCanon) + assert.True(t, valid) + + // Ensure signature is canonical + assert.True(t, IsSignatureCanonical(rCanon, sCanon, priv.Curve)) + + // Test with invalid signature + wrongR := new(big.Int).Add(r, big.NewInt(1)) + _, _, err = ValidateAndCanonicalizeSignature(&priv.PublicKey, hash[:], wrongR, s) + assert.Error(t, err) +} + +func TestRejectNonCanonical(t *testing.T) { + curve := elliptic.P256() + N := curve.Params().N + halfN := new(big.Int).Div(N, big.NewInt(2)) + + r := big.NewInt(12345) + + // Test canonical signature + sCanon := new(big.Int).Sub(halfN, big.NewInt(1)) + err := RejectNonCanonical(r, sCanon, curve) + assert.NoError(t, err) + + // Test 
non-canonical signature + sNonCanon := new(big.Int).Add(halfN, big.NewInt(1)) + err = RejectNonCanonical(r, sNonCanon, curve) + assert.Error(t, err) + assert.Contains(t, err.Error(), "not in canonical form") + + // Test nil inputs + err = RejectNonCanonical(nil, sCanon, curve) + assert.Error(t, err) + + err = RejectNonCanonical(r, nil, curve) + assert.Error(t, err) + + err = RejectNonCanonical(r, sCanon, nil) + assert.Error(t, err) +} + +func TestCompareSignatures(t *testing.T) { + curve := elliptic.P256() + N := curve.Params().N + halfN := new(big.Int).Div(N, big.NewInt(2)) + + r := big.NewInt(12345) + s1 := new(big.Int).Sub(halfN, big.NewInt(1)) + + // Same signature should be equal + equal, err := CompareSignatures(r, s1, r, s1, curve) + require.NoError(t, err) + assert.True(t, equal) + + // Canonical and non-canonical versions of same signature should be equal + s1NonCanon := new(big.Int).Sub(N, s1) + equal, err = CompareSignatures(r, s1, r, s1NonCanon, curve) + require.NoError(t, err) + assert.True(t, equal) + + // Different signatures should not be equal + s2 := new(big.Int).Sub(halfN, big.NewInt(10)) + equal, err = CompareSignatures(r, s1, r, s2, curve) + require.NoError(t, err) + assert.False(t, equal) +} + +func TestSignatureBytes(t *testing.T) { + curve := elliptic.P256() + N := curve.Params().N + halfN := new(big.Int).Div(N, big.NewInt(2)) + + r := big.NewInt(12345) + s := new(big.Int).Sub(halfN, big.NewInt(1)) + + // Convert to bytes + sigBytes, err := SignatureBytes(r, s, curve) + require.NoError(t, err) + assert.Len(t, sigBytes, 64) // 32 bytes for r, 32 bytes for s on P-256 + + // Test with non-canonical s - should be canonicalized + sNonCanon := new(big.Int).Sub(N, s) + sigBytesNonCanon, err := SignatureBytes(r, sNonCanon, curve) + require.NoError(t, err) + + // Both should produce the same bytes (after canonicalization) + assert.Equal(t, sigBytes, sigBytesNonCanon) +} + +func TestSignatureFromBytes(t *testing.T) { + curve := elliptic.P256() + + // Create a signature + r := big.NewInt(12345) + s := big.NewInt(67890) + + // Convert to bytes + sigBytes, err := SignatureBytes(r, s, curve) + require.NoError(t, err) + + // Convert back from bytes + rRecovered, sRecovered, err := SignatureFromBytes(sigBytes, curve) + require.NoError(t, err) + + // Should be canonical + assert.True(t, IsSignatureCanonical(rRecovered, sRecovered, curve)) + + // Values should match (after canonicalization) + rCanon, sCanon, err := CanonicalizeSignature(r, s, curve) + require.NoError(t, err) + assert.Equal(t, rCanon, rRecovered) + assert.Equal(t, sCanon, sRecovered) + + // Test with invalid length + _, _, err = SignatureFromBytes([]byte("too short"), curve) + assert.Error(t, err) +} + +func TestNormalizeSignature(t *testing.T) { + curve := elliptic.P256() + N := curve.Params().N + halfN := new(big.Int).Div(N, big.NewInt(2)) + + r := big.NewInt(12345) + s := new(big.Int).Add(halfN, big.NewInt(1)) // Non-canonical + + // Normalize should canonicalize + rNorm, sNorm, err := NormalizeSignature(r, s, curve) + require.NoError(t, err) + + assert.True(t, IsSignatureCanonical(rNorm, sNorm, curve)) + assert.Equal(t, r, rNorm) + + // sNorm should be N - s + expected := new(big.Int).Sub(N, s) + assert.Equal(t, expected, sNorm) +} + +func TestCanonicalWithRealSignatures(t *testing.T) { + priv, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + message := []byte("test canonical signatures") + hash := sha256.Sum256(message) + + // Generate multiple signatures and ensure all can be 
canonicalized + for i := 0; i < 10; i++ { + r, s, err := ecdsa.Sign(rand.Reader, priv, hash[:]) + require.NoError(t, err) + + // Canonicalize + rCanon, sCanon, err := CanonicalizeSignature(r, s, priv.Curve) + require.NoError(t, err) + + // Should be canonical + assert.True(t, IsSignatureCanonical(rCanon, sCanon, priv.Curve)) + + // Should still verify + valid := ecdsa.Verify(&priv.PublicKey, hash[:], rCanon, sCanon) + assert.True(t, valid) + } +} + +func BenchmarkCanonicalizeSignature(b *testing.B) { + curve := elliptic.P256() + N := curve.Params().N + halfN := new(big.Int).Div(N, big.NewInt(2)) + + r := big.NewInt(12345) + s := new(big.Int).Add(halfN, big.NewInt(1)) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + _, _, _ = CanonicalizeSignature(r, s, curve) + } +} + +func BenchmarkIsSignatureCanonical(b *testing.B) { + curve := elliptic.P256() + N := curve.Params().N + halfN := new(big.Int).Div(N, big.NewInt(2)) + + r := big.NewInt(12345) + s := new(big.Int).Sub(halfN, big.NewInt(1)) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + _ = IsSignatureCanonical(r, s, curve) + } +} diff --git a/ecdsa/deterministic.go b/ecdsa/deterministic.go new file mode 100644 index 0000000..3ef5cd5 --- /dev/null +++ b/ecdsa/deterministic.go @@ -0,0 +1,224 @@ +// Package ecdsa provides RFC 6979 deterministic ECDSA implementation +package ecdsa + +import ( + "crypto/ecdsa" + "crypto/elliptic" + "crypto/hmac" + "crypto/sha256" + "fmt" + "hash" + "math/big" +) + +// DeterministicSign implements RFC 6979 deterministic ECDSA signing +// This prevents nonce reuse and bias attacks by generating k deterministically +func DeterministicSign(priv *ecdsa.PrivateKey, hash []byte) (*big.Int, *big.Int, error) { + if priv == nil || priv.D == nil { + return nil, nil, fmt.Errorf("invalid private key") + } + + if len(hash) == 0 { + return nil, nil, fmt.Errorf("hash cannot be empty") + } + + // Generate deterministic k using RFC 6979 + k := generateK(priv, hash, sha256.New) + + // Sign with deterministic k + return signWithK(priv, hash, k) +} + +// generateK implements RFC 6979 deterministic nonce generation +func generateK(priv *ecdsa.PrivateKey, hash []byte, hashFunc func() hash.Hash) *big.Int { + curve := priv.Curve + N := curve.Params().N + bitSize := N.BitLen() + byteSize := (bitSize + 7) / 8 + + // Step a: Process hash + h1 := hashToInt(hash, curve) + + // Step b: Convert private key to bytes + x := priv.D.Bytes() + if len(x) < byteSize { + // Pad with zeros on the left + padding := make([]byte, byteSize-len(x)) + x = append(padding, x...) + } + + // Step c: Create HMAC-DRBG instance + hm := hmac.New(hashFunc, nil) + hlen := hm.Size() + + // Step d: Set V = 0x01 0x01 0x01 ... 0x01 + v := bytes(hlen, 0x01) + + // Step e: Set K = 0x00 0x00 0x00 ... 0x00 + k := bytes(hlen, 0x00) + + // Step f: K = HMAC_K(V || 0x00 || x || h1) + k = hmacCompute(hashFunc, k, v, []byte{0x00}, x, h1.Bytes()) + + // Step g: V = HMAC_K(V) + v = hmacCompute(hashFunc, k, v) + + // Step h: K = HMAC_K(V || 0x01 || x || h1) + k = hmacCompute(hashFunc, k, v, []byte{0x01}, x, h1.Bytes()) + + // Step i: V = HMAC_K(V) + v = hmacCompute(hashFunc, k, v) + + // Step j: Generate k + for { + // Step j.1: Set T = empty sequence + var t []byte + + // Step j.2: While tlen < qlen + for len(t)*8 < bitSize { + // V = HMAC_K(V) + v = hmacCompute(hashFunc, k, v) + // T = T || V + t = append(t, v...) 
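+			// t grows by one HMAC block per iteration; once it covers at least
+			// qlen bits it is reduced to a nonce candidate below (the bits2int step).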
+ } + + // Step j.3: k = bits2int(T) + kInt := hashToInt(t, curve) + + // Check if k is valid (0 < k < N) + if kInt.Sign() > 0 && kInt.Cmp(N) < 0 { + return kInt + } + + // Step j.4: K = HMAC_K(V || 0x00) + k = hmacCompute(hashFunc, k, v, []byte{0x00}) + // V = HMAC_K(V) + v = hmacCompute(hashFunc, k, v) + } +} + +// signWithK performs ECDSA signing with a given k value +func signWithK(priv *ecdsa.PrivateKey, hash []byte, k *big.Int) (*big.Int, *big.Int, error) { + curve := priv.Curve + N := curve.Params().N + + // Calculate r = x-coordinate of k*G mod N + x, _ := curve.ScalarBaseMult(k.Bytes()) + r := new(big.Int).Set(x) + r.Mod(r, N) + + if r.Sign() == 0 { + return nil, nil, fmt.Errorf("invalid r value") + } + + // Calculate s = k^(-1) * (h + r*d) mod N + e := hashToInt(hash, curve) + + kInv := new(big.Int).ModInverse(k, N) + if kInv == nil { + return nil, nil, fmt.Errorf("k has no inverse") + } + + s := new(big.Int).Mul(r, priv.D) + s.Add(s, e) + s.Mul(s, kInv) + s.Mod(s, N) + + if s.Sign() == 0 { + return nil, nil, fmt.Errorf("invalid s value") + } + + // Canonicalize signature (ensure s <= N/2) + rFinal, sFinal := canonicalize(r, s, N) + return rFinal, sFinal, nil +} + +// canonicalize ensures the signature is in canonical form (s <= N/2) +// This prevents signature malleability +func canonicalize(r, s, N *big.Int) (*big.Int, *big.Int) { + halfN := new(big.Int).Div(N, big.NewInt(2)) + + // If s > N/2, use N - s instead + if s.Cmp(halfN) > 0 { + s = new(big.Int).Sub(N, s) + } + + return r, s +} + +// hashToInt converts a hash value to an integer for ECDSA operations +func hashToInt(hash []byte, curve elliptic.Curve) *big.Int { + N := curve.Params().N + orderBits := N.BitLen() + orderBytes := (orderBits + 7) / 8 + + if len(hash) > orderBytes { + hash = hash[:orderBytes] + } + + ret := new(big.Int).SetBytes(hash) + excess := len(hash)*8 - orderBits + if excess > 0 { + ret.Rsh(ret, uint(excess)) + } + + return ret +} + +// hmacCompute computes HMAC with concatenated data +func hmacCompute(hashFunc func() hash.Hash, key []byte, data ...[]byte) []byte { + mac := hmac.New(hashFunc, key) + for _, d := range data { + mac.Write(d) + } + return mac.Sum(nil) +} + +// bytes creates a byte slice filled with value +func bytes(size int, value byte) []byte { + b := make([]byte, size) + for i := range b { + b[i] = value + } + return b +} + +// VerifyDeterministic verifies a deterministic ECDSA signature +func VerifyDeterministic(pub *ecdsa.PublicKey, hash []byte, r, s *big.Int) bool { + if pub == nil || r == nil || s == nil { + return false + } + + // Ensure signature is canonical + N := pub.Curve.Params().N + halfN := new(big.Int).Div(N, big.NewInt(2)) + + // Check r and s are in valid range + if r.Sign() <= 0 || r.Cmp(N) >= 0 { + return false + } + if s.Sign() <= 0 || s.Cmp(halfN) > 0 { + return false // s must be <= N/2 for canonical form + } + + return ecdsa.Verify(pub, hash, r, s) +} + +// IsCanonical checks if a signature is in canonical form +func IsCanonical(s, N *big.Int) bool { + if s == nil || N == nil { + return false + } + + halfN := new(big.Int).Div(N, big.NewInt(2)) + return s.Cmp(halfN) <= 0 +} + +// MakeCanonical converts a signature to canonical form +func MakeCanonical(r, s, N *big.Int) (*big.Int, *big.Int) { + if r == nil || s == nil || N == nil { + return r, s + } + + return canonicalize(r, s, N) +} diff --git a/ecdsa/deterministic_test.go b/ecdsa/deterministic_test.go new file mode 100644 index 0000000..d9ea00a --- /dev/null +++ b/ecdsa/deterministic_test.go @@ -0,0 +1,286 @@ 
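+// Usage sketch for the deterministic signer exercised by these tests (the
+// names are those declared in deterministic.go; error handling elided):
+//
+//	priv, _ := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
+//	digest := sha256.Sum256([]byte("hello"))
+//	r, s, _ := DeterministicSign(priv, digest[:])               // same (r, s) on every call
+//	ok := VerifyDeterministic(&priv.PublicKey, digest[:], r, s) // true; s is already <= N/2
+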
+package ecdsa + +import ( + "crypto/ecdsa" + "crypto/elliptic" + "crypto/rand" + "crypto/sha256" + "encoding/hex" + "math/big" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestDeterministicSign(t *testing.T) { + // Generate test key + priv, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + // Test message + message := []byte("test message for deterministic signing") + hash := sha256.Sum256(message) + + // Sign with deterministic algorithm + r1, s1, err := DeterministicSign(priv, hash[:]) + require.NoError(t, err) + assert.NotNil(t, r1) + assert.NotNil(t, s1) + + // Sign again - should produce identical signature + r2, s2, err := DeterministicSign(priv, hash[:]) + require.NoError(t, err) + assert.Equal(t, r1, r2, "deterministic signatures should be identical") + assert.Equal(t, s1, s2, "deterministic signatures should be identical") + + // Verify signature + valid := ecdsa.Verify(&priv.PublicKey, hash[:], r1, s1) + assert.True(t, valid, "signature should be valid") + + // Different message should produce different signature + message2 := []byte("different message") + hash2 := sha256.Sum256(message2) + r3, s3, err := DeterministicSign(priv, hash2[:]) + require.NoError(t, err) + assert.NotEqual(t, r1, r3, "different messages should produce different signatures") + // Also verify the s component is different + assert.NotEqual(t, s1, s3, "different messages should produce different s values") +} + +func TestCanonicalSignature(t *testing.T) { + priv, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + message := []byte("test canonical signature") + hash := sha256.Sum256(message) + + // Sign multiple times and check all signatures are canonical + for i := 0; i < 10; i++ { + r, s, err := DeterministicSign(priv, hash[:]) + require.NoError(t, err) + + // Check signature is canonical (s <= N/2) + N := priv.Curve.Params().N + assert.True(t, IsCanonical(s, N), "signature should be canonical") + + // Verify signature + valid := ecdsa.Verify(&priv.PublicKey, hash[:], r, s) + assert.True(t, valid, "canonical signature should be valid") + } +} + +func TestMakeCanonical(t *testing.T) { + curve := elliptic.P256() + N := curve.Params().N + halfN := new(big.Int).Div(N, big.NewInt(2)) + + // Test with non-canonical s (s > N/2) + r := big.NewInt(12345) + s := new(big.Int).Add(halfN, big.NewInt(1)) // s = N/2 + 1 + + assert.False(t, IsCanonical(s, N), "s > N/2 should not be canonical") + + // Make canonical + rCanon, sCanon := MakeCanonical(r, s, N) + + assert.Equal(t, r, rCanon, "r should not change") + assert.True(t, IsCanonical(sCanon, N), "canonicalized s should be <= N/2") + + // sCanon should equal N - s + expected := new(big.Int).Sub(N, s) + assert.Equal(t, expected, sCanon, "canonical s should be N - s") +} + +func TestVerifyDeterministic(t *testing.T) { + priv, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + message := []byte("test verification") + hash := sha256.Sum256(message) + + // Create deterministic signature + r, s, err := DeterministicSign(priv, hash[:]) + require.NoError(t, err) + + // Verify with our function + valid := VerifyDeterministic(&priv.PublicKey, hash[:], r, s) + assert.True(t, valid, "signature should verify") + + // Test with non-canonical signature (should fail) + N := priv.Curve.Params().N + sNonCanon := new(big.Int).Sub(N, s) // Create non-canonical s + + valid = VerifyDeterministic(&priv.PublicKey, hash[:], r, 
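+		// N - s lies in the upper half of the group order, so the canonical-form
+		// check inside VerifyDeterministic must reject it: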
sNonCanon) + assert.False(t, valid, "non-canonical signature should not verify") + + // Test with wrong hash + wrongHash := sha256.Sum256([]byte("wrong message")) + valid = VerifyDeterministic(&priv.PublicKey, wrongHash[:], r, s) + assert.False(t, valid, "signature with wrong hash should not verify") +} + +func TestInvalidInputs(t *testing.T) { + priv, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + hash := sha256.Sum256([]byte("test")) + + // Test with nil private key + r, s, err := DeterministicSign(nil, hash[:]) + assert.Error(t, err) + assert.Nil(t, r) + assert.Nil(t, s) + + // Test with empty hash + r, s, err = DeterministicSign(priv, []byte{}) + assert.Error(t, err) + assert.Nil(t, r) + assert.Nil(t, s) + + // Test verify with nil inputs + assert.False(t, VerifyDeterministic(nil, hash[:], big.NewInt(1), big.NewInt(1))) + assert.False(t, VerifyDeterministic(&priv.PublicKey, hash[:], nil, big.NewInt(1))) + assert.False(t, VerifyDeterministic(&priv.PublicKey, hash[:], big.NewInt(1), nil)) +} + +func TestDifferentCurves(t *testing.T) { + curves := []elliptic.Curve{ + elliptic.P224(), + elliptic.P256(), + elliptic.P384(), + elliptic.P521(), + } + + message := []byte("test message for different curves") + hash := sha256.Sum256(message) + + for _, curve := range curves { + t.Run(curve.Params().Name, func(t *testing.T) { + priv, err := ecdsa.GenerateKey(curve, rand.Reader) + require.NoError(t, err) + + // Sign deterministically + r, s, err := DeterministicSign(priv, hash[:]) + require.NoError(t, err) + + // Verify signature is canonical + N := curve.Params().N + assert.True(t, IsCanonical(s, N)) + + // Verify signature + valid := ecdsa.Verify(&priv.PublicKey, hash[:], r, s) + assert.True(t, valid) + + // Verify deterministic property + r2, s2, err := DeterministicSign(priv, hash[:]) + require.NoError(t, err) + assert.Equal(t, r, r2) + assert.Equal(t, s, s2) + }) + } +} + +// TestRFC6979Vectors tests against known test vectors +// These are simplified vectors - in production, use the full RFC 6979 test vectors +func TestRFC6979Vectors(t *testing.T) { + // Test vector for P-256 with SHA-256 + // This is a simplified example - real implementation should use official test vectors + privKeyHex := "c9afa9d845ba75166b5c215767b1d6934e50c3db36e89b127b8a622b120f6721" + messageHex := "73616d706c65" // "sample" + + privKeyBytes, err := hex.DecodeString(privKeyHex) + require.NoError(t, err) + + message, err := hex.DecodeString(messageHex) + require.NoError(t, err) + + // Create private key + priv := new(ecdsa.PrivateKey) + priv.Curve = elliptic.P256() + priv.D = new(big.Int).SetBytes(privKeyBytes) + priv.PublicKey.Curve = priv.Curve + priv.PublicKey.X, priv.PublicKey.Y = priv.Curve.ScalarBaseMult(privKeyBytes) + + // Hash message + hash := sha256.Sum256(message) + + // Sign deterministically + r, s, err := DeterministicSign(priv, hash[:]) + require.NoError(t, err) + assert.NotNil(t, r) + assert.NotNil(t, s) + + // Verify signature + valid := ecdsa.Verify(&priv.PublicKey, hash[:], r, s) + assert.True(t, valid, "RFC 6979 test vector signature should verify") +} + +func BenchmarkDeterministicSign(b *testing.B) { + priv, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(b, err) + + message := []byte("benchmark message") + hash := sha256.Sum256(message) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + _, _, _ = DeterministicSign(priv, hash[:]) + } +} + +func BenchmarkVerifyDeterministic(b *testing.B) { + priv, err := 
ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(b, err) + + message := []byte("benchmark message") + hash := sha256.Sum256(message) + + r, s, err := DeterministicSign(priv, hash[:]) + require.NoError(b, err) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + _ = VerifyDeterministic(&priv.PublicKey, hash[:], r, s) + } +} + +func TestConcurrentSigning(t *testing.T) { + priv, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + message := []byte("concurrent test") + hash := sha256.Sum256(message) + + // Sign concurrently + const goroutines = 10 + results := make(chan struct { + r, s *big.Int + err error + }, goroutines) + + for i := 0; i < goroutines; i++ { + go func() { + r, s, err := DeterministicSign(priv, hash[:]) + results <- struct { + r, s *big.Int + err error + }{r, s, err} + }() + } + + // Collect results + var firstR, firstS *big.Int + for i := 0; i < goroutines; i++ { + result := <-results + require.NoError(t, result.err) + + if i == 0 { + firstR, firstS = result.r, result.s + } else { + // All signatures should be identical (deterministic) + assert.Equal(t, firstR, result.r) + assert.Equal(t, firstS, result.s) + } + } +} diff --git a/ecies/encrypt.go b/ecies/encrypt.go new file mode 100644 index 0000000..4217c7f --- /dev/null +++ b/ecies/encrypt.go @@ -0,0 +1,13 @@ +package ecies + +import eciesgo "github.com/ecies/go/v2" + +// Encrypt encrypts a plaintext using a public key +func Encrypt(pub *PublicKey, plaintext []byte) ([]byte, error) { + return eciesgo.Encrypt(pub, plaintext) +} + +// Decrypt decrypts a ciphertext using a private key +func Decrypt(priv *PrivateKey, ciphertext []byte) ([]byte, error) { + return eciesgo.Decrypt(priv, ciphertext) +} diff --git a/ecies/keys.go b/ecies/keys.go new file mode 100644 index 0000000..36bf9d7 --- /dev/null +++ b/ecies/keys.go @@ -0,0 +1,57 @@ +package ecies + +import ( + "bytes" + "crypto/ecdsa" + "crypto/rand" + "fmt" + + eciesgo "github.com/ecies/go/v2" + "lukechampine.com/blake3" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +type PrivateKey = eciesgo.PrivateKey + +type PublicKey = eciesgo.PublicKey + +// GenerateKey generates secp256k1 key pair +func GenerateKey() (*PrivateKey, error) { + curve := curves.SP256() + p, err := ecdsa.GenerateKey(curve, rand.Reader) + if err != nil { + return nil, fmt.Errorf("cannot generate key pair: %w", err) + } + return &PrivateKey{ + PublicKey: &PublicKey{ + Curve: curve, + X: p.X, + Y: p.Y, + }, + D: p.D, + }, nil +} + +// GenerateKeyFromSeed generates secp256k1 key pair from []byte seed +func GenerateKeyFromSeed(seed []byte) (*PrivateKey, error) { + curve := curves.SP256() + p, err := ecdsa.GenerateKey(curve, bytes.NewReader(seed[:])) + if err != nil { + return nil, fmt.Errorf("cannot generate key pair: %w", err) + } + return &PrivateKey{ + PublicKey: &PublicKey{ + Curve: curve, + X: p.X, + Y: p.Y, + }, + D: p.D, + }, nil +} + +// HashSeed returns 512 sum hash of byte slice +func HashSeed(seed []byte) []byte { + bz := blake3.Sum512(seed) + return bz[:] +} diff --git a/ecies/keys_test.go b/ecies/keys_test.go new file mode 100644 index 0000000..0e72374 --- /dev/null +++ b/ecies/keys_test.go @@ -0,0 +1,24 @@ +package ecies_test + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/sonr-io/sonr/crypto/ecies" +) + +func TestGenerateKey(t *testing.T) { + _, err := ecies.GenerateKey() + assert.NoError(t, err) +} + +func TestGenerateFromSeed(t *testing.T) { + seed := ecies.HashSeed( + 
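+		// HashSeed expands the input to a 64-byte BLAKE3 digest, which
+		// GenerateKeyFromSeed then consumes as its entropy source: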
[]byte("testasdfasdfasdfasdfasdfw234453412341testasdfasdfasdfasdfasdfw234453412341"), + ) + _, err := ecies.GenerateKeyFromSeed(seed) + assert.NoError(t, err) + _, err = ecies.GenerateKeyFromSeed(seed) + assert.NoError(t, err) +} diff --git a/empty-module/bip39.go b/empty-module/bip39.go new file mode 100644 index 0000000..52371aa --- /dev/null +++ b/empty-module/bip39.go @@ -0,0 +1,33 @@ +// Package bip39 provides BIP39 mnemonic functionality. +// This is a stub package to replace the missing tyler-smith/go-bip39 repository. +// The original repository at github.com/tyler-smith/go-bip39 no longer exists. +package bip39 + +import ( + "errors" +) + +// NewMnemonic generates a new mnemonic sequence +func NewMnemonic(bitSize int) (string, error) { + return "", errors.New("tyler-smith/go-bip39 is deprecated, use github.com/cosmos/go-bip39 instead") +} + +// NewSeed creates a seed from a mnemonic and passphrase +func NewSeed(mnemonic string, password string) []byte { + panic("tyler-smith/go-bip39 is deprecated, use github.com/cosmos/go-bip39 instead") +} + +// IsMnemonicValid validates a mnemonic sequence +func IsMnemonicValid(mnemonic string) bool { + return false +} + +// NewEntropy generates new entropy +func NewEntropy(bitSize int) ([]byte, error) { + return nil, errors.New("tyler-smith/go-bip39 is deprecated, use github.com/cosmos/go-bip39 instead") +} + +// NewMnemonicFromEntropy creates a mnemonic from entropy +func NewMnemonicFromEntropy(entropy []byte) (string, error) { + return "", errors.New("tyler-smith/go-bip39 is deprecated, use github.com/cosmos/go-bip39 instead") +} diff --git a/empty-module/go.mod b/empty-module/go.mod new file mode 100644 index 0000000..10d1440 --- /dev/null +++ b/empty-module/go.mod @@ -0,0 +1,3 @@ +module github.com/tyler-smith/go-bip39 + +go 1.24.7 \ No newline at end of file diff --git a/go.mod b/go.mod new file mode 100644 index 0000000..d4f1e9c --- /dev/null +++ b/go.mod @@ -0,0 +1,46 @@ +module github.com/sonr-io/sonr/crypto + +go 1.24.7 + +require ( + filippo.io/edwards25519 v1.1.0 + git.sr.ht/~sircmpwn/go-bare v0.0.0-20210406120253-ab86bc2846d9 + github.com/Oudwins/zog v0.21.6 + github.com/btcsuite/btcd/btcec/v2 v2.3.4 + github.com/bwesterb/go-ristretto v1.2.3 + github.com/consensys/gnark-crypto v0.19.0 + github.com/cosmos/btcutil v1.0.5 + github.com/cosmos/cosmos-sdk v0.53.4 + github.com/dustinxie/ecc v0.0.0-20210511000915-959544187564 + github.com/ecies/go/v2 v2.0.10 + github.com/golang-jwt/jwt/v5 v5.3.0 + github.com/gtank/merlin v0.1.1 + github.com/ipfs/go-cid v0.5.0 + github.com/libp2p/go-libp2p v0.43.0 + github.com/mr-tron/base58 v1.2.0 + github.com/multiformats/go-multibase v0.2.0 + github.com/multiformats/go-multihash v0.2.3 + github.com/multiformats/go-varint v0.1.0 + github.com/pkg/errors v0.9.1 + github.com/stretchr/testify v1.10.0 + golang.org/x/crypto v0.42.0 + lukechampine.com/blake3 v1.4.1 +) + +require ( + github.com/bits-and-blooms/bitset v1.24.0 // indirect + github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect + github.com/decred/dcrd/dcrec/secp256k1/v4 v4.4.0 // indirect + github.com/ethereum/go-ethereum v1.16.3 // indirect + github.com/klauspost/cpuid/v2 v2.2.10 // indirect + github.com/mimoo/StrobeGo v0.0.0-20181016162300-f8f6d4d2b643 // indirect + github.com/minio/sha256-simd v1.0.1 // indirect + github.com/multiformats/go-base32 v0.1.0 // indirect + github.com/multiformats/go-base36 v0.2.0 // indirect + github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect + 
github.com/spaolacci/murmur3 v1.1.0 // indirect + golang.org/x/exp v0.0.0-20250606033433-dcc06ee1d476 // indirect + golang.org/x/sys v0.36.0 // indirect + google.golang.org/protobuf v1.36.6 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect +) diff --git a/go.sum b/go.sum new file mode 100644 index 0000000..1fc77ce --- /dev/null +++ b/go.sum @@ -0,0 +1,102 @@ +filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA= +filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4= +git.sr.ht/~sircmpwn/getopt v0.0.0-20191230200459-23622cc906b3/go.mod h1:wMEGFFFNuPos7vHmWXfszqImLppbc0wEhh6JBfJIUgw= +git.sr.ht/~sircmpwn/go-bare v0.0.0-20210406120253-ab86bc2846d9 h1:Ahny8Ud1LjVMMAlt8utUFKhhxJtwBAualvsbc/Sk7cE= +git.sr.ht/~sircmpwn/go-bare v0.0.0-20210406120253-ab86bc2846d9/go.mod h1:BVJwbDfVjCjoFiKrhkei6NdGcZYpkDkdyCdg1ukytRA= +github.com/Oudwins/zog v0.21.6 h1:3JVJA66fr59k2x72RojCB7v5XkVmtVsnp1YO/np595k= +github.com/Oudwins/zog v0.21.6/go.mod h1:c4ADJ2zNkJp37ZViNy1o3ZZoeMvO7UQVO7BaPtRoocg= +github.com/bits-and-blooms/bitset v1.24.0 h1:H4x4TuulnokZKvHLfzVRTHJfFfnHEeSYJizujEZvmAM= +github.com/bits-and-blooms/bitset v1.24.0/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8= +github.com/btcsuite/btcd/btcec/v2 v2.3.4 h1:3EJjcN70HCu/mwqlUsGK8GcNVyLVxFDlWurTXGPFfiQ= +github.com/btcsuite/btcd/btcec/v2 v2.3.4/go.mod h1:zYzJ8etWJQIv1Ogk7OzpWjowwOdXY1W/17j2MW85J04= +github.com/bwesterb/go-ristretto v1.2.3 h1:1w53tCkGhCQ5djbat3+MH0BAQ5Kfgbt56UZQ/JMzngw= +github.com/bwesterb/go-ristretto v1.2.3/go.mod h1:fUIoIZaG73pV5biE2Blr2xEzDoMj7NFEuV9ekS419A0= +github.com/consensys/gnark-crypto v0.19.0 h1:zXCqeY2txSaMl6G5wFpZzMWJU9HPNh8qxPnYJ1BL9vA= +github.com/consensys/gnark-crypto v0.19.0/go.mod h1:rT23F0XSZqE0mUA0+pRtnL56IbPxs6gp4CeRsBk4XS0= +github.com/cosmos/btcutil v1.0.5 h1:t+ZFcX77LpKtDBhjucvnOH8C2l2ioGsBNEQ3jef8xFk= +github.com/cosmos/btcutil v1.0.5/go.mod h1:IyB7iuqZMJlthe2tkIFL33xPyzbFYP0XVdS8P5lUPis= +github.com/cosmos/cosmos-sdk v0.53.4 h1:kPF6vY68+/xi1/VebSZGpoxQqA52qkhUzqkrgeBn3Mg= +github.com/cosmos/cosmos-sdk v0.53.4/go.mod h1:7U3+WHZtI44dEOnU46+lDzBb2tFh1QlMvi8Z5JugopI= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/decred/dcrd/crypto/blake256 v1.1.0 h1:zPMNGQCm0g4QTY27fOCorQW7EryeQ/U0x++OzVrdms8= +github.com/decred/dcrd/crypto/blake256 v1.1.0/go.mod h1:2OfgNZ5wDpcsFmHmCK5gZTPcCXqlm2ArzUIkw9czNJo= +github.com/decred/dcrd/dcrec/secp256k1/v4 v4.4.0 h1:NMZiJj8QnKe1LgsbDayM4UoHwbvwDRwnI3hwNaAHRnc= +github.com/decred/dcrd/dcrec/secp256k1/v4 v4.4.0/go.mod h1:ZXNYxsqcloTdSy/rNShjYzMhyjf0LaoftYK0p+A3h40= +github.com/dustinxie/ecc v0.0.0-20210511000915-959544187564 h1:I6KUy4CI6hHjqnyJLNCEi7YHVMkwwtfSr2k9splgdSM= +github.com/dustinxie/ecc v0.0.0-20210511000915-959544187564/go.mod h1:yekO+3ZShy19S+bsmnERmznGy9Rfg6dWWWpiGJjNAz8= +github.com/ecies/go/v2 v2.0.10 h1:AaLxGio0MLLbvWur4rKnLzw+K9zI+wMScIDAtqCqOtU= +github.com/ecies/go/v2 v2.0.10/go.mod h1:N73OyuR6tuKznit2LhXjrZ0XAQ234uKbzYz8pEPYzlI= +github.com/ethereum/go-ethereum v1.16.3 h1:nDoBSrmsrPbrDIVLTkDQCy1U9KdHN+F2PzvMbDoS42Q= +github.com/ethereum/go-ethereum v1.16.3/go.mod h1:Lrsc6bt9Gm9RyvhfFK53vboCia8kpF9nv+2Ukntnl+8= +github.com/golang-jwt/jwt/v5 v5.3.0 h1:pv4AsKCKKZuqlgs5sUmn4x8UlGa0kEVt/puTpKx9vvo= 
+github.com/golang-jwt/jwt/v5 v5.3.0/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/gtank/merlin v0.1.1 h1:eQ90iG7K9pOhtereWsmyRJ6RAwcP4tHTDBHXNg+u5is= +github.com/gtank/merlin v0.1.1/go.mod h1:T86dnYJhcGOh5BjZFCJWTDeTK7XW8uE+E21Cy/bIQ+s= +github.com/ipfs/go-cid v0.5.0 h1:goEKKhaGm0ul11IHA7I6p1GmKz8kEYniqFopaB5Otwg= +github.com/ipfs/go-cid v0.5.0/go.mod h1:0L7vmeNXpQpUS9vt+yEARkJ8rOg43DF3iPgn4GIN0mk= +github.com/klauspost/cpuid/v2 v2.2.10 h1:tBs3QSyvjDyFTq3uoc/9xFpCuOsJQFNPiAhYdw2skhE= +github.com/klauspost/cpuid/v2 v2.2.10/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/leanovate/gopter v0.2.11 h1:vRjThO1EKPb/1NsDXuDrzldR28RLkBflWYcU9CvzWu4= +github.com/leanovate/gopter v0.2.11/go.mod h1:aK3tzZP/C+p1m3SPRE4SYZFGP7jjkuSI4f7Xvpt0S9c= +github.com/libp2p/go-buffer-pool v0.1.0 h1:oK4mSFcQz7cTQIfqbe4MIj9gLW+mnanjyFtc6cdF0Y8= +github.com/libp2p/go-buffer-pool v0.1.0/go.mod h1:N+vh8gMqimBzdKkSMVuydVDq+UV5QTWy5HSiZacSbPg= +github.com/libp2p/go-libp2p v0.43.0 h1:b2bg2cRNmY4HpLK8VHYQXLX2d3iND95OjodLFymvqXU= +github.com/libp2p/go-libp2p v0.43.0/go.mod h1:IiSqAXDyP2sWH+J2gs43pNmB/y4FOi2XQPbsb+8qvzc= +github.com/mimoo/StrobeGo v0.0.0-20181016162300-f8f6d4d2b643 h1:hLDRPB66XQT/8+wG9WsDpiCvZf1yKO7sz7scAjSlBa0= +github.com/mimoo/StrobeGo v0.0.0-20181016162300-f8f6d4d2b643/go.mod h1:43+3pMjjKimDBf5Kr4ZFNGbLql1zKkbImw+fZbw3geM= +github.com/minio/sha256-simd v1.0.1 h1:6kaan5IFmwTNynnKKpDHe6FWHohJOHhCPchzK49dzMM= +github.com/minio/sha256-simd v1.0.1/go.mod h1:Pz6AKMiUdngCLpeTL/RJY1M9rUuPMYujV5xJjtbRSN8= +github.com/mr-tron/base58 v1.2.0 h1:T/HDJBh4ZCPbU39/+c3rRvE0uKBQlU27+QI8LJ4t64o= +github.com/mr-tron/base58 v1.2.0/go.mod h1:BinMc/sQntlIE1frQmRFPUoPA1Zkr8VRgBdjWI2mNwc= +github.com/multiformats/go-base32 v0.1.0 h1:pVx9xoSPqEIQG8o+UbAe7DNi51oej1NtK+aGkbLYxPE= +github.com/multiformats/go-base32 v0.1.0/go.mod h1:Kj3tFY6zNr+ABYMqeUNeGvkIC/UYgtWibDcT0rExnbI= +github.com/multiformats/go-base36 v0.2.0 h1:lFsAbNOGeKtuKozrtBsAkSVhv1p9D0/qedU9rQyccr0= +github.com/multiformats/go-base36 v0.2.0/go.mod h1:qvnKE++v+2MWCfePClUEjE78Z7P2a1UV0xHgWc0hkp4= +github.com/multiformats/go-multiaddr v0.16.0 h1:oGWEVKioVQcdIOBlYM8BH1rZDWOGJSqr9/BKl6zQ4qc= +github.com/multiformats/go-multiaddr v0.16.0/go.mod h1:JSVUmXDjsVFiW7RjIFMP7+Ev+h1DTbiJgVeTV/tcmP0= +github.com/multiformats/go-multibase v0.2.0 h1:isdYCVLvksgWlMW9OZRYJEa9pZETFivncJHmHnnd87g= +github.com/multiformats/go-multibase v0.2.0/go.mod h1:bFBZX4lKCA/2lyOFSAoKH5SS6oPyjtnzK/XTFDPkNuk= +github.com/multiformats/go-multicodec v0.9.1 h1:x/Fuxr7ZuR4jJV4Os5g444F7xC4XmyUaT/FWtE+9Zjo= +github.com/multiformats/go-multicodec v0.9.1/go.mod h1:LLWNMtyV5ithSBUo3vFIMaeDy+h3EbkMTek1m+Fybbo= +github.com/multiformats/go-multihash v0.2.3 h1:7Lyc8XfX/IY2jWb/gI7JP+o7JEq9hOa7BFvVU9RSh+U= +github.com/multiformats/go-multihash v0.2.3/go.mod h1:dXgKXCXjBzdscBLk9JkjINiEsCKRVch90MdaGiKsvSM= +github.com/multiformats/go-varint v0.1.0 h1:i2wqFp4sdl3IcIxfAonHQV9qU5OsZ4Ts9IOoETFs5dI= +github.com/multiformats/go-varint v0.1.0/go.mod h1:5KVAVXegtfmNQQm/lCY+ATvDzvJJhSkUlGQV9wgObdI= 
+github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= +github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= +github.com/spaolacci/murmur3 v1.1.0 h1:7c1g84S4BPRrfL5Xrdp6fOJ206sU9y293DDHaoy0bLI= +github.com/spaolacci/murmur3 v1.1.0/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= +github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +golang.org/x/crypto v0.42.0 h1:chiH31gIWm57EkTXpwnqf8qeuMUi0yekh6mT2AvFlqI= +golang.org/x/crypto v0.42.0/go.mod h1:4+rDnOTJhQCx2q7/j6rAN5XDw8kPjeaXEUR2eL94ix8= +golang.org/x/exp v0.0.0-20250606033433-dcc06ee1d476 h1:bsqhLWFR6G6xiQcb+JoGqdKdRU6WzPWmK8E0jxTjzo4= +golang.org/x/exp v0.0.0-20250606033433-dcc06ee1d476/go.mod h1:3//PLf8L/X+8b4vuAfHzxeRUl04Adcb341+IGKfnqS8= +golang.org/x/sys v0.36.0 h1:KVRy2GtZBrk1cBYA7MKu5bEZFxQk4NIDV6RLVcC8o0k= +golang.org/x/sys v0.36.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY= +google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +lukechampine.com/blake3 v1.4.1 h1:I3Smz7gso8w4/TunLKec6K2fn+kyKtDxr/xcQEN84Wg= +lukechampine.com/blake3 v1.4.1/go.mod h1:QFosUxmjB8mnrWFSNwKmvxHpfY72bmD2tQ0kBMM3kwo= diff --git a/internal/ed25519/edwards25519/const.go b/internal/ed25519/edwards25519/const.go new file mode 100644 index 0000000..c124e37 --- /dev/null +++ b/internal/ed25519/edwards25519/const.go @@ -0,0 +1,10217 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package edwards25519 + +// BasePointOrder is the number of points in the subgroup generated by the base point. 
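+// Its value is the little-endian encoding of the prime order
+// 2^252 + 27742317777372353535851937790883648493 of the Ed25519 base point.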
+var BasePointOrder = [32]byte{ + 237, + 211, + 245, + 92, + 26, + 99, + 18, + 88, + 214, + 156, + 247, + 162, + 222, + 249, + 222, + 20, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 16, +} + +var d = FieldElement{ + -10913610, 13857413, -15372611, 6949391, 114729, -8787816, -6275908, -3247719, -18696448, -12055116, +} + +var d2 = FieldElement{ + -21827239, -5839606, -30745221, 13898782, 229458, 15978800, -12551817, -6495438, 29715968, 9444199, +} + +var SqrtM1 = FieldElement{ + -32595792, -7943725, 9377950, 3500415, 12389472, -272473, -25146209, -2005654, 326686, 11406482, +} + +var A = FieldElement{ + 486662, 0, 0, 0, 0, 0, 0, 0, 0, 0, +} + +var extendedBaseEl = ExtendedGroupElement{ + FieldElement{ + 25485296, + 5318399, + 8791791, + -8299916, + -14349720, + 6939349, + -3324311, + -7717049, + 7287234, + -6577708, + }, + FieldElement{ + -758052, + -1832720, + 13046421, + -4857925, + 6576754, + 14371947, + -13139572, + 6845540, + -2198883, + -4003719, + }, + FieldElement{ + -947565, + 6097708, + -469190, + 10704810, + -8556274, + -15589498, + -16424464, + -16608899, + 14028613, + -5004649, + }, + FieldElement{ + 6966464, + -2456167, + 7033433, + 6781840, + 28785542, + 12262365, + -2659449, + 13959020, + -21013759, + -5262166, + }, +} + +// BaseBytes can be used to hash the base point if necessary +var BaseBytes [32]byte + +func init() { + extendedBaseEl.ToBytes(&BaseBytes) +} + +var bi = [8]PreComputedGroupElement{ + { + FieldElement{ + 25967493, + -14356035, + 29566456, + 3660896, + -12694345, + 4014787, + 27544626, + -11754271, + -6079156, + 2047605, + }, + FieldElement{ + -12545711, + 934262, + -2722910, + 3049990, + -727428, + 9406986, + 12720692, + 5043384, + 19500929, + -15469378, + }, + FieldElement{ + -8738181, + 4489570, + 9688441, + -14785194, + 10184609, + -12363380, + 29287919, + 11864899, + -24514362, + -4438546, + }, + }, + { + FieldElement{ + 15636291, + -9688557, + 24204773, + -7912398, + 616977, + -16685262, + 27787600, + -14772189, + 28944400, + -1550024, + }, + FieldElement{ + 16568933, + 4717097, + -11556148, + -1102322, + 15682896, + -11807043, + 16354577, + -11775962, + 7689662, + 11199574, + }, + FieldElement{ + 30464156, + -5976125, + -11779434, + -15670865, + 23220365, + 15915852, + 7512774, + 10017326, + -17749093, + -9920357, + }, + }, + { + FieldElement{ + 10861363, + 11473154, + 27284546, + 1981175, + -30064349, + 12577861, + 32867885, + 14515107, + -15438304, + 10819380, + }, + FieldElement{ + 4708026, + 6336745, + 20377586, + 9066809, + -11272109, + 6594696, + -25653668, + 12483688, + -12668491, + 5581306, + }, + FieldElement{ + 19563160, + 16186464, + -29386857, + 4097519, + 10237984, + -4348115, + 28542350, + 13850243, + -23678021, + -15815942, + }, + }, + { + FieldElement{ + 5153746, + 9909285, + 1723747, + -2777874, + 30523605, + 5516873, + 19480852, + 5230134, + -23952439, + -15175766, + }, + FieldElement{ + -30269007, + -3463509, + 7665486, + 10083793, + 28475525, + 1649722, + 20654025, + 16520125, + 30598449, + 7715701, + }, + FieldElement{ + 28881845, + 14381568, + 9657904, + 3680757, + -20181635, + 7843316, + -31400660, + 1370708, + 29794553, + -1409300, + }, + }, + { + FieldElement{ + -22518993, + -6692182, + 14201702, + -8745502, + -23510406, + 8844726, + 18474211, + -1361450, + -13062696, + 13821877, + }, + FieldElement{ + -6455177, + -7839871, + 3374702, + -4740862, + -27098617, + -10571707, + 31655028, + -7212327, + 18853322, + -14220951, + }, + FieldElement{ + 4566830, + -12963868, + -28974889, + -12240689, + 
-7602672, + -2830569, + -8514358, + -10431137, + 2207753, + -3209784, + }, + }, + { + FieldElement{ + -25154831, + -4185821, + 29681144, + 7868801, + -6854661, + -9423865, + -12437364, + -663000, + -31111463, + -16132436, + }, + FieldElement{ + 25576264, + -2703214, + 7349804, + -11814844, + 16472782, + 9300885, + 3844789, + 15725684, + 171356, + 6466918, + }, + FieldElement{ + 23103977, + 13316479, + 9739013, + -16149481, + 817875, + -15038942, + 8965339, + -14088058, + -30714912, + 16193877, + }, + }, + { + FieldElement{ + -33521811, + 3180713, + -2394130, + 14003687, + -16903474, + -16270840, + 17238398, + 4729455, + -18074513, + 9256800, + }, + FieldElement{ + -25182317, + -4174131, + 32336398, + 5036987, + -21236817, + 11360617, + 22616405, + 9761698, + -19827198, + 630305, + }, + FieldElement{ + -13720693, + 2639453, + -24237460, + -7406481, + 9494427, + -5774029, + -6554551, + -15960994, + -2449256, + -14291300, + }, + }, + { + FieldElement{ + -3151181, + -5046075, + 9282714, + 6866145, + -31907062, + -863023, + -18940575, + 15033784, + 25105118, + -7894876, + }, + FieldElement{ + -24326370, + 15950226, + -31801215, + -14592823, + -11662737, + -5090925, + 1573892, + -2625887, + 2198790, + -15804619, + }, + FieldElement{ + -3099351, + 10324967, + -2241613, + 7453183, + -5446979, + -2735503, + -13812022, + -16236442, + -32461234, + -12290683, + }, + }, +} + +var base = [32][8]PreComputedGroupElement{ + { + { + FieldElement{ + 25967493, + -14356035, + 29566456, + 3660896, + -12694345, + 4014787, + 27544626, + -11754271, + -6079156, + 2047605, + }, + FieldElement{ + -12545711, + 934262, + -2722910, + 3049990, + -727428, + 9406986, + 12720692, + 5043384, + 19500929, + -15469378, + }, + FieldElement{ + -8738181, + 4489570, + 9688441, + -14785194, + 10184609, + -12363380, + 29287919, + 11864899, + -24514362, + -4438546, + }, + }, + { + FieldElement{ + -12815894, + -12976347, + -21581243, + 11784320, + -25355658, + -2750717, + -11717903, + -3814571, + -358445, + -10211303, + }, + FieldElement{ + -21703237, + 6903825, + 27185491, + 6451973, + -29577724, + -9554005, + -15616551, + 11189268, + -26829678, + -5319081, + }, + FieldElement{ + 26966642, + 11152617, + 32442495, + 15396054, + 14353839, + -12752335, + -3128826, + -9541118, + -15472047, + -4166697, + }, + }, + { + FieldElement{ + 15636291, + -9688557, + 24204773, + -7912398, + 616977, + -16685262, + 27787600, + -14772189, + 28944400, + -1550024, + }, + FieldElement{ + 16568933, + 4717097, + -11556148, + -1102322, + 15682896, + -11807043, + 16354577, + -11775962, + 7689662, + 11199574, + }, + FieldElement{ + 30464156, + -5976125, + -11779434, + -15670865, + 23220365, + 15915852, + 7512774, + 10017326, + -17749093, + -9920357, + }, + }, + { + FieldElement{ + -17036878, + 13921892, + 10945806, + -6033431, + 27105052, + -16084379, + -28926210, + 15006023, + 3284568, + -6276540, + }, + FieldElement{ + 23599295, + -8306047, + -11193664, + -7687416, + 13236774, + 10506355, + 7464579, + 9656445, + 13059162, + 10374397, + }, + FieldElement{ + 7798556, + 16710257, + 3033922, + 2874086, + 28997861, + 2835604, + 32406664, + -3839045, + -641708, + -101325, + }, + }, + { + FieldElement{ + 10861363, + 11473154, + 27284546, + 1981175, + -30064349, + 12577861, + 32867885, + 14515107, + -15438304, + 10819380, + }, + FieldElement{ + 4708026, + 6336745, + 20377586, + 9066809, + -11272109, + 6594696, + -25653668, + 12483688, + -12668491, + 5581306, + }, + FieldElement{ + 19563160, + 16186464, + -29386857, + 4097519, + 10237984, + -4348115, + 28542350, + 
13850243, + -23678021, + -15815942, + }, + }, + { + FieldElement{ + -15371964, + -12862754, + 32573250, + 4720197, + -26436522, + 5875511, + -19188627, + -15224819, + -9818940, + -12085777, + }, + FieldElement{ + -8549212, + 109983, + 15149363, + 2178705, + 22900618, + 4543417, + 3044240, + -15689887, + 1762328, + 14866737, + }, + FieldElement{ + -18199695, + -15951423, + -10473290, + 1707278, + -17185920, + 3916101, + -28236412, + 3959421, + 27914454, + 4383652, + }, + }, + { + FieldElement{ + 5153746, + 9909285, + 1723747, + -2777874, + 30523605, + 5516873, + 19480852, + 5230134, + -23952439, + -15175766, + }, + FieldElement{ + -30269007, + -3463509, + 7665486, + 10083793, + 28475525, + 1649722, + 20654025, + 16520125, + 30598449, + 7715701, + }, + FieldElement{ + 28881845, + 14381568, + 9657904, + 3680757, + -20181635, + 7843316, + -31400660, + 1370708, + 29794553, + -1409300, + }, + }, + { + FieldElement{ + 14499471, + -2729599, + -33191113, + -4254652, + 28494862, + 14271267, + 30290735, + 10876454, + -33154098, + 2381726, + }, + FieldElement{ + -7195431, + -2655363, + -14730155, + 462251, + -27724326, + 3941372, + -6236617, + 3696005, + -32300832, + 15351955, + }, + FieldElement{ + 27431194, + 8222322, + 16448760, + -3907995, + -18707002, + 11938355, + -32961401, + -2970515, + 29551813, + 10109425, + }, + }, + }, + { + { + FieldElement{ + -13657040, + -13155431, + -31283750, + 11777098, + 21447386, + 6519384, + -2378284, + -1627556, + 10092783, + -4764171, + }, + FieldElement{ + 27939166, + 14210322, + 4677035, + 16277044, + -22964462, + -12398139, + -32508754, + 12005538, + -17810127, + 12803510, + }, + FieldElement{ + 17228999, + -15661624, + -1233527, + 300140, + -1224870, + -11714777, + 30364213, + -9038194, + 18016357, + 4397660, + }, + }, + { + FieldElement{ + -10958843, + -7690207, + 4776341, + -14954238, + 27850028, + -15602212, + -26619106, + 14544525, + -17477504, + 982639, + }, + FieldElement{ + 29253598, + 15796703, + -2863982, + -9908884, + 10057023, + 3163536, + 7332899, + -4120128, + -21047696, + 9934963, + }, + FieldElement{ + 5793303, + 16271923, + -24131614, + -10116404, + 29188560, + 1206517, + -14747930, + 4559895, + -30123922, + -10897950, + }, + }, + { + FieldElement{ + -27643952, + -11493006, + 16282657, + -11036493, + 28414021, + -15012264, + 24191034, + 4541697, + -13338309, + 5500568, + }, + FieldElement{ + 12650548, + -1497113, + 9052871, + 11355358, + -17680037, + -8400164, + -17430592, + 12264343, + 10874051, + 13524335, + }, + FieldElement{ + 25556948, + -3045990, + 714651, + 2510400, + 23394682, + -10415330, + 33119038, + 5080568, + -22528059, + 5376628, + }, + }, + { + FieldElement{ + -26088264, + -4011052, + -17013699, + -3537628, + -6726793, + 1920897, + -22321305, + -9447443, + 4535768, + 1569007, + }, + FieldElement{ + -2255422, + 14606630, + -21692440, + -8039818, + 28430649, + 8775819, + -30494562, + 3044290, + 31848280, + 12543772, + }, + FieldElement{ + -22028579, + 2943893, + -31857513, + 6777306, + 13784462, + -4292203, + -27377195, + -2062731, + 7718482, + 14474653, + }, + }, + { + FieldElement{ + 2385315, + 2454213, + -22631320, + 46603, + -4437935, + -15680415, + 656965, + -7236665, + 24316168, + -5253567, + }, + FieldElement{ + 13741529, + 10911568, + -33233417, + -8603737, + -20177830, + -1033297, + 33040651, + -13424532, + -20729456, + 8321686, + }, + FieldElement{ + 21060490, + -2212744, + 15712757, + -4336099, + 1639040, + 10656336, + 23845965, + -11874838, + -9984458, + 608372, + }, + }, + { + FieldElement{ + -13672732, + -15087586, 
+ -10889693, + -7557059, + -6036909, + 11305547, + 1123968, + -6780577, + 27229399, + 23887, + }, + FieldElement{ + -23244140, + -294205, + -11744728, + 14712571, + -29465699, + -2029617, + 12797024, + -6440308, + -1633405, + 16678954, + }, + FieldElement{ + -29500620, + 4770662, + -16054387, + 14001338, + 7830047, + 9564805, + -1508144, + -4795045, + -17169265, + 4904953, + }, + }, + { + FieldElement{ + 24059557, + 14617003, + 19037157, + -15039908, + 19766093, + -14906429, + 5169211, + 16191880, + 2128236, + -4326833, + }, + FieldElement{ + -16981152, + 4124966, + -8540610, + -10653797, + 30336522, + -14105247, + -29806336, + 916033, + -6882542, + -2986532, + }, + FieldElement{ + -22630907, + 12419372, + -7134229, + -7473371, + -16478904, + 16739175, + 285431, + 2763829, + 15736322, + 4143876, + }, + }, + { + FieldElement{ + 2379352, + 11839345, + -4110402, + -5988665, + 11274298, + 794957, + 212801, + -14594663, + 23527084, + -16458268, + }, + FieldElement{ + 33431127, + -11130478, + -17838966, + -15626900, + 8909499, + 8376530, + -32625340, + 4087881, + -15188911, + -14416214, + }, + FieldElement{ + 1767683, + 7197987, + -13205226, + -2022635, + -13091350, + 448826, + 5799055, + 4357868, + -4774191, + -16323038, + }, + }, + }, + { + { + FieldElement{ + 6721966, + 13833823, + -23523388, + -1551314, + 26354293, + -11863321, + 23365147, + -3949732, + 7390890, + 2759800, + }, + FieldElement{ + 4409041, + 2052381, + 23373853, + 10530217, + 7676779, + -12885954, + 21302353, + -4264057, + 1244380, + -12919645, + }, + FieldElement{ + -4421239, + 7169619, + 4982368, + -2957590, + 30256825, + -2777540, + 14086413, + 9208236, + 15886429, + 16489664, + }, + }, + { + FieldElement{ + 1996075, + 10375649, + 14346367, + 13311202, + -6874135, + -16438411, + -13693198, + 398369, + -30606455, + -712933, + }, + FieldElement{ + -25307465, + 9795880, + -2777414, + 14878809, + -33531835, + 14780363, + 13348553, + 12076947, + -30836462, + 5113182, + }, + FieldElement{ + -17770784, + 11797796, + 31950843, + 13929123, + -25888302, + 12288344, + -30341101, + -7336386, + 13847711, + 5387222, + }, + }, + { + FieldElement{ + -18582163, + -3416217, + 17824843, + -2340966, + 22744343, + -10442611, + 8763061, + 3617786, + -19600662, + 10370991, + }, + FieldElement{ + 20246567, + -14369378, + 22358229, + -543712, + 18507283, + -10413996, + 14554437, + -8746092, + 32232924, + 16763880, + }, + FieldElement{ + 9648505, + 10094563, + 26416693, + 14745928, + -30374318, + -6472621, + 11094161, + 15689506, + 3140038, + -16510092, + }, + }, + { + FieldElement{ + -16160072, + 5472695, + 31895588, + 4744994, + 8823515, + 10365685, + -27224800, + 9448613, + -28774454, + 366295, + }, + FieldElement{ + 19153450, + 11523972, + -11096490, + -6503142, + -24647631, + 5420647, + 28344573, + 8041113, + 719605, + 11671788, + }, + FieldElement{ + 8678025, + 2694440, + -6808014, + 2517372, + 4964326, + 11152271, + -15432916, + -15266516, + 27000813, + -10195553, + }, + }, + { + FieldElement{ + -15157904, + 7134312, + 8639287, + -2814877, + -7235688, + 10421742, + 564065, + 5336097, + 6750977, + -14521026, + }, + FieldElement{ + 11836410, + -3979488, + 26297894, + 16080799, + 23455045, + 15735944, + 1695823, + -8819122, + 8169720, + 16220347, + }, + FieldElement{ + -18115838, + 8653647, + 17578566, + -6092619, + -8025777, + -16012763, + -11144307, + -2627664, + -5990708, + -14166033, + }, + }, + { + FieldElement{ + -23308498, + -10968312, + 15213228, + -10081214, + -30853605, + -11050004, + 27884329, + 2847284, + 2655861, + 1738395, + }, + 
FieldElement{ + -27537433, + -14253021, + -25336301, + -8002780, + -9370762, + 8129821, + 21651608, + -3239336, + -19087449, + -11005278, + }, + FieldElement{ + 1533110, + 3437855, + 23735889, + 459276, + 29970501, + 11335377, + 26030092, + 5821408, + 10478196, + 8544890, + }, + }, + { + FieldElement{ + 32173121, + -16129311, + 24896207, + 3921497, + 22579056, + -3410854, + 19270449, + 12217473, + 17789017, + -3395995, + }, + FieldElement{ + -30552961, + -2228401, + -15578829, + -10147201, + 13243889, + 517024, + 15479401, + -3853233, + 30460520, + 1052596, + }, + FieldElement{ + -11614875, + 13323618, + 32618793, + 8175907, + -15230173, + 12596687, + 27491595, + -4612359, + 3179268, + -9478891, + }, + }, + { + FieldElement{ + 31947069, + -14366651, + -4640583, + -15339921, + -15125977, + -6039709, + -14756777, + -16411740, + 19072640, + -9511060, + }, + FieldElement{ + 11685058, + 11822410, + 3158003, + -13952594, + 33402194, + -4165066, + 5977896, + -5215017, + 473099, + 5040608, + }, + FieldElement{ + -20290863, + 8198642, + -27410132, + 11602123, + 1290375, + -2799760, + 28326862, + 1721092, + -19558642, + -3131606, + }, + }, + }, + { + { + FieldElement{ + 7881532, + 10687937, + 7578723, + 7738378, + -18951012, + -2553952, + 21820786, + 8076149, + -27868496, + 11538389, + }, + FieldElement{ + -19935666, + 3899861, + 18283497, + -6801568, + -15728660, + -11249211, + 8754525, + 7446702, + -5676054, + 5797016, + }, + FieldElement{ + -11295600, + -3793569, + -15782110, + -7964573, + 12708869, + -8456199, + 2014099, + -9050574, + -2369172, + -5877341, + }, + }, + { + FieldElement{ + -22472376, + -11568741, + -27682020, + 1146375, + 18956691, + 16640559, + 1192730, + -3714199, + 15123619, + 10811505, + }, + FieldElement{ + 14352098, + -3419715, + -18942044, + 10822655, + 32750596, + 4699007, + -70363, + 15776356, + -28886779, + -11974553, + }, + FieldElement{ + -28241164, + -8072475, + -4978962, + -5315317, + 29416931, + 1847569, + -20654173, + -16484855, + 4714547, + -9600655, + }, + }, + { + FieldElement{ + 15200332, + 8368572, + 19679101, + 15970074, + -31872674, + 1959451, + 24611599, + -4543832, + -11745876, + 12340220, + }, + FieldElement{ + 12876937, + -10480056, + 33134381, + 6590940, + -6307776, + 14872440, + 9613953, + 8241152, + 15370987, + 9608631, + }, + FieldElement{ + -4143277, + -12014408, + 8446281, + -391603, + 4407738, + 13629032, + -7724868, + 15866074, + -28210621, + -8814099, + }, + }, + { + FieldElement{ + 26660628, + -15677655, + 8393734, + 358047, + -7401291, + 992988, + -23904233, + 858697, + 20571223, + 8420556, + }, + FieldElement{ + 14620715, + 13067227, + -15447274, + 8264467, + 14106269, + 15080814, + 33531827, + 12516406, + -21574435, + -12476749, + }, + FieldElement{ + 236881, + 10476226, + 57258, + -14677024, + 6472998, + 2466984, + 17258519, + 7256740, + 8791136, + 15069930, + }, + }, + { + FieldElement{ + 1276410, + -9371918, + 22949635, + -16322807, + -23493039, + -5702186, + 14711875, + 4874229, + -30663140, + -2331391, + }, + FieldElement{ + 5855666, + 4990204, + -13711848, + 7294284, + -7804282, + 1924647, + -1423175, + -7912378, + -33069337, + 9234253, + }, + FieldElement{ + 20590503, + -9018988, + 31529744, + -7352666, + -2706834, + 10650548, + 31559055, + -11609587, + 18979186, + 13396066, + }, + }, + { + FieldElement{ + 24474287, + 4968103, + 22267082, + 4407354, + 24063882, + -8325180, + -18816887, + 13594782, + 33514650, + 7021958, + }, + FieldElement{ + -11566906, + -6565505, + -21365085, + 15928892, + -26158305, + 4315421, + -25948728, + 
-3916677, + -21480480, + 12868082, + }, + FieldElement{ + -28635013, + 13504661, + 19988037, + -2132761, + 21078225, + 6443208, + -21446107, + 2244500, + -12455797, + -8089383, + }, + }, + { + FieldElement{ + -30595528, + 13793479, + -5852820, + 319136, + -25723172, + -6263899, + 33086546, + 8957937, + -15233648, + 5540521, + }, + FieldElement{ + -11630176, + -11503902, + -8119500, + -7643073, + 2620056, + 1022908, + -23710744, + -1568984, + -16128528, + -14962807, + }, + FieldElement{ + 23152971, + 775386, + 27395463, + 14006635, + -9701118, + 4649512, + 1689819, + 892185, + -11513277, + -15205948, + }, + }, + { + FieldElement{ + 9770129, + 9586738, + 26496094, + 4324120, + 1556511, + -3550024, + 27453819, + 4763127, + -19179614, + 5867134, + }, + FieldElement{ + -32765025, + 1927590, + 31726409, + -4753295, + 23962434, + -16019500, + 27846559, + 5931263, + -29749703, + -16108455, + }, + FieldElement{ + 27461885, + -2977536, + 22380810, + 1815854, + -23033753, + -3031938, + 7283490, + -15148073, + -19526700, + 7734629, + }, + }, + }, + { + { + FieldElement{ + -8010264, + -9590817, + -11120403, + 6196038, + 29344158, + -13430885, + 7585295, + -3176626, + 18549497, + 15302069, + }, + FieldElement{ + -32658337, + -6171222, + -7672793, + -11051681, + 6258878, + 13504381, + 10458790, + -6418461, + -8872242, + 8424746, + }, + FieldElement{ + 24687205, + 8613276, + -30667046, + -3233545, + 1863892, + -1830544, + 19206234, + 7134917, + -11284482, + -828919, + }, + }, + { + FieldElement{ + 11334899, + -9218022, + 8025293, + 12707519, + 17523892, + -10476071, + 10243738, + -14685461, + -5066034, + 16498837, + }, + FieldElement{ + 8911542, + 6887158, + -9584260, + -6958590, + 11145641, + -9543680, + 17303925, + -14124238, + 6536641, + 10543906, + }, + FieldElement{ + -28946384, + 15479763, + -17466835, + 568876, + -1497683, + 11223454, + -2669190, + -16625574, + -27235709, + 8876771, + }, + }, + { + FieldElement{ + -25742899, + -12566864, + -15649966, + -846607, + -33026686, + -796288, + -33481822, + 15824474, + -604426, + -9039817, + }, + FieldElement{ + 10330056, + 70051, + 7957388, + -9002667, + 9764902, + 15609756, + 27698697, + -4890037, + 1657394, + 3084098, + }, + FieldElement{ + 10477963, + -7470260, + 12119566, + -13250805, + 29016247, + -5365589, + 31280319, + 14396151, + -30233575, + 15272409, + }, + }, + { + FieldElement{ + -12288309, + 3169463, + 28813183, + 16658753, + 25116432, + -5630466, + -25173957, + -12636138, + -25014757, + 1950504, + }, + FieldElement{ + -26180358, + 9489187, + 11053416, + -14746161, + -31053720, + 5825630, + -8384306, + -8767532, + 15341279, + 8373727, + }, + FieldElement{ + 28685821, + 7759505, + -14378516, + -12002860, + -31971820, + 4079242, + 298136, + -10232602, + -2878207, + 15190420, + }, + }, + { + FieldElement{ + -32932876, + 13806336, + -14337485, + -15794431, + -24004620, + 10940928, + 8669718, + 2742393, + -26033313, + -6875003, + }, + FieldElement{ + -1580388, + -11729417, + -25979658, + -11445023, + -17411874, + -10912854, + 9291594, + -16247779, + -12154742, + 6048605, + }, + FieldElement{ + -30305315, + 14843444, + 1539301, + 11864366, + 20201677, + 1900163, + 13934231, + 5128323, + 11213262, + 9168384, + }, + }, + { + FieldElement{ + -26280513, + 11007847, + 19408960, + -940758, + -18592965, + -4328580, + -5088060, + -11105150, + 20470157, + -16398701, + }, + FieldElement{ + -23136053, + 9282192, + 14855179, + -15390078, + -7362815, + -14408560, + -22783952, + 14461608, + 14042978, + 5230683, + }, + FieldElement{ + 29969567, + -2741594, + 
-16711867, + -8552442, + 9175486, + -2468974, + 21556951, + 3506042, + -5933891, + -12449708, + }, + }, + { + FieldElement{ + -3144746, + 8744661, + 19704003, + 4581278, + -20430686, + 6830683, + -21284170, + 8971513, + -28539189, + 15326563, + }, + FieldElement{ + -19464629, + 10110288, + -17262528, + -3503892, + -23500387, + 1355669, + -15523050, + 15300988, + -20514118, + 9168260, + }, + FieldElement{ + -5353335, + 4488613, + -23803248, + 16314347, + 7780487, + -15638939, + -28948358, + 9601605, + 33087103, + -9011387, + }, + }, + { + FieldElement{ + -19443170, + -15512900, + -20797467, + -12445323, + -29824447, + 10229461, + -27444329, + -15000531, + -5996870, + 15664672, + }, + FieldElement{ + 23294591, + -16632613, + -22650781, + -8470978, + 27844204, + 11461195, + 13099750, + -2460356, + 18151676, + 13417686, + }, + FieldElement{ + -24722913, + -4176517, + -31150679, + 5988919, + -26858785, + 6685065, + 1661597, + -12551441, + 15271676, + -15452665, + }, + }, + }, + { + { + FieldElement{ + 11433042, + -13228665, + 8239631, + -5279517, + -1985436, + -725718, + -18698764, + 2167544, + -6921301, + -13440182, + }, + FieldElement{ + -31436171, + 15575146, + 30436815, + 12192228, + -22463353, + 9395379, + -9917708, + -8638997, + 12215110, + 12028277, + }, + FieldElement{ + 14098400, + 6555944, + 23007258, + 5757252, + -15427832, + -12950502, + 30123440, + 4617780, + -16900089, + -655628, + }, + }, + { + FieldElement{ + -4026201, + -15240835, + 11893168, + 13718664, + -14809462, + 1847385, + -15819999, + 10154009, + 23973261, + -12684474, + }, + FieldElement{ + -26531820, + -3695990, + -1908898, + 2534301, + -31870557, + -16550355, + 18341390, + -11419951, + 32013174, + -10103539, + }, + FieldElement{ + -25479301, + 10876443, + -11771086, + -14625140, + -12369567, + 1838104, + 21911214, + 6354752, + 4425632, + -837822, + }, + }, + { + FieldElement{ + -10433389, + -14612966, + 22229858, + -3091047, + -13191166, + 776729, + -17415375, + -12020462, + 4725005, + 14044970, + }, + FieldElement{ + 19268650, + -7304421, + 1555349, + 8692754, + -21474059, + -9910664, + 6347390, + -1411784, + -19522291, + -16109756, + }, + FieldElement{ + -24864089, + 12986008, + -10898878, + -5558584, + -11312371, + -148526, + 19541418, + 8180106, + 9282262, + 10282508, + }, + }, + { + FieldElement{ + -26205082, + 4428547, + -8661196, + -13194263, + 4098402, + -14165257, + 15522535, + 8372215, + 5542595, + -10702683, + }, + FieldElement{ + -10562541, + 14895633, + 26814552, + -16673850, + -17480754, + -2489360, + -2781891, + 6993761, + -18093885, + 10114655, + }, + FieldElement{ + -20107055, + -929418, + 31422704, + 10427861, + -7110749, + 6150669, + -29091755, + -11529146, + 25953725, + -106158, + }, + }, + { + FieldElement{ + -4234397, + -8039292, + -9119125, + 3046000, + 2101609, + -12607294, + 19390020, + 6094296, + -3315279, + 12831125, + }, + FieldElement{ + -15998678, + 7578152, + 5310217, + 14408357, + -33548620, + -224739, + 31575954, + 6326196, + 7381791, + -2421839, + }, + FieldElement{ + -20902779, + 3296811, + 24736065, + -16328389, + 18374254, + 7318640, + 6295303, + 8082724, + -15362489, + 12339664, + }, + }, + { + FieldElement{ + 27724736, + 2291157, + 6088201, + -14184798, + 1792727, + 5857634, + 13848414, + 15768922, + 25091167, + 14856294, + }, + FieldElement{ + -18866652, + 8331043, + 24373479, + 8541013, + -701998, + -9269457, + 12927300, + -12695493, + -22182473, + -9012899, + }, + FieldElement{ + -11423429, + -5421590, + 11632845, + 3405020, + 30536730, + -11674039, + -27260765, + 13866390, + 
30146206, + 9142070, + }, + }, + { + FieldElement{ + 3924129, + -15307516, + -13817122, + -10054960, + 12291820, + -668366, + -27702774, + 9326384, + -8237858, + 4171294, + }, + FieldElement{ + -15921940, + 16037937, + 6713787, + 16606682, + -21612135, + 2790944, + 26396185, + 3731949, + 345228, + -5462949, + }, + FieldElement{ + -21327538, + 13448259, + 25284571, + 1143661, + 20614966, + -8849387, + 2031539, + -12391231, + -16253183, + -13582083, + }, + }, + { + FieldElement{ + 31016211, + -16722429, + 26371392, + -14451233, + -5027349, + 14854137, + 17477601, + 3842657, + 28012650, + -16405420, + }, + FieldElement{ + -5075835, + 9368966, + -8562079, + -4600902, + -15249953, + 6970560, + -9189873, + 16292057, + -8867157, + 3507940, + }, + FieldElement{ + 29439664, + 3537914, + 23333589, + 6997794, + -17555561, + -11018068, + -15209202, + -15051267, + -9164929, + 6580396, + }, + }, + }, + { + { + FieldElement{ + -12185861, + -7679788, + 16438269, + 10826160, + -8696817, + -6235611, + 17860444, + -9273846, + -2095802, + 9304567, + }, + FieldElement{ + 20714564, + -4336911, + 29088195, + 7406487, + 11426967, + -5095705, + 14792667, + -14608617, + 5289421, + -477127, + }, + FieldElement{ + -16665533, + -10650790, + -6160345, + -13305760, + 9192020, + -1802462, + 17271490, + 12349094, + 26939669, + -3752294, + }, + }, + { + FieldElement{ + -12889898, + 9373458, + 31595848, + 16374215, + 21471720, + 13221525, + -27283495, + -12348559, + -3698806, + 117887, + }, + FieldElement{ + 22263325, + -6560050, + 3984570, + -11174646, + -15114008, + -566785, + 28311253, + 5358056, + -23319780, + 541964, + }, + FieldElement{ + 16259219, + 3261970, + 2309254, + -15534474, + -16885711, + -4581916, + 24134070, + -16705829, + -13337066, + -13552195, + }, + }, + { + FieldElement{ + 9378160, + -13140186, + -22845982, + -12745264, + 28198281, + -7244098, + -2399684, + -717351, + 690426, + 14876244, + }, + FieldElement{ + 24977353, + -314384, + -8223969, + -13465086, + 28432343, + -1176353, + -13068804, + -12297348, + -22380984, + 6618999, + }, + FieldElement{ + -1538174, + 11685646, + 12944378, + 13682314, + -24389511, + -14413193, + 8044829, + -13817328, + 32239829, + -5652762, + }, + }, + { + FieldElement{ + -18603066, + 4762990, + -926250, + 8885304, + -28412480, + -3187315, + 9781647, + -10350059, + 32779359, + 5095274, + }, + FieldElement{ + -33008130, + -5214506, + -32264887, + -3685216, + 9460461, + -9327423, + -24601656, + 14506724, + 21639561, + -2630236, + }, + FieldElement{ + -16400943, + -13112215, + 25239338, + 15531969, + 3987758, + -4499318, + -1289502, + -6863535, + 17874574, + 558605, + }, + }, + { + FieldElement{ + -13600129, + 10240081, + 9171883, + 16131053, + -20869254, + 9599700, + 33499487, + 5080151, + 2085892, + 5119761, + }, + FieldElement{ + -22205145, + -2519528, + -16381601, + 414691, + -25019550, + 2170430, + 30634760, + -8363614, + -31999993, + -5759884, + }, + FieldElement{ + -6845704, + 15791202, + 8550074, + -1312654, + 29928809, + -12092256, + 27534430, + -7192145, + -22351378, + 12961482, + }, + }, + { + FieldElement{ + -24492060, + -9570771, + 10368194, + 11582341, + -23397293, + -2245287, + 16533930, + 8206996, + -30194652, + -5159638, + }, + FieldElement{ + -11121496, + -3382234, + 2307366, + 6362031, + -135455, + 8868177, + -16835630, + 7031275, + 7589640, + 8945490, + }, + FieldElement{ + -32152748, + 8917967, + 6661220, + -11677616, + -1192060, + -15793393, + 7251489, + -11182180, + 24099109, + -14456170, + }, + }, + { + FieldElement{ + 5019558, + -7907470, + 4244127, + 
-14714356, + -26933272, + 6453165, + -19118182, + -13289025, + -6231896, + -10280736, + }, + FieldElement{ + 10853594, + 10721687, + 26480089, + 5861829, + -22995819, + 1972175, + -1866647, + -10557898, + -3363451, + -6441124, + }, + FieldElement{ + -17002408, + 5906790, + 221599, + -6563147, + 7828208, + -13248918, + 24362661, + -2008168, + -13866408, + 7421392, + }, + }, + { + FieldElement{ + 8139927, + -6546497, + 32257646, + -5890546, + 30375719, + 1886181, + -21175108, + 15441252, + 28826358, + -4123029, + }, + FieldElement{ + 6267086, + 9695052, + 7709135, + -16603597, + -32869068, + -1886135, + 14795160, + -7840124, + 13746021, + -1742048, + }, + FieldElement{ + 28584902, + 7787108, + -6732942, + -15050729, + 22846041, + -7571236, + -3181936, + -363524, + 4771362, + -8419958, + }, + }, + }, + { + { + FieldElement{ + 24949256, + 6376279, + -27466481, + -8174608, + -18646154, + -9930606, + 33543569, + -12141695, + 3569627, + 11342593, + }, + FieldElement{ + 26514989, + 4740088, + 27912651, + 3697550, + 19331575, + -11472339, + 6809886, + 4608608, + 7325975, + -14801071, + }, + FieldElement{ + -11618399, + -14554430, + -24321212, + 7655128, + -1369274, + 5214312, + -27400540, + 10258390, + -17646694, + -8186692, + }, + }, + { + FieldElement{ + 11431204, + 15823007, + 26570245, + 14329124, + 18029990, + 4796082, + -31446179, + 15580664, + 9280358, + -3973687, + }, + FieldElement{ + -160783, + -10326257, + -22855316, + -4304997, + -20861367, + -13621002, + -32810901, + -11181622, + -15545091, + 4387441, + }, + FieldElement{ + -20799378, + 12194512, + 3937617, + -5805892, + -27154820, + 9340370, + -24513992, + 8548137, + 20617071, + -7482001, + }, + }, + { + FieldElement{ + -938825, + -3930586, + -8714311, + 16124718, + 24603125, + -6225393, + -13775352, + -11875822, + 24345683, + 10325460, + }, + FieldElement{ + -19855277, + -1568885, + -22202708, + 8714034, + 14007766, + 6928528, + 16318175, + -1010689, + 4766743, + 3552007, + }, + FieldElement{ + -21751364, + -16730916, + 1351763, + -803421, + -4009670, + 3950935, + 3217514, + 14481909, + 10988822, + -3994762, + }, + }, + { + FieldElement{ + 15564307, + -14311570, + 3101243, + 5684148, + 30446780, + -8051356, + 12677127, + -6505343, + -8295852, + 13296005, + }, + FieldElement{ + -9442290, + 6624296, + -30298964, + -11913677, + -4670981, + -2057379, + 31521204, + 9614054, + -30000824, + 12074674, + }, + FieldElement{ + 4771191, + -135239, + 14290749, + -13089852, + 27992298, + 14998318, + -1413936, + -1556716, + 29832613, + -16391035, + }, + }, + { + FieldElement{ + 7064884, + -7541174, + -19161962, + -5067537, + -18891269, + -2912736, + 25825242, + 5293297, + -27122660, + 13101590, + }, + FieldElement{ + -2298563, + 2439670, + -7466610, + 1719965, + -27267541, + -16328445, + 32512469, + -5317593, + -30356070, + -4190957, + }, + FieldElement{ + -30006540, + 10162316, + -33180176, + 3981723, + -16482138, + -13070044, + 14413974, + 9515896, + 19568978, + 9628812, + }, + }, + { + FieldElement{ + 33053803, + 199357, + 15894591, + 1583059, + 27380243, + -4580435, + -17838894, + -6106839, + -6291786, + 3437740, + }, + FieldElement{ + -18978877, + 3884493, + 19469877, + 12726490, + 15913552, + 13614290, + -22961733, + 70104, + 7463304, + 4176122, + }, + FieldElement{ + -27124001, + 10659917, + 11482427, + -16070381, + 12771467, + -6635117, + -32719404, + -5322751, + 24216882, + 5944158, + }, + }, + { + FieldElement{ + 8894125, + 7450974, + -2664149, + -9765752, + -28080517, + -12389115, + 19345746, + 14680796, + 11632993, + 5847885, + }, + 
FieldElement{ + 26942781, + -2315317, + 9129564, + -4906607, + 26024105, + 11769399, + -11518837, + 6367194, + -9727230, + 4782140, + }, + FieldElement{ + 19916461, + -4828410, + -22910704, + -11414391, + 25606324, + -5972441, + 33253853, + 8220911, + 6358847, + -1873857, + }, + }, + { + FieldElement{ + 801428, + -2081702, + 16569428, + 11065167, + 29875704, + 96627, + 7908388, + -4480480, + -13538503, + 1387155, + }, + FieldElement{ + 19646058, + 5720633, + -11416706, + 12814209, + 11607948, + 12749789, + 14147075, + 15156355, + -21866831, + 11835260, + }, + FieldElement{ + 19299512, + 1155910, + 28703737, + 14890794, + 2925026, + 7269399, + 26121523, + 15467869, + -26560550, + 5052483, + }, + }, + }, + { + { + FieldElement{ + -3017432, + 10058206, + 1980837, + 3964243, + 22160966, + 12322533, + -6431123, + -12618185, + 12228557, + -7003677, + }, + FieldElement{ + 32944382, + 14922211, + -22844894, + 5188528, + 21913450, + -8719943, + 4001465, + 13238564, + -6114803, + 8653815, + }, + FieldElement{ + 22865569, + -4652735, + 27603668, + -12545395, + 14348958, + 8234005, + 24808405, + 5719875, + 28483275, + 2841751, + }, + }, + { + FieldElement{ + -16420968, + -1113305, + -327719, + -12107856, + 21886282, + -15552774, + -1887966, + -315658, + 19932058, + -12739203, + }, + FieldElement{ + -11656086, + 10087521, + -8864888, + -5536143, + -19278573, + -3055912, + 3999228, + 13239134, + -4777469, + -13910208, + }, + FieldElement{ + 1382174, + -11694719, + 17266790, + 9194690, + -13324356, + 9720081, + 20403944, + 11284705, + -14013818, + 3093230, + }, + }, + { + FieldElement{ + 16650921, + -11037932, + -1064178, + 1570629, + -8329746, + 7352753, + -302424, + 16271225, + -24049421, + -6691850, + }, + FieldElement{ + -21911077, + -5927941, + -4611316, + -5560156, + -31744103, + -10785293, + 24123614, + 15193618, + -21652117, + -16739389, + }, + FieldElement{ + -9935934, + -4289447, + -25279823, + 4372842, + 2087473, + 10399484, + 31870908, + 14690798, + 17361620, + 11864968, + }, + }, + { + FieldElement{ + -11307610, + 6210372, + 13206574, + 5806320, + -29017692, + -13967200, + -12331205, + -7486601, + -25578460, + -16240689, + }, + FieldElement{ + 14668462, + -12270235, + 26039039, + 15305210, + 25515617, + 4542480, + 10453892, + 6577524, + 9145645, + -6443880, + }, + FieldElement{ + 5974874, + 3053895, + -9433049, + -10385191, + -31865124, + 3225009, + -7972642, + 3936128, + -5652273, + -3050304, + }, + }, + { + FieldElement{ + 30625386, + -4729400, + -25555961, + -12792866, + -20484575, + 7695099, + 17097188, + -16303496, + -27999779, + 1803632, + }, + FieldElement{ + -3553091, + 9865099, + -5228566, + 4272701, + -5673832, + -16689700, + 14911344, + 12196514, + -21405489, + 7047412, + }, + FieldElement{ + 20093277, + 9920966, + -11138194, + -5343857, + 13161587, + 12044805, + -32856851, + 4124601, + -32343828, + -10257566, + }, + }, + { + FieldElement{ + -20788824, + 14084654, + -13531713, + 7842147, + 19119038, + -13822605, + 4752377, + -8714640, + -21679658, + 2288038, + }, + FieldElement{ + -26819236, + -3283715, + 29965059, + 3039786, + -14473765, + 2540457, + 29457502, + 14625692, + -24819617, + 12570232, + }, + FieldElement{ + -1063558, + -11551823, + 16920318, + 12494842, + 1278292, + -5869109, + -21159943, + -3498680, + -11974704, + 4724943, + }, + }, + { + FieldElement{ + 17960970, + -11775534, + -4140968, + -9702530, + -8876562, + -1410617, + -12907383, + -8659932, + -29576300, + 1903856, + }, + FieldElement{ + 23134274, + -14279132, + -10681997, + -1611936, + 20684485, + 15770816, + 
-12989750, + 3190296, + 26955097, + 14109738, + }, + FieldElement{ + 15308788, + 5320727, + -30113809, + -14318877, + 22902008, + 7767164, + 29425325, + -11277562, + 31960942, + 11934971, + }, + }, + { + FieldElement{ + -27395711, + 8435796, + 4109644, + 12222639, + -24627868, + 14818669, + 20638173, + 4875028, + 10491392, + 1379718, + }, + FieldElement{ + -13159415, + 9197841, + 3875503, + -8936108, + -1383712, + -5879801, + 33518459, + 16176658, + 21432314, + 12180697, + }, + FieldElement{ + -11787308, + 11500838, + 13787581, + -13832590, + -22430679, + 10140205, + 1465425, + 12689540, + -10301319, + -13872883, + }, + }, + }, + { + { + FieldElement{ + 5414091, + -15386041, + -21007664, + 9643570, + 12834970, + 1186149, + -2622916, + -1342231, + 26128231, + 6032912, + }, + FieldElement{ + -26337395, + -13766162, + 32496025, + -13653919, + 17847801, + -12669156, + 3604025, + 8316894, + -25875034, + -10437358, + }, + FieldElement{ + 3296484, + 6223048, + 24680646, + -12246460, + -23052020, + 5903205, + -8862297, + -4639164, + 12376617, + 3188849, + }, + }, + { + FieldElement{ + 29190488, + -14659046, + 27549113, + -1183516, + 3520066, + -10697301, + 32049515, + -7309113, + -16109234, + -9852307, + }, + FieldElement{ + -14744486, + -9309156, + 735818, + -598978, + -20407687, + -5057904, + 25246078, + -15795669, + 18640741, + -960977, + }, + FieldElement{ + -6928835, + -16430795, + 10361374, + 5642961, + 4910474, + 12345252, + -31638386, + -494430, + 10530747, + 1053335, + }, + }, + { + FieldElement{ + -29265967, + -14186805, + -13538216, + -12117373, + -19457059, + -10655384, + -31462369, + -2948985, + 24018831, + 15026644, + }, + FieldElement{ + -22592535, + -3145277, + -2289276, + 5953843, + -13440189, + 9425631, + 25310643, + 13003497, + -2314791, + -15145616, + }, + FieldElement{ + -27419985, + -603321, + -8043984, + -1669117, + -26092265, + 13987819, + -27297622, + 187899, + -23166419, + -2531735, + }, + }, + { + FieldElement{ + -21744398, + -13810475, + 1844840, + 5021428, + -10434399, + -15911473, + 9716667, + 16266922, + -5070217, + 726099, + }, + FieldElement{ + 29370922, + -6053998, + 7334071, + -15342259, + 9385287, + 2247707, + -13661962, + -4839461, + 30007388, + -15823341, + }, + FieldElement{ + -936379, + 16086691, + 23751945, + -543318, + -1167538, + -5189036, + 9137109, + 730663, + 9835848, + 4555336, + }, + }, + { + FieldElement{ + -23376435, + 1410446, + -22253753, + -12899614, + 30867635, + 15826977, + 17693930, + 544696, + -11985298, + 12422646, + }, + FieldElement{ + 31117226, + -12215734, + -13502838, + 6561947, + -9876867, + -12757670, + -5118685, + -4096706, + 29120153, + 13924425, + }, + FieldElement{ + -17400879, + -14233209, + 19675799, + -2734756, + -11006962, + -5858820, + -9383939, + -11317700, + 7240931, + -237388, + }, + }, + { + FieldElement{ + -31361739, + -11346780, + -15007447, + -5856218, + -22453340, + -12152771, + 1222336, + 4389483, + 3293637, + -15551743, + }, + FieldElement{ + -16684801, + -14444245, + 11038544, + 11054958, + -13801175, + -3338533, + -24319580, + 7733547, + 12796905, + -6335822, + }, + FieldElement{ + -8759414, + -10817836, + -25418864, + 10783769, + -30615557, + -9746811, + -28253339, + 3647836, + 3222231, + -11160462, + }, + }, + { + FieldElement{ + 18606113, + 1693100, + -25448386, + -15170272, + 4112353, + 10045021, + 23603893, + -2048234, + -7550776, + 2484985, + }, + FieldElement{ + 9255317, + -3131197, + -12156162, + -1004256, + 13098013, + -9214866, + 16377220, + -2102812, + -19802075, + -3034702, + }, + FieldElement{ + 
-22729289, + 7496160, + -5742199, + 11329249, + 19991973, + -3347502, + -31718148, + 9936966, + -30097688, + -10618797, + }, + }, + { + FieldElement{ + 21878590, + -5001297, + 4338336, + 13643897, + -3036865, + 13160960, + 19708896, + 5415497, + -7360503, + -4109293, + }, + FieldElement{ + 27736861, + 10103576, + 12500508, + 8502413, + -3413016, + -9633558, + 10436918, + -1550276, + -23659143, + -8132100, + }, + FieldElement{ + 19492550, + -12104365, + -29681976, + -852630, + -3208171, + 12403437, + 30066266, + 8367329, + 13243957, + 8709688, + }, + }, + }, + { + { + FieldElement{ + 12015105, + 2801261, + 28198131, + 10151021, + 24818120, + -4743133, + -11194191, + -5645734, + 5150968, + 7274186, + }, + FieldElement{ + 2831366, + -12492146, + 1478975, + 6122054, + 23825128, + -12733586, + 31097299, + 6083058, + 31021603, + -9793610, + }, + FieldElement{ + -2529932, + -2229646, + 445613, + 10720828, + -13849527, + -11505937, + -23507731, + 16354465, + 15067285, + -14147707, + }, + }, + { + FieldElement{ + 7840942, + 14037873, + -33364863, + 15934016, + -728213, + -3642706, + 21403988, + 1057586, + -19379462, + -12403220, + }, + FieldElement{ + 915865, + -16469274, + 15608285, + -8789130, + -24357026, + 6060030, + -17371319, + 8410997, + -7220461, + 16527025, + }, + FieldElement{ + 32922597, + -556987, + 20336074, + -16184568, + 10903705, + -5384487, + 16957574, + 52992, + 23834301, + 6588044, + }, + }, + { + FieldElement{ + 32752030, + 11232950, + 3381995, + -8714866, + 22652988, + -10744103, + 17159699, + 16689107, + -20314580, + -1305992, + }, + FieldElement{ + -4689649, + 9166776, + -25710296, + -10847306, + 11576752, + 12733943, + 7924251, + -2752281, + 1976123, + -7249027, + }, + FieldElement{ + 21251222, + 16309901, + -2983015, + -6783122, + 30810597, + 12967303, + 156041, + -3371252, + 12331345, + -8237197, + }, + }, + { + FieldElement{ + 8651614, + -4477032, + -16085636, + -4996994, + 13002507, + 2950805, + 29054427, + -5106970, + 10008136, + -4667901, + }, + FieldElement{ + 31486080, + 15114593, + -14261250, + 12951354, + 14369431, + -7387845, + 16347321, + -13662089, + 8684155, + -10532952, + }, + FieldElement{ + 19443825, + 11385320, + 24468943, + -9659068, + -23919258, + 2187569, + -26263207, + -6086921, + 31316348, + 14219878, + }, + }, + { + FieldElement{ + -28594490, + 1193785, + 32245219, + 11392485, + 31092169, + 15722801, + 27146014, + 6992409, + 29126555, + 9207390, + }, + FieldElement{ + 32382935, + 1110093, + 18477781, + 11028262, + -27411763, + -7548111, + -4980517, + 10843782, + -7957600, + -14435730, + }, + FieldElement{ + 2814918, + 7836403, + 27519878, + -7868156, + -20894015, + -11553689, + -21494559, + 8550130, + 28346258, + 1994730, + }, + }, + { + FieldElement{ + -19578299, + 8085545, + -14000519, + -3948622, + 2785838, + -16231307, + -19516951, + 7174894, + 22628102, + 8115180, + }, + FieldElement{ + -30405132, + 955511, + -11133838, + -15078069, + -32447087, + -13278079, + -25651578, + 3317160, + -9943017, + 930272, + }, + FieldElement{ + -15303681, + -6833769, + 28856490, + 1357446, + 23421993, + 1057177, + 24091212, + -1388970, + -22765376, + -10650715, + }, + }, + { + FieldElement{ + -22751231, + -5303997, + -12907607, + -12768866, + -15811511, + -7797053, + -14839018, + -16554220, + -1867018, + 8398970, + }, + FieldElement{ + -31969310, + 2106403, + -4736360, + 1362501, + 12813763, + 16200670, + 22981545, + -6291273, + 18009408, + -15772772, + }, + FieldElement{ + -17220923, + -9545221, + -27784654, + 14166835, + 29815394, + 7444469, + 29551787, + 
-3727419, + 19288549, + 1325865, + }, + }, + { + FieldElement{ + 15100157, + -15835752, + -23923978, + -1005098, + -26450192, + 15509408, + 12376730, + -3479146, + 33166107, + -8042750, + }, + FieldElement{ + 20909231, + 13023121, + -9209752, + 16251778, + -5778415, + -8094914, + 12412151, + 10018715, + 2213263, + -13878373, + }, + FieldElement{ + 32529814, + -11074689, + 30361439, + -16689753, + -9135940, + 1513226, + 22922121, + 6382134, + -5766928, + 8371348, + }, + }, + }, + { + { + FieldElement{ + 9923462, + 11271500, + 12616794, + 3544722, + -29998368, + -1721626, + 12891687, + -8193132, + -26442943, + 10486144, + }, + FieldElement{ + -22597207, + -7012665, + 8587003, + -8257861, + 4084309, + -12970062, + 361726, + 2610596, + -23921530, + -11455195, + }, + FieldElement{ + 5408411, + -1136691, + -4969122, + 10561668, + 24145918, + 14240566, + 31319731, + -4235541, + 19985175, + -3436086, + }, + }, + { + FieldElement{ + -13994457, + 16616821, + 14549246, + 3341099, + 32155958, + 13648976, + -17577068, + 8849297, + 65030, + 8370684, + }, + FieldElement{ + -8320926, + -12049626, + 31204563, + 5839400, + -20627288, + -1057277, + -19442942, + 6922164, + 12743482, + -9800518, + }, + FieldElement{ + -2361371, + 12678785, + 28815050, + 4759974, + -23893047, + 4884717, + 23783145, + 11038569, + 18800704, + 255233, + }, + }, + { + FieldElement{ + -5269658, + -1773886, + 13957886, + 7990715, + 23132995, + 728773, + 13393847, + 9066957, + 19258688, + -14753793, + }, + FieldElement{ + -2936654, + -10827535, + -10432089, + 14516793, + -3640786, + 4372541, + -31934921, + 2209390, + -1524053, + 2055794, + }, + FieldElement{ + 580882, + 16705327, + 5468415, + -2683018, + -30926419, + -14696000, + -7203346, + -8994389, + -30021019, + 7394435, + }, + }, + { + FieldElement{ + 23838809, + 1822728, + -15738443, + 15242727, + 8318092, + -3733104, + -21672180, + -3492205, + -4821741, + 14799921, + }, + FieldElement{ + 13345610, + 9759151, + 3371034, + -16137791, + 16353039, + 8577942, + 31129804, + 13496856, + -9056018, + 7402518, + }, + FieldElement{ + 2286874, + -4435931, + -20042458, + -2008336, + -13696227, + 5038122, + 11006906, + -15760352, + 8205061, + 1607563, + }, + }, + { + FieldElement{ + 14414086, + -8002132, + 3331830, + -3208217, + 22249151, + -5594188, + 18364661, + -2906958, + 30019587, + -9029278, + }, + FieldElement{ + -27688051, + 1585953, + -10775053, + 931069, + -29120221, + -11002319, + -14410829, + 12029093, + 9944378, + 8024, + }, + FieldElement{ + 4368715, + -3709630, + 29874200, + -15022983, + -20230386, + -11410704, + -16114594, + -999085, + -8142388, + 5640030, + }, + }, + { + FieldElement{ + 10299610, + 13746483, + 11661824, + 16234854, + 7630238, + 5998374, + 9809887, + -16694564, + 15219798, + -14327783, + }, + FieldElement{ + 27425505, + -5719081, + 3055006, + 10660664, + 23458024, + 595578, + -15398605, + -1173195, + -18342183, + 9742717, + }, + FieldElement{ + 6744077, + 2427284, + 26042789, + 2720740, + -847906, + 1118974, + 32324614, + 7406442, + 12420155, + 1994844, + }, + }, + { + FieldElement{ + 14012521, + -5024720, + -18384453, + -9578469, + -26485342, + -3936439, + -13033478, + -10909803, + 24319929, + -6446333, + }, + FieldElement{ + 16412690, + -4507367, + 10772641, + 15929391, + -17068788, + -4658621, + 10555945, + -10484049, + -30102368, + -4739048, + }, + FieldElement{ + 22397382, + -7767684, + -9293161, + -12792868, + 17166287, + -9755136, + -27333065, + 6199366, + 21880021, + -12250760, + }, + }, + { + FieldElement{ + -4283307, + 5368523, + -31117018, + 
8163389, + -30323063, + 3209128, + 16557151, + 8890729, + 8840445, + 4957760, + }, + FieldElement{ + -15447727, + 709327, + -6919446, + -10870178, + -29777922, + 6522332, + -21720181, + 12130072, + -14796503, + 5005757, + }, + FieldElement{ + -2114751, + -14308128, + 23019042, + 15765735, + -25269683, + 6002752, + 10183197, + -13239326, + -16395286, + -2176112, + }, + }, + }, + { + { + FieldElement{ + -19025756, + 1632005, + 13466291, + -7995100, + -23640451, + 16573537, + -32013908, + -3057104, + 22208662, + 2000468, + }, + FieldElement{ + 3065073, + -1412761, + -25598674, + -361432, + -17683065, + -5703415, + -8164212, + 11248527, + -3691214, + -7414184, + }, + FieldElement{ + 10379208, + -6045554, + 8877319, + 1473647, + -29291284, + -12507580, + 16690915, + 2553332, + -3132688, + 16400289, + }, + }, + { + FieldElement{ + 15716668, + 1254266, + -18472690, + 7446274, + -8448918, + 6344164, + -22097271, + -7285580, + 26894937, + 9132066, + }, + FieldElement{ + 24158887, + 12938817, + 11085297, + -8177598, + -28063478, + -4457083, + -30576463, + 64452, + -6817084, + -2692882, + }, + FieldElement{ + 13488534, + 7794716, + 22236231, + 5989356, + 25426474, + -12578208, + 2350710, + -3418511, + -4688006, + 2364226, + }, + }, + { + FieldElement{ + 16335052, + 9132434, + 25640582, + 6678888, + 1725628, + 8517937, + -11807024, + -11697457, + 15445875, + -7798101, + }, + FieldElement{ + 29004207, + -7867081, + 28661402, + -640412, + -12794003, + -7943086, + 31863255, + -4135540, + -278050, + -15759279, + }, + FieldElement{ + -6122061, + -14866665, + -28614905, + 14569919, + -10857999, + -3591829, + 10343412, + -6976290, + -29828287, + -10815811, + }, + }, + { + FieldElement{ + 27081650, + 3463984, + 14099042, + -4517604, + 1616303, + -6205604, + 29542636, + 15372179, + 17293797, + 960709, + }, + FieldElement{ + 20263915, + 11434237, + -5765435, + 11236810, + 13505955, + -10857102, + -16111345, + 6493122, + -19384511, + 7639714, + }, + FieldElement{ + -2830798, + -14839232, + 25403038, + -8215196, + -8317012, + -16173699, + 18006287, + -16043750, + 29994677, + -15808121, + }, + }, + { + FieldElement{ + 9769828, + 5202651, + -24157398, + -13631392, + -28051003, + -11561624, + -24613141, + -13860782, + -31184575, + 709464, + }, + FieldElement{ + 12286395, + 13076066, + -21775189, + -1176622, + -25003198, + 4057652, + -32018128, + -8890874, + 16102007, + 13205847, + }, + FieldElement{ + 13733362, + 5599946, + 10557076, + 3195751, + -5557991, + 8536970, + -25540170, + 8525972, + 10151379, + 10394400, + }, + }, + { + FieldElement{ + 4024660, + -16137551, + 22436262, + 12276534, + -9099015, + -2686099, + 19698229, + 11743039, + -33302334, + 8934414, + }, + FieldElement{ + -15879800, + -4525240, + -8580747, + -2934061, + 14634845, + -698278, + -9449077, + 3137094, + -11536886, + 11721158, + }, + FieldElement{ + 17555939, + -5013938, + 8268606, + 2331751, + -22738815, + 9761013, + 9319229, + 8835153, + -9205489, + -1280045, + }, + }, + { + FieldElement{ + -461409, + -7830014, + 20614118, + 16688288, + -7514766, + -4807119, + 22300304, + 505429, + 6108462, + -6183415, + }, + FieldElement{ + -5070281, + 12367917, + -30663534, + 3234473, + 32617080, + -8422642, + 29880583, + -13483331, + -26898490, + -7867459, + }, + FieldElement{ + -31975283, + 5726539, + 26934134, + 10237677, + -3173717, + -605053, + 24199304, + 3795095, + 7592688, + -14992079, + }, + }, + { + FieldElement{ + 21594432, + -14964228, + 17466408, + -4077222, + 32537084, + 2739898, + 6407723, + 12018833, + -28256052, + 4298412, + }, + 
FieldElement{ + -20650503, + -11961496, + -27236275, + 570498, + 3767144, + -1717540, + 13891942, + -1569194, + 13717174, + 10805743, + }, + FieldElement{ + -14676630, + -15644296, + 15287174, + 11927123, + 24177847, + -8175568, + -796431, + 14860609, + -26938930, + -5863836, + }, + }, + }, + { + { + FieldElement{ + 12962541, + 5311799, + -10060768, + 11658280, + 18855286, + -7954201, + 13286263, + -12808704, + -4381056, + 9882022, + }, + FieldElement{ + 18512079, + 11319350, + -20123124, + 15090309, + 18818594, + 5271736, + -22727904, + 3666879, + -23967430, + -3299429, + }, + FieldElement{ + -6789020, + -3146043, + 16192429, + 13241070, + 15898607, + -14206114, + -10084880, + -6661110, + -2403099, + 5276065, + }, + }, + { + FieldElement{ + 30169808, + -5317648, + 26306206, + -11750859, + 27814964, + 7069267, + 7152851, + 3684982, + 1449224, + 13082861, + }, + FieldElement{ + 10342826, + 3098505, + 2119311, + 193222, + 25702612, + 12233820, + 23697382, + 15056736, + -21016438, + -8202000, + }, + FieldElement{ + -33150110, + 3261608, + 22745853, + 7948688, + 19370557, + -15177665, + -26171976, + 6482814, + -10300080, + -11060101, + }, + }, + { + FieldElement{ + 32869458, + -5408545, + 25609743, + 15678670, + -10687769, + -15471071, + 26112421, + 2521008, + -22664288, + 6904815, + }, + FieldElement{ + 29506923, + 4457497, + 3377935, + -9796444, + -30510046, + 12935080, + 1561737, + 3841096, + -29003639, + -6657642, + }, + FieldElement{ + 10340844, + -6630377, + -18656632, + -2278430, + 12621151, + -13339055, + 30878497, + -11824370, + -25584551, + 5181966, + }, + }, + { + FieldElement{ + 25940115, + -12658025, + 17324188, + -10307374, + -8671468, + 15029094, + 24396252, + -16450922, + -2322852, + -12388574, + }, + FieldElement{ + -21765684, + 9916823, + -1300409, + 4079498, + -1028346, + 11909559, + 1782390, + 12641087, + 20603771, + -6561742, + }, + FieldElement{ + -18882287, + -11673380, + 24849422, + 11501709, + 13161720, + -4768874, + 1925523, + 11914390, + 4662781, + 7820689, + }, + }, + { + FieldElement{ + 12241050, + -425982, + 8132691, + 9393934, + 32846760, + -1599620, + 29749456, + 12172924, + 16136752, + 15264020, + }, + FieldElement{ + -10349955, + -14680563, + -8211979, + 2330220, + -17662549, + -14545780, + 10658213, + 6671822, + 19012087, + 3772772, + }, + FieldElement{ + 3753511, + -3421066, + 10617074, + 2028709, + 14841030, + -6721664, + 28718732, + -15762884, + 20527771, + 12988982, + }, + }, + { + FieldElement{ + -14822485, + -5797269, + -3707987, + 12689773, + -898983, + -10914866, + -24183046, + -10564943, + 3299665, + -12424953, + }, + FieldElement{ + -16777703, + -15253301, + -9642417, + 4978983, + 3308785, + 8755439, + 6943197, + 6461331, + -25583147, + 8991218, + }, + FieldElement{ + -17226263, + 1816362, + -1673288, + -6086439, + 31783888, + -8175991, + -32948145, + 7417950, + -30242287, + 1507265, + }, + }, + { + FieldElement{ + 29692663, + 6829891, + -10498800, + 4334896, + 20945975, + -11906496, + -28887608, + 8209391, + 14606362, + -10647073, + }, + FieldElement{ + -3481570, + 8707081, + 32188102, + 5672294, + 22096700, + 1711240, + -33020695, + 9761487, + 4170404, + -2085325, + }, + FieldElement{ + -11587470, + 14855945, + -4127778, + -1531857, + -26649089, + 15084046, + 22186522, + 16002000, + -14276837, + -8400798, + }, + }, + { + FieldElement{ + -4811456, + 13761029, + -31703877, + -2483919, + -3312471, + 7869047, + -7113572, + -9620092, + 13240845, + 10965870, + }, + FieldElement{ + -7742563, + -8256762, + -14768334, + -13656260, + -23232383, + 12387166, 
+ 4498947, + 14147411, + 29514390, + 4302863, + }, + FieldElement{ + -13413405, + -12407859, + 20757302, + -13801832, + 14785143, + 8976368, + -5061276, + -2144373, + 17846988, + -13971927, + }, + }, + }, + { + { + FieldElement{ + -2244452, + -754728, + -4597030, + -1066309, + -6247172, + 1455299, + -21647728, + -9214789, + -5222701, + 12650267, + }, + FieldElement{ + -9906797, + -16070310, + 21134160, + 12198166, + -27064575, + 708126, + 387813, + 13770293, + -19134326, + 10958663, + }, + FieldElement{ + 22470984, + 12369526, + 23446014, + -5441109, + -21520802, + -9698723, + -11772496, + -11574455, + -25083830, + 4271862, + }, + }, + { + FieldElement{ + -25169565, + -10053642, + -19909332, + 15361595, + -5984358, + 2159192, + 75375, + -4278529, + -32526221, + 8469673, + }, + FieldElement{ + 15854970, + 4148314, + -8893890, + 7259002, + 11666551, + 13824734, + -30531198, + 2697372, + 24154791, + -9460943, + }, + FieldElement{ + 15446137, + -15806644, + 29759747, + 14019369, + 30811221, + -9610191, + -31582008, + 12840104, + 24913809, + 9815020, + }, + }, + { + FieldElement{ + -4709286, + -5614269, + -31841498, + -12288893, + -14443537, + 10799414, + -9103676, + 13438769, + 18735128, + 9466238, + }, + FieldElement{ + 11933045, + 9281483, + 5081055, + -5183824, + -2628162, + -4905629, + -7727821, + -10896103, + -22728655, + 16199064, + }, + FieldElement{ + 14576810, + 379472, + -26786533, + -8317236, + -29426508, + -10812974, + -102766, + 1876699, + 30801119, + 2164795, + }, + }, + { + FieldElement{ + 15995086, + 3199873, + 13672555, + 13712240, + -19378835, + -4647646, + -13081610, + -15496269, + -13492807, + 1268052, + }, + FieldElement{ + -10290614, + -3659039, + -3286592, + 10948818, + 23037027, + 3794475, + -3470338, + -12600221, + -17055369, + 3565904, + }, + FieldElement{ + 29210088, + -9419337, + -5919792, + -4952785, + 10834811, + -13327726, + -16512102, + -10820713, + -27162222, + -14030531, + }, + }, + { + FieldElement{ + -13161890, + 15508588, + 16663704, + -8156150, + -28349942, + 9019123, + -29183421, + -3769423, + 2244111, + -14001979, + }, + FieldElement{ + -5152875, + -3800936, + -9306475, + -6071583, + 16243069, + 14684434, + -25673088, + -16180800, + 13491506, + 4641841, + }, + FieldElement{ + 10813417, + 643330, + -19188515, + -728916, + 30292062, + -16600078, + 27548447, + -7721242, + 14476989, + -12767431, + }, + }, + { + FieldElement{ + 10292079, + 9984945, + 6481436, + 8279905, + -7251514, + 7032743, + 27282937, + -1644259, + -27912810, + 12651324, + }, + FieldElement{ + -31185513, + -813383, + 22271204, + 11835308, + 10201545, + 15351028, + 17099662, + 3988035, + 21721536, + -3148940, + }, + FieldElement{ + 10202177, + -6545839, + -31373232, + -9574638, + -32150642, + -8119683, + -12906320, + 3852694, + 13216206, + 14842320, + }, + }, + { + FieldElement{ + -15815640, + -10601066, + -6538952, + -7258995, + -6984659, + -6581778, + -31500847, + 13765824, + -27434397, + 9900184, + }, + FieldElement{ + 14465505, + -13833331, + -32133984, + -14738873, + -27443187, + 12990492, + 33046193, + 15796406, + -7051866, + -8040114, + }, + FieldElement{ + 30924417, + -8279620, + 6359016, + -12816335, + 16508377, + 9071735, + -25488601, + 15413635, + 9524356, + -7018878, + }, + }, + { + FieldElement{ + 12274201, + -13175547, + 32627641, + -1785326, + 6736625, + 13267305, + 5237659, + -5109483, + 15663516, + 4035784, + }, + FieldElement{ + -2951309, + 8903985, + 17349946, + 601635, + -16432815, + -4612556, + -13732739, + -15889334, + -22258478, + 4659091, + }, + FieldElement{ + 
-16916263, + -4952973, + -30393711, + -15158821, + 20774812, + 15897498, + 5736189, + 15026997, + -2178256, + -13455585, + }, + }, + }, + { + { + FieldElement{ + -8858980, + -2219056, + 28571666, + -10155518, + -474467, + -10105698, + -3801496, + 278095, + 23440562, + -290208, + }, + FieldElement{ + 10226241, + -5928702, + 15139956, + 120818, + -14867693, + 5218603, + 32937275, + 11551483, + -16571960, + -7442864, + }, + FieldElement{ + 17932739, + -12437276, + -24039557, + 10749060, + 11316803, + 7535897, + 22503767, + 5561594, + -3646624, + 3898661, + }, + }, + { + FieldElement{ + 7749907, + -969567, + -16339731, + -16464, + -25018111, + 15122143, + -1573531, + 7152530, + 21831162, + 1245233, + }, + FieldElement{ + 26958459, + -14658026, + 4314586, + 8346991, + -5677764, + 11960072, + -32589295, + -620035, + -30402091, + -16716212, + }, + FieldElement{ + -12165896, + 9166947, + 33491384, + 13673479, + 29787085, + 13096535, + 6280834, + 14587357, + -22338025, + 13987525, + }, + }, + { + FieldElement{ + -24349909, + 7778775, + 21116000, + 15572597, + -4833266, + -5357778, + -4300898, + -5124639, + -7469781, + -2858068, + }, + FieldElement{ + 9681908, + -6737123, + -31951644, + 13591838, + -6883821, + 386950, + 31622781, + 6439245, + -14581012, + 4091397, + }, + FieldElement{ + -8426427, + 1470727, + -28109679, + -1596990, + 3978627, + -5123623, + -19622683, + 12092163, + 29077877, + -14741988, + }, + }, + { + FieldElement{ + 5269168, + -6859726, + -13230211, + -8020715, + 25932563, + 1763552, + -5606110, + -5505881, + -20017847, + 2357889, + }, + FieldElement{ + 32264008, + -15407652, + -5387735, + -1160093, + -2091322, + -3946900, + 23104804, + -12869908, + 5727338, + 189038, + }, + FieldElement{ + 14609123, + -8954470, + -6000566, + -16622781, + -14577387, + -7743898, + -26745169, + 10942115, + -25888931, + -14884697, + }, + }, + { + FieldElement{ + 20513500, + 5557931, + -15604613, + 7829531, + 26413943, + -2019404, + -21378968, + 7471781, + 13913677, + -5137875, + }, + FieldElement{ + -25574376, + 11967826, + 29233242, + 12948236, + -6754465, + 4713227, + -8940970, + 14059180, + 12878652, + 8511905, + }, + FieldElement{ + -25656801, + 3393631, + -2955415, + -7075526, + -2250709, + 9366908, + -30223418, + 6812974, + 5568676, + -3127656, + }, + }, + { + FieldElement{ + 11630004, + 12144454, + 2116339, + 13606037, + 27378885, + 15676917, + -17408753, + -13504373, + -14395196, + 8070818, + }, + FieldElement{ + 27117696, + -10007378, + -31282771, + -5570088, + 1127282, + 12772488, + -29845906, + 10483306, + -11552749, + -1028714, + }, + FieldElement{ + 10637467, + -5688064, + 5674781, + 1072708, + -26343588, + -6982302, + -1683975, + 9177853, + -27493162, + 15431203, + }, + }, + { + FieldElement{ + 20525145, + 10892566, + -12742472, + 12779443, + -29493034, + 16150075, + -28240519, + 14943142, + -15056790, + -7935931, + }, + FieldElement{ + -30024462, + 5626926, + -551567, + -9981087, + 753598, + 11981191, + 25244767, + -3239766, + -3356550, + 9594024, + }, + FieldElement{ + -23752644, + 2636870, + -5163910, + -10103818, + 585134, + 7877383, + 11345683, + -6492290, + 13352335, + -10977084, + }, + }, + { + FieldElement{ + -1931799, + -5407458, + 3304649, + -12884869, + 17015806, + -4877091, + -29783850, + -7752482, + -13215537, + -319204, + }, + FieldElement{ + 20239939, + 6607058, + 6203985, + 3483793, + -18386976, + -779229, + -20723742, + 15077870, + -22750759, + 14523817, + }, + FieldElement{ + 27406042, + -6041657, + 27423596, + -4497394, + 4996214, + 10002360, + -28842031, + -4545494, 
+ -30172742, + -4805667, + }, + }, + }, + { + { + FieldElement{ + 11374242, + 12660715, + 17861383, + -12540833, + 10935568, + 1099227, + -13886076, + -9091740, + -27727044, + 11358504, + }, + FieldElement{ + -12730809, + 10311867, + 1510375, + 10778093, + -2119455, + -9145702, + 32676003, + 11149336, + -26123651, + 4985768, + }, + FieldElement{ + -19096303, + 341147, + -6197485, + -239033, + 15756973, + -8796662, + -983043, + 13794114, + -19414307, + -15621255, + }, + }, + { + FieldElement{ + 6490081, + 11940286, + 25495923, + -7726360, + 8668373, + -8751316, + 3367603, + 6970005, + -1691065, + -9004790, + }, + FieldElement{ + 1656497, + 13457317, + 15370807, + 6364910, + 13605745, + 8362338, + -19174622, + -5475723, + -16796596, + -5031438, + }, + FieldElement{ + -22273315, + -13524424, + -64685, + -4334223, + -18605636, + -10921968, + -20571065, + -7007978, + -99853, + -10237333, + }, + }, + { + FieldElement{ + 17747465, + 10039260, + 19368299, + -4050591, + -20630635, + -16041286, + 31992683, + -15857976, + -29260363, + -5511971, + }, + FieldElement{ + 31932027, + -4986141, + -19612382, + 16366580, + 22023614, + 88450, + 11371999, + -3744247, + 4882242, + -10626905, + }, + FieldElement{ + 29796507, + 37186, + 19818052, + 10115756, + -11829032, + 3352736, + 18551198, + 3272828, + -5190932, + -4162409, + }, + }, + { + FieldElement{ + 12501286, + 4044383, + -8612957, + -13392385, + -32430052, + 5136599, + -19230378, + -3529697, + 330070, + -3659409, + }, + FieldElement{ + 6384877, + 2899513, + 17807477, + 7663917, + -2358888, + 12363165, + 25366522, + -8573892, + -271295, + 12071499, + }, + FieldElement{ + -8365515, + -4042521, + 25133448, + -4517355, + -6211027, + 2265927, + -32769618, + 1936675, + -5159697, + 3829363, + }, + }, + { + FieldElement{ + 28425966, + -5835433, + -577090, + -4697198, + -14217555, + 6870930, + 7921550, + -6567787, + 26333140, + 14267664, + }, + FieldElement{ + -11067219, + 11871231, + 27385719, + -10559544, + -4585914, + -11189312, + 10004786, + -8709488, + -21761224, + 8930324, + }, + FieldElement{ + -21197785, + -16396035, + 25654216, + -1725397, + 12282012, + 11008919, + 1541940, + 4757911, + -26491501, + -16408940, + }, + }, + { + FieldElement{ + 13537262, + -7759490, + -20604840, + 10961927, + -5922820, + -13218065, + -13156584, + 6217254, + -15943699, + 13814990, + }, + FieldElement{ + -17422573, + 15157790, + 18705543, + 29619, + 24409717, + -260476, + 27361681, + 9257833, + -1956526, + -1776914, + }, + FieldElement{ + -25045300, + -10191966, + 15366585, + 15166509, + -13105086, + 8423556, + -29171540, + 12361135, + -18685978, + 4578290, + }, + }, + { + FieldElement{ + 24579768, + 3711570, + 1342322, + -11180126, + -27005135, + 14124956, + -22544529, + 14074919, + 21964432, + 8235257, + }, + FieldElement{ + -6528613, + -2411497, + 9442966, + -5925588, + 12025640, + -1487420, + -2981514, + -1669206, + 13006806, + 2355433, + }, + FieldElement{ + -16304899, + -13605259, + -6632427, + -5142349, + 16974359, + -10911083, + 27202044, + 1719366, + 1141648, + -12796236, + }, + }, + { + FieldElement{ + -12863944, + -13219986, + -8318266, + -11018091, + -6810145, + -4843894, + 13475066, + -3133972, + 32674895, + 13715045, + }, + FieldElement{ + 11423335, + -5468059, + 32344216, + 8962751, + 24989809, + 9241752, + -13265253, + 16086212, + -28740881, + -15642093, + }, + FieldElement{ + -1409668, + 12530728, + -6368726, + 10847387, + 19531186, + -14132160, + -11709148, + 7791794, + -27245943, + 4383347, + }, + }, + }, + { + { + FieldElement{ + -28970898, + 5271447, + 
-1266009, + -9736989, + -12455236, + 16732599, + -4862407, + -4906449, + 27193557, + 6245191, + }, + FieldElement{ + -15193956, + 5362278, + -1783893, + 2695834, + 4960227, + 12840725, + 23061898, + 3260492, + 22510453, + 8577507, + }, + FieldElement{ + -12632451, + 11257346, + -32692994, + 13548177, + -721004, + 10879011, + 31168030, + 13952092, + -29571492, + -3635906, + }, + }, + { + FieldElement{ + 3877321, + -9572739, + 32416692, + 5405324, + -11004407, + -13656635, + 3759769, + 11935320, + 5611860, + 8164018, + }, + FieldElement{ + -16275802, + 14667797, + 15906460, + 12155291, + -22111149, + -9039718, + 32003002, + -8832289, + 5773085, + -8422109, + }, + FieldElement{ + -23788118, + -8254300, + 1950875, + 8937633, + 18686727, + 16459170, + -905725, + 12376320, + 31632953, + 190926, + }, + }, + { + FieldElement{ + -24593607, + -16138885, + -8423991, + 13378746, + 14162407, + 6901328, + -8288749, + 4508564, + -25341555, + -3627528, + }, + FieldElement{ + 8884438, + -5884009, + 6023974, + 10104341, + -6881569, + -4941533, + 18722941, + -14786005, + -1672488, + 827625, + }, + FieldElement{ + -32720583, + -16289296, + -32503547, + 7101210, + 13354605, + 2659080, + -1800575, + -14108036, + -24878478, + 1541286, + }, + }, + { + FieldElement{ + 2901347, + -1117687, + 3880376, + -10059388, + -17620940, + -3612781, + -21802117, + -3567481, + 20456845, + -1885033, + }, + FieldElement{ + 27019610, + 12299467, + -13658288, + -1603234, + -12861660, + -4861471, + -19540150, + -5016058, + 29439641, + 15138866, + }, + FieldElement{ + 21536104, + -6626420, + -32447818, + -10690208, + -22408077, + 5175814, + -5420040, + -16361163, + 7779328, + 109896, + }, + }, + { + FieldElement{ + 30279744, + 14648750, + -8044871, + 6425558, + 13639621, + -743509, + 28698390, + 12180118, + 23177719, + -554075, + }, + FieldElement{ + 26572847, + 3405927, + -31701700, + 12890905, + -19265668, + 5335866, + -6493768, + 2378492, + 4439158, + -13279347, + }, + FieldElement{ + -22716706, + 3489070, + -9225266, + -332753, + 18875722, + -1140095, + 14819434, + -12731527, + -17717757, + -5461437, + }, + }, + { + FieldElement{ + -5056483, + 16566551, + 15953661, + 3767752, + -10436499, + 15627060, + -820954, + 2177225, + 8550082, + -15114165, + }, + FieldElement{ + -18473302, + 16596775, + -381660, + 15663611, + 22860960, + 15585581, + -27844109, + -3582739, + -23260460, + -8428588, + }, + FieldElement{ + -32480551, + 15707275, + -8205912, + -5652081, + 29464558, + 2713815, + -22725137, + 15860482, + -21902570, + 1494193, + }, + }, + { + FieldElement{ + -19562091, + -14087393, + -25583872, + -9299552, + 13127842, + 759709, + 21923482, + 16529112, + 8742704, + 12967017, + }, + FieldElement{ + -28464899, + 1553205, + 32536856, + -10473729, + -24691605, + -406174, + -8914625, + -2933896, + -29903758, + 15553883, + }, + FieldElement{ + 21877909, + 3230008, + 9881174, + 10539357, + -4797115, + 2841332, + 11543572, + 14513274, + 19375923, + -12647961, + }, + }, + { + FieldElement{ + 8832269, + -14495485, + 13253511, + 5137575, + 5037871, + 4078777, + 24880818, + -6222716, + 2862653, + 9455043, + }, + FieldElement{ + 29306751, + 5123106, + 20245049, + -14149889, + 9592566, + 8447059, + -2077124, + -2990080, + 15511449, + 4789663, + }, + FieldElement{ + -20679756, + 7004547, + 8824831, + -9434977, + -4045704, + -3750736, + -5754762, + 108893, + 23513200, + 16652362, + }, + }, + }, + { + { + FieldElement{ + -33256173, + 4144782, + -4476029, + -6579123, + 10770039, + -7155542, + -6650416, + -12936300, + -18319198, + 10212860, + }, + 
FieldElement{ + 2756081, + 8598110, + 7383731, + -6859892, + 22312759, + -1105012, + 21179801, + 2600940, + -9988298, + -12506466, + }, + FieldElement{ + -24645692, + 13317462, + -30449259, + -15653928, + 21365574, + -10869657, + 11344424, + 864440, + -2499677, + -16710063, + }, + }, + { + FieldElement{ + -26432803, + 6148329, + -17184412, + -14474154, + 18782929, + -275997, + -22561534, + 211300, + 2719757, + 4940997, + }, + FieldElement{ + -1323882, + 3911313, + -6948744, + 14759765, + -30027150, + 7851207, + 21690126, + 8518463, + 26699843, + 5276295, + }, + FieldElement{ + -13149873, + -6429067, + 9396249, + 365013, + 24703301, + -10488939, + 1321586, + 149635, + -15452774, + 7159369, + }, + }, + { + FieldElement{ + 9987780, + -3404759, + 17507962, + 9505530, + 9731535, + -2165514, + 22356009, + 8312176, + 22477218, + -8403385, + }, + FieldElement{ + 18155857, + -16504990, + 19744716, + 9006923, + 15154154, + -10538976, + 24256460, + -4864995, + -22548173, + 9334109, + }, + FieldElement{ + 2986088, + -4911893, + 10776628, + -3473844, + 10620590, + -7083203, + -21413845, + 14253545, + -22587149, + 536906, + }, + }, + { + FieldElement{ + 4377756, + 8115836, + 24567078, + 15495314, + 11625074, + 13064599, + 7390551, + 10589625, + 10838060, + -15420424, + }, + FieldElement{ + -19342404, + 867880, + 9277171, + -3218459, + -14431572, + -1986443, + 19295826, + -15796950, + 6378260, + 699185, + }, + FieldElement{ + 7895026, + 4057113, + -7081772, + -13077756, + -17886831, + -323126, + -716039, + 15693155, + -5045064, + -13373962, + }, + }, + { + FieldElement{ + -7737563, + -5869402, + -14566319, + -7406919, + 11385654, + 13201616, + 31730678, + -10962840, + -3918636, + -9669325, + }, + FieldElement{ + 10188286, + -15770834, + -7336361, + 13427543, + 22223443, + 14896287, + 30743455, + 7116568, + -21786507, + 5427593, + }, + FieldElement{ + 696102, + 13206899, + 27047647, + -10632082, + 15285305, + -9853179, + 10798490, + -4578720, + 19236243, + 12477404, + }, + }, + { + FieldElement{ + -11229439, + 11243796, + -17054270, + -8040865, + -788228, + -8167967, + -3897669, + 11180504, + -23169516, + 7733644, + }, + FieldElement{ + 17800790, + -14036179, + -27000429, + -11766671, + 23887827, + 3149671, + 23466177, + -10538171, + 10322027, + 15313801, + }, + FieldElement{ + 26246234, + 11968874, + 32263343, + -5468728, + 6830755, + -13323031, + -15794704, + -101982, + -24449242, + 10890804, + }, + }, + { + FieldElement{ + -31365647, + 10271363, + -12660625, + -6267268, + 16690207, + -13062544, + -14982212, + 16484931, + 25180797, + -5334884, + }, + FieldElement{ + -586574, + 10376444, + -32586414, + -11286356, + 19801893, + 10997610, + 2276632, + 9482883, + 316878, + 13820577, + }, + FieldElement{ + -9882808, + -4510367, + -2115506, + 16457136, + -11100081, + 11674996, + 30756178, + -7515054, + 30696930, + -3712849, + }, + }, + { + FieldElement{ + 32988917, + -9603412, + 12499366, + 7910787, + -10617257, + -11931514, + -7342816, + -9985397, + -32349517, + 7392473, + }, + FieldElement{ + -8855661, + 15927861, + 9866406, + -3649411, + -2396914, + -16655781, + -30409476, + -9134995, + 25112947, + -2926644, + }, + FieldElement{ + -2504044, + -436966, + 25621774, + -5678772, + 15085042, + -5479877, + -24884878, + -13526194, + 5537438, + -13914319, + }, + }, + }, + { + { + FieldElement{ + -11225584, + 2320285, + -9584280, + 10149187, + -33444663, + 5808648, + -14876251, + -1729667, + 31234590, + 6090599, + }, + FieldElement{ + -9633316, + 116426, + 26083934, + 2897444, + -6364437, + -2688086, + 609721, + 
15878753, + -6970405, + -9034768, + }, + FieldElement{ + -27757857, + 247744, + -15194774, + -9002551, + 23288161, + -10011936, + -23869595, + 6503646, + 20650474, + 1804084, + }, + }, + { + FieldElement{ + -27589786, + 15456424, + 8972517, + 8469608, + 15640622, + 4439847, + 3121995, + -10329713, + 27842616, + -202328, + }, + FieldElement{ + -15306973, + 2839644, + 22530074, + 10026331, + 4602058, + 5048462, + 28248656, + 5031932, + -11375082, + 12714369, + }, + FieldElement{ + 20807691, + -7270825, + 29286141, + 11421711, + -27876523, + -13868230, + -21227475, + 1035546, + -19733229, + 12796920, + }, + }, + { + FieldElement{ + 12076899, + -14301286, + -8785001, + -11848922, + -25012791, + 16400684, + -17591495, + -12899438, + 3480665, + -15182815, + }, + FieldElement{ + -32361549, + 5457597, + 28548107, + 7833186, + 7303070, + -11953545, + -24363064, + -15921875, + -33374054, + 2771025, + }, + FieldElement{ + -21389266, + 421932, + 26597266, + 6860826, + 22486084, + -6737172, + -17137485, + -4210226, + -24552282, + 15673397, + }, + }, + { + FieldElement{ + -20184622, + 2338216, + 19788685, + -9620956, + -4001265, + -8740893, + -20271184, + 4733254, + 3727144, + -12934448, + }, + FieldElement{ + 6120119, + 814863, + -11794402, + -622716, + 6812205, + -15747771, + 2019594, + 7975683, + 31123697, + -10958981, + }, + FieldElement{ + 30069250, + -11435332, + 30434654, + 2958439, + 18399564, + -976289, + 12296869, + 9204260, + -16432438, + 9648165, + }, + }, + { + FieldElement{ + 32705432, + -1550977, + 30705658, + 7451065, + -11805606, + 9631813, + 3305266, + 5248604, + -26008332, + -11377501, + }, + FieldElement{ + 17219865, + 2375039, + -31570947, + -5575615, + -19459679, + 9219903, + 294711, + 15298639, + 2662509, + -16297073, + }, + FieldElement{ + -1172927, + -7558695, + -4366770, + -4287744, + -21346413, + -8434326, + 32087529, + -1222777, + 32247248, + -14389861, + }, + }, + { + FieldElement{ + 14312628, + 1221556, + 17395390, + -8700143, + -4945741, + -8684635, + -28197744, + -9637817, + -16027623, + -13378845, + }, + FieldElement{ + -1428825, + -9678990, + -9235681, + 6549687, + -7383069, + -468664, + 23046502, + 9803137, + 17597934, + 2346211, + }, + FieldElement{ + 18510800, + 15337574, + 26171504, + 981392, + -22241552, + 7827556, + -23491134, + -11323352, + 3059833, + -11782870, + }, + }, + { + FieldElement{ + 10141598, + 6082907, + 17829293, + -1947643, + 9830092, + 13613136, + -25556636, + -5544586, + -33502212, + 3592096, + }, + FieldElement{ + 33114168, + -15889352, + -26525686, + -13343397, + 33076705, + 8716171, + 1151462, + 1521897, + -982665, + -6837803, + }, + FieldElement{ + -32939165, + -4255815, + 23947181, + -324178, + -33072974, + -12305637, + -16637686, + 3891704, + 26353178, + 693168, + }, + }, + { + FieldElement{ + 30374239, + 1595580, + -16884039, + 13186931, + 4600344, + 406904, + 9585294, + -400668, + 31375464, + 14369965, + }, + FieldElement{ + -14370654, + -7772529, + 1510301, + 6434173, + -18784789, + -6262728, + 32732230, + -13108839, + 17901441, + 16011505, + }, + FieldElement{ + 18171223, + -11934626, + -12500402, + 15197122, + -11038147, + -15230035, + -19172240, + -16046376, + 8764035, + 12309598, + }, + }, + }, + { + { + FieldElement{ + 5975908, + -5243188, + -19459362, + -9681747, + -11541277, + 14015782, + -23665757, + 1228319, + 17544096, + -10593782, + }, + FieldElement{ + 5811932, + -1715293, + 3442887, + -2269310, + -18367348, + -8359541, + -18044043, + -15410127, + -5565381, + 12348900, + }, + FieldElement{ + -31399660, + 11407555, + 25755363, 
+ 6891399, + -3256938, + 14872274, + -24849353, + 8141295, + -10632534, + -585479, + }, + }, + { + FieldElement{ + -12675304, + 694026, + -5076145, + 13300344, + 14015258, + -14451394, + -9698672, + -11329050, + 30944593, + 1130208, + }, + FieldElement{ + 8247766, + -6710942, + -26562381, + -7709309, + -14401939, + -14648910, + 4652152, + 2488540, + 23550156, + -271232, + }, + FieldElement{ + 17294316, + -3788438, + 7026748, + 15626851, + 22990044, + 113481, + 2267737, + -5908146, + -408818, + -137719, + }, + }, + { + FieldElement{ + 16091085, + -16253926, + 18599252, + 7340678, + 2137637, + -1221657, + -3364161, + 14550936, + 3260525, + -7166271, + }, + FieldElement{ + -4910104, + -13332887, + 18550887, + 10864893, + -16459325, + -7291596, + -23028869, + -13204905, + -12748722, + 2701326, + }, + FieldElement{ + -8574695, + 16099415, + 4629974, + -16340524, + -20786213, + -6005432, + -10018363, + 9276971, + 11329923, + 1862132, + }, + }, + { + FieldElement{ + 14763076, + -15903608, + -30918270, + 3689867, + 3511892, + 10313526, + -21951088, + 12219231, + -9037963, + -940300, + }, + FieldElement{ + 8894987, + -3446094, + 6150753, + 3013931, + 301220, + 15693451, + -31981216, + -2909717, + -15438168, + 11595570, + }, + FieldElement{ + 15214962, + 3537601, + -26238722, + -14058872, + 4418657, + -15230761, + 13947276, + 10730794, + -13489462, + -4363670, + }, + }, + { + FieldElement{ + -2538306, + 7682793, + 32759013, + 263109, + -29984731, + -7955452, + -22332124, + -10188635, + 977108, + 699994, + }, + FieldElement{ + -12466472, + 4195084, + -9211532, + 550904, + -15565337, + 12917920, + 19118110, + -439841, + -30534533, + -14337913, + }, + FieldElement{ + 31788461, + -14507657, + 4799989, + 7372237, + 8808585, + -14747943, + 9408237, + -10051775, + 12493932, + -5409317, + }, + }, + { + FieldElement{ + -25680606, + 5260744, + -19235809, + -6284470, + -3695942, + 16566087, + 27218280, + 2607121, + 29375955, + 6024730, + }, + FieldElement{ + 842132, + -2794693, + -4763381, + -8722815, + 26332018, + -12405641, + 11831880, + 6985184, + -9940361, + 2854096, + }, + FieldElement{ + -4847262, + -7969331, + 2516242, + -5847713, + 9695691, + -7221186, + 16512645, + 960770, + 12121869, + 16648078, + }, + }, + { + FieldElement{ + -15218652, + 14667096, + -13336229, + 2013717, + 30598287, + -464137, + -31504922, + -7882064, + 20237806, + 2838411, + }, + FieldElement{ + -19288047, + 4453152, + 15298546, + -16178388, + 22115043, + -15972604, + 12544294, + -13470457, + 1068881, + -12499905, + }, + FieldElement{ + -9558883, + -16518835, + 33238498, + 13506958, + 30505848, + -1114596, + -8486907, + -2630053, + 12521378, + 4845654, + }, + }, + { + FieldElement{ + -28198521, + 10744108, + -2958380, + 10199664, + 7759311, + -13088600, + 3409348, + -873400, + -6482306, + -12885870, + }, + FieldElement{ + -23561822, + 6230156, + -20382013, + 10655314, + -24040585, + -11621172, + 10477734, + -1240216, + -3113227, + 13974498, + }, + FieldElement{ + 12966261, + 15550616, + -32038948, + -1615346, + 21025980, + -629444, + 5642325, + 7188737, + 18895762, + 12629579, + }, + }, + }, + { + { + FieldElement{ + 14741879, + -14946887, + 22177208, + -11721237, + 1279741, + 8058600, + 11758140, + 789443, + 32195181, + 3895677, + }, + FieldElement{ + 10758205, + 15755439, + -4509950, + 9243698, + -4879422, + 6879879, + -2204575, + -3566119, + -8982069, + 4429647, + }, + FieldElement{ + -2453894, + 15725973, + -20436342, + -10410672, + -5803908, + -11040220, + -7135870, + -11642895, + 18047436, + -15281743, + }, + }, + { + 
FieldElement{ + -25173001, + -11307165, + 29759956, + 11776784, + -22262383, + -15820455, + 10993114, + -12850837, + -17620701, + -9408468, + }, + FieldElement{ + 21987233, + 700364, + -24505048, + 14972008, + -7774265, + -5718395, + 32155026, + 2581431, + -29958985, + 8773375, + }, + FieldElement{ + -25568350, + 454463, + -13211935, + 16126715, + 25240068, + 8594567, + 20656846, + 12017935, + -7874389, + -13920155, + }, + }, + { + FieldElement{ + 6028182, + 6263078, + -31011806, + -11301710, + -818919, + 2461772, + -31841174, + -5468042, + -1721788, + -2776725, + }, + FieldElement{ + -12278994, + 16624277, + 987579, + -5922598, + 32908203, + 1248608, + 7719845, + -4166698, + 28408820, + 6816612, + }, + FieldElement{ + -10358094, + -8237829, + 19549651, + -12169222, + 22082623, + 16147817, + 20613181, + 13982702, + -10339570, + 5067943, + }, + }, + { + FieldElement{ + -30505967, + -3821767, + 12074681, + 13582412, + -19877972, + 2443951, + -19719286, + 12746132, + 5331210, + -10105944, + }, + FieldElement{ + 30528811, + 3601899, + -1957090, + 4619785, + -27361822, + -15436388, + 24180793, + -12570394, + 27679908, + -1648928, + }, + FieldElement{ + 9402404, + -13957065, + 32834043, + 10838634, + -26580150, + -13237195, + 26653274, + -8685565, + 22611444, + -12715406, + }, + }, + { + FieldElement{ + 22190590, + 1118029, + 22736441, + 15130463, + -30460692, + -5991321, + 19189625, + -4648942, + 4854859, + 6622139, + }, + FieldElement{ + -8310738, + -2953450, + -8262579, + -3388049, + -10401731, + -271929, + 13424426, + -3567227, + 26404409, + 13001963, + }, + FieldElement{ + -31241838, + -15415700, + -2994250, + 8939346, + 11562230, + -12840670, + -26064365, + -11621720, + -15405155, + 11020693, + }, + }, + { + FieldElement{ + 1866042, + -7949489, + -7898649, + -10301010, + 12483315, + 13477547, + 3175636, + -12424163, + 28761762, + 1406734, + }, + FieldElement{ + -448555, + -1777666, + 13018551, + 3194501, + -9580420, + -11161737, + 24760585, + -4347088, + 25577411, + -13378680, + }, + FieldElement{ + -24290378, + 4759345, + -690653, + -1852816, + 2066747, + 10693769, + -29595790, + 9884936, + -9368926, + 4745410, + }, + }, + { + FieldElement{ + -9141284, + 6049714, + -19531061, + -4341411, + -31260798, + 9944276, + -15462008, + -11311852, + 10931924, + -11931931, + }, + FieldElement{ + -16561513, + 14112680, + -8012645, + 4817318, + -8040464, + -11414606, + -22853429, + 10856641, + -20470770, + 13434654, + }, + FieldElement{ + 22759489, + -10073434, + -16766264, + -1871422, + 13637442, + -10168091, + 1765144, + -12654326, + 28445307, + -5364710, + }, + }, + { + FieldElement{ + 29875063, + 12493613, + 2795536, + -3786330, + 1710620, + 15181182, + -10195717, + -8788675, + 9074234, + 1167180, + }, + FieldElement{ + -26205683, + 11014233, + -9842651, + -2635485, + -26908120, + 7532294, + -18716888, + -9535498, + 3843903, + 9367684, + }, + FieldElement{ + -10969595, + -6403711, + 9591134, + 9582310, + 11349256, + 108879, + 16235123, + 8601684, + -139197, + 4242895, + }, + }, + }, + { + { + FieldElement{ + 22092954, + -13191123, + -2042793, + -11968512, + 32186753, + -11517388, + -6574341, + 2470660, + -27417366, + 16625501, + }, + FieldElement{ + -11057722, + 3042016, + 13770083, + -9257922, + 584236, + -544855, + -7770857, + 2602725, + -27351616, + 14247413, + }, + FieldElement{ + 6314175, + -10264892, + -32772502, + 15957557, + -10157730, + 168750, + -8618807, + 14290061, + 27108877, + -1180880, + }, + }, + { + FieldElement{ + -8586597, + -7170966, + 13241782, + 10960156, + -32991015, + 
-13794596, + 33547976, + -11058889, + -27148451, + 981874, + }, + FieldElement{ + 22833440, + 9293594, + -32649448, + -13618667, + -9136966, + 14756819, + -22928859, + -13970780, + -10479804, + -16197962, + }, + FieldElement{ + -7768587, + 3326786, + -28111797, + 10783824, + 19178761, + 14905060, + 22680049, + 13906969, + -15933690, + 3797899, + }, + }, + { + FieldElement{ + 21721356, + -4212746, + -12206123, + 9310182, + -3882239, + -13653110, + 23740224, + -2709232, + 20491983, + -8042152, + }, + FieldElement{ + 9209270, + -15135055, + -13256557, + -6167798, + -731016, + 15289673, + 25947805, + 15286587, + 30997318, + -6703063, + }, + FieldElement{ + 7392032, + 16618386, + 23946583, + -8039892, + -13265164, + -1533858, + -14197445, + -2321576, + 17649998, + -250080, + }, + }, + { + FieldElement{ + -9301088, + -14193827, + 30609526, + -3049543, + -25175069, + -1283752, + -15241566, + -9525724, + -2233253, + 7662146, + }, + FieldElement{ + -17558673, + 1763594, + -33114336, + 15908610, + -30040870, + -12174295, + 7335080, + -8472199, + -3174674, + 3440183, + }, + FieldElement{ + -19889700, + -5977008, + -24111293, + -9688870, + 10799743, + -16571957, + 40450, + -4431835, + 4862400, + 1133, + }, + }, + { + FieldElement{ + -32856209, + -7873957, + -5422389, + 14860950, + -16319031, + 7956142, + 7258061, + 311861, + -30594991, + -7379421, + }, + FieldElement{ + -3773428, + -1565936, + 28985340, + 7499440, + 24445838, + 9325937, + 29727763, + 16527196, + 18278453, + 15405622, + }, + FieldElement{ + -4381906, + 8508652, + -19898366, + -3674424, + -5984453, + 15149970, + -13313598, + 843523, + -21875062, + 13626197, + }, + }, + { + FieldElement{ + 2281448, + -13487055, + -10915418, + -2609910, + 1879358, + 16164207, + -10783882, + 3953792, + 13340839, + 15928663, + }, + FieldElement{ + 31727126, + -7179855, + -18437503, + -8283652, + 2875793, + -16390330, + -25269894, + -7014826, + -23452306, + 5964753, + }, + FieldElement{ + 4100420, + -5959452, + -17179337, + 6017714, + -18705837, + 12227141, + -26684835, + 11344144, + 2538215, + -7570755, + }, + }, + { + FieldElement{ + -9433605, + 6123113, + 11159803, + -2156608, + 30016280, + 14966241, + -20474983, + 1485421, + -629256, + -15958862, + }, + FieldElement{ + -26804558, + 4260919, + 11851389, + 9658551, + -32017107, + 16367492, + -20205425, + -13191288, + 11659922, + -11115118, + }, + FieldElement{ + 26180396, + 10015009, + -30844224, + -8581293, + 5418197, + 9480663, + 2231568, + -10170080, + 33100372, + -1306171, + }, + }, + { + FieldElement{ + 15121113, + -5201871, + -10389905, + 15427821, + -27509937, + -15992507, + 21670947, + 4486675, + -5931810, + -14466380, + }, + FieldElement{ + 16166486, + -9483733, + -11104130, + 6023908, + -31926798, + -1364923, + 2340060, + -16254968, + -10735770, + -10039824, + }, + FieldElement{ + 28042865, + -3557089, + -12126526, + 12259706, + -3717498, + -6945899, + 6766453, + -8689599, + 18036436, + 5803270, + }, + }, + }, + { + { + FieldElement{ + -817581, + 6763912, + 11803561, + 1585585, + 10958447, + -2671165, + 23855391, + 4598332, + -6159431, + -14117438, + }, + FieldElement{ + -31031306, + -14256194, + 17332029, + -2383520, + 31312682, + -5967183, + 696309, + 50292, + -20095739, + 11763584, + }, + FieldElement{ + -594563, + -2514283, + -32234153, + 12643980, + 12650761, + 14811489, + 665117, + -12613632, + -19773211, + -10713562, + }, + }, + { + FieldElement{ + 30464590, + -11262872, + -4127476, + -12734478, + 19835327, + -7105613, + -24396175, + 2075773, + -17020157, + 992471, + }, + FieldElement{ + 
18357185, + -6994433, + 7766382, + 16342475, + -29324918, + 411174, + 14578841, + 8080033, + -11574335, + -10601610, + }, + FieldElement{ + 19598397, + 10334610, + 12555054, + 2555664, + 18821899, + -10339780, + 21873263, + 16014234, + 26224780, + 16452269, + }, + }, + { + FieldElement{ + -30223925, + 5145196, + 5944548, + 16385966, + 3976735, + 2009897, + -11377804, + -7618186, + -20533829, + 3698650, + }, + FieldElement{ + 14187449, + 3448569, + -10636236, + -10810935, + -22663880, + -3433596, + 7268410, + -10890444, + 27394301, + 12015369, + }, + FieldElement{ + 19695761, + 16087646, + 28032085, + 12999827, + 6817792, + 11427614, + 20244189, + -1312777, + -13259127, + -3402461, + }, + }, + { + FieldElement{ + 30860103, + 12735208, + -1888245, + -4699734, + -16974906, + 2256940, + -8166013, + 12298312, + -8550524, + -10393462, + }, + FieldElement{ + -5719826, + -11245325, + -1910649, + 15569035, + 26642876, + -7587760, + -5789354, + -15118654, + -4976164, + 12651793, + }, + FieldElement{ + -2848395, + 9953421, + 11531313, + -5282879, + 26895123, + -12697089, + -13118820, + -16517902, + 9768698, + -2533218, + }, + }, + { + FieldElement{ + -24719459, + 1894651, + -287698, + -4704085, + 15348719, + -8156530, + 32767513, + 12765450, + 4940095, + 10678226, + }, + FieldElement{ + 18860224, + 15980149, + -18987240, + -1562570, + -26233012, + -11071856, + -7843882, + 13944024, + -24372348, + 16582019, + }, + FieldElement{ + -15504260, + 4970268, + -29893044, + 4175593, + -20993212, + -2199756, + -11704054, + 15444560, + -11003761, + 7989037, + }, + }, + { + FieldElement{ + 31490452, + 5568061, + -2412803, + 2182383, + -32336847, + 4531686, + -32078269, + 6200206, + -19686113, + -14800171, + }, + FieldElement{ + -17308668, + -15879940, + -31522777, + -2831, + -32887382, + 16375549, + 8680158, + -16371713, + 28550068, + -6857132, + }, + FieldElement{ + -28126887, + -5688091, + 16837845, + -1820458, + -6850681, + 12700016, + -30039981, + 4364038, + 1155602, + 5988841, + }, + }, + { + FieldElement{ + 21890435, + -13272907, + -12624011, + 12154349, + -7831873, + 15300496, + 23148983, + -4470481, + 24618407, + 8283181, + }, + FieldElement{ + -33136107, + -10512751, + 9975416, + 6841041, + -31559793, + 16356536, + 3070187, + -7025928, + 1466169, + 10740210, + }, + FieldElement{ + -1509399, + -15488185, + -13503385, + -10655916, + 32799044, + 909394, + -13938903, + -5779719, + -32164649, + -15327040, + }, + }, + { + FieldElement{ + 3960823, + -14267803, + -28026090, + -15918051, + -19404858, + 13146868, + 15567327, + 951507, + -3260321, + -573935, + }, + FieldElement{ + 24740841, + 5052253, + -30094131, + 8961361, + 25877428, + 6165135, + -24368180, + 14397372, + -7380369, + -6144105, + }, + FieldElement{ + -28888365, + 3510803, + -28103278, + -1158478, + -11238128, + -10631454, + -15441463, + -14453128, + -1625486, + -6494814, + }, + }, + }, + { + { + FieldElement{ + 793299, + -9230478, + 8836302, + -6235707, + -27360908, + -2369593, + 33152843, + -4885251, + -9906200, + -621852, + }, + FieldElement{ + 5666233, + 525582, + 20782575, + -8038419, + -24538499, + 14657740, + 16099374, + 1468826, + -6171428, + -15186581, + }, + FieldElement{ + -4859255, + -3779343, + -2917758, + -6748019, + 7778750, + 11688288, + -30404353, + -9871238, + -1558923, + -9863646, + }, + }, + { + FieldElement{ + 10896332, + -7719704, + 824275, + 472601, + -19460308, + 3009587, + 25248958, + 14783338, + -30581476, + -15757844, + }, + FieldElement{ + 10566929, + 12612572, + -31944212, + 11118703, + -12633376, + 12362879, + 
21752402, + 8822496, + 24003793, + 14264025, + }, + FieldElement{ + 27713862, + -7355973, + -11008240, + 9227530, + 27050101, + 2504721, + 23886875, + -13117525, + 13958495, + -5732453, + }, + }, + { + FieldElement{ + -23481610, + 4867226, + -27247128, + 3900521, + 29838369, + -8212291, + -31889399, + -10041781, + 7340521, + -15410068, + }, + FieldElement{ + 4646514, + -8011124, + -22766023, + -11532654, + 23184553, + 8566613, + 31366726, + -1381061, + -15066784, + -10375192, + }, + FieldElement{ + -17270517, + 12723032, + -16993061, + 14878794, + 21619651, + -6197576, + 27584817, + 3093888, + -8843694, + 3849921, + }, + }, + { + FieldElement{ + -9064912, + 2103172, + 25561640, + -15125738, + -5239824, + 9582958, + 32477045, + -9017955, + 5002294, + -15550259, + }, + FieldElement{ + -12057553, + -11177906, + 21115585, + -13365155, + 8808712, + -12030708, + 16489530, + 13378448, + -25845716, + 12741426, + }, + FieldElement{ + -5946367, + 10645103, + -30911586, + 15390284, + -3286982, + -7118677, + 24306472, + 15852464, + 28834118, + -7646072, + }, + }, + { + FieldElement{ + -17335748, + -9107057, + -24531279, + 9434953, + -8472084, + -583362, + -13090771, + 455841, + 20461858, + 5491305, + }, + FieldElement{ + 13669248, + -16095482, + -12481974, + -10203039, + -14569770, + -11893198, + -24995986, + 11293807, + -28588204, + -9421832, + }, + FieldElement{ + 28497928, + 6272777, + -33022994, + 14470570, + 8906179, + -1225630, + 18504674, + -14165166, + 29867745, + -8795943, + }, + }, + { + FieldElement{ + -16207023, + 13517196, + -27799630, + -13697798, + 24009064, + -6373891, + -6367600, + -13175392, + 22853429, + -4012011, + }, + FieldElement{ + 24191378, + 16712145, + -13931797, + 15217831, + 14542237, + 1646131, + 18603514, + -11037887, + 12876623, + -2112447, + }, + FieldElement{ + 17902668, + 4518229, + -411702, + -2829247, + 26878217, + 5258055, + -12860753, + 608397, + 16031844, + 3723494, + }, + }, + { + FieldElement{ + -28632773, + 12763728, + -20446446, + 7577504, + 33001348, + -13017745, + 17558842, + -7872890, + 23896954, + -4314245, + }, + FieldElement{ + -20005381, + -12011952, + 31520464, + 605201, + 2543521, + 5991821, + -2945064, + 7229064, + -9919646, + -8826859, + }, + FieldElement{ + 28816045, + 298879, + -28165016, + -15920938, + 19000928, + -1665890, + -12680833, + -2949325, + -18051778, + -2082915, + }, + }, + { + FieldElement{ + 16000882, + -344896, + 3493092, + -11447198, + -29504595, + -13159789, + 12577740, + 16041268, + -19715240, + 7847707, + }, + FieldElement{ + 10151868, + 10572098, + 27312476, + 7922682, + 14825339, + 4723128, + -32855931, + -6519018, + -10020567, + 3852848, + }, + FieldElement{ + -11430470, + 15697596, + -21121557, + -4420647, + 5386314, + 15063598, + 16514493, + -15932110, + 29330899, + -15076224, + }, + }, + }, + { + { + FieldElement{ + -25499735, + -4378794, + -15222908, + -6901211, + 16615731, + 2051784, + 3303702, + 15490, + -27548796, + 12314391, + }, + FieldElement{ + 15683520, + -6003043, + 18109120, + -9980648, + 15337968, + -5997823, + -16717435, + 15921866, + 16103996, + -3731215, + }, + FieldElement{ + -23169824, + -10781249, + 13588192, + -1628807, + -3798557, + -1074929, + -19273607, + 5402699, + -29815713, + -9841101, + }, + }, + { + FieldElement{ + 23190676, + 2384583, + -32714340, + 3462154, + -29903655, + -1529132, + -11266856, + 8911517, + -25205859, + 2739713, + }, + FieldElement{ + 21374101, + -3554250, + -33524649, + 9874411, + 15377179, + 11831242, + -33529904, + 6134907, + 4931255, + 11987849, + }, + FieldElement{ + 
-7732, + -2978858, + -16223486, + 7277597, + 105524, + -322051, + -31480539, + 13861388, + -30076310, + 10117930, + }, + }, + { + FieldElement{ + -29501170, + -10744872, + -26163768, + 13051539, + -25625564, + 5089643, + -6325503, + 6704079, + 12890019, + 15728940, + }, + FieldElement{ + -21972360, + -11771379, + -951059, + -4418840, + 14704840, + 2695116, + 903376, + -10428139, + 12885167, + 8311031, + }, + FieldElement{ + -17516482, + 5352194, + 10384213, + -13811658, + 7506451, + 13453191, + 26423267, + 4384730, + 1888765, + -5435404, + }, + }, + { + FieldElement{ + -25817338, + -3107312, + -13494599, + -3182506, + 30896459, + -13921729, + -32251644, + -12707869, + -19464434, + -3340243, + }, + FieldElement{ + -23607977, + -2665774, + -526091, + 4651136, + 5765089, + 4618330, + 6092245, + 14845197, + 17151279, + -9854116, + }, + FieldElement{ + -24830458, + -12733720, + -15165978, + 10367250, + -29530908, + -265356, + 22825805, + -7087279, + -16866484, + 16176525, + }, + }, + { + FieldElement{ + -23583256, + 6564961, + 20063689, + 3798228, + -4740178, + 7359225, + 2006182, + -10363426, + -28746253, + -10197509, + }, + FieldElement{ + -10626600, + -4486402, + -13320562, + -5125317, + 3432136, + -6393229, + 23632037, + -1940610, + 32808310, + 1099883, + }, + FieldElement{ + 15030977, + 5768825, + -27451236, + -2887299, + -6427378, + -15361371, + -15277896, + -6809350, + 2051441, + -15225865, + }, + }, + { + FieldElement{ + -3362323, + -7239372, + 7517890, + 9824992, + 23555850, + 295369, + 5148398, + -14154188, + -22686354, + 16633660, + }, + FieldElement{ + 4577086, + -16752288, + 13249841, + -15304328, + 19958763, + -14537274, + 18559670, + -10759549, + 8402478, + -9864273, + }, + FieldElement{ + -28406330, + -1051581, + -26790155, + -907698, + -17212414, + -11030789, + 9453451, + -14980072, + 17983010, + 9967138, + }, + }, + { + FieldElement{ + -25762494, + 6524722, + 26585488, + 9969270, + 24709298, + 1220360, + -1677990, + 7806337, + 17507396, + 3651560, + }, + FieldElement{ + -10420457, + -4118111, + 14584639, + 15971087, + -15768321, + 8861010, + 26556809, + -5574557, + -18553322, + -11357135, + }, + FieldElement{ + 2839101, + 14284142, + 4029895, + 3472686, + 14402957, + 12689363, + -26642121, + 8459447, + -5605463, + -7621941, + }, + }, + { + FieldElement{ + -4839289, + -3535444, + 9744961, + 2871048, + 25113978, + 3187018, + -25110813, + -849066, + 17258084, + -7977739, + }, + FieldElement{ + 18164541, + -10595176, + -17154882, + -1542417, + 19237078, + -9745295, + 23357533, + -15217008, + 26908270, + 12150756, + }, + FieldElement{ + -30264870, + -7647865, + 5112249, + -7036672, + -1499807, + -6974257, + 43168, + -5537701, + -32302074, + 16215819, + }, + }, + }, + { + { + FieldElement{ + -6898905, + 9824394, + -12304779, + -4401089, + -31397141, + -6276835, + 32574489, + 12532905, + -7503072, + -8675347, + }, + FieldElement{ + -27343522, + -16515468, + -27151524, + -10722951, + 946346, + 16291093, + 254968, + 7168080, + 21676107, + -1943028, + }, + FieldElement{ + 21260961, + -8424752, + -16831886, + -11920822, + -23677961, + 3968121, + -3651949, + -6215466, + -3556191, + -7913075, + }, + }, + { + FieldElement{ + 16544754, + 13250366, + -16804428, + 15546242, + -4583003, + 12757258, + -2462308, + -8680336, + -18907032, + -9662799, + }, + FieldElement{ + -2415239, + -15577728, + 18312303, + 4964443, + -15272530, + -12653564, + 26820651, + 16690659, + 25459437, + -4564609, + }, + FieldElement{ + -25144690, + 11425020, + 28423002, + -11020557, + -6144921, + -15826224, + 9142795, + 
-2391602, + -6432418, + -1644817, + }, + }, + { + FieldElement{ + -23104652, + 6253476, + 16964147, + -3768872, + -25113972, + -12296437, + -27457225, + -16344658, + 6335692, + 7249989, + }, + FieldElement{ + -30333227, + 13979675, + 7503222, + -12368314, + -11956721, + -4621693, + -30272269, + 2682242, + 25993170, + -12478523, + }, + FieldElement{ + 4364628, + 5930691, + 32304656, + -10044554, + -8054781, + 15091131, + 22857016, + -10598955, + 31820368, + 15075278, + }, + }, + { + FieldElement{ + 31879134, + -8918693, + 17258761, + 90626, + -8041836, + -4917709, + 24162788, + -9650886, + -17970238, + 12833045, + }, + FieldElement{ + 19073683, + 14851414, + -24403169, + -11860168, + 7625278, + 11091125, + -19619190, + 2074449, + -9413939, + 14905377, + }, + FieldElement{ + 24483667, + -11935567, + -2518866, + -11547418, + -1553130, + 15355506, + -25282080, + 9253129, + 27628530, + -7555480, + }, + }, + { + FieldElement{ + 17597607, + 8340603, + 19355617, + 552187, + 26198470, + -3176583, + 4593324, + -9157582, + -14110875, + 15297016, + }, + FieldElement{ + 510886, + 14337390, + -31785257, + 16638632, + 6328095, + 2713355, + -20217417, + -11864220, + 8683221, + 2921426, + }, + FieldElement{ + 18606791, + 11874196, + 27155355, + -5281482, + -24031742, + 6265446, + -25178240, + -1278924, + 4674690, + 13890525, + }, + }, + { + FieldElement{ + 13609624, + 13069022, + -27372361, + -13055908, + 24360586, + 9592974, + 14977157, + 9835105, + 4389687, + 288396, + }, + FieldElement{ + 9922506, + -519394, + 13613107, + 5883594, + -18758345, + -434263, + -12304062, + 8317628, + 23388070, + 16052080, + }, + FieldElement{ + 12720016, + 11937594, + -31970060, + -5028689, + 26900120, + 8561328, + -20155687, + -11632979, + -14754271, + -10812892, + }, + }, + { + FieldElement{ + 15961858, + 14150409, + 26716931, + -665832, + -22794328, + 13603569, + 11829573, + 7467844, + -28822128, + 929275, + }, + FieldElement{ + 11038231, + -11582396, + -27310482, + -7316562, + -10498527, + -16307831, + -23479533, + -9371869, + -21393143, + 2465074, + }, + FieldElement{ + 20017163, + -4323226, + 27915242, + 1529148, + 12396362, + 15675764, + 13817261, + -9658066, + 2463391, + -4622140, + }, + }, + { + FieldElement{ + -16358878, + -12663911, + -12065183, + 4996454, + -1256422, + 1073572, + 9583558, + 12851107, + 4003896, + 12673717, + }, + FieldElement{ + -1731589, + -15155870, + -3262930, + 16143082, + 19294135, + 13385325, + 14741514, + -9103726, + 7903886, + 2348101, + }, + FieldElement{ + 24536016, + -16515207, + 12715592, + -3862155, + 1511293, + 10047386, + -3842346, + -7129159, + -28377538, + 10048127, + }, + }, + }, + { + { + FieldElement{ + -12622226, + -6204820, + 30718825, + 2591312, + -10617028, + 12192840, + 18873298, + -7297090, + -32297756, + 15221632, + }, + FieldElement{ + -26478122, + -11103864, + 11546244, + -1852483, + 9180880, + 7656409, + -21343950, + 2095755, + 29769758, + 6593415, + }, + FieldElement{ + -31994208, + -2907461, + 4176912, + 3264766, + 12538965, + -868111, + 26312345, + -6118678, + 30958054, + 8292160, + }, + }, + { + FieldElement{ + 31429822, + -13959116, + 29173532, + 15632448, + 12174511, + -2760094, + 32808831, + 3977186, + 26143136, + -3148876, + }, + FieldElement{ + 22648901, + 1402143, + -22799984, + 13746059, + 7936347, + 365344, + -8668633, + -1674433, + -3758243, + -2304625, + }, + FieldElement{ + -15491917, + 8012313, + -2514730, + -12702462, + -23965846, + -10254029, + -1612713, + -1535569, + -16664475, + 8194478, + }, + }, + { + FieldElement{ + 27338066, + -7507420, + 
-7414224, + 10140405, + -19026427, + -6589889, + 27277191, + 8855376, + 28572286, + 3005164, + }, + FieldElement{ + 26287124, + 4821776, + 25476601, + -4145903, + -3764513, + -15788984, + -18008582, + 1182479, + -26094821, + -13079595, + }, + FieldElement{ + -7171154, + 3178080, + 23970071, + 6201893, + -17195577, + -4489192, + -21876275, + -13982627, + 32208683, + -1198248, + }, + }, + { + FieldElement{ + -16657702, + 2817643, + -10286362, + 14811298, + 6024667, + 13349505, + -27315504, + -10497842, + -27672585, + -11539858, + }, + FieldElement{ + 15941029, + -9405932, + -21367050, + 8062055, + 31876073, + -238629, + -15278393, + -1444429, + 15397331, + -4130193, + }, + FieldElement{ + 8934485, + -13485467, + -23286397, + -13423241, + -32446090, + 14047986, + 31170398, + -1441021, + -27505566, + 15087184, + }, + }, + { + FieldElement{ + -18357243, + -2156491, + 24524913, + -16677868, + 15520427, + -6360776, + -15502406, + 11461896, + 16788528, + -5868942, + }, + FieldElement{ + -1947386, + 16013773, + 21750665, + 3714552, + -17401782, + -16055433, + -3770287, + -10323320, + 31322514, + -11615635, + }, + FieldElement{ + 21426655, + -5650218, + -13648287, + -5347537, + -28812189, + -4920970, + -18275391, + -14621414, + 13040862, + -12112948, + }, + }, + { + FieldElement{ + 11293895, + 12478086, + -27136401, + 15083750, + -29307421, + 14748872, + 14555558, + -13417103, + 1613711, + 4896935, + }, + FieldElement{ + -25894883, + 15323294, + -8489791, + -8057900, + 25967126, + -13425460, + 2825960, + -4897045, + -23971776, + -11267415, + }, + FieldElement{ + -15924766, + -5229880, + -17443532, + 6410664, + 3622847, + 10243618, + 20615400, + 12405433, + -23753030, + -8436416, + }, + }, + { + FieldElement{ + -7091295, + 12556208, + -20191352, + 9025187, + -17072479, + 4333801, + 4378436, + 2432030, + 23097949, + -566018, + }, + FieldElement{ + 4565804, + -16025654, + 20084412, + -7842817, + 1724999, + 189254, + 24767264, + 10103221, + -18512313, + 2424778, + }, + FieldElement{ + 366633, + -11976806, + 8173090, + -6890119, + 30788634, + 5745705, + -7168678, + 1344109, + -3642553, + 12412659, + }, + }, + { + FieldElement{ + -24001791, + 7690286, + 14929416, + -168257, + -32210835, + -13412986, + 24162697, + -15326504, + -3141501, + 11179385, + }, + FieldElement{ + 18289522, + -14724954, + 8056945, + 16430056, + -21729724, + 7842514, + -6001441, + -1486897, + -18684645, + -11443503, + }, + FieldElement{ + 476239, + 6601091, + -6152790, + -9723375, + 17503545, + -4863900, + 27672959, + 13403813, + 11052904, + 5219329, + }, + }, + }, + { + { + FieldElement{ + 20678546, + -8375738, + -32671898, + 8849123, + -5009758, + 14574752, + 31186971, + -3973730, + 9014762, + -8579056, + }, + FieldElement{ + -13644050, + -10350239, + -15962508, + 5075808, + -1514661, + -11534600, + -33102500, + 9160280, + 8473550, + -3256838, + }, + FieldElement{ + 24900749, + 14435722, + 17209120, + -15292541, + -22592275, + 9878983, + -7689309, + -16335821, + -24568481, + 11788948, + }, + }, + { + FieldElement{ + -3118155, + -11395194, + -13802089, + 14797441, + 9652448, + -6845904, + -20037437, + 10410733, + -24568470, + -1458691, + }, + FieldElement{ + -15659161, + 16736706, + -22467150, + 10215878, + -9097177, + 7563911, + 11871841, + -12505194, + -18513325, + 8464118, + }, + FieldElement{ + -23400612, + 8348507, + -14585951, + -861714, + -3950205, + -6373419, + 14325289, + 8628612, + 33313881, + -8370517, + }, + }, + { + FieldElement{ + -20186973, + -4967935, + 22367356, + 5271547, + -1097117, + -4788838, + -24805667, + 
-10236854, + -8940735, + -5818269, + }, + FieldElement{ + -6948785, + -1795212, + -32625683, + -16021179, + 32635414, + -7374245, + 15989197, + -12838188, + 28358192, + -4253904, + }, + FieldElement{ + -23561781, + -2799059, + -32351682, + -1661963, + -9147719, + 10429267, + -16637684, + 4072016, + -5351664, + 5596589, + }, + }, + { + FieldElement{ + -28236598, + -3390048, + 12312896, + 6213178, + 3117142, + 16078565, + 29266239, + 2557221, + 1768301, + 15373193, + }, + FieldElement{ + -7243358, + -3246960, + -4593467, + -7553353, + -127927, + -912245, + -1090902, + -4504991, + -24660491, + 3442910, + }, + FieldElement{ + -30210571, + 5124043, + 14181784, + 8197961, + 18964734, + -11939093, + 22597931, + 7176455, + -18585478, + 13365930, + }, + }, + { + FieldElement{ + -7877390, + -1499958, + 8324673, + 4690079, + 6261860, + 890446, + 24538107, + -8570186, + -9689599, + -3031667, + }, + FieldElement{ + 25008904, + -10771599, + -4305031, + -9638010, + 16265036, + 15721635, + 683793, + -11823784, + 15723479, + -15163481, + }, + FieldElement{ + -9660625, + 12374379, + -27006999, + -7026148, + -7724114, + -12314514, + 11879682, + 5400171, + 519526, + -1235876, + }, + }, + { + FieldElement{ + 22258397, + -16332233, + -7869817, + 14613016, + -22520255, + -2950923, + -20353881, + 7315967, + 16648397, + 7605640, + }, + FieldElement{ + -8081308, + -8464597, + -8223311, + 9719710, + 19259459, + -15348212, + 23994942, + -5281555, + -9468848, + 4763278, + }, + FieldElement{ + -21699244, + 9220969, + -15730624, + 1084137, + -25476107, + -2852390, + 31088447, + -7764523, + -11356529, + 728112, + }, + }, + { + FieldElement{ + 26047220, + -11751471, + -6900323, + -16521798, + 24092068, + 9158119, + -4273545, + -12555558, + -29365436, + -5498272, + }, + FieldElement{ + 17510331, + -322857, + 5854289, + 8403524, + 17133918, + -3112612, + -28111007, + 12327945, + 10750447, + 10014012, + }, + FieldElement{ + -10312768, + 3936952, + 9156313, + -8897683, + 16498692, + -994647, + -27481051, + -666732, + 3424691, + 7540221, + }, + }, + { + FieldElement{ + 30322361, + -6964110, + 11361005, + -4143317, + 7433304, + 4989748, + -7071422, + -16317219, + -9244265, + 15258046, + }, + FieldElement{ + 13054562, + -2779497, + 19155474, + 469045, + -12482797, + 4566042, + 5631406, + 2711395, + 1062915, + -5136345, + }, + FieldElement{ + -19240248, + -11254599, + -29509029, + -7499965, + -5835763, + 13005411, + -6066489, + 12194497, + 32960380, + 1459310, + }, + }, + }, + { + { + FieldElement{ + 19852034, + 7027924, + 23669353, + 10020366, + 8586503, + -6657907, + 394197, + -6101885, + 18638003, + -11174937, + }, + FieldElement{ + 31395534, + 15098109, + 26581030, + 8030562, + -16527914, + -5007134, + 9012486, + -7584354, + -6643087, + -5442636, + }, + FieldElement{ + -9192165, + -2347377, + -1997099, + 4529534, + 25766844, + 607986, + -13222, + 9677543, + -32294889, + -6456008, + }, + }, + { + FieldElement{ + -2444496, + -149937, + 29348902, + 8186665, + 1873760, + 12489863, + -30934579, + -7839692, + -7852844, + -8138429, + }, + FieldElement{ + -15236356, + -15433509, + 7766470, + 746860, + 26346930, + -10221762, + -27333451, + 10754588, + -9431476, + 5203576, + }, + FieldElement{ + 31834314, + 14135496, + -770007, + 5159118, + 20917671, + -16768096, + -7467973, + -7337524, + 31809243, + 7347066, + }, + }, + { + FieldElement{ + -9606723, + -11874240, + 20414459, + 13033986, + 13716524, + -11691881, + 19797970, + -12211255, + 15192876, + -2087490, + }, + FieldElement{ + -12663563, + -2181719, + 1168162, + -3804809, + 
26747877, + -14138091, + 10609330, + 12694420, + 33473243, + -13382104, + }, + FieldElement{ + 33184999, + 11180355, + 15832085, + -11385430, + -1633671, + 225884, + 15089336, + -11023903, + -6135662, + 14480053, + }, + }, + { + FieldElement{ + 31308717, + -5619998, + 31030840, + -1897099, + 15674547, + -6582883, + 5496208, + 13685227, + 27595050, + 8737275, + }, + FieldElement{ + -20318852, + -15150239, + 10933843, + -16178022, + 8335352, + -7546022, + -31008351, + -12610604, + 26498114, + 66511, + }, + FieldElement{ + 22644454, + -8761729, + -16671776, + 4884562, + -3105614, + -13559366, + 30540766, + -4286747, + -13327787, + -7515095, + }, + }, + { + FieldElement{ + -28017847, + 9834845, + 18617207, + -2681312, + -3401956, + -13307506, + 8205540, + 13585437, + -17127465, + 15115439, + }, + FieldElement{ + 23711543, + -672915, + 31206561, + -8362711, + 6164647, + -9709987, + -33535882, + -1426096, + 8236921, + 16492939, + }, + FieldElement{ + -23910559, + -13515526, + -26299483, + -4503841, + 25005590, + -7687270, + 19574902, + 10071562, + 6708380, + -6222424, + }, + }, + { + FieldElement{ + 2101391, + -4930054, + 19702731, + 2367575, + -15427167, + 1047675, + 5301017, + 9328700, + 29955601, + -11678310, + }, + FieldElement{ + 3096359, + 9271816, + -21620864, + -15521844, + -14847996, + -7592937, + -25892142, + -12635595, + -9917575, + 6216608, + }, + FieldElement{ + -32615849, + 338663, + -25195611, + 2510422, + -29213566, + -13820213, + 24822830, + -6146567, + -26767480, + 7525079, + }, + }, + { + FieldElement{ + -23066649, + -13985623, + 16133487, + -7896178, + -3389565, + 778788, + -910336, + -2782495, + -19386633, + 11994101, + }, + FieldElement{ + 21691500, + -13624626, + -641331, + -14367021, + 3285881, + -3483596, + -25064666, + 9718258, + -7477437, + 13381418, + }, + FieldElement{ + 18445390, + -4202236, + 14979846, + 11622458, + -1727110, + -3582980, + 23111648, + -6375247, + 28535282, + 15779576, + }, + }, + { + FieldElement{ + 30098053, + 3089662, + -9234387, + 16662135, + -21306940, + 11308411, + -14068454, + 12021730, + 9955285, + -16303356, + }, + FieldElement{ + 9734894, + -14576830, + -7473633, + -9138735, + 2060392, + 11313496, + -18426029, + 9924399, + 20194861, + 13380996, + }, + FieldElement{ + -26378102, + -7965207, + -22167821, + 15789297, + -18055342, + -6168792, + -1984914, + 15707771, + 26342023, + 10146099, + }, + }, + }, + { + { + FieldElement{ + -26016874, + -219943, + 21339191, + -41388, + 19745256, + -2878700, + -29637280, + 2227040, + 21612326, + -545728, + }, + FieldElement{ + -13077387, + 1184228, + 23562814, + -5970442, + -20351244, + -6348714, + 25764461, + 12243797, + -20856566, + 11649658, + }, + FieldElement{ + -10031494, + 11262626, + 27384172, + 2271902, + 26947504, + -15997771, + 39944, + 6114064, + 33514190, + 2333242, + }, + }, + { + FieldElement{ + -21433588, + -12421821, + 8119782, + 7219913, + -21830522, + -9016134, + -6679750, + -12670638, + 24350578, + -13450001, + }, + FieldElement{ + -4116307, + -11271533, + -23886186, + 4843615, + -30088339, + 690623, + -31536088, + -10406836, + 8317860, + 12352766, + }, + FieldElement{ + 18200138, + -14475911, + -33087759, + -2696619, + -23702521, + -9102511, + -23552096, + -2287550, + 20712163, + 6719373, + }, + }, + { + FieldElement{ + 26656208, + 6075253, + -7858556, + 1886072, + -28344043, + 4262326, + 11117530, + -3763210, + 26224235, + -3297458, + }, + FieldElement{ + -17168938, + -14854097, + -3395676, + -16369877, + -19954045, + 14050420, + 21728352, + 9493610, + 18620611, + -16428628, + }, + 
FieldElement{ + -13323321, + 13325349, + 11432106, + 5964811, + 18609221, + 6062965, + -5269471, + -9725556, + -30701573, + -16479657, + }, + }, + { + FieldElement{ + -23860538, + -11233159, + 26961357, + 1640861, + -32413112, + -16737940, + 12248509, + -5240639, + 13735342, + 1934062, + }, + FieldElement{ + 25089769, + 6742589, + 17081145, + -13406266, + 21909293, + -16067981, + -15136294, + -3765346, + -21277997, + 5473616, + }, + FieldElement{ + 31883677, + -7961101, + 1083432, + -11572403, + 22828471, + 13290673, + -7125085, + 12469656, + 29111212, + -5451014, + }, + }, + { + FieldElement{ + 24244947, + -15050407, + -26262976, + 2791540, + -14997599, + 16666678, + 24367466, + 6388839, + -10295587, + 452383, + }, + FieldElement{ + -25640782, + -3417841, + 5217916, + 16224624, + 19987036, + -4082269, + -24236251, + -5915248, + 15766062, + 8407814, + }, + FieldElement{ + -20406999, + 13990231, + 15495425, + 16395525, + 5377168, + 15166495, + -8917023, + -4388953, + -8067909, + 2276718, + }, + }, + { + FieldElement{ + 30157918, + 12924066, + -17712050, + 9245753, + 19895028, + 3368142, + -23827587, + 5096219, + 22740376, + -7303417, + }, + FieldElement{ + 2041139, + -14256350, + 7783687, + 13876377, + -25946985, + -13352459, + 24051124, + 13742383, + -15637599, + 13295222, + }, + FieldElement{ + 33338237, + -8505733, + 12532113, + 7977527, + 9106186, + -1715251, + -17720195, + -4612972, + -4451357, + -14669444, + }, + }, + { + FieldElement{ + -20045281, + 5454097, + -14346548, + 6447146, + 28862071, + 1883651, + -2469266, + -4141880, + 7770569, + 9620597, + }, + FieldElement{ + 23208068, + 7979712, + 33071466, + 8149229, + 1758231, + -10834995, + 30945528, + -1694323, + -33502340, + -14767970, + }, + FieldElement{ + 1439958, + -16270480, + -1079989, + -793782, + 4625402, + 10647766, + -5043801, + 1220118, + 30494170, + -11440799, + }, + }, + { + FieldElement{ + -5037580, + -13028295, + -2970559, + -3061767, + 15640974, + -6701666, + -26739026, + 926050, + -1684339, + -13333647, + }, + FieldElement{ + 13908495, + -3549272, + 30919928, + -6273825, + -21521863, + 7989039, + 9021034, + 9078865, + 3353509, + 4033511, + }, + FieldElement{ + -29663431, + -15113610, + 32259991, + -344482, + 24295849, + -12912123, + 23161163, + 8839127, + 27485041, + 7356032, + }, + }, + }, + { + { + FieldElement{ + 9661027, + 705443, + 11980065, + -5370154, + -1628543, + 14661173, + -6346142, + 2625015, + 28431036, + -16771834, + }, + FieldElement{ + -23839233, + -8311415, + -25945511, + 7480958, + -17681669, + -8354183, + -22545972, + 14150565, + 15970762, + 4099461, + }, + FieldElement{ + 29262576, + 16756590, + 26350592, + -8793563, + 8529671, + -11208050, + 13617293, + -9937143, + 11465739, + 8317062, + }, + }, + { + FieldElement{ + -25493081, + -6962928, + 32500200, + -9419051, + -23038724, + -2302222, + 14898637, + 3848455, + 20969334, + -5157516, + }, + FieldElement{ + -20384450, + -14347713, + -18336405, + 13884722, + -33039454, + 2842114, + -21610826, + -3649888, + 11177095, + 14989547, + }, + FieldElement{ + -24496721, + -11716016, + 16959896, + 2278463, + 12066309, + 10137771, + 13515641, + 2581286, + -28487508, + 9930240, + }, + }, + { + FieldElement{ + -17751622, + -2097826, + 16544300, + -13009300, + -15914807, + -14949081, + 18345767, + -13403753, + 16291481, + -5314038, + }, + FieldElement{ + -33229194, + 2553288, + 32678213, + 9875984, + 8534129, + 6889387, + -9676774, + 6957617, + 4368891, + 9788741, + }, + FieldElement{ + 16660756, + 7281060, + -10830758, + 12911820, + 20108584, + -8101676, + 
-21722536, + -8613148, + 16250552, + -11111103, + }, + }, + { + FieldElement{ + -19765507, + 2390526, + -16551031, + 14161980, + 1905286, + 6414907, + 4689584, + 10604807, + -30190403, + 4782747, + }, + FieldElement{ + -1354539, + 14736941, + -7367442, + -13292886, + 7710542, + -14155590, + -9981571, + 4383045, + 22546403, + 437323, + }, + FieldElement{ + 31665577, + -12180464, + -16186830, + 1491339, + -18368625, + 3294682, + 27343084, + 2786261, + -30633590, + -14097016, + }, + }, + { + FieldElement{ + -14467279, + -683715, + -33374107, + 7448552, + 19294360, + 14334329, + -19690631, + 2355319, + -19284671, + -6114373, + }, + FieldElement{ + 15121312, + -15796162, + 6377020, + -6031361, + -10798111, + -12957845, + 18952177, + 15496498, + -29380133, + 11754228, + }, + FieldElement{ + -2637277, + -13483075, + 8488727, + -14303896, + 12728761, + -1622493, + 7141596, + 11724556, + 22761615, + -10134141, + }, + }, + { + FieldElement{ + 16918416, + 11729663, + -18083579, + 3022987, + -31015732, + -13339659, + -28741185, + -12227393, + 32851222, + 11717399, + }, + FieldElement{ + 11166634, + 7338049, + -6722523, + 4531520, + -29468672, + -7302055, + 31474879, + 3483633, + -1193175, + -4030831, + }, + FieldElement{ + -185635, + 9921305, + 31456609, + -13536438, + -12013818, + 13348923, + 33142652, + 6546660, + -19985279, + -3948376, + }, + }, + { + FieldElement{ + -32460596, + 11266712, + -11197107, + -7899103, + 31703694, + 3855903, + -8537131, + -12833048, + -30772034, + -15486313, + }, + FieldElement{ + -18006477, + 12709068, + 3991746, + -6479188, + -21491523, + -10550425, + -31135347, + -16049879, + 10928917, + 3011958, + }, + FieldElement{ + -6957757, + -15594337, + 31696059, + 334240, + 29576716, + 14796075, + -30831056, + -12805180, + 18008031, + 10258577, + }, + }, + { + FieldElement{ + -22448644, + 15655569, + 7018479, + -4410003, + -30314266, + -1201591, + -1853465, + 1367120, + 25127874, + 6671743, + }, + FieldElement{ + 29701166, + -14373934, + -10878120, + 9279288, + -17568, + 13127210, + 21382910, + 11042292, + 25838796, + 4642684, + }, + FieldElement{ + -20430234, + 14955537, + -24126347, + 8124619, + -5369288, + -5990470, + 30468147, + -13900640, + 18423289, + 4177476, + }, + }, + }, +} diff --git a/internal/ed25519/edwards25519/edwards25519.go b/internal/ed25519/edwards25519/edwards25519.go new file mode 100644 index 0000000..4db8b8f --- /dev/null +++ b/internal/ed25519/edwards25519/edwards25519.go @@ -0,0 +1,2308 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package edwards25519 implements operations in GF(2**255-19) and on an +// Edwards curve that is isomorphic to curve25519. See +// http://ed25519.cr.yp.to/. +package edwards25519 + +import "math" + +// This code is a port of the public domain, "ref10" implementation of ed25519 +// from SUPERCOP. + +// FieldElement represents an element of the field GF(2^255 - 19). An element +// t, entries t[0]...t[9], represents the integer t[0]+2^26 t[1]+2^51 t[2]+2^77 +// t[3]+2^102 t[4]+...+2^230 t[9]. Bounds on each t[i] vary depending on +// context. 
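(Editor's aside, not part of the patch.) The doc comment just above describes the unsaturated radix-2^25.5 representation used by the FieldElement type declared immediately below: limb i sits at bit offset 0, 26, 51, 77, 102, 128, 153, 179, 204, 230, alternating 26- and 25-bit limbs. The standalone Go sketch that follows only illustrates that layout; the limbOffsets table and toInt helper are names invented for the sketch and do not exist in this package.

package main

import (
	"fmt"
	"math/big"
)

// limbOffsets[i] is the bit offset of limb i: limbs alternate 26 and 25 bits.
var limbOffsets = [10]uint{0, 26, 51, 77, 102, 128, 153, 179, 204, 230}

// toInt evaluates t[0] + 2^26*t[1] + ... + 2^230*t[9] modulo p = 2^255 - 19.
// Limbs may be negative or un-normalized; the final Mod absorbs that.
func toInt(t [10]int64) *big.Int {
	p := new(big.Int).Sub(new(big.Int).Lsh(big.NewInt(1), 255), big.NewInt(19))
	sum := new(big.Int)
	for i, ti := range t {
		term := new(big.Int).Lsh(big.NewInt(1), limbOffsets[i])
		term.Mul(term, big.NewInt(ti))
		sum.Add(sum, term)
	}
	return sum.Mod(sum, p)
}

func main() {
	one := [10]int64{1}               // what FeOne below produces: the field's 1
	fmt.Println(toInt(one))           // 1
	high := [10]int64{9: 1}           // a single 1 in limb 9 contributes 2^230
	fmt.Println(toInt(high).BitLen()) // 231
}

Run as an ordinary main package it prints 1 and 231, confirming that limb 0 has weight 2^0 and limb 9 has weight 2^230, exactly the packing FeFromBytes and FeToBytes below assume.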
+type FieldElement [10]int32 + +func FeZero(fe *FieldElement) { + for i := range fe { + fe[i] = 0 + } +} + +func FeOne(fe *FieldElement) { + FeZero(fe) + fe[0] = 1 +} + +func FeAdd(dst, a, b *FieldElement) { + for i := range dst { + dst[i] = a[i] + b[i] + } +} + +func FeSub(dst, a, b *FieldElement) { + for i := range dst { + dst[i] = a[i] - b[i] + } +} + +func FeCopy(dst, src *FieldElement) { + for i := range dst { + dst[i] = src[i] + } +} + +// Replace (f,g) with (g,g) if b == 1; + // replace (f,g) with (f,g) if b == 0. + // + // Preconditions: b in {0,1}. +func FeCMove(f, g *FieldElement, b int32) { + var x FieldElement + b = -b + for i := range x { + x[i] = b & (f[i] ^ g[i]) + } + + for i := range f { + f[i] ^= x[i] + } +} + +func load3(in []byte) int64 { + var r int64 + r = int64(in[0]) + r |= int64(in[1]) << 8 + r |= int64(in[2]) << 16 + return r +} + +func load4(in []byte) int64 { + var r int64 + r = int64(in[0]) + r |= int64(in[1]) << 8 + r |= int64(in[2]) << 16 + r |= int64(in[3]) << 24 + return r +} + +func FeFromBytes(dst *FieldElement, src *[32]byte) { + h0 := load4(src[:]) + h1 := load3(src[4:]) << 6 + h2 := load3(src[7:]) << 5 + h3 := load3(src[10:]) << 3 + h4 := load3(src[13:]) << 2 + h5 := load4(src[16:]) + h6 := load3(src[20:]) << 7 + h7 := load3(src[23:]) << 5 + h8 := load3(src[26:]) << 4 + h9 := (load3(src[29:]) & 8388607) << 2 + + var carry [10]int64 + carry[9] = (h9 + 1<<24) >> 25 + h0 += carry[9] * 19 + h9 -= carry[9] << 25 + carry[1] = (h1 + 1<<24) >> 25 + h2 += carry[1] + h1 -= carry[1] << 25 + carry[3] = (h3 + 1<<24) >> 25 + h4 += carry[3] + h3 -= carry[3] << 25 + carry[5] = (h5 + 1<<24) >> 25 + h6 += carry[5] + h5 -= carry[5] << 25 + carry[7] = (h7 + 1<<24) >> 25 + h8 += carry[7] + h7 -= carry[7] << 25 + + carry[0] = (h0 + 1<<25) >> 26 + h1 += carry[0] + h0 -= carry[0] << 26 + carry[2] = (h2 + 1<<25) >> 26 + h3 += carry[2] + h2 -= carry[2] << 26 + carry[4] = (h4 + 1<<25) >> 26 + h5 += carry[4] + h4 -= carry[4] << 26 + carry[6] = (h6 + 1<<25) >> 26 + h7 += carry[6] + h6 -= carry[6] << 26 + carry[8] = (h8 + 1<<25) >> 26 + h9 += carry[8] + h8 -= carry[8] << 26 + + dst[0] = int32(h0) + dst[1] = int32(h1) + dst[2] = int32(h2) + dst[3] = int32(h3) + dst[4] = int32(h4) + dst[5] = int32(h5) + dst[6] = int32(h6) + dst[7] = int32(h7) + dst[8] = int32(h8) + dst[9] = int32(h9) +} + +// FeToBytes marshals h to s. +// Preconditions: +// +// |h| bounded by 1.1*2^25,1.1*2^24,1.1*2^25,1.1*2^24,etc. +// +// Write p=2^255-19; q=floor(h/p). +// Basic claim: q = floor(2^(-255)(h + 19 2^(-25)h9 + 2^(-1))). +// +// Proof: +// +// Have |h|<=p so |q|<=1 so |19^2 2^(-255) q|<1/4. +// Also have |h-2^230 h9|<2^230 so |19 2^(-255)(h-2^230 h9)|<1/4. +// +// Write y=2^(-1)-19^2 2^(-255)q-19 2^(-255)(h-2^230 h9). +// Then 0<y<1. +// +// Write r=h-pq. +// Have 0<=r<=p-1=2^255-20. +// Thus 0<=r+19(2^-255)r<r+19(2^-255)2^255<=2^255-1. +// +// Write x=r+19(2^-255)r+y. +// Then 0<x<2^255 so floor(2^(-255)x) = 0 so floor(q+2^(-255)x) = q. +// +// Have q+2^(-255)x = 2^(-255)(h + 19 2^(-25) h9 + 2^(-1)) +// so floor(2^(-255)(h + 19 2^(-25) h9 + 2^(-1))) = q. +func FeToBytes(s *[32]byte, h *FieldElement) { + var carry [10]int64 + + q := (19*h[9] + (1 << 24)) >> 25 + q = (h[0] + q) >> 26 + q = (h[1] + q) >> 25 + q = (h[2] + q) >> 26 + q = (h[3] + q) >> 25 + q = (h[4] + q) >> 26 + q = (h[5] + q) >> 25 + q = (h[6] + q) >> 26 + q = (h[7] + q) >> 25 + q = (h[8] + q) >> 26 + q = (h[9] + q) >> 25 + + // Goal: Output h-(2^255-19)q, which is between 0 and 2^255-20. + h[0] += 19 * q + // Goal: Output h-2^255 q, which is between 0 and 2^255-20.
+ + carry[0] = h[0] >> 26 + h[1] += carry[0] + h[0] -= carry[0] << 26 + carry[1] = h[1] >> 25 + h[2] += carry[1] + h[1] -= carry[1] << 25 + carry[2] = h[2] >> 26 + h[3] += carry[2] + h[2] -= carry[2] << 26 + carry[3] = h[3] >> 25 + h[4] += carry[3] + h[3] -= carry[3] << 25 + carry[4] = h[4] >> 26 + h[5] += carry[4] + h[4] -= carry[4] << 26 + carry[5] = h[5] >> 25 + h[6] += carry[5] + h[5] -= carry[5] << 25 + carry[6] = h[6] >> 26 + h[7] += carry[6] + h[6] -= carry[6] << 26 + carry[7] = h[7] >> 25 + h[8] += carry[7] + h[7] -= carry[7] << 25 + carry[8] = h[8] >> 26 + h[9] += carry[8] + h[8] -= carry[8] << 26 + carry[9] = h[9] >> 25 + h[9] -= carry[9] << 25 + // h10 = carry9 + + // Goal: Output h[0]+...+2^255 h10-2^255 q, which is between 0 and 2^255-20. + // Have h[0]+...+2^230 h[9] between 0 and 2^255-1; + // evidently 2^255 h10-2^255 q = 0. + // Goal: Output h[0]+...+2^230 h[9]. + + s[0] = byte(h[0] >> 0) + s[1] = byte(h[0] >> 8) + s[2] = byte(h[0] >> 16) + s[3] = byte((h[0] >> 24) | (h[1] << 2)) + s[4] = byte(h[1] >> 6) + s[5] = byte(h[1] >> 14) + s[6] = byte((h[1] >> 22) | (h[2] << 3)) + s[7] = byte(h[2] >> 5) + s[8] = byte(h[2] >> 13) + s[9] = byte((h[2] >> 21) | (h[3] << 5)) + s[10] = byte(h[3] >> 3) + s[11] = byte(h[3] >> 11) + s[12] = byte((h[3] >> 19) | (h[4] << 6)) + s[13] = byte(h[4] >> 2) + s[14] = byte(h[4] >> 10) + s[15] = byte(h[4] >> 18) + s[16] = byte(h[5] >> 0) + s[17] = byte(h[5] >> 8) + s[18] = byte(h[5] >> 16) + s[19] = byte((h[5] >> 24) | (h[6] << 1)) + s[20] = byte(h[6] >> 7) + s[21] = byte(h[6] >> 15) + s[22] = byte((h[6] >> 23) | (h[7] << 3)) + s[23] = byte(h[7] >> 5) + s[24] = byte(h[7] >> 13) + s[25] = byte((h[7] >> 21) | (h[8] << 4)) + s[26] = byte(h[8] >> 4) + s[27] = byte(h[8] >> 12) + s[28] = byte((h[8] >> 20) | (h[9] << 6)) + s[29] = byte(h[9] >> 2) + s[30] = byte(h[9] >> 10) + s[31] = byte(h[9] >> 18) +} + +func FeIsNegative(f *FieldElement) byte { + var s [32]byte + FeToBytes(&s, f) + return s[0] & 1 +} + +func FeIsNonZero(f *FieldElement) int32 { + var s [32]byte + FeToBytes(&s, f) + var x uint8 + for _, b := range s { + x |= b + } + x |= x >> 4 + x |= x >> 2 + x |= x >> 1 + return int32(x & 1) +} + +// FeNeg sets h = -f +// +// Preconditions: +// +// |f| bounded by 1.1*2^25,1.1*2^24,1.1*2^25,1.1*2^24,etc. +// +// Postconditions: +// +// |h| bounded by 1.1*2^25,1.1*2^24,1.1*2^25,1.1*2^24,etc. +func FeNeg(h, f *FieldElement) { + for i := range h { + h[i] = -f[i] + } +} + +// FeMul calculates h = f * g +// Can overlap h with f or g. +// +// Preconditions: +// +// |f| bounded by 1.1*2^26,1.1*2^25,1.1*2^26,1.1*2^25,etc. +// |g| bounded by 1.1*2^26,1.1*2^25,1.1*2^26,1.1*2^25,etc. +// +// Postconditions: +// +// |h| bounded by 1.1*2^25,1.1*2^24,1.1*2^25,1.1*2^24,etc. +// +// Notes on implementation strategy: +// +// Using schoolbook multiplication. +// Karatsuba would save a little in some cost models. +// +// Most multiplications by 2 and 19 are 32-bit precomputations; +// cheaper than 64-bit postcomputations. +// +// There is one remaining multiplication by 19 in the carry chain; +// one *19 precomputation can be merged into this, +// but the resulting data flow is considerably less clean. +// +// There are 12 carries below. +// 10 of them are 2-way parallelizable and vectorizable. +// Can get away with 11 carries, but then data flow is much deeper. +// +// With tighter constraints on inputs can squeeze carries into int32. 
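(Editor's aside, not part of the patch.) The FeMul notes above hinge on one reduction identity behind every *_19 precomputation: with p = 2^255 - 19 we have 2^255 mod p = 19, so any partial product whose limb offsets sum past 255 bits folds back into the low limbs multiplied by 19. The hedged standalone sketch below (math/big only; nothing in it comes from this package) checks that identity and one concrete case: a limb-9 by limb-2 product (offsets 230 and 51) lands at 2^281 and reduces to 19 * 2^26, which is why the f9g2_19 term is added into h1 in FeMul just below.

package main

import (
	"fmt"
	"math/big"
)

func main() {
	one := big.NewInt(1)
	p := new(big.Int).Sub(new(big.Int).Lsh(one, 255), big.NewInt(19)) // p = 2^255 - 19

	// 2^255 mod p == 19: the wrap-around factor behind every *_19 value.
	wrap := new(big.Int).Mod(new(big.Int).Lsh(one, 255), p)
	fmt.Println(wrap) // 19

	// (2^230 * 2^51) mod p == 19 * 2^26: a limb-9 x limb-2 partial product
	// reappears 19-fold at limb 1's offset (2^26) after reduction.
	prod := new(big.Int).Mod(new(big.Int).Lsh(one, 230+51), p)
	want := new(big.Int).Lsh(big.NewInt(19), 26)
	fmt.Println(prod.Cmp(want) == 0) // true
}

The same folding explains the lone `h0 += carry[9] * 19` step in the carry chains here and in FeFromBytes above: a carry out of limb 9 crosses the 2^255 boundary and re-enters at limb 0 scaled by 19.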
+func FeMul(h, f, g *FieldElement) { + f0 := f[0] + f1 := f[1] + f2 := f[2] + f3 := f[3] + f4 := f[4] + f5 := f[5] + f6 := f[6] + f7 := f[7] + f8 := f[8] + f9 := f[9] + g0 := g[0] + g1 := g[1] + g2 := g[2] + g3 := g[3] + g4 := g[4] + g5 := g[5] + g6 := g[6] + g7 := g[7] + g8 := g[8] + g9 := g[9] + g1_19 := 19 * g1 /* 1.4*2^29 */ + g2_19 := 19 * g2 /* 1.4*2^30; still ok */ + g3_19 := 19 * g3 + g4_19 := 19 * g4 + g5_19 := 19 * g5 + g6_19 := 19 * g6 + g7_19 := 19 * g7 + g8_19 := 19 * g8 + g9_19 := 19 * g9 + f1_2 := 2 * f1 + f3_2 := 2 * f3 + f5_2 := 2 * f5 + f7_2 := 2 * f7 + f9_2 := 2 * f9 + f0g0 := int64(f0) * int64(g0) + f0g1 := int64(f0) * int64(g1) + f0g2 := int64(f0) * int64(g2) + f0g3 := int64(f0) * int64(g3) + f0g4 := int64(f0) * int64(g4) + f0g5 := int64(f0) * int64(g5) + f0g6 := int64(f0) * int64(g6) + f0g7 := int64(f0) * int64(g7) + f0g8 := int64(f0) * int64(g8) + f0g9 := int64(f0) * int64(g9) + f1g0 := int64(f1) * int64(g0) + f1g1_2 := int64(f1_2) * int64(g1) + f1g2 := int64(f1) * int64(g2) + f1g3_2 := int64(f1_2) * int64(g3) + f1g4 := int64(f1) * int64(g4) + f1g5_2 := int64(f1_2) * int64(g5) + f1g6 := int64(f1) * int64(g6) + f1g7_2 := int64(f1_2) * int64(g7) + f1g8 := int64(f1) * int64(g8) + f1g9_38 := int64(f1_2) * int64(g9_19) + f2g0 := int64(f2) * int64(g0) + f2g1 := int64(f2) * int64(g1) + f2g2 := int64(f2) * int64(g2) + f2g3 := int64(f2) * int64(g3) + f2g4 := int64(f2) * int64(g4) + f2g5 := int64(f2) * int64(g5) + f2g6 := int64(f2) * int64(g6) + f2g7 := int64(f2) * int64(g7) + f2g8_19 := int64(f2) * int64(g8_19) + f2g9_19 := int64(f2) * int64(g9_19) + f3g0 := int64(f3) * int64(g0) + f3g1_2 := int64(f3_2) * int64(g1) + f3g2 := int64(f3) * int64(g2) + f3g3_2 := int64(f3_2) * int64(g3) + f3g4 := int64(f3) * int64(g4) + f3g5_2 := int64(f3_2) * int64(g5) + f3g6 := int64(f3) * int64(g6) + f3g7_38 := int64(f3_2) * int64(g7_19) + f3g8_19 := int64(f3) * int64(g8_19) + f3g9_38 := int64(f3_2) * int64(g9_19) + f4g0 := int64(f4) * int64(g0) + f4g1 := int64(f4) * int64(g1) + f4g2 := int64(f4) * int64(g2) + f4g3 := int64(f4) * int64(g3) + f4g4 := int64(f4) * int64(g4) + f4g5 := int64(f4) * int64(g5) + f4g6_19 := int64(f4) * int64(g6_19) + f4g7_19 := int64(f4) * int64(g7_19) + f4g8_19 := int64(f4) * int64(g8_19) + f4g9_19 := int64(f4) * int64(g9_19) + f5g0 := int64(f5) * int64(g0) + f5g1_2 := int64(f5_2) * int64(g1) + f5g2 := int64(f5) * int64(g2) + f5g3_2 := int64(f5_2) * int64(g3) + f5g4 := int64(f5) * int64(g4) + f5g5_38 := int64(f5_2) * int64(g5_19) + f5g6_19 := int64(f5) * int64(g6_19) + f5g7_38 := int64(f5_2) * int64(g7_19) + f5g8_19 := int64(f5) * int64(g8_19) + f5g9_38 := int64(f5_2) * int64(g9_19) + f6g0 := int64(f6) * int64(g0) + f6g1 := int64(f6) * int64(g1) + f6g2 := int64(f6) * int64(g2) + f6g3 := int64(f6) * int64(g3) + f6g4_19 := int64(f6) * int64(g4_19) + f6g5_19 := int64(f6) * int64(g5_19) + f6g6_19 := int64(f6) * int64(g6_19) + f6g7_19 := int64(f6) * int64(g7_19) + f6g8_19 := int64(f6) * int64(g8_19) + f6g9_19 := int64(f6) * int64(g9_19) + f7g0 := int64(f7) * int64(g0) + f7g1_2 := int64(f7_2) * int64(g1) + f7g2 := int64(f7) * int64(g2) + f7g3_38 := int64(f7_2) * int64(g3_19) + f7g4_19 := int64(f7) * int64(g4_19) + f7g5_38 := int64(f7_2) * int64(g5_19) + f7g6_19 := int64(f7) * int64(g6_19) + f7g7_38 := int64(f7_2) * int64(g7_19) + f7g8_19 := int64(f7) * int64(g8_19) + f7g9_38 := int64(f7_2) * int64(g9_19) + f8g0 := int64(f8) * int64(g0) + f8g1 := int64(f8) * int64(g1) + f8g2_19 := int64(f8) * int64(g2_19) + f8g3_19 := int64(f8) * int64(g3_19) + f8g4_19 := int64(f8) * 
int64(g4_19) + f8g5_19 := int64(f8) * int64(g5_19) + f8g6_19 := int64(f8) * int64(g6_19) + f8g7_19 := int64(f8) * int64(g7_19) + f8g8_19 := int64(f8) * int64(g8_19) + f8g9_19 := int64(f8) * int64(g9_19) + f9g0 := int64(f9) * int64(g0) + f9g1_38 := int64(f9_2) * int64(g1_19) + f9g2_19 := int64(f9) * int64(g2_19) + f9g3_38 := int64(f9_2) * int64(g3_19) + f9g4_19 := int64(f9) * int64(g4_19) + f9g5_38 := int64(f9_2) * int64(g5_19) + f9g6_19 := int64(f9) * int64(g6_19) + f9g7_38 := int64(f9_2) * int64(g7_19) + f9g8_19 := int64(f9) * int64(g8_19) + f9g9_38 := int64(f9_2) * int64(g9_19) + h0 := f0g0 + f1g9_38 + f2g8_19 + f3g7_38 + f4g6_19 + f5g5_38 + f6g4_19 + f7g3_38 + f8g2_19 + f9g1_38 + h1 := f0g1 + f1g0 + f2g9_19 + f3g8_19 + f4g7_19 + f5g6_19 + f6g5_19 + f7g4_19 + f8g3_19 + f9g2_19 + h2 := f0g2 + f1g1_2 + f2g0 + f3g9_38 + f4g8_19 + f5g7_38 + f6g6_19 + f7g5_38 + f8g4_19 + f9g3_38 + h3 := f0g3 + f1g2 + f2g1 + f3g0 + f4g9_19 + f5g8_19 + f6g7_19 + f7g6_19 + f8g5_19 + f9g4_19 + h4 := f0g4 + f1g3_2 + f2g2 + f3g1_2 + f4g0 + f5g9_38 + f6g8_19 + f7g7_38 + f8g6_19 + f9g5_38 + h5 := f0g5 + f1g4 + f2g3 + f3g2 + f4g1 + f5g0 + f6g9_19 + f7g8_19 + f8g7_19 + f9g6_19 + h6 := f0g6 + f1g5_2 + f2g4 + f3g3_2 + f4g2 + f5g1_2 + f6g0 + f7g9_38 + f8g8_19 + f9g7_38 + h7 := f0g7 + f1g6 + f2g5 + f3g4 + f4g3 + f5g2 + f6g1 + f7g0 + f8g9_19 + f9g8_19 + h8 := f0g8 + f1g7_2 + f2g6 + f3g5_2 + f4g4 + f5g3_2 + f6g2 + f7g1_2 + f8g0 + f9g9_38 + h9 := f0g9 + f1g8 + f2g7 + f3g6 + f4g5 + f5g4 + f6g3 + f7g2 + f8g1 + f9g0 + var carry [10]int64 + + /* + |h0| <= (1.1*1.1*2^52*(1+19+19+19+19)+1.1*1.1*2^50*(38+38+38+38+38)) + i.e. |h0| <= 1.2*2^59; narrower ranges for h2, h4, h6, h8 + |h1| <= (1.1*1.1*2^51*(1+1+19+19+19+19+19+19+19+19)) + i.e. |h1| <= 1.5*2^58; narrower ranges for h3, h5, h7, h9 + */ + + carry[0] = (h0 + (1 << 25)) >> 26 + h1 += carry[0] + h0 -= carry[0] << 26 + carry[4] = (h4 + (1 << 25)) >> 26 + h5 += carry[4] + h4 -= carry[4] << 26 + /* |h0| <= 2^25 */ + /* |h4| <= 2^25 */ + /* |h1| <= 1.51*2^58 */ + /* |h5| <= 1.51*2^58 */ + + carry[1] = (h1 + (1 << 24)) >> 25 + h2 += carry[1] + h1 -= carry[1] << 25 + carry[5] = (h5 + (1 << 24)) >> 25 + h6 += carry[5] + h5 -= carry[5] << 25 + /* |h1| <= 2^24; from now on fits into int32 */ + /* |h5| <= 2^24; from now on fits into int32 */ + /* |h2| <= 1.21*2^59 */ + /* |h6| <= 1.21*2^59 */ + + carry[2] = (h2 + (1 << 25)) >> 26 + h3 += carry[2] + h2 -= carry[2] << 26 + carry[6] = (h6 + (1 << 25)) >> 26 + h7 += carry[6] + h6 -= carry[6] << 26 + /* |h2| <= 2^25; from now on fits into int32 unchanged */ + /* |h6| <= 2^25; from now on fits into int32 unchanged */ + /* |h3| <= 1.51*2^58 */ + /* |h7| <= 1.51*2^58 */ + + carry[3] = (h3 + (1 << 24)) >> 25 + h4 += carry[3] + h3 -= carry[3] << 25 + carry[7] = (h7 + (1 << 24)) >> 25 + h8 += carry[7] + h7 -= carry[7] << 25 + /* |h3| <= 2^24; from now on fits into int32 unchanged */ + /* |h7| <= 2^24; from now on fits into int32 unchanged */ + /* |h4| <= 1.52*2^33 */ + /* |h8| <= 1.52*2^33 */ + + carry[4] = (h4 + (1 << 25)) >> 26 + h5 += carry[4] + h4 -= carry[4] << 26 + carry[8] = (h8 + (1 << 25)) >> 26 + h9 += carry[8] + h8 -= carry[8] << 26 + /* |h4| <= 2^25; from now on fits into int32 unchanged */ + /* |h8| <= 2^25; from now on fits into int32 unchanged */ + /* |h5| <= 1.01*2^24 */ + /* |h9| <= 1.51*2^58 */ + + carry[9] = (h9 + (1 << 24)) >> 25 + h0 += carry[9] * 19 + h9 -= carry[9] << 25 + /* |h9| <= 2^24; from now on fits into int32 unchanged */ + /* |h0| <= 1.8*2^37 */ + + carry[0] = (h0 + (1 << 25)) >> 26 + h1 += carry[0] + h0 -= carry[0] 
<< 26 + /* |h0| <= 2^25; from now on fits into int32 unchanged */ + /* |h1| <= 1.01*2^24 */ + + h[0] = int32(h0) + h[1] = int32(h1) + h[2] = int32(h2) + h[3] = int32(h3) + h[4] = int32(h4) + h[5] = int32(h5) + h[6] = int32(h6) + h[7] = int32(h7) + h[8] = int32(h8) + h[9] = int32(h9) +} + +// FeSquare calculates h = f*f. Can overlap h with f. +// +// Preconditions: +// +// |f| bounded by 1.1*2^26,1.1*2^25,1.1*2^26,1.1*2^25,etc. +// +// Postconditions: +// +// |h| bounded by 1.1*2^25,1.1*2^24,1.1*2^25,1.1*2^24,etc. +func FeSquare(h, f *FieldElement) { + f0 := f[0] + f1 := f[1] + f2 := f[2] + f3 := f[3] + f4 := f[4] + f5 := f[5] + f6 := f[6] + f7 := f[7] + f8 := f[8] + f9 := f[9] + f0_2 := 2 * f0 + f1_2 := 2 * f1 + f2_2 := 2 * f2 + f3_2 := 2 * f3 + f4_2 := 2 * f4 + f5_2 := 2 * f5 + f6_2 := 2 * f6 + f7_2 := 2 * f7 + f5_38 := 38 * f5 // 1.31*2^30 + f6_19 := 19 * f6 // 1.31*2^30 + f7_38 := 38 * f7 // 1.31*2^30 + f8_19 := 19 * f8 // 1.31*2^30 + f9_38 := 38 * f9 // 1.31*2^30 + f0f0 := int64(f0) * int64(f0) + f0f1_2 := int64(f0_2) * int64(f1) + f0f2_2 := int64(f0_2) * int64(f2) + f0f3_2 := int64(f0_2) * int64(f3) + f0f4_2 := int64(f0_2) * int64(f4) + f0f5_2 := int64(f0_2) * int64(f5) + f0f6_2 := int64(f0_2) * int64(f6) + f0f7_2 := int64(f0_2) * int64(f7) + f0f8_2 := int64(f0_2) * int64(f8) + f0f9_2 := int64(f0_2) * int64(f9) + f1f1_2 := int64(f1_2) * int64(f1) + f1f2_2 := int64(f1_2) * int64(f2) + f1f3_4 := int64(f1_2) * int64(f3_2) + f1f4_2 := int64(f1_2) * int64(f4) + f1f5_4 := int64(f1_2) * int64(f5_2) + f1f6_2 := int64(f1_2) * int64(f6) + f1f7_4 := int64(f1_2) * int64(f7_2) + f1f8_2 := int64(f1_2) * int64(f8) + f1f9_76 := int64(f1_2) * int64(f9_38) + f2f2 := int64(f2) * int64(f2) + f2f3_2 := int64(f2_2) * int64(f3) + f2f4_2 := int64(f2_2) * int64(f4) + f2f5_2 := int64(f2_2) * int64(f5) + f2f6_2 := int64(f2_2) * int64(f6) + f2f7_2 := int64(f2_2) * int64(f7) + f2f8_38 := int64(f2_2) * int64(f8_19) + f2f9_38 := int64(f2) * int64(f9_38) + f3f3_2 := int64(f3_2) * int64(f3) + f3f4_2 := int64(f3_2) * int64(f4) + f3f5_4 := int64(f3_2) * int64(f5_2) + f3f6_2 := int64(f3_2) * int64(f6) + f3f7_76 := int64(f3_2) * int64(f7_38) + f3f8_38 := int64(f3_2) * int64(f8_19) + f3f9_76 := int64(f3_2) * int64(f9_38) + f4f4 := int64(f4) * int64(f4) + f4f5_2 := int64(f4_2) * int64(f5) + f4f6_38 := int64(f4_2) * int64(f6_19) + f4f7_38 := int64(f4) * int64(f7_38) + f4f8_38 := int64(f4_2) * int64(f8_19) + f4f9_38 := int64(f4) * int64(f9_38) + f5f5_38 := int64(f5) * int64(f5_38) + f5f6_38 := int64(f5_2) * int64(f6_19) + f5f7_76 := int64(f5_2) * int64(f7_38) + f5f8_38 := int64(f5_2) * int64(f8_19) + f5f9_76 := int64(f5_2) * int64(f9_38) + f6f6_19 := int64(f6) * int64(f6_19) + f6f7_38 := int64(f6) * int64(f7_38) + f6f8_38 := int64(f6_2) * int64(f8_19) + f6f9_38 := int64(f6) * int64(f9_38) + f7f7_38 := int64(f7) * int64(f7_38) + f7f8_38 := int64(f7_2) * int64(f8_19) + f7f9_76 := int64(f7_2) * int64(f9_38) + f8f8_19 := int64(f8) * int64(f8_19) + f8f9_38 := int64(f8) * int64(f9_38) + f9f9_38 := int64(f9) * int64(f9_38) + h0 := f0f0 + f1f9_76 + f2f8_38 + f3f7_76 + f4f6_38 + f5f5_38 + h1 := f0f1_2 + f2f9_38 + f3f8_38 + f4f7_38 + f5f6_38 + h2 := f0f2_2 + f1f1_2 + f3f9_76 + f4f8_38 + f5f7_76 + f6f6_19 + h3 := f0f3_2 + f1f2_2 + f4f9_38 + f5f8_38 + f6f7_38 + h4 := f0f4_2 + f1f3_4 + f2f2 + f5f9_76 + f6f8_38 + f7f7_38 + h5 := f0f5_2 + f1f4_2 + f2f3_2 + f6f9_38 + f7f8_38 + h6 := f0f6_2 + f1f5_4 + f2f4_2 + f3f3_2 + f7f9_76 + f8f8_19 + h7 := f0f7_2 + f1f6_2 + f2f5_2 + f3f4_2 + f8f9_38 + h8 := f0f8_2 + f1f7_4 + f2f6_2 + f3f5_4 + f4f4 + 
f9f9_38 + h9 := f0f9_2 + f1f8_2 + f2f7_2 + f3f6_2 + f4f5_2 + var carry [10]int64 + + carry[0] = (h0 + (1 << 25)) >> 26 + h1 += carry[0] + h0 -= carry[0] << 26 + carry[4] = (h4 + (1 << 25)) >> 26 + h5 += carry[4] + h4 -= carry[4] << 26 + + carry[1] = (h1 + (1 << 24)) >> 25 + h2 += carry[1] + h1 -= carry[1] << 25 + carry[5] = (h5 + (1 << 24)) >> 25 + h6 += carry[5] + h5 -= carry[5] << 25 + + carry[2] = (h2 + (1 << 25)) >> 26 + h3 += carry[2] + h2 -= carry[2] << 26 + carry[6] = (h6 + (1 << 25)) >> 26 + h7 += carry[6] + h6 -= carry[6] << 26 + + carry[3] = (h3 + (1 << 24)) >> 25 + h4 += carry[3] + h3 -= carry[3] << 25 + carry[7] = (h7 + (1 << 24)) >> 25 + h8 += carry[7] + h7 -= carry[7] << 25 + + carry[4] = (h4 + (1 << 25)) >> 26 + h5 += carry[4] + h4 -= carry[4] << 26 + carry[8] = (h8 + (1 << 25)) >> 26 + h9 += carry[8] + h8 -= carry[8] << 26 + + carry[9] = (h9 + (1 << 24)) >> 25 + h0 += carry[9] * 19 + h9 -= carry[9] << 25 + + carry[0] = (h0 + (1 << 25)) >> 26 + h1 += carry[0] + h0 -= carry[0] << 26 + + h[0] = int32(h0) + h[1] = int32(h1) + h[2] = int32(h2) + h[3] = int32(h3) + h[4] = int32(h4) + h[5] = int32(h5) + h[6] = int32(h6) + h[7] = int32(h7) + h[8] = int32(h8) + h[9] = int32(h9) +} + +// FeSquare2 sets h = 2 * f * f +// +// Can overlap h with f. +// +// Preconditions: +// +// |f| bounded by 1.65*2^26,1.65*2^25,1.65*2^26,1.65*2^25,etc. +// +// Postconditions: +// +// |h| bounded by 1.01*2^25,1.01*2^24,1.01*2^25,1.01*2^24,etc. +// +// See fe_mul.c for discussion of implementation strategy. +func FeSquare2(h, f *FieldElement) { + f0 := f[0] + f1 := f[1] + f2 := f[2] + f3 := f[3] + f4 := f[4] + f5 := f[5] + f6 := f[6] + f7 := f[7] + f8 := f[8] + f9 := f[9] + f0_2 := 2 * f0 + f1_2 := 2 * f1 + f2_2 := 2 * f2 + f3_2 := 2 * f3 + f4_2 := 2 * f4 + f5_2 := 2 * f5 + f6_2 := 2 * f6 + f7_2 := 2 * f7 + f5_38 := 38 * f5 // 1.959375*2^30 + f6_19 := 19 * f6 // 1.959375*2^30 + f7_38 := 38 * f7 // 1.959375*2^30 + f8_19 := 19 * f8 // 1.959375*2^30 + f9_38 := 38 * f9 // 1.959375*2^30 + f0f0 := int64(f0) * int64(f0) + f0f1_2 := int64(f0_2) * int64(f1) + f0f2_2 := int64(f0_2) * int64(f2) + f0f3_2 := int64(f0_2) * int64(f3) + f0f4_2 := int64(f0_2) * int64(f4) + f0f5_2 := int64(f0_2) * int64(f5) + f0f6_2 := int64(f0_2) * int64(f6) + f0f7_2 := int64(f0_2) * int64(f7) + f0f8_2 := int64(f0_2) * int64(f8) + f0f9_2 := int64(f0_2) * int64(f9) + f1f1_2 := int64(f1_2) * int64(f1) + f1f2_2 := int64(f1_2) * int64(f2) + f1f3_4 := int64(f1_2) * int64(f3_2) + f1f4_2 := int64(f1_2) * int64(f4) + f1f5_4 := int64(f1_2) * int64(f5_2) + f1f6_2 := int64(f1_2) * int64(f6) + f1f7_4 := int64(f1_2) * int64(f7_2) + f1f8_2 := int64(f1_2) * int64(f8) + f1f9_76 := int64(f1_2) * int64(f9_38) + f2f2 := int64(f2) * int64(f2) + f2f3_2 := int64(f2_2) * int64(f3) + f2f4_2 := int64(f2_2) * int64(f4) + f2f5_2 := int64(f2_2) * int64(f5) + f2f6_2 := int64(f2_2) * int64(f6) + f2f7_2 := int64(f2_2) * int64(f7) + f2f8_38 := int64(f2_2) * int64(f8_19) + f2f9_38 := int64(f2) * int64(f9_38) + f3f3_2 := int64(f3_2) * int64(f3) + f3f4_2 := int64(f3_2) * int64(f4) + f3f5_4 := int64(f3_2) * int64(f5_2) + f3f6_2 := int64(f3_2) * int64(f6) + f3f7_76 := int64(f3_2) * int64(f7_38) + f3f8_38 := int64(f3_2) * int64(f8_19) + f3f9_76 := int64(f3_2) * int64(f9_38) + f4f4 := int64(f4) * int64(f4) + f4f5_2 := int64(f4_2) * int64(f5) + f4f6_38 := int64(f4_2) * int64(f6_19) + f4f7_38 := int64(f4) * int64(f7_38) + f4f8_38 := int64(f4_2) * int64(f8_19) + f4f9_38 := int64(f4) * int64(f9_38) + f5f5_38 := int64(f5) * int64(f5_38) + f5f6_38 := int64(f5_2) * int64(f6_19) + 
f5f7_76 := int64(f5_2) * int64(f7_38) + f5f8_38 := int64(f5_2) * int64(f8_19) + f5f9_76 := int64(f5_2) * int64(f9_38) + f6f6_19 := int64(f6) * int64(f6_19) + f6f7_38 := int64(f6) * int64(f7_38) + f6f8_38 := int64(f6_2) * int64(f8_19) + f6f9_38 := int64(f6) * int64(f9_38) + f7f7_38 := int64(f7) * int64(f7_38) + f7f8_38 := int64(f7_2) * int64(f8_19) + f7f9_76 := int64(f7_2) * int64(f9_38) + f8f8_19 := int64(f8) * int64(f8_19) + f8f9_38 := int64(f8) * int64(f9_38) + f9f9_38 := int64(f9) * int64(f9_38) + h0 := f0f0 + f1f9_76 + f2f8_38 + f3f7_76 + f4f6_38 + f5f5_38 + h1 := f0f1_2 + f2f9_38 + f3f8_38 + f4f7_38 + f5f6_38 + h2 := f0f2_2 + f1f1_2 + f3f9_76 + f4f8_38 + f5f7_76 + f6f6_19 + h3 := f0f3_2 + f1f2_2 + f4f9_38 + f5f8_38 + f6f7_38 + h4 := f0f4_2 + f1f3_4 + f2f2 + f5f9_76 + f6f8_38 + f7f7_38 + h5 := f0f5_2 + f1f4_2 + f2f3_2 + f6f9_38 + f7f8_38 + h6 := f0f6_2 + f1f5_4 + f2f4_2 + f3f3_2 + f7f9_76 + f8f8_19 + h7 := f0f7_2 + f1f6_2 + f2f5_2 + f3f4_2 + f8f9_38 + h8 := f0f8_2 + f1f7_4 + f2f6_2 + f3f5_4 + f4f4 + f9f9_38 + h9 := f0f9_2 + f1f8_2 + f2f7_2 + f3f6_2 + f4f5_2 + var carry [10]int64 + + h0 += h0 + h1 += h1 + h2 += h2 + h3 += h3 + h4 += h4 + h5 += h5 + h6 += h6 + h7 += h7 + h8 += h8 + h9 += h9 + + carry[0] = (h0 + (1 << 25)) >> 26 + h1 += carry[0] + h0 -= carry[0] << 26 + carry[4] = (h4 + (1 << 25)) >> 26 + h5 += carry[4] + h4 -= carry[4] << 26 + + carry[1] = (h1 + (1 << 24)) >> 25 + h2 += carry[1] + h1 -= carry[1] << 25 + carry[5] = (h5 + (1 << 24)) >> 25 + h6 += carry[5] + h5 -= carry[5] << 25 + + carry[2] = (h2 + (1 << 25)) >> 26 + h3 += carry[2] + h2 -= carry[2] << 26 + carry[6] = (h6 + (1 << 25)) >> 26 + h7 += carry[6] + h6 -= carry[6] << 26 + + carry[3] = (h3 + (1 << 24)) >> 25 + h4 += carry[3] + h3 -= carry[3] << 25 + carry[7] = (h7 + (1 << 24)) >> 25 + h8 += carry[7] + h7 -= carry[7] << 25 + + carry[4] = (h4 + (1 << 25)) >> 26 + h5 += carry[4] + h4 -= carry[4] << 26 + carry[8] = (h8 + (1 << 25)) >> 26 + h9 += carry[8] + h8 -= carry[8] << 26 + + carry[9] = (h9 + (1 << 24)) >> 25 + h0 += carry[9] * 19 + h9 -= carry[9] << 25 + + carry[0] = (h0 + (1 << 25)) >> 26 + h1 += carry[0] + h0 -= carry[0] << 26 + + h[0] = int32(h0) + h[1] = int32(h1) + h[2] = int32(h2) + h[3] = int32(h3) + h[4] = int32(h4) + h[5] = int32(h5) + h[6] = int32(h6) + h[7] = int32(h7) + h[8] = int32(h8) + h[9] = int32(h9) +} + +func FeInvert(out, z *FieldElement) { + var t0, t1, t2, t3 FieldElement + var i int + + FeSquare(&t0, z) // 2^1 + FeSquare(&t1, &t0) // 2^2 + for i = 1; i < 2; i++ { // 2^3 + FeSquare(&t1, &t1) + } + FeMul(&t1, z, &t1) // 2^3 + 2^0 + FeMul(&t0, &t0, &t1) // 2^3 + 2^1 + 2^0 + FeSquare(&t2, &t0) // 2^4 + 2^2 + 2^1 + FeMul(&t1, &t1, &t2) // 2^4 + 2^3 + 2^2 + 2^1 + 2^0 + FeSquare(&t2, &t1) // 5,4,3,2,1 + for i = 1; i < 5; i++ { // 9,8,7,6,5 + FeSquare(&t2, &t2) + } + FeMul(&t1, &t2, &t1) // 9,8,7,6,5,4,3,2,1,0 + FeSquare(&t2, &t1) // 10..1 + for i = 1; i < 10; i++ { // 19..10 + FeSquare(&t2, &t2) + } + FeMul(&t2, &t2, &t1) // 19..0 + FeSquare(&t3, &t2) // 20..1 + for i = 1; i < 20; i++ { // 39..20 + FeSquare(&t3, &t3) + } + FeMul(&t2, &t3, &t2) // 39..0 + FeSquare(&t2, &t2) // 40..1 + for i = 1; i < 10; i++ { // 49..10 + FeSquare(&t2, &t2) + } + FeMul(&t1, &t2, &t1) // 49..0 + FeSquare(&t2, &t1) // 50..1 + for i = 1; i < 50; i++ { // 99..50 + FeSquare(&t2, &t2) + } + FeMul(&t2, &t2, &t1) // 99..0 + FeSquare(&t3, &t2) // 100..1 + for i = 1; i < 100; i++ { // 199..100 + FeSquare(&t3, &t3) + } + FeMul(&t2, &t3, &t2) // 199..0 + FeSquare(&t2, &t2) // 200..1 + for i = 1; i < 50; i++ { // 249..50 + 
FeSquare(&t2, &t2) + } + FeMul(&t1, &t2, &t1) // 249..0 + FeSquare(&t1, &t1) // 250..1 + for i = 1; i < 5; i++ { // 254..5 + FeSquare(&t1, &t1) + } + FeMul(out, &t1, &t0) // 254..5,3,1,0 +} + +func fePow22523(out, z *FieldElement) { + var t0, t1, t2 FieldElement + var i int + + FeSquare(&t0, z) + for i = 1; i < 1; i++ { + FeSquare(&t0, &t0) + } + FeSquare(&t1, &t0) + for i = 1; i < 2; i++ { + FeSquare(&t1, &t1) + } + FeMul(&t1, z, &t1) + FeMul(&t0, &t0, &t1) + FeSquare(&t0, &t0) + for i = 1; i < 1; i++ { + FeSquare(&t0, &t0) + } + FeMul(&t0, &t1, &t0) + FeSquare(&t1, &t0) + for i = 1; i < 5; i++ { + FeSquare(&t1, &t1) + } + FeMul(&t0, &t1, &t0) + FeSquare(&t1, &t0) + for i = 1; i < 10; i++ { + FeSquare(&t1, &t1) + } + FeMul(&t1, &t1, &t0) + FeSquare(&t2, &t1) + for i = 1; i < 20; i++ { + FeSquare(&t2, &t2) + } + FeMul(&t1, &t2, &t1) + FeSquare(&t1, &t1) + for i = 1; i < 10; i++ { + FeSquare(&t1, &t1) + } + FeMul(&t0, &t1, &t0) + FeSquare(&t1, &t0) + for i = 1; i < 50; i++ { + FeSquare(&t1, &t1) + } + FeMul(&t1, &t1, &t0) + FeSquare(&t2, &t1) + for i = 1; i < 100; i++ { + FeSquare(&t2, &t2) + } + FeMul(&t1, &t2, &t1) + FeSquare(&t1, &t1) + for i = 1; i < 50; i++ { + FeSquare(&t1, &t1) + } + FeMul(&t0, &t1, &t0) + FeSquare(&t0, &t0) + for i = 1; i < 2; i++ { + FeSquare(&t0, &t0) + } + FeMul(out, &t0, z) +} + +// Group elements are members of the elliptic curve -x^2 + y^2 = 1 + d * x^2 * +// y^2 where d = -121665/121666. +// +// Several representations are used: +// ProjectiveGroupElement: (X:Y:Z) satisfying x=X/Z, y=Y/Z +// ExtendedGroupElement: (X:Y:Z:T) satisfying x=X/Z, y=Y/Z, XY=ZT +// CompletedGroupElement: ((X:Z),(Y:T)) satisfying x=X/Z, y=Y/T +// PreComputedGroupElement: (y+x,y-x,2dxy) + +type ProjectiveGroupElement struct { + X, Y, Z FieldElement +} + +type ExtendedGroupElement struct { + X, Y, Z, T FieldElement +} + +type CompletedGroupElement struct { + X, Y, Z, T FieldElement +} + +type PreComputedGroupElement struct { + yPlusX, yMinusX, xy2d FieldElement +} + +type CachedGroupElement struct { + yPlusX, yMinusX, Z, T2d FieldElement +} + +func (p *ProjectiveGroupElement) Zero() { + FeZero(&p.X) + FeOne(&p.Y) + FeOne(&p.Z) +} + +func (p *ProjectiveGroupElement) Double(r *CompletedGroupElement) { + var t0 FieldElement + + FeSquare(&r.X, &p.X) + FeSquare(&r.Z, &p.Y) + FeSquare2(&r.T, &p.Z) + FeAdd(&r.Y, &p.X, &p.Y) + FeSquare(&t0, &r.Y) + FeAdd(&r.Y, &r.Z, &r.X) + FeSub(&r.Z, &r.Z, &r.X) + FeSub(&r.X, &t0, &r.Y) + FeSub(&r.T, &r.T, &r.Z) +} + +func (p *ProjectiveGroupElement) ToExtended(s *ExtendedGroupElement) { + var b [32]byte + p.ToBytes(&b) + s.FromBytes(&b) +} + +func (p *ProjectiveGroupElement) ToBytes(s *[32]byte) { + var recip, x, y FieldElement + + FeInvert(&recip, &p.Z) + FeMul(&x, &p.X, &recip) + FeMul(&y, &p.Y, &recip) + FeToBytes(s, &y) + s[31] ^= FeIsNegative(&x) << 7 +} + +func (p *ExtendedGroupElement) Zero() { + FeZero(&p.X) + FeOne(&p.Y) + FeOne(&p.Z) + FeZero(&p.T) +} + +func (p *ExtendedGroupElement) Double(r *CompletedGroupElement) { + var q ProjectiveGroupElement + p.ToProjective(&q) + q.Double(r) +} + +func GeDouble(r, p *ExtendedGroupElement) { + var q ProjectiveGroupElement + p.ToProjective(&q) + var rco CompletedGroupElement + q.Double(&rco) + rco.ToExtended(r) +} + +func (p *ExtendedGroupElement) ToCached(r *CachedGroupElement) { + FeAdd(&r.yPlusX, &p.Y, &p.X) + FeSub(&r.yMinusX, &p.Y, &p.X) + FeCopy(&r.Z, &p.Z) + FeMul(&r.T2d, &p.T, &d2) +} + +func (p *ExtendedGroupElement) ToProjective(r *ProjectiveGroupElement) { + FeCopy(&r.X, &p.X) + FeCopy(&r.Y, &p.Y) 
+ FeCopy(&r.Z, &p.Z) +} + +func (p *ExtendedGroupElement) ToBytes(s *[32]byte) { + var recip, x, y FieldElement + + FeInvert(&recip, &p.Z) + FeMul(&x, &p.X, &recip) + FeMul(&y, &p.Y, &recip) + FeToBytes(s, &y) + s[31] ^= FeIsNegative(&x) << 7 +} + +// FromBytesBaseGroup unmarshals an elliptic curve point returns true iff the +// point point is in the order l subgroup generated by the base point. This +// implementation is based on +// https://www.iacr.org/archive/pkc2003/25670211/25670211.pdf Definition 1. +// Validation of an elliptic curve public key P ensures that P is a point of +// order BasePointOrder in E. +func (p *ExtendedGroupElement) FromBytesBaseGroup(s *[32]byte) bool { + // condition 1: P != infinity + if *s == [32]byte{1} { + return false + } + + // condition 3 is implied by successful point decompression + if !p.FromBytes(s) { + return false + } + // condition 2: ToBytes produces canonical encodings, check against that + var sCheck [32]byte + p.ToBytes(&sCheck) + if sCheck != *s { + return false + } + + // condition 4: order * P == infinity + var out ExtendedGroupElement + GeScalarMult(&out, &BasePointOrder, p) + out.ToBytes(&sCheck) + if sCheck != [32]byte{1} { + return false + } + + // an array of all zeros would not result from any randomly generated point + // and might easily be caused by bugs in ToBytes or FromBytes + if *s == [32]byte{} { + return false + } + return true +} + +func (p *ExtendedGroupElement) FromBytes(s *[32]byte) bool { + FeFromBytes(&p.Y, s) + return p.FromParityAndY(s[31]>>7, &p.Y) +} + +func (p *ExtendedGroupElement) FromParityAndY(bit byte, y *FieldElement) bool { + var u, v, v3, vxx, check FieldElement + + FeCopy(&p.Y, y) + FeOne(&p.Z) + FeSquare(&u, &p.Y) + FeMul(&v, &u, &d) + FeSub(&u, &u, &p.Z) // y = y^2-1 + FeAdd(&v, &v, &p.Z) // v = dy^2+1 + + FeSquare(&v3, &v) + FeMul(&v3, &v3, &v) // v3 = v^3 + FeSquare(&p.X, &v3) + FeMul(&p.X, &p.X, &v) + FeMul(&p.X, &p.X, &u) // x = uv^7 + + fePow22523(&p.X, &p.X) // x = (uv^7)^((q-5)/8) + FeMul(&p.X, &p.X, &v3) + FeMul(&p.X, &p.X, &u) // x = uv^3(uv^7)^((q-5)/8) + + var tmpX, tmp2 [32]byte + + FeSquare(&vxx, &p.X) + FeMul(&vxx, &vxx, &v) + FeSub(&check, &vxx, &u) // vx^2-u + if FeIsNonZero(&check) == 1 { + FeAdd(&check, &vxx, &u) // vx^2+u + if FeIsNonZero(&check) == 1 { + return false + } + FeMul(&p.X, &p.X, &SqrtM1) + + FeToBytes(&tmpX, &p.X) + for i, v := range tmpX { + tmp2[31-i] = v + } + } + + if FeIsNegative(&p.X) != bit { + FeNeg(&p.X, &p.X) + } + + FeMul(&p.T, &p.X, &p.Y) + return true +} + +func (p *CompletedGroupElement) ToProjective(r *ProjectiveGroupElement) { + FeMul(&r.X, &p.X, &p.T) + FeMul(&r.Y, &p.Y, &p.Z) + FeMul(&r.Z, &p.Z, &p.T) +} + +func (p *CompletedGroupElement) ToExtended(r *ExtendedGroupElement) { + FeMul(&r.X, &p.X, &p.T) + FeMul(&r.Y, &p.Y, &p.Z) + FeMul(&r.Z, &p.Z, &p.T) + FeMul(&r.T, &p.X, &p.Y) +} + +func (p *PreComputedGroupElement) Zero() { + FeOne(&p.yPlusX) + FeOne(&p.yMinusX) + FeZero(&p.xy2d) +} + +func geAdd(r *CompletedGroupElement, p *ExtendedGroupElement, q *CachedGroupElement) { + var t0 FieldElement + + FeAdd(&r.X, &p.Y, &p.X) + FeSub(&r.Y, &p.Y, &p.X) + FeMul(&r.Z, &r.X, &q.yPlusX) + FeMul(&r.Y, &r.Y, &q.yMinusX) + FeMul(&r.T, &q.T2d, &p.T) + FeMul(&r.X, &p.Z, &q.Z) + FeAdd(&t0, &r.X, &r.X) + FeSub(&r.X, &r.Z, &r.Y) + FeAdd(&r.Y, &r.Z, &r.Y) + FeAdd(&r.Z, &t0, &r.T) + FeSub(&r.T, &t0, &r.T) +} + +func geSub(r *CompletedGroupElement, p *ExtendedGroupElement, q *CachedGroupElement) { + var t0 FieldElement + + FeAdd(&r.X, &p.Y, &p.X) + FeSub(&r.Y, &p.Y, &p.X) 
+ FeMul(&r.Z, &r.X, &q.yMinusX) + FeMul(&r.Y, &r.Y, &q.yPlusX) + FeMul(&r.T, &q.T2d, &p.T) + FeMul(&r.X, &p.Z, &q.Z) + FeAdd(&t0, &r.X, &r.X) + FeSub(&r.X, &r.Z, &r.Y) + FeAdd(&r.Y, &r.Z, &r.Y) + FeSub(&r.Z, &t0, &r.T) + FeAdd(&r.T, &t0, &r.T) +} + +func geMixedAdd(r *CompletedGroupElement, p *ExtendedGroupElement, q *PreComputedGroupElement) { + var t0 FieldElement + + FeAdd(&r.X, &p.Y, &p.X) + FeSub(&r.Y, &p.Y, &p.X) + FeMul(&r.Z, &r.X, &q.yPlusX) + FeMul(&r.Y, &r.Y, &q.yMinusX) + FeMul(&r.T, &q.xy2d, &p.T) + FeAdd(&t0, &p.Z, &p.Z) + FeSub(&r.X, &r.Z, &r.Y) + FeAdd(&r.Y, &r.Z, &r.Y) + FeAdd(&r.Z, &t0, &r.T) + FeSub(&r.T, &t0, &r.T) +} + +func geMixedSub(r *CompletedGroupElement, p *ExtendedGroupElement, q *PreComputedGroupElement) { + var t0 FieldElement + + FeAdd(&r.X, &p.Y, &p.X) + FeSub(&r.Y, &p.Y, &p.X) + FeMul(&r.Z, &r.X, &q.yMinusX) + FeMul(&r.Y, &r.Y, &q.yPlusX) + FeMul(&r.T, &q.xy2d, &p.T) + FeAdd(&t0, &p.Z, &p.Z) + FeSub(&r.X, &r.Z, &r.Y) + FeAdd(&r.Y, &r.Z, &r.Y) + FeSub(&r.Z, &t0, &r.T) + FeAdd(&r.T, &t0, &r.T) +} + +func slide(r *[256]int8, a *[32]byte) { + for i := range r { + r[i] = int8(1 & (a[i>>3] >> uint(i&7))) + } + + for i := range r { + if r[i] != 0 { + for b := 1; b <= 6 && i+b < 256; b++ { + if r[i+b] != 0 { + if r[i]+(r[i+b]<<uint(b)) <= 15 { + r[i] += r[i+b] << uint(b) + r[i+b] = 0 + } else if r[i]-(r[i+b]<<uint(b)) >= -15 { + r[i] -= r[i+b] << uint(b) + for k := i + b; k < 256; k++ { + if r[k] == 0 { + r[k] = 1 + break + } + r[k] = 0 + } + } else { + break + } + } + } + } + } +} + +// GeAdd sets r = a+b. r may overlap with a and b. +func GeAdd(r, a, b *ExtendedGroupElement) { + var bca CachedGroupElement + b.ToCached(&bca) + var rc CompletedGroupElement + geAdd(&rc, a, &bca) + rc.ToExtended(r) +} + +func ExtendedGroupElementCopy(t, u *ExtendedGroupElement) { + FeCopy(&t.X, &u.X) + FeCopy(&t.Y, &u.Y) + FeCopy(&t.Z, &u.Z) + FeCopy(&t.T, &u.T) +} + +func ExtendedGroupElementCMove(t, u *ExtendedGroupElement, b int32) { + FeCMove(&t.X, &u.X, b) + FeCMove(&t.Y, &u.Y, b) + FeCMove(&t.Z, &u.Z, b) + FeCMove(&t.T, &u.T, b) +} + +// GeScalarMult sets r = a*A +// where a = a[0]+256*a[1]+...+256^31 a[31]. +func GeScalarMult(r *ExtendedGroupElement, a *[32]byte, A *ExtendedGroupElement) { + var p, q ExtendedGroupElement + q.Zero() + ExtendedGroupElementCopy(&p, A) + for i := uint(0); i < 256; i++ { + bit := int32(a[i>>3]>>(i&7)) & 1 + var t ExtendedGroupElement + GeAdd(&t, &q, &p) + ExtendedGroupElementCMove(&q, &t, bit) + GeDouble(&p, &p) + } + ExtendedGroupElementCopy(r, &q) +} + +// GeDoubleScalarMultVartime sets r = a*A + b*B +// where a = a[0]+256*a[1]+...+256^31 a[31]. +// and b = b[0]+256*b[1]+...+256^31 b[31]. +// B is the Ed25519 base point (x,4/5) with x positive.
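+//
+// Illustrative sketch (editorial addition, mirroring how the test file exercises
+// this routine): with hypothetical already-reduced scalars a, b and a decoded
+// point A,
+//
+//	var R ProjectiveGroupElement
+//	GeDoubleScalarMultVartime(&R, &a, &A, &b) // R = a*A + b*B
+//	var encoded [32]byte
+//	R.ToBytes(&encoded)
+//
+// The sliding-window loop below branches on the scalar bits, so this function is
+// variable-time and should only be used with public scalars.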
+func GeDoubleScalarMultVartime( + r *ProjectiveGroupElement, + a *[32]byte, + A *ExtendedGroupElement, + b *[32]byte, +) { + var aSlide, bSlide [256]int8 + var Ai [8]CachedGroupElement // A,3A,5A,7A,9A,11A,13A,15A + var t CompletedGroupElement + var u, A2 ExtendedGroupElement + var i int + + slide(&aSlide, a) + slide(&bSlide, b) + + A.ToCached(&Ai[0]) + A.Double(&t) + t.ToExtended(&A2) + + for i := 0; i < 7; i++ { + geAdd(&t, &A2, &Ai[i]) + t.ToExtended(&u) + u.ToCached(&Ai[i+1]) + } + + r.Zero() + + for i = 255; i >= 0; i-- { + if aSlide[i] != 0 || bSlide[i] != 0 { + break + } + } + + for ; i >= 0; i-- { + r.Double(&t) + + if aSlide[i] > 0 { + t.ToExtended(&u) + geAdd(&t, &u, &Ai[aSlide[i]/2]) + } else if aSlide[i] < 0 { + t.ToExtended(&u) + geSub(&t, &u, &Ai[(-aSlide[i])/2]) + } + + if bSlide[i] > 0 { + t.ToExtended(&u) + geMixedAdd(&t, &u, &bi[bSlide[i]/2]) + } else if bSlide[i] < 0 { + t.ToExtended(&u) + geMixedSub(&t, &u, &bi[(-bSlide[i])/2]) + } + + t.ToProjective(r) + } +} + +// equal returns 1 if b == c and 0 otherwise. +func equal(b, c int32) int32 { + x := uint32(b ^ c) + x-- + return int32(x >> 31) +} + +// negative returns 1 if b < 0 and 0 otherwise. +func negative(b int32) int32 { + return (b >> 31) & 1 +} + +func PreComputedGroupElementCMove(t, u *PreComputedGroupElement, b int32) { + FeCMove(&t.yPlusX, &u.yPlusX, b) + FeCMove(&t.yMinusX, &u.yMinusX, b) + FeCMove(&t.xy2d, &u.xy2d, b) +} + +func selectPoint(t *PreComputedGroupElement, pos int32, b int32) { + var minusT PreComputedGroupElement + bNegative := negative(b) + bAbs := b - (((-bNegative) & b) << 1) + + t.Zero() + for i := int32(0); i < 8; i++ { + PreComputedGroupElementCMove(t, &base[pos][i], equal(bAbs, i+1)) + } + FeCopy(&minusT.yPlusX, &t.yMinusX) + FeCopy(&minusT.yMinusX, &t.yPlusX) + FeNeg(&minusT.xy2d, &t.xy2d) + PreComputedGroupElementCMove(t, &minusT, bNegative) +} + +// GeScalarMultBase computes h = a*B, where +// +// a = a[0]+256*a[1]+...+256^31 a[31] +// B is the Ed25519 base point (x,4/5) with x positive. +// +// Preconditions: +// +// a[31] <= 127 +func GeScalarMultBase(h *ExtendedGroupElement, a *[32]byte) { + var e [64]int8 + + for i, v := range a { + e[2*i] = int8(v & 15) + e[2*i+1] = int8((v >> 4) & 15) + } + + // each e[i] is between 0 and 15 and e[63] is between 0 and 7. + + carry := int8(0) + for i := 0; i < 63; i++ { + e[i] += carry + carry = (e[i] + 8) >> 4 + e[i] -= carry << 4 + } + e[63] += carry + // each e[i] is between -8 and 8. + + h.Zero() + var t PreComputedGroupElement + var r CompletedGroupElement + for i := int32(1); i < 64; i += 2 { + selectPoint(&t, i/2, int32(e[i])) + geMixedAdd(&r, h, &t) + r.ToExtended(h) + } + + var s ProjectiveGroupElement + + h.Double(&r) + r.ToProjective(&s) + s.Double(&r) + r.ToProjective(&s) + s.Double(&r) + r.ToProjective(&s) + s.Double(&r) + r.ToExtended(h) + + for i := int32(0); i < 64; i += 2 { + selectPoint(&t, i/2, int32(e[i])) + geMixedAdd(&r, h, &t) + r.ToExtended(h) + } +} + +// The scalars are GF(2^252 + 27742317777372353535851937790883648493). + +// Input: +// +// a[0]+256*a[1]+...+256^31*a[31] = a +// b[0]+256*b[1]+...+256^31*b[31] = b +// c[0]+256*c[1]+...+256^31*c[31] = c +// +// Output: +// +// s[0]+256*s[1]+...+256^31*s[31] = (ab+c) mod l +// where l = 2^252 + 27742317777372353535851937790883648493. 
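+//
+// Illustrative sketch (editorial addition; assumes the usual Ed25519 signing flow,
+// which is not shown in this file): the signature component S = (r + h*a) mod l
+// maps directly onto this routine, with hReduced, secretScalar and r as
+// hypothetical already-reduced [32]byte scalars:
+//
+//	var S [32]byte
+//	ScMulAdd(&S, &hReduced, &secretScalar, &r) // S = (hReduced*secretScalar + r) mod l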
+func ScMulAdd(s, a, b, c *[32]byte) { + a0 := 2097151 & load3(a[:]) + a1 := 2097151 & (load4(a[2:]) >> 5) + a2 := 2097151 & (load3(a[5:]) >> 2) + a3 := 2097151 & (load4(a[7:]) >> 7) + a4 := 2097151 & (load4(a[10:]) >> 4) + a5 := 2097151 & (load3(a[13:]) >> 1) + a6 := 2097151 & (load4(a[15:]) >> 6) + a7 := 2097151 & (load3(a[18:]) >> 3) + a8 := 2097151 & load3(a[21:]) + a9 := 2097151 & (load4(a[23:]) >> 5) + a10 := 2097151 & (load3(a[26:]) >> 2) + a11 := (load4(a[28:]) >> 7) + b0 := 2097151 & load3(b[:]) + b1 := 2097151 & (load4(b[2:]) >> 5) + b2 := 2097151 & (load3(b[5:]) >> 2) + b3 := 2097151 & (load4(b[7:]) >> 7) + b4 := 2097151 & (load4(b[10:]) >> 4) + b5 := 2097151 & (load3(b[13:]) >> 1) + b6 := 2097151 & (load4(b[15:]) >> 6) + b7 := 2097151 & (load3(b[18:]) >> 3) + b8 := 2097151 & load3(b[21:]) + b9 := 2097151 & (load4(b[23:]) >> 5) + b10 := 2097151 & (load3(b[26:]) >> 2) + b11 := (load4(b[28:]) >> 7) + c0 := 2097151 & load3(c[:]) + c1 := 2097151 & (load4(c[2:]) >> 5) + c2 := 2097151 & (load3(c[5:]) >> 2) + c3 := 2097151 & (load4(c[7:]) >> 7) + c4 := 2097151 & (load4(c[10:]) >> 4) + c5 := 2097151 & (load3(c[13:]) >> 1) + c6 := 2097151 & (load4(c[15:]) >> 6) + c7 := 2097151 & (load3(c[18:]) >> 3) + c8 := 2097151 & load3(c[21:]) + c9 := 2097151 & (load4(c[23:]) >> 5) + c10 := 2097151 & (load3(c[26:]) >> 2) + c11 := (load4(c[28:]) >> 7) + var carry [23]int64 + + s0 := c0 + a0*b0 + s1 := c1 + a0*b1 + a1*b0 + s2 := c2 + a0*b2 + a1*b1 + a2*b0 + s3 := c3 + a0*b3 + a1*b2 + a2*b1 + a3*b0 + s4 := c4 + a0*b4 + a1*b3 + a2*b2 + a3*b1 + a4*b0 + s5 := c5 + a0*b5 + a1*b4 + a2*b3 + a3*b2 + a4*b1 + a5*b0 + s6 := c6 + a0*b6 + a1*b5 + a2*b4 + a3*b3 + a4*b2 + a5*b1 + a6*b0 + s7 := c7 + a0*b7 + a1*b6 + a2*b5 + a3*b4 + a4*b3 + a5*b2 + a6*b1 + a7*b0 + s8 := c8 + a0*b8 + a1*b7 + a2*b6 + a3*b5 + a4*b4 + a5*b3 + a6*b2 + a7*b1 + a8*b0 + s9 := c9 + a0*b9 + a1*b8 + a2*b7 + a3*b6 + a4*b5 + a5*b4 + a6*b3 + a7*b2 + a8*b1 + a9*b0 + s10 := c10 + a0*b10 + a1*b9 + a2*b8 + a3*b7 + a4*b6 + a5*b5 + a6*b4 + a7*b3 + a8*b2 + a9*b1 + a10*b0 + s11 := c11 + a0*b11 + a1*b10 + a2*b9 + a3*b8 + a4*b7 + a5*b6 + a6*b5 + a7*b4 + a8*b3 + a9*b2 + a10*b1 + a11*b0 + s12 := a1*b11 + a2*b10 + a3*b9 + a4*b8 + a5*b7 + a6*b6 + a7*b5 + a8*b4 + a9*b3 + a10*b2 + a11*b1 + s13 := a2*b11 + a3*b10 + a4*b9 + a5*b8 + a6*b7 + a7*b6 + a8*b5 + a9*b4 + a10*b3 + a11*b2 + s14 := a3*b11 + a4*b10 + a5*b9 + a6*b8 + a7*b7 + a8*b6 + a9*b5 + a10*b4 + a11*b3 + s15 := a4*b11 + a5*b10 + a6*b9 + a7*b8 + a8*b7 + a9*b6 + a10*b5 + a11*b4 + s16 := a5*b11 + a6*b10 + a7*b9 + a8*b8 + a9*b7 + a10*b6 + a11*b5 + s17 := a6*b11 + a7*b10 + a8*b9 + a9*b8 + a10*b7 + a11*b6 + s18 := a7*b11 + a8*b10 + a9*b9 + a10*b8 + a11*b7 + s19 := a8*b11 + a9*b10 + a10*b9 + a11*b8 + s20 := a9*b11 + a10*b10 + a11*b9 + s21 := a10*b11 + a11*b10 + s22 := a11 * b11 + s23 := int64(0) + + carry[0] = (s0 + (1 << 20)) >> 21 + s1 += carry[0] + s0 -= carry[0] << 21 + carry[2] = (s2 + (1 << 20)) >> 21 + s3 += carry[2] + s2 -= carry[2] << 21 + carry[4] = (s4 + (1 << 20)) >> 21 + s5 += carry[4] + s4 -= carry[4] << 21 + carry[6] = (s6 + (1 << 20)) >> 21 + s7 += carry[6] + s6 -= carry[6] << 21 + carry[8] = (s8 + (1 << 20)) >> 21 + s9 += carry[8] + s8 -= carry[8] << 21 + carry[10] = (s10 + (1 << 20)) >> 21 + s11 += carry[10] + s10 -= carry[10] << 21 + carry[12] = (s12 + (1 << 20)) >> 21 + s13 += carry[12] + s12 -= carry[12] << 21 + carry[14] = (s14 + (1 << 20)) >> 21 + s15 += carry[14] + s14 -= carry[14] << 21 + carry[16] = (s16 + (1 << 20)) >> 21 + s17 += carry[16] + s16 -= carry[16] << 21 + carry[18] = (s18 + (1 
<< 20)) >> 21 + s19 += carry[18] + s18 -= carry[18] << 21 + carry[20] = (s20 + (1 << 20)) >> 21 + s21 += carry[20] + s20 -= carry[20] << 21 + carry[22] = (s22 + (1 << 20)) >> 21 + s23 += carry[22] + s22 -= carry[22] << 21 + + carry[1] = (s1 + (1 << 20)) >> 21 + s2 += carry[1] + s1 -= carry[1] << 21 + carry[3] = (s3 + (1 << 20)) >> 21 + s4 += carry[3] + s3 -= carry[3] << 21 + carry[5] = (s5 + (1 << 20)) >> 21 + s6 += carry[5] + s5 -= carry[5] << 21 + carry[7] = (s7 + (1 << 20)) >> 21 + s8 += carry[7] + s7 -= carry[7] << 21 + carry[9] = (s9 + (1 << 20)) >> 21 + s10 += carry[9] + s9 -= carry[9] << 21 + carry[11] = (s11 + (1 << 20)) >> 21 + s12 += carry[11] + s11 -= carry[11] << 21 + carry[13] = (s13 + (1 << 20)) >> 21 + s14 += carry[13] + s13 -= carry[13] << 21 + carry[15] = (s15 + (1 << 20)) >> 21 + s16 += carry[15] + s15 -= carry[15] << 21 + carry[17] = (s17 + (1 << 20)) >> 21 + s18 += carry[17] + s17 -= carry[17] << 21 + carry[19] = (s19 + (1 << 20)) >> 21 + s20 += carry[19] + s19 -= carry[19] << 21 + carry[21] = (s21 + (1 << 20)) >> 21 + s22 += carry[21] + s21 -= carry[21] << 21 + + s11 += s23 * 666643 + s12 += s23 * 470296 + s13 += s23 * 654183 + s14 -= s23 * 997805 + s15 += s23 * 136657 + s16 -= s23 * 683901 + s23 = 0 + + s10 += s22 * 666643 + s11 += s22 * 470296 + s12 += s22 * 654183 + s13 -= s22 * 997805 + s14 += s22 * 136657 + s15 -= s22 * 683901 + s22 = 0 + + s9 += s21 * 666643 + s10 += s21 * 470296 + s11 += s21 * 654183 + s12 -= s21 * 997805 + s13 += s21 * 136657 + s14 -= s21 * 683901 + s21 = 0 + + s8 += s20 * 666643 + s9 += s20 * 470296 + s10 += s20 * 654183 + s11 -= s20 * 997805 + s12 += s20 * 136657 + s13 -= s20 * 683901 + s20 = 0 + + s7 += s19 * 666643 + s8 += s19 * 470296 + s9 += s19 * 654183 + s10 -= s19 * 997805 + s11 += s19 * 136657 + s12 -= s19 * 683901 + s19 = 0 + + s6 += s18 * 666643 + s7 += s18 * 470296 + s8 += s18 * 654183 + s9 -= s18 * 997805 + s10 += s18 * 136657 + s11 -= s18 * 683901 + s18 = 0 + + carry[6] = (s6 + (1 << 20)) >> 21 + s7 += carry[6] + s6 -= carry[6] << 21 + carry[8] = (s8 + (1 << 20)) >> 21 + s9 += carry[8] + s8 -= carry[8] << 21 + carry[10] = (s10 + (1 << 20)) >> 21 + s11 += carry[10] + s10 -= carry[10] << 21 + carry[12] = (s12 + (1 << 20)) >> 21 + s13 += carry[12] + s12 -= carry[12] << 21 + carry[14] = (s14 + (1 << 20)) >> 21 + s15 += carry[14] + s14 -= carry[14] << 21 + carry[16] = (s16 + (1 << 20)) >> 21 + s17 += carry[16] + s16 -= carry[16] << 21 + + carry[7] = (s7 + (1 << 20)) >> 21 + s8 += carry[7] + s7 -= carry[7] << 21 + carry[9] = (s9 + (1 << 20)) >> 21 + s10 += carry[9] + s9 -= carry[9] << 21 + carry[11] = (s11 + (1 << 20)) >> 21 + s12 += carry[11] + s11 -= carry[11] << 21 + carry[13] = (s13 + (1 << 20)) >> 21 + s14 += carry[13] + s13 -= carry[13] << 21 + carry[15] = (s15 + (1 << 20)) >> 21 + s16 += carry[15] + s15 -= carry[15] << 21 + + s5 += s17 * 666643 + s6 += s17 * 470296 + s7 += s17 * 654183 + s8 -= s17 * 997805 + s9 += s17 * 136657 + s10 -= s17 * 683901 + s17 = 0 + + s4 += s16 * 666643 + s5 += s16 * 470296 + s6 += s16 * 654183 + s7 -= s16 * 997805 + s8 += s16 * 136657 + s9 -= s16 * 683901 + s16 = 0 + + s3 += s15 * 666643 + s4 += s15 * 470296 + s5 += s15 * 654183 + s6 -= s15 * 997805 + s7 += s15 * 136657 + s8 -= s15 * 683901 + s15 = 0 + + s2 += s14 * 666643 + s3 += s14 * 470296 + s4 += s14 * 654183 + s5 -= s14 * 997805 + s6 += s14 * 136657 + s7 -= s14 * 683901 + s14 = 0 + + s1 += s13 * 666643 + s2 += s13 * 470296 + s3 += s13 * 654183 + s4 -= s13 * 997805 + s5 += s13 * 136657 + s6 -= s13 * 683901 + s13 = 0 + + s0 += s12 * 666643 + s1 
+= s12 * 470296 + s2 += s12 * 654183 + s3 -= s12 * 997805 + s4 += s12 * 136657 + s5 -= s12 * 683901 + s12 = 0 + + carry[0] = (s0 + (1 << 20)) >> 21 + s1 += carry[0] + s0 -= carry[0] << 21 + carry[2] = (s2 + (1 << 20)) >> 21 + s3 += carry[2] + s2 -= carry[2] << 21 + carry[4] = (s4 + (1 << 20)) >> 21 + s5 += carry[4] + s4 -= carry[4] << 21 + carry[6] = (s6 + (1 << 20)) >> 21 + s7 += carry[6] + s6 -= carry[6] << 21 + carry[8] = (s8 + (1 << 20)) >> 21 + s9 += carry[8] + s8 -= carry[8] << 21 + carry[10] = (s10 + (1 << 20)) >> 21 + s11 += carry[10] + s10 -= carry[10] << 21 + + carry[1] = (s1 + (1 << 20)) >> 21 + s2 += carry[1] + s1 -= carry[1] << 21 + carry[3] = (s3 + (1 << 20)) >> 21 + s4 += carry[3] + s3 -= carry[3] << 21 + carry[5] = (s5 + (1 << 20)) >> 21 + s6 += carry[5] + s5 -= carry[5] << 21 + carry[7] = (s7 + (1 << 20)) >> 21 + s8 += carry[7] + s7 -= carry[7] << 21 + carry[9] = (s9 + (1 << 20)) >> 21 + s10 += carry[9] + s9 -= carry[9] << 21 + carry[11] = (s11 + (1 << 20)) >> 21 + s12 += carry[11] + s11 -= carry[11] << 21 + + s0 += s12 * 666643 + s1 += s12 * 470296 + s2 += s12 * 654183 + s3 -= s12 * 997805 + s4 += s12 * 136657 + s5 -= s12 * 683901 + s12 = 0 + + carry[0] = s0 >> 21 + s1 += carry[0] + s0 -= carry[0] << 21 + carry[1] = s1 >> 21 + s2 += carry[1] + s1 -= carry[1] << 21 + carry[2] = s2 >> 21 + s3 += carry[2] + s2 -= carry[2] << 21 + carry[3] = s3 >> 21 + s4 += carry[3] + s3 -= carry[3] << 21 + carry[4] = s4 >> 21 + s5 += carry[4] + s4 -= carry[4] << 21 + carry[5] = s5 >> 21 + s6 += carry[5] + s5 -= carry[5] << 21 + carry[6] = s6 >> 21 + s7 += carry[6] + s6 -= carry[6] << 21 + carry[7] = s7 >> 21 + s8 += carry[7] + s7 -= carry[7] << 21 + carry[8] = s8 >> 21 + s9 += carry[8] + s8 -= carry[8] << 21 + carry[9] = s9 >> 21 + s10 += carry[9] + s9 -= carry[9] << 21 + carry[10] = s10 >> 21 + s11 += carry[10] + s10 -= carry[10] << 21 + carry[11] = s11 >> 21 + s12 += carry[11] + s11 -= carry[11] << 21 + + s0 += s12 * 666643 + s1 += s12 * 470296 + s2 += s12 * 654183 + s3 -= s12 * 997805 + s4 += s12 * 136657 + s5 -= s12 * 683901 + s12 = 0 + + carry[0] = s0 >> 21 + s1 += carry[0] + s0 -= carry[0] << 21 + carry[1] = s1 >> 21 + s2 += carry[1] + s1 -= carry[1] << 21 + carry[2] = s2 >> 21 + s3 += carry[2] + s2 -= carry[2] << 21 + carry[3] = s3 >> 21 + s4 += carry[3] + s3 -= carry[3] << 21 + carry[4] = s4 >> 21 + s5 += carry[4] + s4 -= carry[4] << 21 + carry[5] = s5 >> 21 + s6 += carry[5] + s5 -= carry[5] << 21 + carry[6] = s6 >> 21 + s7 += carry[6] + s6 -= carry[6] << 21 + carry[7] = s7 >> 21 + s8 += carry[7] + s7 -= carry[7] << 21 + carry[8] = s8 >> 21 + s9 += carry[8] + s8 -= carry[8] << 21 + carry[9] = s9 >> 21 + s10 += carry[9] + s9 -= carry[9] << 21 + carry[10] = s10 >> 21 + s11 += carry[10] + s10 -= carry[10] << 21 + + s[0] = byte(s0 >> 0) + s[1] = byte(s0 >> 8) + s[2] = byte((s0 >> 16) | (s1 << 5)) + s[3] = byte(s1 >> 3) + s[4] = byte(s1 >> 11) + s[5] = byte((s1 >> 19) | (s2 << 2)) + s[6] = byte(s2 >> 6) + s[7] = byte((s2 >> 14) | (s3 << 7)) + s[8] = byte(s3 >> 1) + s[9] = byte(s3 >> 9) + s[10] = byte((s3 >> 17) | (s4 << 4)) + s[11] = byte(s4 >> 4) + s[12] = byte(s4 >> 12) + s[13] = byte((s4 >> 20) | (s5 << 1)) + s[14] = byte(s5 >> 7) + s[15] = byte((s5 >> 15) | (s6 << 6)) + s[16] = byte(s6 >> 2) + s[17] = byte(s6 >> 10) + s[18] = byte((s6 >> 18) | (s7 << 3)) + s[19] = byte(s7 >> 5) + s[20] = byte(s7 >> 13) + s[21] = byte(s8 >> 0) + s[22] = byte(s8 >> 8) + s[23] = byte((s8 >> 16) | (s9 << 5)) + s[24] = byte(s9 >> 3) + s[25] = byte(s9 >> 11) + s[26] = byte((s9 >> 19) | (s10 << 2)) + s[27] 
= byte(s10 >> 6) + s[28] = byte((s10 >> 14) | (s11 << 7)) + s[29] = byte(s11 >> 1) + s[30] = byte(s11 >> 9) + s[31] = byte(s11 >> 17) +} + +// Input: +// +// s[0]+256*s[1]+...+256^63*s[63] = s +// s <= l +// +// Output: +// +// s[0]+256*s[1]+...+256^31*s[31] = l - s +// where l = 2^252 + 27742317777372353535851937790883648493. +func ScNeg(r, s *[32]byte) { + l := [32]byte{ + 237, + 211, + 245, + 92, + 26, + 99, + 18, + 88, + 214, + 156, + 247, + 162, + 222, + 249, + 222, + 20, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 16, + } + var carry int32 + for i := 0; i < 32; i++ { + carry = carry + int32(l[i]) - int32(s[i]) + negative := carry & math.MinInt32 // extract the sign bit (min=0b100...) + negative |= negative >> 16 + negative |= negative >> 8 + negative |= negative >> 4 + negative |= negative >> 2 + negative |= negative >> 1 + carry += negative & 256 // +=256 if negative, unmodified otherwise + r[i] = byte(carry) + // carry for next iteration + carry = negative & (-1) // -1 if negative, 0 otherwise + } +} + +// Input: +// +// s[0]+256*s[1]+...+256^63*s[63] = s +// +// Output: +// +// s[0]+256*s[1]+...+256^31*s[31] = s mod l +// where l = 2^252 + 27742317777372353535851937790883648493. +func ScReduce(out *[32]byte, s *[64]byte) { + s0 := 2097151 & load3(s[:]) + s1 := 2097151 & (load4(s[2:]) >> 5) + s2 := 2097151 & (load3(s[5:]) >> 2) + s3 := 2097151 & (load4(s[7:]) >> 7) + s4 := 2097151 & (load4(s[10:]) >> 4) + s5 := 2097151 & (load3(s[13:]) >> 1) + s6 := 2097151 & (load4(s[15:]) >> 6) + s7 := 2097151 & (load3(s[18:]) >> 3) + s8 := 2097151 & load3(s[21:]) + s9 := 2097151 & (load4(s[23:]) >> 5) + s10 := 2097151 & (load3(s[26:]) >> 2) + s11 := 2097151 & (load4(s[28:]) >> 7) + s12 := 2097151 & (load4(s[31:]) >> 4) + s13 := 2097151 & (load3(s[34:]) >> 1) + s14 := 2097151 & (load4(s[36:]) >> 6) + s15 := 2097151 & (load3(s[39:]) >> 3) + s16 := 2097151 & load3(s[42:]) + s17 := 2097151 & (load4(s[44:]) >> 5) + s18 := 2097151 & (load3(s[47:]) >> 2) + s19 := 2097151 & (load4(s[49:]) >> 7) + s20 := 2097151 & (load4(s[52:]) >> 4) + s21 := 2097151 & (load3(s[55:]) >> 1) + s22 := 2097151 & (load4(s[57:]) >> 6) + s23 := (load4(s[60:]) >> 3) + + s11 += s23 * 666643 + s12 += s23 * 470296 + s13 += s23 * 654183 + s14 -= s23 * 997805 + s15 += s23 * 136657 + s16 -= s23 * 683901 + s23 = 0 + + s10 += s22 * 666643 + s11 += s22 * 470296 + s12 += s22 * 654183 + s13 -= s22 * 997805 + s14 += s22 * 136657 + s15 -= s22 * 683901 + s22 = 0 + + s9 += s21 * 666643 + s10 += s21 * 470296 + s11 += s21 * 654183 + s12 -= s21 * 997805 + s13 += s21 * 136657 + s14 -= s21 * 683901 + s21 = 0 + + s8 += s20 * 666643 + s9 += s20 * 470296 + s10 += s20 * 654183 + s11 -= s20 * 997805 + s12 += s20 * 136657 + s13 -= s20 * 683901 + s20 = 0 + + s7 += s19 * 666643 + s8 += s19 * 470296 + s9 += s19 * 654183 + s10 -= s19 * 997805 + s11 += s19 * 136657 + s12 -= s19 * 683901 + s19 = 0 + + s6 += s18 * 666643 + s7 += s18 * 470296 + s8 += s18 * 654183 + s9 -= s18 * 997805 + s10 += s18 * 136657 + s11 -= s18 * 683901 + s18 = 0 + + var carry [17]int64 + + carry[6] = (s6 + (1 << 20)) >> 21 + s7 += carry[6] + s6 -= carry[6] << 21 + carry[8] = (s8 + (1 << 20)) >> 21 + s9 += carry[8] + s8 -= carry[8] << 21 + carry[10] = (s10 + (1 << 20)) >> 21 + s11 += carry[10] + s10 -= carry[10] << 21 + carry[12] = (s12 + (1 << 20)) >> 21 + s13 += carry[12] + s12 -= carry[12] << 21 + carry[14] = (s14 + (1 << 20)) >> 21 + s15 += carry[14] + s14 -= carry[14] << 21 + carry[16] = (s16 + (1 << 20)) >> 21 + s17 += carry[16] + s16 -= carry[16] 
<< 21 + + carry[7] = (s7 + (1 << 20)) >> 21 + s8 += carry[7] + s7 -= carry[7] << 21 + carry[9] = (s9 + (1 << 20)) >> 21 + s10 += carry[9] + s9 -= carry[9] << 21 + carry[11] = (s11 + (1 << 20)) >> 21 + s12 += carry[11] + s11 -= carry[11] << 21 + carry[13] = (s13 + (1 << 20)) >> 21 + s14 += carry[13] + s13 -= carry[13] << 21 + carry[15] = (s15 + (1 << 20)) >> 21 + s16 += carry[15] + s15 -= carry[15] << 21 + + s5 += s17 * 666643 + s6 += s17 * 470296 + s7 += s17 * 654183 + s8 -= s17 * 997805 + s9 += s17 * 136657 + s10 -= s17 * 683901 + s17 = 0 + + s4 += s16 * 666643 + s5 += s16 * 470296 + s6 += s16 * 654183 + s7 -= s16 * 997805 + s8 += s16 * 136657 + s9 -= s16 * 683901 + s16 = 0 + + s3 += s15 * 666643 + s4 += s15 * 470296 + s5 += s15 * 654183 + s6 -= s15 * 997805 + s7 += s15 * 136657 + s8 -= s15 * 683901 + s15 = 0 + + s2 += s14 * 666643 + s3 += s14 * 470296 + s4 += s14 * 654183 + s5 -= s14 * 997805 + s6 += s14 * 136657 + s7 -= s14 * 683901 + s14 = 0 + + s1 += s13 * 666643 + s2 += s13 * 470296 + s3 += s13 * 654183 + s4 -= s13 * 997805 + s5 += s13 * 136657 + s6 -= s13 * 683901 + s13 = 0 + + s0 += s12 * 666643 + s1 += s12 * 470296 + s2 += s12 * 654183 + s3 -= s12 * 997805 + s4 += s12 * 136657 + s5 -= s12 * 683901 + s12 = 0 + + carry[0] = (s0 + (1 << 20)) >> 21 + s1 += carry[0] + s0 -= carry[0] << 21 + carry[2] = (s2 + (1 << 20)) >> 21 + s3 += carry[2] + s2 -= carry[2] << 21 + carry[4] = (s4 + (1 << 20)) >> 21 + s5 += carry[4] + s4 -= carry[4] << 21 + carry[6] = (s6 + (1 << 20)) >> 21 + s7 += carry[6] + s6 -= carry[6] << 21 + carry[8] = (s8 + (1 << 20)) >> 21 + s9 += carry[8] + s8 -= carry[8] << 21 + carry[10] = (s10 + (1 << 20)) >> 21 + s11 += carry[10] + s10 -= carry[10] << 21 + + carry[1] = (s1 + (1 << 20)) >> 21 + s2 += carry[1] + s1 -= carry[1] << 21 + carry[3] = (s3 + (1 << 20)) >> 21 + s4 += carry[3] + s3 -= carry[3] << 21 + carry[5] = (s5 + (1 << 20)) >> 21 + s6 += carry[5] + s5 -= carry[5] << 21 + carry[7] = (s7 + (1 << 20)) >> 21 + s8 += carry[7] + s7 -= carry[7] << 21 + carry[9] = (s9 + (1 << 20)) >> 21 + s10 += carry[9] + s9 -= carry[9] << 21 + carry[11] = (s11 + (1 << 20)) >> 21 + s12 += carry[11] + s11 -= carry[11] << 21 + + s0 += s12 * 666643 + s1 += s12 * 470296 + s2 += s12 * 654183 + s3 -= s12 * 997805 + s4 += s12 * 136657 + s5 -= s12 * 683901 + s12 = 0 + + carry[0] = s0 >> 21 + s1 += carry[0] + s0 -= carry[0] << 21 + carry[1] = s1 >> 21 + s2 += carry[1] + s1 -= carry[1] << 21 + carry[2] = s2 >> 21 + s3 += carry[2] + s2 -= carry[2] << 21 + carry[3] = s3 >> 21 + s4 += carry[3] + s3 -= carry[3] << 21 + carry[4] = s4 >> 21 + s5 += carry[4] + s4 -= carry[4] << 21 + carry[5] = s5 >> 21 + s6 += carry[5] + s5 -= carry[5] << 21 + carry[6] = s6 >> 21 + s7 += carry[6] + s6 -= carry[6] << 21 + carry[7] = s7 >> 21 + s8 += carry[7] + s7 -= carry[7] << 21 + carry[8] = s8 >> 21 + s9 += carry[8] + s8 -= carry[8] << 21 + carry[9] = s9 >> 21 + s10 += carry[9] + s9 -= carry[9] << 21 + carry[10] = s10 >> 21 + s11 += carry[10] + s10 -= carry[10] << 21 + carry[11] = s11 >> 21 + s12 += carry[11] + s11 -= carry[11] << 21 + + s0 += s12 * 666643 + s1 += s12 * 470296 + s2 += s12 * 654183 + s3 -= s12 * 997805 + s4 += s12 * 136657 + s5 -= s12 * 683901 + s12 = 0 + + carry[0] = s0 >> 21 + s1 += carry[0] + s0 -= carry[0] << 21 + carry[1] = s1 >> 21 + s2 += carry[1] + s1 -= carry[1] << 21 + carry[2] = s2 >> 21 + s3 += carry[2] + s2 -= carry[2] << 21 + carry[3] = s3 >> 21 + s4 += carry[3] + s3 -= carry[3] << 21 + carry[4] = s4 >> 21 + s5 += carry[4] + s4 -= carry[4] << 21 + carry[5] = s5 >> 21 + s6 += carry[5] + 
s5 -= carry[5] << 21 + carry[6] = s6 >> 21 + s7 += carry[6] + s6 -= carry[6] << 21 + carry[7] = s7 >> 21 + s8 += carry[7] + s7 -= carry[7] << 21 + carry[8] = s8 >> 21 + s9 += carry[8] + s8 -= carry[8] << 21 + carry[9] = s9 >> 21 + s10 += carry[9] + s9 -= carry[9] << 21 + carry[10] = s10 >> 21 + s11 += carry[10] + s10 -= carry[10] << 21 + + out[0] = byte(s0 >> 0) + out[1] = byte(s0 >> 8) + out[2] = byte((s0 >> 16) | (s1 << 5)) + out[3] = byte(s1 >> 3) + out[4] = byte(s1 >> 11) + out[5] = byte((s1 >> 19) | (s2 << 2)) + out[6] = byte(s2 >> 6) + out[7] = byte((s2 >> 14) | (s3 << 7)) + out[8] = byte(s3 >> 1) + out[9] = byte(s3 >> 9) + out[10] = byte((s3 >> 17) | (s4 << 4)) + out[11] = byte(s4 >> 4) + out[12] = byte(s4 >> 12) + out[13] = byte((s4 >> 20) | (s5 << 1)) + out[14] = byte(s5 >> 7) + out[15] = byte((s5 >> 15) | (s6 << 6)) + out[16] = byte(s6 >> 2) + out[17] = byte(s6 >> 10) + out[18] = byte((s6 >> 18) | (s7 << 3)) + out[19] = byte(s7 >> 5) + out[20] = byte(s7 >> 13) + out[21] = byte(s8 >> 0) + out[22] = byte(s8 >> 8) + out[23] = byte((s8 >> 16) | (s9 << 5)) + out[24] = byte(s9 >> 3) + out[25] = byte(s9 >> 11) + out[26] = byte((s9 >> 19) | (s10 << 2)) + out[27] = byte(s10 >> 6) + out[28] = byte((s10 >> 14) | (s11 << 7)) + out[29] = byte(s11 >> 1) + out[30] = byte(s11 >> 9) + out[31] = byte(s11 >> 17) +} diff --git a/internal/ed25519/edwards25519/edwards25519_test.go b/internal/ed25519/edwards25519/edwards25519_test.go new file mode 100644 index 0000000..357f9f0 --- /dev/null +++ b/internal/ed25519/edwards25519/edwards25519_test.go @@ -0,0 +1,267 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package edwards25519 + +import ( + "crypto/rand" + "testing" +) + +func TestScNegMulAddIdentity(t *testing.T) { + one := [32]byte{1} + var seed [64]byte + var a, minusA, x [32]byte + + rand.Reader.Read(seed[:]) + ScReduce(&a, &seed) + copy(minusA[:], a[:]) + ScNeg(&minusA, &minusA) + ScMulAdd(&x, &one, &a, &minusA) + if x != [32]byte{} { + t.Errorf("%x --neg--> %x --sum--> %x", a, minusA, x) + } +} + +func TestGeAddAgainstgeAdd(t *testing.T) { + var x, y [32]byte + rand.Reader.Read(x[:]) + rand.Reader.Read(y[:]) + var X, Y ExtendedGroupElement + x[31] &= 127 + y[31] &= 127 + GeScalarMultBase(&X, &x) + GeScalarMultBase(&Y, &y) + + var SBytesRef [32]byte + var SRef ExtendedGroupElement + var Ycached CachedGroupElement + var SCompletedRef CompletedGroupElement + Y.ToCached(&Ycached) + geAdd(&SCompletedRef, &X, &Ycached) + SCompletedRef.ToExtended(&SRef) + SRef.ToBytes(&SBytesRef) + + var SBytes [32]byte + var S ExtendedGroupElement + GeAdd(&S, &X, &Y) + S.ToBytes(&SBytes) + + if SBytes != SBytesRef { + t.Errorf("GeAdd does not match geAdd: %x != %x", SBytes, SBytesRef) + } +} + +func TestGeScalarMultAgainstDoubleScalarMult(t *testing.T) { + var zero [32]byte + + var x [32]byte + rand.Reader.Read(x[:]) + var X ExtendedGroupElement + x[31] &= 127 + GeScalarMultBase(&X, &x) + + var a [32]byte + rand.Reader.Read(a[:]) + a[31] &= 127 + + var ABytesRef [32]byte + var Aref ProjectiveGroupElement + GeDoubleScalarMultVartime(&Aref, &a, &X, &zero) + Aref.ToBytes(&ABytesRef) + + var ABytes [32]byte + var A ExtendedGroupElement + GeScalarMult(&A, &a, &X) + A.ToBytes(&ABytes) + + if ABytes != ABytesRef { + t.Errorf( + "GeScalarMult does not match GeDoubleScalarMultVartime: %x != %x", + ABytes, + ABytesRef, + ) + } +} + +func inc(b *[32]byte) { + acc := uint(1) + for i := 0; i < 32; i++ { + acc += 
uint(b[i]) + acc, b[i] = uint(acc>>8), byte(acc) + } +} + +func TestGeAddAgainstScalarMult(t *testing.T) { + var x, s [32]byte + rand.Reader.Read(x[:]) + x[31] &= 127 + var X ExtendedGroupElement + GeScalarMultBase(&X, &x) + var A ExtendedGroupElement + A.Zero() + var ABytes, ABytesMult [32]byte + A.ToBytes(&ABytes) + var AMult ExtendedGroupElement + GeScalarMult(&AMult, &s, &X) + AMult.ToBytes(&ABytesMult) + if ABytes != ABytesMult { + t.Errorf("addition does not match multiplication (%x) -> %x != %x", s, ABytes, ABytesMult) + } + + GeAdd(&A, &A, &X) + inc(&s) +} + +func TestGeAddAgainstDoubleScalarMult(t *testing.T) { + var zero, x, s [32]byte + rand.Reader.Read(x[:]) + x[31] &= 127 + var X ExtendedGroupElement + GeScalarMultBase(&X, &x) + var A ExtendedGroupElement + A.Zero() + var ABytes, ABytesMult [32]byte + A.ToBytes(&ABytes) + var AMult ProjectiveGroupElement + GeDoubleScalarMultVartime(&AMult, &s, &X, &zero) + AMult.ToBytes(&ABytesMult) + if ABytes != ABytesMult { + t.Errorf("addition does not match multiplication (%x)", s) + } + + GeAdd(&A, &A, &X) + inc(&s) +} + +func TestGeScalarMultiBaseScalarMultDH(t *testing.T) { + var a, b [32]byte + rand.Reader.Read(a[:]) + rand.Reader.Read(b[:]) + + var A, B ExtendedGroupElement + a[31] &= 127 + b[31] &= 127 + GeScalarMultBase(&A, &a) + GeScalarMultBase(&B, &b) + + var kA, kB ExtendedGroupElement + GeScalarMult(&kA, &a, &B) + GeScalarMult(&kB, &b, &A) + + var ka, kb [32]byte + kA.ToBytes(&ka) + kB.ToBytes(&kb) + + if ka != kb { + t.Fatal("DH shared secrets do not match") + } +} + +func TestGeScalarMultDH(t *testing.T) { + var one [32]byte + one[0] = 1 + var base ExtendedGroupElement + GeScalarMultBase(&base, &one) + + var a, b [32]byte + rand.Reader.Read(a[:]) + rand.Reader.Read(b[:]) + + var A, B ExtendedGroupElement + GeScalarMult(&A, &a, &base) + GeScalarMult(&B, &b, &base) + + var kA, kB ExtendedGroupElement + GeScalarMult(&kA, &a, &B) + GeScalarMult(&kB, &b, &A) + + var ka, kb [32]byte + kA.ToBytes(&ka) + kB.ToBytes(&kb) + + if ka != kb { + t.Fatal("DH shared secrets do not match") + } + + var bytesA, bytesB [32]byte + A.ToBytes(&bytesA) + B.ToBytes(&bytesB) + + if bytesA == bytesB { + t.Fatalf("DH public key collision: g^%x = g^%x = %x", a, b, A) + } +} + +func BenchmarkGeScalarMultBase(b *testing.B) { + var s [32]byte + rand.Reader.Read(s[:]) + var P ExtendedGroupElement + + b.ResetTimer() + for i := 0; i < b.N; i++ { + GeScalarMultBase(&P, &s) + } +} + +func BenchmarkGeScalarMult(b *testing.B) { + var s [32]byte + rand.Reader.Read(s[:]) + + var P ExtendedGroupElement + s[31] &= 127 + GeScalarMultBase(&P, &s) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + GeScalarMult(&P, &s, &P) + } +} + +func BenchmarkGeDoubleScalarMultVartime(b *testing.B) { + var s [32]byte + rand.Reader.Read(s[:]) + + var P, Pout ExtendedGroupElement + s[31] &= 127 + GeScalarMultBase(&P, &s) + + var out ProjectiveGroupElement + + b.ResetTimer() + for i := 0; i < b.N; i++ { + GeDoubleScalarMultVartime(&out, &s, &P, &[32]byte{}) + out.ToExtended(&Pout) + } +} + +func BenchmarkGeAdd(b *testing.B) { + var s [32]byte + rand.Reader.Read(s[:]) + + var R, P ExtendedGroupElement + s[31] &= 127 + GeScalarMultBase(&P, &s) + R = P + + b.ResetTimer() + for i := 0; i < b.N; i++ { + GeAdd(&R, &R, &P) + } +} + +func BenchmarkGeDouble(b *testing.B) { + var s [32]byte + rand.Reader.Read(s[:]) + + var R, P ExtendedGroupElement + s[31] &= 127 + GeScalarMultBase(&P, &s) + R = P + + b.ResetTimer() + for i := 0; i < b.N; i++ { + GeDouble(&R, &P) + } +} diff --git 
a/internal/ed25519/extra25519/extra25519.go b/internal/ed25519/extra25519/extra25519.go new file mode 100644 index 0000000..a024238 --- /dev/null +++ b/internal/ed25519/extra25519/extra25519.go @@ -0,0 +1,377 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package extra25519 implements fast arithmetic on the extended twisted Edwards curve. +package extra25519 + +import ( + "crypto/sha512" + + "github.com/sonr-io/sonr/crypto/internal/ed25519/edwards25519" +) + +// PrivateKeyToCurve25519 converts an ed25519 private key into a corresponding +// curve25519 private key such that the resulting curve25519 public key will +// equal the result from PublicKeyToCurve25519. +func PrivateKeyToCurve25519(curve25519Private *[32]byte, privateKey *[64]byte) { + h := sha512.New() + h.Write(privateKey[:32]) + digest := h.Sum(nil) + + digest[0] &= 248 + digest[31] &= 127 + digest[31] |= 64 + + copy(curve25519Private[:], digest) +} + +func edwardsToMontgomeryX(outX, y *edwards25519.FieldElement) { + // We only need the x-coordinate of the curve25519 point, which I'll + // call u. The isomorphism is u=(y+1)/(1-y), since y=Y/Z, this gives + // u=(Y+Z)/(Z-Y). We know that Z=1, thus u=(Y+1)/(1-Y). + var oneMinusY edwards25519.FieldElement + edwards25519.FeOne(&oneMinusY) + edwards25519.FeSub(&oneMinusY, &oneMinusY, y) + edwards25519.FeInvert(&oneMinusY, &oneMinusY) + + edwards25519.FeOne(outX) + edwards25519.FeAdd(outX, outX, y) + + edwards25519.FeMul(outX, outX, &oneMinusY) +} + +// PublicKeyToCurve25519 converts an Ed25519 public key into the curve25519 +// public key that would be generated from the same private key. +func PublicKeyToCurve25519(curve25519Public *[32]byte, publicKey *[32]byte) bool { + var A edwards25519.ExtendedGroupElement + if !A.FromBytes(publicKey) { + return false + } + + // A.Z = 1 as a postcondition of FromBytes. + var x edwards25519.FieldElement + edwardsToMontgomeryX(&x, &A.Y) + edwards25519.FeToBytes(curve25519Public, &x) + return true +} + +// sqrtMinusA is sqrt(-486662) +var sqrtMinusA = edwards25519.FieldElement{ + 12222970, 8312128, 11511410, -9067497, 15300785, 241793, -25456130, -14121551, 12187136, -3972024, +} + +// sqrtMinusHalf is sqrt(-1/2) +var sqrtMinusHalf = edwards25519.FieldElement{ + -17256545, 3971863, 28865457, -1750208, 27359696, -16640980, 12573105, 1002827, -163343, 11073975, +} + +// halfQMinus1Bytes is (2^255-20)/2 expressed in little endian form. +var halfQMinus1Bytes = [32]byte{ + 0xf6, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x3f, +} + +// feBytesLess returns one if a <= b and zero otherwise. +func feBytesLE(a, b *[32]byte) int32 { + equalSoFar := int32(-1) + greater := int32(0) + + for i := uint(31); i < 32; i-- { + x := int32(a[i]) + y := int32(b[i]) + + greater = (^equalSoFar & greater) | (equalSoFar & ((x - y) >> 31)) + equalSoFar = equalSoFar & (((x ^ y) - 1) >> 31) + } + + return int32(^equalSoFar & 1 & greater) +} + +// ScalarBaseMult computes a curve25519 public key from a private key and also +// a uniform representative for that public key. Note that this function will +// fail and return false for about half of private keys. +// See http://elligator.cr.yp.to/elligator-20130828.pdf. 
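+//
+// Illustrative sketch (editorial addition, mirroring BenchmarkKeyGeneration in the
+// test file): since only about half of all private keys map into the Elligator
+// image, callers retry with fresh randomness until the function succeeds:
+//
+//	var pub, repr, priv [32]byte
+//	for {
+//		rand.Reader.Read(priv[:]) // crypto/rand; error handling elided
+//		if ScalarBaseMult(&pub, &repr, &priv) {
+//			break // repr is a uniform-looking representative of pub
+//		}
+//	}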
+func ScalarBaseMult(publicKey, representative, privateKey *[32]byte) bool { + var maskedPrivateKey [32]byte + copy(maskedPrivateKey[:], privateKey[:]) + + maskedPrivateKey[0] &= 248 + maskedPrivateKey[31] &= 127 + maskedPrivateKey[31] |= 64 + + var A edwards25519.ExtendedGroupElement + edwards25519.GeScalarMultBase(&A, &maskedPrivateKey) + + var inv1 edwards25519.FieldElement + edwards25519.FeSub(&inv1, &A.Z, &A.Y) + edwards25519.FeMul(&inv1, &inv1, &A.X) + edwards25519.FeInvert(&inv1, &inv1) + + var t0, u edwards25519.FieldElement + edwards25519.FeMul(&u, &inv1, &A.X) + edwards25519.FeAdd(&t0, &A.Y, &A.Z) + edwards25519.FeMul(&u, &u, &t0) + + var v edwards25519.FieldElement + edwards25519.FeMul(&v, &t0, &inv1) + edwards25519.FeMul(&v, &v, &A.Z) + edwards25519.FeMul(&v, &v, &sqrtMinusA) + + var b edwards25519.FieldElement + edwards25519.FeAdd(&b, &u, &edwards25519.A) + + var c, b3, b8 edwards25519.FieldElement + edwards25519.FeSquare(&b3, &b) // 2 + edwards25519.FeMul(&b3, &b3, &b) // 3 + edwards25519.FeSquare(&c, &b3) // 6 + edwards25519.FeMul(&c, &c, &b) // 7 + edwards25519.FeMul(&b8, &c, &b) // 8 + edwards25519.FeMul(&c, &c, &u) + q58(&c, &c) + + var chi edwards25519.FieldElement + edwards25519.FeSquare(&chi, &c) + edwards25519.FeSquare(&chi, &chi) + + edwards25519.FeSquare(&t0, &u) + edwards25519.FeMul(&chi, &chi, &t0) + + edwards25519.FeSquare(&t0, &b) // 2 + edwards25519.FeMul(&t0, &t0, &b) // 3 + edwards25519.FeSquare(&t0, &t0) // 6 + edwards25519.FeMul(&t0, &t0, &b) // 7 + edwards25519.FeSquare(&t0, &t0) // 14 + edwards25519.FeMul(&chi, &chi, &t0) + edwards25519.FeNeg(&chi, &chi) + + var chiBytes [32]byte + edwards25519.FeToBytes(&chiBytes, &chi) + // chi[1] is either 0 or 0xff + if chiBytes[1] == 0xff { + return false + } + + // Calculate r1 = sqrt(-u/(2*(u+A))) + var r1 edwards25519.FieldElement + edwards25519.FeMul(&r1, &c, &u) + edwards25519.FeMul(&r1, &r1, &b3) + edwards25519.FeMul(&r1, &r1, &sqrtMinusHalf) + + var maybeSqrtM1 edwards25519.FieldElement + edwards25519.FeSquare(&t0, &r1) + edwards25519.FeMul(&t0, &t0, &b) + edwards25519.FeAdd(&t0, &t0, &t0) + edwards25519.FeAdd(&t0, &t0, &u) + + edwards25519.FeOne(&maybeSqrtM1) + edwards25519.FeCMove(&maybeSqrtM1, &edwards25519.SqrtM1, edwards25519.FeIsNonZero(&t0)) + edwards25519.FeMul(&r1, &r1, &maybeSqrtM1) + + // Calculate r = sqrt(-(u+A)/(2u)) + var r edwards25519.FieldElement + edwards25519.FeSquare(&t0, &c) // 2 + edwards25519.FeMul(&t0, &t0, &c) // 3 + edwards25519.FeSquare(&t0, &t0) // 6 + edwards25519.FeMul(&r, &t0, &c) // 7 + + edwards25519.FeSquare(&t0, &u) // 2 + edwards25519.FeMul(&t0, &t0, &u) // 3 + edwards25519.FeMul(&r, &r, &t0) + + edwards25519.FeSquare(&t0, &b8) // 16 + edwards25519.FeMul(&t0, &t0, &b8) // 24 + edwards25519.FeMul(&t0, &t0, &b) // 25 + edwards25519.FeMul(&r, &r, &t0) + edwards25519.FeMul(&r, &r, &sqrtMinusHalf) + + edwards25519.FeSquare(&t0, &r) + edwards25519.FeMul(&t0, &t0, &u) + edwards25519.FeAdd(&t0, &t0, &t0) + edwards25519.FeAdd(&t0, &t0, &b) + edwards25519.FeOne(&maybeSqrtM1) + edwards25519.FeCMove(&maybeSqrtM1, &edwards25519.SqrtM1, edwards25519.FeIsNonZero(&t0)) + edwards25519.FeMul(&r, &r, &maybeSqrtM1) + + var vBytes [32]byte + edwards25519.FeToBytes(&vBytes, &v) + vInSquareRootImage := feBytesLE(&vBytes, &halfQMinus1Bytes) + edwards25519.FeCMove(&r, &r1, vInSquareRootImage) + + edwards25519.FeToBytes(publicKey, &u) + edwards25519.FeToBytes(representative, &r) + return true +} + +// q58 calculates out = z^((p-5)/8). 
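+//
+// Editorial note: because p = 2^255-19 satisfies p ≡ 5 (mod 8), a square root of a
+// square u can be computed as u^((p+3)/8) = u * u^((p-5)/8); the candidate squares
+// to either u or -u, and in the latter case it is corrected by multiplying by
+// SqrtM1 = sqrt(-1), which is how q58 is used in ScalarBaseMult above.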
+func q58(out, z *edwards25519.FieldElement) { + var t1, t2, t3 edwards25519.FieldElement + var i int + + edwards25519.FeSquare(&t1, z) // 2^1 + edwards25519.FeMul(&t1, &t1, z) // 2^1 + 2^0 + edwards25519.FeSquare(&t1, &t1) // 2^2 + 2^1 + edwards25519.FeSquare(&t2, &t1) // 2^3 + 2^2 + edwards25519.FeSquare(&t2, &t2) // 2^4 + 2^3 + edwards25519.FeMul(&t2, &t2, &t1) // 4,3,2,1 + edwards25519.FeMul(&t1, &t2, z) // 4..0 + edwards25519.FeSquare(&t2, &t1) // 5..1 + for i = 1; i < 5; i++ { // 9,8,7,6,5 + edwards25519.FeSquare(&t2, &t2) + } + edwards25519.FeMul(&t1, &t2, &t1) // 9,8,7,6,5,4,3,2,1,0 + edwards25519.FeSquare(&t2, &t1) // 10..1 + for i = 1; i < 10; i++ { // 19..10 + edwards25519.FeSquare(&t2, &t2) + } + edwards25519.FeMul(&t2, &t2, &t1) // 19..0 + edwards25519.FeSquare(&t3, &t2) // 20..1 + for i = 1; i < 20; i++ { // 39..20 + edwards25519.FeSquare(&t3, &t3) + } + edwards25519.FeMul(&t2, &t3, &t2) // 39..0 + edwards25519.FeSquare(&t2, &t2) // 40..1 + for i = 1; i < 10; i++ { // 49..10 + edwards25519.FeSquare(&t2, &t2) + } + edwards25519.FeMul(&t1, &t2, &t1) // 49..0 + edwards25519.FeSquare(&t2, &t1) // 50..1 + for i = 1; i < 50; i++ { // 99..50 + edwards25519.FeSquare(&t2, &t2) + } + edwards25519.FeMul(&t2, &t2, &t1) // 99..0 + edwards25519.FeSquare(&t3, &t2) // 100..1 + for i = 1; i < 100; i++ { // 199..100 + edwards25519.FeSquare(&t3, &t3) + } + edwards25519.FeMul(&t2, &t3, &t2) // 199..0 + edwards25519.FeSquare(&t2, &t2) // 200..1 + for i = 1; i < 50; i++ { // 249..50 + edwards25519.FeSquare(&t2, &t2) + } + edwards25519.FeMul(&t1, &t2, &t1) // 249..0 + edwards25519.FeSquare(&t1, &t1) // 250..1 + edwards25519.FeSquare(&t1, &t1) // 251..2 + edwards25519.FeMul(out, &t1, z) // 251..2,0 +} + +// chi calculates out = z^((p-1)/2). The result is either 1, 0, or -1 depending +// on whether z is a non-zero square, zero, or a non-square. 
+func chi(out, z *edwards25519.FieldElement) { + var t0, t1, t2, t3 edwards25519.FieldElement + var i int + + edwards25519.FeSquare(&t0, z) // 2^1 + edwards25519.FeMul(&t1, &t0, z) // 2^1 + 2^0 + edwards25519.FeSquare(&t0, &t1) // 2^2 + 2^1 + edwards25519.FeSquare(&t2, &t0) // 2^3 + 2^2 + edwards25519.FeSquare(&t2, &t2) // 4,3 + edwards25519.FeMul(&t2, &t2, &t0) // 4,3,2,1 + edwards25519.FeMul(&t1, &t2, z) // 4..0 + edwards25519.FeSquare(&t2, &t1) // 5..1 + for i = 1; i < 5; i++ { // 9,8,7,6,5 + edwards25519.FeSquare(&t2, &t2) + } + edwards25519.FeMul(&t1, &t2, &t1) // 9,8,7,6,5,4,3,2,1,0 + edwards25519.FeSquare(&t2, &t1) // 10..1 + for i = 1; i < 10; i++ { // 19..10 + edwards25519.FeSquare(&t2, &t2) + } + edwards25519.FeMul(&t2, &t2, &t1) // 19..0 + edwards25519.FeSquare(&t3, &t2) // 20..1 + for i = 1; i < 20; i++ { // 39..20 + edwards25519.FeSquare(&t3, &t3) + } + edwards25519.FeMul(&t2, &t3, &t2) // 39..0 + edwards25519.FeSquare(&t2, &t2) // 40..1 + for i = 1; i < 10; i++ { // 49..10 + edwards25519.FeSquare(&t2, &t2) + } + edwards25519.FeMul(&t1, &t2, &t1) // 49..0 + edwards25519.FeSquare(&t2, &t1) // 50..1 + for i = 1; i < 50; i++ { // 99..50 + edwards25519.FeSquare(&t2, &t2) + } + edwards25519.FeMul(&t2, &t2, &t1) // 99..0 + edwards25519.FeSquare(&t3, &t2) // 100..1 + for i = 1; i < 100; i++ { // 199..100 + edwards25519.FeSquare(&t3, &t3) + } + edwards25519.FeMul(&t2, &t3, &t2) // 199..0 + edwards25519.FeSquare(&t2, &t2) // 200..1 + for i = 1; i < 50; i++ { // 249..50 + edwards25519.FeSquare(&t2, &t2) + } + edwards25519.FeMul(&t1, &t2, &t1) // 249..0 + edwards25519.FeSquare(&t1, &t1) // 250..1 + for i = 1; i < 4; i++ { // 253..4 + edwards25519.FeSquare(&t1, &t1) + } + edwards25519.FeMul(out, &t1, &t0) // 253..4,2,1 +} + +// RepresentativeToPublicKey converts a uniform representative value for a +// curve25519 public key, as produced by ScalarBaseMult, to a curve25519 public +// key. 
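+//
+// Illustrative round trip (editorial addition, mirroring TestElligator): a
+// representative emitted by ScalarBaseMult decodes back to the same public key:
+//
+//	var pub, repr, pub2, priv [32]byte
+//	// priv is assumed to hold fresh random bytes
+//	if ScalarBaseMult(&pub, &repr, &priv) {
+//		RepresentativeToPublicKey(&pub2, &repr)
+//		// pub2 now equals pub
+//	}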
+func RepresentativeToPublicKey(publicKey, representative *[32]byte) { + var rr2, v edwards25519.FieldElement + edwards25519.FeFromBytes(&rr2, representative) + representativeToMontgomeryX(&v, &rr2) + edwards25519.FeToBytes(publicKey, &v) +} + +// representativeToMontgomeryX consumes the rr2 input +func representativeToMontgomeryX(v, rr2 *edwards25519.FieldElement) { + var e edwards25519.FieldElement + edwards25519.FeSquare2(rr2, rr2) + rr2[0]++ + edwards25519.FeInvert(rr2, rr2) + edwards25519.FeMul(v, &edwards25519.A, rr2) + edwards25519.FeNeg(v, v) + + var v2, v3 edwards25519.FieldElement + edwards25519.FeSquare(&v2, v) + edwards25519.FeMul(&v3, v, &v2) + edwards25519.FeAdd(&e, &v3, v) + edwards25519.FeMul(&v2, &v2, &edwards25519.A) + edwards25519.FeAdd(&e, &v2, &e) + chi(&e, &e) + var eBytes [32]byte + edwards25519.FeToBytes(&eBytes, &e) + // eBytes[1] is either 0 (for e = 1) or 0xff (for e = -1) + eIsMinus1 := int32(eBytes[1]) & 1 + var negV edwards25519.FieldElement + edwards25519.FeNeg(&negV, v) + edwards25519.FeCMove(v, &negV, eIsMinus1) + + edwards25519.FeZero(&v2) + edwards25519.FeCMove(&v2, &edwards25519.A, eIsMinus1) + edwards25519.FeSub(v, v, &v2) +} + +func montgomeryXToEdwardsY(out, x *edwards25519.FieldElement) { + var t, tt edwards25519.FieldElement + edwards25519.FeOne(&t) + edwards25519.FeAdd(&tt, x, &t) // u+1 + edwards25519.FeInvert(&tt, &tt) // 1/(u+1) + edwards25519.FeSub(&t, x, &t) // u-1 + edwards25519.FeMul(out, &tt, &t) // (u-1)/(u+1) +} + +// HashToEdwards converts a 256-bit hash output into a point on the Edwards +// curve isomorphic to Curve25519 in a manner that preserves +// collision-resistance. The returned curve points are NOT indistinguishable +// from random even if the hash value is. +// Specifically, first one bit of the hash output is set aside for parity and +// the rest is truncated and fed into the elligator bijection (which covers half +// of the points on the elliptic curve). +func HashToEdwards(out *edwards25519.ExtendedGroupElement, h *[32]byte) { + hh := *h + bit := hh[31] >> 7 + hh[31] &= 127 + edwards25519.FeFromBytes(&out.Y, &hh) + representativeToMontgomeryX(&out.X, &out.Y) + montgomeryXToEdwardsY(&out.Y, &out.X) + if ok := out.FromParityAndY(bit, &out.Y); !ok { + panic("HashToEdwards: point not on curve") + } +} diff --git a/internal/ed25519/extra25519/extra25519_test.go b/internal/ed25519/extra25519/extra25519_test.go new file mode 100644 index 0000000..e38740d --- /dev/null +++ b/internal/ed25519/extra25519/extra25519_test.go @@ -0,0 +1,126 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package extra25519 + +import ( + "bytes" + "crypto/rand" + "crypto/sha512" + "testing" + + "github.com/sonr-io/sonr/crypto/internal/ed25519/edwards25519" + "golang.org/x/crypto/curve25519" + "golang.org/x/crypto/ed25519" +) + +func TestCurve25519Conversion(t *testing.T) { + public, private, _ := ed25519.GenerateKey(rand.Reader) + var pubBytes [32]byte + copy(pubBytes[:], public) + var privBytes [64]byte + copy(privBytes[:], private) + + var curve25519Public, curve25519Public2, curve25519Private [32]byte + PrivateKeyToCurve25519(&curve25519Private, &privBytes) + curve25519.ScalarBaseMult(&curve25519Public, &curve25519Private) + + if !PublicKeyToCurve25519(&curve25519Public2, &pubBytes) { + t.Fatalf("PublicKeyToCurve25519 failed") + } + + if !bytes.Equal(curve25519Public[:], curve25519Public2[:]) { + t.Errorf( + "Values didn't match: curve25519 produced %x, conversion produced %x", + curve25519Public[:], + curve25519Public2[:], + ) + } +} + +func TestHashNoCollisions(t *testing.T) { + type intpair struct { + i int + j uint + } + rainbow := make(map[[32]byte]intpair) + N := 25 + if testing.Short() { + N = 3 + } + var h [32]byte + // NOTE: hash values 0b100000000000... and 0b00000000000... both map to + // the identity. this is a core part of the elligator function and not a + // collision we need to worry about because an attacker would need to find + // the preimages of these hashes to exploit it. + h[0] = 1 + for i := 0; i < N; i++ { + for j := range uint(257) { + if j < 256 { + h[j>>3] ^= byte(1) << (j & 7) + } + + var P edwards25519.ExtendedGroupElement + HashToEdwards(&P, &h) + var p [32]byte + P.ToBytes(&p) + if c, ok := rainbow[p]; ok { + t.Fatalf("found collision: (%d, %d) and (%d, %d)", i, j, c.i, c.j) + } + rainbow[p] = intpair{i, j} + + if j < 256 { + h[j>>3] ^= byte(1) << (j & 7) + } + } + hh := sha512.Sum512(h[:]) // this package already imports sha512 + copy(h[:], hh[:]) + } +} + +func TestElligator(t *testing.T) { + var publicKey, publicKey2, publicKey3, representative, privateKey [32]byte + + for range 1000 { + rand.Reader.Read(privateKey[:]) + + if !ScalarBaseMult(&publicKey, &representative, &privateKey) { + continue + } + RepresentativeToPublicKey(&publicKey2, &representative) + if !bytes.Equal(publicKey[:], publicKey2[:]) { + t.Fatal("The resulting public key doesn't match the initial one.") + } + + curve25519.ScalarBaseMult(&publicKey3, &privateKey) + if !bytes.Equal(publicKey[:], publicKey3[:]) { + t.Fatal("The public key doesn't match the value that curve25519 produced.") + } + } +} + +func BenchmarkKeyGeneration(b *testing.B) { + var publicKey, representative, privateKey [32]byte + + // Find the private key that results in a point that's in the image of the map. + for { + rand.Reader.Read(privateKey[:]) + if ScalarBaseMult(&publicKey, &representative, &privateKey) { + break + } + } + + for b.Loop() { + ScalarBaseMult(&publicKey, &representative, &privateKey) + } +} + +func BenchmarkMap(b *testing.B) { + var publicKey, representative [32]byte + rand.Reader.Read(representative[:]) + + for b.Loop() { + RepresentativeToPublicKey(&publicKey, &representative) + } +} diff --git a/internal/err.go b/internal/err.go new file mode 100755 index 0000000..b5b6934 --- /dev/null +++ b/internal/err.go @@ -0,0 +1,22 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+//
+// SPDX-License-Identifier: Apache-2.0
+//
+
+package internal
+
+import "fmt"
+
+var (
+	ErrNotOnCurve           = fmt.Errorf("point not on the curve")
+	ErrPointsDistinctCurves = fmt.Errorf("points must be from the same curve")
+	ErrZmMembership         = fmt.Errorf("x ∉ Z_m")
+	ErrResidueOne           = fmt.Errorf("value must be 1 (mod N)")
+	ErrNCannotBeZero        = fmt.Errorf("n cannot be 0")
+	ErrNilArguments         = fmt.Errorf("arguments cannot be nil")
+	ErrZeroValue            = fmt.Errorf("arguments cannot be 0")
+	ErrInvalidRound         = fmt.Errorf("invalid round method called")
+	ErrIncorrectCount       = fmt.Errorf("incorrect number of inputs")
+	ErrInvalidJson          = fmt.Errorf("json format does not contain the necessary data")
+)
diff --git a/internal/hash.go b/internal/hash.go
new file mode 100755
index 0000000..b05f66f
--- /dev/null
+++ b/internal/hash.go
@@ -0,0 +1,97 @@
+//
+// Copyright Coinbase, Inc. All Rights Reserved.
+//
+// SPDX-License-Identifier: Apache-2.0
+//
+
+package internal
+
+import (
+	"bytes"
+	"crypto/sha256"
+	"fmt"
+
+	"golang.org/x/crypto/hkdf"
+)
+
+// Hash computes the HKDF over many values iteratively,
+// hashing each value separately and binding it to the
+// values that precede it.
+//
+// The first value is computed as okm_0 = KDF(f || value), where
+// f is a byte slice of 32 0xFF bytes,
+// salt is a zero-filled byte slice with length equal to the hash output length,
+// info is the protocol name,
+// and okm is the 32-byte output.
+//
+// Each subsequent iteration is computed as okm_i = KDF(f_i || value || okm_{i-1}),
+// where f_i = 2^b - 1 - i, so the prefix bytes before the value remain mostly 0xFF:
+// f_1 changes the first byte to 0xFE, f_2 to 0xFD. The previous okm is appended to the value
+// to provide cryptographic domain separation.
+// See https://signal.org/docs/specifications/x3dh/#cryptographic-notation
+// and https://signal.org/docs/specifications/xeddsa/#hash-functions
+// for more details.
+// This uses a KDF similar to X3DH for each `value`,
+// but changes the key just like XEdDSA, where the prefix bytes change by a single bit.
+func Hash(info []byte, values ...[]byte) ([]byte, error) {
+	// Don't accept any nil arguments
+	if anyNil(values...) {
+		return nil, ErrNilArguments
+	}
+
+	salt := make([]byte, 32)
+	okm := make([]byte, 32)
+	f := bytes.Repeat([]byte{0xFF}, 32)
+
+	for _, b := range values {
+		ikm := append(f, b...)
+		ikm = append(ikm, okm...)
+		kdf := hkdf.New(sha256.New, ikm, salt, info)
+		n, err := kdf.Read(okm)
+		if err != nil {
+			return nil, err
+		}
+		if n != len(okm) {
+			return nil, fmt.Errorf(
+				"unable to read expected number of bytes want=%v got=%v",
+				len(okm),
+				n,
+			)
+		}
+		ByteSub(f)
+	}
+	return okm, nil
+}
+
+func anyNil(values ...[]byte) bool {
+	for _, x := range values {
+		if x == nil {
+			return true
+		}
+	}
+	return false
+}
+
+// ByteSub is a constant time algorithm for subtracting
+// 1 from the array as if it were a big number.
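+// The borrow runs from index 0 upward, so starting from 32 bytes of 0xFF the
+// first byte steps through 0xFE, 0xFD, ... on successive calls (see TestByteSub).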
+// 0 is considered a wrap which resets to 0xFF +func ByteSub(b []byte) { + m := byte(1) + for i := 0; i < len(b); i++ { + b[i] -= m + + // If b[i] > 0, s == 0 + // If b[i] == 0, s == 1 + // Computing IsNonZero(b[i]) + s1 := int8(b[i]) >> 7 + s2 := -int8(b[i]) >> 7 + s := byte((s1 | s2) + 1) + + // If s == 0, don't subtract anymore + // s == 1, continue subtracting + m = s & m + // If s == 0 this does nothing + // If s == 1 reset this value to 0xFF + b[i] |= -s + } +} diff --git a/internal/hash_test.go b/internal/hash_test.go new file mode 100755 index 0000000..638cc90 --- /dev/null +++ b/internal/hash_test.go @@ -0,0 +1,62 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package internal + +import ( + "bytes" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestByteSub(t *testing.T) { + f := bytes.Repeat([]byte{0xFF}, 32) + ByteSub(f) + require.Equal(t, f[0], byte(0xFE)) + for i := 1; i < len(f); i++ { + require.Equal(t, f[i], byte(0xFF)) + } + ByteSub(f) + require.Equal(t, f[0], byte(0xFD)) + for i := 1; i < len(f); i++ { + require.Equal(t, f[i], byte(0xFF)) + } + f[0] = 0x2 + ByteSub(f) + for i := 1; i < len(f); i++ { + require.Equal(t, f[i], byte(0xFF)) + } + ByteSub(f) + require.Equal(t, f[0], byte(0xFF)) + require.Equal(t, f[1], byte(0xFE)) + for i := 2; i < len(f); i++ { + require.Equal(t, f[i], byte(0xFF)) + } + ByteSub(f) + require.Equal(t, f[0], byte(0xFE)) + require.Equal(t, f[1], byte(0xFE)) + for i := 2; i < len(f); i++ { + require.Equal(t, f[i], byte(0xFF)) + } + f[0] = 1 + f[1] = 1 + ByteSub(f) + require.Equal(t, f[0], byte(0xFF)) + require.Equal(t, f[1], byte(0xFF)) + require.Equal(t, f[2], byte(0xFE)) + for i := 3; i < len(f); i++ { + require.Equal(t, f[i], byte(0xFF)) + } +} + +func TestByteSubAll1(t *testing.T) { + f := bytes.Repeat([]byte{0x1}, 32) + ByteSub(f) + for i := 0; i < len(f); i++ { + require.Equal(t, f[i], byte(0xFF)) + } +} diff --git a/internal/point.go b/internal/point.go new file mode 100755 index 0000000..6393e80 --- /dev/null +++ b/internal/point.go @@ -0,0 +1,44 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package internal + +import ( + "crypto/elliptic" + "math/big" + + "filippo.io/edwards25519" +) + +func CalcFieldSize(curve elliptic.Curve) int { + bits := curve.Params().BitSize + return (bits + 7) / 8 +} + +func ReverseScalarBytes(inBytes []byte) []byte { + outBytes := make([]byte, len(inBytes)) + + for i, j := 0, len(inBytes)-1; j >= 0; i, j = i+1, j-1 { + outBytes[i] = inBytes[j] + } + + return outBytes +} + +func BigInt2Ed25519Point(y *big.Int) (*edwards25519.Point, error) { + b := y.Bytes() + var arr [32]byte + copy(arr[32-len(b):], b) + return edwards25519.NewIdentityPoint().SetBytes(arr[:]) +} + +func BigInt2Ed25519Scalar(x *big.Int) (*edwards25519.Scalar, error) { + // big.Int is big endian; ed25519 assumes little endian encoding + kBytes := ReverseScalarBytes(x.Bytes()) + var arr [32]byte + copy(arr[:], kBytes) + return edwards25519.NewScalar().SetCanonicalBytes(arr[:]) +} diff --git a/internal/testutils.go b/internal/testutils.go new file mode 100755 index 0000000..eec4897 --- /dev/null +++ b/internal/testutils.go @@ -0,0 +1,21 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package internal + +import ( + "math/big" +) + +// B10 creating a big.Int from a base 10 string. 
panics on failure to +// ensure zero-values aren't used in place of malformed strings. +func B10(s string) *big.Int { + x, ok := new(big.Int).SetString(s, 10) + if !ok { + panic("Couldn't derive big.Int from string") + } + return x +} diff --git a/keys/didkey.go b/keys/didkey.go new file mode 100644 index 0000000..c4fb298 --- /dev/null +++ b/keys/didkey.go @@ -0,0 +1,300 @@ +package keys + +import ( + "crypto/ed25519" + "crypto/rsa" + "crypto/x509" + "fmt" + "strings" + + "github.com/libp2p/go-libp2p/core/crypto" + mb "github.com/multiformats/go-multibase" + varint "github.com/multiformats/go-varint" +) + +const ( + // KeyPrefix indicates a decentralized identifier that uses the key method + KeyPrefix = "did:key" + // MulticodecKindRSAPubKey rsa-x509-pub https://github.com/multiformats/multicodec/pull/226 + MulticodecKindRSAPubKey = 0x1205 + // MulticodecKindEd25519PubKey ed25519-pub + MulticodecKindEd25519PubKey = 0xed + // MulticodecKindSecp256k1PubKey secp256k1-pub + MulticodecKindSecp256k1PubKey = 0xe7 +) + +// DID is a DID:key identifier +type DID struct { + crypto.PubKey +} + +// NewDID constructs an Identifier from a public key +func NewDID(pub crypto.PubKey) (DID, error) { + switch pub.Type() { + case crypto.Ed25519, crypto.RSA, crypto.Secp256k1: + return DID{PubKey: pub}, nil + default: + return DID{}, fmt.Errorf("unsupported key type: %s", pub.Type()) + } +} + +// NewFromPubKey constructs an Identifier from a public key +func NewFromPubKey(pub PubKey) DID { + return DID{PubKey: pub} +} + +// MulticodecType indicates the type for this multicodec +func (id DID) MulticodecType() uint64 { + switch id.Type() { + case crypto.RSA: + return MulticodecKindRSAPubKey + case crypto.Ed25519: + return MulticodecKindEd25519PubKey + case crypto.Secp256k1: + return MulticodecKindSecp256k1PubKey + default: + panic("unexpected crypto type") + } +} + +// String returns this did:key formatted as a string +func (id DID) String() string { + raw, err := id.Raw() + if err != nil { + return "" + } + + t := id.MulticodecType() + size := varint.UvarintSize(t) + data := make([]byte, size+len(raw)) + n := varint.PutUvarint(data, t) + copy(data[n:], raw) + + b58BKeyStr, err := mb.Encode(mb.Base58BTC, data) + if err != nil { + return "" + } + + return fmt.Sprintf("%s:%s", KeyPrefix, b58BKeyStr) +} + +// PublicKey returns the underlying crypto.PubKey +func (id DID) PublicKey() crypto.PubKey { + return id.PubKey +} + +// VerifyKey returns the backing implementation for a public key, one of: +// *rsa.PublicKey, ed25519.PublicKey +func (id DID) VerifyKey() (any, error) { + rawPubBytes, err := id.Raw() + if err != nil { + return nil, err + } + switch id.Type() { + case crypto.RSA: + verifyKeyiface, err := x509.ParsePKIXPublicKey(rawPubBytes) + if err != nil { + return nil, err + } + verifyKey, ok := verifyKeyiface.(*rsa.PublicKey) + if !ok { + return nil, fmt.Errorf("public key is not an RSA key. 
got type: %T", verifyKeyiface) + } + return verifyKey, nil + case crypto.Ed25519: + return ed25519.PublicKey(rawPubBytes), nil + case crypto.Secp256k1: + // Handle both compressed and uncompressed Secp256k1 public keys + if len(rawPubBytes) == 65 || len(rawPubBytes) == 33 { + return rawPubBytes, nil + } + return nil, fmt.Errorf("invalid Secp256k1 public key length: %d", len(rawPubBytes)) + default: + return nil, fmt.Errorf("unrecognized Public Key type: %s", id.Type()) + } +} + +// Parse turns a string into a key method ID +func Parse(keystr string) (DID, error) { + var id DID + if !strings.HasPrefix(keystr, KeyPrefix) { + return id, fmt.Errorf("decentralized identifier is not a 'key' type") + } + + keystr = strings.TrimPrefix(keystr, KeyPrefix+":") + + enc, data, err := mb.Decode(keystr) + if err != nil { + return id, fmt.Errorf("decoding multibase: %w", err) + } + + if enc != mb.Base58BTC { + return id, fmt.Errorf("unexpected multibase encoding: %s", mb.EncodingToStr[enc]) + } + + keyType, n, err := varint.FromUvarint(data) + if err != nil { + return id, err + } + + switch keyType { + case MulticodecKindRSAPubKey: + pub, err := crypto.UnmarshalRsaPublicKey(data[n:]) + if err != nil { + return id, err + } + return DID{pub}, nil + case MulticodecKindEd25519PubKey: + pub, err := crypto.UnmarshalEd25519PublicKey(data[n:]) + if err != nil { + return id, err + } + return DID{pub}, nil + case MulticodecKindSecp256k1PubKey: + // Handle both compressed and uncompressed formats + keyData := data[n:] + if len(keyData) != 33 && len(keyData) != 65 { + return id, fmt.Errorf("invalid Secp256k1 public key length: %d", len(keyData)) + } + pub, err := crypto.UnmarshalSecp256k1PublicKey(keyData) + if err != nil { + return id, fmt.Errorf("failed to unmarshal Secp256k1 key: %w", err) + } + return DID{pub}, nil + } + + return id, fmt.Errorf("unrecognized key type multicodec prefix: %x", data[0]) +} + +// NewFromMPCPubKey creates a DID from MPC enclave public key bytes. +// This is specifically designed for MPC enclave integration where public key bytes +// are provided directly from the enclave without additional encoding. +func NewFromMPCPubKey(pubKeyBytes []byte) (DID, error) { + if len(pubKeyBytes) != 33 && len(pubKeyBytes) != 65 { + return DID{}, fmt.Errorf( + "invalid Secp256k1 public key length: %d, expected 33 or 65 bytes", + len(pubKeyBytes), + ) + } + + pub, err := crypto.UnmarshalSecp256k1PublicKey(pubKeyBytes) + if err != nil { + return DID{}, fmt.Errorf("failed to unmarshal Secp256k1 key: %w", err) + } + + return DID{PubKey: pub}, nil +} + +// Address derives a blockchain-compatible address from the DID. +// This provides a consistent address format for use across different blockchain contexts. 
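+//
+// A minimal usage sketch (key generation as in the package tests; error
+// handling elided):
+//
+//	_, pub, _ := crypto.GenerateSecp256k1Key(rand.Reader)
+//	did, _ := NewDID(pub)
+//	addr, _ := did.Address() // "sonr1" followed by the hex of the first 8 key bytes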
+func (id DID) Address() (string, error) {
+	rawPubBytes, err := id.Raw()
+	if err != nil {
+		return "", fmt.Errorf("failed to get raw public key: %w", err)
+	}
+
+	switch id.Type() {
+	case crypto.Secp256k1:
+		// For Secp256k1, derive address from compressed public key
+		if len(rawPubBytes) == 65 {
+			// Convert uncompressed to compressed format if needed
+			pubKey := rawPubBytes[1:] // Remove 0x04 prefix
+			x := pubKey[:32]
+			y := pubKey[32:]
+
+			// Determine compression prefix (0x02 for even y, 0x03 for odd y)
+			prefix := byte(0x02)
+			if y[31]&1 == 1 {
+				prefix = 0x03
+			}
+
+			compressedKey := make([]byte, 33)
+			compressedKey[0] = prefix
+			copy(compressedKey[1:], x)
+			rawPubBytes = compressedKey
+		}
+
+		// Derive a short address from the hex encoding of the first 8 bytes of the compressed key
+		return fmt.Sprintf("sonr1%x", rawPubBytes[:8]), nil
+
+	case crypto.Ed25519:
+		// For Ed25519, use the first 8 bytes of the raw public key
+		return fmt.Sprintf("sonr1%x", rawPubBytes[:8]), nil
+
+	case crypto.RSA:
+		// For RSA, use the first 8 bytes of the DER-encoded public key
+		return fmt.Sprintf("sonr1%x", rawPubBytes[:8]), nil
+
+	default:
+		return "", fmt.Errorf("unsupported key type for address derivation: %s", id.Type())
+	}
+}
+
+// CompressedPubKey returns the compressed public key bytes for Secp256k1 keys.
+// For other key types, returns the raw public key bytes.
+func (id DID) CompressedPubKey() ([]byte, error) {
+	rawPubBytes, err := id.Raw()
+	if err != nil {
+		return nil, fmt.Errorf("failed to get raw public key: %w", err)
+	}
+
+	switch id.Type() {
+	case crypto.Secp256k1:
+		if len(rawPubBytes) == 33 {
+			// Already compressed
+			return rawPubBytes, nil
+		} else if len(rawPubBytes) == 65 {
+			// Convert uncompressed to compressed
+			pubKey := rawPubBytes[1:] // Remove 0x04 prefix
+			x := pubKey[:32]
+			y := pubKey[32:]
+
+			// Determine compression prefix (0x02 for even y, 0x03 for odd y)
+			prefix := byte(0x02)
+			if y[31]&1 == 1 {
+				prefix = 0x03
+			}
+
+			compressedKey := make([]byte, 33)
+			compressedKey[0] = prefix
+			copy(compressedKey[1:], x)
+			return compressedKey, nil
+		}
+		return nil, fmt.Errorf("invalid Secp256k1 public key length: %d", len(rawPubBytes))
+
+	default:
+		// For non-Secp256k1 keys, return raw bytes
+		return rawPubBytes, nil
+	}
+}
+
+// ValidateFormat validates that the DID string conforms to proper did:key format.
+// This ensures the DID follows the W3C DID specification with proper multicodec encoding.
+func ValidateFormat(didString string) error {
+	if !strings.HasPrefix(didString, KeyPrefix) {
+		return fmt.Errorf("DID must start with '%s'", KeyPrefix)
+	}
+
+	// Try to parse the DID to validate its structure
+	_, err := Parse(didString)
+	if err != nil {
+		return fmt.Errorf("invalid DID format: %w", err)
+	}
+
+	return nil
+}
+
+// GetMulticodecType returns the multicodec type for a given crypto key type.
+// This is useful for external validation and encoding operations.
+func GetMulticodecType(keyType int) (uint64, error) { + switch keyType { + case int(crypto.RSA): + return MulticodecKindRSAPubKey, nil + case int(crypto.Ed25519): + return MulticodecKindEd25519PubKey, nil + case int(crypto.Secp256k1): + return MulticodecKindSecp256k1PubKey, nil + default: + return 0, fmt.Errorf("unsupported key type: %d", keyType) + } +} diff --git a/keys/didkey_test.go b/keys/didkey_test.go new file mode 100644 index 0000000..1e2468f --- /dev/null +++ b/keys/didkey_test.go @@ -0,0 +1,279 @@ +package keys + +import ( + "crypto/rand" + "testing" + + "github.com/libp2p/go-libp2p/core/crypto" +) + +// generateSecp256k1Key generates a test Secp256k1 key pair +func generateSecp256k1Key(t *testing.T) crypto.PrivKey { + privKey, _, err := crypto.GenerateSecp256k1Key(rand.Reader) + if err != nil { + t.Fatalf("Failed to generate Secp256k1 key: %v", err) + } + return privKey +} + +// TestNewFromMPCPubKey tests creating DIDs from MPC public key bytes +func TestNewFromMPCPubKey(t *testing.T) { + privKey := generateSecp256k1Key(t) + pubKey := privKey.GetPublic() + + // Get raw public key bytes + pubKeyBytes, err := pubKey.Raw() + if err != nil { + t.Fatalf("Failed to get raw public key: %v", err) + } + + // Test with compressed key (33 bytes) + if len(pubKeyBytes) == 33 { + did, err := NewFromMPCPubKey(pubKeyBytes) + if err != nil { + t.Errorf("NewFromMPCPubKey failed with compressed key: %v", err) + } + + if did.Type() != crypto.Secp256k1 { + t.Errorf("Expected Secp256k1 key type, got %v", did.Type()) + } + } + + // Test with invalid key lengths + invalidKeys := [][]byte{ + make([]byte, 32), // Too short + make([]byte, 34), // Wrong length + make([]byte, 66), // Too long + } + + for _, invalidKey := range invalidKeys { + _, err := NewFromMPCPubKey(invalidKey) + if err == nil { + t.Errorf("Expected error with invalid key length %d, got nil", len(invalidKey)) + } + } +} + +// TestSecp256k1MulticodecFix tests that the multicodec value is correct +func TestSecp256k1MulticodecFix(t *testing.T) { + if MulticodecKindSecp256k1PubKey != 0xe7 { + t.Errorf( + "Expected Secp256k1 multicodec to be 0xe7, got 0x%x", + MulticodecKindSecp256k1PubKey, + ) + } + + privKey := generateSecp256k1Key(t) + pubKey := privKey.GetPublic() + + did := DID{PubKey: pubKey} + multicodecType := did.MulticodecType() + + if multicodecType != 0xe7 { + t.Errorf("Expected multicodec type 0xe7, got 0x%x", multicodecType) + } +} + +// TestAddress tests blockchain-compatible address derivation +func TestAddress(t *testing.T) { + privKey := generateSecp256k1Key(t) + pubKey := privKey.GetPublic() + + did := DID{PubKey: pubKey} + + address, err := did.Address() + if err != nil { + t.Fatalf("Failed to derive address: %v", err) + } + + // Check that address starts with "sonr1" + if len(address) < 5 || address[:5] != "sonr1" { + t.Errorf("Expected address to start with 'sonr1', got %s", address) + } + + // Check that address is deterministic + address2, err := did.Address() + if err != nil { + t.Fatalf("Failed to derive address second time: %v", err) + } + + if address != address2 { + t.Errorf("Address derivation not deterministic: %s != %s", address, address2) + } +} + +// TestCompressedPubKey tests public key compression +func TestCompressedPubKey(t *testing.T) { + privKey := generateSecp256k1Key(t) + pubKey := privKey.GetPublic() + + did := DID{PubKey: pubKey} + + compressed, err := did.CompressedPubKey() + if err != nil { + t.Fatalf("Failed to get compressed public key: %v", err) + } + + // Check that compressed key is 33 
bytes for Secp256k1 + if len(compressed) != 33 { + t.Errorf("Expected compressed key length 33, got %d", len(compressed)) + } + + // Check that compression prefix is valid (0x02 or 0x03) + if compressed[0] != 0x02 && compressed[0] != 0x03 { + t.Errorf("Invalid compression prefix: 0x%02x", compressed[0]) + } +} + +// TestValidateFormat tests DID string format validation +func TestValidateFormat(t *testing.T) { + privKey := generateSecp256k1Key(t) + pubKey := privKey.GetPublic() + + did := DID{PubKey: pubKey} + didString := did.String() + + // Valid DID should pass validation + err := ValidateFormat(didString) + if err != nil { + t.Errorf("Valid DID failed validation: %v", err) + } + + // Invalid DIDs should fail validation + invalidDIDs := []string{ + "invalid:key:z123", // Wrong method + "did:invalid:z123", // Wrong key method + "did:key:invalid", // Invalid encoding + "not-a-did-at-all", // Not a DID + "", // Empty string + } + + for _, invalidDID := range invalidDIDs { + err := ValidateFormat(invalidDID) + if err == nil { + t.Errorf("Invalid DID '%s' passed validation", invalidDID) + } + } +} + +// TestGetMulticodecType tests multicodec type lookup +func TestGetMulticodecType(t *testing.T) { + testCases := []struct { + keyType int + expected uint64 + }{ + {int(crypto.RSA), MulticodecKindRSAPubKey}, + {int(crypto.Ed25519), MulticodecKindEd25519PubKey}, + {int(crypto.Secp256k1), MulticodecKindSecp256k1PubKey}, + } + + for _, tc := range testCases { + result, err := GetMulticodecType(tc.keyType) + if err != nil { + t.Errorf("GetMulticodecType failed for type %d: %v", tc.keyType, err) + } + if result != tc.expected { + t.Errorf( + "GetMulticodecType for type %d: expected 0x%x, got 0x%x", + tc.keyType, + tc.expected, + result, + ) + } + } + + // Test invalid key type + _, err := GetMulticodecType(999) + if err == nil { + t.Errorf("GetMulticodecType should fail for invalid key type") + } +} + +// TestDIDStringFormat tests complete DID string generation and parsing +func TestDIDStringFormat(t *testing.T) { + privKey := generateSecp256k1Key(t) + pubKey := privKey.GetPublic() + + did := DID{PubKey: pubKey} + didString := did.String() + + // Check that DID starts with "did:key:z" + if len(didString) < 9 || didString[:9] != "did:key:z" { + t.Errorf("DID string should start with 'did:key:z', got %s", didString) + } + + // Parse the DID back + parsedDID, err := Parse(didString) + if err != nil { + t.Fatalf("Failed to parse generated DID: %v", err) + } + + // Verify parsed DID generates same string + parsedString := parsedDID.String() + if parsedString != didString { + t.Errorf("Parsed DID string mismatch: %s != %s", parsedString, didString) + } + + // Verify key types match + if parsedDID.Type() != did.Type() { + t.Errorf("Parsed DID key type mismatch: %v != %v", parsedDID.Type(), did.Type()) + } +} + +// TestMPCIntegration tests end-to-end MPC public key integration +func TestMPCIntegration(t *testing.T) { + // Generate a valid MPC enclave public key for testing + privKey := generateSecp256k1Key(t) + pubKey := privKey.GetPublic() + mpcPubKeyBytes, err := pubKey.Raw() + if err != nil { + t.Fatalf("Failed to get raw public key: %v", err) + } + + // Create DID from MPC public key + did, err := NewFromMPCPubKey(mpcPubKeyBytes) + if err != nil { + t.Fatalf("Failed to create DID from MPC public key: %v", err) + } + + // Test DID string generation + didString := did.String() + if len(didString) == 0 { + t.Error("Generated DID string is empty") + } + + // Test address derivation + address, err := 
did.Address() + if err != nil { + t.Fatalf("Failed to derive address: %v", err) + } + + if len(address) == 0 || address[:5] != "sonr1" { + t.Errorf("Invalid address format: %s", address) + } + + // Test compressed public key + compressed, err := did.CompressedPubKey() + if err != nil { + t.Fatalf("Failed to get compressed key: %v", err) + } + + if len(compressed) != 33 { + t.Errorf("Expected 33-byte compressed key, got %d bytes", len(compressed)) + } + + // Verify round-trip: DID -> string -> parse -> DID + parsedDID, err := Parse(didString) + if err != nil { + t.Fatalf("Failed to parse generated DID: %v", err) + } + + parsedAddress, err := parsedDID.Address() + if err != nil { + t.Fatalf("Failed to derive address from parsed DID: %v", err) + } + + if address != parsedAddress { + t.Errorf("Address mismatch after round-trip: %s != %s", address, parsedAddress) + } +} diff --git a/keys/methods.go b/keys/methods.go new file mode 100644 index 0000000..7c972e8 --- /dev/null +++ b/keys/methods.go @@ -0,0 +1,17 @@ +package keys + +type DIDMethod string + +const ( + DIDMethodKey DIDMethod = "key" + DIDMethodSonr DIDMethod = "sonr" + DIDMehthodBitcoin DIDMethod = "btcr" + DIDMethodEthereum DIDMethod = "ethr" + DIDMethodCbor DIDMethod = "cbor" + DIDMethodCID DIDMethod = "cid" + DIDMethodIPFS DIDMethod = "ipfs" +) + +func (d DIDMethod) String() string { + return string(d) +} diff --git a/keys/parsers/btc_parser.go b/keys/parsers/btc_parser.go new file mode 100644 index 0000000..3ece6e2 --- /dev/null +++ b/keys/parsers/btc_parser.go @@ -0,0 +1 @@ +package parsers diff --git a/keys/parsers/cosmos_parser.go b/keys/parsers/cosmos_parser.go new file mode 100644 index 0000000..a66145c --- /dev/null +++ b/keys/parsers/cosmos_parser.go @@ -0,0 +1,12 @@ +package parsers + +type CosmosPrefix string + +const ( + ATOMPrefix CosmosPrefix = "cosmos" + AXELARPrefix CosmosPrefix = "axelar" + EVMOSPrefix CosmosPrefix = "evmos" + OSMOPrefix CosmosPrefix = "osmo" + SONRPrefix CosmosPrefix = "idx" + STARSPrefix CosmosPrefix = "stars" +) diff --git a/keys/parsers/eth_parser.go b/keys/parsers/eth_parser.go new file mode 100644 index 0000000..3ece6e2 --- /dev/null +++ b/keys/parsers/eth_parser.go @@ -0,0 +1 @@ +package parsers diff --git a/keys/parsers/fil_parser.go b/keys/parsers/fil_parser.go new file mode 100644 index 0000000..3ece6e2 --- /dev/null +++ b/keys/parsers/fil_parser.go @@ -0,0 +1 @@ +package parsers diff --git a/keys/parsers/key_parser.go b/keys/parsers/key_parser.go new file mode 100644 index 0000000..017f1ce --- /dev/null +++ b/keys/parsers/key_parser.go @@ -0,0 +1,157 @@ +package parsers + +import ( + "crypto/ed25519" + "crypto/rsa" + "crypto/x509" + "fmt" + "strings" + + "github.com/libp2p/go-libp2p/core/crypto" + mb "github.com/multiformats/go-multibase" + varint "github.com/multiformats/go-varint" +) + +const ( + // KeyPrefix indicates a decentralized identifier that uses the key method + KeyPrefix = "did:key" + // MulticodecKindRSAPubKey rsa-x509-pub https://github.com/multiformats/multicodec/pull/226 + MulticodecKindRSAPubKey = 0x1205 + // MulticodecKindEd25519PubKey ed25519-pub + MulticodecKindEd25519PubKey = 0xed + // MulticodecKindSecp256k1PubKey secp256k1-pub + MulticodecKindSecp256k1PubKey = 0x1206 +) + +// DIDKey is a DID:key identifier +type DIDKey struct { + crypto.PubKey +} + +// NewKeyDID constructs an Identifier from a public key +func NewKeyDID(pub crypto.PubKey) (DIDKey, error) { + switch pub.Type() { + case crypto.Ed25519, crypto.RSA, crypto.Secp256k1: + return DIDKey{PubKey: pub}, nil + 
default: + return DIDKey{}, fmt.Errorf("unsupported key type: %s", pub.Type()) + } +} + +// MulticodecType indicates the type for this multicodec +func (id DIDKey) MulticodecType() uint64 { + switch id.Type() { + case crypto.RSA: + return MulticodecKindRSAPubKey + case crypto.Ed25519: + return MulticodecKindEd25519PubKey + case crypto.Secp256k1: + return MulticodecKindSecp256k1PubKey + default: + panic("unexpected crypto type") + } +} + +// String returns this did:key formatted as a string +func (id DIDKey) String() string { + raw, err := id.Raw() + if err != nil { + return "" + } + + t := id.MulticodecType() + size := varint.UvarintSize(t) + data := make([]byte, size+len(raw)) + n := varint.PutUvarint(data, t) + copy(data[n:], raw) + + b58BKeyStr, err := mb.Encode(mb.Base58BTC, data) + if err != nil { + return "" + } + + return fmt.Sprintf("%s:%s", KeyPrefix, b58BKeyStr) +} + +// VerifyKey returns the backing implementation for a public key, one of: +// *rsa.PublicKey, ed25519.PublicKey +func (id DIDKey) VerifyKey() (any, error) { + rawPubBytes, err := id.PubKey.Raw() + if err != nil { + return nil, err + } + switch id.PubKey.Type() { + case crypto.RSA: + verifyKeyiface, err := x509.ParsePKIXPublicKey(rawPubBytes) + if err != nil { + return nil, err + } + verifyKey, ok := verifyKeyiface.(*rsa.PublicKey) + if !ok { + return nil, fmt.Errorf("public key is not an RSA key. got type: %T", verifyKeyiface) + } + return verifyKey, nil + case crypto.Ed25519: + return ed25519.PublicKey(rawPubBytes), nil + case crypto.Secp256k1: + // Handle both compressed and uncompressed Secp256k1 public keys + if len(rawPubBytes) == 65 || len(rawPubBytes) == 33 { + return rawPubBytes, nil + } + return nil, fmt.Errorf("invalid Secp256k1 public key length: %d", len(rawPubBytes)) + default: + return nil, fmt.Errorf("unrecognized Public Key type: %s", id.Type()) + } +} + +// Parse turns a string into a key method ID +func Parse(keystr string) (DIDKey, error) { + var id DIDKey + if !strings.HasPrefix(keystr, KeyPrefix) { + return id, fmt.Errorf("decentralized identifier is not a 'key' type") + } + + keystr = strings.TrimPrefix(keystr, KeyPrefix+":") + + enc, data, err := mb.Decode(keystr) + if err != nil { + return id, fmt.Errorf("decoding multibase: %w", err) + } + + if enc != mb.Base58BTC { + return id, fmt.Errorf("unexpected multibase encoding: %s", mb.EncodingToStr[enc]) + } + + keyType, n, err := varint.FromUvarint(data) + if err != nil { + return id, err + } + + switch keyType { + case MulticodecKindRSAPubKey: + pub, err := crypto.UnmarshalRsaPublicKey(data[n:]) + if err != nil { + return id, err + } + return DIDKey{pub}, nil + case MulticodecKindEd25519PubKey: + pub, err := crypto.UnmarshalEd25519PublicKey(data[n:]) + if err != nil { + return id, err + } + return DIDKey{pub}, nil + case MulticodecKindSecp256k1PubKey: + // Handle both compressed and uncompressed formats + keyData := data[n:] + if len(keyData) != 33 && len(keyData) != 65 { + return id, fmt.Errorf("invalid Secp256k1 public key length: %d", len(keyData)) + } + pub, err := crypto.UnmarshalSecp256k1PublicKey(keyData) + if err != nil { + return id, fmt.Errorf("failed to unmarshal Secp256k1 key: %w", err) + } + return DIDKey{pub}, nil + } + + return id, fmt.Errorf("unrecognized key type multicodec prefix: %x", data[0]) +} diff --git a/keys/parsers/sol_parser.go b/keys/parsers/sol_parser.go new file mode 100644 index 0000000..3ece6e2 --- /dev/null +++ b/keys/parsers/sol_parser.go @@ -0,0 +1 @@ +package parsers diff --git a/keys/parsers/ton_parser.go 
b/keys/parsers/ton_parser.go new file mode 100644 index 0000000..3ece6e2 --- /dev/null +++ b/keys/parsers/ton_parser.go @@ -0,0 +1 @@ +package parsers diff --git a/keys/pubkey.go b/keys/pubkey.go new file mode 100644 index 0000000..580dece --- /dev/null +++ b/keys/pubkey.go @@ -0,0 +1,85 @@ +package keys + +import ( + "bytes" + "crypto/ecdsa" + "encoding/hex" + + p2pcrypto "github.com/libp2p/go-libp2p/core/crypto" + p2ppb "github.com/libp2p/go-libp2p/core/crypto/pb" + "github.com/sonr-io/sonr/crypto/core/curves" + "golang.org/x/crypto/sha3" +) + +type PubKey interface { + Bytes() []byte + Raw() ([]byte, error) + Equals(b p2pcrypto.Key) bool + Type() p2ppb.KeyType + Hex() string + Verify(msg []byte, sig []byte) (bool, error) +} + +type pubKey struct { + publicPoint curves.Point +} + +func NewPubKey(pk curves.Point) PubKey { + return &pubKey{ + publicPoint: pk, + } +} + +func (p pubKey) Bytes() []byte { + return p.publicPoint.ToAffineCompressed() +} + +func (p pubKey) Raw() ([]byte, error) { + return p.publicPoint.ToAffineCompressed(), nil +} + +func (p pubKey) Equals(b p2pcrypto.Key) bool { + if b == nil { + return false + } + apbz, err := b.Raw() + if err != nil { + return false + } + bbz, err := p.Raw() + if err != nil { + return false + } + return bytes.Equal(apbz, bbz) +} + +func (p pubKey) Hex() string { + return hex.EncodeToString(p.publicPoint.ToAffineCompressed()) +} + +func (p pubKey) Type() p2ppb.KeyType { + return p2ppb.KeyType_Secp256k1 +} + +func (p pubKey) Verify(data []byte, sigBz []byte) (bool, error) { + sig, err := deserializeSignature(sigBz) + if err != nil { + return false, err + } + pp, err := getEcdsaPoint(p.Bytes()) + if err != nil { + return false, err + } + pk := &ecdsa.PublicKey{ + Curve: pp.Curve, + X: pp.X, + Y: pp.Y, + } + + // Hash the message using SHA3-256 + hash := sha3.New256() + hash.Write(data) + digest := hash.Sum(nil) + + return ecdsa.Verify(pk, digest, sig.R, sig.S), nil +} diff --git a/keys/utils.go b/keys/utils.go new file mode 100644 index 0000000..8be76a0 --- /dev/null +++ b/keys/utils.go @@ -0,0 +1,34 @@ +package keys + +import ( + "errors" + "fmt" + "math/big" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +// getEcdsaPoint builds an elliptic curve point from a compressed byte slice +func getEcdsaPoint(pubKey []byte) (*curves.EcPoint, error) { + crv := curves.K256() + x := new(big.Int).SetBytes(pubKey[1:33]) + y := new(big.Int).SetBytes(pubKey[33:]) + ecCurve, err := crv.ToEllipticCurve() + if err != nil { + return nil, fmt.Errorf("error converting curve: %v", err) + } + return &curves.EcPoint{X: x, Y: y, Curve: ecCurve}, nil +} + +// DeserializeSecp256k1Signature deserializes an ECDSA signature from a byte slice +func deserializeSignature(sigBytes []byte) (*curves.EcdsaSignature, error) { + if len(sigBytes) != 66 { + return nil, errors.New("malformed signature: not the correct size") + } + sig := &curves.EcdsaSignature{ + V: int(sigBytes[0]), + R: new(big.Int).SetBytes(sigBytes[1:33]), + S: new(big.Int).SetBytes(sigBytes[33:66]), + } + return sig, nil +} diff --git a/mpc/README.md b/mpc/README.md new file mode 100644 index 0000000..7ea42ba --- /dev/null +++ b/mpc/README.md @@ -0,0 +1,499 @@ +# MPC (Multi-Party Computation) Cryptographic Library + +![Go](https://img.shields.io/badge/Go-1.24+-green) +![MPC](https://img.shields.io/badge/MPC-Threshold_Signing-blue) +![Encryption](https://img.shields.io/badge/Encryption-AES--GCM-red) +![ECDSA](https://img.shields.io/badge/Curve-secp256k1-yellow) + +A comprehensive Go implementation of 
Multi-Party Computation (MPC) primitives for secure distributed cryptography. This package provides threshold signing, encrypted key management, and secure keyshare operations for decentralized applications. + +## Features + +- ✅ **Threshold Cryptography** - 2-of-2 MPC key generation and signing +- ✅ **Secure Enclaves** - Encrypted keyshare storage and management +- ✅ **Multiple Curves** - Support for secp256k1, P-256, Ed25519, BLS12-381, and more +- ✅ **Key Refresh** - Proactive security through keyshare rotation +- ✅ **ECDSA Signing** - Distributed signature generation with SHA3-256 +- ✅ **Encrypted Export/Import** - Secure enclave serialization with AES-GCM +- ✅ **UCAN Integration** - MPC-based JWT signing for User-Controlled Authorization Networks + +## Architecture + +The package is built around the concept of secure **Enclaves** that manage distributed keyshares: + +``` +┌─────────────────────────────────────────────────────────┐ +│ MPC Enclave │ +├─────────────────────────────────────────────────────────┤ +│ ┌─────────────┐ ┌─────────────┐ │ +│ │ Alice Share │ │ Bob Share │ ←── Threshold 2/2 │ +│ │ (Validator) │ │ (User) │ │ +│ └─────────────┘ └─────────────┘ │ +├─────────────────────────────────────────────────────────┤ +│ • Distributed Key Generation (DKG) │ +│ • Threshold Signing (2-of-2) │ +│ • Key Refresh (Proactive Security) │ +│ • Encrypted Storage (AES-GCM) │ +└─────────────────────────────────────────────────────────┘ +``` + +## Quick Start + +### Installation + +```bash +go get github.com/sonr-io/sonr/crypto/mpc +``` + +### Basic Usage + +#### Creating a New MPC Enclave + +```go +package main + +import ( + "fmt" + "github.com/sonr-io/sonr/crypto/mpc" +) + +func main() { + // Generate a new MPC enclave with distributed keyshares + enclave, err := mpc.NewEnclave() + if err != nil { + panic(err) + } + + // Get the public key + pubKeyHex := enclave.PubKeyHex() + fmt.Printf("Public Key: %s\n", pubKeyHex) + + // Verify the enclave is valid + if enclave.IsValid() { + fmt.Println("✅ Enclave successfully created!") + } +} +``` + +#### Signing and Verification + +```go +// Sign data using distributed MPC protocol +message := []byte("Hello, distributed world!") +signature, err := enclave.Sign(message) +if err != nil { + panic(err) +} + +// Verify the signature +isValid, err := enclave.Verify(message, signature) +if err != nil { + panic(err) +} + +fmt.Printf("Signature valid: %t\n", isValid) +``` + +#### Secure Export and Import + +```go +// Export enclave with encryption +secretKey := []byte("my-super-secret-key-32-bytes-long") +encryptedData, err := enclave.Encrypt(secretKey) +if err != nil { + panic(err) +} + +// Import from encrypted data +restoredEnclave, err := mpc.ImportEnclave( + mpc.WithEncryptedData(encryptedData, secretKey), +) +if err != nil { + panic(err) +} + +fmt.Printf("Restored public key: %s\n", restoredEnclave.PubKeyHex()) +``` + +## Core Concepts + +### Enclaves + +An **Enclave** represents a secure MPC keyshare environment that manages distributed cryptographic operations: + +```go +type Enclave interface { + // Key Management + PubKeyHex() string // Get public key as hex string + PubKeyBytes() []byte // Get public key as bytes + IsValid() bool // Check if enclave has valid keyshares + + // Cryptographic Operations + Sign(data []byte) ([]byte, error) // Threshold signing + Verify(data []byte, sig []byte) (bool, error) // Signature verification + Refresh() (Enclave, error) // Proactive key refresh + + // Secure Storage + Encrypt(key []byte) ([]byte, error) // 
Export encrypted + Decrypt(key []byte, data []byte) ([]byte, error) // Import encrypted + + // Serialization + Marshal() ([]byte, error) // JSON serialization + Unmarshal(data []byte) error // JSON deserialization + + // Data Access + GetData() *EnclaveData // Access enclave internals + GetEnclave() Enclave // Self-reference +} +``` + +### Multi-Party Computation Protocol + +The package implements a 2-of-2 threshold scheme: + +1. **Alice (Validator)** - Server-side keyshare +2. **Bob (User)** - Client-side keyshare + +Both parties must participate in: +- **Distributed Key Generation (DKG)** - Creates shared public key +- **Threshold Signing** - Generates valid signatures cooperatively +- **Key Refresh** - Rotates keyshares while preserving public key + +### Supported Curves + +The package supports multiple elliptic curves: + +```go +type CurveName string + +const ( + K256Name CurveName = "secp256k1" // Bitcoin/Ethereum + P256Name CurveName = "P-256" // NIST P-256 + ED25519Name CurveName = "ed25519" // EdDSA + BLS12381G1Name CurveName = "BLS12381G1" // BLS12-381 G1 + BLS12381G2Name CurveName = "BLS12381G2" // BLS12-381 G2 + // ... more curves supported +) +``` + +## Advanced Usage + +### Custom Import Options + +The package provides flexible import mechanisms: + +```go +// Import from initial keyshares (after DKG) +enclave, err := mpc.ImportEnclave( + mpc.WithInitialShares(validatorShare, userShare, mpc.K256Name), +) + +// Import from existing enclave data +enclave, err := mpc.ImportEnclave( + mpc.WithEnclaveData(enclaveData), +) + +// Import from encrypted backup +enclave, err := mpc.ImportEnclave( + mpc.WithEncryptedData(encryptedBytes, secretKey), +) +``` + +### Key Refresh for Proactive Security + +```go +// Refresh keyshares while keeping the same public key +refreshedEnclave, err := enclave.Refresh() +if err != nil { + panic(err) +} + +// Public key remains the same +fmt.Printf("Original: %s\n", enclave.PubKeyHex()) +fmt.Printf("Refreshed: %s\n", refreshedEnclave.PubKeyHex()) +// Both should be identical! + +// But the enclave now has fresh keyshares +// This provides forward secrecy against key compromise +``` + +### Standalone Verification + +```go +// Verify signatures without the full enclave +pubKeyBytes := enclave.PubKeyBytes() +isValid, err := mpc.VerifyWithPubKey(pubKeyBytes, message, signature) +if err != nil { + panic(err) +} +``` + +### UCAN Integration + +The package includes MPC-based JWT signing for UCAN tokens: + +```go +import "github.com/sonr-io/sonr/crypto/mpc/spec" + +// Create MPC-backed UCAN token source +// (Implementation details in spec package) +keyshareSource := spec.KeyshareSource{ + // ... 
MPC enclave integration +} + +// Use with UCAN token creation +token, err := keyshareSource.NewOriginToken( + "did:key:audience", + attenuations, + facts, + notBefore, + expires, +) +``` + +## Security Features + +### Encryption + +All encrypted operations use **AES-GCM** with **SHA3-256** key derivation: + +```go +// Secure key derivation +func GetHashKey(key []byte) []byte { + hash := sha3.New256() + hash.Write(key) + return hash.Sum(nil)[:32] // 256-bit key +} +``` + +### Threshold Security + +- **2-of-2 threshold** - Both parties required for operations +- **No single point of failure** - Neither party alone can sign +- **Proactive refresh** - Regular keyshare rotation without changing public key +- **Forward secrecy** - Old keyshares cannot be used after refresh + +### Cryptographic Primitives + +- **ECDSA Signing** with **SHA3-256** message hashing +- **AES-GCM** encryption with 12-byte nonces +- **Secure random nonce generation** +- **Multiple curve support** for different use cases + +## Public API Reference + +### Core Functions + +```go +// Generate new MPC enclave +func NewEnclave() (Enclave, error) + +// Import enclave from various sources +func ImportEnclave(options ...ImportOption) (Enclave, error) + +// Execute distributed signing protocol +func ExecuteSigning(signFuncVal SignFunc, signFuncUser SignFunc) ([]byte, error) + +// Execute keyshare refresh protocol +func ExecuteRefresh(refreshFuncVal RefreshFunc, refreshFuncUser RefreshFunc, + curve CurveName) (Enclave, error) + +// Standalone signature verification +func VerifyWithPubKey(pubKeyCompressed []byte, data []byte, sig []byte) (bool, error) +``` + +### Import Options + +```go +type ImportOption func(Options) Options + +// Create from initial DKG results +func WithInitialShares(valKeyshare Message, userKeyshare Message, + curve CurveName) ImportOption + +// Create from encrypted backup +func WithEncryptedData(data []byte, key []byte) ImportOption + +// Create from existing data structure +func WithEnclaveData(data *EnclaveData) ImportOption +``` + +### EnclaveData Structure + +```go +type EnclaveData struct { + PubHex string `json:"pub_hex"` // Compressed public key (hex) + PubBytes []byte `json:"pub_bytes"` // Uncompressed public key + ValShare Message `json:"val_share"` // Alice (validator) keyshare + UserShare Message `json:"user_share"`// Bob (user) keyshare + Nonce []byte `json:"nonce"` // Encryption nonce + Curve CurveName `json:"curve"` // Elliptic curve name +} +``` + +### Protocol Types + +```go +type Message *protocol.Message // MPC protocol message +type Signature *curves.EcdsaSignature // ECDSA signature +type RefreshFunc interface{ protocol.Iterator } // Key refresh protocol +type SignFunc interface{ protocol.Iterator } // Signing protocol +type Point curves.Point // Elliptic curve point +``` + +### Utility Functions + +```go +// Cryptographic utilities +func GetHashKey(key []byte) []byte +func SerializeSignature(sig *curves.EcdsaSignature) ([]byte, error) +func DeserializeSignature(sigBytes []byte) (*curves.EcdsaSignature, error) + +// Key conversion utilities +func GetECDSAPoint(pubKey []byte) (*curves.EcPoint, error) + +// Protocol error handling +func CheckIteratedErrors(aErr, bErr error) error +``` + +## Error Handling + +The package provides comprehensive error handling: + +```go +// Common error patterns +enclave, err := mpc.NewEnclave() +if err != nil { + // Handle DKG failure + log.Fatalf("Failed to generate enclave: %v", err) +} + +signature, err := enclave.Sign(data) +if err != nil { + // Handle 
signing protocol failure + log.Fatalf("Failed to sign: %v", err) +} + +// Validation errors +if !enclave.IsValid() { + log.Fatal("Enclave has invalid keyshares") +} +``` + +## Performance Considerations + +### Memory Usage + +- **Minimal footprint** - Only active keyshares kept in memory +- **Efficient serialization** - JSON-based with compression +- **Secure cleanup** - Sensitive data cleared after use + +### Network Communication + +- **Minimal rounds** - Optimized protocol with few message exchanges +- **Small messages** - Compact protocol message format +- **Stateless operations** - No persistent connections required + +### Cryptographic Performance + +- **Hardware acceleration** - Leverages optimized curve implementations +- **Efficient hashing** - SHA3-256 with minimal overhead +- **Fast verification** - Public key operations optimized + +## Testing + +The package includes comprehensive tests: + +```bash +# Run all tests +go test -v ./crypto/mpc + +# Run specific test suites +go test -v ./crypto/mpc -run TestEnclaveData +go test -v ./crypto/mpc -run TestKeyShareGeneration +go test -v ./crypto/mpc -run TestEnclaveOperations + +# Run with race detection +go test -race ./crypto/mpc + +# Generate coverage report +go test -cover ./crypto/mpc +``` + +## Use Cases + +### Decentralized Identity + +- **DID key management** - Secure distributed identity keys +- **Threshold signing** - Multi-party authorization for identity operations +- **Key recovery** - Distributed backup and restore mechanisms + +### Cryptocurrency Wallets + +- **Multi-signature wallets** - True threshold custody solutions +- **Exchange security** - Hot wallet protection with distributed keys +- **Institutional custody** - Compliance-friendly key management + +### Blockchain Infrastructure + +- **Validator signing** - Secure consensus participation +- **Cross-chain bridges** - Multi-party custody of bridged assets +- **DAO governance** - Distributed decision-making mechanisms + +### Enterprise Applications + +- **Document signing** - Distributed digital signatures +- **API authentication** - Threshold-based service authentication +- **Secure communication** - End-to-end encrypted messaging + +## Dependencies + +- **Core Cryptography**: `github.com/sonr-io/sonr/crypto/core/curves` +- **Protocol Framework**: `github.com/sonr-io/sonr/crypto/core/protocol` +- **Threshold ECDSA**: `github.com/sonr-io/sonr/crypto/tecdsa/dklsv1` +- **UCAN Integration**: `github.com/sonr-io/sonr/crypto/ucan` +- **Standard Crypto**: `golang.org/x/crypto/sha3` +- **JWT Support**: `github.com/golang-jwt/jwt` + +## Security Considerations + +### Threat Model + +The package is designed to protect against: + +- **Key compromise** - Distributed keyshares prevent single points of failure +- **Insider threats** - No single party can perform operations alone +- **Network attacks** - Protocol messages are cryptographically protected +- **Side-channel attacks** - Secure implementations of cryptographic primitives + +### Best Practices + +1. **Regular key refresh** - Rotate keyshares periodically +2. **Secure communication** - Use TLS for protocol message exchange +3. **Access controls** - Implement proper authentication for MPC operations +4. **Audit logging** - Log all cryptographic operations +5. 
**Backup strategies** - Securely store encrypted enclave exports + +### Limitations + +- **2-of-2 threshold only** - Currently supports only 2-party protocols +- **Network dependency** - Requires communication between parties +- **No byzantine fault tolerance** - Assumes honest-but-curious adversaries + +## Contributing + +We welcome contributions! Please ensure: + +1. **Security first** - All cryptographic code must be carefully reviewed +2. **Comprehensive testing** - Include unit tests and integration tests +3. **Documentation** - Document all public APIs and security assumptions +4. **Performance** - Benchmark critical cryptographic operations +5. **Compatibility** - Maintain backward compatibility with existing enclaves + +## License + +This project follows the same license as the main Sonr project. + +--- + +**⚠️ Security Notice**: This is cryptographic software. While extensively tested, it should be used with appropriate security measures and understanding of the underlying protocols. For production deployments, consider additional security audits and operational security measures. \ No newline at end of file diff --git a/mpc/codec.go b/mpc/codec.go new file mode 100644 index 0000000..2973e6d --- /dev/null +++ b/mpc/codec.go @@ -0,0 +1,110 @@ +// Package mpc implements the Sonr MPC protocol +package mpc + +import ( + "crypto/rand" + + "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/core/protocol" + "github.com/sonr-io/sonr/crypto/tecdsa/dklsv1/dkg" +) + +type CurveName string + +const ( + K256Name CurveName = "secp256k1" + BLS12381G1Name CurveName = "BLS12381G1" + BLS12381G2Name CurveName = "BLS12381G2" + BLS12831Name CurveName = "BLS12831" + P256Name CurveName = "P-256" + ED25519Name CurveName = "ed25519" + PallasName CurveName = "pallas" + BLS12377G1Name CurveName = "BLS12377G1" + BLS12377G2Name CurveName = "BLS12377G2" + BLS12377Name CurveName = "BLS12377" +) + +func (c CurveName) String() string { + return string(c) +} + +func (c CurveName) Curve() *curves.Curve { + switch c { + case K256Name: + return curves.K256() + case BLS12381G1Name: + return curves.BLS12381G1() + case BLS12381G2Name: + return curves.BLS12381G2() + case BLS12831Name: + return curves.BLS12381G1() + case P256Name: + return curves.P256() + case ED25519Name: + return curves.ED25519() + case PallasName: + return curves.PALLAS() + case BLS12377G1Name: + return curves.BLS12377G1() + case BLS12377G2Name: + return curves.BLS12377G2() + case BLS12377Name: + return curves.BLS12377G1() + default: + return curves.K256() + } +} + +// ╭───────────────────────────────────────────────────────────╮ +// │ Exported Generics │ +// ╰───────────────────────────────────────────────────────────╯ + +type ( + AliceOut *dkg.AliceOutput + BobOut *dkg.BobOutput + Point curves.Point + Role string // Role is the type for the role + Message *protocol.Message // Message is the protocol.Message that is used for MPC + Signature *curves.EcdsaSignature // Signature is the type for the signature + RefreshFunc interface{ protocol.Iterator } // RefreshFunc is the type for the refresh function + SignFunc interface{ protocol.Iterator } // SignFunc is the type for the sign function +) + +const ( + RoleVal = "validator" + RoleUser = "user" +) + +func randNonce() []byte { + nonce := make([]byte, 12) + rand.Read(nonce) + return nonce +} + +// Enclave defines the interface for key management operations +type Enclave interface { + GetData() *EnclaveData // GetData returns the data of the keyEnclave + GetEnclave() Enclave // 
GetEnclave returns the enclave of the keyEnclave + Decrypt( + key []byte, + encryptedData []byte, + ) ([]byte, error) // Decrypt returns decrypted enclave data + Encrypt( + key []byte, + ) ([]byte, error) // Encrypt returns encrypted enclave data + IsValid() bool // IsValid returns true if the keyEnclave is valid + PubKeyBytes() []byte // PubKeyBytes returns the public key of the keyEnclave + PubKeyHex() string // PubKeyHex returns the public key of the keyEnclave + Refresh() (Enclave, error) // Refresh returns a new keyEnclave + Marshal() ([]byte, error) // Serialize returns the serialized keyEnclave + Sign( + data []byte, + ) ([]byte, error) // Sign returns the signature of the data + Unmarshal( + data []byte, + ) error // Verify returns true if the signature is valid + Verify( + data []byte, + sig []byte, + ) (bool, error) // Verify returns true if the signature is valid +} diff --git a/mpc/codec_test.go b/mpc/codec_test.go new file mode 100644 index 0000000..933b93a --- /dev/null +++ b/mpc/codec_test.go @@ -0,0 +1,178 @@ +package mpc + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestKeyShareGeneration(t *testing.T) { + t.Run("Generate Valid Enclave", func(t *testing.T) { + // Generate enclave + enclave, err := NewEnclave() + require.NoError(t, err) + require.NotNil(t, enclave) + + // Validate enclave contents + assert.True(t, enclave.IsValid()) + }) + + t.Run("Export and Import", func(t *testing.T) { + // Generate original enclave + original, err := NewEnclave() + require.NoError(t, err) + + // Test key for encryption/decryption (32 bytes) + testKey := []byte("test-key-12345678-test-key-123456") + + // Test Export/Import + t.Run("Full Enclave", func(t *testing.T) { + // Export enclave + data, err := original.Encrypt(testKey) + require.NoError(t, err) + require.NotEmpty(t, data) + + // Create new empty enclave + newEnclave, err := NewEnclave() + require.NoError(t, err) + + // Verify the imported enclave works by signing + testData := []byte("test message") + sig, err := newEnclave.Sign(testData) + require.NoError(t, err) + valid, err := newEnclave.Verify(testData, sig) + require.NoError(t, err) + assert.True(t, valid) + }) + }) + + t.Run("Encrypt and Decrypt", func(t *testing.T) { + // Generate original enclave + original, err := NewEnclave() + require.NoError(t, err) + require.NotNil(t, original) + + // Test key for encryption/decryption (32 bytes) + testKey := []byte("test-key-12345678-test-key-123456") + + // Test Encrypt + encrypted, err := original.Encrypt(testKey) + require.NoError(t, err) + require.NotEmpty(t, encrypted) + + // Test Decrypt + decrypted, err := original.Decrypt(testKey, encrypted) + require.NoError(t, err) + require.NotEmpty(t, decrypted) + + // Verify decrypted data matches original + originalData, err := original.Marshal() + require.NoError(t, err) + assert.Equal(t, originalData, decrypted) + + // Test with wrong key should fail + wrongKey := []byte("wrong-key-12345678-wrong-key-123456") + _, err = original.Decrypt(wrongKey, encrypted) + assert.Error(t, err, "Decryption with wrong key should fail") + }) +} + +func TestEnclaveOperations(t *testing.T) { + t.Run("Signing and Verification", func(t *testing.T) { + // Generate valid enclave + enclave, err := NewEnclave() + require.NoError(t, err) + + // Test signing + testData := []byte("test message") + signature, err := enclave.Sign(testData) + require.NoError(t, err) + require.NotNil(t, signature) + + // Verify the signature + valid, err := 
enclave.Verify(testData, signature) + require.NoError(t, err) + assert.True(t, valid) + + // Test invalid data verification + invalidData := []byte("wrong message") + valid, err = enclave.Verify(invalidData, signature) + require.NoError(t, err) + assert.False(t, valid) + }) + + t.Run("Refresh Operation", func(t *testing.T) { + enclave, err := NewEnclave() + require.NoError(t, err) + + // Test refresh + refreshedEnclave, err := enclave.Refresh() + require.NoError(t, err) + require.NotNil(t, refreshedEnclave) + + // Verify refreshed enclave is valid + assert.True(t, refreshedEnclave.IsValid()) + }) +} + +func TestEnclaveDataAccess(t *testing.T) { + t.Run("GetData", func(t *testing.T) { + // Generate enclave + enclave, err := NewEnclave() + require.NoError(t, err) + require.NotNil(t, enclave) + + // Get the enclave data + data := enclave.GetData() + require.NotNil(t, data, "GetData should return non-nil value") + + // Verify the data is valid + assert.True(t, data.IsValid(), "Enclave data should be valid") + + // Verify the public key in the data matches the enclave's public key + assert.Equal(t, enclave.PubKeyHex(), data.PubKeyHex(), "Public keys should match") + }) + + t.Run("PubKeyHex", func(t *testing.T) { + // Generate enclave + enclave, err := NewEnclave() + require.NoError(t, err) + require.NotNil(t, enclave) + + // Get the public key hex + pubKeyHex := enclave.PubKeyHex() + require.NotEmpty(t, pubKeyHex, "PubKeyHex should return non-empty string") + + // Check that it's a valid hex string (should be 66 chars for compressed point: 0x02/0x03 + 32 bytes) + assert.GreaterOrEqual( + t, + len(pubKeyHex), + 66, + "Public key hex should be at least 66 characters", + ) + assert.True(t, len(pubKeyHex)%2 == 0, "Hex string should have even length") + + // Compare with the enclave data's public key + data := enclave.GetData() + assert.Equal( + t, + data.PubKeyHex(), + pubKeyHex, + "Public key hex should match the one from GetData", + ) + + // Verify that two different enclaves have different public keys + enclave2, err := NewEnclave() + require.NoError(t, err) + require.NotNil(t, enclave2) + + pubKeyHex2 := enclave2.PubKeyHex() + assert.NotEqual( + t, + pubKeyHex, + pubKeyHex2, + "Different enclaves should have different public keys", + ) + }) +} diff --git a/mpc/enclave.go b/mpc/enclave.go new file mode 100644 index 0000000..d169d24 --- /dev/null +++ b/mpc/enclave.go @@ -0,0 +1,158 @@ +package mpc + +import ( + "crypto/aes" + "crypto/cipher" + "crypto/ecdsa" + "encoding/json" + "fmt" + + "github.com/sonr-io/sonr/crypto/core/curves" + "golang.org/x/crypto/sha3" +) + +// EnclaveData implements the Enclave interface +type EnclaveData struct { + PubHex string `json:"pub_hex"` // PubHex is the hex-encoded compressed public key + PubBytes []byte `json:"pub_bytes"` // PubBytes is the uncompressed public key + ValShare Message `json:"val_share"` + UserShare Message `json:"user_share"` + Nonce []byte `json:"nonce"` + Curve CurveName `json:"curve"` +} + +// GetData returns the data of the keyEnclave +func (k *EnclaveData) GetData() *EnclaveData { + return k +} + +// GetEnclave returns the enclave of the keyEnclave +func (k *EnclaveData) GetEnclave() Enclave { + return k +} + +// GetPubPoint returns the public point of the keyEnclave +func (k *EnclaveData) GetPubPoint() (curves.Point, error) { + curve := k.Curve.Curve() + return curve.NewIdentityPoint().FromAffineUncompressed(k.PubBytes) +} + +// PubKeyHex returns the public key of the keyEnclave +func (k *EnclaveData) PubKeyHex() string { + return k.PubHex 
+} + +// PubKeyBytes returns the public key of the keyEnclave +func (k *EnclaveData) PubKeyBytes() []byte { + return k.PubBytes +} + +// Decrypt returns decrypted enclave data +func (k *EnclaveData) Decrypt(key []byte, encryptedData []byte) ([]byte, error) { + hashedKey := GetHashKey(key) + block, err := aes.NewCipher(hashedKey) + if err != nil { + return nil, err + } + + aesgcm, err := cipher.NewGCM(block) + if err != nil { + return nil, err + } + + // Decrypt the data using AES-GCM + plaintext, err := aesgcm.Open(nil, k.Nonce, encryptedData, nil) + if err != nil { + return nil, fmt.Errorf("decryption failed: %w", err) + } + return plaintext, nil +} + +// Encrypt returns encrypted enclave data +func (k *EnclaveData) Encrypt(key []byte) ([]byte, error) { + data, err := k.Marshal() + if err != nil { + return nil, fmt.Errorf("failed to serialize enclave: %w", err) + } + + hashedKey := GetHashKey(key) + block, err := aes.NewCipher(hashedKey) + if err != nil { + return nil, err + } + + aesgcm, err := cipher.NewGCM(block) + if err != nil { + return nil, err + } + + return aesgcm.Seal(nil, k.Nonce, data, nil), nil +} + +// IsValid returns true if the keyEnclave is valid +func (k *EnclaveData) IsValid() bool { + return k.ValShare != nil && k.UserShare != nil +} + +// Refresh returns a new keyEnclave +func (k *EnclaveData) Refresh() (Enclave, error) { + refreshFuncVal, err := GetAliceRefreshFunc(k) + if err != nil { + return nil, err + } + refreshFuncUser, err := GetBobRefreshFunc(k) + if err != nil { + return nil, err + } + return ExecuteRefresh(refreshFuncVal, refreshFuncUser, k.Curve) +} + +// Sign returns the signature of the data +func (k *EnclaveData) Sign(data []byte) ([]byte, error) { + userSign, err := GetBobSignFunc(k, data) + if err != nil { + return nil, err + } + valSign, err := GetAliceSignFunc(k, data) + if err != nil { + return nil, err + } + return ExecuteSigning(valSign, userSign) +} + +// Verify returns true if the signature is valid +func (k *EnclaveData) Verify(data []byte, sig []byte) (bool, error) { + edSig, err := DeserializeSignature(sig) + if err != nil { + return false, err + } + ePub, err := GetECDSAPoint(k.PubBytes) + if err != nil { + return false, err + } + pk := &ecdsa.PublicKey{ + Curve: ePub.Curve, + X: ePub.X, + Y: ePub.Y, + } + + // Hash the message using SHA3-256 + hash := sha3.New256() + hash.Write(data) + digest := hash.Sum(nil) + + return ecdsa.Verify(pk, digest, edSig.R, edSig.S), nil +} + +// Marshal returns the JSON encoding of keyEnclave +func (k *EnclaveData) Marshal() ([]byte, error) { + return json.Marshal(k) +} + +// Unmarshal unmarshals the JSON encoding of keyEnclave +func (k *EnclaveData) Unmarshal(data []byte) error { + if err := json.Unmarshal(data, k); err != nil { + return err + } + return nil +} diff --git a/mpc/enclave_test.go b/mpc/enclave_test.go new file mode 100644 index 0000000..39dc115 --- /dev/null +++ b/mpc/enclave_test.go @@ -0,0 +1,307 @@ +package mpc + +import ( + "bytes" + "encoding/hex" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestEnclaveData_GetData(t *testing.T) { + // Create a new enclave + enclave, err := NewEnclave() + require.NoError(t, err) + require.NotNil(t, enclave) + + // Get the data + data := enclave.GetData() + require.NotNil(t, data) + + // Ensure the data is the same instance + assert.Equal(t, enclave, data.GetEnclave()) + + // Ensure the data is valid + assert.True(t, data.IsValid()) +} + +func TestEnclaveData_GetEnclave(t *testing.T) { + // Create a 
new enclave + enclave, err := NewEnclave() + require.NoError(t, err) + require.NotNil(t, enclave) + + // Get the enclave data + data := enclave.GetData() + require.NotNil(t, data) + + // Get the enclave back + returnedEnclave := data.GetEnclave() + require.NotNil(t, returnedEnclave) + + // Verify the returned enclave is the same + assert.Equal(t, enclave, returnedEnclave) +} + +func TestEnclaveData_GetPubPoint(t *testing.T) { + // Create a new enclave + enclave, err := NewEnclave() + require.NoError(t, err) + require.NotNil(t, enclave) + + // Get the enclave data + data := enclave.GetData() + require.NotNil(t, data) + + // Get the public point + pubPoint, err := data.GetPubPoint() + require.NoError(t, err) + require.NotNil(t, pubPoint) + + // Verify the public point's serialization matches the stored public bytes + pointBytes := pubPoint.ToAffineUncompressed() + assert.Equal(t, data.PubBytes, pointBytes) +} + +func TestEnclaveData_PubKeyHex(t *testing.T) { + // Create a new enclave + enclave, err := NewEnclave() + require.NoError(t, err) + require.NotNil(t, enclave) + + // Get the enclave data + data := enclave.GetData() + require.NotNil(t, data) + + // Get the public key hex + pubKeyHex := data.PubKeyHex() + require.NotEmpty(t, pubKeyHex) + + // Verify it's a valid hex string + _, err = hex.DecodeString(pubKeyHex) + require.NoError(t, err) + + // Verify it matches the stored PubHex + assert.Equal(t, data.PubHex, pubKeyHex) +} + +func TestEnclaveData_PubKeyBytes(t *testing.T) { + // Create a new enclave + enclave, err := NewEnclave() + require.NoError(t, err) + require.NotNil(t, enclave) + + // Get the enclave data + data := enclave.GetData() + require.NotNil(t, data) + + // Get the public key bytes + pubKeyBytes := data.PubKeyBytes() + require.NotEmpty(t, pubKeyBytes) + + // Verify it matches the stored PubBytes + assert.Equal(t, data.PubBytes, pubKeyBytes) +} + +func TestEnclaveData_EncryptDecrypt(t *testing.T) { + // Create a new enclave + enclave, err := NewEnclave() + require.NoError(t, err) + require.NotNil(t, enclave) + + // Get the enclave data + data := enclave.GetData() + require.NotNil(t, data) + + // Test key for encryption/decryption + testKey := []byte("test-key-12345678-test-key-123456") + + // Test encryption + encrypted, err := data.Encrypt(testKey) + require.NoError(t, err) + require.NotEmpty(t, encrypted) + + // Test decryption + decrypted, err := data.Decrypt(testKey, encrypted) + require.NoError(t, err) + require.NotEmpty(t, decrypted) + + // Serialize the original data for comparison + originalData, err := data.Marshal() + require.NoError(t, err) + + // Verify the decrypted data matches the original + assert.Equal(t, originalData, decrypted) + + // Test decryption with wrong key (should fail) + wrongKey := []byte("wrong-key-12345678-wrong-key-123456") + _, err = data.Decrypt(wrongKey, encrypted) + assert.Error(t, err, "Decryption with wrong key should fail") +} + +func TestEnclaveData_IsValid(t *testing.T) { + // Create a new enclave + enclave, err := NewEnclave() + require.NoError(t, err) + require.NotNil(t, enclave) + + // Get the enclave data + data := enclave.GetData() + require.NotNil(t, data) + + // Verify it's valid + assert.True(t, data.IsValid()) + + // Create an invalid enclave + invalidEnclave := &EnclaveData{ + PubHex: "invalid", + PubBytes: []byte("invalid"), + Nonce: []byte("nonce"), + Curve: K256Name, + } + + // Verify it's invalid + assert.False(t, invalidEnclave.IsValid()) +} + +func TestEnclaveData_RefreshAndSign(t *testing.T) { + // Create a new 
enclave + enclave, err := NewEnclave() + require.NoError(t, err) + require.NotNil(t, enclave) + + // Get the original public key + originalPubKeyHex := enclave.PubKeyHex() + originalPubKeyBytes := enclave.PubKeyBytes() + require.NotEmpty(t, originalPubKeyHex) + require.NotEmpty(t, originalPubKeyBytes) + + // Sign a message with the original enclave to verify it works + testMessage := []byte("test message before refresh") + originalSignature, err := enclave.Sign(testMessage) + require.NoError(t, err) + require.NotEmpty(t, originalSignature) + + // Verify the original signature + valid, err := enclave.Verify(testMessage, originalSignature) + require.NoError(t, err) + assert.True(t, valid, "Original signature should be valid") + + // Refresh the enclave + refreshedEnclave, err := enclave.Refresh() + require.NoError(t, err) + require.NotNil(t, refreshedEnclave) + + // CRITICAL TEST: The public key should remain the same after refresh + refreshedPubKeyHex := refreshedEnclave.PubKeyHex() + refreshedPubKeyBytes := refreshedEnclave.PubKeyBytes() + + assert.Equal(t, originalPubKeyHex, refreshedPubKeyHex, + "Public key hex should not change after refresh") + assert.Equal(t, originalPubKeyBytes, refreshedPubKeyBytes, + "Public key bytes should not change after refresh") + + // Verify the refreshed enclave is valid + assert.True(t, refreshedEnclave.IsValid(), "Refreshed enclave should be valid") + + // Test that the refreshed enclave can still sign messages + testMessage2 := []byte("test message after refresh") + refreshedSignature, err := refreshedEnclave.Sign(testMessage2) + require.NoError(t, err) + require.NotEmpty(t, refreshedSignature) + + // Verify the signature from the refreshed enclave with its own key + valid, err = refreshedEnclave.Verify(testMessage2, refreshedSignature) + require.NoError(t, err) + assert.True(t, valid, "Signature from refreshed enclave should be valid") + + // CRITICAL TEST: The original enclave should be able to verify the signature + // from the refreshed enclave since they have the same public key + valid, err = enclave.Verify(testMessage2, refreshedSignature) + require.NoError(t, err) + assert.True(t, valid, "Original enclave should be able to verify refreshed enclave's signature") + + // CRITICAL TEST: The refreshed enclave should be able to verify the signature + // from the original enclave since they have the same public key + valid, err = refreshedEnclave.Verify(testMessage, originalSignature) + require.NoError(t, err) + assert.True(t, valid, "Refreshed enclave should be able to verify original enclave's signature") + + // Test with wrong message (should fail) + wrongMessage := []byte("wrong message") + valid, err = refreshedEnclave.Verify(wrongMessage, refreshedSignature) + require.NoError(t, err) + assert.False(t, valid, "Wrong message verification should fail") +} + +func TestEnclaveData_MarshalUnmarshal(t *testing.T) { + // Create a new enclave + enclave, err := NewEnclave() + require.NoError(t, err) + require.NotNil(t, enclave) + + // Get the enclave data + data := enclave.GetData() + require.NotNil(t, data) + + // Marshal the enclave + encoded, err := data.Marshal() + require.NoError(t, err) + require.NotEmpty(t, encoded) + + // Create a new empty enclave + newEnclave := &EnclaveData{} + + // Unmarshal the encoded data + err = newEnclave.Unmarshal(encoded) + require.NoError(t, err) + + // Verify the unmarshaled enclave matches the original + assert.Equal(t, data.PubHex, newEnclave.PubHex) + assert.Equal(t, data.Curve, newEnclave.Curve) + assert.True(t, 
bytes.Equal(data.PubBytes, newEnclave.PubBytes)) + assert.True(t, bytes.Equal(data.Nonce, newEnclave.Nonce)) + assert.True(t, newEnclave.IsValid()) + + // Verify the public key matches + assert.Equal(t, data.PubKeyHex(), newEnclave.PubKeyHex()) +} + +func TestEnclaveData_Verify(t *testing.T) { + // Create a new enclave + enclave, err := NewEnclave() + require.NoError(t, err) + require.NotNil(t, enclave) + + // Sign a message + testMessage := []byte("test message") + signature, err := enclave.Sign(testMessage) + require.NoError(t, err) + require.NotEmpty(t, signature) + + // Verify the signature + valid, err := enclave.Verify(testMessage, signature) + require.NoError(t, err) + assert.True(t, valid) + + // Verify with wrong message + wrongMessage := []byte("wrong message") + valid, err = enclave.Verify(wrongMessage, signature) + require.NoError(t, err) + assert.False(t, valid) + + // Corrupt the signature + corruptedSig := make([]byte, len(signature)) + copy(corruptedSig, signature) + corruptedSig[0] ^= 0x01 // flip a bit + + // Verify with corrupted signature (should fail) + valid, err = enclave.Verify(testMessage, corruptedSig) + require.NoError(t, err) + assert.False(t, valid) + + // We don't need to manually create ECDSA signatures here + // as we already verified the Sign and Verify functions work together. + // This completes the verification of the enclave's signature functionality. +} diff --git a/mpc/import.go b/mpc/import.go new file mode 100644 index 0000000..ccc116d --- /dev/null +++ b/mpc/import.go @@ -0,0 +1,140 @@ +package mpc + +import ( + "encoding/hex" + "errors" + "fmt" +) + +// ImportEnclave creates an Enclave instance from various import options. +// It prioritizes enclave bytes over keyshares if both are provided. +func ImportEnclave(options ...ImportOption) (Enclave, error) { + if len(options) == 0 { + return nil, errors.New("no import options provided") + } + + opts := Options{} + for _, opt := range options { + opts = opt(opts) + } + return opts.Apply() +} + +// Options is a struct that holds the import options +type Options struct { + valKeyshare Message + userKeyshare Message + enclaveBytes []byte + enclaveData *EnclaveData + initialShares bool + isEncrypted bool + secretKey []byte + curve CurveName +} + +// ImportOption is a function that modifies the import options +type ImportOption func(Options) Options + +// WithInitialShares creates an option to import an enclave from validator and user keyshares. +func WithInitialShares(valKeyshare Message, userKeyshare Message, curve CurveName) ImportOption { + return func(opts Options) Options { + opts.valKeyshare = valKeyshare + opts.userKeyshare = userKeyshare + opts.initialShares = true + opts.curve = curve + return opts + } +} + +// WithEncryptedData creates an option to import an enclave from encrypted data. +func WithEncryptedData(data []byte, key []byte) ImportOption { + return func(opts Options) Options { + opts.enclaveBytes = data + opts.initialShares = false + opts.isEncrypted = true + opts.secretKey = key + return opts + } +} + +// WithEnclaveData creates an option to import an enclave from a data struct. +func WithEnclaveData(data *EnclaveData) ImportOption { + return func(opts Options) Options { + opts.enclaveData = data + opts.initialShares = false + return opts + } +} + +// Apply applies the import options to create an Enclave instance. 
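//
// Illustrative usage (editor's sketch, not part of this commit; encryptedBlob,
// secretKey, data, valShare and userShare are placeholders for values produced
// elsewhere in this package):
//
//	// From encrypted enclave data, intended for blobs produced by EnclaveData.Encrypt.
//	enclave, err := ImportEnclave(WithEncryptedData(encryptedBlob, secretKey))
//
//	// From a previously obtained *EnclaveData.
//	enclave, err = ImportEnclave(WithEnclaveData(data))
//
//	// From freshly generated validator and user keyshares.
//	enclave, err = ImportEnclave(WithInitialShares(valShare, userShare, K256Name))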
+func (opts Options) Apply() (Enclave, error) { + // Load from encrypted data if provided + if opts.isEncrypted { + if len(opts.enclaveBytes) == 0 { + return nil, errors.New("enclave bytes cannot be empty") + } + return RestoreEncryptedEnclave(opts.enclaveBytes, opts.secretKey) + } + // Generate from keyshares if provided + if opts.initialShares { + // Then try to build from keyshares + if opts.valKeyshare == nil { + return nil, errors.New("validator share cannot be nil") + } + if opts.userKeyshare == nil { + return nil, errors.New("user share cannot be nil") + } + return BuildEnclave(opts.valKeyshare, opts.userKeyshare, opts) + } + // Load from enclave data if provided + return RestoreEnclaveFromData(opts.enclaveData) +} + +// BuildEnclave creates a new enclave from validator and user keyshares. +func BuildEnclave(valShare, userShare Message, options Options) (Enclave, error) { + if valShare == nil { + return nil, errors.New("validator share cannot be nil") + } + if userShare == nil { + return nil, errors.New("user share cannot be nil") + } + + pubPoint, err := GetAlicePublicPoint(valShare) + if err != nil { + return nil, fmt.Errorf("failed to get public point: %w", err) + } + return &EnclaveData{ + PubBytes: pubPoint.ToAffineUncompressed(), + PubHex: hex.EncodeToString(pubPoint.ToAffineCompressed()), + ValShare: valShare, + UserShare: userShare, + Nonce: randNonce(), + Curve: options.curve, + }, nil +} + +// RestoreEnclaveFromData deserializes an enclave from its data struct. +func RestoreEnclaveFromData(data *EnclaveData) (Enclave, error) { + if data == nil { + return nil, errors.New("enclave data cannot be nil") + } + return data, nil +} + +// RestoreEncryptedEnclave decrypts an enclave from its binary representation. and key +func RestoreEncryptedEnclave(data []byte, key []byte) (Enclave, error) { + keyclave := &EnclaveData{} + err := keyclave.Unmarshal(data) + if err != nil { + return nil, fmt.Errorf("failed to unmarshal enclave: %w", err) + } + decryptedData, err := keyclave.Decrypt(key, data) + if err != nil { + return nil, fmt.Errorf("failed to decrypt enclave: %w", err) + } + err = keyclave.Unmarshal(decryptedData) + if err != nil { + return nil, fmt.Errorf("failed to unmarshal decrypted enclave: %w", err) + } + return keyclave, nil +} diff --git a/mpc/protocol.go b/mpc/protocol.go new file mode 100644 index 0000000..bbf1cdc --- /dev/null +++ b/mpc/protocol.go @@ -0,0 +1,91 @@ +package mpc + +import ( + "github.com/sonr-io/sonr/crypto/core/protocol" + "github.com/sonr-io/sonr/crypto/tecdsa/dklsv1" +) + +// NewEnclave generates a new MPC keyshare +func NewEnclave() (Enclave, error) { + curve := K256Name.Curve() + valKs := dklsv1.NewAliceDkg(curve, protocol.Version1) + userKs := dklsv1.NewBobDkg(curve, protocol.Version1) + aErr, bErr := RunProtocol(userKs, valKs) + if err := CheckIteratedErrors(aErr, bErr); err != nil { + return nil, err + } + valRes, err := valKs.Result(protocol.Version1) + if err != nil { + return nil, err + } + userRes, err := userKs.Result(protocol.Version1) + if err != nil { + return nil, err + } + return ImportEnclave(WithInitialShares(valRes, userRes, K256Name)) +} + +// ExecuteSigning runs the MPC signing protocol +func ExecuteSigning(signFuncVal SignFunc, signFuncUser SignFunc) ([]byte, error) { + aErr, bErr := RunProtocol(signFuncVal, signFuncUser) + if err := CheckIteratedErrors(aErr, bErr); err != nil { + return nil, err + } + out, err := signFuncUser.Result(protocol.Version1) + if err != nil { + return nil, err + } + s, err := 
dklsv1.DecodeSignature(out) + if err != nil { + return nil, err + } + sig, err := SerializeSignature(s) + if err != nil { + return nil, err + } + return sig, nil +} + +// ExecuteRefresh runs the MPC refresh protocol +func ExecuteRefresh( + refreshFuncVal RefreshFunc, + refreshFuncUser RefreshFunc, + curve CurveName, +) (Enclave, error) { + aErr, bErr := RunProtocol(refreshFuncVal, refreshFuncUser) + if err := CheckIteratedErrors(aErr, bErr); err != nil { + return nil, err + } + valRefreshResult, err := refreshFuncVal.Result(protocol.Version1) + if err != nil { + return nil, err + } + userRefreshResult, err := refreshFuncUser.Result(protocol.Version1) + if err != nil { + return nil, err + } + return ImportEnclave(WithInitialShares(valRefreshResult, userRefreshResult, curve)) +} + +// RunProtocol runs the MPC protocol +func RunProtocol(firstParty protocol.Iterator, secondParty protocol.Iterator) (error, error) { + var ( + message *protocol.Message + aErr error + bErr error + ) + + for aErr != protocol.ErrProtocolFinished || bErr != protocol.ErrProtocolFinished { + // Crank each protocol forward one iteration + message, bErr = firstParty.Next(message) + if bErr != nil && bErr != protocol.ErrProtocolFinished { + return nil, bErr + } + + message, aErr = secondParty.Next(message) + if aErr != nil && aErr != protocol.ErrProtocolFinished { + return aErr, nil + } + } + return aErr, bErr +} diff --git a/mpc/spec/jwt.go b/mpc/spec/jwt.go new file mode 100644 index 0000000..e1e66de --- /dev/null +++ b/mpc/spec/jwt.go @@ -0,0 +1,116 @@ +package spec + +import ( + "crypto/sha256" + "encoding/base64" + "fmt" + + "github.com/golang-jwt/jwt/v5" + "github.com/sonr-io/sonr/crypto/mpc" +) + +// MPCSigningMethod implements the SigningMethod interface for MPC-based signing +type MPCSigningMethod struct { + Name string + enclave mpc.Enclave +} + +// NewJWTSigningMethod creates a new MPC signing method with the given enclave +func NewJWTSigningMethod(name string, enclave mpc.Enclave) *MPCSigningMethod { + return &MPCSigningMethod{ + Name: name, + enclave: enclave, + } +} + +// WithEnclave sets the enclave for an existing signing method +func (m *MPCSigningMethod) WithEnclave(enclave mpc.Enclave) *MPCSigningMethod { + return &MPCSigningMethod{ + Name: m.Name, + enclave: enclave, + } +} + +// NewMPCSigningMethod is an alias for NewJWTSigningMethod for compatibility +func NewMPCSigningMethod(name string, enclave mpc.Enclave) *MPCSigningMethod { + return NewJWTSigningMethod(name, enclave) +} + +// Alg returns the signing method's name +func (m *MPCSigningMethod) Alg() string { + return m.Name +} + +// Verify verifies the signature using the MPC public key +func (m *MPCSigningMethod) Verify(signingString string, signature []byte, key any) error { + // Check if enclave is available + if m.enclave == nil { + return fmt.Errorf("MPC enclave not available for signature verification") + } + + // Decode the signature + sig, err := base64.RawURLEncoding.DecodeString(string(signature)) + if err != nil { + return fmt.Errorf("failed to decode signature: %w", err) + } + + // Hash the signing string using SHA-256 + hasher := sha256.New() + hasher.Write([]byte(signingString)) + digest := hasher.Sum(nil) + + // Use MPC enclave to verify signature + valid, err := m.enclave.Verify(digest, sig) + if err != nil { + return fmt.Errorf("failed to verify signature: %w", err) + } + + if !valid { + return fmt.Errorf("signature verification failed") + } + + return nil +} + +// Sign signs the data using MPC +func (m *MPCSigningMethod) 
Sign(signingString string, key any) ([]byte, error) { + // Check if enclave is available + if m.enclave == nil { + return nil, fmt.Errorf("MPC enclave not available for signing") + } + + // Hash the signing string using SHA-256 + hasher := sha256.New() + hasher.Write([]byte(signingString)) + digest := hasher.Sum(nil) + + // Use MPC enclave to sign the digest + sig, err := m.enclave.Sign(digest) + if err != nil { + return nil, fmt.Errorf("failed to sign with MPC: %w", err) + } + + // Encode the signature as base64url + encoded := base64.RawURLEncoding.EncodeToString(sig) + return []byte(encoded), nil +} + +func init() { + // Register the MPC signing method factory + jwt.RegisterSigningMethod("MPC256", func() jwt.SigningMethod { + // This factory creates a new instance without enclave + // The enclave will be provided when creating tokens + return &MPCSigningMethod{ + Name: "MPC256", + } + }) +} + +// RegisterMPCMethod registers an MPC signing method for the given algorithm name +func RegisterMPCMethod(alg string) { + jwt.RegisterSigningMethod(alg, func() jwt.SigningMethod { + return &MPCSigningMethod{ + Name: alg, + } + }) +} diff --git a/mpc/spec/source.go b/mpc/spec/source.go new file mode 100644 index 0000000..098c085 --- /dev/null +++ b/mpc/spec/source.go @@ -0,0 +1,305 @@ +package spec + +import ( + "fmt" + "strings" + "time" + + "github.com/golang-jwt/jwt/v5" + "github.com/libp2p/go-libp2p/core/crypto" + "github.com/sonr-io/sonr/crypto/keys" + "github.com/sonr-io/sonr/crypto/mpc" + "lukechampine.com/blake3" +) + +// KeyshareSource provides MPC-based UCAN token creation and validation +type KeyshareSource interface { + Address() string + Issuer() string + ChainCode() ([]byte, error) + OriginToken() (*Token, error) + SignData(data []byte) ([]byte, error) + VerifyData(data []byte, sig []byte) (bool, error) + Enclave() mpc.Enclave + + // UCAN token creation methods + NewOriginToken( + audienceDID string, + att []Attenuation, + fct []Fact, + notBefore, expires time.Time, + ) (*Token, error) + NewAttenuatedToken( + parent *Token, + audienceDID string, + att []Attenuation, + fct []Fact, + nbf, exp time.Time, + ) (*Token, error) +} + +// NewSource creates a new MPC-based keyshare source from an enclave +func NewSource(enclave mpc.Enclave) (KeyshareSource, error) { + if !enclave.IsValid() { + return nil, fmt.Errorf("invalid MPC enclave provided") + } + + pubKeyBytes := enclave.PubKeyBytes() + issuerDID, addr, err := getIssuerDIDFromBytes(pubKeyBytes) + if err != nil { + return nil, fmt.Errorf("failed to derive issuer DID: %w", err) + } + + return &mpcKeyshareSource{ + enclave: enclave, + issuerDID: issuerDID, + addr: addr, + }, nil +} + +// mpcKeyshareSource implements KeyshareSource using MPC enclave +type mpcKeyshareSource struct { + enclave mpc.Enclave + issuerDID string + addr string +} + +// Address returns the address derived from the enclave public key +func (k *mpcKeyshareSource) Address() string { + return k.addr +} + +// Issuer returns the DID of the issuer derived from the enclave public key +func (k *mpcKeyshareSource) Issuer() string { + return k.issuerDID +} + +// Enclave returns the underlying MPC enclave +func (k *mpcKeyshareSource) Enclave() mpc.Enclave { + return k.enclave +} + +// ChainCode derives a deterministic chain code from the enclave +func (k *mpcKeyshareSource) ChainCode() ([]byte, error) { + // Sign the address to create a deterministic chain code + sig, err := k.SignData([]byte(k.addr)) + if err != nil { + return nil, fmt.Errorf("failed to sign address for chain 
code: %w", err) + } + + // Hash the signature to create a 32-byte chain code + hash := blake3.Sum256(sig) + return hash[:32], nil +} + +// OriginToken creates a default origin token with basic capabilities +func (k *mpcKeyshareSource) OriginToken() (*Token, error) { + // Create basic capability for the MPC keyshare + resource := &SimpleResource{ + Scheme: "mpc", + Value: k.addr, + URI: fmt.Sprintf("mpc://%s", k.addr), + } + + capability := &SimpleCapability{Action: "sign"} + + attenuation := Attenuation{ + Capability: capability, + Resource: resource, + } + + // Create token with no expiration for origin token + zero := time.Time{} + return k.NewOriginToken(k.issuerDID, []Attenuation{attenuation}, nil, zero, zero) +} + +// SignData signs data using the MPC enclave +func (k *mpcKeyshareSource) SignData(data []byte) ([]byte, error) { + if !k.enclave.IsValid() { + return nil, fmt.Errorf("enclave is not valid") + } + + return k.enclave.Sign(data) +} + +// VerifyData verifies a signature using the MPC enclave +func (k *mpcKeyshareSource) VerifyData(data []byte, sig []byte) (bool, error) { + if !k.enclave.IsValid() { + return false, fmt.Errorf("enclave is not valid") + } + + return k.enclave.Verify(data, sig) +} + +// NewOriginToken creates a new UCAN origin token using MPC signing +func (k *mpcKeyshareSource) NewOriginToken( + audienceDID string, + att []Attenuation, + fct []Fact, + notBefore, expires time.Time, +) (*Token, error) { + return k.newToken(audienceDID, nil, att, fct, notBefore, expires) +} + +// NewAttenuatedToken creates a new attenuated UCAN token using MPC signing +func (k *mpcKeyshareSource) NewAttenuatedToken( + parent *Token, + audienceDID string, + att []Attenuation, + fct []Fact, + nbf, exp time.Time, +) (*Token, error) { + // Validate that new attenuations are more restrictive than parent + if !isAttenuationSubset(att, parent.Attenuations) { + return nil, fmt.Errorf("scope of ucan attenuations must be less than its parent") + } + + // Add parent as proof + proofs := []Proof{} + if parent.Raw != "" { + proofs = append(proofs, Proof(parent.Raw)) + } + proofs = append(proofs, parent.Proofs...) 
+ + return k.newToken(audienceDID, proofs, att, fct, nbf, exp) +} + +// newToken creates a new UCAN token with MPC signing +func (k *mpcKeyshareSource) newToken( + audienceDID string, + proofs []Proof, + att []Attenuation, + fct []Fact, + nbf, exp time.Time, +) (*Token, error) { + // Validate audience DID + if !isValidDID(audienceDID) { + return nil, fmt.Errorf("invalid audience DID: %s", audienceDID) + } + + // Create JWT with MPC signing method + t := jwt.New(NewJWTSigningMethod("MPC256", k.enclave)) + + // Set UCAN version header + t.Header[UCANVersionKey] = UCANVersion + + var ( + nbfUnix int64 + expUnix int64 + ) + + if !nbf.IsZero() { + nbfUnix = nbf.Unix() + } + if !exp.IsZero() { + expUnix = exp.Unix() + } + + // Convert attenuations to claim format + attClaims := make([]map[string]any, len(att)) + for i, a := range att { + attClaims[i] = map[string]any{ + "can": a.Capability.GetActions(), + "with": a.Resource.GetURI(), + } + } + + // Convert proofs to strings + proofStrings := make([]string, len(proofs)) + for i, proof := range proofs { + proofStrings[i] = string(proof) + } + + // Convert facts to any slice + factData := make([]any, len(fct)) + for i, fact := range fct { + factData[i] = string(fact.Data) + } + + // Set claims + claims := jwt.MapClaims{ + "iss": k.issuerDID, + "aud": audienceDID, + "att": attClaims, + } + + if nbfUnix > 0 { + claims["nbf"] = nbfUnix + } + if expUnix > 0 { + claims["exp"] = expUnix + } + if len(proofStrings) > 0 { + claims["prf"] = proofStrings + } + if len(factData) > 0 { + claims["fct"] = factData + } + + t.Claims = claims + + // Sign the token using MPC enclave + tokenString, err := t.SignedString(nil) + if err != nil { + return nil, fmt.Errorf("failed to sign token: %w", err) + } + + return &Token{ + Raw: tokenString, + Issuer: k.issuerDID, + Audience: audienceDID, + ExpiresAt: expUnix, + NotBefore: nbfUnix, + Attenuations: att, + Proofs: proofs, + Facts: fct, + }, nil +} + +// isAttenuationSubset checks if child attenuations are a subset of parent attenuations +func isAttenuationSubset(child, parent []Attenuation) bool { + for _, childAtt := range child { + if !containsAttenuation(parent, childAtt) { + return false + } + } + return true +} + +// containsAttenuation checks if the parent list contains an equivalent attenuation +func containsAttenuation(parent []Attenuation, att Attenuation) bool { + for _, parentAtt := range parent { + if parentAtt.Resource.Matches(att.Resource) && + parentAtt.Capability.Contains(att.Capability) { + return true + } + } + return false +} + +// isValidDID validates DID format +func isValidDID(did string) bool { + return did != "" && len(did) > 5 && strings.HasPrefix(did, "did:") +} + +// getIssuerDIDFromBytes creates an issuer DID and address from public key bytes +func getIssuerDIDFromBytes(pubKeyBytes []byte) (string, string, error) { + // Convert MPC public key bytes to libp2p crypto.PubKey + pubKey, err := crypto.UnmarshalSecp256k1PublicKey(pubKeyBytes) + if err != nil { + return "", "", fmt.Errorf("failed to unmarshal secp256k1 key: %w", err) + } + + // Create DID using the crypto/keys package + did, err := keys.NewDID(pubKey) + if err != nil { + return "", "", fmt.Errorf("failed to create DID: %w", err) + } + + didStr := did.String() + + // Generate address from DID (simplified implementation) + address := fmt.Sprintf("addr_%x", pubKeyBytes[:8]) + + return didStr, address, nil +} diff --git a/mpc/spec/ucan.go b/mpc/spec/ucan.go new file mode 100644 index 0000000..01d9463 --- /dev/null +++ b/mpc/spec/ucan.go 
@@ -0,0 +1,125 @@ +package spec + +import ( + "encoding/json" + "fmt" + "strings" + + "github.com/cosmos/cosmos-sdk/types/bech32" +) + +// Token represents a UCAN JWT token with parsed claims +type Token struct { + Raw string `json:"raw"` + Issuer string `json:"iss"` + Audience string `json:"aud"` + ExpiresAt int64 `json:"exp,omitempty"` + NotBefore int64 `json:"nbf,omitempty"` + Attenuations []Attenuation `json:"att"` + Proofs []Proof `json:"prf,omitempty"` + Facts []Fact `json:"fct,omitempty"` +} + +// Attenuation represents a UCAN capability attenuation +type Attenuation struct { + Capability Capability `json:"can"` + Resource Resource `json:"with"` +} + +// Proof represents a UCAN delegation proof (either JWT or CID) +type Proof string + +// Fact represents arbitrary facts in UCAN tokens +type Fact struct { + Data json.RawMessage `json:"data"` +} + +// Capability defines what actions can be performed +type Capability interface { + GetActions() []string + Grants(abilities []string) bool + Contains(other Capability) bool + String() string +} + +// Resource defines what resource the capability applies to +type Resource interface { + GetScheme() string + GetValue() string + GetURI() string + Matches(other Resource) bool +} + +// SimpleCapability implements Capability for single actions +type SimpleCapability struct { + Action string `json:"action"` +} + +func (c *SimpleCapability) GetActions() []string { return []string{c.Action} } +func (c *SimpleCapability) Grants(abilities []string) bool { + return len(abilities) == 1 && c.Action == abilities[0] +} + +func (c *SimpleCapability) Contains( + other Capability, +) bool { + return c.Action == other.GetActions()[0] +} +func (c *SimpleCapability) String() string { return c.Action } + +// SimpleResource implements Resource for basic URI resources +type SimpleResource struct { + Scheme string `json:"scheme"` + Value string `json:"value"` + URI string `json:"uri"` +} + +func (r *SimpleResource) GetScheme() string { return r.Scheme } +func (r *SimpleResource) GetValue() string { return r.Value } +func (r *SimpleResource) GetURI() string { return r.URI } +func (r *SimpleResource) Matches(other Resource) bool { return r.URI == other.GetURI() } + +// UCAN constants +const ( + UCANVersion = "0.9.0" + UCANVersionKey = "ucv" + PrfKey = "prf" + FctKey = "fct" + AttKey = "att" + CapKey = "cap" +) + +// CreateSimpleAttenuation creates a basic attenuation +func CreateSimpleAttenuation(action, resourceURI string) Attenuation { + return Attenuation{ + Capability: &SimpleCapability{Action: action}, + Resource: parseResourceURI(resourceURI), + } +} + +// parseResourceURI creates a Resource from URI string +func parseResourceURI(uri string) Resource { + parts := strings.SplitN(uri, "://", 2) + if len(parts) != 2 { + return &SimpleResource{ + Scheme: "unknown", + Value: uri, + URI: uri, + } + } + + return &SimpleResource{ + Scheme: parts[0], + Value: parts[1], + URI: uri, + } +} + +// getIssuerDIDFromBytes creates an issuer DID and address from public key bytes (alternative implementation) +func getIssuerDIDFromBytesAlt(pubKeyBytes []byte) (string, string, error) { + addr, err := bech32.ConvertAndEncode("idx", pubKeyBytes) + if err != nil { + return "", "", fmt.Errorf("failed to encode address: %w", err) + } + return fmt.Sprintf("did:sonr:%s", addr), addr, nil +} diff --git a/mpc/utils.go b/mpc/utils.go new file mode 100644 index 0000000..854e1d6 --- /dev/null +++ b/mpc/utils.go @@ -0,0 +1,160 @@ +package mpc + +import ( + "crypto/aes" + "crypto/cipher" + 
"errors" + "fmt" + "math/big" + + "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/core/protocol" + "github.com/sonr-io/sonr/crypto/tecdsa/dklsv1" + "golang.org/x/crypto/sha3" +) + +func CheckIteratedErrors(aErr, bErr error) error { + if aErr == protocol.ErrProtocolFinished && bErr == protocol.ErrProtocolFinished { + return nil + } + if aErr != protocol.ErrProtocolFinished { + return aErr + } + if bErr != protocol.ErrProtocolFinished { + return bErr + } + return nil +} + +func GetHashKey(key []byte) []byte { + hash := sha3.New256() + hash.Write(key) + return hash.Sum(nil)[:32] // Use first 32 bytes of hash +} + +func DecryptKeyshare(msg []byte, key []byte, nonce []byte) ([]byte, error) { + hashedKey := GetHashKey(key) + block, err := aes.NewCipher(hashedKey) + if err != nil { + return nil, err + } + aesgcm, err := cipher.NewGCM(block) + if err != nil { + return nil, err + } + plaintext, err := aesgcm.Open(nil, nonce, msg, nil) + if err != nil { + return nil, err + } + return plaintext, nil +} + +func EncryptKeyshare(msg Message, key []byte, nonce []byte) ([]byte, error) { + hashedKey := GetHashKey(key) + msgBytes, err := protocol.EncodeMessage(msg) + if err != nil { + return nil, err + } + block, err := aes.NewCipher(hashedKey) + if err != nil { + return nil, err + } + aesgcm, err := cipher.NewGCM(block) + if err != nil { + return nil, err + } + ciphertext := aesgcm.Seal(nil, nonce, []byte(msgBytes), nil) + return ciphertext, nil +} + +func GetAliceOut(msg *protocol.Message) (AliceOut, error) { + return dklsv1.DecodeAliceDkgResult(msg) +} + +func GetAlicePublicPoint(msg *protocol.Message) (Point, error) { + out, err := dklsv1.DecodeAliceDkgResult(msg) + if err != nil { + return nil, err + } + return out.PublicKey, nil +} + +func GetBobOut(msg *protocol.Message) (BobOut, error) { + return dklsv1.DecodeBobDkgResult(msg) +} + +func GetBobPubPoint(msg *protocol.Message) (Point, error) { + out, err := dklsv1.DecodeBobDkgResult(msg) + if err != nil { + return nil, err + } + return out.PublicKey, nil +} + +// GetECDSAPoint builds an elliptic curve point from a compressed byte slice +func GetECDSAPoint(pubKey []byte) (*curves.EcPoint, error) { + crv := curves.K256() + x := new(big.Int).SetBytes(pubKey[1:33]) + y := new(big.Int).SetBytes(pubKey[33:]) + ecCurve, err := crv.ToEllipticCurve() + if err != nil { + return nil, fmt.Errorf("error converting curve: %v", err) + } + return &curves.EcPoint{X: x, Y: y, Curve: ecCurve}, nil +} + +func SerializeSignature(sig *curves.EcdsaSignature) ([]byte, error) { + if sig == nil { + return nil, errors.New("nil signature") + } + + rBytes := sig.R.Bytes() + sBytes := sig.S.Bytes() + + // Ensure both components are 32 bytes + rPadded := make([]byte, 32) + sPadded := make([]byte, 32) + copy(rPadded[32-len(rBytes):], rBytes) + copy(sPadded[32-len(sBytes):], sBytes) + + // Concatenate R and S + result := make([]byte, 64) + copy(result[0:32], rPadded) + copy(result[32:64], sPadded) + + return result, nil +} + +func DeserializeSignature(sigBytes []byte) (*curves.EcdsaSignature, error) { + if len(sigBytes) != 64 { + return nil, fmt.Errorf("invalid signature length: expected 64 bytes, got %d", len(sigBytes)) + } + + r := new(big.Int).SetBytes(sigBytes[:32]) + s := new(big.Int).SetBytes(sigBytes[32:]) + + return &curves.EcdsaSignature{ + R: r, + S: s, + }, nil +} + +func GetAliceSignFunc(k *EnclaveData, bz []byte) (SignFunc, error) { + curve := k.Curve.Curve() + return dklsv1.NewAliceSign(curve, sha3.New256(), bz, k.ValShare, 
protocol.Version1) +} + +func GetAliceRefreshFunc(k *EnclaveData) (RefreshFunc, error) { + curve := k.Curve.Curve() + return dklsv1.NewAliceRefresh(curve, k.ValShare, protocol.Version1) +} + +func GetBobSignFunc(k *EnclaveData, bz []byte) (SignFunc, error) { + curve := curves.K256() + return dklsv1.NewBobSign(curve, sha3.New256(), bz, k.UserShare, protocol.Version1) +} + +func GetBobRefreshFunc(k *EnclaveData) (RefreshFunc, error) { + curve := curves.K256() + return dklsv1.NewBobRefresh(curve, k.UserShare, protocol.Version1) +} diff --git a/mpc/verify.go b/mpc/verify.go new file mode 100644 index 0000000..4163759 --- /dev/null +++ b/mpc/verify.go @@ -0,0 +1,29 @@ +package mpc + +import ( + "crypto/ecdsa" + + "golang.org/x/crypto/sha3" +) + +func VerifyWithPubKey(pubKeyCompressed []byte, data []byte, sig []byte) (bool, error) { + edSig, err := DeserializeSignature(sig) + if err != nil { + return false, err + } + ePub, err := GetECDSAPoint(pubKeyCompressed) + if err != nil { + return false, err + } + pk := &ecdsa.PublicKey{ + Curve: ePub.Curve, + X: ePub.X, + Y: ePub.Y, + } + + // Hash the message using SHA3-256 + hash := sha3.New256() + hash.Write(data) + digest := hash.Sum(nil) + return ecdsa.Verify(pk, digest, edSig.R, edSig.S), nil +} diff --git a/ot/base/simplest/ot.go b/ot/base/simplest/ot.go new file mode 100644 index 0000000..35c8b1b --- /dev/null +++ b/ot/base/simplest/ot.go @@ -0,0 +1,437 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +// Package simplest implements the "Verified Simplest OT", as defined in "protocol 7" of [DKLs18](https://eprint.iacr.org/2018/499.pdf). +// The original "Simplest OT" protocol is presented in [CC15](https://eprint.iacr.org/2015/267.pdf). +// In our implementation, we run OTs for multiple choice bits in parallel. Furthermore, as described in the DKLs paper, +// we implement this as Random OT protocol. We also add encryption and decryption steps as defined in the protocol, but +// emphasise that these steps are optional. Specifically, in the setting where this OT is used as the seed OT in an +// OT Extension protocol, the encryption and decryption steps are not needed. +// +// Limitation: currently we only support batch OTs that are multiples of 8. +// +// Ideal functionalities: +// - We have used ZKP Schnorr for the F^{R_{DL}}_{ZK} +// - We have used HMAC for realizing the Random Oracle Hash function, the key for HMAC is received as input to the protocol. +package simplest + +import ( + "crypto/rand" + "crypto/subtle" + "fmt" + + "github.com/gtank/merlin" + "github.com/pkg/errors" + "golang.org/x/crypto/sha3" + + "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/zkp/schnorr" +) + +const ( + // keyCount is the number of encryption keys created. Since this is a 1-out-of-2 OT, the key count is set to 2. + keyCount = 2 + + // DigestSize is the length of hash. Similarly, when it comes to encrypting and decryption, it is the size of the + // plaintext and ciphertext. + DigestSize = 32 +) + +type ( + // OneTimePadDecryptionKey is the type of Rho^w, Rho^0, and RHo^1 in the paper. + OneTimePadDecryptionKey = [DigestSize]byte + + // OneTimePadEncryptionKeys is the type of Rho^0, and RHo^1 in the paper. + OneTimePadEncryptionKeys = [keyCount][DigestSize]byte + + // OtChallenge is the type of xi in the paper. + OtChallenge = [DigestSize]byte + + // OtChallengeResponse is the type of Rho' in the paper. 
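	// Editor's note (illustrative, not part of this commit): the receiver's
	// response is H(H(rho^w)) when its choice bit w is 0, and H(H(rho^w)) XOR xi
	// when w is 1; since xi = H(H(rho^0)) XOR H(H(rho^1)), both cases collapse
	// to H(H(rho^0)), which is exactly what the sender checks in Round5Verify
	// without learning w.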
+ OtChallengeResponse = [DigestSize]byte + + // ChallengeOpening is the type of hashed Rho^0 and Rho^1 + ChallengeOpening = [keyCount][DigestSize]byte + + // ReceiversMaskedChoices corresponds to the "A" value in the paper in compressed format. + ReceiversMaskedChoices = []byte +) + +// SenderOutput are the outputs that the sender will obtain as a result of running the "random" OT protocol. +type SenderOutput struct { + // OneTimePadEncryptionKeys are Rho^0 and Rho^1, the output of the random OT. + // These can be used to encrypt and send two messages to the receiver. + // Therefore, for readability they are called OneTimePadEncryptionKeys in the code. + OneTimePadEncryptionKeys []OneTimePadEncryptionKeys +} + +// ReceiverOutput are the outputs that the receiver will obtain as a result of running the "random" OT protocol. +type ReceiverOutput struct { + // PackedRandomChoiceBits is a packed version of the choice vector, the packing is done for performance reasons. + PackedRandomChoiceBits []byte + + // RandomChoiceBits is the choice vector represented as unpacked int array. Initialed from PackedRandomChoiceBits. + RandomChoiceBits []int + + // OneTimePadDecryptionKey is Rho^w, the output of the random OT. For the receiver, there is just 1 output per execution. + // This value will be used to decrypt one of the messages sent by the sender. + // Therefore, for readability this is called OneTimePadDecryptionKey in the code. + OneTimePadDecryptionKey []OneTimePadDecryptionKey +} + +// Sender stores state for the "sender" role in OT. see Protocol 7 in Appendix A of DKLs18. +type Sender struct { + // Output is the output that is produced as a result of running random OT protocol. + Output *SenderOutput + + curve *curves.Curve + + // secretKey is the value `b` in the paper, which is the discrete log of B, which will be (re)used in _all_ executions of the OT. + secretKey curves.Scalar + + // publicKey is the public key of the secretKey. + publicKey curves.Point + + // batchSize is the number of parallel OTs. + batchSize int + + transcript *merlin.Transcript +} + +// Receiver stores state for the "receiver" role in OT. Protocol 7, Appendix A, of DKLs. +type Receiver struct { + // Output is the output that is produced as a result of running random OT protocol. + Output *ReceiverOutput + + curve *curves.Curve + + // senderPublicKey corresponds to "B" in the paper. + senderPublicKey curves.Point + + // senderChallenge is "xi" in the protocol. + senderChallenge []OtChallenge + + // batchSize is the number of parallel OTs. + batchSize int + + transcript *merlin.Transcript +} + +// NewSender creates a new "sender" object, ready to participate in a _random_ verified simplest OT in the role of the sender. +// no messages are specified by the sender, because random ones will be sent (hence the random OT). +// ultimately, the `Sender`'s `Output` field will be appropriately populated. 
+// you can use it directly, or alternatively bootstrap it into an _actual_ (non-random) OT using `Round7Encrypt` below +func NewSender( + curve *curves.Curve, + batchSize int, + uniqueSessionId [DigestSize]byte, +) (*Sender, error) { + if batchSize&0x07 != 0 { // This is the same as `batchSize % 8 != 0`, but is constant time + return nil, errors.New("batch size should be a multiple of 8") + } + transcript := merlin.NewTranscript("Coinbase_DKLs_SeedOT") + transcript.AppendMessage([]byte("session_id"), uniqueSessionId[:]) + return &Sender{ + Output: &SenderOutput{}, + curve: curve, + batchSize: batchSize, + transcript: transcript, + }, nil +} + +// NewReceiver is a Random OT receiver. Therefore, the choice bits are created randomly. +// The choice bits are stored in a packed format (e.g., each choice is a single bit in a byte array). +func NewReceiver( + curve *curves.Curve, + batchSize int, + uniqueSessionId [DigestSize]byte, +) (*Receiver, error) { + // This is the same as `batchSize % 8 != 0`, but is constant time + if batchSize&0x07 != 0 { + return nil, errors.New("batch size should be a multiple of 8") + } + + transcript := merlin.NewTranscript("Coinbase_DKLs_SeedOT") + transcript.AppendMessage([]byte("session_id"), uniqueSessionId[:]) + + receiver := &Receiver{ + Output: &ReceiverOutput{}, + curve: curve, + batchSize: batchSize, + transcript: transcript, + } + batchSizeBytes := batchSize >> 3 // divide by 8 + receiver.Output.PackedRandomChoiceBits = make([]byte, batchSizeBytes) + if _, err := rand.Read(receiver.Output.PackedRandomChoiceBits[:]); err != nil { + return nil, errors.Wrap(err, "choosing random choice bits") + } + // Unpack into Choice bits + receiver.initChoice() + return receiver, nil +} + +// Round1ComputeAndZkpToPublicKey is the first phase of the protocol. +// computes and stores public key and returns the schnorr proof. serialized / packed. +// This implements step 1 of Protocol 7 of DKLs18, page 16. +func (sender *Sender) Round1ComputeAndZkpToPublicKey() (*schnorr.Proof, error) { + var err error + // Sample the secret key and compute the public key. + sender.secretKey = sender.curve.Scalar.Random(rand.Reader) + sender.publicKey = sender.curve.ScalarBaseMult(sender.secretKey) + + // Generate the ZKP proof. + uniqueSessionId := [DigestSize]byte{} + copy( + uniqueSessionId[:], + sender.transcript.ExtractBytes([]byte("sender schnorr proof"), DigestSize), + ) + prover := schnorr.NewProver(sender.curve, nil, uniqueSessionId[:]) + proof, err := prover.Prove(sender.secretKey) + if err != nil { + return nil, errors.Wrap(err, "creating zkp proof for secret key in seed OT sender round 1") + } + return proof, nil +} + +// Round2VerifySchnorrAndPadTransfer verifies the schnorr proof of the public key sent by the sender, i.e., step 2), +// and then does receiver's "Pad Transfer" phase in OT, i.e., step 3), of Protocol 7 (page 16) of the paper. 
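// Editor's note (illustrative, not part of this commit): for each i the
// receiver samples a_i, publishes A_i = a_i.G + w_i.B, and derives its one-time
// pad as rho^w_i = H(salt, i, a_i.B); in Round3PadTransfer the sender can
// recompute that same value only for the branch that matches w_i.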
+func (receiver *Receiver) Round2VerifySchnorrAndPadTransfer( + proof *schnorr.Proof, +) ([]ReceiversMaskedChoices, error) { + receiver.senderPublicKey = proof.Statement + uniqueSessionId := [DigestSize]byte{} + copy( + uniqueSessionId[:], + receiver.transcript.ExtractBytes([]byte("sender schnorr proof"), DigestSize), + ) + if err := schnorr.Verify(proof, receiver.curve, nil, uniqueSessionId[:]); err != nil { + return nil, errors.Wrap(err, "verifying schnorr proof in seed OT receiver round 2") + } + + result := make([]ReceiversMaskedChoices, receiver.batchSize) + receiver.Output.OneTimePadDecryptionKey = make([]OneTimePadDecryptionKey, receiver.batchSize) + copy( + uniqueSessionId[:], + receiver.transcript.ExtractBytes([]byte("random oracle salts"), DigestSize), + ) + for i := 0; i < receiver.batchSize; i++ { + a := receiver.curve.Scalar.Random(rand.Reader) + // Computing `A := a . G + w . B` in constant time, by first computing option0 = a.G and option1 = a.G+B and then + // constant time choosing one of them by first assuming that the output is option0, and overwrite it if the choice bit is 1. + + option0 := receiver.curve.ScalarBaseMult(a) + option0Bytes := option0.ToAffineCompressed() + option1 := option0.Add(receiver.senderPublicKey) + option1Bytes := option1.ToAffineCompressed() + + result[i] = option0Bytes + subtle.ConstantTimeCopy(receiver.Output.RandomChoiceBits[i], result[i], option1Bytes) + // compute the internal rho + rho := receiver.senderPublicKey.Mul(a) + hash := sha3.New256() + if _, err := hash.Write(uniqueSessionId[:]); err != nil { + return nil, errors.Wrap(err, "writing seed to hash in round 2 pad transfer") + } + if _, err := hash.Write([]byte{byte(i)}); err != nil { + return nil, errors.Wrap(err, "writing i to hash in round 2 pad transfer") + } + if _, err := hash.Write(rho.ToAffineCompressed()); err != nil { + return nil, errors.Wrap(err, "writing point to hash in round 2 pad transfer") + } + copy(receiver.Output.OneTimePadDecryptionKey[i][:], hash.Sum(nil)) + } + return result, nil +} + +// Round3PadTransfer is the sender's "Pad Transfer" phase in OT; see steps 4 and 5 of page 16 of the paper. +// Returns the challenges xi +func (sender *Sender) Round3PadTransfer( + compressedReceiversMaskedChoice []ReceiversMaskedChoices, +) ([]OtChallenge, error) { + var err error + challenge := make([]OtChallenge, sender.batchSize) + sender.Output.OneTimePadEncryptionKeys = make([]OneTimePadEncryptionKeys, sender.batchSize) + negSenderPublicKey := sender.publicKey.Neg() + + receiversMaskedChoice := make([]curves.Point, len(compressedReceiversMaskedChoice)) + for i := 0; i < len(compressedReceiversMaskedChoice); i++ { + if receiversMaskedChoice[i], err = sender.curve.Point.FromAffineCompressed(compressedReceiversMaskedChoice[i]); err != nil { + return nil, errors.Wrap(err, "uncompress the point") + } + } + + baseEncryptionKeyMaterial := make([]curves.Point, keyCount) + var hashedKey [keyCount][DigestSize]byte + uniqueSessionId := [DigestSize]byte{} + copy( + uniqueSessionId[:], + sender.transcript.ExtractBytes([]byte("random oracle salts"), DigestSize), + ) + + for i := 0; i < sender.batchSize; i++ { + // Sender creates two options that will eventually be used as her encryption keys. + // `baseEncryptionKeyMaterial[0]` and `baseEncryptionKeyMaterial[0]` correspond to rho_0 and rho_1 in the paper, respectively. 
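		// (Editor's clarification, not part of this commit: index 0 is
		// rho^0 = b.A_i and index 1 is rho^1 = b.(A_i - B); only the entry
		// matching the receiver's choice bit equals the receiver's a_i.B from
		// round 2.)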
+ baseEncryptionKeyMaterial[0] = receiversMaskedChoice[i].Mul(sender.secretKey) + + receiverChoiceMinusSenderPublicKey := receiversMaskedChoice[i].Add(negSenderPublicKey) + baseEncryptionKeyMaterial[1] = receiverChoiceMinusSenderPublicKey.Mul(sender.secretKey) + + for k := 0; k < keyCount; k++ { + hash := sha3.New256() + if _, err = hash.Write(uniqueSessionId[:]); err != nil { + return nil, errors.Wrap(err, "writing seed to hash in round 3 pad transfer") + } + if _, err = hash.Write([]byte{byte(i)}); err != nil { + return nil, errors.Wrap(err, "writing i to hash in round 3 pad transfer") + } + if _, err = hash.Write(baseEncryptionKeyMaterial[k].ToAffineCompressed()); err != nil { + return nil, errors.Wrap(err, "writing point to hash in round 3 pad transfer") + } + copy(sender.Output.OneTimePadEncryptionKeys[i][k][:], hash.Sum(nil)) + if err != nil { + return nil, errors.Wrap(err, "compute the encryption keys") + } + + // Compute a challenge by XORing the hash of the hash of the key. Not a typo ;) + hashedKey[k] = sha3.Sum256(sender.Output.OneTimePadEncryptionKeys[i][k][:]) + hashedKey[k] = sha3.Sum256(hashedKey[k][:]) + } + + challenge[i] = xorBytes(hashedKey[0], hashedKey[1]) + } + return challenge, nil +} + +// Round4RespondToChallenge corresponds to initial round of the receiver's "Verify" phase; see step 6 of page 16 of the paper. +// this is just the start of Verification. In this round, the receiver outputs "rho'", which the sender will check. +func (receiver *Receiver) Round4RespondToChallenge( + challenge []OtChallenge, +) ([]OtChallengeResponse, error) { + // store to be used in future steps + receiver.senderChallenge = challenge + // challengeResponses is Rho' in the paper. + challengeResponses := make([]OtChallengeResponse, receiver.batchSize) + for i := 0; i < receiver.batchSize; i++ { + // Constant-time xor of the hashed key and the challenge, based on the choice bit. + hashedKey := sha3.Sum256(receiver.Output.OneTimePadDecryptionKey[i][:]) + hashedKey = sha3.Sum256(hashedKey[:]) + challengeResponses[i] = hashedKey + alternativeChallengeResponse := xorBytes(receiver.senderChallenge[i], hashedKey) + subtle.ConstantTimeCopy( + receiver.Output.RandomChoiceBits[i], + challengeResponses[i][:], + alternativeChallengeResponse[:], + ) + } + return challengeResponses, nil +} + +// Round5Verify verifies the challenge response. If the verification passes, sender opens his challenges to the receiver. +// See step 7 of page 16 of the paper. +// Abort if Rho' != H(H(Rho^0)) in other words, if challengeResponse != H(H(encryption key 0)). +// opening is H(encryption key) +func (sender *Sender) Round5Verify( + challengeResponses []OtChallengeResponse, +) ([]ChallengeOpening, error) { + opening := make([]ChallengeOpening, sender.batchSize) + for i := 0; i < sender.batchSize; i++ { + for k := 0; k < keyCount; k++ { + opening[i][k] = sha3.Sum256(sender.Output.OneTimePadEncryptionKeys[i][k][:]) + } + + // Verify + hashedKey0 := sha3.Sum256(opening[i][0][:]) + if subtle.ConstantTimeCompare(hashedKey0[:], challengeResponses[i][:]) != 1 { + return nil, errors.New("receiver's challenge response didn't match H(H(rho^0))") + } + } + return opening, nil +} + +// Round6Verify is the _last_ part of the "Verification" phase of OT; see p. 16 of https://eprint.iacr.org/2018/499.pdf. +// See step 8 of page 16 of the paper. 
+// Abort if H(Rho^w) != the one it calculated itself or +// +// if Xi != H(H(Rho^0)) XOR H(H(Rho^1)) +// +// In other words, +// +// if opening_w != H(decryption key) or +// if challenge != H(opening 0) XOR H(opening 0) +func (receiver *Receiver) Round6Verify(challengeOpenings []ChallengeOpening) error { + for i := 0; i < receiver.batchSize; i++ { + hashedDecryptionKey := sha3.Sum256(receiver.Output.OneTimePadDecryptionKey[i][:]) + w := receiver.Output.RandomChoiceBits[i] + if subtle.ConstantTimeCompare(hashedDecryptionKey[:], challengeOpenings[i][w][:]) != 1 { + return fmt.Errorf("sender's supposed H(rho^omega) doesn't match our own") + } + hashedKey0 := sha3.Sum256(challengeOpenings[i][0][:]) + hashedKey1 := sha3.Sum256(challengeOpenings[i][1][:]) + reconstructedChallenge := xorBytes(hashedKey0, hashedKey1) + if subtle.ConstantTimeCompare( + reconstructedChallenge[:], + receiver.senderChallenge[i][:], + ) != 1 { + return fmt.Errorf( + "sender's openings H(rho^0) and H(rho^1) didn't decommit to its prior message", + ) + } + } + return nil +} + +// Round7Encrypt wraps an `Encrypt` operation on the Sender's underlying output from the random OT; see `Encrypt` below. +// this is optional; it will be used only in circumstances when you want to run "actual" (i.e., non-random) OT +func (sender *Sender) Round7Encrypt( + messages [][keyCount][DigestSize]byte, +) ([][keyCount][DigestSize]byte, error) { + return sender.Output.Encrypt(messages) +} + +// Round8Decrypt wraps a `Decrypt` operation on the Receiver's underlying output from the random OT; see `Decrypt` below +// this is optional; it will be used only in circumstances when you want to run "actual" (i.e., non-random) OT +func (receiver *Receiver) Round8Decrypt( + ciphertext [][keyCount][DigestSize]byte, +) ([][DigestSize]byte, error) { + return receiver.Output.Decrypt(ciphertext) +} + +// Encrypt runs step 9) of the seed OT Protocol 7) of https://eprint.iacr.org/2018/499.pdf, +// in which the seed OT sender "encrypts" both messages under the "one-time keys" output by the random OT. +func (s *SenderOutput) Encrypt( + plaintexts [][keyCount][DigestSize]byte, +) ([][keyCount][DigestSize]byte, error) { + batchSize := len(s.OneTimePadEncryptionKeys) + if len(plaintexts) != batchSize { + return nil, errors.New("message size should be same as batch size") + } + ciphertexts := make([][keyCount][DigestSize]byte, batchSize) + + for i := 0; i < len(plaintexts); i++ { + for k := 0; k < keyCount; k++ { + ciphertexts[i][k] = xorBytes(s.OneTimePadEncryptionKeys[i][k], plaintexts[i][k]) + } + } + return ciphertexts, nil +} + +// Decrypt is step 10) of the seed OT Protocol 7) of https://eprint.iacr.org/2018/499.pdf, +// where the seed OT receiver "decrypts" the message it's receiving using the "key" it received in the random OT. +func (r *ReceiverOutput) Decrypt( + ciphertexts [][keyCount][DigestSize]byte, +) ([][DigestSize]byte, error) { + batchSize := len(r.OneTimePadDecryptionKey) + if len(ciphertexts) != batchSize { + return nil, errors.New("number of ciphertexts should be same as batch size") + } + plaintexts := make([][DigestSize]byte, batchSize) + + for i := 0; i < len(ciphertexts); i++ { + choice := r.RandomChoiceBits[i] + plaintexts[i] = xorBytes(r.OneTimePadDecryptionKey[i], ciphertexts[i][choice]) + } + return plaintexts, nil +} diff --git a/ot/base/simplest/ot_test.go b/ot/base/simplest/ot_test.go new file mode 100644 index 0000000..687e759 --- /dev/null +++ b/ot/base/simplest/ot_test.go @@ -0,0 +1,96 @@ +// +// Copyright Coinbase, Inc. 
All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package simplest_test + +import ( + "crypto/rand" + "crypto/sha256" + "fmt" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/ot/base/simplest" + "github.com/sonr-io/sonr/crypto/ot/ottest" +) + +func TestOtOnMultipleCurves(t *testing.T) { + curveInstances := []*curves.Curve{ + curves.K256(), + curves.P256(), + } + for _, curve := range curveInstances { + batchSize := 256 + hashKeySeed := [32]byte{} + _, err := rand.Read(hashKeySeed[:]) + require.NoError(t, err) + sender, receiver, err := ottest.RunSimplestOT(curve, batchSize, hashKeySeed) + require.NoError(t, err) + + for i := 0; i < batchSize; i++ { + require.Equal( + t, + receiver.OneTimePadDecryptionKey[i], + sender.OneTimePadEncryptionKeys[i][receiver.RandomChoiceBits[i]], + ) + } + + // Transfer messages + messages := make([][2][32]byte, batchSize) + for i := 0; i < batchSize; i++ { + messages[i] = [2][32]byte{ + sha256.Sum256([]byte(fmt.Sprintf("message[%d][0]", i))), + sha256.Sum256([]byte(fmt.Sprintf("message[%d][1]", i))), + } + } + ciphertexts, err := sender.Encrypt(messages) + require.NoError(t, err) + decrypted, err := receiver.Decrypt(ciphertexts) + require.NoError(t, err) + + for i := 0; i < batchSize; i++ { + choice := receiver.RandomChoiceBits[i] + require.Equal(t, messages[i][choice], decrypted[i]) + require.NotEqual(t, messages[i][1-choice], decrypted[i]) + } + } +} + +func TestOTStreaming(t *testing.T) { + batchSize := 256 + curve := curves.K256() + hashKeySeed := [32]byte{} + _, err := rand.Read(hashKeySeed[:]) + require.NoError(t, err) + sender, err := simplest.NewSender(curve, batchSize, hashKeySeed) + require.Nil(t, err) + receiver, err := simplest.NewReceiver(curve, batchSize, hashKeySeed) + require.Nil(t, err) + + senderPipe, receiverPipe := simplest.NewPipeWrappers() + errorsChannel := make( + chan error, + 2, + ) // warning: if one party errors, the other will sit there forever. add timeouts. + go func() { + errorsChannel <- simplest.SenderStreamOTRun(sender, senderPipe) + }() + go func() { + errorsChannel <- simplest.ReceiverStreamOTRun(receiver, receiverPipe) + }() + for i := 0; i < 2; i++ { + require.Nil(t, <-errorsChannel) + } + for i := 0; i < batchSize; i++ { + require.Equal( + t, + receiver.Output.OneTimePadDecryptionKey[i], + sender.Output.OneTimePadEncryptionKeys[i][receiver.Output.RandomChoiceBits[i]], + ) + } +} diff --git a/ot/base/simplest/stream.go b/ot/base/simplest/stream.go new file mode 100644 index 0000000..d3f360d --- /dev/null +++ b/ot/base/simplest/stream.go @@ -0,0 +1,100 @@ +package simplest + +import ( + "encoding/gob" + "io" + + "github.com/pkg/errors" + + "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/zkp/schnorr" +) + +// ReceiverStreamOTRun exposes the entire seed OT process for the receiver in "stream mode" to the user. +// what this means is that instead of calling the component methods in the process manually, and manually handling +// the encoding and decoding of the resulting output and input structs, the user needs _only_ to pass a ReadWriter +// (in practice this will be something like a websocket object), and this method will handle the entire process. +// this serves the dual (though related) purpose of conveniently bundling up the entire seed OT process, +// for use in tests, both in this package, as well as in the other packages which use this one (like cOT and mult). 
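As the streaming test above notes, if one party errors out, the other can block forever on the pipe. A minimal sketch of bounding both stream runs with a deadline, reusing the in-memory pipe wrappers from this package; the helper `runStreamsWithTimeout` is hypothetical and not part of this change (on timeout the goroutines are simply abandoned, so a real driver would also close the underlying pipes):

```go
package main

import (
	"crypto/rand"
	"errors"
	"time"

	"github.com/sonr-io/sonr/crypto/core/curves"
	"github.com/sonr-io/sonr/crypto/ot/base/simplest"
)

// runStreamsWithTimeout drives both halves of the seed OT concurrently and gives up
// after d, so a failure on one side cannot leave the other blocked indefinitely.
func runStreamsWithTimeout(d time.Duration, fns ...func() error) error {
	errs := make(chan error, len(fns))
	for _, fn := range fns {
		go func(f func() error) { errs <- f() }(fn)
	}
	deadline := time.After(d)
	for range fns {
		select {
		case err := <-errs:
			if err != nil {
				return err
			}
		case <-deadline:
			return errors.New("seed OT stream run timed out")
		}
	}
	return nil
}

func main() {
	curve := curves.K256()
	seed := [simplest.DigestSize]byte{}
	if _, err := rand.Read(seed[:]); err != nil {
		panic(err)
	}
	sender, err := simplest.NewSender(curve, 256, seed) // batch size 256, as in the test
	if err != nil {
		panic(err)
	}
	receiver, err := simplest.NewReceiver(curve, 256, seed)
	if err != nil {
		panic(err)
	}
	senderPipe, receiverPipe := simplest.NewPipeWrappers()
	err = runStreamsWithTimeout(
		30*time.Second,
		func() error { return simplest.SenderStreamOTRun(sender, senderPipe) },
		func() error { return simplest.ReceiverStreamOTRun(receiver, receiverPipe) },
	)
	if err != nil {
		panic(err)
	}
}
```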
+func ReceiverStreamOTRun(receiver *Receiver, rw io.ReadWriter) error { + enc := gob.NewEncoder(rw) + dec := gob.NewDecoder(rw) + gob.Register(&curves.ScalarK256{}) + gob.Register(&curves.PointK256{}) + + proof := &schnorr.Proof{} + + if err := dec.Decode(proof); err != nil { + return errors.Wrap(err, "failed to decode proof in receiver stream OT") + } + + receiversMaskedChoice, err := receiver.Round2VerifySchnorrAndPadTransfer(proof) + if err != nil { + return errors.Wrap(err, "error in round 2 in receiver stream OT") + } + if err = enc.Encode(receiversMaskedChoice); err != nil { + return errors.Wrap(err, "error encoding result of round 1 in receiver stream OT") + } + var challenge []OtChallenge + err = dec.Decode(&challenge) + if err != nil { + return errors.Wrap(err, "error decoding challenge in receiver stream OT") + } + challengeResponse, err := receiver.Round4RespondToChallenge(challenge) + if err != nil { + return errors.Wrap(err, "error computing round 2 challenge response in receiver stream OT") + } + if err = enc.Encode(challengeResponse); err != nil { + return errors.Wrap(err, "error encoding challenge response in receiver stream OT") + } + var openings []ChallengeOpening + err = dec.Decode(&openings) + if err != nil { + return errors.Wrap(err, "error decoding challenge openings in receiver stream OT") + } + return receiver.Round6Verify(openings) +} + +// SenderStreamOTRun exposes the entire seed OT process for the sender in "stream mode" to the user. +// similarly to the above, this means that the user needs only to pass a `ReadWriter` representing the comm channel; +// this method will handle all encoding and decoding + writing and reading to the channel. +func SenderStreamOTRun(sender *Sender, rw io.ReadWriter) error { + // again a high-level helper method showing the overall flow, this time for the sender. + enc := gob.NewEncoder(rw) + dec := gob.NewDecoder(rw) + gob.Register(&curves.ScalarK256{}) + gob.Register(&curves.PointK256{}) + + proof, err := sender.Round1ComputeAndZkpToPublicKey() + if err != nil { + return err + } + if err = enc.Encode(proof); err != nil { + return err + } + + var receiversMaskedChoice []ReceiversMaskedChoices + err = dec.Decode(&receiversMaskedChoice) + if err != nil { + return errors.Wrap(err, "error decoding receiver's masked choice in sender stream OT") + } + + challenge, err := sender.Round3PadTransfer(receiversMaskedChoice) + if err != nil { + return errors.Wrap(err, "error during round 2 pad transfer in sender stream OT") + } + err = enc.Encode(challenge) + if err != nil { + return errors.Wrap(err, "error encoding challenge in sender stream OT") + } + var challengeResponses []OtChallengeResponse + err = dec.Decode(&challengeResponses) + if err != nil { + return errors.Wrap(err, "error decoding challenges responses in sender stream OT") + } + opening, err := sender.Round5Verify(challengeResponses) + if err != nil { + return errors.Wrap(err, "error in round 3 verify in sender stream OT") + } + return enc.Encode(opening) +} diff --git a/ot/base/simplest/util.go b/ot/base/simplest/util.go new file mode 100755 index 0000000..f852e27 --- /dev/null +++ b/ot/base/simplest/util.go @@ -0,0 +1,55 @@ +package simplest + +import ( + "io" +) + +// xorBytes computes c = a xor b. 
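For reference, a tiny illustration of the little-endian bit convention used by `ExtractBitFromByteVector`, defined a little further below in this file; the values are chosen purely for clarity and are not part of the change:

```go
package main

import (
	"fmt"

	"github.com/sonr-io/sonr/crypto/ot/base/simplest"
)

func main() {
	// 0b0000_0101 has bits 0 and 2 set; 0b1000_0000 has bit 7 set, i.e. bit 15 of the vector.
	v := []byte{0b0000_0101, 0b1000_0000}
	fmt.Println(simplest.ExtractBitFromByteVector(v, 0))  // 1: lowest bit of byte 0
	fmt.Println(simplest.ExtractBitFromByteVector(v, 1))  // 0
	fmt.Println(simplest.ExtractBitFromByteVector(v, 2))  // 1
	fmt.Println(simplest.ExtractBitFromByteVector(v, 15)) // 1: highest bit of byte 1
}
```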
+func xorBytes(a, b [DigestSize]byte) (c [DigestSize]byte) { + for i := 0; i < DigestSize; i++ { + c[i] = a[i] ^ b[i] + } + return c +} + +// initChoice initializes the receiver's choice array from the PackedRandomChoiceBits array +func (receiver *Receiver) initChoice() { + // unpack the random values in PackedRandomChoiceBits into bits in Choice + receiver.Output.RandomChoiceBits = make([]int, receiver.batchSize) + for i := 0; i < len(receiver.Output.RandomChoiceBits); i++ { + receiver.Output.RandomChoiceBits[i] = int( + ExtractBitFromByteVector(receiver.Output.PackedRandomChoiceBits, i), + ) + } +} + +// ExtractBitFromByteVector interprets the byte-vector `vector` as if it were a _bit_-vector with len(vector) * 8 bits. +// it extracts the `index`th such bit, interpreted in the little-endian way (i.e., both across bytes and within bytes). +func ExtractBitFromByteVector(vector []byte, index int) byte { + // the bitwise tricks index >> 3 == index // 8 and index & 0x07 == index % 8 are designed to avoid CPU division. + return vector[index>>3] >> (index & 0x07) & 0x01 +} + +type pipeWrapper struct { + r *io.PipeReader + w *io.PipeWriter + exchanged int // used this during testing, to track bytes exchanged +} + +func (wrapper *pipeWrapper) Write(p []byte) (n int, err error) { + n, err = wrapper.w.Write(p) + wrapper.exchanged += n + return n, err +} + +func (wrapper *pipeWrapper) Read(p []byte) (n int, err error) { + n, err = wrapper.r.Read(p) + wrapper.exchanged += n + return n, err +} + +func NewPipeWrappers() (*pipeWrapper, *pipeWrapper) { + leftOut, leftIn := io.Pipe() + rightOut, rightIn := io.Pipe() + return &pipeWrapper{r: leftOut, w: rightIn}, &pipeWrapper{r: rightOut, w: leftIn} +} diff --git a/ot/extension/kos/kos.go b/ot/extension/kos/kos.go new file mode 100644 index 0000000..f40fc0e --- /dev/null +++ b/ot/extension/kos/kos.go @@ -0,0 +1,475 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +// Package kos in an implementation of maliciously secure OT extension protocol defined in "Protocol 9" of +// [DKLs18](https://eprint.iacr.org/2018/499.pdf). The original protocol was presented in +// [KOS15](https://eprint.iacr.org/2015/546.pdf). +package kos + +import ( + "crypto/rand" + "crypto/subtle" + "encoding/binary" + "fmt" + + "golang.org/x/crypto/sha3" + + "github.com/pkg/errors" + "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/ot/base/simplest" +) + +const ( + // below are the "cryptographic parameters", including computational and statistical, + // as well as the cOT block size parameters, which depend on these in a pre-defined way. + + // Kappa is the computational security parameter. + Kappa = 256 + + // KappaBytes is same as Kappa // 8, but avoids cpu division. + KappaBytes = Kappa >> 3 + + // L is the batch size used in the cOT functionality. + L = 2*Kappa + 2*s + + // COtBlockSizeBytes is same as L // 8, but avoids cpu division. + COtBlockSizeBytes = L >> 3 + + // OtWidth is the number of scalars processed per "slot" of the cOT. by definition of this parameter, + // for each of the receiver's choice bits, the sender will provide `OTWidth` scalars. + // in turn, both the sender and receiver will obtain `OTWidth` shares _per_ slot / bit of the cOT. + // by definition of the cOT, these "vectors of" scalars will add (componentwise) to the sender's original scalars. + OtWidth = 2 + + s = 80 // statistical security parameter. 
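+	// For concreteness: with Kappa = 256 and s = 80, L = 672 choice bits (COtBlockSizeBytes = 84),
+	// kappaOT = 336, and lPrime = 1008 bits (cOtExtendedBlockSizeBytes = 126).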
+ kappaOT = Kappa + s + lPrime = L + kappaOT // length of pseudorandom seed expansion, used within cOT protocol + cOtExtendedBlockSizeBytes = lPrime >> 3 +) + +type Receiver struct { + // OutputAdditiveShares are the ultimate output received. basically just the "pads". + OutputAdditiveShares [L][OtWidth]curves.Scalar + + // seedOtResults are the results that this party has received by playing the sender role in a base OT protocol. + seedOtResults *simplest.SenderOutput + + // extendedPackedChoices is storage for "choice vector || gamma^{ext}" in a packed format. + extendedPackedChoices [cOtExtendedBlockSizeBytes]byte + psi [lPrime][KappaBytes]byte // transpose of v^0. gets retained between messages + + curve *curves.Curve + uniqueSessionId [simplest.DigestSize]byte // store this between rounds +} + +type Sender struct { + // OutputAdditiveShares are the ultimate output received. basically just the "pads". + OutputAdditiveShares [L][OtWidth]curves.Scalar + + // seedOtResults are the results that this party has received by playing the receiver role in a base OT protocol. + seedOtResults *simplest.ReceiverOutput + + curve *curves.Curve +} + +func binaryFieldMul(A []byte, B []byte) []byte { + // multiplies `A` and `B` in the finite field of order 2^256. + // The reference is Hankerson, Vanstone and Menezes, Guide to Elliptic Curve Cryptography. https://link.springer.com/book/10.1007/b97644 + // `A` and `B` are both assumed to be 32-bytes slices. here we view them as little-endian coordinate representations of degree-255 polynomials. + // the multiplication takes place modulo the irreducible (over F_2) polynomial f(X) = X^256 + X^10 + X^5 + X^2 + 1. see Table A.1. + // the techniques we use are given in section 2.3, Binary field arithmetic. + // for the multiplication part, we use Algorithm 2.34, "Right-to-left comb method for polynomial multiplication". + // for the reduction part, we use a variant of the idea of Figure 2.9, customized to our setting. + const W = 64 // the machine word width, in bits. + const t = 4 // the number of words needed to represent a polynomial. + c := make([]uint64, 2*t) // result + a := make([]uint64, t) + b := make([]uint64, t+1) // will hold a copy of b, shifted by some amount + for i := 0; i < 32; i++ { // "condense" `A` and `B` into word-vectors, instead of byte-vectors + a[i>>3] |= uint64(A[i]) << (i & 0x07 << 3) + b[i>>3] |= uint64(B[i]) << (i & 0x07 << 3) + } + for k := 0; k < W; k++ { + for j := 0; j < t; j++ { + // conditionally add a copy of (the appropriately shifted) B to C, depending on the appropriate bit of A + // do this in constant-time; i.e., independent of A. + // technically, in each time we call this, the right-hand argument is a public datum, + // so we could arrange things so that it's _not_ constant-time, but the variable-time stuff always depends on something public. + // better to just be safe here though and make it constant-time anyway. + mask := -(a[j] >> k & 0x01) // if A[j] >> k & 0x01 == 1 then 0xFFFFFFFFFFFFFFFF else 0x0000000000000000 + for i := 0; i < t+1; i++ { + c[j+i] ^= b[i] & mask // conditionally add B to C{j} + } + } + for i := t; i > 0; i-- { + b[i] = b[i]<<1 | b[i-1]>>63 + } + b[0] <<= 1 + } + // multiplication complete; begin reduction. 
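+	// Concretely, the reduction uses X^256 ≡ X^10 + X^5 + X^2 + 1 (mod f): each word c[i] holding
+	// degrees >= 256 is folded back into c[i-4] shifted left by 10, 5, 2 and 0 bits, with the bits
+	// that overflow a 64-bit word carried into c[i-3].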
+ // things become actually somewhat simpler in our case, because the degree of the polynomial is a multiple of the word size + // the technique to come up with the numbers below comes essentially from going through the exact same process as on page 54, + // but with the polynomial f(X) = X^256 + X^10 + X^5 + X^2 + 1 above instead, and with parameters m = 256, W = 64, t = 4. + // the idea is exactly as described informally on that page, even though this particular polynomial isn't explicitly treated. + for i := 2*t - 1; i >= t; i-- { + c[i-4] ^= c[i] << 10 + c[i-3] ^= c[i] >> 54 + c[i-4] ^= c[i] << 5 + c[i-3] ^= c[i] >> 59 + c[i-4] ^= c[i] << 2 + c[i-3] ^= c[i] >> 62 + c[i-4] ^= c[i] + } + C := make([]byte, 32) + for i := 0; i < 32; i++ { + C[i] = byte(c[i>>3] >> (i & 0x07 << 3)) // truncate word to byte + } + return C +} + +// NewCOtReceiver creates a `Receiver` instance, ready for use as the receiver in the KOS cOT protocol +// you must supply the output gotten by running an instance of seed OT as the _sender_ (note the reversal of roles) +func NewCOtReceiver(seedOTResults *simplest.SenderOutput, curve *curves.Curve) *Receiver { + return &Receiver{ + seedOtResults: seedOTResults, + curve: curve, + } +} + +// NewCOtSender creates a `Sender` instance, ready for use as the sender in the KOS cOT protocol. +// you must supply the output gotten by running an instance of seed OT as the _receiver_ (note the reversal of roles) +func NewCOtSender(seedOTResults *simplest.ReceiverOutput, curve *curves.Curve) *Sender { + return &Sender{ + seedOtResults: seedOTResults, + curve: curve, + } +} + +// Round1Output is Bob's first message to Alice during cOT extension; +// these outputs are described in step 4) of Protocol 9) https://eprint.iacr.org/2018/499.pdf +type Round1Output struct { + U [Kappa][cOtExtendedBlockSizeBytes]byte + WPrime [simplest.DigestSize]byte + VPrime [simplest.DigestSize]byte +} + +// Round2Output this is Alice's response to Bob in cOT extension; +// the values `tau` are specified in Alice's step 6) of Protocol 9) https://eprint.iacr.org/2018/499.pdf +type Round2Output struct { + Tau [L][OtWidth]curves.Scalar +} + +// convertBitToBitmask converts a "bit"---i.e., a `byte` which is _assumed to be_ either 0 or 1---into a bitmask, +// namely, it outputs 0x00 if `bit == 0` and 0xFF if `bit == 1`. +func convertBitToBitmask(bit byte) byte { + return ^(bit - 0x01) +} + +// the below code takes as input a `kappa` by `lPrime` _boolean_ matrix, whose rows are actually "compacted" as bytes. +// so in actuality, it's a `kappa` by `lPrime >> 3 == cOtExtendedBlockSizeBytes` matrix of _bytes_. +// its output is the same boolean matrix, but transposed, so it has dimensions `lPrime` by `kappa`. +// but likewise we want to compact the output matrix as bytes, again _row-wise_. +// so the output matrix's dimensions are lPrime by `kappa >> 3 == KappaBytes`, as a _byte_ matrix. +// the technique is fairly straightforward, but involves some bitwise operations. 
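A test-style sketch of the property this transpose is meant to satisfy: bit (c, r) of the output equals bit (r, c) of the input, with both matrices packed row-wise and bits read little-endian. Since `transposeBooleanMatrix` and the size constants are unexported, such a check would have to live alongside the code in package `kos`; the test name is illustrative and not part of this change:

```go
package kos

import (
	"crypto/rand"
	"testing"

	"github.com/stretchr/testify/require"

	"github.com/sonr-io/sonr/crypto/ot/base/simplest"
)

// TestTransposeProperty checks the packed transpose bit-for-bit on random input.
func TestTransposeProperty(t *testing.T) {
	var input [Kappa][cOtExtendedBlockSizeBytes]byte
	for i := range input {
		_, err := rand.Read(input[i][:])
		require.NoError(t, err)
	}
	output := transposeBooleanMatrix(input)
	for r := 0; r < Kappa; r++ {
		for c := 0; c < lPrime; c++ {
			in := simplest.ExtractBitFromByteVector(input[r][:], c)
			out := simplest.ExtractBitFromByteVector(output[c][:], r)
			require.Equal(t, in, out)
		}
	}
}
```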
+func transposeBooleanMatrix(input [Kappa][cOtExtendedBlockSizeBytes]byte) [lPrime][KappaBytes]byte { + output := [lPrime][KappaBytes]byte{} + for rowByte := 0; rowByte < KappaBytes; rowByte++ { + for rowBitWithinByte := 0; rowBitWithinByte < 8; rowBitWithinByte++ { + for columnByte := 0; columnByte < cOtExtendedBlockSizeBytes; columnByte++ { + for columnBitWithinByte := 0; columnBitWithinByte < 8; columnBitWithinByte++ { + rowBit := rowByte<<3 + rowBitWithinByte + columnBit := columnByte<<3 + columnBitWithinByte + // the below code grabs the _bit_ at input[rowBit][columnBit], if input were a viewed as a boolean matrix. + // in reality, it's packed into bytes, so instead we have to grab the `columnBitWithinByte`th bit within the appropriate byte. + bitAtInputRowBitColumnBit := input[rowBit][columnByte] >> columnBitWithinByte & 0x01 + // now that we've grabbed the bit we care about, we need to write it into the appropriate place in the output matrix + // the output matrix is also packed---but in the "opposite" way (the short dimension is packed, instead of the long one) + // what we're going to do is take the _bit_ we got, and shift it by rowBitWithinByte. + // this has the effect of preparing for us to write it into the appropriate place into the output matrix. + shiftedBit := bitAtInputRowBitColumnBit << rowBitWithinByte + output[columnBit][rowByte] |= shiftedBit + } + } + } + } + return output +} + +// Round1Initialize initializes the OT Extension. see page 17, steps 1), 2), 3) and 4) of Protocol 9 of the paper. +// The input `choice` vector is "packed" (i.e., the underlying abstract vector of `L` bits is represented as a `cOTBlockSizeBytes` bytes). +func (receiver *Receiver) Round1Initialize( + uniqueSessionId [simplest.DigestSize]byte, + choice [COtBlockSizeBytes]byte, +) (*Round1Output, error) { + // salt the transcript with the OT-extension session ID + receiver.uniqueSessionId = uniqueSessionId + + // write the input choice vector into our local data. Since `otBatchSize` is the number of bits, we are working with + // bytes, we first need to calculate how many bytes are needed to store that many bits. + copy(receiver.extendedPackedChoices[0:COtBlockSizeBytes], choice[:]) + + // Fill the rest of the extended choice vector with random values. These random values correspond to `gamma^{ext}`. + if _, err := rand.Read(receiver.extendedPackedChoices[COtBlockSizeBytes:]); err != nil { + return nil, errors.Wrap(err, "sampling random coins for gamma^{ext}") + } + + v := [2][Kappa][cOtExtendedBlockSizeBytes]byte{} // kappa * L array of _bits_, in "dense" form. contains _both_ v_0 and v_1. + result := &Round1Output{} + + hash := sha3.New256() // basically this will contain a hash of the matrix U. + for i := 0; i < Kappa; i++ { + for j := 0; j < 2; j++ { + shake := sha3.NewCShake256(uniqueSessionId[:], []byte("Coinbase_DKLs_cOT")) + if _, err := shake.Write(receiver.seedOtResults.OneTimePadEncryptionKeys[i][j][:]); err != nil { + return nil, errors.Wrap(err, "writing seed OT into shake in cOT receiver round 1") + } + // this is the core pseudorandom expansion of the secret OT input seeds s_i^0 and s_i^1 + // see Extension, 2), in Protocol 9, page 17 of DKLs https://eprint.iacr.org/2018/499.pdf + // use the uniqueSessionId as the "domain separator", and the _secret_ seed rho as the input! 
+ if _, err := shake.Read(v[j][i][:]); err != nil { + return nil, errors.Wrap( + err, + "reading from shake to compute v^j in cOT receiver round 1", + ) + } + } + for j := 0; j < cOtExtendedBlockSizeBytes; j++ { + result.U[i][j] = v[0][i][j] ^ v[1][i][j] ^ receiver.extendedPackedChoices[j] + // U := v_i^0 ^ v_i^1 ^ w. note: in step 4) of Prot. 9, i think `w` should be bolded? + } + if _, err := hash.Write(result.U[i][:]); err != nil { + return nil, err + } + } + receiver.psi = transposeBooleanMatrix(v[0]) + digest := hash.Sum( + nil, + ) // go ahead and record this, so that we only have to hash the big matrix U once. + for j := 0; j < lPrime; j++ { + hash = sha3.New256() + jBytes := [2]byte{} + binary.BigEndian.PutUint16(jBytes[:], uint16(j)) + if _, err := hash.Write(jBytes[:]); err != nil { // write j into shake + return nil, errors.Wrap( + err, + "writing nonce into hash while computing chiJ in cOT receiver round 1", + ) + } + if _, err := hash.Write(digest); err != nil { + return nil, errors.Wrap( + err, + "writing input digest into hash while computing chiJ in cOT receiver round 1", + ) + } + chiJ := hash.Sum(nil) + wJ := convertBitToBitmask( + simplest.ExtractBitFromByteVector(receiver.extendedPackedChoices[:], j), + ) // extract j^th bit from vector of bytes w. + psiJTimesChiJ := binaryFieldMul(receiver.psi[j][:], chiJ) + for k := 0; k < KappaBytes; k++ { + result.WPrime[k] ^= wJ & chiJ[k] + result.VPrime[k] ^= psiJTimesChiJ[k] + } + } + return result, nil +} + +// Round2Transfer computes the OT sender ("Alice")'s part of cOT; this includes steps 2) 5) and 6) of Protocol 9 +// `input` is the sender's main vector of inputs alpha_j; these are the things tA_j and tB_j will add to if w_j == 1. +// `message` contains the message the receiver ("Bob") sent us. this itself contains Bob's values WPrime, VPrime, and U +// the output is just the values `Tau` we send back to Bob. +// as a side effect of this function, our (i.e., the sender's) outputs tA_j from the cOT will be populated. +func (sender *Sender) Round2Transfer( + uniqueSessionId [simplest.DigestSize]byte, + input [L][OtWidth]curves.Scalar, + round1Output *Round1Output, +) (*Round2Output, error) { + z := [Kappa][cOtExtendedBlockSizeBytes]byte{} + hash := sha3.New256() // basically this will contain a hash of the matrix U. + + for i := 0; i < Kappa; i++ { + v := make( + []byte, + cOtExtendedBlockSizeBytes, + ) // will contain alice's expanded PRG output for the row i, namely v_i^{\Nabla_i}. + shake := sha3.NewCShake256(uniqueSessionId[:], []byte("Coinbase_DKLs_cOT")) + if _, err := shake.Write(sender.seedOtResults.OneTimePadDecryptionKey[i][:]); err != nil { + return nil, errors.Wrap( + err, + "sender writing seed OT decryption key into shake in sender round 2 transfer", + ) + } + if _, err := shake.Read(v); err != nil { + return nil, errors.Wrap( + err, + "reading from shake into row `v` in sender round 2 transfer", + ) + } + // use the idExt as the domain separator, and the _secret_ seed rho as the input! + mask := convertBitToBitmask(byte(sender.seedOtResults.RandomChoiceBits[i])) + for j := 0; j < cOtExtendedBlockSizeBytes; j++ { + z[i][j] = v[j] ^ mask&round1Output.U[i][j] + } + if _, err := hash.Write(round1Output.U[i][:]); err != nil { + return nil, errors.Wrap(err, "writing matrix U to hash in cOT sender round 2 transfer") + } + } + zeta := transposeBooleanMatrix(z) + digest := hash.Sum( + nil, + ) // go ahead and record this, so that we only have to hash the big matrix U once. 
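+	// The block below is the sender's KOS consistency check from Protocol 9: recompute z' as the
+	// XOR over j of chi_j * zeta_j in GF(2^256) and check it equals v' XOR (nabla * w'), where
+	// nabla is the sender's packed random choice bits from the seed OT. A mismatch indicates a
+	// cheating receiver and the transfer aborts.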
+ zPrime := [simplest.DigestSize]byte{} + for j := 0; j < lPrime; j++ { + hash = sha3.New256() + jBytes := [2]byte{} + binary.BigEndian.PutUint16(jBytes[:], uint16(j)) + if _, err := hash.Write(jBytes[:]); err != nil { // write j into hash + return nil, errors.Wrap( + err, + "writing nonce into hash while computing chiJ in cOT sender round 2 transfer", + ) + } + if _, err := hash.Write(digest); err != nil { + return nil, errors.Wrap( + err, + "writing input digest into hash while computing chiJ in cOT sender round 2 transfer", + ) + } + chiJ := hash.Sum(nil) + zetaJTimesChiJ := binaryFieldMul(zeta[j][:], chiJ) + for k := 0; k < KappaBytes; k++ { + zPrime[k] ^= zetaJTimesChiJ[k] + } + } + rhs := [simplest.DigestSize]byte{} + nablaTimesWPrime := binaryFieldMul( + sender.seedOtResults.PackedRandomChoiceBits, + round1Output.WPrime[:], + ) + for i := 0; i < KappaBytes; i++ { + rhs[i] = round1Output.VPrime[i] ^ nablaTimesWPrime[i] + } + if subtle.ConstantTimeCompare(zPrime[:], rhs[:]) != 1 { + return nil, fmt.Errorf( + "cOT receiver's consistency check failed; this may be an attempted attack; do NOT re-run the protocol", + ) + } + result := &Round2Output{} + for j := 0; j < L; j++ { + column := make([]byte, OtWidth*simplest.DigestSize) + shake := sha3.NewCShake256(uniqueSessionId[:], []byte("Coinbase_DKLs_cOT")) + jBytes := [2]byte{} + binary.BigEndian.PutUint16(jBytes[:], uint16(j)) + if _, err := shake.Write(jBytes[:]); err != nil { // write j into hash + return nil, errors.Wrap( + err, + "writing nonce into shake while computing OutputAdditiveShares in cOT sender round 2 transfer", + ) + } + if _, err := shake.Write(zeta[j][:]); err != nil { + return nil, errors.Wrap( + err, + "writing input zeta_j into shake while computing OutputAdditiveShares in cOT sender round 2 transfer", + ) + } + if _, err := shake.Read(column[:]); err != nil { + return nil, errors.Wrap( + err, + "reading shake into column while computing OutputAdditiveShares in cOT sender round 2 transfer", + ) + } + var err error + for k := 0; k < OtWidth; k++ { + sender.OutputAdditiveShares[j][k], err = sender.curve.Scalar.SetBytes( + column[k*simplest.DigestSize : (k+1)*simplest.DigestSize], + ) + if err != nil { + return nil, errors.Wrap(err, "OutputAdditiveShares scalar from bytes") + } + } + for i := 0; i < KappaBytes; i++ { + zeta[j][i] ^= sender.seedOtResults.PackedRandomChoiceBits[i] // note: overwrites zeta_j. 
just using it as a place to store + } + column = make([]byte, OtWidth*simplest.DigestSize) + shake = sha3.NewCShake256(uniqueSessionId[:], []byte("Coinbase_DKLs_cOT")) + binary.BigEndian.PutUint16(jBytes[:], uint16(j)) + if _, err := shake.Write(jBytes[:]); err != nil { // write j into hash + return nil, errors.Wrap( + err, + "writing nonce into shake while computing tau in cOT sender round 2 transfer", + ) + } + if _, err := shake.Write(zeta[j][:]); err != nil { + return nil, errors.Wrap( + err, + "writing input zeta_j into shake while computing tau in cOT sender round 2 transfer", + ) + } + if _, err := shake.Read(column[:]); err != nil { + return nil, errors.Wrap( + err, + "reading shake into column while computing tau in cOT sender round 2 transfer", + ) + } + for k := 0; k < OtWidth; k++ { + result.Tau[j][k], err = sender.curve.Scalar.SetBytes( + column[k*simplest.DigestSize : (k+1)*simplest.DigestSize], + ) + if err != nil { + return nil, errors.Wrap(err, "scalar Tau from bytes") + } + result.Tau[j][k] = result.Tau[j][k].Sub(sender.OutputAdditiveShares[j][k]) + result.Tau[j][k] = result.Tau[j][k].Add(input[j][k]) + } + } + return result, nil +} + +// Round3Transfer does the receiver (Bob)'s step 7) of Protocol 9, namely the computation of the outputs tB. +func (receiver *Receiver) Round3Transfer(round2Output *Round2Output) error { + for j := 0; j < L; j++ { + column := make([]byte, OtWidth*simplest.DigestSize) + shake := sha3.NewCShake256(receiver.uniqueSessionId[:], []byte("Coinbase_DKLs_cOT")) + jBytes := [2]byte{} + binary.BigEndian.PutUint16(jBytes[:], uint16(j)) + if _, err := shake.Write(jBytes[:]); err != nil { // write j into hash + return errors.Wrap( + err, + "writing nonce into shake while computing tB in cOT receiver round 3 transfer", + ) + } + if _, err := shake.Write(receiver.psi[j][:]); err != nil { + return errors.Wrap( + err, + "writing input zeta_j into shake while computing tB in cOT receiver round 3 transfer", + ) + } + if _, err := shake.Read(column[:]); err != nil { + return errors.Wrap( + err, + "reading shake into column while computing tB in cOT receiver round 3 transfer", + ) + } + bit := int(simplest.ExtractBitFromByteVector(receiver.extendedPackedChoices[:], j)) + var err error + for k := 0; k < OtWidth; k++ { + receiver.OutputAdditiveShares[j][k], err = receiver.curve.Scalar.SetBytes( + column[k*simplest.DigestSize : (k+1)*simplest.DigestSize], + ) + if err != nil { + return errors.Wrap(err, "scalar output additive shares from bytes") + } + receiver.OutputAdditiveShares[j][k] = receiver.OutputAdditiveShares[j][k].Neg() + wj0 := receiver.OutputAdditiveShares[j][k].Bytes() + wj1 := receiver.OutputAdditiveShares[j][k].Add(round2Output.Tau[j][k]).Bytes() + subtle.ConstantTimeCopy(bit, wj0, wj1) + if receiver.OutputAdditiveShares[j][k], err = receiver.curve.Scalar.SetBytes(wj0); err != nil { + return errors.Wrap(err, "scalar output additive shares from bytes") + } + } + } + return nil +} diff --git a/ot/extension/kos/kos_test.go b/ot/extension/kos/kos_test.go new file mode 100644 index 0000000..5129982 --- /dev/null +++ b/ot/extension/kos/kos_test.go @@ -0,0 +1,153 @@ +package kos + +import ( + "crypto/rand" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/ot/base/simplest" + "github.com/sonr-io/sonr/crypto/ot/ottest" +) + +func TestBinaryMult(t *testing.T) { + for i := 0; i < 100; i++ { + temp := make([]byte, 32) + _, err := rand.Read(temp) + require.NoError(t, err) + 
expected := make([]byte, 32) + copy(expected, temp) + // this test is based on Fermat's little theorem. + // the multiplicative group of units of a finite field has order |F| - 1 + // (in fact, it's necessarily cyclic; see e.g. https://math.stackexchange.com/a/59911, but this test doesn't rely on that fact) + // thus raising any element to the |F|th power should yield that element itself. + // this is a good test because it relies on subtle facts about the field structure, and will fail if anything goes wrong. + for j := 0; j < 256; j++ { + expected = binaryFieldMul(expected, expected) + } + require.Equal(t, temp, expected) + } +} + +func TestCOTExtension(t *testing.T) { + curveInstances := []*curves.Curve{ + curves.K256(), + curves.P256(), + } + for _, curve := range curveInstances { + uniqueSessionId := [simplest.DigestSize]byte{} + _, err := rand.Read(uniqueSessionId[:]) + require.NoError(t, err) + baseOtSenderOutput, baseOtReceiverOutput, err := ottest.RunSimplestOT( + curve, + Kappa, + uniqueSessionId, + ) + require.NoError(t, err) + for i := 0; i < Kappa; i++ { + require.Equal( + t, + baseOtReceiverOutput.OneTimePadDecryptionKey[i], + baseOtSenderOutput.OneTimePadEncryptionKeys[i][baseOtReceiverOutput.RandomChoiceBits[i]], + ) + } + + sender := NewCOtSender(baseOtReceiverOutput, curve) + receiver := NewCOtReceiver(baseOtSenderOutput, curve) + choice := [COtBlockSizeBytes]byte{} // receiver's input, namely choice vector. just random + _, err = rand.Read(choice[:]) + require.NoError(t, err) + input := [L][OtWidth]curves.Scalar{} // sender's input, namely integer "sums" in case w_j == 1. + for i := 0; i < L; i++ { + for j := 0; j < OtWidth; j++ { + input[i][j] = curve.Scalar.Random(rand.Reader) + require.NoError(t, err) + } + } + firstMessage, err := receiver.Round1Initialize(uniqueSessionId, choice) + require.NoError(t, err) + responseTau, err := sender.Round2Transfer(uniqueSessionId, input, firstMessage) + require.NoError(t, err) + err = receiver.Round3Transfer(responseTau) + require.NoError(t, err) + for j := 0; j < L; j++ { + bit := simplest.ExtractBitFromByteVector(choice[:], j) == 1 + for k := 0; k < OtWidth; k++ { + temp := sender.OutputAdditiveShares[j][k].Add(receiver.OutputAdditiveShares[j][k]) + if bit { + require.Equal(t, temp, input[j][k]) + } else { + require.Equal(t, temp, curve.Scalar.Zero()) + } + } + } + } +} + +func TestCOTExtensionStreaming(t *testing.T) { + curve := curves.K256() + hashKeySeed := [simplest.DigestSize]byte{} + _, err := rand.Read(hashKeySeed[:]) + require.NoError(t, err) + baseOtReceiver, err := simplest.NewReceiver(curve, Kappa, hashKeySeed) + require.NoError(t, err) + sender := NewCOtSender(baseOtReceiver.Output, curve) + baseOtSender, err := simplest.NewSender(curve, Kappa, hashKeySeed) + require.NoError(t, err) + receiver := NewCOtReceiver(baseOtSender.Output, curve) + + // first run the seed OT + senderPipe, receiverPipe := simplest.NewPipeWrappers() + errorsChannel := make(chan error, 2) + go func() { + errorsChannel <- simplest.SenderStreamOTRun(baseOtSender, senderPipe) + }() + go func() { + errorsChannel <- simplest.ReceiverStreamOTRun(baseOtReceiver, receiverPipe) + }() + for i := 0; i < 2; i++ { + require.Nil(t, <-errorsChannel) + } + for i := 0; i < Kappa; i++ { + require.Equal( + t, + baseOtReceiver.Output.OneTimePadDecryptionKey[i], + baseOtSender.Output.OneTimePadEncryptionKeys[i][baseOtReceiver.Output.RandomChoiceBits[i]], + ) + } + + // begin test of cOT extension. 
first populate both parties' inputs randomly + choice := [COtBlockSizeBytes]byte{} // receiver's input, namely choice vector. just random + _, err = rand.Read(choice[:]) + require.NoError(t, err) + input := [L][OtWidth]curves.Scalar{} // sender's input, namely integer "sums" in case w_j == 1. random for the test + for i := 0; i < L; i++ { + for j := 0; j < OtWidth; j++ { + input[i][j] = curve.Scalar.Random(rand.Reader) + require.NoError(t, err) + } + } + + // now actually run it, stream-wise + go func() { + errorsChannel <- SenderStreamCOtRun(sender, hashKeySeed, input, receiverPipe) + }() + go func() { + errorsChannel <- ReceiverStreamCOtRun(receiver, hashKeySeed, choice, senderPipe) + }() + for i := 0; i < 2; i++ { + require.Nil(t, <-errorsChannel) + } + for j := 0; j < L; j++ { + bit := simplest.ExtractBitFromByteVector(choice[:], j) == 1 + for k := 0; k < OtWidth; k++ { + temp := sender.OutputAdditiveShares[j][k].Add(receiver.OutputAdditiveShares[j][k]) + if bit { + require.Equal(t, temp, input[j][k]) + } else { + require.Equal(t, temp, curve.Scalar.Zero()) + } + } + } +} diff --git a/ot/extension/kos/stream.go b/ot/extension/kos/stream.go new file mode 100644 index 0000000..1081383 --- /dev/null +++ b/ot/extension/kos/stream.go @@ -0,0 +1,67 @@ +package kos + +import ( + "encoding/gob" + "io" + + "github.com/pkg/errors" + + "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/ot/base/simplest" +) + +// ReceiverStreamCOtRun exposes an end-to-end "streaming" version of the cOT process for the receiver. +// this is similar to what we're also doing in the base OT side. the user only passes an arbitrary `ReadWriter` here, +// together with the relevant inputs (namely a choice vector); this method handles all parts of the process, +// including both encoding / decoding and writing to / reading from the stream. +func ReceiverStreamCOtRun( + receiver *Receiver, + hashKeySeed [simplest.DigestSize]byte, + choice [COtBlockSizeBytes]byte, + rw io.ReadWriter, +) error { + enc := gob.NewEncoder(rw) + dec := gob.NewDecoder(rw) + + firstMessage, err := receiver.Round1Initialize(hashKeySeed, choice) + if err != nil { + return errors.Wrap(err, "computing first message in receiver stream cOT") + } + if err = enc.Encode(firstMessage); err != nil { + return errors.Wrap(err, "encoding first message in receiver stream cOT") + } + responseTau := &Round2Output{} + if err = dec.Decode(responseTau); err != nil { + return errors.Wrap(err, "decoding responseTau in receiver stream OT") + } + if err = receiver.Round3Transfer(responseTau); err != nil { + return errors.Wrap(err, "error during round 3 in receiver stream OT") + } + return nil +} + +// SenderStreamCOtRun exposes the end-to-end "streaming" version of cOT for the sender. +// the sender should pass an arbitrary ReadWriter together with their input; this will handle the whole process, +// including all component methods, plus reading to and writing from the network. 
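Because both stream helpers only require an `io.ReadWriter`, they can be driven over a real connection just as easily as over the in-memory pipes used in the tests. A hedged sketch of the cOT receiver's side; the address is illustrative, the peer is assumed to run the complementary sender side over the same connection, and in practice both parties must agree on the session id out of band:

```go
package main

import (
	"crypto/rand"
	"log"
	"net"

	"github.com/sonr-io/sonr/crypto/core/curves"
	"github.com/sonr-io/sonr/crypto/ot/base/simplest"
	"github.com/sonr-io/sonr/crypto/ot/extension/kos"
)

func main() {
	curve := curves.K256()
	// Generated locally only to keep the sketch self-contained; a real deployment agrees on it.
	sessionID := [simplest.DigestSize]byte{}
	if _, err := rand.Read(sessionID[:]); err != nil {
		log.Fatal(err)
	}

	conn, err := net.Dial("tcp", "alice.example:9000") // illustrative address
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	// Run the seed OT over the same connection, playing the *sender* role,
	// since the cOT receiver consumes a simplest.SenderOutput (note the role reversal).
	baseOtSender, err := simplest.NewSender(curve, kos.Kappa, sessionID)
	if err != nil {
		log.Fatal(err)
	}
	if err := simplest.SenderStreamOTRun(baseOtSender, conn); err != nil {
		log.Fatal(err)
	}

	// Now run the cOT extension as the receiver with a random choice vector.
	receiver := kos.NewCOtReceiver(baseOtSender.Output, curve)
	choice := [kos.COtBlockSizeBytes]byte{}
	if _, err := rand.Read(choice[:]); err != nil {
		log.Fatal(err)
	}
	if err := kos.ReceiverStreamCOtRun(receiver, sessionID, choice, conn); err != nil {
		log.Fatal(err)
	}
	// receiver.OutputAdditiveShares now holds this party's additive shares.
}
```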
+func SenderStreamCOtRun( + sender *Sender, + hashKeySeed [simplest.DigestSize]byte, + input [L][OtWidth]curves.Scalar, + rw io.ReadWriter, +) error { + enc := gob.NewEncoder(rw) + dec := gob.NewDecoder(rw) + + firstMessage := &Round1Output{} + if err := dec.Decode(firstMessage); err != nil { + return errors.Wrap(err, "decoding first message in sender stream cOT") + } + responseTau, err := sender.Round2Transfer(hashKeySeed, input, firstMessage) + if err != nil { + return errors.Wrap(err, "error in round 2 in sender stream cOT") + } + if err = enc.Encode(responseTau); err != nil { + return errors.Wrap(err, "encoding responseTau in sender stream cOT") + } + return nil +} diff --git a/ot/ottest/util.go b/ot/ottest/util.go new file mode 100644 index 0000000..f075c59 --- /dev/null +++ b/ot/ottest/util.go @@ -0,0 +1,54 @@ +// Package ottest contains some utilities to test ot functions. The main goal is to reduce the code duplication in +// various other packages that need to run an OT in their test setup stage. +package ottest + +import ( + "github.com/pkg/errors" + + "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/ot/base/simplest" +) + +// RunSimplestOT is a utility function used _only_ during various tests. +// essentially, it encapsulates the entire process of running a base OT, so that other tests can use it / bootstrap themselves. +// it handles the creation of the base OT sender and receiver, as well as orchestrates the rounds on them; +// it returns their outsputs, so that others can use them. +func RunSimplestOT( + curve *curves.Curve, + batchSize int, + uniqueSessionId [simplest.DigestSize]byte, +) (*simplest.SenderOutput, *simplest.ReceiverOutput, error) { + receiver, err := simplest.NewReceiver(curve, batchSize, uniqueSessionId) + if err != nil { + return nil, nil, errors.Wrap(err, "constructing OT receiver in run simplest OT") + } + sender, err := simplest.NewSender(curve, batchSize, uniqueSessionId) + if err != nil { + return nil, nil, errors.Wrap(err, "constructing OT sender in run simplest OT") + } + proof, err := sender.Round1ComputeAndZkpToPublicKey() + if err != nil { + return nil, nil, errors.Wrap(err, "sender round 1 in run simplest OT") + } + receiversMaskedChoice, err := receiver.Round2VerifySchnorrAndPadTransfer(proof) + if err != nil { + return nil, nil, errors.Wrap(err, "receiver round 2 in run simplest OT") + } + challenge, err := sender.Round3PadTransfer(receiversMaskedChoice) + if err != nil { + return nil, nil, errors.Wrap(err, "sender round 3 in run simplest OT") + } + challengeResponse, err := receiver.Round4RespondToChallenge(challenge) + if err != nil { + return nil, nil, errors.Wrap(err, "receiver round 4 in run simplest OT") + } + challengeOpenings, err := sender.Round5Verify(challengeResponse) + if err != nil { + return nil, nil, errors.Wrap(err, "sender round 5 in run simplest OT") + } + err = receiver.Round6Verify(challengeOpenings) + if err != nil { + return nil, nil, errors.Wrap(err, "receiver round 6 in run simplest OT") + } + return sender.Output, receiver.Output, nil +} diff --git a/paillier/README.md b/paillier/README.md new file mode 100755 index 0000000..f59761c --- /dev/null +++ b/paillier/README.md @@ -0,0 +1,23 @@ +--- +aliases: [README] +tags: [] +title: README +linter-yaml-title-alias: README +date created: Wednesday, April 17th 2024, 4:11:40 pm +date modified: Thursday, April 18th 2024, 8:19:25 am +--- + +## Paillier Cryptosystem + +Package paillier contains [Paillier's cryptosystem 
(1999)](http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.112.4035&rep=rep1&type=pdf). +All routines here from pseudocode §2.5. Fig 1: The Paillier Cryptosystem. + +This module provides APIs for: + +- generating a safe key pair +- encryption and decryption +- adding two encrypted values, `Enc(a)` and `Enc(b)`, and obtaining `Enc(a + b)`, and +- multiplying a plain value, `a`, and an encrypted value `Enc(b)`, and obtaining `Enc(a * b)`. + +The encrypted values are represented as `big.Int` and are serializable. +This module also provides JSON serialization for the PublicKey and the SecretKey. diff --git a/paillier/paillier.go b/paillier/paillier.go new file mode 100644 index 0000000..c646cb5 --- /dev/null +++ b/paillier/paillier.go @@ -0,0 +1,377 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +// Package paillier contains Paillier's cryptosystem (1999) [P99]. +// Public-Key Cryptosystems Based on Composite Degree Residuosity Class. +// http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.112.4035&rep=rep1&type=pdf +// All routines here from pseudocode §2.5. Fig 1: The Paillier Cryptosystem. +// +// This module provides APIs for: +// +// - generating a safe keypair, +// - encryption and decryption, +// - adding two encrypted values, Enc(a) and Enc(b), and obtaining Enc(a + b), and +// - multiplying a plain value, a, and an encrypted value Enc(b), and obtaining Enc(a * b). +// +// The encrypted values are represented as big.Int and are serializable. This module also provides +// JSON serialization for the PublicKey and the SecretKey. +package paillier + +import ( + "encoding/json" + "fmt" + "math/big" + + "github.com/pkg/errors" + + "github.com/sonr-io/sonr/crypto/core" + "github.com/sonr-io/sonr/crypto/internal" +) + +// PaillierPrimeBits is the number of bits used to generate Paillier Safe Primes. +const PaillierPrimeBits = 1024 + +type ( + // PublicKey is a Paillier public key: N = P*Q; for safe primes P,Q. + PublicKey struct { + N *big.Int // N = PQ + N2 *big.Int // N² computed and cached to prevent re-computation. + } + + // PublicKeyJson encapsulates the data that is serialized to JSON. + // It is used internally and not for external use. Public so other pieces + // can use for serialization. + PublicKeyJson struct { + N *big.Int + } + + // SecretKey is a Paillier secret key. + SecretKey struct { + PublicKey + Lambda *big.Int // lcm(P - 1, Q - 1) + Totient *big.Int // Euler's totient: (P - 1) * (Q - 1) + U *big.Int // L((N + 1)^λ(N) mod N²)−1 mod N + } + + // SecretKeyJson encapsulates the data that is serialized to JSON. + // It is used internally and not for external use. Public so other pieces + // can use for serialization. + SecretKeyJson struct { + N, Lambda, Totient, U *big.Int + } + + // Ciphertext in Pailler's cryptosystem: a value $c \in Z_{N²}$ . + Ciphertext *big.Int +) + +// NewKeys generates Paillier keys with `bits` sized safe primes. +func NewKeys() (*PublicKey, *SecretKey, error) { + return keyGenerator(core.GenerateSafePrime, PaillierPrimeBits) +} + +// keyGenerator generates Paillier keys with `bits` sized safe primes using function +// `genSafePrime` to generate the safe primes. 
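Because `keyGenerator` takes the safe-prime generator as a parameter, a test could swap in a cheap, deterministic source to keep key generation fast. A hypothetical sketch, assuming it lives in this package's own tests since `keyGenerator` is unexported; the stub uses ordinary (non-safe) primes and is never acceptable for production keys:

```go
package paillier

import (
	"crypto/rand"
	"math/big"
	"testing"

	"github.com/stretchr/testify/require"
)

func TestKeyGeneratorWithStubPrimes(t *testing.T) {
	// Insecure stub: ordinary 256-bit primes, used only to exercise the plumbing quickly.
	stubPrime := func(bits uint) (*big.Int, error) {
		return rand.Prime(rand.Reader, int(bits))
	}
	pk, sk, err := keyGenerator(stubPrime, 256)
	require.NoError(t, err)
	require.Equal(t, pk.N, sk.N)
	require.Equal(t, new(big.Int).Mul(pk.N, pk.N), pk.N2)
}
```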
+func keyGenerator( + genSafePrime func(uint) (*big.Int, error), + bits uint, +) (*PublicKey, *SecretKey, error) { + values := make(chan *big.Int, 2) + errors := make(chan error, 2) + + var p, q *big.Int + + for p == q { + for range []int{1, 2} { + go func() { + value, err := genSafePrime(bits) + values <- value + errors <- err + }() + } + + for _, err := range []error{<-errors, <-errors} { + if err != nil { + return nil, nil, err + } + } + + p, q = <-values, <-values + } + + // Assemble the secret/public key pair. + sk, err := NewSecretKey(p, q) + if err != nil { + return nil, nil, err + } + return &sk.PublicKey, sk, nil +} + +// NewSecretKey computes intermediate values based on safe primes p, q. +func NewSecretKey(p, q *big.Int) (*SecretKey, error) { + if p == nil || q == nil { + return nil, internal.ErrNilArguments + } + // Pre-compute necessary values. + pm1 := new(big.Int).Sub(p, core.One) // P - 1 + qm1 := new(big.Int).Sub(q, core.One) // Q - 1 + n := new(big.Int).Mul(p, q) // N = PQ + nn := new(big.Int).Mul(n, n) // N² + lambda, err := lcm(pm1, qm1) // λ(N) = lcm(P-1, Q-1) + if err != nil { + // Code coverage note: lcm returns error only if the inputs are nil, which can never happen here. + return nil, err + } + totient := new(big.Int).Mul(pm1, qm1) // 𝝋(N) = (P-1)(Q-1) + pk := PublicKey{ + N: n, + N2: nn, + } + + // (N+1)^λ(N) mod N² + t := new(big.Int).Add(n, core.One) + t.Exp(t, lambda, nn) + + // L((N+1)^λ(N) mod N²) + u, err := pk.l(t) + if err != nil { + return nil, err + } + // L((N+1)^λ(N) mod N²)^-1 mod N + u.ModInverse(u, n) + + return &SecretKey{pk, lambda, totient, u}, nil +} + +// MarshalJSON converts the public key into json format. +func (pk PublicKey) MarshalJSON() ([]byte, error) { + data := PublicKeyJson{pk.N} + return json.Marshal(data) +} + +// UnmarshalJSON converts the json data into this public key. +func (pk *PublicKey) UnmarshalJSON(bytes []byte) error { + data := new(PublicKeyJson) + if err := json.Unmarshal(bytes, data); err != nil { + return err + } + if data.N == nil { + return nil + } + pk.N = data.N + pk.N2 = new(big.Int).Mul(data.N, data.N) + return nil +} + +// lcm calculates the least common multiple. +func lcm(x, y *big.Int) (*big.Int, error) { + if x == nil || y == nil { + return nil, internal.ErrNilArguments + } + gcd := new(big.Int).GCD(nil, nil, x, y) + if core.ConstantTimeEq(gcd, core.Zero) { + return core.Zero, nil + } + // Compute least common multiple: https://en.wikipedia.org/wiki/Least_common_multiple#Calculation . + b := new(big.Int) + return b.Abs(b.Mul(b.Div(x, gcd), y)), nil +} + +// l computes a residuosity class of n^2: (x - 1) / n. +// Where it is the quotient x - 1 divided by n not modular multiplication of x - 1 times +// the modular multiplicative inverse of n. The function name comes from [P99]. +func (pk *PublicKey) l(x *big.Int) (*big.Int, error) { + if x == nil { + return nil, internal.ErrNilArguments + } + + if core.ConstantTimeEq(pk.N, core.Zero) { + return nil, internal.ErrNCannotBeZero + } + + // Ensure x = 1 mod N + if !core.ConstantTimeEq(new(big.Int).Mod(x, pk.N), core.One) { + return nil, internal.ErrResidueOne + } + + // Ensure x ∈ Z_N² + if err := core.In(x, pk.N2); err != nil { + return nil, err + } + + // (x - 1) / n + b := new(big.Int).Sub(x, core.One) + return b.Div(b, pk.N), nil +} + +// NewPubkey initializes a Paillier public key with a given n. 
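For intuition about the quantities NewSecretKey (above) precomputes, consider toy, non-safe, wildly insecure primes used purely for illustration: with p = 7 and q = 11, N = 77, λ(N) = lcm(6, 10) = 30, 𝝋(N) = 60; (N+1)^λ(N) ≡ 1 + λN ≡ 2311 (mod N²), so L(2311) = 2310/77 = 30 and U = 30⁻¹ mod 77 = 18.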
+func NewPubkey(n *big.Int) (*PublicKey, error) { + if n == nil { + return nil, errors.New("n cannot be nil") + } + return &PublicKey{ + N: n, + N2: new(big.Int).Mul(n, n), // Compute and cache N² + }, nil +} + +// Add combines two Paillier ciphertexts. +func (pk *PublicKey) Add(c, d Ciphertext) (Ciphertext, error) { + if c == nil || d == nil { + return nil, internal.ErrNilArguments + } + // Ensure c,d ∈ Z_N² + cErr := core.In(c, pk.N2) + dErr := core.In(d, pk.N2) + // Constant time error check + var err error + if cErr != nil { + err = cErr + } + if dErr != nil { + err = dErr + } + if err != nil { + return nil, err + } + + ctxt, err := core.Mul(c, d, pk.N2) + if err != nil { + // Code coverage note: core.Mul returns error only if the inputs are nil, which can never happen here. + return nil, err + } + return ctxt, nil +} + +// Mul is equivalent to adding two Paillier exponents. +func (pk *PublicKey) Mul(a *big.Int, c Ciphertext) (Ciphertext, error) { + if a == nil || c == nil { + return nil, internal.ErrNilArguments + } + + // Ensure a ∈ Z_N + aErr := core.In(a, pk.N) + // Ensure c ∈ Z_N² + cErr := core.In(c, pk.N2) + + var err error + + // Constant time error check + if aErr != nil { + err = aErr + } + if cErr != nil { + err = cErr + } + if err != nil { + return nil, err + } + return new(big.Int).Exp(c, a, pk.N2), nil +} + +// Encrypt produces a ciphertext on input message. +func (pk *PublicKey) Encrypt(msg *big.Int) (Ciphertext, *big.Int, error) { + // generate a nonce: r \in Z**_N + r, err := core.Rand(pk.N) + if err != nil { + return nil, nil, err + } + + // Generate and return the ciphertext + ct, err := pk.encrypt(msg, r) + return ct, r, err +} + +// encrypt produces a ciphertext on input a message and nonce. +func (pk *PublicKey) encrypt(msg, r *big.Int) (Ciphertext, error) { + if msg == nil || r == nil { + return nil, internal.ErrNilArguments + } + + // Ensure msg ∈ Z_N + if err := core.In(msg, pk.N); err != nil { + return nil, err + } + + // Ensure r ∈ Z^*_N: we use the method proved in docs/[EL20] + // ensure r ∈ Z^_N-{0} + if err := core.In(r, pk.N); err != nil { + return nil, err + } + if core.ConstantTimeEq(r, core.Zero) { + return nil, fmt.Errorf("r cannot be 0") + } + + // Compute the ciphertext components: ɑ, β + // ɑ = (N+1)^m (mod N²) + ɑ := new(big.Int).Add(pk.N, core.One) + ɑ.Exp(ɑ, msg, pk.N2) + β := new(big.Int).Exp(r, pk.N, pk.N2) // β = r^N (mod N²) + + // ciphertext = ɑ*β = (N+1)^m * r^N (mod N²) + c, err := core.Mul(ɑ, β, pk.N2) + if err != nil { + // Code coverage note: core.Mul returns error only if the inputs are nil, which can never happen here. + return nil, err + } + return c, nil +} + +// Decrypt is the reverse operation of Encrypt. +func (sk *SecretKey) Decrypt(c Ciphertext) (*big.Int, error) { + if c == nil { + return nil, internal.ErrNilArguments + } + + // Ensure C ∈ Z_N² + if err := core.In(c, sk.N2); err != nil { + return nil, err + } + + // Compute the msg in components + // ɑ ≡ c^{λ(N)} mod N² + ɑ := new(big.Int).Exp(c, sk.Lambda, sk.N2) + + // l = L(ɑ, N) + ell, err := sk.l(ɑ) + if err != nil { + return nil, err + } + + // Compute the msg + // m ≡ lu = L(ɑ)*u = L(c^{λ(N)})*u mod N + m, err := core.Mul(ell, sk.U, sk.N) + if err != nil { + return nil, err + } + return m, nil +} + +// MarshalJSON converts the secret key into json format. 
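A minimal usage sketch of the homomorphic operations documented above. Key generation searches for 1024-bit safe primes and can take a long time; the plaintext values are placeholders chosen only to show the flow:

```go
package main

import (
	"fmt"
	"log"
	"math/big"

	"github.com/sonr-io/sonr/crypto/paillier"
)

func main() {
	pk, sk, err := paillier.NewKeys() // generates 1024-bit safe primes; slow
	if err != nil {
		log.Fatal(err)
	}

	a, b := big.NewInt(21), big.NewInt(2)

	encA, _, err := pk.Encrypt(a)
	if err != nil {
		log.Fatal(err)
	}
	encB, _, err := pk.Encrypt(b)
	if err != nil {
		log.Fatal(err)
	}

	// Enc(a) combined with Enc(b) yields Enc(a + b).
	sum, err := pk.Add(encA, encB)
	if err != nil {
		log.Fatal(err)
	}
	// A plain scalar b applied to Enc(a) yields Enc(a * b).
	prod, err := pk.Mul(b, encA)
	if err != nil {
		log.Fatal(err)
	}

	plainSum, err := sk.Decrypt(sum)
	if err != nil {
		log.Fatal(err)
	}
	plainProd, err := sk.Decrypt(prod)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(plainSum, plainProd) // 23 42
}
```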
+func (sk SecretKey) MarshalJSON() ([]byte, error) { + data := SecretKeyJson{ + sk.N, + sk.Lambda, + sk.Totient, + sk.U, + } + return json.Marshal(data) +} + +// UnmarshalJSON converts the json data into this secret key. +func (sk *SecretKey) UnmarshalJSON(bytes []byte) error { + data := new(SecretKeyJson) + if err := json.Unmarshal(bytes, data); err != nil { + return err + } + if data.N != nil { + sk.N = data.N + sk.N2 = new(big.Int).Mul(data.N, data.N) + } + sk.U = data.U + sk.Totient = data.Totient + sk.Lambda = data.Lambda + return nil +} diff --git a/paillier/psf.go b/paillier/psf.go new file mode 100644 index 0000000..e512f18 --- /dev/null +++ b/paillier/psf.go @@ -0,0 +1,235 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// +// This file contains proofs that Paillier moduli are square-free: [spec] fig 15 + +package paillier + +import ( + "crypto/elliptic" + "fmt" + "math/big" + + crypto "github.com/sonr-io/sonr/crypto/core" + "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/internal" +) + +// [spec] 10.2 and ProvePSF, VerifyPSF fig.15 +const PsfProofLength = 13 + +// PsfProofParams contains the inputs to ProvePSF +type PsfProofParams struct { + Curve elliptic.Curve + SecretKey *SecretKey + Pi uint32 + Y *curves.EcPoint +} + +// PsfVerifyParams contains the inputs to VerifyPSF +type PsfVerifyParams struct { + Curve elliptic.Curve + PublicKey *PublicKey + Pi uint32 + Y *curves.EcPoint +} + +// PsfProof is a slice of 13 big.Int's that prove that a Paillier modulus is square-free +type PsfProof []*big.Int + +// Prove that a Paillier modulus is square-free +// [spec] §10.fig 15 +func (p *PsfProofParams) Prove() (PsfProof, error) { + // Verify that params are sane + if p.Curve == nil || + p.SecretKey == nil || + p.Pi == 0 || + p.Y == nil { + return nil, internal.ErrNilArguments + } + + // 1. ell = 13 + // Note this is set above as PsfProofLength + + // 2. M = N^{-1} mod \phi(N) + M, err := crypto.Inv(p.SecretKey.N, p.SecretKey.Totient) + if err != nil { + return nil, err + } + + // 3. [x_1, ..., x_ell] <- GenerateChallenges(g,q,y,Pi,ell) + // NOTE: spec doesn't include N, but it's an oversight--should be part of the + // commitment + x, err := generateChallenges(p.Curve.Params(), p.SecretKey.N, p.Pi, p.Y) + if err != nil { + return nil, err + } + if len(x) != PsfProofLength { + return nil, fmt.Errorf( + "challenges array is not correct length: want=%v got=%v", + PsfProofLength, + len(x), + ) + } + + // 4. For i = [1, ... \ell] + // NOTE: typo in spec: says j = ... but uses subscript i in loop + proof := make([]*big.Int, PsfProofLength) + for i, xj := range x { + // 5. Compute y_i = x_i^M mod N + // NOTE: the pseudocode shows mod phi(N) which is incorrect + // it should be mod N otherwise the reverse in Verify + // will fail. Using phi(N) puts M in the wrong group. + yi, err := crypto.Exp(xj, M, p.SecretKey.N) + if err != nil { + return nil, err + } + + // 6. Set \Pi = [y_1, ..., y_\ell] + // NOTE: typo in spec: says y_t not y_\ell + proof[i] = yi + } + + // 7. return \Pi + return proof, nil +} + +// Verify that a Paillier modulus is square-free +// [spec] §10.fig 15 +func (p PsfProof) Verify(psf *PsfVerifyParams) error { + // Verify that params are sane + if psf == nil || + psf.Curve == nil || + psf.PublicKey == nil || + psf.Pi == 0 || + psf.Y == nil { + return internal.ErrNilArguments + } + + // 1. ell = 13 + // Note this is set above as PsfProofLength + + // 2. 
t = 1000 + // NOTE not used anywhere + + // 3. if q|N return false + if new(big.Int).Mod(psf.PublicKey.N, psf.Curve.Params().N).Cmp(crypto.Zero) == 0 { + return fmt.Errorf("paillier public key is a multiple of the curve subgroup") + } + + // 4. [x_1, ..., x_ell] <- GenerateChallenges(g,q,y,Pi,ell) + // NOTE: spec doesn't include N, but it's an oversight--should be part of the + // commitment + x, err := generateChallenges(psf.Curve.Params(), psf.PublicKey.N, psf.Pi, psf.Y) + if err != nil { + return err + } + if len(x) != PsfProofLength { + return fmt.Errorf( + "challenges array is not correct length: want=%v got=%v", + PsfProofLength, + len(x), + ) + } + + // 5. for j in [1,...,l] + for j, xj := range x { + // 6. yj^N != x mod N return false + // NOTE: pseudocode uses i when loop uses j + lhs, err := crypto.Exp(p[j], psf.PublicKey.N, psf.PublicKey.N) + if err != nil { + return err + } + if lhs.Cmp(xj) != 0 { + return fmt.Errorf("not equal at %d", j) + } + } + + return nil +} + +// generateChallenges computes `l` deterministic numbers as +// challenges for PsfProof which proves that the Paillier modulus is square free +// [spec] fig.15 GenerateChallenges +func generateChallenges( + params *elliptic.CurveParams, + N *big.Int, + pi uint32, + y *curves.EcPoint, +) ([]*big.Int, error) { + if params == nil || + y == nil || + pi == 0 { + return nil, internal.ErrNilArguments + } + + // 1. Set b = |N| // bit length of N + b := N.BitLen() + + // a modulus that is too small turns this function into an infinite loop + // need at least a byte to guarantee termination + if b < 8 { + return nil, internal.ErrNilArguments + } + + // 2. h = output bit-length of fiat-shamir hash + // See util.fiatShamir which uses sha256 + // So the output bit-length is 256 bits + const h int = 256 + + // 3. Compute s = ⌈b/h⌉ // number of hash outputs required to obtain b bits + // i.e. the number of times we have to call fs-shamir to get the same bits as + // `b`. Compute ceil as ceilVal = (a+b-1) / b + s := int64((b + h - 1) / h) + + // 4. j = 0 + j := int64(0) + + // 5. m = 0 + m := big.NewInt(0) + + x := make([]*big.Int, PsfProofLength) + + Pi := new(big.Int).SetUint64(uint64(pi)) + // 6. while j ≤ l + for j < PsfProofLength { + + bij := big.NewInt(j) + var ej []byte + + // 7. for k = [1,...,s] + for k := int64(1); k <= s; k++ { + bik := big.NewInt(k) + + // 8. Compute e_jk = FS-HASH(g, q, y, p_i, j, k, m) + res, err := crypto.FiatShamir(params.Gx, params.Gy, params.N, y.X, y.Y, Pi, bij, bik, m) + if err != nil { + return nil, err + } + // 9. Set x_j = eJ1 || ... || eJs + // Pseudocode says to concatenate outside this loop + // however, we just concatenate the bytes now instead of storing as temporary + // variables + ej = append(ej, res...) + } + // 10. Truncate ej to b bits + xj := new(big.Int).SetBytes(ej[:b/8]) + + // 11. if x_j < Z_N* i.e. 0 < x_j and x_j < N + if xj.Cmp(crypto.Zero) == 1 && xj.Cmp(N) == -1 { + x[j] = xj + + // 12 j = j + 1 + j++ + // 13 m = 0 + m = big.NewInt(0) + // 14 else + } else { + // 15. Set m = m + 1 + m.Add(m, crypto.One) + } + } + return x, nil +} diff --git a/paillier/psf_test.go b/paillier/psf_test.go new file mode 100644 index 0000000..6974dee --- /dev/null +++ b/paillier/psf_test.go @@ -0,0 +1,394 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package paillier + +import ( + "crypto/elliptic" + "encoding/json" + "math/big" + "testing" + + "github.com/btcsuite/btcd/btcec/v2" + "github.com/stretchr/testify/require" + + crypto "github.com/sonr-io/sonr/crypto/core" + curves2 "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/internal" +) + +var testPrimes = []*big.Int{ + internal.B10( + "186141419611617071752010179586510154515933389116254425631491755419216243670159714804545944298892950871169229878325987039840135057969555324774918895952900547869933648175107076399993833724447909579697857041081987997463765989497319509683575289675966710007879762972723174353568113668226442698275449371212397561567", + ), + internal.B10( + "94210786053667323206442523040419729883258172350738703980637961803118626748668924192069593010365236618255120977661397310932923345291377692570649198560048403943687994859423283474169530971418656709749020402756179383990602363122039939937953514870699284906666247063852187255623958659551404494107714695311474384687", + ), + internal.B10( + "130291226847076770981564372061529572170236135412763130013877155698259035960569046218348763182598589633420963942796327547969527085797839549642610021986391589746295634536750785366034581957858065740296991986002552598751827526181747791647357767502200771965093659353354985289411489453223546075843993686648576029043", + ), + internal.B10( + "172938910323633442195852028319756134734590277522945546987913328782597284762767185925315797321999389252040294991952361905020940252121762387957669654615602135429944435719699091344247805645764550860505536884031064967454028383404046221898300153428182409080298694828920944094158777327533157774919783417586902830043", + ), + internal.B10( + "135841191929788643010555393808775051922265083622266098277752143441294911675705272940799534437169053045878247274810449617960047255023823301284034559807472662111224710158898548617194658983006262996831617082584649612602010680423107108651221824216065228161009680618243402116924511141821829055830713600437589058643", + ), + internal.B10( + "179677777376220950493907657233669314916823596507009854134559513388779535023958212632715646194917807302098015450071151245496651913873851032302340489007561121851068326577148680474495447007833318066335149850926605897908761267606415610900931306044455332084757793630487163583451178807470499389106913845684353833379", + ), + internal.B10( + "62649985409697862206708027961094957171873130708493280862148817115812710388878279240372941307490519941098268192630359164091992515623574326498710952492586770923230983753287493884398990474917756375654842939939940915963324175552421981212594421823752854754541693709434365609636589761589816398869727328798680335583", + ), + internal.B10( + "196576931859098680370388202020086631604584490828609819764890020064880575503817891126703473215983239396058738287255240835101797315137072822716923594188151190460588551553676484461393180135097616711975997391550414447010491794087888246885960280296709672609456539741162207414899687167396008233995214434586323322859", + ), + internal.B10( + "271336420864746369701165973306090650688066226258594853124089876839120277465060891854507381090238664515950686049792387028144049076707224579184820539700879884119579186284072404459682082855184644444282438298561112002507411996589407330801765394106772460665497195944412067027079123717579308322520985921886949051399", + ), + internal.B10( + 
"147653127360336844448178027222853805809444645720500374788954343695331927468524513989671450440433430392339037667457657655958027740671071573403925974795764987870476118984896439440386146680643457835633462311776946902713168513155240275028008685964121441954481847113848701823211862974120297600518927026940189810103", + ), + internal.B10( + "61539010433774119199101441060312213379096965116494840834113311373246794436251480454630309900106802555812462300777026043563820643373439814989443964169335227347638731691284339678436222951965582264570176875078394338903074717434072995072121221264531723385013005031614327462206339323414428493321384497439106152163", + ), + internal.B10( + "311771090987243597109711542316907830756641693311804000593662622484722315782429237915515708860530841821213561483232298821623675096481796856960171671330638042763441430256097782130268494276848432981045602236986861083392706904041234926428759947857376161689191720483868111001987710383245853931937989224732484206639", + ), + internal.B10( + "348545239501897032367950520763624245702184225360238826931782856428685149253861325854706825698843098817604431561258712026020688621010635185480321876001016614912927680387840531641703966894322797491484955817022624047355473480912508041252361257911175397626575812830091471419378132244146077774966527307225203863239", + ), + internal.B10( + "167562983031509383478485987630533113343120902430985961468758712448125734458812918541051012669749885569679178971612428577288632429606851871845164719448590160530844833425628143996971699662729056519326776907622035340086832629206691942750594912221135787534670122007438859975313187460872690748138136170080913902203", + ), + internal.B10( + "151715609132228776595716500435665208768897792993205818803431003524953811539898459230192282642811956896879836518212758893685104146944932088195466999437630114129887975508715417094019351746027667352287673763064246395392591213231796089814648654152625331299642171758052545451706130433176935280325874961374276589763", + ), + internal.B10( + "281712498774102170277967098381151873986368736986748325672760355775943894718315925237789122870763991635031524237638254080973752419302437870447849091185409669906909828874532209010547976564209374430136988588219470076527433259993640332285914706329187116209118972038509278879237122949265824264856530096167843589043", + ), + internal.B10( + "86882427063713590116032012033991745733440719961931885774819345297872782432110706546175706398857226544896860987721577779470479838062106788873559026307646871133570915480684987993282364698928926188640576189281202695899665555602891606955025957497645420156315890379148794822782242384167644977894987285630058930123", + ), + internal.B10( + "83303406464212917441403726886711948278716398972782717472384580707071541544369912289531948826578557956123897261910116726555850408667234850301141318443154703778225305104540324875867615851047711871915209458107086347063530255813116254895432804373554367035028329996279513893337862177371103671113527972705508417219", + ), + internal.B10( + "290829612093510969863838578444630131194824970528125429399090307997156531200557462531776190769158191441614466855963164356672434851525764502180873524299787560160992955274777477308009367164212073773611071813219472273292916120276721541591451163160398750751633065097434700462944540404208636130411713202759646572187", + ), + internal.B10( + 
"48881643615473281990659967689574873112454227417010573158578046287415357392674453353386274403945930212163960526780980360358370255117866064326375672959460785974850231208253282115124348836379470403659096433419030132737534978624170609788431758477453270578400762995584298785082853409573009591146163658067217132999", + ), + internal.B10( + "47818664065019136841944639898604168570191742429107462510943153778085167306149797958497736138014922106778177497683417470713996340979203175236987691632152128071256018368891463388744514315997309312144264057334961479235114340091423812710466596081259537323405527816634204224641066174554915788023121121924554823463", + ), + internal.B10( + "229695065817346126758526483359337260282925372883465036895664320607569899180246065431164369223444026964554195802821338287119074827091354255558249504915527499047398698041873875019622138440848556549357174461846992529556315682586788137744896985847052668283284231628825924370859640129811142861116994552829398428647", + ), + internal.B10( + "100501115016957347491693508757253443864758446032047524096020585354104983606736759639044367167134409576092528219013168507381959241052976704837620234061351712684500077849808223040972825725745511581504906610633548112115513604053190610668096089811015493012026572475283305559529666099836493252485826156659750294903", + ), + internal.B10( + "164941338210872461448879529698900150056451305424623398039343691654768566536491493438826728710972441042226712374570117138883019423322602098706113298908005378303473844839971995715847918643285222217768344335264383514921181158565529236124115589719970140511822410990649549555047928093673140752570788415674868532299", + ), + internal.B10( + "277037199251333188171108034082127252450960810846571117481542098050121457972964006392527344163695411986229107011168411232117984760439307440561490229321182283823299859836750264962218540927582605520434969388646847458788766216835350519741512353041653865564337457599778511921159510170311560556844284028160928114623", + ), + internal.B10( + "194450633495795999995837828580097111229016136925345956148788562473415774074431957758705067019578300241653926511426134047652491974620284426575921711048247821582093564387999309993254763749991516361193554847964638949731507356038181346481870018202492098453036116472375750355687853028597643560794156133662678349939", + ), + internal.B10( + "351287943170075259292767465687447861777186026969543801283411356809391771346213158326614410370157474105344692424346410925411240089238093158848815209147605381454294661108047095253146499393310219242924552687094316959878415907497273176683394122976929775532753961555304244867490393116346360677109958055297215711587", + ), + internal.B10( + "329970582937843908299463472795994928703202360293919677760100619380642134736439176597341134222679061949935544692834943628667398294307004076774417457697130314341974849843695836050603685013468031012892094273233016929045028941716224648802422408386216033754532303003405690778077639489173685122357063674177077611499", + ), + internal.B10( + "67751546105580063387575764356375682499165422473383503143930633584920975946879807021875353514236996076344028719391905234977223693643926924731304657199486141030504275775023186923364159168130612275534168246529309247449138607249407760246378829338068736888203134857601657561860157938495777271164458736576560502603", + ), + internal.B10( + 
"276043923868350639738966705196129285523536747369492013710841410915407411458158268634302674024358477700030814139419613881758439643092328709376796555454484423864587139181985503560495790018232370315219082876142282958894264284344738294415199758590186234114829175455336589153989920707778566032047921277945163061363", + ), + internal.B10( + "62028909880050184794454820320289487394141550306616974968340908736543032782344593292214952852576535830823991093496498970213686040280098908204236051130358424961175634703281821899530101130244725435470475135483879784963475148975313832483400747421265545413510460046067002322131902159892876739088034507063542087523", + ), + internal.B10( + "321804071508183671133831207712462079740282619152225438240259877528712344129467977098976100894625335474509551113902455258582802291330071887726188174124352664849954838358973904505681968878957681630941310372231688127901147200937955329324769631743029415035218057960201863908173045670622969475867077447909836936523", + ), + internal.B10( + "52495647838749571441531580865340679598533348873590977282663145916368795913408897399822291638579504238082829052094508345857857144973446573810004060341650816108578548997792700057865473467391946766537119012441105169305106247003867011741811274367120479722991749924616247396514197345075177297436299446651331187067", + ), + internal.B10( + "118753381771703394804894143450628876988609300829627946826004421079000316402854210786451078221445575185505001470635997217855372731401976507648597119694813440063429052266569380936671291883364036649087788968029662592370202444662489071262833666489940296758935970249316300642591963940296755031586580445184253416139", + ), +} + +func TestGenerateChallengesWorks(t *testing.T) { + curves := []elliptic.Curve{btcec.S256(), elliptic.P256()} + for _, curve := range curves { + // dummy secret based on curve + y, _ := curves2.NewScalarBaseMult(curve, big.NewInt(3)) + + for i := 0; i < 5; i++ { + for j := 0; j < 5; j++ { + // Compute N from test primes + skN := new(big.Int).Mul(testPrimes[i], testPrimes[j]) + x, err := generateChallenges(curve.Params(), skN, uint32(i+1), y) + + require.NoError(t, err) + for _, xj := range x { + require.NotNil(t, xj) + require.Greater(t, xj.Cmp(crypto.Zero), 0) + require.Equal(t, xj.Cmp(skN), -1) + } + } + } + } +} + +// tests whether the hash function can still produce valid challenges +// even when the modulus is smaller than the hash output +func TestGenerateChallengesPrimeN(t *testing.T) { + curves := []elliptic.Curve{btcec.S256(), elliptic.P256()} + n := []*big.Int{ + internal.B10("680564733841876926926749214863536724007"), + internal.B10("1234345830644315716128223123383371693999"), + internal.B10( + "358070498390900219760227294443948492156530525739357363711230524749453568134007", + ), + internal.B10( + "409819537231165473932776844223512813127760876374228481246566335070809195677439", + ), + internal.B10( + "336327054820888403283842064345570507895192245801107954728200717775133442039527", + ), + internal.B10( + "353526063197730551176241127281213353808518592628930654494044427064787696719527", + ), + } + + for _, curve := range curves { + // dummy secret based on curve + y, _ := curves2.NewScalarBaseMult(curve, big.NewInt(3)) + for i := 0; i < 5; i++ { + for j := 0; j < 5; j++ { + x, err := generateChallenges(curve.Params(), n[i], uint32(j+1), y) + + require.NoError(t, err) + for _, xj := range x { + require.NotNil(t, xj) + require.Greater(t, xj.Cmp(crypto.Zero), 0) + } + } + } + } +} + +func TestGenerateChallengesNilInputs(t *testing.T) { + y, _ := 
curves2.NewScalarBaseMult(elliptic.P256(), big.NewInt(1)) + + _, err := generateChallenges(nil, nil, 0, nil) + require.Error(t, err) + _, err = generateChallenges(elliptic.P256().Params(), nil, 0, nil) + require.Error(t, err) + _, err = generateChallenges(elliptic.P256().Params(), big.NewInt(0), 0, nil) + require.Error(t, err) + _, err = generateChallenges(elliptic.P256().Params(), big.NewInt(1), 1, nil) + require.Error(t, err) + _, err = generateChallenges(elliptic.P256().Params(), big.NewInt(1), 1, y) + require.Error(t, err) + _, err = generateChallenges(elliptic.P256().Params(), big.NewInt(1), 1, y) + require.Error(t, err) + _, err = generateChallenges(elliptic.P256().Params(), big.NewInt(255), 1, y) + require.NoError(t, err) +} + +func TestPsfProofParams_Prove(t *testing.T) { + // RSA primes for testing + sk, _ := NewSecretKey( + // 75-digit random primes from Wolfram-Alpha + internal.B10( + "110045198697022997120409435651962875820936327127306040565577217116705932648687", + ), + internal.B10( + "95848033199746534486927143950536999279071340697368502822602282152563330640779", + ), + ) + smallSk, _ := NewSecretKey(big.NewInt(13), big.NewInt(11)) + + // Some points for testing + k := internal.B10("270988338908697209412444309907441365656383309727758604622908325428179708750") + Qp256, _ := curves2.NewScalarBaseMult(elliptic.P256(), k) + Qs256, _ := curves2.NewScalarBaseMult(btcec.S256(), k) + pi := uint32(4) + + tests := []struct { + name string + in *PsfProofParams + expectedError error + expectedResultLen int + }{ + // Positive tests + { + "positive: p256, small numbers", + &PsfProofParams{elliptic.P256(), smallSk, 1001, Qp256}, + nil, + PsfProofLength, + }, + { + "positive: p256, large numbers", + &PsfProofParams{elliptic.P256(), sk, pi, Qp256}, + nil, + PsfProofLength, + }, + { + "positive: s256, large numbers", + &PsfProofParams{btcec.S256(), sk, pi, Qs256}, + nil, + PsfProofLength, + }, + + // Nil params + { + "negative: ", + &PsfProofParams{ + btcec.S256(), + sk, + pi, + Qs256, + }, + nil, + PsfProofLength, + }, + { + "negative: proof params are nil", + &PsfProofParams{ + nil, + sk, + pi, + Qs256, + }, + internal.ErrNilArguments, + 0, + }, + { + "negative: SecretKey is nil", + &PsfProofParams{ + btcec.S256(), + nil, + pi, + Qs256, + }, + internal.ErrNilArguments, + 0, + }, + { + "negative: y is nil", + &PsfProofParams{ + btcec.S256(), + sk, + 0, + Qs256, + }, + internal.ErrNilArguments, + 0, + }, + { + "negative: Pi is nil", + &PsfProofParams{ + btcec.S256(), + sk, + pi, + nil, + }, + internal.ErrNilArguments, + 0, + }, + } + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + // test + result, err := test.in.Prove() + + // Verify the results are as expected + require.Equal(t, test.expectedError, err) + require.Len(t, result, test.expectedResultLen) + }) + } +} + +// Test that Range2Proof can be marshaled and unmarshaled correctly +func TestPsfProof_MarshalJSON(t *testing.T) { + // Generate some proof + sk, _ := NewSecretKey( + internal.B10( + "110045198697022997120409435651962875820936327127306040565577217116705932648687", + ), + internal.B10( + "95848033199746534486927143950536999279071340697368502822602282152563330640779", + ), + ) + Q, _ := curves2.NewScalarBaseMult(elliptic.P256(), + internal.B10("270988338908697209412444309907441365656383309727758604622908325428179708750")) + params := &PsfProofParams{ + elliptic.P256(), sk, + 1, + Q, + } + proof, err := params.Prove() + require.NoError(t, err) + require.NotNil(t, proof) + + // Marshal + testJSON, err := 
json.Marshal(proof) + require.NoError(t, err) + require.NotNil(t, testJSON) + + var unmarshaled PsfProof + err = json.Unmarshal(testJSON, &unmarshaled) + require.NoError(t, err) + + // Test for equality + require.Len(t, ([]*big.Int)(unmarshaled), len(([]*big.Int)(proof))) + for i := range proof { + require.Equal(t, proof[i], unmarshaled[i]) + } +} + +// Tests for Verify +// prove/verify round trip works +// modifying any parameter causes verify to fail +func TestPsfProofWorks(t *testing.T) { + // RSA primes for testing + sk, _ := NewSecretKey( + // 75-digit random primes from Wolfram-Alpha + internal.B10( + "110045198697022997120409435651962875820936327127306040565577217116705932648687", + ), + internal.B10( + "95848033199746534486927143950536999279071340697368502822602282152563330640779", + ), + ) + + // Some points for testing + k := internal.B10("270988338908697209412444309907441365656383309727758604622908325428179708750") + Qp256, _ := curves2.NewScalarBaseMult(elliptic.P256(), k) + pi := uint32(2) + + proveParams := &PsfProofParams{ + Curve: elliptic.P256(), + SecretKey: sk, + Pi: pi, + Y: Qp256, + } + proof, _ := proveParams.Prove() + verifyParams := &PsfVerifyParams{ + Curve: elliptic.P256(), + PublicKey: &sk.PublicKey, + Pi: pi, + Y: Qp256, + } + require.NoError(t, proof.Verify(verifyParams)) +} diff --git a/password/validator.go b/password/validator.go new file mode 100644 index 0000000..cbfbe8d --- /dev/null +++ b/password/validator.go @@ -0,0 +1,185 @@ +// Package password provides secure password handling and validation +package password + +import ( + "crypto/rand" + "fmt" + "unicode" +) + +// PasswordConfig defines password policy requirements +type PasswordConfig struct { + MinLength int + MaxLength int + RequireUppercase bool + RequireLowercase bool + RequireDigits bool + RequireSpecial bool + MinEntropy float64 +} + +// DefaultPasswordConfig returns secure default password requirements +func DefaultPasswordConfig() *PasswordConfig { + return &PasswordConfig{ + MinLength: 12, + MaxLength: 128, + RequireUppercase: true, + RequireLowercase: true, + RequireDigits: true, + RequireSpecial: true, + MinEntropy: 50.0, // bits + } +} + +// Validator validates passwords against security policies +type Validator struct { + config *PasswordConfig +} + +// NewValidator creates a password validator with the given configuration +func NewValidator(config *PasswordConfig) *Validator { + if config == nil { + config = DefaultPasswordConfig() + } + return &Validator{config: config} +} + +// Validate checks if a password meets security requirements +func (v *Validator) Validate(password []byte) error { + // Check length + if len(password) < v.config.MinLength { + return fmt.Errorf("password must be at least %d characters", v.config.MinLength) + } + if len(password) > v.config.MaxLength { + return fmt.Errorf("password must not exceed %d characters", v.config.MaxLength) + } + + // Check character requirements + var hasUpper, hasLower, hasDigit, hasSpecial bool + for _, ch := range string(password) { + switch { + case unicode.IsUpper(ch): + hasUpper = true + case unicode.IsLower(ch): + hasLower = true + case unicode.IsDigit(ch): + hasDigit = true + case unicode.IsSpace(ch): + // Spaces are allowed but not counted as special + case unicode.IsPunct(ch) || unicode.IsSymbol(ch): + hasSpecial = true + } + } + + if v.config.RequireUppercase && !hasUpper { + return fmt.Errorf("password must contain at least one uppercase letter") + } + if v.config.RequireLowercase && !hasLower { + return 
fmt.Errorf("password must contain at least one lowercase letter") + } + if v.config.RequireDigits && !hasDigit { + return fmt.Errorf("password must contain at least one digit") + } + if v.config.RequireSpecial && !hasSpecial { + return fmt.Errorf("password must contain at least one special character") + } + + // Check entropy + entropy := v.calculateEntropy(password) + if entropy < v.config.MinEntropy { + return fmt.Errorf("password entropy too low: %.1f bits (minimum: %.1f)", + entropy, v.config.MinEntropy) + } + + return nil +} + +// calculateEntropy estimates password entropy in bits +func (v *Validator) calculateEntropy(password []byte) float64 { + // Count unique characters + charSet := make(map[byte]bool) + for _, b := range password { + charSet[b] = true + } + + // Estimate character pool size + poolSize := 0 + var hasUpper, hasLower, hasDigit, hasSpecial bool + + for ch := range charSet { + r := rune(ch) + switch { + case unicode.IsUpper(r): + hasUpper = true + case unicode.IsLower(r): + hasLower = true + case unicode.IsDigit(r): + hasDigit = true + case unicode.IsPunct(r) || unicode.IsSymbol(r): + hasSpecial = true + } + } + + if hasLower { + poolSize += 26 + } + if hasUpper { + poolSize += 26 + } + if hasDigit { + poolSize += 10 + } + if hasSpecial { + poolSize += 32 // Common special characters + } + + if poolSize == 0 { + return 0 + } + + // Calculate entropy: log2(poolSize^length) + // Simplified: length * log2(poolSize) + bitsPerChar := 0.0 + temp := poolSize + for temp > 0 { + bitsPerChar++ + temp >>= 1 + } + + return float64(len(password)) * bitsPerChar +} + +// GenerateSalt generates a cryptographically secure random salt +func GenerateSalt(size int) ([]byte, error) { + if size < 16 { + return nil, fmt.Errorf("salt size must be at least 16 bytes") + } + + salt := make([]byte, size) + if _, err := rand.Read(salt); err != nil { + return nil, fmt.Errorf("failed to generate salt: %w", err) + } + + return salt, nil +} + +// SecureCompare performs constant-time comparison of two byte slices +func SecureCompare(a, b []byte) bool { + if len(a) != len(b) { + return false + } + + var result byte + for i := 0; i < len(a); i++ { + result |= a[i] ^ b[i] + } + + return result == 0 +} + +// ZeroBytes overwrites a byte slice with zeros +func ZeroBytes(b []byte) { + for i := range b { + b[i] = 0 + } +} diff --git a/password/validator_test.go b/password/validator_test.go new file mode 100644 index 0000000..0b17025 --- /dev/null +++ b/password/validator_test.go @@ -0,0 +1,207 @@ +package password + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestValidator_Validate(t *testing.T) { + validator := NewValidator(DefaultPasswordConfig()) + + testCases := []struct { + name string + password string + wantErr bool + errMsg string + }{ + { + name: "too short", + password: "Short1!", + wantErr: true, + errMsg: "at least 12 characters", + }, + { + name: "too long", + password: string(make([]byte, 129)), + wantErr: true, + errMsg: "not exceed 128 characters", + }, + { + name: "missing uppercase", + password: "longenoughpassword123!", + wantErr: true, + errMsg: "uppercase letter", + }, + { + name: "missing lowercase", + password: "LONGENOUGHPASSWORD123!", + wantErr: true, + errMsg: "lowercase letter", + }, + { + name: "missing digit", + password: "LongEnoughPassword!", + wantErr: true, + errMsg: "one digit", + }, + { + name: "missing special", + password: "LongEnoughPassword123", + wantErr: true, + errMsg: "special character", + }, + { 
+ name: "valid password", + password: "ValidPassword123!", + wantErr: false, + }, + { + name: "complex valid password", + password: "MyS3cur3P@ssw0rd!2024", + wantErr: false, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + err := validator.Validate([]byte(tc.password)) + if tc.wantErr { + require.Error(t, err) + assert.Contains(t, err.Error(), tc.errMsg) + } else { + assert.NoError(t, err) + } + }) + } +} + +func TestValidator_CustomConfig(t *testing.T) { + config := &PasswordConfig{ + MinLength: 8, + MaxLength: 64, + RequireUppercase: false, + RequireLowercase: true, + RequireDigits: true, + RequireSpecial: false, + MinEntropy: 30.0, + } + + validator := NewValidator(config) + + // Should pass with custom config + err := validator.Validate([]byte("simple123")) + assert.NoError(t, err) + + // Should fail - too short + err = validator.Validate([]byte("short1")) + assert.Error(t, err) + + // Should fail - no digits + err = validator.Validate([]byte("simplepass")) + assert.Error(t, err) +} + +func TestGenerateSalt(t *testing.T) { + // Test valid salt generation + salt, err := GenerateSalt(32) + require.NoError(t, err) + assert.Len(t, salt, 32) + + // Test different salt each time + salt2, err := GenerateSalt(32) + require.NoError(t, err) + assert.NotEqual(t, salt, salt2) + + // Test minimum size enforcement + _, err = GenerateSalt(8) + assert.Error(t, err) + assert.Contains(t, err.Error(), "at least 16 bytes") +} + +func TestSecureCompare(t *testing.T) { + // Test equal slices + a := []byte("password") + b := []byte("password") + assert.True(t, SecureCompare(a, b)) + + // Test different slices + c := []byte("different") + assert.False(t, SecureCompare(a, c)) + + // Test different lengths + d := []byte("pass") + assert.False(t, SecureCompare(a, d)) + + // Test empty slices + assert.True(t, SecureCompare([]byte{}, []byte{})) +} + +func TestZeroBytes(t *testing.T) { + password := []byte("sensitive") + ZeroBytes(password) + + for _, b := range password { + assert.Equal(t, byte(0), b) + } +} + +func TestCalculateEntropy(t *testing.T) { + validator := NewValidator(nil) + + testCases := []struct { + name string + password string + minEntropy float64 + }{ + { + name: "lowercase only", + password: "abcdefghij", + minEntropy: 40, + }, + { + name: "alphanumeric", + password: "Abc123", + minEntropy: 30, + }, + { + name: "complex", + password: "MyP@ssw0rd!", + minEntropy: 50, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + entropy := validator.calculateEntropy([]byte(tc.password)) + assert.GreaterOrEqual(t, entropy, tc.minEntropy) + }) + } +} + +func BenchmarkValidate(b *testing.B) { + validator := NewValidator(DefaultPasswordConfig()) + password := []byte("ValidPassword123!") + + b.ResetTimer() + for i := 0; i < b.N; i++ { + _ = validator.Validate(password) + } +} + +func BenchmarkGenerateSalt(b *testing.B) { + for i := 0; i < b.N; i++ { + _, _ = GenerateSalt(32) + } +} + +func BenchmarkSecureCompare(b *testing.B) { + a := []byte("password123") + c := []byte("password123") + + b.ResetTimer() + for i := 0; i < b.N; i++ { + _ = SecureCompare(a, c) + } +} diff --git a/salt/salt.go b/salt/salt.go new file mode 100644 index 0000000..9733ad1 --- /dev/null +++ b/salt/salt.go @@ -0,0 +1,227 @@ +// Package salt provides cryptographically secure salt generation and management +// for use in key derivation functions and encryption operations. 
+package salt + +import ( + "crypto/rand" + "fmt" + "io" +) + +const ( + // DefaultSaltSize defines the recommended salt size (256 bits) + DefaultSaltSize = 32 + // MinSaltSize defines the minimum acceptable salt size (128 bits) + MinSaltSize = 16 + // MaxSaltSize defines the maximum salt size to prevent resource exhaustion + MaxSaltSize = 1024 +) + +// Salt represents a cryptographically secure salt value +type Salt struct { + value []byte +} + +// Generate creates a new cryptographically secure salt of the specified size +func Generate(size int) (*Salt, error) { + if size < MinSaltSize { + return nil, fmt.Errorf("salt size too small: minimum %d bytes required", MinSaltSize) + } + if size > MaxSaltSize { + return nil, fmt.Errorf("salt size too large: maximum %d bytes allowed", MaxSaltSize) + } + + saltBytes := make([]byte, size) + if _, err := io.ReadFull(rand.Reader, saltBytes); err != nil { + return nil, fmt.Errorf("failed to generate random salt: %w", err) + } + + return &Salt{value: saltBytes}, nil +} + +// GenerateDefault creates a new salt with the default recommended size +func GenerateDefault() (*Salt, error) { + return Generate(DefaultSaltSize) +} + +// FromBytes creates a Salt from existing bytes (validates minimum size) +func FromBytes(data []byte) (*Salt, error) { + if len(data) < MinSaltSize { + return nil, fmt.Errorf("salt too small: minimum %d bytes required, got %d", MinSaltSize, len(data)) + } + if len(data) > MaxSaltSize { + return nil, fmt.Errorf("salt too large: maximum %d bytes allowed, got %d", MaxSaltSize, len(data)) + } + + // Copy to prevent external modification + saltBytes := make([]byte, len(data)) + copy(saltBytes, data) + + return &Salt{value: saltBytes}, nil +} + +// Bytes returns a copy of the salt bytes to prevent external modification +func (s *Salt) Bytes() []byte { + if s == nil || s.value == nil { + return nil + } + result := make([]byte, len(s.value)) + copy(result, s.value) + return result +} + +// Size returns the size of the salt in bytes +func (s *Salt) Size() int { + if s == nil || s.value == nil { + return 0 + } + return len(s.value) +} + +// String returns a redacted string representation for logging (does not expose salt value) +func (s *Salt) String() string { + if s == nil || s.value == nil { + return "Salt{}" + } + return fmt.Sprintf("Salt{size=%d}", len(s.value)) +} + +// Equal compares two salts in constant time to prevent timing attacks +func (s *Salt) Equal(other *Salt) bool { + if s == nil || other == nil { + return s == other + } + if len(s.value) != len(other.value) { + return false + } + return constantTimeCompare(s.value, other.value) +} + +// Clear securely zeros the salt value from memory +func (s *Salt) Clear() { + if s != nil && s.value != nil { + for i := range s.value { + s.value[i] = 0 + } + s.value = nil + } +} + +// IsEmpty checks if the salt is nil or has zero length +func (s *Salt) IsEmpty() bool { + return s == nil || s.value == nil || len(s.value) == 0 +} + +// constantTimeCompare performs constant-time comparison of two byte slices +func constantTimeCompare(a, b []byte) bool { + if len(a) != len(b) { + return false + } + + var result byte + for i := 0; i < len(a); i++ { + result |= a[i] ^ b[i] + } + return result == 0 +} + +// SaltStore manages storage and retrieval of salts with metadata +type SaltStore struct { + salts map[string]*Salt +} + +// NewSaltStore creates a new salt store for managing multiple salts +func NewSaltStore() *SaltStore { + return &SaltStore{ + salts: make(map[string]*Salt), + } +} + +// Store 
saves a salt with the given identifier +func (ss *SaltStore) Store(id string, salt *Salt) error { + if id == "" { + return fmt.Errorf("salt identifier cannot be empty") + } + if salt == nil || salt.IsEmpty() { + return fmt.Errorf("salt cannot be nil or empty") + } + + // Store a copy to prevent external modification + ss.salts[id] = &Salt{ + value: salt.Bytes(), + } + + return nil +} + +// Retrieve gets a salt by its identifier +func (ss *SaltStore) Retrieve(id string) (*Salt, error) { + if id == "" { + return nil, fmt.Errorf("salt identifier cannot be empty") + } + + salt, exists := ss.salts[id] + if !exists { + return nil, fmt.Errorf("salt not found for identifier: %s", id) + } + + // Return a copy to prevent external modification + return &Salt{ + value: salt.Bytes(), + }, nil +} + +// Remove deletes a salt from the store and clears its memory +func (ss *SaltStore) Remove(id string) error { + if id == "" { + return fmt.Errorf("salt identifier cannot be empty") + } + + salt, exists := ss.salts[id] + if !exists { + return fmt.Errorf("salt not found for identifier: %s", id) + } + + // Clear the salt from memory before removal + salt.Clear() + delete(ss.salts, id) + + return nil +} + +// List returns all stored salt identifiers +func (ss *SaltStore) List() []string { + ids := make([]string, 0, len(ss.salts)) + for id := range ss.salts { + ids = append(ids, id) + } + return ids +} + +// Clear removes all salts and clears their memory +func (ss *SaltStore) Clear() { + for id, salt := range ss.salts { + salt.Clear() + delete(ss.salts, id) + } +} + +// Size returns the number of stored salts +func (ss *SaltStore) Size() int { + return len(ss.salts) +} + +// GenerateAndStore creates a new salt and stores it with the given identifier +func (ss *SaltStore) GenerateAndStore(id string, size int) (*Salt, error) { + salt, err := Generate(size) + if err != nil { + return nil, fmt.Errorf("failed to generate salt: %w", err) + } + + if err := ss.Store(id, salt); err != nil { + salt.Clear() // Clean up on failure + return nil, fmt.Errorf("failed to store salt: %w", err) + } + + return salt, nil +} diff --git a/salt/salt_test.go b/salt/salt_test.go new file mode 100644 index 0000000..b39d236 --- /dev/null +++ b/salt/salt_test.go @@ -0,0 +1,421 @@ +package salt + +import ( + "bytes" + "fmt" + "testing" +) + +func TestGenerate(t *testing.T) { + tests := []struct { + name string + size int + wantErr bool + }{ + {"minimum size", MinSaltSize, false}, + {"default size", DefaultSaltSize, false}, + {"large size", 512, false}, + {"maximum size", MaxSaltSize, false}, + {"too small", MinSaltSize - 1, true}, + {"too large", MaxSaltSize + 1, true}, + {"zero size", 0, true}, + {"negative size", -1, true}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + salt, err := Generate(tt.size) + if (err != nil) != tt.wantErr { + t.Errorf("Generate() error = %v, wantErr %v", err, tt.wantErr) + return + } + + if !tt.wantErr { + if salt == nil { + t.Error("Generate() returned nil salt without error") + return + } + if salt.Size() != tt.size { + t.Errorf("Generate() size = %d, want %d", salt.Size(), tt.size) + } + if salt.IsEmpty() { + t.Error("Generate() returned empty salt") + } + + // Test that salt contains random data (not all zeros) + saltBytes := salt.Bytes() + allZeros := true + for _, b := range saltBytes { + if b != 0 { + allZeros = false + break + } + } + if allZeros { + t.Error("Generate() returned all-zero salt (likely not random)") + } + } + }) + } +} + +func TestGenerateDefault(t *testing.T) { 
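+	// GenerateDefault simply delegates to Generate(DefaultSaltSize), so the
+	// size of the result is the only property that needs checking here.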
+ salt, err := GenerateDefault() + if err != nil { + t.Fatalf("GenerateDefault() error = %v", err) + } + + if salt.Size() != DefaultSaltSize { + t.Errorf("GenerateDefault() size = %d, want %d", salt.Size(), DefaultSaltSize) + } +} + +func TestFromBytes(t *testing.T) { + validBytes := make([]byte, DefaultSaltSize) + for i := range validBytes { + validBytes[i] = byte(i) + } + + tests := []struct { + name string + data []byte + wantErr bool + }{ + {"valid default size", validBytes, false}, + {"minimum size", make([]byte, MinSaltSize), false}, + {"large size", make([]byte, 512), false}, + {"too small", make([]byte, MinSaltSize-1), true}, + {"too large", make([]byte, MaxSaltSize+1), true}, + {"empty", []byte{}, true}, + {"nil", nil, true}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + salt, err := FromBytes(tt.data) + if (err != nil) != tt.wantErr { + t.Errorf("FromBytes() error = %v, wantErr %v", err, tt.wantErr) + return + } + + if !tt.wantErr { + if salt == nil { + t.Error("FromBytes() returned nil salt without error") + return + } + if salt.Size() != len(tt.data) { + t.Errorf("FromBytes() size = %d, want %d", salt.Size(), len(tt.data)) + } + + // Verify data is copied correctly + saltBytes := salt.Bytes() + if !bytes.Equal(saltBytes, tt.data) { + t.Error("FromBytes() data doesn't match input") + } + + // Verify external modification doesn't affect salt + if len(tt.data) > 0 { + originalValue := tt.data[0] + tt.data[0] = ^tt.data[0] // Flip bits + if salt.Bytes()[0] != originalValue { + t.Error("FromBytes() salt was affected by external modification") + } + } + } + }) + } +} + +func TestSaltBytes(t *testing.T) { + salt, err := GenerateDefault() + if err != nil { + t.Fatalf("Failed to generate salt: %v", err) + } + + bytes1 := salt.Bytes() + bytes2 := salt.Bytes() + + // Should return same data + if !bytes.Equal(bytes1, bytes2) { + t.Error("Bytes() returned different data on multiple calls") + } + + // Should be independent copies + if &bytes1[0] == &bytes2[0] { + t.Error("Bytes() returned same underlying array (not a copy)") + } + + // Modifying returned bytes shouldn't affect salt + if len(bytes1) > 0 { + originalValue := bytes1[0] + bytes1[0] = ^bytes1[0] + bytes3 := salt.Bytes() + if bytes3[0] != originalValue { + t.Error("External modification of Bytes() affected salt") + } + } +} + +func TestSaltEqual(t *testing.T) { + salt1, err := Generate(DefaultSaltSize) + if err != nil { + t.Fatalf("Failed to generate salt1: %v", err) + } + + salt2, err := Generate(DefaultSaltSize) + if err != nil { + t.Fatalf("Failed to generate salt2: %v", err) + } + + // Same salt data + salt3, err := FromBytes(salt1.Bytes()) + if err != nil { + t.Fatalf("Failed to create salt3: %v", err) + } + + tests := []struct { + name string + salt1 *Salt + salt2 *Salt + expected bool + }{ + {"same salt", salt1, salt1, true}, + {"equivalent salts", salt1, salt3, true}, + {"different salts", salt1, salt2, false}, + {"nil salts", nil, nil, true}, + {"one nil salt", salt1, nil, false}, + {"nil vs non-nil", nil, salt1, false}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := tt.salt1.Equal(tt.salt2) + if result != tt.expected { + t.Errorf("Equal() = %v, want %v", result, tt.expected) + } + }) + } +} + +func TestSaltClear(t *testing.T) { + salt, err := GenerateDefault() + if err != nil { + t.Fatalf("Failed to generate salt: %v", err) + } + + originalSize := salt.Size() + if originalSize == 0 { + t.Fatal("Salt size is zero before clear") + } + + salt.Clear() + + if 
salt.Size() != 0 { + t.Error("Salt size is not zero after clear") + } + + if !salt.IsEmpty() { + t.Error("Salt is not empty after clear") + } + + bytes := salt.Bytes() + if bytes != nil { + t.Error("Bytes() should return nil after clear") + } +} + +func TestSaltStore(t *testing.T) { + store := NewSaltStore() + + // Test empty store + if store.Size() != 0 { + t.Error("New store should be empty") + } + + // Generate and store salt + salt1, err := GenerateDefault() + if err != nil { + t.Fatalf("Failed to generate salt: %v", err) + } + + err = store.Store("test1", salt1) + if err != nil { + t.Fatalf("Failed to store salt: %v", err) + } + + if store.Size() != 1 { + t.Errorf("Store size = %d, want 1", store.Size()) + } + + // Retrieve salt + retrieved, err := store.Retrieve("test1") + if err != nil { + t.Fatalf("Failed to retrieve salt: %v", err) + } + + if !salt1.Equal(retrieved) { + t.Error("Retrieved salt doesn't match stored salt") + } + + // Test generate and store + salt2, err := store.GenerateAndStore("test2", DefaultSaltSize) + if err != nil { + t.Fatalf("Failed to generate and store salt: %v", err) + } + + if store.Size() != 2 { + t.Errorf("Store size = %d, want 2", store.Size()) + } + + if salt2.Size() != DefaultSaltSize { + t.Errorf("Generated salt size = %d, want %d", salt2.Size(), DefaultSaltSize) + } + + // Test list + ids := store.List() + if len(ids) != 2 { + t.Errorf("List() returned %d ids, want 2", len(ids)) + } + + foundTest1 := false + foundTest2 := false + for _, id := range ids { + if id == "test1" { + foundTest1 = true + } + if id == "test2" { + foundTest2 = true + } + } + if !foundTest1 || !foundTest2 { + t.Error("List() doesn't contain expected IDs") + } + + // Test remove + err = store.Remove("test1") + if err != nil { + t.Fatalf("Failed to remove salt: %v", err) + } + + if store.Size() != 1 { + t.Errorf("Store size = %d, want 1 after removal", store.Size()) + } + + _, err = store.Retrieve("test1") + if err == nil { + t.Error("Should not be able to retrieve removed salt") + } + + // Test clear + store.Clear() + if store.Size() != 0 { + t.Error("Store should be empty after clear") + } +} + +func TestSaltStoreErrors(t *testing.T) { + store := NewSaltStore() + + // Test empty identifier errors + err := store.Store("", nil) + if err == nil { + t.Error("Should error on empty identifier") + } + + _, err = store.Retrieve("") + if err == nil { + t.Error("Should error on empty identifier") + } + + err = store.Remove("") + if err == nil { + t.Error("Should error on empty identifier") + } + + // Test nil salt error + err = store.Store("test", nil) + if err == nil { + t.Error("Should error on nil salt") + } + + // Test empty salt error + emptySalt := &Salt{} + err = store.Store("test", emptySalt) + if err == nil { + t.Error("Should error on empty salt") + } + + // Test retrieve non-existent + _, err = store.Retrieve("nonexistent") + if err == nil { + t.Error("Should error when retrieving non-existent salt") + } + + // Test remove non-existent + err = store.Remove("nonexistent") + if err == nil { + t.Error("Should error when removing non-existent salt") + } +} + +func TestConstantTimeCompare(t *testing.T) { + a := []byte{1, 2, 3, 4, 5} + b := []byte{1, 2, 3, 4, 5} + c := []byte{1, 2, 3, 4, 6} + d := []byte{1, 2, 3, 4} + + tests := []struct { + name string + a, b []byte + expected bool + }{ + {"equal slices", a, b, true}, + {"different content", a, c, false}, + {"different length", a, d, false}, + {"empty slices", []byte{}, []byte{}, true}, + {"one empty", a, []byte{}, false}, + } + 
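+	// constantTimeCompare mirrors the semantics of crypto/subtle.ConstantTimeCompare:
+	// equal-length slices are compared byte-for-byte, mismatched lengths return false immediately.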
+ for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := constantTimeCompare(tt.a, tt.b) + if result != tt.expected { + t.Errorf("constantTimeCompare() = %v, want %v", result, tt.expected) + } + }) + } +} + +func BenchmarkGenerate(b *testing.B) { + sizes := []int{MinSaltSize, DefaultSaltSize, 512} + + for _, size := range sizes { + b.Run(fmt.Sprintf("%dB", size), func(b *testing.B) { + for i := 0; i < b.N; i++ { + salt, err := Generate(size) + if err != nil { + b.Fatalf("Generate error: %v", err) + } + salt.Clear() // Clean up + } + }) + } +} + +func BenchmarkSaltEqual(b *testing.B) { + salt1, _ := GenerateDefault() + salt2, _ := GenerateDefault() + salt3, _ := FromBytes(salt1.Bytes()) + + b.Run("equal", func(b *testing.B) { + for i := 0; i < b.N; i++ { + salt1.Equal(salt3) + } + }) + + b.Run("different", func(b *testing.B) { + for i := 0; i < b.N; i++ { + salt1.Equal(salt2) + } + }) +} diff --git a/secure/memory.go b/secure/memory.go new file mode 100644 index 0000000..d173727 --- /dev/null +++ b/secure/memory.go @@ -0,0 +1,325 @@ +// Package secure provides utilities for secure memory handling and sensitive data management. +// It includes functions for explicit memory zeroization and secure data lifecycle management. +package secure + +import ( + "crypto/rand" + "fmt" + "runtime" + "sync" +) + +// SecureBytes wraps a byte slice with automatic cleanup functionality +type SecureBytes struct { + data []byte + mu sync.RWMutex + finalized bool +} + +// NewSecureBytes creates a new SecureBytes instance with automatic cleanup +func NewSecureBytes(size int) *SecureBytes { + if size <= 0 { + return &SecureBytes{data: nil} + } + + sb := &SecureBytes{ + data: make([]byte, size), + } + + // Set finalizer for automatic cleanup if Clear() is not called + runtime.SetFinalizer(sb, (*SecureBytes).finalize) + return sb +} + +// FromBytes creates a SecureBytes instance from existing data (copies the data) +func FromBytes(data []byte) *SecureBytes { + if len(data) == 0 { + return &SecureBytes{data: nil} + } + + sb := &SecureBytes{ + data: make([]byte, len(data)), + } + copy(sb.data, data) + + runtime.SetFinalizer(sb, (*SecureBytes).finalize) + return sb +} + +// Bytes returns a copy of the secure data to prevent external modification +func (sb *SecureBytes) Bytes() []byte { + sb.mu.RLock() + defer sb.mu.RUnlock() + + if sb.data == nil { + return nil + } + + result := make([]byte, len(sb.data)) + copy(result, sb.data) + return result +} + +// Size returns the size of the secure data +func (sb *SecureBytes) Size() int { + sb.mu.RLock() + defer sb.mu.RUnlock() + + if sb.data == nil { + return 0 + } + return len(sb.data) +} + +// IsEmpty checks if the secure data is empty or nil +func (sb *SecureBytes) IsEmpty() bool { + sb.mu.RLock() + defer sb.mu.RUnlock() + + return sb.data == nil || len(sb.data) == 0 +} + +// Clear explicitly zeros the memory and removes the finalizer +func (sb *SecureBytes) Clear() { + sb.mu.Lock() + defer sb.mu.Unlock() + + if !sb.finalized && sb.data != nil { + Zeroize(sb.data) + sb.data = nil + sb.finalized = true + runtime.SetFinalizer(sb, nil) // Remove finalizer since we've cleaned up + } +} + +// finalize is called by the garbage collector if Clear() was not called +func (sb *SecureBytes) finalize() { + sb.Clear() +} + +// CopyTo safely copies data to the secure buffer +func (sb *SecureBytes) CopyTo(data []byte) error { + sb.mu.Lock() + defer sb.mu.Unlock() + + if sb.finalized { + return fmt.Errorf("secure bytes has been finalized") + } + + if sb.data == nil { 
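+		// a zero-size SecureBytes has no backing buffer to copy into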
+ return fmt.Errorf("secure bytes is nil") + } + + if len(data) > len(sb.data) { + return fmt.Errorf("data size %d exceeds secure buffer size %d", len(data), len(sb.data)) + } + + // Zero existing data first + Zeroize(sb.data) + copy(sb.data, data) + return nil +} + +// Zeroize explicitly zeros out sensitive data from memory using byte slicing +func Zeroize(data []byte) { + if len(data) == 0 { + return + } + + // Explicitly zero each byte to prevent compiler optimizations + for i := range data { + data[i] = 0 + } + + // Force memory barrier to ensure zeroization is not optimized away + runtime.KeepAlive(data) +} + +// ZeroizeString attempts to clear a string reference (limited effectiveness) +// Note: Go strings are immutable, so this only clears the reference, not the underlying data +func ZeroizeString(s *string) { + if s == nil { + return + } + // Simply clear the reference - Go strings are immutable + *s = "" +} + +// SecureString wraps a string with secure cleanup capabilities +type SecureString struct { + value string + mu sync.RWMutex + finalized bool +} + +// NewSecureString creates a new SecureString with automatic cleanup +func NewSecureString(s string) *SecureString { + ss := &SecureString{ + value: s, + } + runtime.SetFinalizer(ss, (*SecureString).finalize) + return ss +} + +// String returns the secure string value +func (ss *SecureString) String() string { + ss.mu.RLock() + defer ss.mu.RUnlock() + + if ss.finalized { + return "" + } + return ss.value +} + +// Clear attempts to zero the string and marks it as finalized +func (ss *SecureString) Clear() { + ss.mu.Lock() + defer ss.mu.Unlock() + + if !ss.finalized { + ZeroizeString(&ss.value) + ss.value = "" + ss.finalized = true + runtime.SetFinalizer(ss, nil) + } +} + +// finalize is called by the garbage collector +func (ss *SecureString) finalize() { + ss.Clear() +} + +// IsEmpty checks if the secure string is empty +func (ss *SecureString) IsEmpty() bool { + ss.mu.RLock() + defer ss.mu.RUnlock() + + return ss.finalized || ss.value == "" +} + +// SecureBuffer provides a reusable buffer for sensitive operations +type SecureBuffer struct { + buffer []byte + mu sync.Mutex +} + +// NewSecureBuffer creates a new secure buffer with the specified capacity +func NewSecureBuffer(capacity int) *SecureBuffer { + if capacity <= 0 { + capacity = 1024 // Default capacity + } + + sb := &SecureBuffer{ + buffer: make([]byte, 0, capacity), + } + runtime.SetFinalizer(sb, (*SecureBuffer).finalize) + return sb +} + +// Write appends data to the secure buffer +func (sb *SecureBuffer) Write(data []byte) error { + sb.mu.Lock() + defer sb.mu.Unlock() + + if len(sb.buffer)+len(data) > cap(sb.buffer) { + return fmt.Errorf("buffer overflow: capacity %d, current size %d, write size %d", + cap(sb.buffer), len(sb.buffer), len(data)) + } + + sb.buffer = append(sb.buffer, data...) 
+ return nil +} + +// Read returns a copy of the buffer contents +func (sb *SecureBuffer) Read() []byte { + sb.mu.Lock() + defer sb.mu.Unlock() + + result := make([]byte, len(sb.buffer)) + copy(result, sb.buffer) + return result +} + +// Reset clears the buffer contents but maintains capacity +func (sb *SecureBuffer) Reset() { + sb.mu.Lock() + defer sb.mu.Unlock() + + if len(sb.buffer) > 0 { + Zeroize(sb.buffer[:cap(sb.buffer)]) // Zero the entire backing array + sb.buffer = sb.buffer[:0] // Reset length to 0 + } +} + +// Clear zeros the buffer and releases memory +func (sb *SecureBuffer) Clear() { + sb.mu.Lock() + defer sb.mu.Unlock() + + if sb.buffer != nil { + Zeroize(sb.buffer[:cap(sb.buffer)]) // Zero entire backing array + sb.buffer = nil + runtime.SetFinalizer(sb, nil) + } +} + +// finalize is called by the garbage collector +func (sb *SecureBuffer) finalize() { + sb.Clear() +} + +// Size returns the current size of data in the buffer +func (sb *SecureBuffer) Size() int { + sb.mu.Lock() + defer sb.mu.Unlock() + + return len(sb.buffer) +} + +// Capacity returns the maximum capacity of the buffer +func (sb *SecureBuffer) Capacity() int { + sb.mu.Lock() + defer sb.mu.Unlock() + + if sb.buffer == nil { + return 0 + } + return cap(sb.buffer) +} + +// ZeroizeMultiple zeros multiple byte slices in a single call +func ZeroizeMultiple(slices ...[]byte) { + for _, slice := range slices { + Zeroize(slice) + } +} + +// SecureCompare performs constant-time comparison of two byte slices +// Returns true if the slices are equal, false otherwise +func SecureCompare(a, b []byte) bool { + if len(a) != len(b) { + return false + } + + var result byte + for i := 0; i < len(a); i++ { + result |= a[i] ^ b[i] + } + + return result == 0 +} + +// SecureRandom fills the provided slice with cryptographically secure random bytes +func SecureRandom(data []byte) error { + if len(data) == 0 { + return nil + } + + // Use Go's crypto/rand for secure random generation + if _, err := rand.Read(data); err != nil { + return fmt.Errorf("failed to generate secure random bytes: %w", err) + } + + return nil +} diff --git a/secure/memory_test.go b/secure/memory_test.go new file mode 100644 index 0000000..d2b7e35 --- /dev/null +++ b/secure/memory_test.go @@ -0,0 +1,523 @@ +package secure + +import ( + "bytes" + "fmt" + "runtime" + "testing" + "time" +) + +func TestZeroize(t *testing.T) { + tests := []struct { + name string + data []byte + }{ + {"empty slice", []byte{}}, + {"single byte", []byte{0xFF}}, + {"small slice", []byte{1, 2, 3, 4, 5}}, + {"large slice", make([]byte, 1024)}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + // Fill with non-zero data + for i := range tt.data { + tt.data[i] = byte(i%256 + 1) + } + + // Store original for verification + original := make([]byte, len(tt.data)) + copy(original, tt.data) + + // Zeroize + Zeroize(tt.data) + + // Verify all bytes are zero + for i, b := range tt.data { + if b != 0 { + t.Errorf("Byte at index %d not zeroed: got %d, want 0", i, b) + } + } + + // Verify original data was actually non-zero (for non-empty slices) + if len(original) > 0 { + hasNonZero := false + for _, b := range original { + if b != 0 { + hasNonZero = true + break + } + } + if !hasNonZero { + t.Error("Test data was already all zeros - invalid test") + } + } + }) + } +} + +func TestSecureBytes(t *testing.T) { + t.Run("NewSecureBytes", func(t *testing.T) { + sb := NewSecureBytes(32) + if sb == nil { + t.Fatal("NewSecureBytes returned nil") + } + if sb.Size() != 32 { + 
t.Errorf("Size() = %d, want 32", sb.Size()) + } + if sb.IsEmpty() { + t.Error("NewSecureBytes should not be empty") + } + sb.Clear() + }) + + t.Run("zero size", func(t *testing.T) { + sb := NewSecureBytes(0) + if sb.Size() != 0 { + t.Errorf("Size() = %d, want 0", sb.Size()) + } + if !sb.IsEmpty() { + t.Error("Zero-size SecureBytes should be empty") + } + }) + + t.Run("FromBytes", func(t *testing.T) { + original := []byte{1, 2, 3, 4, 5} + sb := FromBytes(original) + + if sb.Size() != len(original) { + t.Errorf("Size() = %d, want %d", sb.Size(), len(original)) + } + + retrieved := sb.Bytes() + if !bytes.Equal(retrieved, original) { + t.Error("Retrieved bytes don't match original") + } + + // Verify independence - modifying original shouldn't affect SecureBytes + original[0] = 99 + retrieved2 := sb.Bytes() + if retrieved2[0] == 99 { + t.Error("SecureBytes was affected by external modification") + } + + sb.Clear() + }) + + t.Run("Bytes returns copy", func(t *testing.T) { + sb := NewSecureBytes(16) + + bytes1 := sb.Bytes() + bytes2 := sb.Bytes() + + // Should be equal content + if !bytes.Equal(bytes1, bytes2) { + t.Error("Multiple Bytes() calls returned different content") + } + + // Should be different slices + if len(bytes1) > 0 && &bytes1[0] == &bytes2[0] { + t.Error("Bytes() returned same underlying array") + } + + // Modifying returned slice shouldn't affect SecureBytes + if len(bytes1) > 0 { + bytes1[0] = 0xFF + bytes3 := sb.Bytes() + if bytes3[0] == 0xFF { + t.Error("External modification affected SecureBytes") + } + } + + sb.Clear() + }) + + t.Run("Clear", func(t *testing.T) { + data := []byte{1, 2, 3, 4, 5} + sb := FromBytes(data) + + if sb.IsEmpty() { + t.Error("SecureBytes should not be empty before Clear") + } + + sb.Clear() + + if !sb.IsEmpty() { + t.Error("SecureBytes should be empty after Clear") + } + + if sb.Size() != 0 { + t.Error("Size should be 0 after Clear") + } + + bytes := sb.Bytes() + if bytes != nil { + t.Error("Bytes() should return nil after Clear") + } + }) + + t.Run("CopyTo", func(t *testing.T) { + sb := NewSecureBytes(10) + data := []byte{1, 2, 3, 4, 5} + + err := sb.CopyTo(data) + if err != nil { + t.Fatalf("CopyTo failed: %v", err) + } + + retrieved := sb.Bytes() + if !bytes.Equal(retrieved[:len(data)], data) { + t.Error("CopyTo didn't copy data correctly") + } + + // Test overflow + largeData := make([]byte, 20) + err = sb.CopyTo(largeData) + if err == nil { + t.Error("CopyTo should fail with oversized data") + } + + sb.Clear() + + // Test copy to finalized + err = sb.CopyTo(data) + if err == nil { + t.Error("CopyTo should fail on finalized SecureBytes") + } + }) +} + +func TestSecureString(t *testing.T) { + t.Run("basic operations", func(t *testing.T) { + original := "sensitive data" + ss := NewSecureString(original) + + if ss.String() != original { + t.Error("String() doesn't match original") + } + + if ss.IsEmpty() { + t.Error("SecureString should not be empty") + } + + ss.Clear() + + if !ss.IsEmpty() { + t.Error("SecureString should be empty after Clear") + } + + if ss.String() != "" { + t.Error("String() should return empty string after Clear") + } + }) + + t.Run("empty string", func(t *testing.T) { + ss := NewSecureString("") + if !ss.IsEmpty() { + t.Error("Empty SecureString should report as empty") + } + ss.Clear() + }) +} + +func TestSecureBuffer(t *testing.T) { + t.Run("basic operations", func(t *testing.T) { + sb := NewSecureBuffer(100) + + if sb.Size() != 0 { + t.Error("New buffer should have size 0") + } + + if sb.Capacity() != 100 { + 
t.Errorf("Capacity() = %d, want 100", sb.Capacity()) + } + + // Write data + data1 := []byte("hello") + err := sb.Write(data1) + if err != nil { + t.Fatalf("Write failed: %v", err) + } + + if sb.Size() != len(data1) { + t.Errorf("Size() = %d, want %d", sb.Size(), len(data1)) + } + + // Write more data + data2 := []byte(" world") + err = sb.Write(data2) + if err != nil { + t.Fatalf("Second write failed: %v", err) + } + + expected := append(data1, data2...) + result := sb.Read() + if !bytes.Equal(result, expected) { + t.Errorf("Read() = %q, want %q", string(result), string(expected)) + } + + sb.Clear() + }) + + t.Run("overflow protection", func(t *testing.T) { + sb := NewSecureBuffer(10) + + // Fill to capacity + data := make([]byte, 10) + err := sb.Write(data) + if err != nil { + t.Fatalf("Write to capacity failed: %v", err) + } + + // Try to overflow + err = sb.Write([]byte{1}) + if err == nil { + t.Error("Write should fail on buffer overflow") + } + + sb.Clear() + }) + + t.Run("reset", func(t *testing.T) { + sb := NewSecureBuffer(50) + + data := []byte("test data") + err := sb.Write(data) + if err != nil { + t.Fatalf("Write failed: %v", err) + } + + if sb.Size() == 0 { + t.Error("Buffer should not be empty before reset") + } + + sb.Reset() + + if sb.Size() != 0 { + t.Error("Buffer should be empty after reset") + } + + if sb.Capacity() != 50 { + t.Error("Capacity should be preserved after reset") + } + + // Should be able to write again + err = sb.Write([]byte("new data")) + if err != nil { + t.Error("Should be able to write after reset") + } + + sb.Clear() + }) +} + +func TestZeroizeMultiple(t *testing.T) { + slice1 := []byte{1, 2, 3} + slice2 := []byte{4, 5, 6} + slice3 := []byte{7, 8, 9} + + ZeroizeMultiple(slice1, slice2, slice3) + + slices := [][]byte{slice1, slice2, slice3} + for i, slice := range slices { + for j, b := range slice { + if b != 0 { + t.Errorf("Slice %d, byte %d not zeroed: got %d", i, j, b) + } + } + } +} + +func TestSecureCompare(t *testing.T) { + tests := []struct { + name string + a, b []byte + expected bool + }{ + {"equal slices", []byte{1, 2, 3}, []byte{1, 2, 3}, true}, + {"different content", []byte{1, 2, 3}, []byte{1, 2, 4}, false}, + {"different length", []byte{1, 2, 3}, []byte{1, 2}, false}, + {"both empty", []byte{}, []byte{}, true}, + {"one empty", []byte{1}, []byte{}, false}, + {"both nil", nil, nil, true}, + {"one nil", []byte{1}, nil, false}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := SecureCompare(tt.a, tt.b) + if result != tt.expected { + t.Errorf("SecureCompare() = %v, want %v", result, tt.expected) + } + }) + } +} + +func TestSecureRandom(t *testing.T) { + sizes := []int{0, 1, 16, 32, 1024} + + for _, size := range sizes { + t.Run(fmt.Sprintf("%d bytes", size), func(t *testing.T) { + data := make([]byte, size) + err := SecureRandom(data) + if err != nil { + t.Fatalf("SecureRandom failed: %v", err) + } + + if size == 0 { + return // Nothing to verify for empty slice + } + + // For non-zero sizes, verify we got some randomness + // (Note: there's a tiny chance this could fail with truly random data) + allZeros := true + allSame := true + first := data[0] + + for _, b := range data { + if b != 0 { + allZeros = false + } + if b != first { + allSame = false + } + } + + if size > 1 { + if allZeros { + t.Error("SecureRandom returned all zeros (suspicious)") + } + if allSame { + t.Error("SecureRandom returned all same values (suspicious)") + } + } + }) + } +} + +// Test that finalizers work correctly (this is tricky 
to test reliably) +func TestFinalizers(t *testing.T) { + t.Run("SecureBytes finalizer", func(t *testing.T) { + // Create a SecureBytes and let it go out of scope + func() { + sb := NewSecureBytes(32) + // Fill with test data + data := make([]byte, 32) + for i := range data { + data[i] = byte(i + 1) + } + sb.CopyTo(data) + // sb goes out of scope here + }() + + // Force garbage collection + runtime.GC() + runtime.GC() + time.Sleep(10 * time.Millisecond) + + // We can't easily verify the finalizer ran, but this tests that + // the finalizer doesn't cause a panic + }) + + t.Run("SecureString finalizer", func(t *testing.T) { + func() { + ss := NewSecureString("test data") + _ = ss.String() + // ss goes out of scope here + }() + + runtime.GC() + runtime.GC() + time.Sleep(10 * time.Millisecond) + }) + + t.Run("SecureBuffer finalizer", func(t *testing.T) { + func() { + sb := NewSecureBuffer(64) + sb.Write([]byte("test data")) + // sb goes out of scope here + }() + + runtime.GC() + runtime.GC() + time.Sleep(10 * time.Millisecond) + }) +} + +func BenchmarkZeroize(b *testing.B) { + sizes := []int{32, 256, 1024, 4096} + + for _, size := range sizes { + b.Run(fmt.Sprintf("%dB", size), func(b *testing.B) { + data := make([]byte, size) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + // Fill with data + for j := range data { + data[j] = byte(j) + } + // Zeroize + Zeroize(data) + } + }) + } +} + +func BenchmarkSecureCompare(b *testing.B) { + sizes := []int{16, 32, 256, 1024} + + for _, size := range sizes { + b.Run(fmt.Sprintf("%dB", size), func(b *testing.B) { + a := make([]byte, size) + b_slice := make([]byte, size) + + // Fill with identical data + for i := range a { + a[i] = byte(i) + b_slice[i] = byte(i) + } + + b.ResetTimer() + for i := 0; i < b.N; i++ { + SecureCompare(a, b_slice) + } + }) + } +} + +func BenchmarkSecureBytes(b *testing.B) { + b.Run("NewSecureBytes", func(b *testing.B) { + for i := 0; i < b.N; i++ { + sb := NewSecureBytes(32) + sb.Clear() + } + }) + + b.Run("Bytes", func(b *testing.B) { + sb := NewSecureBytes(32) + defer sb.Clear() + + b.ResetTimer() + for i := 0; i < b.N; i++ { + data := sb.Bytes() + _ = data + } + }) + + b.Run("CopyTo", func(b *testing.B) { + sb := NewSecureBytes(32) + defer sb.Clear() + + testData := make([]byte, 16) + for i := range testData { + testData[i] = byte(i) + } + + b.ResetTimer() + for i := 0; i < b.N; i++ { + sb.CopyTo(testData) + } + }) +} diff --git a/security_test.go b/security_test.go new file mode 100644 index 0000000..9fcf97a --- /dev/null +++ b/security_test.go @@ -0,0 +1,466 @@ +// Package crypto provides comprehensive security tests for cryptographic implementations +package crypto + +import ( + "crypto/ecdsa" + "crypto/elliptic" + "crypto/rand" + "crypto/sha256" + "fmt" + "math/big" + "sync" + "testing" + "time" + + "github.com/sonr-io/sonr/crypto/argon2" + ecdsaPkg "github.com/sonr-io/sonr/crypto/ecdsa" + "github.com/sonr-io/sonr/crypto/password" + "github.com/sonr-io/sonr/crypto/wasm" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +// TestTimingAttackResistance verifies constant-time operations +func TestTimingAttackResistance(t *testing.T) { + // Test Argon2 constant-time comparison + kdf := argon2.New(argon2.LightConfig()) + password := []byte("correct-password") + hash, err := kdf.HashPassword(password) + require.NoError(t, err) + + // Measure timing for correct vs incorrect passwords + correctTimes := make([]time.Duration, 100) + incorrectTimes := make([]time.Duration, 100) + + for i := 0; i < 
100; i++ { + // Time correct password + start := time.Now() + _, _ = argon2.VerifyPassword(password, hash) + correctTimes[i] = time.Since(start) + + // Time incorrect password + wrongPassword := []byte("wrong-password-x") + start = time.Now() + _, _ = argon2.VerifyPassword(wrongPassword, hash) + incorrectTimes[i] = time.Since(start) + } + + // Calculate average times + var correctAvg, incorrectAvg time.Duration + for i := 0; i < 100; i++ { + correctAvg += correctTimes[i] + incorrectAvg += incorrectTimes[i] + } + correctAvg /= 100 + incorrectAvg /= 100 + + // Times should be similar (within 20% variance) + diff := correctAvg - incorrectAvg + if diff < 0 { + diff = -diff + } + maxDiff := correctAvg / 5 // 20% threshold + + assert.Less(t, diff, maxDiff, "timing difference suggests non-constant-time comparison") +} + +// TestSignatureMalleabilityAttack tests protection against signature malleability +func TestSignatureMalleabilityAttack(t *testing.T) { + priv, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + message := []byte("transaction data") + hash := sha256.Sum256(message) + + // Create deterministic signature + r, s, err := ecdsaPkg.DeterministicSign(priv, hash[:]) + require.NoError(t, err) + + // Verify original signature + assert.True(t, ecdsa.Verify(&priv.PublicKey, hash[:], r, s)) + + // Create malleable signature (r, -s mod N) + N := priv.Curve.Params().N + sMalleable := new(big.Int).Sub(N, s) + + // Standard ECDSA would accept this, but our canonical check should reject it + assert.False(t, ecdsaPkg.VerifyDeterministic(&priv.PublicKey, hash[:], r, sMalleable), + "malleable signature should be rejected") + + // Canonicalize should fix it + rCanon, sCanon, err := ecdsaPkg.CanonicalizeSignature(r, sMalleable, priv.Curve) + require.NoError(t, err) + assert.Equal(t, s, sCanon, "canonicalized signature should match original") + assert.True(t, ecdsa.Verify(&priv.PublicKey, hash[:], rCanon, sCanon)) +} + +// TestNonceReuseAttack verifies protection against nonce reuse +func TestNonceReuseAttack(t *testing.T) { + priv, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + // Sign same message multiple times with deterministic ECDSA + message := []byte("sensitive data") + hash := sha256.Sum256(message) + + signatures := make([]struct{ r, s *big.Int }, 10) + for i := 0; i < 10; i++ { + r, s, err := ecdsaPkg.DeterministicSign(priv, hash[:]) + require.NoError(t, err) + signatures[i].r = r + signatures[i].s = s + } + + // All signatures should be identical (deterministic) + for i := 1; i < 10; i++ { + assert.Equal(t, signatures[0].r, signatures[i].r, + "deterministic signatures should use same nonce") + assert.Equal(t, signatures[0].s, signatures[i].s, + "deterministic signatures should be identical") + } + + // Different messages should use different nonces + message2 := []byte("different data") + hash2 := sha256.Sum256(message2) + r2, s2, err := ecdsaPkg.DeterministicSign(priv, hash2[:]) + require.NoError(t, err) + + assert.NotEqual(t, signatures[0].r, r2, + "different messages must use different nonces") + assert.NotEqual(t, signatures[0].s, s2, + "different messages must produce different signatures") +} + +// TestPasswordDictionaryAttack tests resistance to dictionary attacks +func TestPasswordDictionaryAttack(t *testing.T) { + commonPasswords := []string{ + "password", "123456", "password123", "admin", "letmein", + "welcome", "monkey", "dragon", "master", "qwerty", + } + + validator := 
password.NewValidator(password.DefaultPasswordConfig()) + + // All common passwords should be rejected + for _, pwd := range commonPasswords { + err := validator.Validate([]byte(pwd)) + assert.Error(t, err, "common password '%s' should be rejected", pwd) + } + + // Test that Argon2 makes dictionary attacks expensive + kdf := argon2.New(argon2.DefaultConfig()) + + start := time.Now() + for _, pwd := range commonPasswords { + hash, err := kdf.HashPassword([]byte(pwd)) + require.NoError(t, err) + + // Try to crack with dictionary + for _, attempt := range commonPasswords { + _, _ = argon2.VerifyPassword([]byte(attempt), hash) + } + } + elapsed := time.Since(start) + + // Should take significant time (> 1 second for 100 attempts) + assert.Greater(t, elapsed, 1*time.Second, + "Argon2 should make dictionary attacks expensive") +} + +// TestWASMHashCollisionAttack tests resistance to hash collision attacks +func TestWASMHashCollisionAttack(t *testing.T) { + verifier := wasm.NewHashVerifier() + + // Create two different modules + module1 := []byte("wasm module version 1.0") + module2 := []byte("wasm module version 2.0") + + hash1 := verifier.ComputeHash(module1) + hash2 := verifier.ComputeHash(module2) + + // Hashes must be different + assert.NotEqual(t, hash1, hash2, + "different modules must have different hashes") + + // Test collision resistance with similar modules + similarModules := make([][]byte, 100) + hashes := make(map[string]bool) + + for i := 0; i < 100; i++ { + similarModules[i] = []byte(fmt.Sprintf("wasm module version 1.%d", i)) + hash := verifier.ComputeHash(similarModules[i]) + + // Check for collisions + assert.False(t, hashes[hash], + "hash collision detected for module %d", i) + hashes[hash] = true + } +} + +// TestRaceConditionSafety tests thread safety of cryptographic operations +func TestRaceConditionSafety(t *testing.T) { + // Test concurrent Argon2 operations + kdf := argon2.New(argon2.LightConfig()) + password := []byte("test-password") + + var wg sync.WaitGroup + errors := make(chan error, 100) + + for i := 0; i < 100; i++ { + wg.Add(1) + go func() { + defer wg.Done() + + hash, err := kdf.HashPassword(password) + if err != nil { + errors <- err + return + } + + valid, err := argon2.VerifyPassword(password, hash) + if err != nil { + errors <- err + return + } + if !valid { + errors <- fmt.Errorf("password verification failed") + } + }() + } + + wg.Wait() + close(errors) + + // Check for any errors + for err := range errors { + t.Errorf("concurrent operation failed: %v", err) + } + + // Test concurrent ECDSA signing + priv, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + + message := []byte("concurrent test") + hash := sha256.Sum256(message) + + signatures := make(chan struct{ r, s *big.Int }, 100) + var sigWg sync.WaitGroup + + for i := 0; i < 100; i++ { + sigWg.Add(1) + go func() { + defer sigWg.Done() + + r, s, err := ecdsaPkg.DeterministicSign(priv, hash[:]) + if err == nil { + signatures <- struct{ r, s *big.Int }{r, s} + } + }() + } + + sigWg.Wait() + close(signatures) + + // All signatures should be identical (deterministic) + var firstSig struct{ r, s *big.Int } + count := 0 + for sig := range signatures { + if count == 0 { + firstSig = sig + } else { + assert.Equal(t, firstSig.r, sig.r, + "concurrent signatures should be identical") + assert.Equal(t, firstSig.s, sig.s, + "concurrent signatures should be identical") + } + count++ + } + assert.Equal(t, 100, count, "all concurrent operations should succeed") +} + +// 
TestMemoryExhaustionAttack tests resistance to memory exhaustion +func TestMemoryExhaustionAttack(t *testing.T) { + // Test with high memory Argon2 config + config := &argon2.Config{ + Time: 1, + Memory: 128 * 1024, // 128MB + Parallelism: 4, + SaltLength: 32, + KeyLength: 32, + } + + // Validate config prevents excessive memory use + err := argon2.ValidateConfig(config) + assert.NoError(t, err, "reasonable memory config should be valid") + + // Test with excessive memory request + excessiveConfig := &argon2.Config{ + Time: 1, + Memory: 4 * 1024 * 1024, // 4GB - should be rejected + Parallelism: 4, + SaltLength: 32, + KeyLength: 32, + } + + // This should be caught by reasonable implementations + kdf := argon2.New(excessiveConfig) + + // Attempt derivation with memory limit + password := []byte("test") + salt := make([]byte, 32) + _, _ = rand.Read(salt) + + // Monitor memory usage (simplified test) + start := time.Now() + _ = kdf.DeriveKey(password, salt) + elapsed := time.Since(start) + + // Excessive memory should cause noticeable delay + assert.Less(t, elapsed, 10*time.Second, + "operation should complete in reasonable time") +} + +// TestSaltReuseVulnerability tests that salts are unique +func TestSaltReuseVulnerability(t *testing.T) { + kdf := argon2.New(argon2.DefaultConfig()) + + salts := make(map[string]bool) + + // Generate many salts + for i := 0; i < 1000; i++ { + salt, err := kdf.GenerateSalt() + require.NoError(t, err) + + saltStr := string(salt) + assert.False(t, salts[saltStr], + "salt reuse detected at iteration %d", i) + salts[saltStr] = true + } +} + +// TestWeakRandomnessDetection tests quality of random number generation +func TestWeakRandomnessDetection(t *testing.T) { + // Generate multiple ECDSA keys and check for patterns + keys := make([]*ecdsa.PrivateKey, 100) + + for i := 0; i < 100; i++ { + key, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + require.NoError(t, err) + keys[i] = key + } + + // Check that all private keys are unique + seen := make(map[string]bool) + for i, key := range keys { + keyStr := key.D.String() + assert.False(t, seen[keyStr], + "duplicate private key detected at index %d", i) + seen[keyStr] = true + } + + // Check distribution (simplified chi-square test) + // Count leading bits + zeros := 0 + ones := 0 + for _, key := range keys { + if key.D.Bit(0) == 0 { + zeros++ + } else { + ones++ + } + } + + // Should be roughly 50/50 distribution + // For 100 samples, we expect ~50 each, but allow for statistical variance + // Using binomial distribution, 99% confidence interval is approximately ±3 standard deviations + // Standard deviation = sqrt(n*p*(1-p)) = sqrt(100*0.5*0.5) = 5 + // So we allow difference up to 3*5 = 15, but we'll be more lenient with 25 + diff := zeros - ones + if diff < 0 { + diff = -diff + } + assert.Less(t, diff, 25, + "random bit distribution appears biased: %d zeros, %d ones", zeros, ones) +} + +// TestCryptographicAgility tests ability to switch algorithms +func TestCryptographicAgility(t *testing.T) { + // Test different Argon2 configurations + configs := []*argon2.Config{ + argon2.LightConfig(), + argon2.DefaultConfig(), + argon2.HighSecurityConfig(), + } + + password := []byte("test-password") + + for i, config := range configs { + kdf := argon2.New(config) + + hash, err := kdf.HashPassword(password) + require.NoError(t, err, "config %d should work", i) + + // Verify with same config + valid, err := argon2.VerifyPassword(password, hash) + require.NoError(t, err) + assert.True(t, valid, "config %d verification 
should succeed", i) + } + + // Test different elliptic curves + curves := []elliptic.Curve{ + elliptic.P224(), + elliptic.P256(), + elliptic.P384(), + elliptic.P521(), + } + + message := []byte("test message") + hashMsg := sha256.Sum256(message) + + for _, curve := range curves { + priv, err := ecdsa.GenerateKey(curve, rand.Reader) + require.NoError(t, err) + + r, s, err := ecdsaPkg.DeterministicSign(priv, hashMsg[:]) + require.NoError(t, err) + + valid := ecdsa.Verify(&priv.PublicKey, hashMsg[:], r, s) + assert.True(t, valid, "curve %s should work", curve.Params().Name) + } +} + +// BenchmarkSecurityOperations benchmarks security-critical operations +func BenchmarkSecurityOperations(b *testing.B) { + b.Run("Argon2Default", func(b *testing.B) { + kdf := argon2.New(argon2.DefaultConfig()) + password := []byte("benchmark-password") + salt, _ := kdf.GenerateSalt() + + b.ResetTimer() + for i := 0; i < b.N; i++ { + _ = kdf.DeriveKey(password, salt) + } + }) + + b.Run("ECDSADeterministic", func(b *testing.B) { + priv, _ := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + hash := sha256.Sum256([]byte("benchmark")) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + _, _, _ = ecdsaPkg.DeterministicSign(priv, hash[:]) + } + }) + + b.Run("WASMHashVerification", func(b *testing.B) { + verifier := wasm.NewHashVerifier() + module := make([]byte, 1024*1024) // 1MB module + rand.Read(module) + hash := verifier.ComputeHash(module) + verifier.AddTrustedHash("bench", hash) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + _ = verifier.VerifyHash("bench", module) + } + }) +} diff --git a/sharing/README.md b/sharing/README.md new file mode 100755 index 0000000..03874fd --- /dev/null +++ b/sharing/README.md @@ -0,0 +1,16 @@ +--- +aliases: [README] +tags: [] +title: README +linter-yaml-title-alias: README +date created: Wednesday, April 17th 2024, 4:11:40 pm +date modified: Thursday, April 18th 2024, 8:19:25 am +--- + +## Shamir Secret Sharing Scheme + +The code is an implementation of the following papers. + +- +- ~gasarch/TOPICS/secretsharing/feldmanVSS.pdf +- diff --git a/sharing/ed25519_feldman_test.go b/sharing/ed25519_feldman_test.go new file mode 100644 index 0000000..c1fed0f --- /dev/null +++ b/sharing/ed25519_feldman_test.go @@ -0,0 +1,132 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package sharing + +import ( + crand "crypto/rand" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +var testCurve = curves.ED25519() + +func TestEd25519FeldmanSplitInvalidArgs(t *testing.T) { + _, err := NewFeldman(0, 0, testCurve) + require.NotNil(t, err) + _, err = NewFeldman(3, 2, testCurve) + require.NotNil(t, err) + _, err = NewFeldman(1, 10, testCurve) + require.NotNil(t, err) + scheme, err := NewFeldman(2, 3, testCurve) + require.Nil(t, err) + require.NotNil(t, scheme) + _, _, err = scheme.Split(testCurve.NewScalar(), crand.Reader) + require.NotNil(t, err) +} + +func TestEd25519FeldmanCombineNoShares(t *testing.T) { + scheme, err := NewFeldman(2, 3, testCurve) + require.Nil(t, err) + require.NotNil(t, scheme) + _, err = scheme.Combine() + require.NotNil(t, err) +} + +func TestEd25519FeldmanCombineDuplicateShare(t *testing.T) { + scheme, err := NewFeldman(2, 3, testCurve) + require.Nil(t, err) + require.NotNil(t, scheme) + _, err = scheme.Combine([]*ShamirShare{ + { + Id: 1, + Value: testCurve.Scalar.New(3).Bytes(), + }, + { + Id: 1, + Value: testCurve.Scalar.New(3).Bytes(), + }, + }...) 
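// For orientation, a minimal happy-path use of the Feldman API exercised by
// these tests might look like the sketch below. Names match this package and
// the curves package exactly as they appear in this diff; error handling is
// elided and the snippet assumes the same imports as this test file:
//
//	curve := curves.ED25519()
//	scheme, _ := NewFeldman(2, 3, curve)                 // 2-of-3 sharing
//	secret := curve.Scalar.Hash([]byte("example secret"))
//	verifier, shares, _ := scheme.Split(secret, crand.Reader)
//	for _, s := range shares {
//	    _ = verifier.Verify(s)                           // per-share commitment check
//	}
//	recovered, _ := scheme.Combine(shares[0], shares[2]) // any 2 of the 3 shares suffice
//	// recovered now equals secret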
+ require.NotNil(t, err) +} + +func TestEd25519FeldmanCombineBadIdentifier(t *testing.T) { + scheme, err := NewFeldman(2, 3, testCurve) + require.Nil(t, err) + require.NotNil(t, scheme) + shares := []*ShamirShare{ + { + Id: 0, + Value: testCurve.Scalar.New(3).Bytes(), + }, + { + Id: 2, + Value: testCurve.Scalar.New(3).Bytes(), + }, + } + _, err = scheme.Combine(shares...) + require.NotNil(t, err) + shares[0] = &ShamirShare{ + Id: 4, + Value: testCurve.Scalar.New(3).Bytes(), + } + _, err = scheme.Combine(shares...) + require.NotNil(t, err) +} + +func TestEd25519FeldmanCombineSingle(t *testing.T) { + scheme, err := NewFeldman(2, 3, testCurve) + require.Nil(t, err) + require.NotNil(t, scheme) + + secret := testCurve.Scalar.Hash([]byte("test")) + verifiers, shares, err := scheme.Split(secret, crand.Reader) + require.Nil(t, err) + require.NotNil(t, shares) + for _, s := range shares { + err = verifiers.Verify(s) + require.Nil(t, err) + } + secret2, err := scheme.Combine(shares...) + require.Nil(t, err) + require.Equal(t, secret2, secret) +} + +func TestEd25519FeldmanAllCombinations(t *testing.T) { + scheme, err := NewFeldman(3, 5, testCurve) + require.Nil(t, err) + require.NotNil(t, scheme) + + secret := testCurve.Scalar.Hash([]byte("test")) + verifiers, shares, err := scheme.Split(secret, crand.Reader) + for _, s := range shares { + err = verifiers.Verify(s) + require.Nil(t, err) + } + require.Nil(t, err) + require.NotNil(t, shares) + // There are 5*4*3 possible combinations + for i := 0; i < 5; i++ { + for j := 0; j < 5; j++ { + if i == j { + continue + } + for k := 0; k < 5; k++ { + if i == k || j == k { + continue + } + + rSecret, err := scheme.Combine(shares[i], shares[j], shares[k]) + require.Nil(t, err) + require.NotNil(t, rSecret) + require.Equal(t, rSecret, secret) + } + } + } +} diff --git a/sharing/feldman.go b/sharing/feldman.go new file mode 100644 index 0000000..3a665f0 --- /dev/null +++ b/sharing/feldman.go @@ -0,0 +1,115 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
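// Note on the verification equation implemented below: with published
// commitments C_j = a_j*G to the coefficients of the sharing polynomial f,
// a share (id, f(id)) is accepted when
//
//	f(id)*G == C_0 + id*C_1 + id^2*C_2 + ... + id^(t-1)*C_(t-1)
//
// Verify accumulates the right-hand side with a running power of the share
// index. An illustrative standalone version of that loop, using the
// curves.Point/Scalar interfaces the same way Verify does:
//
//	x := curve.Scalar.New(int(share.Id))
//	pow := curve.Scalar.One()
//	rhs := commitments[0]
//	for j := 1; j < len(commitments); j++ {
//	    pow = pow.Mul(x)
//	    rhs = rhs.Add(commitments[j].Mul(pow))
//	}
//	sc, _ := curve.Scalar.SetBytes(share.Value)
//	ok := commitments[0].Generator().Mul(sc).Equal(rhs)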
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package sharing + +import ( + "fmt" + "io" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +type FeldmanVerifier struct { + Commitments []curves.Point +} + +func (v FeldmanVerifier) Verify(share *ShamirShare) error { + curve := curves.GetCurveByName(v.Commitments[0].CurveName()) + err := share.Validate(curve) + if err != nil { + return err + } + x := curve.Scalar.New(int(share.Id)) + i := curve.Scalar.One() + rhs := v.Commitments[0] + + for j := 1; j < len(v.Commitments); j++ { + i = i.Mul(x) + rhs = rhs.Add(v.Commitments[j].Mul(i)) + } + sc, _ := curve.Scalar.SetBytes(share.Value) + lhs := v.Commitments[0].Generator().Mul(sc) + + if lhs.Equal(rhs) { + return nil + } else { + return fmt.Errorf("not equal") + } +} + +type Feldman struct { + Threshold, Limit uint32 + Curve *curves.Curve +} + +func NewFeldman(threshold, limit uint32, curve *curves.Curve) (*Feldman, error) { + if limit < threshold { + return nil, fmt.Errorf("limit cannot be less than threshold") + } + if threshold < 2 { + return nil, fmt.Errorf("threshold cannot be less than 2") + } + if limit > 255 { + return nil, fmt.Errorf("cannot exceed 255 shares") + } + if curve == nil { + return nil, fmt.Errorf("invalid curve") + } + return &Feldman{threshold, limit, curve}, nil +} + +func (f Feldman) Split( + secret curves.Scalar, + reader io.Reader, +) (*FeldmanVerifier, []*ShamirShare, error) { + if secret.IsZero() { + return nil, nil, fmt.Errorf("invalid secret") + } + shamir := &Shamir{ + threshold: f.Threshold, + limit: f.Limit, + curve: f.Curve, + } + shares, poly := shamir.getPolyAndShares(secret, reader) + verifier := new(FeldmanVerifier) + verifier.Commitments = make([]curves.Point, f.Threshold) + for i := range verifier.Commitments { + verifier.Commitments[i] = f.Curve.ScalarBaseMult(poly.Coefficients[i]) + } + return verifier, shares, nil +} + +func (f Feldman) LagrangeCoeffs(shares map[uint32]*ShamirShare) (map[uint32]curves.Scalar, error) { + shamir := &Shamir{ + threshold: f.Threshold, + limit: f.Limit, + curve: f.Curve, + } + identities := make([]uint32, 0) + for _, xi := range shares { + identities = append(identities, xi.Id) + } + return shamir.LagrangeCoeffs(identities) +} + +func (f Feldman) Combine(shares ...*ShamirShare) (curves.Scalar, error) { + shamir := &Shamir{ + threshold: f.Threshold, + limit: f.Limit, + curve: f.Curve, + } + return shamir.Combine(shares...) +} + +func (f Feldman) CombinePoints(shares ...*ShamirShare) (curves.Point, error) { + shamir := &Shamir{ + threshold: f.Threshold, + limit: f.Limit, + curve: f.Curve, + } + return shamir.CombinePoints(shares...) +} diff --git a/sharing/pedersen.go b/sharing/pedersen.go new file mode 100644 index 0000000..a0757f4 --- /dev/null +++ b/sharing/pedersen.go @@ -0,0 +1,154 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
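// Note on the Pedersen relation implemented below: Split publishes blinded
// commitments C_j = a_j*G + b_j*H, where a_j are the coefficients of the
// secret polynomial, b_j those of the blinding polynomial, and H is the
// auxiliary generator supplied to NewPedersen. A pair of shares
// s_i = f(i) and t_i = g(i) is accepted by PedersenVerifier.Verify when
//
//	s_i*G + t_i*H == C_0 + i*C_1 + i^2*C_2 + ... + i^(t-1)*C_(t-1)
//
// Unlike Feldman commitments, the blinded C_j reveal nothing about the a_j
// while the blinding coefficients stay secret; Split additionally returns a
// plain FeldmanVerifier built from the unblinded commitments a_j*G for
// callers that want that check instead.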
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package sharing + +import ( + "fmt" + "io" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +// Pedersen Verifiable Secret Sharing Scheme +type Pedersen struct { + threshold, limit uint32 + curve *curves.Curve + generator curves.Point +} + +type PedersenVerifier struct { + Generator curves.Point + Commitments []curves.Point +} + +func (pv PedersenVerifier) Verify(share, blindShare *ShamirShare) error { + curve := curves.GetCurveByName(pv.Generator.CurveName()) + if err := share.Validate(curve); err != nil { + return err + } + if err := blindShare.Validate(curve); err != nil { + return err + } + + x := curve.Scalar.New(int(share.Id)) + i := curve.Scalar.One() + rhs := pv.Commitments[0] + + for j := 1; j < len(pv.Commitments); j++ { + i = i.Mul(x) + rhs = rhs.Add(pv.Commitments[j].Mul(i)) + } + + sc, _ := curve.Scalar.SetBytes(share.Value) + bsc, _ := curve.Scalar.SetBytes(blindShare.Value) + g := pv.Commitments[0].Generator().Mul(sc) + h := pv.Generator.Mul(bsc) + lhs := g.Add(h) + + if lhs.Equal(rhs) { + return nil + } else { + return fmt.Errorf("not equal") + } +} + +// PedersenResult contains all the data from calling Split +type PedersenResult struct { + Blinding curves.Scalar + BlindingShares, SecretShares []*ShamirShare + FeldmanVerifier *FeldmanVerifier + PedersenVerifier *PedersenVerifier +} + +// NewPedersen creates a new pedersen VSS +func NewPedersen(threshold, limit uint32, generator curves.Point) (*Pedersen, error) { + if limit < threshold { + return nil, fmt.Errorf("limit cannot be less than threshold") + } + if threshold < 2 { + return nil, fmt.Errorf("threshold cannot be less than 2") + } + if limit > 255 { + return nil, fmt.Errorf("cannot exceed 255 shares") + } + curve := curves.GetCurveByName(generator.CurveName()) + if curve == nil { + return nil, fmt.Errorf("invalid curve") + } + if generator == nil { + return nil, fmt.Errorf("invalid generator") + } + if !generator.IsOnCurve() || generator.IsIdentity() { + return nil, fmt.Errorf("invalid generator") + } + return &Pedersen{threshold, limit, curve, generator}, nil +} + +// Split creates the verifiers, blinding and shares +func (pd Pedersen) Split(secret curves.Scalar, reader io.Reader) (*PedersenResult, error) { + // generate a random blinding factor + blinding := pd.curve.Scalar.Random(reader) + + shamir := Shamir{pd.threshold, pd.limit, pd.curve} + // split the secret into shares + shares, poly := shamir.getPolyAndShares(secret, reader) + + // split the blinding into shares + blindingShares, polyBlinding := shamir.getPolyAndShares(blinding, reader) + + // Generate the verifiable commitments to the polynomial for the shares + blindedverifiers := make([]curves.Point, pd.threshold) + verifiers := make([]curves.Point, pd.threshold) + + // ({p0 * G + b0 * H}, ...,{pt * G + bt * H}) + for i, c := range poly.Coefficients { + s := pd.curve.ScalarBaseMult(c) + b := pd.generator.Mul(polyBlinding.Coefficients[i]) + bv := s.Add(b) + blindedverifiers[i] = bv + verifiers[i] = s + } + verifier1 := &FeldmanVerifier{Commitments: verifiers} + verifier2 := &PedersenVerifier{Commitments: blindedverifiers, Generator: pd.generator} + + return &PedersenResult{ + blinding, blindingShares, shares, verifier1, verifier2, + }, nil +} + +func (pd Pedersen) LagrangeCoeffs( + shares map[uint32]*ShamirShare, +) (map[uint32]curves.Scalar, error) { + shamir := &Shamir{ + threshold: pd.threshold, + limit: pd.limit, + curve: pd.curve, + } + identities := make([]uint32, 0) + for _, xi := range shares 
{ + identities = append(identities, xi.Id) + } + return shamir.LagrangeCoeffs(identities) +} + +func (pd Pedersen) Combine(shares ...*ShamirShare) (curves.Scalar, error) { + shamir := &Shamir{ + threshold: pd.threshold, + limit: pd.limit, + curve: pd.curve, + } + return shamir.Combine(shares...) +} + +func (pd Pedersen) CombinePoints(shares ...*ShamirShare) (curves.Point, error) { + shamir := &Shamir{ + threshold: pd.threshold, + limit: pd.limit, + curve: pd.curve, + } + return shamir.CombinePoints(shares...) +} diff --git a/sharing/polynomial.go b/sharing/polynomial.go new file mode 100644 index 0000000..390f214 --- /dev/null +++ b/sharing/polynomial.go @@ -0,0 +1,35 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package sharing + +import ( + "io" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +type Polynomial struct { + Coefficients []curves.Scalar +} + +func (p *Polynomial) Init(intercept curves.Scalar, degree uint32, reader io.Reader) *Polynomial { + p.Coefficients = make([]curves.Scalar, degree) + p.Coefficients[0] = intercept.Clone() + for i := 1; i < int(degree); i++ { + p.Coefficients[i] = intercept.Random(reader) + } + return p +} + +func (p Polynomial) Evaluate(x curves.Scalar) curves.Scalar { + degree := len(p.Coefficients) - 1 + out := p.Coefficients[degree].Clone() + for i := degree - 1; i >= 0; i-- { + out = out.Mul(x).Add(p.Coefficients[i]) + } + return out +} diff --git a/sharing/polynomial_test.go b/sharing/polynomial_test.go new file mode 100644 index 0000000..f4efd8d --- /dev/null +++ b/sharing/polynomial_test.go @@ -0,0 +1,32 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +// Copyright Coinbase, Inc. All Rights Reserved. +// +// +// SPDX-License-Identifier: Apache-2.0 +// + +package sharing + +import ( + crand "crypto/rand" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +func TestNewPoly(t *testing.T) { + curve := curves.BLS12381G1() + secret := curve.NewScalar().Hash([]byte("test")) + + poly := new(Polynomial).Init(secret, 4, crand.Reader) + require.NotNil(t, poly) + + require.Equal(t, poly.Coefficients[0], secret) +} diff --git a/sharing/shamir.go b/sharing/shamir.go new file mode 100644 index 0000000..67a71b7 --- /dev/null +++ b/sharing/shamir.go @@ -0,0 +1,209 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +// Package sharing is an implementation of shamir secret sharing and implements the following papers. +// +// - https://dl.acm.org/doi/pdf/10.1145/359168.359176 +// - https://www.cs.umd.edu/~gasarch/TOPICS/secretsharing/feldmanVSS.pdf +// - https://link.springer.com/content/pdf/10.1007%2F3-540-46766-1_9.pdf +package sharing + +import ( + "encoding/binary" + "fmt" + "io" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +type ShamirShare struct { + Id uint32 `json:"identifier"` + Value []byte `json:"value"` +} + +func (ss ShamirShare) Validate(curve *curves.Curve) error { + if ss.Id == 0 { + return fmt.Errorf("invalid identifier") + } + sc, err := curve.Scalar.SetBytes(ss.Value) + if err != nil { + return err + } + if sc.IsZero() { + return fmt.Errorf("invalid share") + } + return nil +} + +func (ss ShamirShare) Bytes() []byte { + var id [4]byte + binary.BigEndian.PutUint32(id[:], ss.Id) + return append(id[:], ss.Value...) 
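// The encoding above is a 4-byte big-endian identifier followed by the raw
// scalar bytes. No decoder ships in this diff; a hypothetical inverse, shown
// only to document the layout (the helper name and error text are invented):
//
//	func shamirShareFromBytes(buf []byte) (*ShamirShare, error) {
//	    if len(buf) < 5 {
//	        return nil, fmt.Errorf("shamir share too short")
//	    }
//	    return &ShamirShare{
//	        Id:    binary.BigEndian.Uint32(buf[:4]),
//	        Value: buf[4:],
//	    }, nil
//	}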
+} + +type Shamir struct { + threshold, limit uint32 + curve *curves.Curve +} + +func NewShamir(threshold, limit uint32, curve *curves.Curve) (*Shamir, error) { + if limit < threshold { + return nil, fmt.Errorf("limit cannot be less than threshold") + } + if threshold < 2 { + return nil, fmt.Errorf("threshold cannot be less than 2") + } + if limit > 255 { + return nil, fmt.Errorf("cannot exceed 255 shares") + } + if curve == nil { + return nil, fmt.Errorf("invalid curve") + } + return &Shamir{threshold, limit, curve}, nil +} + +func (s Shamir) Split(secret curves.Scalar, reader io.Reader) ([]*ShamirShare, error) { + if secret.IsZero() { + return nil, fmt.Errorf("invalid secret") + } + shares, _ := s.getPolyAndShares(secret, reader) + return shares, nil +} + +func (s Shamir) getPolyAndShares( + secret curves.Scalar, + reader io.Reader, +) ([]*ShamirShare, *Polynomial) { + poly := new(Polynomial).Init(secret, s.threshold, reader) + shares := make([]*ShamirShare, s.limit) + for i := range shares { + x := s.curve.Scalar.New(i + 1) + shares[i] = &ShamirShare{ + Id: uint32(i + 1), + Value: poly.Evaluate(x).Bytes(), + } + } + return shares, poly +} + +func (s Shamir) LagrangeCoeffs(identities []uint32) (map[uint32]curves.Scalar, error) { + xs := make(map[uint32]curves.Scalar, len(identities)) + for _, xi := range identities { + xs[xi] = s.curve.Scalar.New(int(xi)) + } + + result := make(map[uint32]curves.Scalar, len(identities)) + for i, xi := range xs { + num := s.curve.Scalar.One() + den := s.curve.Scalar.One() + for j, xj := range xs { + if i == j { + continue + } + + num = num.Mul(xj) + den = den.Mul(xj.Sub(xi)) + } + if den.IsZero() { + return nil, fmt.Errorf("divide by zero") + } + result[i] = num.Div(den) + } + return result, nil +} + +func (s Shamir) Combine(shares ...*ShamirShare) (curves.Scalar, error) { + if len(shares) < int(s.threshold) { + return nil, fmt.Errorf("invalid number of shares") + } + dups := make(map[uint32]bool, len(shares)) + xs := make([]curves.Scalar, len(shares)) + ys := make([]curves.Scalar, len(shares)) + + for i, share := range shares { + err := share.Validate(s.curve) + if err != nil { + return nil, err + } + if share.Id > s.limit { + return nil, fmt.Errorf("invalid share identifier") + } + if _, in := dups[share.Id]; in { + return nil, fmt.Errorf("duplicate share") + } + dups[share.Id] = true + ys[i], _ = s.curve.Scalar.SetBytes(share.Value) + xs[i] = s.curve.Scalar.New(int(share.Id)) + } + return s.interpolate(xs, ys) +} + +func (s Shamir) CombinePoints(shares ...*ShamirShare) (curves.Point, error) { + if len(shares) < int(s.threshold) { + return nil, fmt.Errorf("invalid number of shares") + } + dups := make(map[uint32]bool, len(shares)) + xs := make([]curves.Scalar, len(shares)) + ys := make([]curves.Point, len(shares)) + + for i, share := range shares { + err := share.Validate(s.curve) + if err != nil { + return nil, err + } + if share.Id > s.limit { + return nil, fmt.Errorf("invalid share identifier") + } + if _, in := dups[share.Id]; in { + return nil, fmt.Errorf("duplicate share") + } + dups[share.Id] = true + sc, _ := s.curve.Scalar.SetBytes(share.Value) + ys[i] = s.curve.ScalarBaseMult(sc) + xs[i] = s.curve.Scalar.New(int(share.Id)) + } + return s.interpolatePoint(xs, ys) +} + +func (s Shamir) interpolate(xs, ys []curves.Scalar) (curves.Scalar, error) { + result := s.curve.Scalar.Zero() + for i, xi := range xs { + num := s.curve.Scalar.One() + den := s.curve.Scalar.One() + for j, xj := range xs { + if i == j { + continue + } + num = num.Mul(xj) + den = 
den.Mul(xj.Sub(xi)) + } + if den.IsZero() { + return nil, fmt.Errorf("divide by zero") + } + result = result.Add(ys[i].Mul(num.Div(den))) + } + return result, nil +} + +func (s Shamir) interpolatePoint(xs []curves.Scalar, ys []curves.Point) (curves.Point, error) { + result := s.curve.NewIdentityPoint() + for i, xi := range xs { + num := s.curve.Scalar.One() + den := s.curve.Scalar.One() + for j, xj := range xs { + if i == j { + continue + } + num = num.Mul(xj) + den = den.Mul(xj.Sub(xi)) + } + if den.IsZero() { + return nil, fmt.Errorf("divide by zero") + } + result = result.Add(ys[i].Mul(num.Div(den))) + } + return result, nil +} diff --git a/sharing/shamir_test.go b/sharing/shamir_test.go new file mode 100644 index 0000000..a80eb90 --- /dev/null +++ b/sharing/shamir_test.go @@ -0,0 +1,186 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package sharing + +import ( + "bytes" + crand "crypto/rand" + "encoding/json" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +func TestShamirSplitInvalidArgs(t *testing.T) { + curve := curves.ED25519() + _, err := NewShamir(0, 0, curve) + require.NotNil(t, err) + _, err = NewShamir(3, 2, curve) + require.NotNil(t, err) + _, err = NewShamir(1, 10, curve) + require.NotNil(t, err) + scheme, err := NewShamir(2, 3, curve) + require.Nil(t, err) + require.NotNil(t, scheme) + _, err = scheme.Split(curve.NewScalar(), crand.Reader) + require.NotNil(t, err) +} + +func TestShamirCombineNoShares(t *testing.T) { + curve := curves.ED25519() + scheme, err := NewShamir(2, 3, curve) + require.Nil(t, err) + require.NotNil(t, scheme) + _, err = scheme.Combine() + require.NotNil(t, err) +} + +func TestShamirCombineDuplicateShare(t *testing.T) { + curve := curves.ED25519() + scheme, err := NewShamir(2, 3, curve) + require.Nil(t, err) + require.NotNil(t, scheme) + _, err = scheme.Combine([]*ShamirShare{ + { + Id: 1, + Value: curve.NewScalar().New(3).Bytes(), + }, + { + Id: 1, + Value: curve.NewScalar().New(3).Bytes(), + }, + }...) + require.NotNil(t, err) +} + +func TestShamirCombineBadIdentifier(t *testing.T) { + curve := curves.ED25519() + scheme, err := NewShamir(2, 3, curve) + require.Nil(t, err) + require.NotNil(t, scheme) + shares := []*ShamirShare{ + { + Id: 0, + Value: curve.NewScalar().New(3).Bytes(), + }, + { + Id: 2, + Value: curve.NewScalar().New(3).Bytes(), + }, + } + _, err = scheme.Combine(shares...) + require.NotNil(t, err) + shares[0] = &ShamirShare{ + Id: 4, + Value: curve.NewScalar().New(3).Bytes(), + } + _, err = scheme.Combine(shares...) + require.NotNil(t, err) +} + +func TestShamirCombineSingle(t *testing.T) { + curve := curves.ED25519() + scheme, err := NewShamir(2, 3, curve) + require.Nil(t, err) + require.NotNil(t, scheme) + + shares, err := scheme.Split(curve.NewScalar().Hash([]byte("test")), crand.Reader) + require.Nil(t, err) + require.NotNil(t, shares) + secret, err := scheme.Combine(shares...) + require.Nil(t, err) + require.Equal(t, secret, curve.NewScalar().Hash([]byte("test"))) +} + +// Test ComputeL function to compute Lagrange coefficients. 
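// The coefficients follow the usual Lagrange-at-zero formula
// L_i = prod_{j != i} x_j / (x_j - x_i). A tiny worked example for shares at
// x = 1 and x = 2 with threshold 2: L_1 = 2/(2-1) = 2 and L_2 = 1/(1-2) = -1,
// so with f(x) = s + a*x the combination 2*f(1) - f(2) = 2(s + a) - (s + 2a) = s
// recovers the secret. That is exactly the summation performed in the test
// below, with all arithmetic taken modulo the curve order.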
+func TestShamirComputeL(t *testing.T) { + curve := curves.ED25519() + scheme, err := NewShamir(2, 2, curve) + require.Nil(t, err) + require.NotNil(t, scheme) + secret := curve.Scalar.Hash([]byte("test")) + shares, err := scheme.Split(secret, crand.Reader) + require.Nil(t, err) + require.NotNil(t, shares) + identities := make([]uint32, 0) + for _, xi := range shares { + identities = append(identities, xi.Id) + } + lCoeffs, err := scheme.LagrangeCoeffs(identities) + require.Nil(t, err) + require.NotNil(t, lCoeffs) + + // Checking we can reconstruct the same secret using Lagrange coefficients. + result := curve.NewScalar() + for _, r := range shares { + rc, _ := curve.Scalar.SetBytes(r.Value) + result = result.Add(rc.Mul(lCoeffs[r.Id])) + } + require.Equal(t, result.Bytes(), secret.Bytes()) +} + +func TestShamirAllCombinations(t *testing.T) { + curve := curves.ED25519() + scheme, err := NewShamir(3, 5, curve) + require.Nil(t, err) + require.NotNil(t, scheme) + + secret := curve.Scalar.Hash([]byte("test")) + shares, err := scheme.Split(secret, crand.Reader) + require.Nil(t, err) + require.NotNil(t, shares) + // There are 5*4*3 possible combinations + for i := 0; i < 5; i++ { + for j := 0; j < 5; j++ { + if i == j { + continue + } + for k := 0; k < 5; k++ { + if i == k || j == k { + continue + } + + rSecret, err := scheme.Combine(shares[i], shares[j], shares[k]) + require.Nil(t, err) + require.NotNil(t, rSecret) + require.Equal(t, rSecret, secret) + } + } + } +} + +// Ensures that ShamirShare's un/marshal successfully. +func TestMarshalJsonRoundTrip(t *testing.T) { + curve := curves.ED25519() + shares := []ShamirShare{ + {0, curve.Scalar.New(300).Bytes()}, + {2, curve.Scalar.New(300000).Bytes()}, + {20, curve.Scalar.New(12812798).Bytes()}, + {31, curve.Scalar.New(17).Bytes()}, + {57, curve.Scalar.New(5066680).Bytes()}, + {128, curve.Scalar.New(3005).Bytes()}, + {19, curve.Scalar.New(317).Bytes()}, + {7, curve.Scalar.New(323).Bytes()}, + {222, curve.NewScalar().New(-1).Bytes()}, + } + // Run all the tests! + for _, in := range shares { + input, err := json.Marshal(in) + require.NoError(t, err) + require.NotNil(t, input) + + // Unmarshal and test + out := &ShamirShare{} + // out.Value = curve.NewScalar() + err = json.Unmarshal(input, &out) + require.NoError(t, err) + require.Equal(t, in.Id, out.Id) + require.Equal(t, bytes.Compare(in.Value, out.Value), 0) + } +} diff --git a/sharing/v1/bls12381g1_feldman_test.go b/sharing/v1/bls12381g1_feldman_test.go new file mode 100644 index 0000000..ef1ca64 --- /dev/null +++ b/sharing/v1/bls12381g1_feldman_test.go @@ -0,0 +1,177 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package v1 + +import ( + "math/big" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +var ( + modulus, _ = new(big.Int).SetString( + "1000000000000000000000000000000014DEF9DEA2F79CD65812631A5CF5D3ED", + 16, + ) + field = curves.NewField(modulus) +) + +func TestBls12381G1FeldmanSplitInvalidArgs(t *testing.T) { + _, err := NewFeldman(0, 0, Bls12381G1()) + require.NotNil(t, err) + _, err = NewFeldman(3, 2, Bls12381G1()) + require.NotNil(t, err) + _, err = NewFeldman(1, 10, Bls12381G1()) + require.NotNil(t, err) + scheme, err := NewFeldman(2, 3, Bls12381G1()) + require.Nil(t, err) + require.NotNil(t, scheme) + _, _, err = scheme.Split([]byte{}) + require.NotNil(t, err) + _, _, err = scheme.Split( + []byte{ + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + }, + ) + require.NotNil(t, err) +} + +func TestBls12381G1FeldmanCombineNoShares(t *testing.T) { + scheme, err := NewFeldman(2, 3, Bls12381G1()) + require.Nil(t, err) + require.NotNil(t, scheme) + _, err = scheme.Combine() + require.NotNil(t, err) +} + +func TestBls12381G1FeldmanCombineDuplicateShare(t *testing.T) { + scheme, err := NewFeldman(2, 3, Bls12381G1()) + require.Nil(t, err) + require.NotNil(t, scheme) + _, err = scheme.Combine([]*ShamirShare{ + { + Identifier: 1, + Value: field.NewElement(big.NewInt(3)), + }, + { + Identifier: 1, + Value: field.NewElement(big.NewInt(3)), + }, + }...) + require.NotNil(t, err) +} + +func TestBls12381G1FeldmanCombineBadIdentifier(t *testing.T) { + scheme, err := NewFeldman(2, 3, Bls12381G1()) + require.Nil(t, err) + require.NotNil(t, scheme) + shares := []*ShamirShare{ + { + Identifier: 0, + Value: field.NewElement(big.NewInt(3)), + }, + { + Identifier: 2, + Value: field.NewElement(big.NewInt(3)), + }, + } + _, err = scheme.Combine(shares...) + require.NotNil(t, err) + shares[0] = &ShamirShare{ + Identifier: 4, + Value: field.NewElement(big.NewInt(3)), + } + _, err = scheme.Combine(shares...) + require.NotNil(t, err) +} + +func TestBls12381G1FeldmanCombineSingle(t *testing.T) { + scheme, err := NewFeldman(2, 3, Bls12381G1()) + require.Nil(t, err) + require.NotNil(t, scheme) + + verifiers, shares, err := scheme.Split([]byte("test")) + require.Nil(t, err) + require.NotNil(t, shares) + for _, s := range shares { + ok, err := scheme.Verify(s, verifiers) + require.Nil(t, err) + require.True(t, ok) + } + secret, err := scheme.Combine(shares...) 
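// The v1 API differs from the newer sharing package: secrets are passed as
// raw bytes that must encode a value below the group order, shares carry
// field-element Values keyed by an Identifier, and verification takes a share
// together with the verifier commitments. A condensed sketch of the flow this
// file's tests exercise (error handling elided):
//
//	scheme, _ := NewFeldman(2, 3, Bls12381G1())
//	verifiers, shares, _ := scheme.Split([]byte("test"))
//	ok, _ := scheme.Verify(shares[0], verifiers)      // true for honest shares
//	secret, _ := scheme.Combine(shares[0], shares[1]) // returns []byte("test")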
+ require.Nil(t, err) + require.Equal(t, secret, []byte("test")) +} + +func TestBls12381G1FeldmanAllCombinations(t *testing.T) { + scheme, err := NewFeldman(3, 5, Bls12381G1()) + require.Nil(t, err) + require.NotNil(t, scheme) + + secret := []byte("test") + verifiers, shares, err := scheme.Split(secret) + for _, s := range shares { + ok, err := scheme.Verify(s, verifiers) + require.Nil(t, err) + require.True(t, ok) + } + require.Nil(t, err) + require.NotNil(t, shares) + // There are 5*4*3 possible combinations + for i := 0; i < 5; i++ { + for j := 0; j < 5; j++ { + if i == j { + continue + } + for k := 0; k < 5; k++ { + if i == k || j == k { + continue + } + + rSecret, err := scheme.Combine(shares[i], shares[j], shares[k]) + require.Nil(t, err) + require.NotNil(t, rSecret) + require.Equal(t, rSecret, secret) + } + } + } +} diff --git a/sharing/v1/bls12381g1curve.go b/sharing/v1/bls12381g1curve.go new file mode 100644 index 0000000..b628c6e --- /dev/null +++ b/sharing/v1/bls12381g1curve.go @@ -0,0 +1,120 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package v1 + +import ( + "crypto/elliptic" + "math/big" + "sync" + + "github.com/sonr-io/sonr/crypto/core/curves/native" + "github.com/sonr-io/sonr/crypto/core/curves/native/bls12381" + "github.com/sonr-io/sonr/crypto/internal" +) + +var ( + bls12381g1Initonce sync.Once + bls12381g1 Bls12381G1Curve +) + +type Bls12381G1Curve struct { + *elliptic.CurveParams +} + +func bls12381g1InitAll() { + bls12381g1.CurveParams = new(elliptic.CurveParams) + bls12381g1.P, _ = new( + big.Int, + ).SetString("1a0111ea397fe69a4b1ba7b6434bacd764774b84f38512bf6730d2a0f6b0f6241eabfffeb153ffffb9feffffffffaaab", 16) + bls12381g1.N = bls12381.Bls12381FqNew().Params.BiModulus + bls12381g1.B, _ = new( + big.Int, + ).SetString("0bbc3efc5008a26a0e1c8c3fad0059c051ac582950405194dd595f13570725ce8c22631a7918fd8ebaac93d50ce72271", 16) + bls12381g1.Gx, _ = new( + big.Int, + ).SetString("120177419e0bfb75edce6ecc21dbf440f0ae6acdf3d0e747154f95c7143ba1c17817fc679976fff55cb38790fd530c16", 16) + bls12381g1.Gy, _ = new( + big.Int, + ).SetString("0bbc3efc5008a26a0e1c8c3fad0059c051ac582950405194dd595f13570725ce8c22631a7918fd8ebaac93d50ce72271", 16) + bls12381g1.BitSize = 381 + bls12381g1.Name = "Bls12381G1" +} + +func Bls12381G1() *Bls12381G1Curve { + bls12381g1Initonce.Do(bls12381g1InitAll) + return &bls12381g1 +} + +func (curve *Bls12381G1Curve) Params() *elliptic.CurveParams { + return curve.CurveParams +} + +func (curve *Bls12381G1Curve) IsOnCurve(x, y *big.Int) bool { + _, err := new(bls12381.G1).SetBigInt(x, y) + return err == nil +} + +func (curve *Bls12381G1Curve) Add(x1, y1, x2, y2 *big.Int) (*big.Int, *big.Int) { + p1, err1 := new(bls12381.G1).SetBigInt(x1, y1) + p2, err2 := new(bls12381.G1).SetBigInt(x2, y2) + + if err1 != nil || err2 != nil { + return nil, nil + } + return p1.Add(p1, p2).BigInt() +} + +func (curve *Bls12381G1Curve) Double(x1, y1 *big.Int) (*big.Int, *big.Int) { + p, err := new(bls12381.G1).SetBigInt(x1, y1) + if err != nil { + return nil, nil + } + return p.Double(p).BigInt() +} + +func (curve *Bls12381G1Curve) ScalarMult(Bx, By *big.Int, k []byte) (*big.Int, *big.Int) { + p, err := new(bls12381.G1).SetBigInt(Bx, By) + if err != nil { + return nil, nil + } + var bb [native.FieldBytes]byte + copy(bb[:], k) + s, err := bls12381.Bls12381FqNew().SetBytes(&bb) + if err != nil { + return nil, nil + } + return p.Mul(p, s).BigInt() +} + +func (curve *Bls12381G1Curve) ScalarBaseMult(k []byte) (*big.Int, 
*big.Int) { + p := new(bls12381.G1).Generator() + var bb [native.FieldBytes]byte + copy(bb[:], internal.ReverseScalarBytes(k)) + s, err := bls12381.Bls12381FqNew().SetBytes(&bb) + if err != nil { + return nil, nil + } + return p.Mul(p, s).BigInt() +} + +// Hash an arbitrary byte sequence to a G1 point according to the hash-to-curve standard +func (curve *Bls12381G1Curve) Hash(msg []byte) (*big.Int, *big.Int) { + return new( + bls12381.G1, + ).Hash(native.EllipticPointHasherSha256(), msg, []byte("BLS12381G1_XMD:SHA-256_SSWU_RO_")). + BigInt() +} + +// CompressedBytesFromBigInts takes x and y coordinates and converts them to the BLS compressed point form +func (curve *Bls12381G1Curve) CompressedBytesFromBigInts(x, y *big.Int) ([]byte, error) { + p, err := new(bls12381.G1).SetBigInt(x, y) + if err != nil { + return nil, err + } + out := p.ToCompressed() + return out[:], nil +} diff --git a/sharing/v1/bls12381g2_feldman_test.go b/sharing/v1/bls12381g2_feldman_test.go new file mode 100755 index 0000000..52a3add --- /dev/null +++ b/sharing/v1/bls12381g2_feldman_test.go @@ -0,0 +1,167 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package v1 + +import ( + "math/big" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestBls12381G2FeldmanSplitInvalidArgs(t *testing.T) { + _, err := NewFeldman(0, 0, Bls12381G2()) + require.NotNil(t, err) + _, err = NewFeldman(3, 2, Bls12381G2()) + require.NotNil(t, err) + _, err = NewFeldman(1, 10, Bls12381G2()) + require.NotNil(t, err) + scheme, err := NewFeldman(2, 3, Bls12381G2()) + require.Nil(t, err) + require.NotNil(t, scheme) + _, _, err = scheme.Split([]byte{}) + require.NotNil(t, err) + _, _, err = scheme.Split( + []byte{ + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + }, + ) + require.NotNil(t, err) +} + +func TestBls12381G2FeldmanCombineNoShares(t *testing.T) { + scheme, err := NewFeldman(2, 3, Bls12381G2()) + require.Nil(t, err) + require.NotNil(t, scheme) + _, err = scheme.Combine() + require.NotNil(t, err) +} + +func TestBls12381G2FeldmanCombineDuplicateShare(t *testing.T) { + scheme, err := NewFeldman(2, 3, Bls12381G2()) + require.Nil(t, err) + require.NotNil(t, scheme) + _, err = scheme.Combine([]*ShamirShare{ + { + Identifier: 1, + Value: field.NewElement(big.NewInt(3)), + }, + { + Identifier: 1, + Value: field.NewElement(big.NewInt(3)), + }, + }...) + require.NotNil(t, err) +} + +func TestBls12381G2FeldmanCombineBadIdentifier(t *testing.T) { + scheme, err := NewFeldman(2, 3, Bls12381G2()) + require.Nil(t, err) + require.NotNil(t, scheme) + shares := []*ShamirShare{ + { + Identifier: 0, + Value: field.NewElement(big.NewInt(3)), + }, + { + Identifier: 2, + Value: field.NewElement(big.NewInt(3)), + }, + } + _, err = scheme.Combine(shares...) + require.NotNil(t, err) + shares[0] = &ShamirShare{ + Identifier: 4, + Value: field.NewElement(big.NewInt(3)), + } + _, err = scheme.Combine(shares...) 
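// Taken together, the combine tests in this file pin down three identifier
// rules: an Identifier of 0 is rejected, an Identifier above the share limit
// (here 4 > 3) is rejected, and duplicate Identifiers are rejected. A
// hypothetical standalone check expressing the same rules (helper name and
// error strings are invented for illustration):
//
//	func checkIdentifiers(limit uint32, shares ...*ShamirShare) error {
//	    seen := make(map[uint32]bool, len(shares))
//	    for _, s := range shares {
//	        switch {
//	        case s.Identifier == 0:
//	            return fmt.Errorf("identifier cannot be zero")
//	        case uint32(s.Identifier) > limit:
//	            return fmt.Errorf("identifier exceeds limit")
//	        case seen[uint32(s.Identifier)]:
//	            return fmt.Errorf("duplicate identifier")
//	        }
//	        seen[uint32(s.Identifier)] = true
//	    }
//	    return nil
//	}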
+ require.NotNil(t, err) +} + +func TestBls12381G2FeldmanCombineSingle(t *testing.T) { + scheme, err := NewFeldman(2, 3, Bls12381G2()) + require.Nil(t, err) + require.NotNil(t, scheme) + + verifiers, shares, err := scheme.Split([]byte("test")) + require.Nil(t, err) + require.NotNil(t, shares) + for _, s := range shares { + ok, err := scheme.Verify(s, verifiers) + require.Nil(t, err) + require.True(t, ok) + } + secret, err := scheme.Combine(shares...) + require.Nil(t, err) + require.Equal(t, secret, []byte("test")) +} + +func TestBls12381G2FeldmanAllCombinations(t *testing.T) { + scheme, err := NewFeldman(3, 5, Bls12381G2()) + require.Nil(t, err) + require.NotNil(t, scheme) + + secret := []byte("test") + verifiers, shares, err := scheme.Split(secret) + for _, s := range shares { + ok, err := scheme.Verify(s, verifiers) + require.Nil(t, err) + require.True(t, ok) + } + require.Nil(t, err) + require.NotNil(t, shares) + // There are 5*4*3 possible combinations + for i := 0; i < 5; i++ { + for j := 0; j < 5; j++ { + if i == j { + continue + } + for k := 0; k < 5; k++ { + if i == k || j == k { + continue + } + + rSecret, err := scheme.Combine(shares[i], shares[j], shares[k]) + require.Nil(t, err) + require.NotNil(t, rSecret) + require.Equal(t, rSecret, secret) + } + } + } +} diff --git a/sharing/v1/bls12381g2curve.go b/sharing/v1/bls12381g2curve.go new file mode 100644 index 0000000..e0f0780 --- /dev/null +++ b/sharing/v1/bls12381g2curve.go @@ -0,0 +1,119 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package v1 + +import ( + "crypto/elliptic" + "math/big" + "sync" + + "github.com/sonr-io/sonr/crypto/core/curves/native" + "github.com/sonr-io/sonr/crypto/core/curves/native/bls12381" +) + +var ( + bls12381g2Initonce sync.Once + bls12381g2 Bls12381G2Curve +) + +type Bls12381G2Curve struct { + *elliptic.CurveParams +} + +func bls12381g2InitAll() { + bls12381g2.CurveParams = new(elliptic.CurveParams) + bls12381g2.P, _ = new( + big.Int, + ).SetString("1a0111ea397fe69a4b1ba7b6434bacd764774b84f38512bf6730d2a0f6b0f6241eabfffeb153ffffb9feffffffffaaab", 16) + bls12381g2.N = bls12381.Bls12381FqNew().Params.BiModulus + bls12381g2.B, _ = new( + big.Int, + ).SetString("0bbc3efc5008a26a0e1c8c3fad0059c051ac582950405194dd595f13570725ce8c22631a7918fd8ebaac93d50ce72271", 16) + bls12381g2.Gx, _ = new( + big.Int, + ).SetString("120177419e0bfb75edce6ecc21dbf440f0ae6acdf3d0e747154f95c7143ba1c17817fc679976fff55cb38790fd530c16", 16) + bls12381g2.Gy, _ = new( + big.Int, + ).SetString("0bbc3efc5008a26a0e1c8c3fad0059c051ac582950405194dd595f13570725ce8c22631a7918fd8ebaac93d50ce72271", 16) + bls12381g2.BitSize = 381 + bls12381g2.Name = "Bls12381G1" +} + +func Bls12381G2() *Bls12381G1Curve { + bls12381g2Initonce.Do(bls12381g2InitAll) + return &bls12381g1 +} + +func (curve *Bls12381G2Curve) Params() *elliptic.CurveParams { + return curve.CurveParams +} + +func (curve *Bls12381G2Curve) IsOnCurve(x, y *big.Int) bool { + _, err := new(bls12381.G2).SetBigInt(x, y) + return err == nil +} + +func (curve *Bls12381G2Curve) Add(x1, y1, x2, y2 *big.Int) (*big.Int, *big.Int) { + p1, err1 := new(bls12381.G2).SetBigInt(x1, y1) + p2, err2 := new(bls12381.G2).SetBigInt(x2, y2) + + if err1 != nil || err2 != nil { + return nil, nil + } + return p1.Add(p1, p2).BigInt() +} + +func (curve *Bls12381G2Curve) Double(x1, y1 *big.Int) (*big.Int, *big.Int) { + p, err := new(bls12381.G2).SetBigInt(x1, y1) + if err != nil { + return nil, nil + } + return p.Double(p).BigInt() +} + +func (curve 
*Bls12381G2Curve) ScalarMult(Bx, By *big.Int, k []byte) (*big.Int, *big.Int) { + p, err := new(bls12381.G2).SetBigInt(Bx, By) + if err != nil { + return nil, nil + } + var bb [native.FieldBytes]byte + copy(bb[:], k) + s, err := bls12381.Bls12381FqNew().SetBytes(&bb) + if err != nil { + return nil, nil + } + return p.Mul(p, s).BigInt() +} + +func (curve *Bls12381G2Curve) ScalarBaseMult(k []byte) (*big.Int, *big.Int) { + p := new(bls12381.G2).Generator() + var bb [native.FieldBytes]byte + copy(bb[:], k) + s, err := bls12381.Bls12381FqNew().SetBytes(&bb) + if err != nil { + return nil, nil + } + return p.Mul(p, s).BigInt() +} + +// Hash an arbitrary byte sequence to a G1 point according to the hash-to-curve standard +func (curve *Bls12381G2Curve) Hash(msg []byte) (*big.Int, *big.Int) { + return new( + bls12381.G1, + ).Hash(native.EllipticPointHasherSha256(), msg, []byte("BLS12381G2_XMD:SHA-256_SSWU_RO_")). + BigInt() +} + +// CompressedBytesFromBigInts takes x and y coordinates and converts them to the BLS compressed point form +func (curve *Bls12381G2Curve) CompressedBytesFromBigInts(x, y *big.Int) ([]byte, error) { + p, err := new(bls12381.G2).SetBigInt(x, y) + if err != nil { + return nil, err + } + out := p.ToCompressed() + return out[:], nil +} diff --git a/sharing/v1/common.go b/sharing/v1/common.go new file mode 100644 index 0000000..bd88e7c --- /dev/null +++ b/sharing/v1/common.go @@ -0,0 +1,14 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package v1 + +import ( + kryptology "github.com/sonr-io/sonr/crypto/core/curves" +) + +// ShareVerifier is used to verify secret shares from Feldman or Pedersen VSS +type ShareVerifier = kryptology.EcPoint diff --git a/sharing/v1/ed25519_feldman_test.go b/sharing/v1/ed25519_feldman_test.go new file mode 100755 index 0000000..d59fb2b --- /dev/null +++ b/sharing/v1/ed25519_feldman_test.go @@ -0,0 +1,167 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package v1 + +import ( + "math/big" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestEd25519FeldmanSplitInvalidArgs(t *testing.T) { + _, err := NewFeldman(0, 0, Ed25519()) + require.NotNil(t, err) + _, err = NewFeldman(3, 2, Ed25519()) + require.NotNil(t, err) + _, err = NewFeldman(1, 10, Ed25519()) + require.NotNil(t, err) + scheme, err := NewFeldman(2, 3, Ed25519()) + require.Nil(t, err) + require.NotNil(t, scheme) + _, _, err = scheme.Split([]byte{}) + require.NotNil(t, err) + _, _, err = scheme.Split( + []byte{ + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + }, + ) + require.NotNil(t, err) +} + +func TestEd25519FeldmanCombineNoShares(t *testing.T) { + scheme, err := NewFeldman(2, 3, Ed25519()) + require.Nil(t, err) + require.NotNil(t, scheme) + _, err = scheme.Combine() + require.NotNil(t, err) +} + +func TestEd25519FeldmanCombineDuplicateShare(t *testing.T) { + scheme, err := NewFeldman(2, 3, Ed25519()) + require.Nil(t, err) + require.NotNil(t, scheme) + _, err = scheme.Combine([]*ShamirShare{ + { + Identifier: 1, + Value: field.NewElement(big.NewInt(3)), + }, + { + Identifier: 1, + Value: field.NewElement(big.NewInt(3)), + }, + }...) 
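// Regarding the 33-byte secret of repeated 0x65 used in the SplitInvalidArgs
// test above: it is rejected for being larger than the group modulus, as the
// Pedersen variant of this test notes explicitly. Any 33-byte value already
// exceeds the 32-byte ed25519 order, which an ordinary math/big comparison
// makes visible (illustrative sketch only):
//
//	secret := bytes.Repeat([]byte{0x65}, 33)
//	tooBig := new(big.Int).SetBytes(secret).Cmp(Ed25519().Params().N) >= 0
//	// tooBig == true, so Split must refuse the input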
+ require.NotNil(t, err) +} + +func TestEd25519FeldmanCombineBadIdentifier(t *testing.T) { + scheme, err := NewFeldman(2, 3, Ed25519()) + require.Nil(t, err) + require.NotNil(t, scheme) + shares := []*ShamirShare{ + { + Identifier: 0, + Value: field.NewElement(big.NewInt(3)), + }, + { + Identifier: 2, + Value: field.NewElement(big.NewInt(3)), + }, + } + _, err = scheme.Combine(shares...) + require.NotNil(t, err) + shares[0] = &ShamirShare{ + Identifier: 4, + Value: field.NewElement(big.NewInt(3)), + } + _, err = scheme.Combine(shares...) + require.NotNil(t, err) +} + +func TestEd25519FeldmanCombineSingle(t *testing.T) { + scheme, err := NewFeldman(2, 3, Ed25519()) + require.Nil(t, err) + require.NotNil(t, scheme) + + verifiers, shares, err := scheme.Split([]byte("test")) + require.Nil(t, err) + require.NotNil(t, shares) + for _, s := range shares { + ok, err := scheme.Verify(s, verifiers) + require.Nil(t, err) + require.True(t, ok) + } + secret, err := scheme.Combine(shares...) + require.Nil(t, err) + require.Equal(t, secret, []byte("test")) +} + +func TestEd25519FeldmanAllCombinations(t *testing.T) { + scheme, err := NewFeldman(3, 5, Ed25519()) + require.Nil(t, err) + require.NotNil(t, scheme) + + secret := []byte("test") + verifiers, shares, err := scheme.Split(secret) + for _, s := range shares { + ok, err := scheme.Verify(s, verifiers) + require.Nil(t, err) + require.True(t, ok) + } + require.Nil(t, err) + require.NotNil(t, shares) + // There are 5*4*3 possible combinations + for i := 0; i < 5; i++ { + for j := 0; j < 5; j++ { + if i == j { + continue + } + for k := 0; k < 5; k++ { + if i == k || j == k { + continue + } + + rSecret, err := scheme.Combine(shares[i], shares[j], shares[k]) + require.Nil(t, err) + require.NotNil(t, rSecret) + require.Equal(t, rSecret, secret) + } + } + } +} diff --git a/sharing/v1/ed25519_pedersen_test.go b/sharing/v1/ed25519_pedersen_test.go new file mode 100644 index 0000000..9033e67 --- /dev/null +++ b/sharing/v1/ed25519_pedersen_test.go @@ -0,0 +1,198 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package v1 + +import ( + "crypto/sha512" + "math/big" + "testing" + + "filippo.io/edwards25519" + "github.com/stretchr/testify/require" + + core "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/internal" +) + +var ( + ed25519BasePoint = &core.EcPoint{Curve: Ed25519(), X: Ed25519().Gx, Y: Ed25519().Gy} + testPointEd25519, _ = core.NewScalarBaseMult(Ed25519(), big.NewInt(2222)) +) + +func TestEd25519PedersenSplitInvalidArgs(t *testing.T) { + _, err := NewPedersen(0, 0, nil) + require.NotNil(t, err) + _, err = NewPedersen(3, 2, nil) + require.NotNil(t, err) + _, err = NewPedersen(1, 10, nil) + require.NotNil(t, err) + _, err = NewPedersen(2, 3, nil) + require.NotNil(t, err) + scheme, err := NewPedersen(2, 3, ed25519BasePoint) + require.Nil(t, err) + require.NotNil(t, scheme) + _, err = scheme.Split([]byte{}) + require.NotNil(t, err) + // test that split doesn't work on secrets bigger than the modulus + _, err = scheme.Split( + []byte{ + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + }, + ) + require.NotNil(t, err) +} + +func TestEd25519PedersenCombineNoShares(t *testing.T) { + scheme, err := NewPedersen(2, 3, ed25519BasePoint) + require.Nil(t, err) + require.NotNil(t, scheme) + _, err = scheme.Combine() + require.NotNil(t, err) +} + +func TestEd25519PedersenCombineDuplicateShare(t *testing.T) { + scheme, err := NewPedersen(2, 3, ed25519BasePoint) + require.Nil(t, err) + require.NotNil(t, scheme) + _, err = scheme.Combine([]*ShamirShare{ + { + Identifier: 1, + Value: field.NewElement(big.NewInt(3)), + }, + { + Identifier: 1, + Value: field.NewElement(big.NewInt(3)), + }, + }...) + require.NotNil(t, err) +} + +func TestEd25519PedersenCombineBadIdentifier(t *testing.T) { + scheme, err := NewPedersen(2, 3, ed25519BasePoint) + require.Nil(t, err) + require.NotNil(t, scheme) + shares := []*ShamirShare{ + { + Identifier: 0, + Value: field.NewElement(big.NewInt(3)), + }, + { + Identifier: 2, + Value: field.NewElement(big.NewInt(3)), + }, + } + _, err = scheme.Combine(shares...) + require.NotNil(t, err) + shares[0] = &ShamirShare{ + Identifier: 4, + Value: field.NewElement(big.NewInt(3)), + } + _, err = scheme.Combine(shares...) + require.Error(t, err) +} + +func TestEd25519PedersenCombineSingle(t *testing.T) { + scheme, err := NewPedersen(2, 3, testPointEd25519) + require.Nil(t, err) + require.NotNil(t, scheme) + + hBytes := sha512.Sum512([]byte("test")) + var arr [32]byte + copy(arr[:], hBytes[:]) + sc, err := edwards25519.NewScalar().SetBytesWithClamping(arr[:]) + require.Nil(t, err) + result, err := scheme.Split(internal.ReverseScalarBytes(sc.Bytes())) + require.Nil(t, err) + require.NotNil(t, result) + for i, s := range result.SecretShares { + ok, err := scheme.Verify(s, result.BlindingShares[i], result.BlindedVerifiers) + require.Nil(t, err) + require.True(t, ok) + } + secret, err := scheme.Combine(result.SecretShares...) 
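// The ReverseScalarBytes calls in this test bridge two conventions:
// filippo.io/edwards25519 scalars serialize little-endian, while the v1
// sharing code treats secrets as big-endian big.Int bytes. Reversing on the
// way into Split and again on the way out of Combine therefore round-trips
// the scalar. The helper is presumably a plain byte reversal, along the
// lines of:
//
//	func reverse(in []byte) []byte {
//	    out := make([]byte, len(in))
//	    for i, b := range in {
//	        out[len(in)-1-i] = b
//	    }
//	    return out
//	}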
+ require.Nil(t, err) + require.Equal(t, internal.ReverseScalarBytes(secret), sc.Bytes()) +} + +func TestEd25519PedersenAllCombinations(t *testing.T) { + scheme, err := NewPedersen(3, 5, testPointEd25519) + require.Nil(t, err) + require.NotNil(t, scheme) + + secret := []byte("test") + result, err := scheme.Split(secret) + for i, s := range result.SecretShares { + ok, err := scheme.Verify(s, result.BlindingShares[i], result.BlindedVerifiers) + require.Nil(t, err) + require.True(t, ok) + } + require.Nil(t, err) + require.NotNil(t, result) + // There are 5*4*3 possible combinations + for i := 0; i < 5; i++ { + for j := 0; j < 5; j++ { + if i == j { + continue + } + for k := 0; k < 5; k++ { + if i == k || j == k { + continue + } + + rSecret, err := scheme.Combine( + result.SecretShares[i], + result.SecretShares[j], + result.SecretShares[k], + ) + require.Nil(t, err) + require.NotNil(t, rSecret) + require.Equal(t, rSecret, secret) + + bSecret, err := scheme.Combine( + result.BlindingShares[i], + result.BlindingShares[j], + result.BlindingShares[k], + ) + require.Nil(t, err) + require.NotNil(t, bSecret) + require.Equal(t, bSecret, result.Blinding.Bytes()) + } + } + } +} diff --git a/sharing/v1/ed25519curve.go b/sharing/v1/ed25519curve.go new file mode 100644 index 0000000..4576ba3 --- /dev/null +++ b/sharing/v1/ed25519curve.go @@ -0,0 +1,147 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package v1 + +import ( + "crypto/elliptic" + "math/big" + "sync" + + ed "filippo.io/edwards25519" + + "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/internal" +) + +var ( + ed25519Initonce sync.Once + ed25519 Ed25519Curve +) + +type Ed25519Curve struct { + *elliptic.CurveParams +} + +func ed25519InitAll() { + // taken from https://datatracker.ietf.org/doc/html/rfc8032 + ed25519.CurveParams = new(elliptic.CurveParams) + ed25519.P, _ = new( + big.Int, + ).SetString("7FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFED", 16) + ed25519.N, _ = new( + big.Int, + ).SetString("1000000000000000000000000000000014DEF9DEA2F79CD65812631A5CF5D3ED", 16) + ed25519.Gx = new(big.Int) + ed25519.Gy = new(big.Int).SetBytes(ed.NewGeneratorPoint().Bytes()) + ed25519.BitSize = 255 + ed25519.Name = "ed25519" +} + +func Ed25519() *Ed25519Curve { + ed25519Initonce.Do(ed25519InitAll) + return &ed25519 +} + +func (curve *Ed25519Curve) Params() *elliptic.CurveParams { + return curve.CurveParams +} + +func (curve *Ed25519Curve) IsOnCurve(x, y *big.Int) bool { + // ignore the x value since Ed25519 is canonical 32 bytes of y according to RFC 8032 + // Set bytes returns an error if not a valid point + _, err := internal.BigInt2Ed25519Point(y) + return err == nil +} + +func (curve *Ed25519Curve) Add(x1, y1, x2, y2 *big.Int) (*big.Int, *big.Int) { + var p1, p2 *ed.Point + var err error + if y1.Cmp(big.NewInt(0)) == 0 { + p1 = ed.NewIdentityPoint() + } else { + p1, err = internal.BigInt2Ed25519Point(y1) + } + if err != nil { + panic(err) + } + if y2.Cmp(big.NewInt(0)) == 0 { + p2 = ed.NewIdentityPoint() + } else { + p2, err = internal.BigInt2Ed25519Point(y2) + } + if err != nil { + panic(err) + } + p1.Add(p1, p2) + return new(big.Int), new(big.Int).SetBytes(p1.Bytes()) +} + +func (curve *Ed25519Curve) Double(x1, y1 *big.Int) (*big.Int, *big.Int) { + p, err := internal.BigInt2Ed25519Point(y1) + if err != nil { + panic(err) + } + p1, _ := internal.BigInt2Ed25519Point(y1) + p.Add(p, p1) + return new(big.Int), new(big.Int).SetBytes(p.Bytes()) +} + 
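// Usage sketch (illustrative only, using just the API defined in this file): the
// adapter carries the whole point in the y *big.Int as the compressed
// edwards25519 encoding and always returns zero for x, so callers pass nil (or 0)
// for the x-coordinate; compare TestEd25519ScalarMult later in this diff.
//
//	curve := Ed25519()
//	_, y := curve.ScalarBaseMult(big.NewInt(7).Bytes()) // y holds the compressed point
//	ok := curve.IsOnCurve(nil, y)                       // x is ignored entirely
//	_, y2 := curve.ScalarMult(nil, y, big.NewInt(3).Bytes())
//	_, _ = ok, y2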
+func (curve *Ed25519Curve) ScalarMult(Bx, By *big.Int, k []byte) (*big.Int, *big.Int) { + p, err := internal.BigInt2Ed25519Point(By) + if err != nil { + panic(err) + } + s, err := internal.BigInt2Ed25519Scalar(new(big.Int).SetBytes(k)) + if err != nil { + var t [64]byte + copy(t[:], internal.ReverseScalarBytes(k)) + s, err = ed.NewScalar().SetUniformBytes(t[:]) + if err != nil { + panic(err) + } + } + p.ScalarMult(s, p) + return new(big.Int), new(big.Int).SetBytes(p.Bytes()) +} + +func (curve *Ed25519Curve) ScalarBaseMult(k []byte) (*big.Int, *big.Int) { + s, err := internal.BigInt2Ed25519Scalar(new(big.Int).SetBytes(k)) + if err != nil { + var t [64]byte + copy(t[:], internal.ReverseScalarBytes(k)) + s, err = ed.NewScalar().SetUniformBytes(t[:]) + if err != nil { + panic(err) + } + } + p := ed.NewIdentityPoint().ScalarBaseMult(s) + return new(big.Int), new(big.Int).SetBytes(p.Bytes()) +} + +func (curve *Ed25519Curve) Neg(Bx, By *big.Int) (*big.Int, *big.Int) { + var p1 *ed.Point + var err error + if By.Cmp(big.NewInt(0)) == 0 { + p1 = ed.NewIdentityPoint() + } else { + p1, err = internal.BigInt2Ed25519Point(By) + if err != nil { + panic(err) + } + } + p1.Negate(p1) + return new(big.Int), new(big.Int).SetBytes(p1.Bytes()) +} + +func (curve *Ed25519Curve) Hash(msg []byte) (*big.Int, *big.Int) { + data := new(curves.PointEd25519).Hash(msg).ToAffineCompressed() + pt, err := ed.NewIdentityPoint().SetBytes(data) + if err != nil { + panic(err) + } + return new(big.Int), new(big.Int).SetBytes(internal.ReverseScalarBytes(pt.Bytes())) +} diff --git a/sharing/v1/ed25519curve_test.go b/sharing/v1/ed25519curve_test.go new file mode 100755 index 0000000..d4b9456 --- /dev/null +++ b/sharing/v1/ed25519curve_test.go @@ -0,0 +1,28 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package v1 + +import ( + "math/big" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestEd25519ScalarMult(t *testing.T) { + // These values were caught during testing where this combination + // yields leading zeros in which big.Int chops. + // This test makes sure that this case is correctly handled + y := new( + big.Int, + ).SetBytes([]byte{37, 228, 49, 105, 78, 97, 108, 221, 63, 25, 125, 212, 108, 189, 247, 169, 52, 86, 150, 97, 93, 199, 212, 254, 122, 98, 189, 7, 97, 14, 78, 12}) + x := new(big.Int).SetInt64(4) + curve := Ed25519() + require.True(t, curve.IsOnCurve(nil, y)) + _, newY := curve.ScalarMult(nil, y, x.Bytes()) + require.True(t, curve.IsOnCurve(nil, newY)) +} diff --git a/sharing/v1/feldman.go b/sharing/v1/feldman.go new file mode 100644 index 0000000..0d2059a --- /dev/null +++ b/sharing/v1/feldman.go @@ -0,0 +1,107 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package v1 + +import ( + "crypto/elliptic" + "encoding/binary" + "fmt" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +// Feldman Verifiable Secret Sharing Scheme +type Feldman struct { + threshold, limit uint32 + curve elliptic.Curve +} + +// FeldmanResult contains all the data from calling Split +type FeldmanResult struct { + SecretShares []*ShamirShare + Verifiers []*ShareVerifier +} + +func NewFeldman(threshold, limit uint32, curve elliptic.Curve) (*Feldman, error) { + if limit < threshold { + return nil, fmt.Errorf("limit cannot be less than threshold") + } + if threshold < 2 { + return nil, fmt.Errorf("threshold must be at least 2") + } + return &Feldman{ + threshold, limit, curve, + }, nil +} + +func (f Feldman) Split(secret []byte) ([]*ShareVerifier, []*ShamirShare, error) { + field := curves.NewField(f.curve.Params().N) + shamir := Shamir{f.threshold, f.limit, field} + shares, poly, err := shamir.GetSharesAndPolynomial(secret) + if err != nil { + return nil, nil, err + } + // Generate the verifiable commitments to the polynomial for the shares + verifiers := make([]*ShareVerifier, len(poly.Coefficients)) + for i, c := range poly.Coefficients { + v, err := curves.NewScalarBaseMult(f.curve, c.Value) + if err != nil { + return nil, nil, err + } + verifiers[i] = v + } + return verifiers, shares, nil +} + +func (f Feldman) Combine(shares ...*ShamirShare) ([]byte, error) { + field := curves.NewField(f.curve.Params().N) + shamir := Shamir{f.threshold, f.limit, field} + return shamir.Combine(shares...) +} + +// Verify checks a share for validity +func (f Feldman) Verify(share *ShamirShare, verifiers []*ShareVerifier) (bool, error) { + if len(verifiers) < int(f.threshold) { + return false, fmt.Errorf("not enough verifiers to check") + } + field := curves.NewField(f.curve.Params().N) + + xBytes := make([]byte, 4) + binary.BigEndian.PutUint32(xBytes, share.Identifier) + x := field.ElementFromBytes(xBytes) + + i := share.Value.Modulus.One() + + // c_0 + rhs := verifiers[0] + + // Compute the sum of products + // c_0 * c_1^i * c_2^{i^2} * c_3^{i^3} ... c_t^{i_t} + for j := 1; j < len(verifiers); j++ { + // i *= x + i = i.Mul(x) + + c, err := verifiers[j].ScalarMult(i.Value) + if err != nil { + return false, err + } + + // ... * c_j^{i^j} + rhs, err = rhs.Add(c) + if err != nil { + return false, err + } + } + + lhs, err := curves.NewScalarBaseMult(f.curve, share.Value.Value) + if err != nil { + return false, err + } + + // Check if lhs == rhs + return lhs.Equals(rhs), nil +} diff --git a/sharing/v1/k256_feldman_test.go b/sharing/v1/k256_feldman_test.go new file mode 100755 index 0000000..326ae51 --- /dev/null +++ b/sharing/v1/k256_feldman_test.go @@ -0,0 +1,168 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package v1 + +import ( + "math/big" + "testing" + + "github.com/btcsuite/btcd/btcec/v2" + "github.com/stretchr/testify/require" +) + +func TestK256FeldmanSplitInvalidArgs(t *testing.T) { + _, err := NewFeldman(0, 0, btcec.S256()) + require.NotNil(t, err) + _, err = NewFeldman(3, 2, btcec.S256()) + require.NotNil(t, err) + _, err = NewFeldman(1, 10, btcec.S256()) + require.NotNil(t, err) + scheme, err := NewFeldman(2, 3, btcec.S256()) + require.Nil(t, err) + require.NotNil(t, scheme) + _, _, err = scheme.Split([]byte{}) + require.NotNil(t, err) + _, _, err = scheme.Split( + []byte{ + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + }, + ) + require.NotNil(t, err) +} + +func TestK256FeldmanCombineNoShares(t *testing.T) { + scheme, err := NewFeldman(2, 3, btcec.S256()) + require.Nil(t, err) + require.NotNil(t, scheme) + _, err = scheme.Combine() + require.NotNil(t, err) +} + +func TestK256FeldmanCombineDuplicateShare(t *testing.T) { + scheme, err := NewFeldman(2, 3, btcec.S256()) + require.Nil(t, err) + require.NotNil(t, scheme) + _, err = scheme.Combine([]*ShamirShare{ + { + Identifier: 1, + Value: field.NewElement(big.NewInt(3)), + }, + { + Identifier: 1, + Value: field.NewElement(big.NewInt(3)), + }, + }...) + require.NotNil(t, err) +} + +func TestK256FeldmanCombineBadIdentifier(t *testing.T) { + scheme, err := NewFeldman(2, 3, btcec.S256()) + require.Nil(t, err) + require.NotNil(t, scheme) + shares := []*ShamirShare{ + { + Identifier: 0, + Value: field.NewElement(big.NewInt(3)), + }, + { + Identifier: 2, + Value: field.NewElement(big.NewInt(3)), + }, + } + _, err = scheme.Combine(shares...) + require.NotNil(t, err) + shares[0] = &ShamirShare{ + Identifier: 4, + Value: field.NewElement(big.NewInt(3)), + } + _, err = scheme.Combine(shares...) + require.NotNil(t, err) +} + +func TestK256FeldmanCombineSingle(t *testing.T) { + scheme, err := NewFeldman(2, 3, btcec.S256()) + require.Nil(t, err) + require.NotNil(t, scheme) + + verifiers, shares, err := scheme.Split([]byte("test")) + require.Nil(t, err) + require.NotNil(t, shares) + for _, s := range shares { + ok, err := scheme.Verify(s, verifiers) + require.Nil(t, err) + require.True(t, ok) + } + secret, err := scheme.Combine(shares...) + require.Nil(t, err) + require.Equal(t, secret, []byte("test")) +} + +func TestK256FeldmanAllCombinations(t *testing.T) { + scheme, err := NewFeldman(3, 5, btcec.S256()) + require.Nil(t, err) + require.NotNil(t, scheme) + + secret := []byte("test") + verifiers, shares, err := scheme.Split(secret) + for _, s := range shares { + ok, err := scheme.Verify(s, verifiers) + require.Nil(t, err) + require.True(t, ok) + } + require.Nil(t, err) + require.NotNil(t, shares) + // There are 5*4*3 possible combinations + for i := 0; i < 5; i++ { + for j := 0; j < 5; j++ { + if i == j { + continue + } + for k := 0; k < 5; k++ { + if i == k || j == k { + continue + } + + rSecret, err := scheme.Combine(shares[i], shares[j], shares[k]) + require.Nil(t, err) + require.NotNil(t, rSecret) + require.Equal(t, rSecret, secret) + } + } + } +} diff --git a/sharing/v1/k256_pedersen_test.go b/sharing/v1/k256_pedersen_test.go new file mode 100644 index 0000000..8a12158 --- /dev/null +++ b/sharing/v1/k256_pedersen_test.go @@ -0,0 +1,201 @@ +// +// Copyright Coinbase, Inc. 
All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package v1 + +import ( + "math/big" + "testing" + + "github.com/btcsuite/btcd/btcec/v2" + "github.com/stretchr/testify/require" + + core "github.com/sonr-io/sonr/crypto/core/curves" +) + +var ( + k256BasePoint = &core.EcPoint{Curve: btcec.S256(), X: btcec.S256().Gx, Y: btcec.S256().Gy} + testPointK256, _ = core.NewScalarBaseMult(btcec.S256(), big.NewInt(2222)) +) + +func TestK256PedersenSplitInvalidArgs(t *testing.T) { + _, err := NewPedersen(0, 0, nil) + require.NotNil(t, err) + _, err = NewPedersen(3, 2, nil) + require.NotNil(t, err) + _, err = NewPedersen(1, 10, nil) + require.NotNil(t, err) + _, err = NewPedersen(2, 3, nil) + require.NotNil(t, err) + scheme, err := NewPedersen(2, 3, k256BasePoint) + require.Nil(t, err) + require.NotNil(t, scheme) + _, err = scheme.Split([]byte{}) + require.NotNil(t, err) + // test that split doesn't work on secrets bigger than the modulus + _, err = scheme.Split( + []byte{ + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + }, + ) + require.NotNil(t, err) +} + +func TestK256PedersenCombineNoShares(t *testing.T) { + scheme, err := NewPedersen(2, 3, k256BasePoint) + require.Nil(t, err) + require.NotNil(t, scheme) + _, err = scheme.Combine() + require.NotNil(t, err) +} + +func TestK256PedersenCombineDuplicateShare(t *testing.T) { + scheme, err := NewPedersen(2, 3, k256BasePoint) + require.Nil(t, err) + require.NotNil(t, scheme) + _, err = scheme.Combine([]*ShamirShare{ + { + Identifier: 1, + Value: field.NewElement(big.NewInt(3)), + }, + { + Identifier: 1, + Value: field.NewElement(big.NewInt(3)), + }, + }...) + require.NotNil(t, err) +} + +func TestK256PedersenCombineBadIdentifier(t *testing.T) { + scheme, err := NewPedersen(2, 3, k256BasePoint) + require.Nil(t, err) + require.NotNil(t, scheme) + shares := []*ShamirShare{ + { + Identifier: 0, + Value: field.NewElement(big.NewInt(3)), + }, + { + Identifier: 2, + Value: field.NewElement(big.NewInt(3)), + }, + } + _, err = scheme.Combine(shares...) + require.NotNil(t, err) + shares[0] = &ShamirShare{ + Identifier: 4, + Value: field.NewElement(big.NewInt(3)), + } + _, err = scheme.Combine(shares...) + require.Error(t, err) +} + +func TestK256GeneratorFromHashedBytes(t *testing.T) { + x, y, err := K256GeneratorFromHashedBytes( + []byte("Fair is foul, and foul is fair: Hover through the fog and filthy air."), + ) + require.NoError(t, err) + require.NotNil(t, x) + require.NotNil(t, y) + require.True(t, btcec.S256().IsOnCurve(x, y)) +} + +func TestK256PedersenCombineSingle(t *testing.T) { + scheme, err := NewPedersen(2, 3, testPointK256) + require.Nil(t, err) + require.NotNil(t, scheme) + + result, err := scheme.Split([]byte("test")) + require.Nil(t, err) + require.NotNil(t, result) + for i, s := range result.SecretShares { + ok, err := scheme.Verify(s, result.BlindingShares[i], result.BlindedVerifiers) + require.Nil(t, err) + require.True(t, ok) + } + secret, err := scheme.Combine(result.SecretShares...) 
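	// The recombined secret must round-trip back to the original input; the
	// blinding shares are exercised separately in TestK256PedersenAllCombinations
	// below, where they reconstruct result.Blinding.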
+ require.Nil(t, err) + require.Equal(t, secret, []byte("test")) +} + +func TestK256PedersenAllCombinations(t *testing.T) { + scheme, err := NewPedersen(3, 5, testPointK256) + require.Nil(t, err) + require.NotNil(t, scheme) + + secret := []byte("test") + result, err := scheme.Split(secret) + for i, s := range result.SecretShares { + ok, err := scheme.Verify(s, result.BlindingShares[i], result.BlindedVerifiers) + require.Nil(t, err) + require.True(t, ok) + } + require.Nil(t, err) + require.NotNil(t, result) + // There are 5*4*3 possible combinations + for i := 0; i < 5; i++ { + for j := 0; j < 5; j++ { + if i == j { + continue + } + for k := 0; k < 5; k++ { + if i == k || j == k { + continue + } + + rSecret, err := scheme.Combine( + result.SecretShares[i], + result.SecretShares[j], + result.SecretShares[k], + ) + require.Nil(t, err) + require.NotNil(t, rSecret) + require.Equal(t, rSecret, secret) + + bSecret, err := scheme.Combine( + result.BlindingShares[i], + result.BlindingShares[j], + result.BlindingShares[k], + ) + require.Nil(t, err) + require.NotNil(t, bSecret) + require.Equal(t, bSecret, result.Blinding.Bytes()) + } + } + } +} diff --git a/sharing/v1/p256_feldman_test.go b/sharing/v1/p256_feldman_test.go new file mode 100755 index 0000000..9175e1b --- /dev/null +++ b/sharing/v1/p256_feldman_test.go @@ -0,0 +1,168 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package v1 + +import ( + "crypto/elliptic" + "math/big" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestP256FeldmanSplitInvalidArgs(t *testing.T) { + _, err := NewFeldman(0, 0, elliptic.P256()) + require.NotNil(t, err) + _, err = NewFeldman(3, 2, elliptic.P256()) + require.NotNil(t, err) + _, err = NewFeldman(1, 10, elliptic.P256()) + require.NotNil(t, err) + scheme, err := NewFeldman(2, 3, elliptic.P256()) + require.Nil(t, err) + require.NotNil(t, scheme) + _, _, err = scheme.Split([]byte{}) + require.NotNil(t, err) + _, _, err = scheme.Split( + []byte{ + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + }, + ) + require.NotNil(t, err) +} + +func TestP256FeldmanCombineNoShares(t *testing.T) { + scheme, err := NewFeldman(2, 3, elliptic.P256()) + require.Nil(t, err) + require.NotNil(t, scheme) + _, err = scheme.Combine() + require.NotNil(t, err) +} + +func TestP256FeldmanCombineDuplicateShare(t *testing.T) { + scheme, err := NewFeldman(2, 3, elliptic.P256()) + require.Nil(t, err) + require.NotNil(t, scheme) + _, err = scheme.Combine([]*ShamirShare{ + { + Identifier: 1, + Value: field.NewElement(big.NewInt(3)), + }, + { + Identifier: 1, + Value: field.NewElement(big.NewInt(3)), + }, + }...) + require.NotNil(t, err) +} + +func TestP256FeldmanCombineBadIdentifier(t *testing.T) { + scheme, err := NewFeldman(2, 3, elliptic.P256()) + require.Nil(t, err) + require.NotNil(t, scheme) + shares := []*ShamirShare{ + { + Identifier: 0, + Value: field.NewElement(big.NewInt(3)), + }, + { + Identifier: 2, + Value: field.NewElement(big.NewInt(3)), + }, + } + _, err = scheme.Combine(shares...) + require.NotNil(t, err) + shares[0] = &ShamirShare{ + Identifier: 4, + Value: field.NewElement(big.NewInt(3)), + } + _, err = scheme.Combine(shares...) 
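	// Identifier 4 lies outside the valid range [1, limit] for this (2, 3) scheme,
	// so Combine should fail here just as it did for identifier 0 above.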
+ require.NotNil(t, err) +} + +func TestP256FeldmanCombineSingle(t *testing.T) { + scheme, err := NewFeldman(2, 3, elliptic.P256()) + require.Nil(t, err) + require.NotNil(t, scheme) + + verifiers, shares, err := scheme.Split([]byte("test")) + require.Nil(t, err) + require.NotNil(t, shares) + for _, s := range shares { + ok, err := scheme.Verify(s, verifiers) + require.Nil(t, err) + require.True(t, ok) + } + secret, err := scheme.Combine(shares...) + require.Nil(t, err) + require.Equal(t, secret, []byte("test")) +} + +func TestP256FeldmanAllCombinations(t *testing.T) { + scheme, err := NewFeldman(3, 5, elliptic.P256()) + require.Nil(t, err) + require.NotNil(t, scheme) + + secret := []byte("test") + verifiers, shares, err := scheme.Split(secret) + for _, s := range shares { + ok, err := scheme.Verify(s, verifiers) + require.Nil(t, err) + require.True(t, ok) + } + require.Nil(t, err) + require.NotNil(t, shares) + // There are 5*4*3 possible combinations + for i := 0; i < 5; i++ { + for j := 0; j < 5; j++ { + if i == j { + continue + } + for k := 0; k < 5; k++ { + if i == k || j == k { + continue + } + + rSecret, err := scheme.Combine(shares[i], shares[j], shares[k]) + require.Nil(t, err) + require.NotNil(t, rSecret) + require.Equal(t, rSecret, secret) + } + } + } +} diff --git a/sharing/v1/pedersen.go b/sharing/v1/pedersen.go new file mode 100644 index 0000000..5a9c202 --- /dev/null +++ b/sharing/v1/pedersen.go @@ -0,0 +1,178 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package v1 + +import ( + crand "crypto/rand" + "encoding/binary" + "fmt" + "math/big" + + "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/internal" +) + +// Pedersen Verifiable Secret Sharing Scheme +type Pedersen struct { + threshold, limit uint32 + generator *curves.EcPoint +} + +// PedersenResult contains all the data from calling Split +type PedersenResult struct { + Blinding *big.Int + BlindingShares, SecretShares []*ShamirShare + BlindedVerifiers []*ShareVerifier + Verifiers []*ShareVerifier +} + +// NewPedersen creates a new pedersen VSS +func NewPedersen(threshold, limit uint32, generator *curves.EcPoint) (*Pedersen, error) { + if limit < threshold { + return nil, fmt.Errorf("limit cannot be less than threshold") + } + if threshold < 2 { + return nil, fmt.Errorf("threshold must be at least 2") + } + + if generator == nil { + return nil, internal.ErrNilArguments + } + if generator.IsIdentity() { + return nil, fmt.Errorf("generator point cannot be at infinity") + } + if !generator.IsOnCurve() { + return nil, fmt.Errorf("generator point must be on the curve") + } + + return &Pedersen{ + threshold, limit, generator, + }, nil +} + +// Split creates the verifiers, blinding and shares +func (pd Pedersen) Split(secret []byte) (*PedersenResult, error) { + // generate a random blinding factor + blinding, err := crand.Int(crand.Reader, pd.generator.Curve.Params().N) + if err != nil { + return nil, err + } + + field := curves.NewField(pd.generator.Curve.Params().N) + shamir := Shamir{pd.threshold, pd.limit, field} + // split the secret into shares + shares, polySecret, err := shamir.GetSharesAndPolynomial(secret) + if err != nil { + return nil, err + } + + // split the blinding into shares + blindingShares, polyBlinding, err := shamir.GetSharesAndPolynomial(blinding.Bytes()) + if err != nil { + return nil, err + } + + // Generate the verifiable commitments to the polynomial for the shares + blindedverifiers := make([]*ShareVerifier, 
pd.threshold) + verifiers := make([]*ShareVerifier, pd.threshold) + + // ({p0 * G + b0 * H}, ...,{pt * G + bt * H}) + for i, c := range polySecret.Coefficients { + s, err := curves.NewScalarBaseMult(pd.generator.Curve, c.Value) + if err != nil { + return nil, err + } + b, err := pd.generator.ScalarMult(polyBlinding.Coefficients[i].Value) + if err != nil { + return nil, err + } + + bv, err := s.Add(b) + if err != nil { + return nil, err + } + + blindedverifiers[i] = bv + verifiers[i] = s + } + return &PedersenResult{ + blinding, blindingShares, shares, blindedverifiers, verifiers, + }, nil +} + +// Combine recreates the original secret from the shares +func (pd Pedersen) Combine(shares ...*ShamirShare) ([]byte, error) { + field := curves.NewField(pd.generator.Curve.Params().N) + shamir := Shamir{pd.threshold, pd.limit, field} + return shamir.Combine(shares...) +} + +// Verify checks a share for validity +func (pd Pedersen) Verify( + share *ShamirShare, + blinding *ShamirShare, + blindedverifiers []*ShareVerifier, +) (bool, error) { + if len(blindedverifiers) < int(pd.threshold) { + return false, fmt.Errorf("not enough blindedverifiers to check") + } + field := curves.NewField(pd.generator.Curve.Params().N) + + xBytes := make([]byte, 4) + binary.BigEndian.PutUint32(xBytes, share.Identifier) + x := field.ElementFromBytes(xBytes) + + i := share.Value.Modulus.One() + + // c_0 + rhs := blindedverifiers[0] + + // Compute the sum of products + // c_0 * c_1^i * c_2^{i^2} * c_3^{i^3} ... c_t^{i_t} + for j := 1; j < len(blindedverifiers); j++ { + // i *= x + i = i.Mul(x) + + c, err := blindedverifiers[j].ScalarMult(i.Value) + if err != nil { + return false, err + } + + // ... * c_j^{i^j} + rhs, err = rhs.Add(c) + if err != nil { + return false, err + } + } + + lhs, err := curves.NewScalarBaseMult(pd.generator.Curve, share.Value.Value) + if err != nil { + return false, err + } + tmp, err := pd.generator.ScalarMult(blinding.Value.Value) + if err != nil { + return false, err + } + lhs, err = lhs.Add(tmp) + if err != nil { + return false, err + } + + // Check if lhs == rhs + return lhs.Equals(rhs), nil +} + +// K256GeneratorFromHashedBytes computes a generator whose discrete log is unknown +// from a bytes sequence +func K256GeneratorFromHashedBytes(bytes []byte) (x, y *big.Int, err error) { + pt := new(curves.PointK256).Hash(bytes) + p, _ := pt.(*curves.PointK256) + x = p.X().BigInt() + y = p.Y().BigInt() + err = nil + return x, y, err +} diff --git a/sharing/v1/polynomial.go b/sharing/v1/polynomial.go new file mode 100644 index 0000000..09f2326 --- /dev/null +++ b/sharing/v1/polynomial.go @@ -0,0 +1,49 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package v1 + +import ( + "github.com/sonr-io/sonr/crypto/core/curves" +) + +type polynomial struct { + Coefficients []*curves.Element +} + +// newPoly creates a random polynomial of the given degree but with the provided intercept value +func newPoly(intercept *curves.Element, degree uint32) (polynomial, error) { + p := polynomial{ + Coefficients: make([]*curves.Element, degree), + } + + // Intercept is the value to be split + p.Coefficients[0] = intercept + + // random coefficients + for i := uint32(1); i < degree; i++ { + c, err := intercept.Field().RandomElement(nil) + if err != nil { + return p, err + } + p.Coefficients[i] = c + } + + return p, nil +} + +// evaluate returns the value of the polynomial for the given x +func (p polynomial) evaluate(x *curves.Element) *curves.Element { + // Compute the polynomial value using Horner's Method + + degree := len(p.Coefficients) - 1 + result := p.Coefficients[degree].Clone() + for i := degree - 1; i >= 0; i-- { + result = result.Mul(x).Add(p.Coefficients[i]) + } + + return result +} diff --git a/sharing/v1/polynomial_test.go b/sharing/v1/polynomial_test.go new file mode 100755 index 0000000..f94dc76 --- /dev/null +++ b/sharing/v1/polynomial_test.go @@ -0,0 +1,29 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +// Copyright Coinbase, Inc. All Rights Reserved. +// +// +// SPDX-License-Identifier: Apache-2.0 +// + +package v1 + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestNewPoly(t *testing.T) { + secret := field.ElementFromBytes([]byte("test")) + + poly, err := newPoly(secret, 4) + require.Nil(t, err) + require.NotNil(t, poly) + + require.Equal(t, poly.Coefficients[0], secret) +} diff --git a/sharing/v1/shamir.go b/sharing/v1/shamir.go new file mode 100644 index 0000000..62de4a9 --- /dev/null +++ b/sharing/v1/shamir.go @@ -0,0 +1,214 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package v1 + +import ( + "encoding/binary" + "fmt" + "math/big" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +// ShamirShare is the data from splitting a secret +type ShamirShare struct { + // x-coordinate + Identifier uint32 `json:"identifier"` + // y-coordinate + Value *curves.Element `json:"value"` +} + +// NewShamirShare creates a ShamirShare given the Identifier, value, and Field for the value +func NewShamirShare(x uint32, y []byte, f *curves.Field) *ShamirShare { + return &ShamirShare{ + Identifier: x, + Value: f.ElementFromBytes(y), + } +} + +// Bytes returns the representation of the share in bytes with the identifier as the first +// 4 bytes +func (s ShamirShare) Bytes() []byte { + a := make([]byte, 4) + binary.BigEndian.PutUint32(a, s.Identifier) + a = append(a, s.Value.Bytes()...) 
+ return a +} + +// Add returns the sum of two Shamir shares +func (s ShamirShare) Add(other *ShamirShare) *ShamirShare { + if s.Identifier != other.Identifier { + panic("identifiers must match for valid addition") + } + newSecret := s.Value.Add(other.Value) + return NewShamirShare(s.Identifier, newSecret.Bytes(), s.Value.Field()) +} + +// Shamir is the Shamir secret sharing scheme +type Shamir struct { + threshold, limit uint32 + field *curves.Field +} + +// NewShamir creates a Shamir secret sharing scheme +func NewShamir(threshold, limit int, field *curves.Field) (*Shamir, error) { + if limit < threshold { + return nil, fmt.Errorf("limit cannot be less than threshold") + } + if threshold < 2 { + return nil, fmt.Errorf("threshold must be at least 2") + } + return &Shamir{ + uint32(threshold), uint32(limit), field, + }, nil +} + +// Split takes a secret and splits it into multiple shares that requires +// threshold to reconstruct +func (s *Shamir) Split(secret []byte) ([]*ShamirShare, error) { + shares, _, err := s.GetSharesAndPolynomial(secret) + return shares, err +} + +// Combine takes any number of shares and tries to combine them into the original secret +func (s *Shamir) Combine(shares ...*ShamirShare) ([]byte, error) { + if len(shares) < int(s.threshold) { + return nil, fmt.Errorf("not enough shares to combine") + } + + dups := make(map[uint32]bool) + xCoordinates := make([]*curves.Element, s.threshold) + yCoordinates := make([]*curves.Element, s.threshold) + + for i := 0; i < int(s.threshold); i++ { + r := shares[i] + if r.Identifier > s.limit || r.Identifier < 1 { + return nil, fmt.Errorf("invalid share identifier") + } + if _, ok := dups[r.Identifier]; ok { + return nil, fmt.Errorf("duplicate shares cannot be used") + } + + xBytes := make([]byte, 4) + binary.BigEndian.PutUint32(xBytes, r.Identifier) + xCoordinates[i] = s.field.ElementFromBytes(xBytes) + yCoordinates[i] = r.Value + } + + secret, err := s.Interpolate(xCoordinates, yCoordinates) + if err != nil { + return nil, err + } + return secret.Bytes(), nil +} + +// getSharesAndPolynomial returns the shares for the specified secret and the polynomial +// used to create the shares +func (s *Shamir) GetSharesAndPolynomial(secret []byte) ([]*ShamirShare, *polynomial, error) { + if len(secret) == 0 { + return nil, nil, fmt.Errorf("cannot split an empty secret") + } + intSecret := new(big.Int).SetBytes(secret) + if !s.field.IsValid(intSecret) { + return nil, nil, fmt.Errorf("secret is too large") + } + + elemSecret := s.field.NewElement(intSecret) + poly, err := newPoly(elemSecret, s.threshold) + if err != nil { + return nil, nil, fmt.Errorf("failed to generate polynomial: %w", err) + } + + shares := make([]*ShamirShare, s.limit) + for i := uint32(0); i < s.limit; i++ { + x := s.field.NewElement(big.NewInt(int64(i + 1))) + y := poly.evaluate(x) + shares[i] = &ShamirShare{ + Identifier: i + 1, + Value: y, + } + } + return shares, &poly, nil +} + +// interpolate calculates the lagrange interpolation +func (s *Shamir) Interpolate( + xCoordinates, yCoordinates []*curves.Element, +) (*curves.Element, error) { + if len(xCoordinates) < int(s.threshold) || + len(yCoordinates) < int(s.threshold) { + return nil, fmt.Errorf("not enough points") + } + + zero := xCoordinates[0].Field().Zero() + result := yCoordinates[0].Field().Zero() + + for i := 0; i < int(s.threshold); i++ { + basis := xCoordinates[0].Field().One() + for j := 0; j < int(s.threshold); j++ { + if i == j { + continue + } + + // x_m - x_j + denom := 
xCoordinates[j].Sub(xCoordinates[i]) + if denom.IsEqual(zero) { + return nil, fmt.Errorf("invalid x coordinates") + } + // x_m / x_m - x_j + basis = basis.Mul(xCoordinates[j].Div(denom)) + } + + result = result.Add(yCoordinates[i].Mul(basis)) + } + + return result, nil +} + +// ComputeL is a function that computes all Lagrange coefficients. +// This function is particularly needed in FROST tSchnorr signature. +func (s Shamir) ComputeL(shares ...*ShamirShare) ([]*curves.Element, error) { + if len(shares) < int(s.threshold) { + return nil, fmt.Errorf("not enough shares to compute Lagrange coefficients") + } + dups := make(map[uint32]bool) + xCoordinates := make([]*curves.Element, s.threshold) + for i := 0; i < int(s.threshold); i++ { + r := shares[i] + if r.Identifier > s.limit || r.Identifier < 1 { + return nil, fmt.Errorf("ComputeL: invalid share identifier") + } + if _, ok := dups[r.Identifier]; ok { + return nil, fmt.Errorf("ComputeL: duplicate shares cannot be used") + } + + xBytes := make([]byte, 4) + binary.BigEndian.PutUint32(xBytes, r.Identifier) + xCoordinates[i] = s.field.ElementFromBytes(xBytes) + } + + zero := xCoordinates[0].Field().Zero() + result := make([]*curves.Element, s.threshold) + + for i := 0; i < int(s.threshold); i++ { + basis := xCoordinates[0].Field().One() + for j := 0; j < int(s.threshold); j++ { + if i == j { + continue + } + // x_m - x_j + denom := xCoordinates[j].Sub(xCoordinates[i]) + if denom.IsEqual(zero) { + return nil, fmt.Errorf("invalid x coordinates") + } + // x_m / x_m - x_j + basis = basis.Mul(xCoordinates[j].Div(denom)) + } + result[i] = basis + } + return result, nil +} diff --git a/sharing/v1/shamir_test.go b/sharing/v1/shamir_test.go new file mode 100644 index 0000000..962569f --- /dev/null +++ b/sharing/v1/shamir_test.go @@ -0,0 +1,241 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package v1 + +import ( + "encoding/json" + "math/big" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +func TestShamirSplitInvalidArgs(t *testing.T) { + _, err := NewShamir(0, 0, field) + require.NotNil(t, err) + _, err = NewShamir(3, 2, field) + require.NotNil(t, err) + _, err = NewShamir(1, 10, field) + require.NotNil(t, err) + scheme, err := NewShamir(2, 3, field) + require.Nil(t, err) + require.NotNil(t, scheme) + _, err = scheme.Split([]byte{}) + require.NotNil(t, err) + _, err = scheme.Split( + []byte{ + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + 0x65, + }, + ) + require.NotNil(t, err) +} + +func TestShamirCombineNoShares(t *testing.T) { + scheme, err := NewShamir(2, 3, field) + require.Nil(t, err) + require.NotNil(t, scheme) + _, err = scheme.Combine() + require.NotNil(t, err) +} + +func TestShamirCombineDuplicateShare(t *testing.T) { + scheme, err := NewShamir(2, 3, field) + require.Nil(t, err) + require.NotNil(t, scheme) + _, err = scheme.Combine([]*ShamirShare{ + { + Identifier: 1, + Value: field.NewElement(big.NewInt(3)), + }, + { + Identifier: 1, + Value: field.NewElement(big.NewInt(3)), + }, + }...) 
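	// Although threshold-many shares were supplied, both carry identifier 1, so
	// Combine must reject them as duplicates.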
+ require.NotNil(t, err) +} + +func TestShamirCombineBadIdentifier(t *testing.T) { + scheme, err := NewShamir(2, 3, field) + require.Nil(t, err) + require.NotNil(t, scheme) + shares := []*ShamirShare{ + { + Identifier: 0, + Value: field.NewElement(big.NewInt(3)), + }, + { + Identifier: 2, + Value: field.NewElement(big.NewInt(3)), + }, + } + _, err = scheme.Combine(shares...) + require.NotNil(t, err) + shares[0] = &ShamirShare{ + Identifier: 4, + Value: field.NewElement(big.NewInt(3)), + } + _, err = scheme.Combine(shares...) + require.NotNil(t, err) +} + +func TestShamirCombineSingle(t *testing.T) { + scheme, err := NewShamir(2, 3, field) + require.Nil(t, err) + require.NotNil(t, scheme) + + shares, err := scheme.Split([]byte("test")) + require.Nil(t, err) + require.NotNil(t, shares) + secret, err := scheme.Combine(shares...) + require.Nil(t, err) + require.Equal(t, secret, []byte("test")) +} + +// Test ComputeL function to compute Lagrange coefficients. +func TestShamirComputeL(t *testing.T) { + scheme, err := NewShamir(2, 2, field) + require.Nil(t, err) + require.NotNil(t, scheme) + secret := []byte("test") + shares, err := scheme.Split(secret) + require.Nil(t, err) + require.NotNil(t, shares) + lCoeffs, err := scheme.ComputeL(shares[0], shares[1]) + require.Nil(t, err) + require.NotNil(t, lCoeffs) + + // Checking we can reconstruct the same secret using Lagrange coefficients. + inputShares := [2]*ShamirShare{shares[0], shares[1]} + yCoordinates := make([]*curves.Element, 2) + for i := 0; i < 2; i++ { + r := inputShares[i] + yCoordinates[i] = r.Value + } + result := yCoordinates[0].Field().Zero() + for i := 0; i < 2; i++ { + result = result.Add(yCoordinates[i].Mul(lCoeffs[i])) + } + require.Equal(t, result.Bytes(), secret) +} + +func TestShamirAllCombinations(t *testing.T) { + scheme, err := NewShamir(3, 5, field) + require.Nil(t, err) + require.NotNil(t, scheme) + + secret := []byte("test") + shares, err := scheme.Split(secret) + require.Nil(t, err) + require.NotNil(t, shares) + // There are 5*4*3 possible combinations + for i := 0; i < 5; i++ { + for j := 0; j < 5; j++ { + if i == j { + continue + } + for k := 0; k < 5; k++ { + if i == k || j == k { + continue + } + + rSecret, err := scheme.Combine(shares[i], shares[j], shares[k]) + require.Nil(t, err) + require.NotNil(t, rSecret) + require.Equal(t, rSecret, secret) + } + } + } +} + +// Ensures that ShamirShare's un/marshal successfully. +func TestMarshalJsonRoundTrip(t *testing.T) { + oneBelowModulus := new(big.Int).Sub(modulus, big.NewInt(1)) + shares := []ShamirShare{ + {0, field.NewElement(big.NewInt(300))}, + {2, field.NewElement(big.NewInt(300000))}, + {20, field.NewElement(big.NewInt(12812798))}, + {31, field.NewElement(big.NewInt(17))}, + {57, field.NewElement(big.NewInt(5066680))}, + {128, field.NewElement(big.NewInt(3005))}, + {19, field.NewElement(big.NewInt(317))}, + {7, field.NewElement(big.NewInt(323))}, + {222, field.NewElement(oneBelowModulus)}, + } + // Run all the tests! 
+ for _, in := range shares { + bytes, err := json.Marshal(in) + require.NoError(t, err) + require.NotNil(t, bytes) + + // Unmarshal and test + out := &ShamirShare{} + err = json.Unmarshal(bytes, &out) + require.NoError(t, err) + require.Equal(t, in.Identifier, out.Identifier) + require.Equal(t, in.Value.Value.Bytes(), out.Value.Value.Bytes()) + } +} + +func TestSharesAdd(t *testing.T) { + finiteField := curves.NewField(big.NewInt(7)) + one := NewShamirShare(0, []byte{0x01}, finiteField) + two := NewShamirShare(0, []byte{0x02}, finiteField) + + // basic addition + sum := one.Add(two) + require.Equal(t, uint32(0), sum.Identifier) + require.Equal(t, []byte{0x03}, sum.Value.Bytes()) + + // addition is performed within the globalField + sum = two.Add(NewShamirShare(0, []byte{0x06}, finiteField)) + require.Equal(t, uint32(0), sum.Identifier) + require.Equal(t, []byte{0x01}, sum.Value.Bytes()) +} + +func TestSharesAdd_errors(t *testing.T) { + finiteField := curves.NewField(big.NewInt(7)) + one := NewShamirShare(0, []byte{0x01}, finiteField) + two := NewShamirShare(1, []byte{0x02}, finiteField) + require.PanicsWithValue(t, "identifiers must match for valid addition", func() { + one.Add(two) + }) +} diff --git a/signatures/bbs/blind_signature.go b/signatures/bbs/blind_signature.go new file mode 100644 index 0000000..5fdbf50 --- /dev/null +++ b/signatures/bbs/blind_signature.go @@ -0,0 +1,79 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package bbs + +import ( + "errors" + "fmt" + + "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/signatures/common" +) + +// BlindSignature is a BBS+ blind signature +// structurally identical to `Signature` but +// is used to help avoid misuse and confusion. +// +// 1 or more message have been hidden by the +// potential signature holder so the signer +// only knows a subset of the messages to be signed +type BlindSignature struct { + a curves.PairingPoint + e, s curves.Scalar +} + +// Init creates an empty signature to a specific curve +// which should be followed by UnmarshalBinary +func (sig *BlindSignature) Init(curve *curves.PairingCurve) *BlindSignature { + sig.a = curve.NewG1IdentityPoint() + sig.e = curve.NewScalar() + sig.s = curve.NewScalar() + return sig +} + +func (sig BlindSignature) MarshalBinary() ([]byte, error) { + out := append(sig.a.ToAffineCompressed(), sig.e.Bytes()...) + out = append(out, sig.s.Bytes()...) 
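	// Layout: compressed A (a G1 point) || e || s, mirroring UnmarshalBinary below.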
+ return out, nil +} + +func (sig *BlindSignature) UnmarshalBinary(data []byte) error { + pointLength := len(sig.a.ToAffineCompressed()) + scalarLength := len(sig.s.Bytes()) + expectedLength := pointLength + scalarLength*2 + if len(data) != expectedLength { + return fmt.Errorf("invalid byte sequence") + } + a, err := sig.a.FromAffineCompressed(data[:pointLength]) + if err != nil { + return err + } + e, err := sig.e.SetBytes(data[pointLength:(pointLength + scalarLength)]) + if err != nil { + return err + } + s, err := sig.s.SetBytes(data[(pointLength + scalarLength):]) + if err != nil { + return err + } + var ok bool + sig.a, ok = a.(curves.PairingPoint) + if !ok { + return errors.New("incorrect type conversion") + } + sig.e = e + sig.s = s + return nil +} + +func (sig BlindSignature) ToUnblinded(blinder common.SignatureBlinding) *Signature { + return &Signature{ + a: sig.a, + e: sig.e, + s: sig.s.Add(blinder), + } +} diff --git a/signatures/bbs/blind_signature_context.go b/signatures/bbs/blind_signature_context.go new file mode 100644 index 0000000..19d6a77 --- /dev/null +++ b/signatures/bbs/blind_signature_context.go @@ -0,0 +1,271 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package bbs + +import ( + "fmt" + "io" + "sort" + + "github.com/gtank/merlin" + "golang.org/x/crypto/sha3" + + "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/internal" + "github.com/sonr-io/sonr/crypto/signatures/common" +) + +// BlindSignatureContext contains the data used for computing +// a blind signature and verifying proof of hidden messages from +// a future signature holder. A potential holder commits to messages +// that the signer will not know during the signing process +// rendering them hidden, but requires the holder to +// prove knowledge of those messages so a malicious party +// doesn't add just random data from anywhere. +type BlindSignatureContext struct { + // The blinded signature commitment + commitment common.Commitment + // The challenge hash for the Fiat-Shamir heuristic + challenge curves.Scalar + // The proofs of hidden messages. + proofs []curves.Scalar +} + +// NewBlindSignatureContext creates the data needed to +// send to a signer to complete a blinded signature +// `msgs` is an index to message map where the index +// corresponds to the index in `generators` +// msgs are hidden from the signer but can also be empty +// if no messages are hidden but a blind signature is still desired +// because the signer should have no knowledge of the signature +func NewBlindSignatureContext( + curve *curves.PairingCurve, + msgs map[int]curves.Scalar, + generators *MessageGenerators, + nonce common.Nonce, + reader io.Reader, +) (*BlindSignatureContext, common.SignatureBlinding, error) { + if curve == nil || generators == nil || nonce == nil || reader == nil { + return nil, nil, internal.ErrNilArguments + } + points := make([]curves.Point, 0) + secrets := make([]curves.Scalar, 0) + + committing := common.NewProofCommittedBuilder(&curves.Curve{ + Scalar: curve.Scalar, + Point: curve.Scalar.Point().(curves.PairingPoint).OtherGroup(), + Name: curve.Name, + }) + + // C = h0^blinding_factor*h_i^m_i..... 
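	// Spelled out with the names used in this function, the loop and the
	// SumOfProducts call below build the commitment
	//
	//	C = h_0^blinding * Π h_{i+1}^{m_i}   over every hidden index i in msgs
	//
	// plus one random Schnorr commitment per generator; the Fiat-Shamir challenge
	// derived from the transcript then binds those commitments to the nonce.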
+ for i, m := range msgs { + if i > generators.length || i < 0 { + return nil, nil, fmt.Errorf("invalid index") + } + secrets = append(secrets, m) + pt := generators.Get(i + 1) + points = append(points, pt) + err := committing.CommitRandom(pt, reader) + if err != nil { + return nil, nil, err + } + } + blinding, ok := curve.Scalar.Random(reader).(common.SignatureBlinding) + if !ok { + return nil, nil, fmt.Errorf("unable to create signature blinding") + } + secrets = append(secrets, blinding) + + h0 := generators.Get(0) + points = append(points, h0) + err := committing.CommitRandom(h0, reader) + if err != nil { + return nil, nil, err + } + + // Create a random commitment, compute challenges and response. + // The proof of knowledge consists of a commitment and responses + // Holder and signer engage in a proof of knowledge for `commitment` + commitment := curve.Scalar.Point().(curves.PairingPoint).OtherGroup(). + SumOfProducts(points, secrets) + transcript := merlin.NewTranscript("new blind signature") + transcript.AppendMessage([]byte("random commitment"), committing.GetChallengeContribution()) + transcript.AppendMessage([]byte("blind commitment"), commitment.ToAffineCompressed()) + transcript.AppendMessage([]byte("nonce"), nonce.Bytes()) + okm := transcript.ExtractBytes([]byte("blind signature context challenge"), 64) + challenge, err := curve.Scalar.SetBytesWide(okm) + if err != nil { + return nil, nil, err + } + proofs, err := committing.GenerateProof(challenge, secrets) + if err != nil { + return nil, nil, err + } + return &BlindSignatureContext{ + commitment: commitment, + challenge: challenge, + proofs: proofs, + }, blinding, nil +} + +func (bsc *BlindSignatureContext) Init(curve *curves.PairingCurve) *BlindSignatureContext { + bsc.challenge = curve.NewScalar() + bsc.commitment = curve.Scalar.Point().(curves.PairingPoint).OtherGroup() + bsc.proofs = make([]curves.Scalar, 0) + return bsc +} + +// MarshalBinary store the generators as a sequence of bytes +// where each point is compressed. +// Needs (N + 1) * ScalarSize + PointSize +func (bsc BlindSignatureContext) MarshalBinary() ([]byte, error) { + buffer := append(bsc.commitment.ToAffineCompressed(), bsc.challenge.Bytes()...) + for _, p := range bsc.proofs { + buffer = append(buffer, p.Bytes()...) 
+ } + return buffer, nil +} + +func (bsc *BlindSignatureContext) UnmarshalBinary(in []byte) error { + scSize := len(bsc.challenge.Bytes()) + ptSize := len(bsc.commitment.ToAffineCompressed()) + if len(in) < scSize*2+ptSize { + return fmt.Errorf("insufficient number of bytes") + } + + if (len(in)-ptSize)%scSize != 0 { + return fmt.Errorf("invalid byte sequence") + } + commitment, err := bsc.commitment.FromAffineCompressed(in[:ptSize]) + if err != nil { + return err + } + challenge, err := bsc.challenge.SetBytes(in[ptSize:(ptSize + scSize)]) + if err != nil { + return err + } + + nProofs := ((len(in) - ptSize) / scSize) - 1 + proofs := make([]curves.Scalar, nProofs) + for i := 0; i < nProofs; i++ { + proofs[i], err = bsc.challenge.SetBytes(in[(ptSize + (i+1)*scSize):(ptSize + (i+2)*scSize)]) + if err != nil { + return err + } + } + bsc.commitment = commitment + bsc.challenge = challenge + bsc.proofs = proofs + return nil +} + +// Verify validates a proof of hidden messages +func (bsc BlindSignatureContext) Verify( + knownMsgs []int, + generators *MessageGenerators, + nonce common.Nonce, +) error { + known := make(map[int]bool, len(knownMsgs)) + for _, i := range knownMsgs { + if i > generators.length { + return fmt.Errorf("invalid message index") + } + known[i] = true + } + + points := make([]curves.Point, 0) + for i := 0; i < generators.length; i++ { + if _, contains := known[i]; !contains { + points = append(points, generators.Get(i+1)) + } + } + points = append(points, generators.Get(0), bsc.commitment) + scalars := append(bsc.proofs, bsc.challenge.Neg()) + + commitment := points[0].SumOfProducts(points, scalars) + transcript := merlin.NewTranscript("new blind signature") + transcript.AppendMessage([]byte("random commitment"), commitment.ToAffineCompressed()) + transcript.AppendMessage([]byte("blind commitment"), bsc.commitment.ToAffineCompressed()) + transcript.AppendMessage([]byte("nonce"), nonce.Bytes()) + okm := transcript.ExtractBytes([]byte("blind signature context challenge"), 64) + challenge, err := bsc.challenge.SetBytesWide(okm) + if err != nil { + return err + } + + if challenge.Cmp(bsc.challenge) != 0 { + return fmt.Errorf("invalid proof") + } + return nil +} + +// ToBlindSignature converts a blind signature where +// msgs are known to the signer +// `msgs` is an index to message map where the index +// corresponds to the index in `generators` +func (bsc BlindSignatureContext) ToBlindSignature( + msgs map[int]curves.Scalar, + sk *SecretKey, + generators *MessageGenerators, + nonce common.Nonce, +) (*BlindSignature, error) { + if sk == nil || generators == nil || nonce == nil { + return nil, internal.ErrNilArguments + } + if sk.value.IsZero() { + return nil, fmt.Errorf("invalid secret key") + } + tv1 := make([]int, 0, len(msgs)) + for i := range msgs { + if i > generators.length { + return nil, fmt.Errorf("not enough message generators") + } + tv1 = append(tv1, i) + } + sort.Ints(tv1) + signingMsgs := make([]curves.Scalar, len(msgs)) + for i, index := range tv1 { + signingMsgs[i] = msgs[index] + } + err := bsc.Verify(tv1, generators, nonce) + if err != nil { + return nil, err + } + + drbg := sha3.NewShake256() + _, _ = drbg.Write(sk.value.Bytes()) + addDeterministicNonceData(generators, signingMsgs, drbg) + // Should yield non-zero values for `e` and `s`, very small likelihood of being zero + e := getNonZeroScalar(sk.value, drbg) + s := getNonZeroScalar(sk.value, drbg) + + exp, err := e.Add(sk.value).Invert() + if err != nil { + return nil, err + } + // B = g1+h_0^s+h_1^m_1... 
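	// Written multiplicatively, the sum of products assembled below computes
	//
	//	B = commitment * g_1 * h_0^s * Π h_{i+1}^{m_i}   over the signer-known messages
	//
	// and the blind signature is A = B^(1/(e+sk)) together with (e, s); the holder
	// later calls ToUnblinded, which adds its blinding factor to s to obtain a
	// standard BBS+ signature.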
+ points := make([]curves.Point, len(msgs)+3) + scalars := make([]curves.Scalar, len(msgs)+3) + points[0] = bsc.commitment + points[1] = bsc.commitment.Generator() + points[2] = generators.Get(0) + scalars[0] = sk.value.One() + scalars[1] = sk.value.One() + scalars[2] = s + i := 3 + for idx, m := range msgs { + points[i] = generators.Get(idx + 1) + scalars[i] = m + i++ + } + b := bsc.commitment.SumOfProducts(points, scalars) + return &BlindSignature{ + a: b.Mul(exp).(curves.PairingPoint), + e: e, + s: s, + }, nil +} diff --git a/signatures/bbs/blind_signature_context_test.go b/signatures/bbs/blind_signature_context_test.go new file mode 100644 index 0000000..60e8c41 --- /dev/null +++ b/signatures/bbs/blind_signature_context_test.go @@ -0,0 +1,98 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package bbs + +import ( + crand "crypto/rand" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +func TestBlindSignatureContext(t *testing.T) { + curve := curves.BLS12381(&curves.PointBls12381G2{}) + pk, sk, err := NewKeys(curve) + require.NoError(t, err) + require.NotNil(t, pk) + require.NotNil(t, sk) + + generators, err := new(MessageGenerators).Init(pk, 4) + require.NoError(t, err) + nonce := curve.Scalar.Random(crand.Reader) + msgs := make(map[int]curves.Scalar, 1) + msgs[0] = curve.Scalar.Hash([]byte("identifier")) + + ctx, blinding, err := NewBlindSignatureContext(curve, msgs, generators, nonce, crand.Reader) + require.NoError(t, err) + require.NotNil(t, blinding) + require.False(t, blinding.IsZero()) + require.NotNil(t, ctx) + require.False(t, ctx.commitment.IsIdentity()) + require.False(t, ctx.challenge.IsZero()) + for _, p := range ctx.proofs { + require.False(t, p.IsZero()) + } + + delete(msgs, 0) + msgs[1] = curve.Scalar.Hash([]byte("firstname")) + msgs[2] = curve.Scalar.Hash([]byte("lastname")) + msgs[3] = curve.Scalar.Hash([]byte("age")) + blindSig, err := ctx.ToBlindSignature(msgs, sk, generators, nonce) + require.NoError(t, err) + require.NotNil(t, blindSig) + require.False(t, blindSig.a.IsIdentity()) + require.False(t, blindSig.e.IsZero()) + require.False(t, blindSig.s.IsZero()) + sig := blindSig.ToUnblinded(blinding) + msgs[0] = curve.Scalar.Hash([]byte("identifier")) + var sigMsgs [4]curves.Scalar + for i := 0; i < 4; i++ { + sigMsgs[i] = msgs[i] + } + err = pk.Verify(sig, generators, sigMsgs[:]) + require.NoError(t, err) +} + +func TestBlindSignatureContextMarshalBinary(t *testing.T) { + curve := curves.BLS12381(&curves.PointBls12381G2{}) + pk, sk, err := NewKeys(curve) + require.NoError(t, err) + require.NotNil(t, pk) + require.NotNil(t, sk) + + generators, err := new(MessageGenerators).Init(pk, 4) + require.NoError(t, err) + nonce := curve.Scalar.Random(crand.Reader) + msgs := make(map[int]curves.Scalar, 1) + msgs[0] = curve.Scalar.Hash([]byte("identifier")) + + ctx, blinding, err := NewBlindSignatureContext(curve, msgs, generators, nonce, crand.Reader) + require.NoError(t, err) + require.NotNil(t, blinding) + require.False(t, blinding.IsZero()) + require.NotNil(t, ctx) + require.False(t, ctx.commitment.IsIdentity()) + require.False(t, ctx.challenge.IsZero()) + for _, p := range ctx.proofs { + require.False(t, p.IsZero()) + } + + data, err := ctx.MarshalBinary() + require.NoError(t, err) + require.NotNil(t, data) + ctx2 := new(BlindSignatureContext).Init(curve) + err = ctx2.UnmarshalBinary(data) + require.NoError(t, err) + require.Equal(t, 
ctx.challenge.Cmp(ctx2.challenge), 0) + require.True(t, ctx.commitment.Equal(ctx2.commitment)) + require.Equal(t, len(ctx.proofs), len(ctx2.proofs)) + for i, p := range ctx.proofs { + require.Equal(t, p.Cmp(ctx2.proofs[i]), 0) + } +} diff --git a/signatures/bbs/message_generators.go b/signatures/bbs/message_generators.go new file mode 100644 index 0000000..8723dc4 --- /dev/null +++ b/signatures/bbs/message_generators.go @@ -0,0 +1,77 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package bbs + +import ( + "errors" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +// MessageGenerators are used to sign a vector of commitments for +// a BBS+ signature. These must be the same generators used by +// sign, verify, prove, and open +// +// These are generated in a deterministic manner. By using +// MessageGenerators in this way, the generators do not need to be +// stored alongside the public key and the same key can be used to sign +// an arbitrary number of messages +// Generators are created by computing +// H_i = H_G1(W || I2OSP(0, 4) || I2OSP(0, 1) || I2OSP(length, 4)) +// where I2OSP means Integer to Octet Stream Primitive and +// I2OSP represents an integer in a statically sized byte array. +// `W` is the BBS+ public key. +// Internally we store the 201 byte state since the only value that changes +// is the index +type MessageGenerators struct { + // Blinding factor generator, stored, so we know what points to return in `Get` + h0 curves.PairingPoint + length int + state [201]byte +} + +// Init set the message generators to the default state +func (msgg *MessageGenerators) Init(w *PublicKey, length int) (*MessageGenerators, error) { + if length < 0 { + return nil, errors.New("length should be nonnegative") + } + msgg.length = length + for i := range msgg.state { + msgg.state[i] = 0 + } + copy(msgg.state[:192], w.value.ToAffineUncompressed()) + msgg.state[197] = byte(length >> 24) + msgg.state[198] = byte(length >> 16) + msgg.state[199] = byte(length >> 8) + msgg.state[200] = byte(length) + var ok bool + msgg.h0, ok = w.value.OtherGroup().Hash(msgg.state[:]).(curves.PairingPoint) + if !ok { + return nil, errors.New("incorrect type conversion") + } + + return msgg, nil +} + +func (msgg MessageGenerators) Get(i int) curves.PairingPoint { + if i <= 0 { + return msgg.h0 + } + if i > msgg.length { + return nil + } + state := msgg.state + state[193] = byte(i >> 24) + state[194] = byte(i >> 16) + state[195] = byte(i >> 8) + state[196] = byte(i) + point, ok := msgg.h0.Hash(msgg.state[:]).(curves.PairingPoint) + if !ok { + return nil + } + return point +} diff --git a/signatures/bbs/pok_signature.go b/signatures/bbs/pok_signature.go new file mode 100644 index 0000000..7009080 --- /dev/null +++ b/signatures/bbs/pok_signature.go @@ -0,0 +1,159 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package bbs + +import ( + "fmt" + "io" + + "github.com/gtank/merlin" + + "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/signatures/common" +) + +// PokSignature a.k.a. 
Proof of Knowledge of a Signature +// is used by the prover to convince a verifier +// that they possess a valid signature and +// can selectively disclose a set of signed messages +type PokSignature struct { + // These values correspond to values with the same name + // as section 4.5 in + aPrime, aBar, d curves.PairingPoint + // proof1 for proving signature + // proof2 for selective disclosure + proof1, proof2 *common.ProofCommittedBuilder + // secrets1 for proving signature + // secrets2 for proving relation + // g1 * h1^m1 * h2^m2.... for all disclosed messages + // m_i == d^r3 * h_0^{-s_prime} * h1^-m1 * h2^-m2.... for all undisclosed messages m_i + secrets1, secrets2 []curves.Scalar +} + +// NewPokSignature creates the initial proof data before a Fiat-Shamir calculation +func NewPokSignature(sig *Signature, + generators *MessageGenerators, + msgs []common.ProofMessage, + reader io.Reader, +) (*PokSignature, error) { + if len(msgs) != generators.length { + return nil, fmt.Errorf("mismatch messages and generators") + } + + r1 := getNonZeroScalar(sig.s, reader) + r2 := getNonZeroScalar(sig.s, reader) + r3, err := r1.Invert() + if err != nil { + return nil, err + } + + sigMsgs := make([]curves.Scalar, len(msgs)) + for i, m := range msgs { + sigMsgs[i] = m.GetMessage() + } + + b := computeB(sig.s, sigMsgs, generators) + + aPrime, ok := sig.a.Mul(r1).(curves.PairingPoint) + if !ok { + return nil, fmt.Errorf("invalid point") + } + aBar, ok := b.Mul(r1).Sub(aPrime.Mul(sig.e)).(curves.PairingPoint) + if !ok { + return nil, fmt.Errorf("invalid point") + } + // d = b * r1 + h0 * r2 + d, ok := aPrime.SumOfProducts([]curves.Point{b, generators.h0}, []curves.Scalar{r1, r2}).(curves.PairingPoint) + if !ok { + return nil, fmt.Errorf("invalid point") + } + + // s' = s + r2r3 + sPrime := sig.s.Add(r2.Mul(r3)) + + // For proving relation aBar - d = aPrime * -e + h0 * r2 + curve := curves.Curve{ + Scalar: sig.s.Zero(), + Point: sig.a.Identity(), + } + proof1 := common.NewProofCommittedBuilder(&curve) + // For aPrime * -e + err = proof1.CommitRandom(aPrime, reader) + if err != nil { + return nil, err + } + err = proof1.CommitRandom(generators.h0, reader) + if err != nil { + return nil, err + } + secrets1 := []curves.Scalar{sig.e, r2} + + // For selective disclosure + proof2 := common.NewProofCommittedBuilder(&curve) + // For d * -r3 + err = proof2.CommitRandom(d.Neg(), reader) + if err != nil { + return nil, err + } + // For h0 * s' + err = proof2.CommitRandom(generators.h0, reader) + if err != nil { + return nil, err + } + secrets2 := make([]curves.Scalar, 0, len(msgs)+2) + secrets2 = append(secrets2, r3) + secrets2 = append(secrets2, sPrime) + + for i, m := range msgs { + if m.IsHidden() { + err = proof2.Commit(generators.Get(i+1), m.GetBlinding(reader)) + if err != nil { + return nil, err + } + secrets2 = append(secrets2, m.GetMessage()) + } + } + + return &PokSignature{ + aPrime, + aBar, + d, + proof1, + proof2, + secrets1, + secrets2, + }, nil +} + +// GetChallengeContribution returns the bytes that should be added to +// a sigma protocol transcript for generating the challenge +func (pok *PokSignature) GetChallengeContribution(transcript *merlin.Transcript) { + transcript.AppendMessage([]byte("A'"), pok.aPrime.ToAffineCompressed()) + transcript.AppendMessage([]byte("Abar"), pok.aBar.ToAffineCompressed()) + transcript.AppendMessage([]byte("D"), pok.d.ToAffineCompressed()) + transcript.AppendMessage([]byte("Proof1"), pok.proof1.GetChallengeContribution()) + transcript.AppendMessage([]byte("Proof2"), 
pok.proof2.GetChallengeContribution()) +} + +// GenerateProof converts the blinding factors and secrets into Schnorr proofs +func (pok *PokSignature) GenerateProof(challenge curves.Scalar) (*PokSignatureProof, error) { + proof1, err := pok.proof1.GenerateProof(challenge, pok.secrets1) + if err != nil { + return nil, err + } + proof2, err := pok.proof2.GenerateProof(challenge, pok.secrets2) + if err != nil { + return nil, err + } + return &PokSignatureProof{ + aPrime: pok.aPrime, + aBar: pok.aBar, + d: pok.d, + proof1: proof1, + proof2: proof2, + }, nil +} diff --git a/signatures/bbs/pok_signature_proof.go b/signatures/bbs/pok_signature_proof.go new file mode 100644 index 0000000..cd221fe --- /dev/null +++ b/signatures/bbs/pok_signature_proof.go @@ -0,0 +1,214 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package bbs + +import ( + "errors" + "fmt" + + "github.com/gtank/merlin" + + "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/signatures/common" +) + +// PokSignatureProof is the actual proof sent from a prover +// to a verifier that contains a proof of knowledge of a signature +// and the selective disclosure proof +type PokSignatureProof struct { + aPrime, aBar, d curves.PairingPoint + proof1, proof2 []curves.Scalar +} + +// Init creates an empty proof to a specific curve +// which should be followed by UnmarshalBinary +func (pok *PokSignatureProof) Init(curve *curves.PairingCurve) *PokSignatureProof { + pok.aPrime = curve.Scalar.Point().(curves.PairingPoint).OtherGroup() + pok.aBar = pok.aPrime + pok.d = pok.aPrime + pok.proof1 = []curves.Scalar{ + curve.Scalar.Zero(), + curve.Scalar.Zero(), + } + pok.proof2 = make([]curves.Scalar, 0) + return pok +} + +func (pok *PokSignatureProof) MarshalBinary() ([]byte, error) { + data := append(pok.aPrime.ToAffineCompressed(), pok.aBar.ToAffineCompressed()...) + data = append(data, pok.d.ToAffineCompressed()...) + for _, p := range pok.proof1 { + data = append(data, p.Bytes()...) + } + for _, p := range pok.proof2 { + data = append(data, p.Bytes()...) 
+ } + return data, nil +} + +func (pok *PokSignatureProof) UnmarshalBinary(in []byte) error { + scSize := len(pok.proof1[0].Bytes()) + ptSize := len(pok.aPrime.ToAffineCompressed()) + minSize := scSize*4 + ptSize*3 + inSize := len(in) + if inSize < minSize { + return fmt.Errorf("invalid byte sequence") + } + if (inSize-ptSize)%scSize != 0 { + return fmt.Errorf("invalid byte sequence") + } + secretCnt := ((inSize - ptSize*3) / scSize) - 2 + offset := 0 + end := ptSize + + aPrime, err := pok.aPrime.FromAffineCompressed(in[offset:end]) + if err != nil { + return err + } + offset = end + end += ptSize + aBar, err := pok.aBar.FromAffineCompressed(in[offset:end]) + if err != nil { + return err + } + offset = end + end += ptSize + d, err := pok.d.FromAffineCompressed(in[offset:end]) + if err != nil { + return err + } + offset = end + end += scSize + proof1i0, err := pok.proof1[0].SetBytes(in[offset:end]) + if err != nil { + return err + } + offset = end + end += scSize + proof1i1, err := pok.proof1[1].SetBytes(in[offset:end]) + if err != nil { + return err + } + proof2 := make([]curves.Scalar, secretCnt) + for i := 0; i < secretCnt; i++ { + offset = end + end += scSize + proof2[i], err = pok.proof1[0].SetBytes(in[offset:end]) + if err != nil { + return err + } + } + + var ok bool + pok.aPrime, ok = aPrime.(curves.PairingPoint) + if !ok { + return errors.New("incorrect type conversion") + } + pok.aBar, ok = aBar.(curves.PairingPoint) + if !ok { + return errors.New("incorrect type conversion") + } + pok.d, ok = d.(curves.PairingPoint) + if !ok { + return errors.New("incorrect type conversion") + } + pok.proof1[0] = proof1i0 + pok.proof1[1] = proof1i1 + pok.proof2 = proof2 + return nil +} + +// GetChallengeContribution converts the committed values to bytes +// for the Fiat-Shamir challenge +func (pok PokSignatureProof) GetChallengeContribution( + generators *MessageGenerators, + revealedMessages map[int]curves.Scalar, + challenge common.Challenge, + transcript *merlin.Transcript, +) { + transcript.AppendMessage([]byte("A'"), pok.aPrime.ToAffineCompressed()) + transcript.AppendMessage([]byte("Abar"), pok.aBar.ToAffineCompressed()) + transcript.AppendMessage([]byte("D"), pok.d.ToAffineCompressed()) + + proof1Points := []curves.Point{pok.aBar.Sub(pok.d), pok.aPrime, generators.h0} + proof1Scalars := []curves.Scalar{challenge, pok.proof1[0], pok.proof1[1]} + commitmentProof1 := pok.aPrime.SumOfProducts(proof1Points, proof1Scalars) + transcript.AppendMessage([]byte("Proof1"), commitmentProof1.ToAffineCompressed()) + + rPoints := make([]curves.Point, 1, len(revealedMessages)+1) + rScalars := make([]curves.Scalar, 1, len(revealedMessages)+1) + + rPoints[0] = pok.aPrime.Generator() + rScalars[0] = pok.proof1[0].One() + + for idx, msg := range revealedMessages { + rPoints = append(rPoints, generators.Get(idx+1)) + rScalars = append(rScalars, msg) + } + + r := pok.aPrime.SumOfProducts(rPoints, rScalars) + + pts := 3 + generators.length - len(revealedMessages) + proof2Points := make([]curves.Point, 3, pts) + proof2Scalars := make([]curves.Scalar, 3, pts) + + // R * c + proof2Points[0] = r + proof2Scalars[0] = challenge + + // D * -r3Hat + proof2Points[1] = pok.d.Neg() + proof2Scalars[1] = pok.proof2[0] + + // H0 * s'Hat + proof2Points[2] = generators.h0 + proof2Scalars[2] = pok.proof2[1] + + j := 2 + for i := 0; i < generators.length; i++ { + if _, contains := revealedMessages[i]; contains { + continue + } + proof2Points = append(proof2Points, generators.Get(i+1)) + proof2Scalars = append(proof2Scalars, 
pok.proof2[j]) + j++ + } + commitmentProof2 := r.SumOfProducts(proof2Points, proof2Scalars) + + transcript.AppendMessage([]byte("Proof2"), commitmentProof2.ToAffineCompressed()) +} + +// VerifySigPok only validates the signature proof, +// the selective disclosure proof is checked by +// verifying +// pok.challenge == computedChallenge +func (pok PokSignatureProof) VerifySigPok(pk *PublicKey) bool { + return !pk.value.IsIdentity() && + !pok.aPrime.IsIdentity() && + !pok.aBar.IsIdentity() && + pok.aPrime.MultiPairing(pok.aPrime, pk.value, pok.aBar, pk.value.Generator().Neg().(curves.PairingPoint)). + IsOne() +} + +// Verify checks a signature proof of knowledge and selective disclosure proof +func (pok PokSignatureProof) Verify( + revealedMsgs map[int]curves.Scalar, + pk *PublicKey, + generators *MessageGenerators, + nonce common.Nonce, + challenge common.Challenge, + transcript *merlin.Transcript, +) bool { + pok.GetChallengeContribution(generators, revealedMsgs, challenge, transcript) + transcript.AppendMessage([]byte("nonce"), nonce.Bytes()) + okm := transcript.ExtractBytes([]byte("signature proof of knowledge"), 64) + vChallenge, err := pok.proof1[0].SetBytesWide(okm) + if err != nil { + return false + } + return pok.VerifySigPok(pk) && challenge.Cmp(vChallenge) == 0 +} diff --git a/signatures/bbs/pok_signature_proof_test.go b/signatures/bbs/pok_signature_proof_test.go new file mode 100644 index 0000000..0888351 --- /dev/null +++ b/signatures/bbs/pok_signature_proof_test.go @@ -0,0 +1,319 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package bbs + +import ( + crand "crypto/rand" + "testing" + + "github.com/gtank/merlin" + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/signatures/common" +) + +func TestPokSignatureProofSomeMessagesRevealed(t *testing.T) { + curve := curves.BLS12381(&curves.PointBls12381G2{}) + pk, sk, err := NewKeys(curve) + require.NoError(t, err) + require.NotNil(t, sk) + require.NotNil(t, pk) + require.False(t, sk.value.IsZero()) + require.False(t, pk.value.IsIdentity()) + _, ok := pk.value.(*curves.PointBls12381G2) + require.True(t, ok) + generators, err := new(MessageGenerators).Init(pk, 4) + require.NoError(t, err) + msgs := []curves.Scalar{ + curve.Scalar.New(2), + curve.Scalar.New(3), + curve.Scalar.New(4), + curve.Scalar.New(5), + } + + sig, err := sk.Sign(generators, msgs) + require.NoError(t, err) + require.NotNil(t, sig) + require.False(t, sig.a.IsIdentity()) + require.False(t, sig.e.IsZero()) + require.False(t, sig.s.IsZero()) + + proofMsgs := []common.ProofMessage{ + &common.ProofSpecificMessage{ + Message: msgs[0], + }, + &common.ProofSpecificMessage{ + Message: msgs[1], + }, + &common.RevealedMessage{ + Message: msgs[2], + }, + &common.RevealedMessage{ + Message: msgs[3], + }, + } + + pok, err := NewPokSignature(sig, generators, proofMsgs, crand.Reader) + require.NoError(t, err) + require.NotNil(t, pok) + nonce := curve.Scalar.Random(crand.Reader) + transcript := merlin.NewTranscript("TestPokSignatureProofWorks") + pok.GetChallengeContribution(transcript) + transcript.AppendMessage([]byte("nonce"), nonce.Bytes()) + okm := transcript.ExtractBytes([]byte("signature proof of knowledge"), 64) + challenge, err := curve.Scalar.SetBytesWide(okm) + require.NoError(t, err) + + pokSig, err := pok.GenerateProof(challenge) + require.NoError(t, err) + require.NotNil(t, pokSig) + require.True(t, pokSig.VerifySigPok(pk)) + + 
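+ // Only the revealed messages (indices 2 and 3) are handed to the verifier; the proof-specific messages above stay hidden and are covered by the selective disclosure proof.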
revealedMsgs := map[int]curves.Scalar{ + 2: msgs[2], + 3: msgs[3], + } + // Manual verify to show how when used in conjunction with other ZKPs + transcript = merlin.NewTranscript("TestPokSignatureProofWorks") + pokSig.GetChallengeContribution(generators, revealedMsgs, challenge, transcript) + transcript.AppendMessage([]byte("nonce"), nonce.Bytes()) + okm = transcript.ExtractBytes([]byte("signature proof of knowledge"), 64) + vChallenge, err := curve.Scalar.SetBytesWide(okm) + require.NoError(t, err) + require.Equal(t, challenge.Cmp(vChallenge), 0) + + // Use the all-inclusive method + transcript = merlin.NewTranscript("TestPokSignatureProofWorks") + require.True(t, pokSig.Verify(revealedMsgs, pk, generators, nonce, challenge, transcript)) +} + +func TestPokSignatureProofAllMessagesRevealed(t *testing.T) { + curve := curves.BLS12381(&curves.PointBls12381G2{}) + pk, sk, err := NewKeys(curve) + require.NoError(t, err) + require.NotNil(t, sk) + require.NotNil(t, pk) + require.False(t, sk.value.IsZero()) + require.False(t, pk.value.IsIdentity()) + _, ok := pk.value.(*curves.PointBls12381G2) + require.True(t, ok) + generators, err := new(MessageGenerators).Init(pk, 4) + require.NoError(t, err) + msgs := []curves.Scalar{ + curve.Scalar.New(2), + curve.Scalar.New(3), + curve.Scalar.New(4), + curve.Scalar.New(5), + } + + sig, err := sk.Sign(generators, msgs) + require.NoError(t, err) + require.NotNil(t, sig) + require.False(t, sig.a.IsIdentity()) + require.False(t, sig.e.IsZero()) + require.False(t, sig.s.IsZero()) + + proofMsgs := []common.ProofMessage{ + &common.RevealedMessage{ + Message: msgs[0], + }, + &common.RevealedMessage{ + Message: msgs[1], + }, + &common.RevealedMessage{ + Message: msgs[2], + }, + &common.RevealedMessage{ + Message: msgs[3], + }, + } + + pok, err := NewPokSignature(sig, generators, proofMsgs, crand.Reader) + require.NoError(t, err) + require.NotNil(t, pok) + nonce := curve.Scalar.Random(crand.Reader) + transcript := merlin.NewTranscript("TestPokSignatureProofWorks") + pok.GetChallengeContribution(transcript) + transcript.AppendMessage([]byte("nonce"), nonce.Bytes()) + okm := transcript.ExtractBytes([]byte("signature proof of knowledge"), 64) + challenge, err := curve.Scalar.SetBytesWide(okm) + require.NoError(t, err) + + pokSig, err := pok.GenerateProof(challenge) + require.NoError(t, err) + require.NotNil(t, pokSig) + require.True(t, pokSig.VerifySigPok(pk)) + + revealedMsgs := map[int]curves.Scalar{ + 0: msgs[0], + 1: msgs[1], + 2: msgs[2], + 3: msgs[3], + } + // Manual verify to show how when used in conjunction with other ZKPs + transcript = merlin.NewTranscript("TestPokSignatureProofWorks") + pokSig.GetChallengeContribution(generators, revealedMsgs, challenge, transcript) + transcript.AppendMessage([]byte("nonce"), nonce.Bytes()) + okm = transcript.ExtractBytes([]byte("signature proof of knowledge"), 64) + vChallenge, err := curve.Scalar.SetBytesWide(okm) + require.NoError(t, err) + require.Equal(t, challenge.Cmp(vChallenge), 0) + + // Use the all-inclusive method + transcript = merlin.NewTranscript("TestPokSignatureProofWorks") + require.True(t, pokSig.Verify(revealedMsgs, pk, generators, nonce, challenge, transcript)) +} + +func TestPokSignatureProofAllMessagesHidden(t *testing.T) { + curve := curves.BLS12381(&curves.PointBls12381G2{}) + pk, sk, err := NewKeys(curve) + require.NoError(t, err) + require.NotNil(t, sk) + require.NotNil(t, pk) + require.False(t, sk.value.IsZero()) + require.False(t, pk.value.IsIdentity()) + _, ok := pk.value.(*curves.PointBls12381G2) 
+ require.True(t, ok) + generators, err := new(MessageGenerators).Init(pk, 4) + require.NoError(t, err) + msgs := []curves.Scalar{ + curve.Scalar.New(2), + curve.Scalar.New(3), + curve.Scalar.New(4), + curve.Scalar.New(5), + } + + sig, err := sk.Sign(generators, msgs) + require.NoError(t, err) + require.NotNil(t, sig) + require.False(t, sig.a.IsIdentity()) + require.False(t, sig.e.IsZero()) + require.False(t, sig.s.IsZero()) + + proofMsgs := []common.ProofMessage{ + &common.ProofSpecificMessage{ + Message: msgs[0], + }, + &common.ProofSpecificMessage{ + Message: msgs[1], + }, + &common.ProofSpecificMessage{ + Message: msgs[2], + }, + &common.ProofSpecificMessage{ + Message: msgs[3], + }, + } + + pok, err := NewPokSignature(sig, generators, proofMsgs, crand.Reader) + require.NoError(t, err) + require.NotNil(t, pok) + nonce := curve.Scalar.Random(crand.Reader) + transcript := merlin.NewTranscript("TestPokSignatureProofWorks") + pok.GetChallengeContribution(transcript) + transcript.AppendMessage([]byte("nonce"), nonce.Bytes()) + okm := transcript.ExtractBytes([]byte("signature proof of knowledge"), 64) + challenge, err := curve.Scalar.SetBytesWide(okm) + require.NoError(t, err) + + pokSig, err := pok.GenerateProof(challenge) + require.NoError(t, err) + require.NotNil(t, pokSig) + require.True(t, pokSig.VerifySigPok(pk)) + + revealedMsgs := map[int]curves.Scalar{} + + // Manual verify to show how when used in conjunction with other ZKPs + transcript = merlin.NewTranscript("TestPokSignatureProofWorks") + pokSig.GetChallengeContribution(generators, revealedMsgs, challenge, transcript) + transcript.AppendMessage([]byte("nonce"), nonce.Bytes()) + okm = transcript.ExtractBytes([]byte("signature proof of knowledge"), 64) + vChallenge, err := curve.Scalar.SetBytesWide(okm) + require.NoError(t, err) + require.Equal(t, challenge.Cmp(vChallenge), 0) + + // Use the all-inclusive method + transcript = merlin.NewTranscript("TestPokSignatureProofWorks") + require.True(t, pokSig.Verify(revealedMsgs, pk, generators, nonce, challenge, transcript)) +} + +func TestPokSignatureProofMarshalBinary(t *testing.T) { + curve := curves.BLS12381(&curves.PointBls12381G2{}) + pk, sk, err := NewKeys(curve) + require.NoError(t, err) + require.NotNil(t, sk) + require.NotNil(t, pk) + require.False(t, sk.value.IsZero()) + require.False(t, pk.value.IsIdentity()) + _, ok := pk.value.(*curves.PointBls12381G2) + require.True(t, ok) + generators, err := new(MessageGenerators).Init(pk, 4) + require.NoError(t, err) + msgs := []curves.Scalar{ + curve.Scalar.New(2), + curve.Scalar.New(3), + curve.Scalar.New(4), + curve.Scalar.New(5), + } + + sig, err := sk.Sign(generators, msgs) + require.NoError(t, err) + require.NotNil(t, sig) + require.False(t, sig.a.IsIdentity()) + require.False(t, sig.e.IsZero()) + require.False(t, sig.s.IsZero()) + + proofMsgs := []common.ProofMessage{ + &common.ProofSpecificMessage{ + Message: msgs[0], + }, + &common.ProofSpecificMessage{ + Message: msgs[1], + }, + &common.RevealedMessage{ + Message: msgs[2], + }, + &common.RevealedMessage{ + Message: msgs[3], + }, + } + + pok, err := NewPokSignature(sig, generators, proofMsgs, crand.Reader) + require.NoError(t, err) + require.NotNil(t, pok) + nonce := curve.Scalar.Random(crand.Reader) + transcript := merlin.NewTranscript("TestPokSignatureProofMarshalBinary") + pok.GetChallengeContribution(transcript) + transcript.AppendMessage([]byte("nonce"), nonce.Bytes()) + challenge, err := curve.Scalar.SetBytesWide( + transcript.ExtractBytes([]byte("signature proof of 
knowledge"), 64), + ) + require.NoError(t, err) + + pokSig, err := pok.GenerateProof(challenge) + require.NoError(t, err) + require.NotNil(t, pokSig) + + data, err := pokSig.MarshalBinary() + require.NoError(t, err) + require.NotNil(t, data) + pokSig2 := new(PokSignatureProof).Init(curve) + err = pokSig2.UnmarshalBinary(data) + require.NoError(t, err) + require.True(t, pokSig.aPrime.Equal(pokSig2.aPrime)) + require.True(t, pokSig.aBar.Equal(pokSig2.aBar)) + require.True(t, pokSig.d.Equal(pokSig2.d)) + require.Equal(t, len(pokSig.proof1), len(pokSig2.proof1)) + require.Equal(t, len(pokSig.proof2), len(pokSig2.proof2)) + for i, p := range pokSig.proof1 { + require.Equal(t, p.Cmp(pokSig2.proof1[i]), 0) + } + for i, p := range pokSig.proof2 { + require.Equal(t, p.Cmp(pokSig2.proof2[i]), 0) + } +} diff --git a/signatures/bbs/public_key.go b/signatures/bbs/public_key.go new file mode 100644 index 0000000..b82b487 --- /dev/null +++ b/signatures/bbs/public_key.go @@ -0,0 +1,77 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package bbs + +import ( + "errors" + "fmt" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +// PublicKey is a BBS+ verification key +type PublicKey struct { + value curves.PairingPoint +} + +func (pk *PublicKey) Init(curve *curves.PairingCurve) *PublicKey { + pk.value = curve.NewG2IdentityPoint() + return pk +} + +func (pk PublicKey) MarshalBinary() ([]byte, error) { + return pk.value.ToAffineCompressed(), nil +} + +func (pk *PublicKey) UnmarshalBinary(in []byte) error { + value, err := pk.value.FromAffineCompressed(in) + if err != nil { + return err + } + var ok bool + pk.value, ok = value.(curves.PairingPoint) + if !ok { + return errors.New("incorrect type conversion") + } + return nil +} + +// Verify checks a signature where all messages are known to the verifier +func (pk PublicKey) Verify( + signature *Signature, + generators *MessageGenerators, + msgs []curves.Scalar, +) error { + if generators.length < len(msgs) { + return fmt.Errorf("not enough message generators") + } + if len(msgs) < 1 { + return fmt.Errorf("invalid messages") + } + // Identity Point will always return true which is not what we want + if pk.value.IsIdentity() { + return fmt.Errorf("invalid public key") + } + if signature.a.IsIdentity() { + return fmt.Errorf("invalid signature") + } + a, ok := pk.value.Generator().Mul(signature.e).Add(pk.value).(curves.PairingPoint) + if !ok { + return fmt.Errorf("not a valid point") + } + b, ok := computeB(signature.s, msgs, generators).Neg().(curves.PairingPoint) + if !ok { + return fmt.Errorf("not a valid point") + } + + res := a.MultiPairing(signature.a, a, b, pk.value.Generator().(curves.PairingPoint)) + if !res.IsOne() { + return fmt.Errorf("invalid result") + } + + return nil +} diff --git a/signatures/bbs/secret_key.go b/signatures/bbs/secret_key.go new file mode 100644 index 0000000..0402380 --- /dev/null +++ b/signatures/bbs/secret_key.go @@ -0,0 +1,185 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package bbs + +import ( + crand "crypto/rand" + "crypto/sha256" + "errors" + "fmt" + "io" + + "golang.org/x/crypto/hkdf" + "golang.org/x/crypto/sha3" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +// SecretKey is a BBS+ signing key +type SecretKey struct { + value curves.PairingScalar +} + +func NewSecretKey(curve *curves.PairingCurve) (*SecretKey, error) { + // The salt used with generating secret keys + // See section 2.3 from https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-04 + const hkdfKeyGenSalt = "BLS-SIG-KEYGEN-SALT-" + const Size = 33 + var ikm [Size]byte + cnt, err := crand.Read(ikm[:32]) + if err != nil { + return nil, err + } + if cnt != Size-1 { + return nil, fmt.Errorf("unable to read sufficient random data") + } + + // https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-04#section-2.3 + h := sha256.New() + n, err := h.Write([]byte(hkdfKeyGenSalt)) + if err != nil { + return nil, err + } + if n != len(hkdfKeyGenSalt) { + return nil, fmt.Errorf("incorrect salt bytes written to be hashed") + } + salt := h.Sum(nil) + + // Leaves key_info parameter as the default empty string + // and just adds parameter I2OSP(L, 2) + kdf := hkdf.New(sha256.New, ikm[:], salt, []byte{0, 48}) + var okm [64]byte + read, err := kdf.Read(okm[:48]) + if err != nil { + return nil, err + } + if read != 48 { + return nil, fmt.Errorf("failed to create secret key") + } + v, err := curve.Scalar.SetBytesWide(okm[:]) + if err != nil { + return nil, err + } + value, ok := v.(curves.PairingScalar) + if !ok { + return nil, fmt.Errorf("invalid scalar") + } + return &SecretKey{ + value: value.SetPoint(curve.PointG2), + }, nil +} + +func NewKeys(curve *curves.PairingCurve) (*PublicKey, *SecretKey, error) { + sk, err := NewSecretKey(curve) + if err != nil { + return nil, nil, err + } + return sk.PublicKey(), sk, nil +} + +func (sk *SecretKey) Init(curve *curves.PairingCurve) *SecretKey { + sk.value = curve.NewScalar() + return sk +} + +func (sk SecretKey) MarshalBinary() ([]byte, error) { + return sk.value.Bytes(), nil +} + +func (sk *SecretKey) UnmarshalBinary(in []byte) error { + value, err := sk.value.SetBytes(in) + if err != nil { + return err + } + var ok bool + sk.value, ok = value.(curves.PairingScalar) + if !ok { + return errors.New("incorrect type conversion") + } + return nil +} + +// Sign generates a new signature where all messages are known to the signer +func (sk *SecretKey) Sign(generators *MessageGenerators, msgs []curves.Scalar) (*Signature, error) { + if generators.length < len(msgs) { + return nil, fmt.Errorf("not enough message generators") + } + if len(msgs) < 1 { + return nil, fmt.Errorf("invalid messages") + } + if sk.value.IsZero() { + return nil, fmt.Errorf("invalid secret key") + } + + drbg := sha3.NewShake256() + _, _ = drbg.Write(sk.value.Bytes()) + addDeterministicNonceData(generators, msgs, drbg) + // Should yield non-zero values for `e` and `s`, very small likelihood of being zero + e := getNonZeroScalar(sk.value, drbg) + s := getNonZeroScalar(sk.value, drbg) + b := computeB(s, msgs, generators) + exp, err := e.Add(sk.value).Invert() + if err != nil { + return nil, err + } + return &Signature{ + a: b.Mul(exp).(curves.PairingPoint), + e: e, + s: s, + }, nil +} + +// PublicKey returns the corresponding public key +func (sk *SecretKey) PublicKey() *PublicKey { + return &PublicKey{ + value: sk.value.Point().Generator().Mul(sk.value).(curves.PairingPoint), + } +} + +// computes g1 + s * h0 + msgs[0] * h[0] + msgs[1] * 
h[1] ... +func computeB( + s curves.Scalar, + msgs []curves.Scalar, + generators *MessageGenerators, +) curves.PairingPoint { + nMsgs := len(msgs) + points := make([]curves.Point, nMsgs+2) + points[1] = generators.Get(0) + points[0] = points[1].Generator() + + scalars := make([]curves.Scalar, nMsgs+2) + scalars[0] = msgs[0].One() + scalars[1] = s + for i, m := range msgs { + points[i+2] = generators.Get(i + 1) + scalars[i+2] = m + } + pt := points[0].SumOfProducts(points, scalars) + return pt.(curves.PairingPoint) +} + +func addDeterministicNonceData( + generators *MessageGenerators, + msgs []curves.Scalar, + drbg io.Writer, +) { + for i := 0; i <= generators.length; i++ { + _, _ = drbg.Write(generators.Get(i).ToAffineUncompressed()) + } + for _, m := range msgs { + _, _ = drbg.Write(m.Bytes()) + } +} + +func getNonZeroScalar(sc curves.Scalar, reader io.Reader) curves.Scalar { + // Should yield non-zero values for `e` and `s`, very small likelihood of being zero + e := sc.Random(reader) + for e.IsZero() { + e = sc.Random(reader) + } + return e +} diff --git a/signatures/bbs/signature.go b/signatures/bbs/signature.go new file mode 100644 index 0000000..c29be6b --- /dev/null +++ b/signatures/bbs/signature.go @@ -0,0 +1,67 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +// Package bbs is an implementation of BBS+ signature of https://eprint.iacr.org/2016/663.pdf +package bbs + +import ( + "errors" + "fmt" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +// Signature is a BBS+ signature +// as described in 4.3 in +// +type Signature struct { + a curves.PairingPoint + e, s curves.Scalar +} + +// Init creates an empty signature to a specific curve +// which should be followed by UnmarshalBinary or Create +func (sig *Signature) Init(curve *curves.PairingCurve) *Signature { + sig.a = curve.NewG1IdentityPoint() + sig.e = curve.NewScalar() + sig.s = curve.NewScalar() + return sig +} + +func (sig Signature) MarshalBinary() ([]byte, error) { + out := append(sig.a.ToAffineCompressed(), sig.e.Bytes()...) + out = append(out, sig.s.Bytes()...) + return out, nil +} + +func (sig *Signature) UnmarshalBinary(data []byte) error { + pointLength := len(sig.a.ToAffineCompressed()) + scalarLength := len(sig.s.Bytes()) + expectedLength := pointLength + scalarLength*2 + if len(data) != expectedLength { + return fmt.Errorf("invalid byte sequence") + } + a, err := sig.a.FromAffineCompressed(data[:pointLength]) + if err != nil { + return err + } + e, err := sig.e.SetBytes(data[pointLength:(pointLength + scalarLength)]) + if err != nil { + return err + } + s, err := sig.s.SetBytes(data[(pointLength + scalarLength):]) + if err != nil { + return err + } + var ok bool + sig.a, ok = a.(curves.PairingPoint) + if !ok { + return errors.New("incorrect type conversion") + } + sig.e = e + sig.s = s + return nil +} diff --git a/signatures/bbs/signature_test.go b/signatures/bbs/signature_test.go new file mode 100644 index 0000000..96d2d3c --- /dev/null +++ b/signatures/bbs/signature_test.go @@ -0,0 +1,81 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package bbs + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +func TestSignatureWorks(t *testing.T) { + curve := curves.BLS12381(&curves.PointBls12381G2{}) + msgs := []curves.Scalar{ + curve.Scalar.New(3), + curve.Scalar.New(4), + curve.Scalar.New(5), + curve.Scalar.New(6), + } + pk, sk, err := NewKeys(curve) + require.NoError(t, err) + generators, err := new(MessageGenerators).Init(pk, 4) + require.NoError(t, err) + + sig, err := sk.Sign(generators, msgs) + require.NoError(t, err) + err = pk.Verify(sig, generators, msgs) + require.NoError(t, err) +} + +func TestSignatureIncorrectMessages(t *testing.T) { + curve := curves.BLS12381(&curves.PointBls12381G2{}) + msgs := []curves.Scalar{ + curve.Scalar.New(3), + curve.Scalar.New(4), + curve.Scalar.New(5), + curve.Scalar.New(6), + } + pk, sk, err := NewKeys(curve) + require.NoError(t, err) + generators, err := new(MessageGenerators).Init(pk, 4) + require.NoError(t, err) + + sig, err := sk.Sign(generators, msgs) + require.NoError(t, err) + msgs[0] = curve.Scalar.New(7) + err = pk.Verify(sig, generators, msgs) + require.Error(t, err) +} + +func TestSignatureMarshalBinary(t *testing.T) { + curve := curves.BLS12381(&curves.PointBls12381G2{}) + msgs := []curves.Scalar{ + curve.Scalar.New(3), + curve.Scalar.New(4), + curve.Scalar.New(5), + curve.Scalar.New(6), + } + pk, sk, err := NewKeys(curve) + require.NoError(t, err) + generators, err := new(MessageGenerators).Init(pk, 4) + require.NoError(t, err) + + sig, err := sk.Sign(generators, msgs) + require.NoError(t, err) + + data, err := sig.MarshalBinary() + require.NoError(t, err) + require.Equal(t, 112, len(data)) + sig2 := new(Signature).Init(curve) + err = sig2.UnmarshalBinary(data) + require.NoError(t, err) + require.True(t, sig.a.Equal(sig2.a)) + require.Equal(t, sig.e.Cmp(sig2.e), 0) + require.Equal(t, sig.s.Cmp(sig2.s), 0) +} diff --git a/signatures/bls/README.md b/signatures/bls/README.md new file mode 100755 index 0000000..d46edf9 --- /dev/null +++ b/signatures/bls/README.md @@ -0,0 +1,114 @@ +--- +aliases: [README] +tags: [] +title: README +linter-yaml-title-alias: README +date created: Wednesday, April 17th 2024, 4:11:40 pm +date modified: Thursday, April 18th 2024, 8:19:25 am +--- + +## BLS Signatures + +An implementation of the Boneh-Lynn-Shacham (BLS) signatures according to the [standard](https://datatracker.ietf.org/doc/draft-irtf-cfrg-bls-signature/?include_text=1) +on top of the Barreto-Lynn-Scott (BLS) 12-381 curve. The [BLS12-381 curve](https://github.com/zkcrypto/pairing/tree/master/src/bls12_381#serialization) provides roughly +128-bits of security and BLS is a digital signature with aggregation properties. + +We have implemented all three signature schemes described in the [standard](https://datatracker.ietf.org/doc/draft-irtf-cfrg-bls-signature/?include_text=1) +which are designed to handle rogue-key attacks differently. These three are: + +- **Basic** handles rogue key attacks by requiring all signed messages in an aggregate signature be distinct +- **Message Augmentation** handles rogue key attacks by prepending the public key to the message during signing which ensures that all messages are distinct for different public keys. +- **Proof of Possession** handles rogue key attacks by validating public keys in a separate step called a proof of possession. This allows for faster aggregate verification. 
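+
+For orientation, here is a minimal, untested sketch that exercises the proof of possession scheme in its `Vt` variant (short signatures, long public keys), using the single-signer and threshold APIs declared in `bls_sig/tiny_bls.go`. The import path is inferred from the module layout and may differ in your setup:
+
+```go
+package main
+
+import (
+	"fmt"
+	"log"
+
+	"github.com/sonr-io/sonr/crypto/signatures/bls/bls_sig"
+)
+
+func main() {
+	msg := []byte("hello threshold BLS")
+
+	// Proof of possession scheme, Vt variant: signatures in G1, public keys in G2.
+	bls := bls_sig.NewSigPopVt()
+
+	// Single-signer flow.
+	pk, sk, err := bls.Keygen()
+	if err != nil {
+		log.Fatal(err)
+	}
+	sig, err := bls.Sign(sk, msg)
+	if err != nil {
+		log.Fatal(err)
+	}
+	ok, err := bls.Verify(pk, msg, sig)
+	fmt.Println("single signature valid:", ok, err)
+
+	// Threshold flow: any 2 of 3 shares can produce a signature under pk2.
+	pk2, shares, err := bls.ThresholdKeygen(2, 3)
+	if err != nil {
+		log.Fatal(err)
+	}
+	ps0, err := bls.PartialSign(shares[0], msg)
+	if err != nil {
+		log.Fatal(err)
+	}
+	ps2, err := bls.PartialSign(shares[2], msg)
+	if err != nil {
+		log.Fatal(err)
+	}
+	combined, err := bls.CombineSignatures(ps0, ps2)
+	if err != nil {
+		log.Fatal(err)
+	}
+	ok, err = bls.Verify(pk2, msg, combined)
+	fmt.Println("combined threshold signature valid:", ok, err)
+}
+```
+
+Any `threshold` distinct shares can be combined; which particular shares participate does not matter.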
+ +Pairing-friendly curves have two generator-groups 𝔾1, 𝔾2. +Data in 𝔾2 is twice the size of 𝔾1 and operations are slower. +BLS signatures require signatures and public keys to be in opposite groups, i.e. signatures in 𝔾1 and public keys in 𝔾2 or +signatures in 𝔾2 and public keys in 𝔾1. This means one of two things: + +- **Short public keys, long signatures**: Signatures are longer and slower to create, verify, and aggregate but public keys are small and fast to aggregate. Used when signing and verification operations are not computed as often or for minimizing storage or bandwidth for public keys. +- **Short signatures, long public keys**: Signatures are short and fast to create, verify, and aggregate but public keys are bigger and slower to aggregate. Used when signing and verification operations are computed often or for minimizing storage or bandwidth for signatures. + +This library supports both of these variants for all three signature schemes. The more widely deployed +variant is short public keys, long signatures. We refer to this variant as `UsualBls`. The other variant, +short signatures, long public keys, is named `TinyBls`. +The naming convention follows Sig`SchemeType`[Vt] where **Vt** is short for variant. For example, + +- **Usual Bls Basic -> SigBasic**: Provides all the functions for the Basic signature scheme with signatures in 𝔾2 and public keys in 𝔾1 +- **Tiny Bls Basic -> SigBasicVt**: Provides all the functions for the Basic signature scheme with signatures in 𝔾1 and public keys in 𝔾2 + +One final note: in cryptography, it is considered good practice to use domain separation values to limit attacks to specific contexts. The [standard](https://datatracker.ietf.org/doc/draft-irtf-cfrg-bls-signature/?include_text=1) +recommends specific values for each scheme but this library also supports supplying custom domain separation values. For example, there are two functions for creating +a BLS signing instance: + +- **NewSigBasic()** creates a Basic BLS signature using the recommended domain separation value. +- **NewSigBasicWithDst(dst)** creates a Basic BLS signature using the parameter `dst` as the domain separation value such as in the [Eth2.0 Spec](https://github.com/ethereum/eth2.0-specs/blob/dev/specs/phase0/validator.md#attestation-aggregation) + +Also implemented is Threshold BLS as described in section 3.2 of [B03](https://www.cc.gatech.edu/~aboldyre/papers/bold.pdf). + +- ThresholdKeygen(parts, threshold int) -> ([]\*SecretKeyShare, error) +- PartialSign(share *SecretKeyShare, msg []byte) -> *PartialSignature +- CombineSigs(*PartialSignature…) -> *Signature + +### Security Considerations + +#### Validating Secret Keys + +Low entropy secret keys are a problem since this means an attacker can more easily guess the value and now can impersonate the key holder. +Secret keys are derived using trusted random number generators (TRNG). If the TRNG produces bad entropy +the secret key will be weak. Secret keys are checked to be non-zero values. A zero value secret key +not only fails the entropy test but also yields a public key that will validate any signature. + +#### Validating Public Keys + +Public keys are ensured to be valid. A valid public key means +it represents a valid, non-identity elliptic curve point in the correct subgroup. +Public keys that are the identity element mean any signature would pass a call to verify. + +#### Validating Signatures + +Generated signatures are ensured to be valid.
A valid signature means +it represents a valid, non-identity elliptic curve point in the correct subgroup. +Signatures that are the identity element mean any message would pass a call to verify. +Verify checks signatures for non-identity elements in the correct subgroup before checking +for a valid signature. + +#### Mitigating Rogue Key Attacks + +A rogue key attack can only happen to multisignatures or aggregated signatures. +A rogue key attack is where at least one party produces a valid but malicious public key such that the multisignature requires fewer than the threshold number of participants to verify a signature. +There are two ways to mitigate rogue key attacks: guarantee all signed messages are unique or validate each public key before use. This library offers both solutions. + +### Comparison to Other BLS Implementations and Integrations + +This library has been tested to be compatible with the following implementations +by randomly generating millions of keys and signatures and importing and verifying +them between this library and each of the others. + +1. Ethereum's [py_ecc](https://github.com/ethereum/py_ecc) +2. PhoreProject [Go BLS](https://github.com/phoreproject/bls) +3. Herumi's [BLS](https://github.com/herumi/bls-eth-go-binary) +4. Algorand's [Rust BLS Sig](https://crates.io/crates/bls_sigs_ref) +5. Miracl's [Rust and Go BLS sig](https://github.com/miracl/core) + +The most common and high-risk failures that can occur with library implementations +are low entropy secret keys, malformed public keys, and invalid signature generation. + +Low entropy secret keys are a problem since this means an attacker can more easily guess +the value and now can impersonate the key holder. + +Malformed public keys and signatures result in invalid addresses and no ability to withdraw funds. + +To check for these problems, we tested millions of keys/signatures (versus, say, thousands or hundreds) to show that + +1. The odds of producing an invalid key/signature are already theoretically low (on the order of 2^-255) +2. The results of running millions of checks show that nothing bad happened in 10M attempts + +In other words, keys and signatures generated from these libraries can be consumed by this library +and vice versa. + +Some of the libraries' hash-to-curve implementations were either out of date +or not compliant with the [IETF Spec](https://datatracker.ietf.org/doc/draft-irtf-cfrg-hash-to-curve/). +This meant that signatures generated by this library would not validate with others and vice versa. +However, public keys were found to be compatible in all libraries. + +This library is compliant with the latest version (10 as of this writing). diff --git a/signatures/bls/bls_sig/lib.go b/signatures/bls/bls_sig/lib.go new file mode 100644 index 0000000..d546754 --- /dev/null +++ b/signatures/bls/bls_sig/lib.go @@ -0,0 +1,208 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved.
+// +// SPDX-License-Identifier: Apache-2.0 +// + +// Package bls_sig is an implementation of the BLS signature defined in https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03 +package bls_sig + +import ( + "crypto/rand" + "crypto/sha256" + "crypto/subtle" + "fmt" + + "golang.org/x/crypto/hkdf" + + "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/core/curves/native" + "github.com/sonr-io/sonr/crypto/core/curves/native/bls12381" + "github.com/sonr-io/sonr/crypto/internal" + "github.com/sonr-io/sonr/crypto/sharing" +) + +// Secret key in Fr +const SecretKeySize = 32 + +// Secret key share with identifier byte in Fr +const SecretKeyShareSize = 33 + +// The salt used with generating secret keys +// See section 2.3 from https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03 +const hkdfKeyGenSalt = "BLS-SIG-KEYGEN-SALT-" + +// Represents a value mod r where r is the curve order or +// order of the subgroups in G1 and G2 +type SecretKey struct { + value *native.Field +} + +func allRowsUnique(data [][]byte) bool { + seen := make(map[string]bool) + for _, row := range data { + m := string(row) + if _, ok := seen[m]; ok { + return false + } + seen[m] = true + } + return true +} + +func generateRandBytes(count int) ([]byte, error) { + ikm := make([]byte, count) + cnt, err := rand.Read(ikm) + if err != nil { + return nil, err + } + if cnt != count { + return nil, fmt.Errorf("unable to read sufficient random data") + } + return ikm, nil +} + +// Creates a new BLS secret key +// Input key material (ikm) MUST be at least 32 bytes long, +// but it MAY be longer. +func (sk SecretKey) Generate(ikm []byte) (*SecretKey, error) { + if len(ikm) < 32 { + return nil, fmt.Errorf("ikm is too short. Must be at least 32") + } + + // https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-04#section-2.3 + h := sha256.New() + n, err := h.Write([]byte(hkdfKeyGenSalt)) + if err != nil { + return nil, err + } + if n != len(hkdfKeyGenSalt) { + return nil, fmt.Errorf("incorrect salt bytes written to be hashed") + } + salt := h.Sum(nil) + + ikm = append(ikm, 0) + // Leaves key_info parameter as the default empty string + // and just adds parameter I2OSP(L, 2) + var okm [native.WideFieldBytes]byte + kdf := hkdf.New(sha256.New, ikm, salt, []byte{0, 48}) + read, err := kdf.Read(okm[:48]) + copy(okm[:48], internal.ReverseScalarBytes(okm[:48])) + if err != nil { + return nil, err + } + if read != 48 { + return nil, fmt.Errorf("failed to create private key") + } + v := bls12381.Bls12381FqNew().SetBytesWide(&okm) + return &SecretKey{value: v}, nil +} + +// Serialize a secret key to raw bytes +func (sk SecretKey) MarshalBinary() ([]byte, error) { + bytes := sk.value.Bytes() + return internal.ReverseScalarBytes(bytes[:]), nil +} + +// Deserialize a secret key from raw bytes +// Cannot be zero. Must be 32 bytes and cannot be all zeroes. 
+// https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03#section-2.3 +func (sk *SecretKey) UnmarshalBinary(data []byte) error { + if len(data) != SecretKeySize { + return fmt.Errorf("secret key must be %d bytes", SecretKeySize) + } + zeros := make([]byte, len(data)) + if subtle.ConstantTimeCompare(data, zeros) == 1 { + return fmt.Errorf("secret key cannot be zero") + } + var bb [native.FieldBytes]byte + copy(bb[:], internal.ReverseScalarBytes(data)) + value, err := bls12381.Bls12381FqNew().SetBytes(&bb) + if err != nil { + return err + } + sk.value = value + return nil +} + +// SecretKeyShare is shamir share of a private key +type SecretKeyShare struct { + identifier byte + value []byte +} + +// Serialize a secret key share to raw bytes +func (sks SecretKeyShare) MarshalBinary() ([]byte, error) { + var blob [SecretKeyShareSize]byte + l := len(sks.value) + copy(blob[:l], sks.value) + blob[l] = sks.identifier + return blob[:], nil +} + +// Deserialize a secret key share from raw bytes +func (sks *SecretKeyShare) UnmarshalBinary(data []byte) error { + if len(data) != SecretKeyShareSize { + return fmt.Errorf("secret key share must be %d bytes", SecretKeyShareSize) + } + + zeros := make([]byte, len(data)) + if subtle.ConstantTimeCompare(data, zeros) == 1 { + return fmt.Errorf("secret key share cannot be zero") + } + l := len(data) + sks.identifier = data[l-1] + sks.value = make([]byte, SecretKeySize) + copy(sks.value, data[:l]) + return nil +} + +// thresholdizeSecretKey splits a composite secret key such that +// `threshold` partial signatures can be combined to form a composite signature +func thresholdizeSecretKey(secretKey *SecretKey, threshold, total uint) ([]*SecretKeyShare, error) { + // Verify our parameters are acceptable. + if secretKey == nil { + return nil, fmt.Errorf("secret key is nil") + } + if threshold > total { + return nil, fmt.Errorf("threshold cannot be greater than the total") + } + if threshold == 0 { + return nil, fmt.Errorf("threshold cannot be zero") + } + if total <= 1 { + return nil, fmt.Errorf("total must be larger than 1") + } + if total > 255 || threshold > 255 { + return nil, fmt.Errorf("cannot have more than 255 shares") + } + + curve := curves.BLS12381G1() + sss, err := sharing.NewShamir(uint32(threshold), uint32(total), curve) + if err != nil { + return nil, err + } + secret, ok := curve.NewScalar().(*curves.ScalarBls12381) + if !ok { + return nil, fmt.Errorf("invalid curve") + } + secret.Value = secretKey.value + shares, err := sss.Split(secret, rand.Reader) + if err != nil { + return nil, err + } + // Verify we got the expected number of shares + if uint(len(shares)) != total { + return nil, fmt.Errorf("%v != %v shares", len(shares), total) + } + + // Package our shares + secrets := make([]*SecretKeyShare, len(shares)) + for i, s := range shares { + // users expect BigEndian + sks := &SecretKeyShare{identifier: byte(s.Id), value: s.Value} + secrets[i] = sks + } + + return secrets, nil +} diff --git a/signatures/bls/bls_sig/lib_test.go b/signatures/bls/bls_sig/lib_test.go new file mode 100644 index 0000000..75e8777 --- /dev/null +++ b/signatures/bls/bls_sig/lib_test.go @@ -0,0 +1,481 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package bls_sig + +import ( + "bytes" + "crypto/rand" + "encoding" + "math/big" + "testing" + + "github.com/sonr-io/sonr/crypto/core/curves/native/bls12381" + "github.com/sonr-io/sonr/crypto/internal" +) + +func genSecretKey(t *testing.T) *SecretKey { + ikm := make([]byte, 32) + sk, err := new(SecretKey).Generate(ikm) + if err != nil { + t.Errorf("Couldn't generate secret key") + } + return sk +} + +func genRandSecretKey(ikm []byte, t *testing.T) *SecretKey { + sk, err := new(SecretKey).Generate(ikm) + if err != nil { + t.Errorf("Couldn't generate secret key") + } + return sk +} + +func genPublicKeyVt(sk *SecretKey, t *testing.T) *PublicKeyVt { + pk, err := sk.GetPublicKeyVt() + if err != nil { + t.Errorf("Expected GetPublicKeyVt to pass but failed: %v", err) + } + return pk +} + +func genPublicKey(sk *SecretKey, t *testing.T) *PublicKey { + pk, err := sk.GetPublicKey() + if err != nil { + t.Errorf("GetPublicKey failed. Couldn't generate public key: %v", err) + } + return pk +} + +func genSignature(sk *SecretKey, message []byte, t *testing.T) *Signature { + bls := NewSigPop() + + sig, err := bls.Sign(sk, message) + if err != nil { + t.Errorf("createSignature couldn't sign message: %v", err) + } + return sig +} + +func genSignatureVt(sk *SecretKey, message []byte, t *testing.T) *SignatureVt { + bls := NewSigPopVt() + sig, err := bls.Sign(sk, message) + if err != nil { + t.Errorf("createSignatureVt couldn't sign message: %v", err) + } + return sig +} + +func readRand(ikm []byte, t *testing.T) { + n, err := rand.Read(ikm) + if err != nil || n < len(ikm) { + t.Errorf("Not enough data was read or an error occurred") + } +} + +func assertSecretKeyGen(seed, expected []byte, t *testing.T) { + sk, err := new(SecretKey).Generate(seed) + if err != nil { + t.Errorf("Expected Generate to succeed but failed") + } + actual, _ := sk.MarshalBinary() + if len(actual) != len(expected) { + t.Errorf("Length of Generate output is incorrect. Expected 32, found: %v\n", len(actual)) + } + if !bytes.Equal(actual[:], expected) { + t.Errorf("SecretKey was not as expected") + } +} + +func marshalStruct(value encoding.BinaryMarshaler, t *testing.T) []byte { + out, err := value.MarshalBinary() + if err != nil { + t.Errorf("MarshalBinary failed: %v", err) + } + return out +} + +func TestSecretKeyZeroBytes(t *testing.T) { + seed := []byte{} + _, err := new(SecretKey).Generate(seed) + if err == nil { + t.Errorf("Expected Generate to fail but succeeded") + } +} + +func TestMarshalLeadingZeroes(t *testing.T) { + tests := []struct { + name string + in []byte + }{ + { + "no leading zeroes", + []byte{ + 74, + 53, + 59, + 227, + 218, + 192, + 145, + 160, + 167, + 230, + 64, + 98, + 3, + 114, + 245, + 225, + 226, + 228, + 64, + 23, + 23, + 193, + 231, + 156, + 172, + 111, + 251, + 168, + 246, + 144, + 86, + 4, + }, + }, + { + "one leading zero byte", + []byte{ + 0o0, + 53, + 59, + 227, + 218, + 192, + 145, + 160, + 167, + 230, + 64, + 98, + 3, + 114, + 245, + 225, + 226, + 228, + 64, + 23, + 23, + 193, + 231, + 156, + 172, + 111, + 251, + 168, + 246, + 144, + 86, + 4, + }, + }, + { + "two leading zeroes", + []byte{ + 0o0, + 0o0, + 59, + 227, + 218, + 192, + 145, + 160, + 167, + 230, + 64, + 98, + 3, + 114, + 245, + 225, + 226, + 228, + 64, + 23, + 23, + 193, + 231, + 156, + 172, + 111, + 251, + 168, + 246, + 144, + 86, + 4, + }, + }, + } + // Run all the tests! 
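+ // A single scratch field element is reused for every vector: each input is loaded via SetBigInt, marshalled, and then round-tripped through UnmarshalBinary.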
+ ss := bls12381.Bls12381FqNew() + for _, test := range tests { + // Marshal + var k big.Int + k.SetBytes(test.in) + ss.SetBigInt(&k) + bytes, err := SecretKey{ss}.MarshalBinary() + if err != nil { + t.Errorf("%v", err) + continue + } + + // Test that marshal produces a values of the exected len + t.Run(test.name, func(t *testing.T) { + if len(bytes) != SecretKeySize { + t.Errorf("expected len=%v got len=%v", SecretKeySize, len(bytes)) + } + }) + + // Test that we can also unmarhsal correctly + t.Run(test.name, func(t *testing.T) { + var actual SecretKey + err := actual.UnmarshalBinary(bytes) + // Test for error + if err != nil { + t.Errorf("%v", err) + return + } + + // Test for correctness + if actual.value.Cmp(ss) != 0 { + t.Errorf("unmarshaled doens't match original value") + } + }) + } +} + +func TestSecretKey32Bytes(t *testing.T) { + seed := make([]byte, 32) + expected := []byte{ + 77, + 18, + 154, + 25, + 223, + 134, + 160, + 245, + 52, + 91, + 173, + 76, + 198, + 242, + 73, + 236, + 42, + 129, + 156, + 204, + 51, + 134, + 137, + 91, + 235, + 79, + 125, + 152, + 179, + 219, + 98, + 53, + } + assertSecretKeyGen(seed, expected, t) +} + +func TestSecretKey128Bytes(t *testing.T) { + seed := make([]byte, 128) + expected := []byte{ + 97, + 207, + 109, + 96, + 94, + 90, + 233, + 215, + 221, + 207, + 240, + 139, + 24, + 209, + 152, + 170, + 73, + 209, + 151, + 241, + 148, + 176, + 173, + 92, + 101, + 48, + 39, + 175, + 201, + 219, + 146, + 168, + } + assertSecretKeyGen(seed, expected, t) +} + +func TestRandomSecretKey(t *testing.T) { + seed := make([]byte, 48) + _, _ = rand.Read(seed) + _, err := new(SecretKey).Generate(seed) + if err != nil { + t.Errorf("Expected Generate to succeed but failed") + } +} + +func TestSecretKeyToBytes(t *testing.T) { + sk := genSecretKey(t) + skBytes := marshalStruct(sk, t) + sk1 := new(SecretKey) + err := sk1.UnmarshalBinary(skBytes) + if err != nil { + t.Errorf("Expected UnmarshalBinary to pass but failed: %v", err) + } + out := sk1.value.Bytes() + for i, b := range internal.ReverseScalarBytes(out[:]) { + if skBytes[i] != b { + t.Errorf( + "Expected secret keys to be equal but are different at offset %d: %v != %v", + i, + skBytes[i], + b, + ) + } + } + sk2 := new(SecretKey) + err = sk2.UnmarshalBinary(skBytes) + if err != nil { + t.Errorf("Expected FromBytes to succeed but failed.") + } + if !bytes.Equal(marshalStruct(sk2, t), skBytes) { + t.Errorf("Expected secret keys to be equal but are different") + } +} + +// Verifies that the thresholdize creates the expected number +// of shares +func TestThresholdizeSecretKeyCountsCorrect(t *testing.T) { + sk := &SecretKey{value: bls12381.Bls12381FqNew().SetBigInt(big.NewInt(248631463258962596))} + tests := []struct { + key *SecretKey + t, n uint + expectedError bool + }{ + // bad cases + {sk, 1, 1, true}, // n == 1 + {sk, 1, 5, true}, // t == 1 + {nil, 3, 5, true}, // sk nil + {sk, 101, 100, true}, // t> n + {sk, 0, 10, true}, // t == 0 + {sk, 10, 256, true}, // n > 256 + + // good cases + {sk, 10, 10, false}, // t == n + {sk, 2, 10, false}, // boundary case for t + {sk, 9, 10, false}, // boundary case for t + {sk, 10, 255, false}, // boundary case for n + {sk, 254, 255, false}, // boundary case for t,n + {sk, 100, 200, false}, // arbitrary t,n values + {sk, 10, 20, false}, // arbitrary t,n values + {sk, 15, 200, false}, // arbitrary t,n values + {sk, 254, 255, false}, // boundary case + {sk, 255, 255, false}, // boundary case + } + + // Run all the tests! 
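+ // Each case must fail exactly when expectedError is set; successful cases must yield n shares.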
+ for i, test := range tests { + shares, err := thresholdizeSecretKey(test.key, test.t, test.n) + + // Check for errors + if test.expectedError && err == nil { + t.Errorf( + "%d - expected an error but received nil. t=%v, n=%v, sk=%v", + i, + test.t, + test.n, + sk, + ) + } + + // Check for errors + if !test.expectedError && err != nil { + t.Errorf( + "%d - received unexpected error %v. t=%v, n=%v, sk=%v", + i, + err, + test.t, + test.n, + sk, + ) + } + + // Check the share count == n + if !test.expectedError && test.n != uint(len(shares)) { + t.Errorf("%d - expected len(shares) = %v != %v (n)", i, len(shares), test.n) + } + } +} + +func TestSecretKeyShareUnmarshalBinary(t *testing.T) { + sk := genSecretKey(t) + sks, err := thresholdizeSecretKey(sk, 3, 5) + if err != nil { + t.Errorf("Expected thresholdizeSecretKey to pass but failed.") + } + + for i, sh := range sks { + b1, err := sh.MarshalBinary() + if err != nil { + t.Errorf("%d - expected MarshalBinary to pass but failed. sh=%v", i, sh) + } + + // UnmarshalBinary b1 to new SecretKeyShare + sh1 := new(SecretKeyShare) + err = sh1.UnmarshalBinary(b1) + if err != nil { + t.Errorf("%d - expected UnmarshalBinary to pass but failed. sh=%v", i, sh) + } + + // zero bytes slice with length equal to SecretKeySize + zeros := make([]byte, SecretKeySize) + b2, err := sh1.MarshalBinary() + if err != nil { + t.Errorf("%d - expected MarshalBinary to pass but failed. sh1=%v", i, sh1) + } + + // Check if []bytes from UnmarshalBinary != zeros && Initial bytes(b1) == Final bytes(b2) + if bytes.Equal(zeros, b2) && !bytes.Equal(b1, b2) { + t.Errorf( + "%d - expected UnmarshalBinary to give non zeros value but failed. sh1=%v, sh=%v", + i, + sh1, + sh, + ) + } + } +} diff --git a/signatures/bls/bls_sig/tiny_bls.go b/signatures/bls/bls_sig/tiny_bls.go new file mode 100755 index 0000000..ff27971 --- /dev/null +++ b/signatures/bls/bls_sig/tiny_bls.go @@ -0,0 +1,484 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package bls_sig + +import ( + "fmt" +) + +const ( + // Domain separation tag for basic signatures + // according to section 4.2.1 in + // https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03 + blsSignatureBasicVtDst = "BLS_SIG_BLS12381G1_XMD:SHA-256_SSWU_RO_NUL_" + // Domain separation tag for basic signatures + // according to section 4.2.2 in + // https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03 + blsSignatureAugVtDst = "BLS_SIG_BLS12381G1_XMD:SHA-256_SSWU_RO_AUG_" + // Domain separation tag for proof of possession signatures + // according to section 4.2.3 in + // https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03 + blsSignaturePopVtDst = "BLS_SIG_BLS12381G1_XMD:SHA-256_SSWU_RO_POP_" + // Domain separation tag for proof of possession proofs + // according to section 4.2.3 in + // https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03 + blsPopProofVtDst = "BLS_POP_BLS12381G1_XMD:SHA-256_SSWU_RO_POP_" +) + +type BlsSchemeVt interface { + Keygen() (*PublicKeyVt, *SecretKey, error) + KeygenWithSeed(ikm []byte) (*PublicKeyVt, *SecretKey, error) + Sign(sk *SecretKey, msg []byte) (*SignatureVt, error) + Verify(pk *PublicKeyVt, msg []byte, sig *SignatureVt) bool + AggregateVerify(pks []*PublicKeyVt, msgs [][]byte, sigs []*SignatureVt) bool +} + +// generateKeysVt creates 32 bytes of random data to be fed to +// generateKeysWithSeedVt +func generateKeysVt() (*PublicKeyVt, *SecretKey, error) { + ikm, err := generateRandBytes(32) + if err != nil { + return nil, nil, err + } + return generateKeysWithSeedVt(ikm) +} + +// generateKeysWithSeedVt generates a BLS key pair given input key material (ikm) +func generateKeysWithSeedVt(ikm []byte) (*PublicKeyVt, *SecretKey, error) { + sk, err := new(SecretKey).Generate(ikm) + if err != nil { + return nil, nil, err + } + pk, err := sk.GetPublicKeyVt() + if err != nil { + return nil, nil, err + } + return pk, sk, nil +} + +// thresholdGenerateKeys will generate random secret key shares and the corresponding public key +func thresholdGenerateKeysVt(threshold, total uint) (*PublicKeyVt, []*SecretKeyShare, error) { + pk, sk, err := generateKeysVt() + if err != nil { + return nil, nil, err + } + shares, err := thresholdizeSecretKey(sk, threshold, total) + if err != nil { + return nil, nil, err + } + return pk, shares, nil +} + +// thresholdGenerateKeysWithSeed will generate random secret key shares and the corresponding public key +// using the corresponding seed `ikm` +func thresholdGenerateKeysWithSeedVt( + ikm []byte, + threshold, total uint, +) (*PublicKeyVt, []*SecretKeyShare, error) { + pk, sk, err := generateKeysWithSeedVt(ikm) + if err != nil { + return nil, nil, err + } + shares, err := thresholdizeSecretKey(sk, threshold, total) + if err != nil { + return nil, nil, err + } + return pk, shares, nil +} + +// SigBasicVt is minimal-pubkey-size scheme that doesn't support FastAggregateVerification. +// see: https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03#section-4.2.1 +type SigBasicVt struct { + dst string +} + +// Creates a new BLS basic signature scheme with the standard domain separation tag used for signatures. +func NewSigBasicVt() *SigBasicVt { + return &SigBasicVt{dst: blsSignatureBasicVtDst} +} + +// Creates a new BLS basic signature scheme with a custom domain separation tag used for signatures. 
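+// Callers should pick a tag unique to their protocol; reusing another protocol's tag defeats the purpose of domain separation.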
+func NewSigBasicVtWithDst(signDst string) *SigBasicVt { + return &SigBasicVt{dst: signDst} +} + +// Creates a new BLS key pair +func (b SigBasicVt) Keygen() (*PublicKeyVt, *SecretKey, error) { + return generateKeysVt() +} + +// Creates a new BLS key pair +// Input key material (ikm) MUST be at least 32 bytes long, +// but it MAY be longer. +func (b SigBasicVt) KeygenWithSeed(ikm []byte) (*PublicKeyVt, *SecretKey, error) { + return generateKeysWithSeedVt(ikm) +} + +// ThresholdKeyGen generates a public key and `total` secret key shares such that +// `threshold` of them can be combined in signatures +func (b SigBasicVt) ThresholdKeygen( + threshold, total uint, +) (*PublicKeyVt, []*SecretKeyShare, error) { + return thresholdGenerateKeysVt(threshold, total) +} + +// ThresholdKeygenWithSeed generates a public key and `total` secret key shares such that +// `threshold` of them can be combined in signatures from input key material (ikm) +func (b SigBasicVt) ThresholdKeygenWithSeed( + ikm []byte, + threshold, total uint, +) (*PublicKeyVt, []*SecretKeyShare, error) { + return thresholdGenerateKeysWithSeedVt(ikm, threshold, total) +} + +// Computes a signature in G1 from sk, a secret key, and a message +func (b SigBasicVt) Sign(sk *SecretKey, msg []byte) (*SignatureVt, error) { + return sk.createSignatureVt(msg, b.dst) +} + +// Compute a partial signature in G2 that can be combined with other partial signature +func (b SigBasicVt) PartialSign(sks *SecretKeyShare, msg []byte) (*PartialSignatureVt, error) { + return sks.partialSignVt(msg, b.dst) +} + +// CombineSignatures takes partial signatures to yield a completed signature +func (b SigBasicVt) CombineSignatures(sigs ...*PartialSignatureVt) (*SignatureVt, error) { + return combineSigsVt(sigs) +} + +// Checks that a signature is valid for the message under the public key pk +func (b SigBasicVt) Verify(pk *PublicKeyVt, msg []byte, sig *SignatureVt) (bool, error) { + return pk.verifySignatureVt(msg, sig, b.dst) +} + +// The AggregateVerify algorithm checks an aggregated signature over +// several (PK, message, signature) pairs. +// Each message must be different or this will return false. +// See section 3.1.1 from +// https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03 +func (b SigBasicVt) AggregateVerify( + pks []*PublicKeyVt, + msgs [][]byte, + sigs []*SignatureVt, +) (bool, error) { + if !allRowsUnique(msgs) { + return false, fmt.Errorf("all messages must be distinct") + } + asig, err := aggregateSignaturesVt(sigs...) + if err != nil { + return false, err + } + return asig.aggregateVerify(pks, msgs, b.dst) +} + +// SigAugVt is minimal-signature-size scheme that doesn't support FastAggregateVerification. +// see: https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03#section-4.2.2 +type SigAugVt struct { + dst string +} + +// Creates a new BLS message augmentation signature scheme with the standard domain separation tag used for signatures. +func NewSigAugVt() *SigAugVt { + return &SigAugVt{dst: blsSignatureAugVtDst} +} + +// Creates a new BLS message augmentation signature scheme with a custom domain separation tag used for signatures. +func NewSigAugVtWithDst(signDst string) *SigAugVt { + return &SigAugVt{dst: signDst} +} + +// Creates a new BLS key pair +func (b SigAugVt) Keygen() (*PublicKeyVt, *SecretKey, error) { + return generateKeysVt() +} + +// Creates a new BLS secret key +// Input key material (ikm) MUST be at least 32 bytes long, +// but it MAY be longer. 
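+// The corresponding public key is returned alongside the secret key.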
+func (b SigAugVt) KeygenWithSeed(ikm []byte) (*PublicKeyVt, *SecretKey, error) {
+	return generateKeysWithSeedVt(ikm)
+}
+
+// ThresholdKeygen generates a public key and `total` secret key shares such that
+// `threshold` of them can be combined in signatures
+func (b SigAugVt) ThresholdKeygen(threshold, total uint) (*PublicKeyVt, []*SecretKeyShare, error) {
+	return thresholdGenerateKeysVt(threshold, total)
+}
+
+// ThresholdKeygenWithSeed generates a public key and `total` secret key shares such that
+// `threshold` of them can be combined in signatures
+func (b SigAugVt) ThresholdKeygenWithSeed(
+	ikm []byte,
+	threshold, total uint,
+) (*PublicKeyVt, []*SecretKeyShare, error) {
+	return thresholdGenerateKeysWithSeedVt(ikm, threshold, total)
+}
+
+// Computes a signature in G1 from sk, a secret key, and a message
+// See section 3.2.1 from
+// https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-02
+func (b SigAugVt) Sign(sk *SecretKey, msg []byte) (*SignatureVt, error) {
+	pk, err := sk.GetPublicKeyVt()
+	if err != nil {
+		return nil, err
+	}
+	bytes, err := pk.MarshalBinary()
+	if err != nil {
+		return nil, fmt.Errorf("MarshalBinary failed")
+	}
+	bytes = append(bytes, msg...)
+	return sk.createSignatureVt(bytes, b.dst)
+}
+
+// Computes a partial signature in G1 that can be combined with other partial signatures
+func (b SigAugVt) PartialSign(
+	sks *SecretKeyShare,
+	pk *PublicKeyVt,
+	msg []byte,
+) (*PartialSignatureVt, error) {
+	if len(msg) == 0 {
+		return nil, fmt.Errorf("message cannot be empty or nil")
+	}
+	bytes, err := pk.MarshalBinary()
+	if err != nil {
+		return nil, fmt.Errorf("MarshalBinary failed")
+	}
+	bytes = append(bytes, msg...)
+	return sks.partialSignVt(bytes, b.dst)
+}
+
+// CombineSignatures takes partial signatures to yield a completed signature
+func (b SigAugVt) CombineSignatures(sigs ...*PartialSignatureVt) (*SignatureVt, error) {
+	return combineSigsVt(sigs)
+}
+
+// Checks that a signature is valid for the message under the public key pk
+// See section 3.2.2 from
+// https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03
+func (b SigAugVt) Verify(pk *PublicKeyVt, msg []byte, sig *SignatureVt) (bool, error) {
+	bytes, err := pk.MarshalBinary()
+	if err != nil {
+		return false, err
+	}
+	bytes = append(bytes, msg...)
+	return pk.verifySignatureVt(bytes, sig, b.dst)
+}
+
+// The AggregateVerify algorithm checks an aggregated signature over
+// several (PK, message, signature) pairs.
+// See section 3.2.3 from
+// https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03
+func (b SigAugVt) AggregateVerify(
+	pks []*PublicKeyVt,
+	msgs [][]byte,
+	sigs []*SignatureVt,
+) (bool, error) {
+	if len(pks) != len(msgs) {
+		return false, fmt.Errorf(
+			"the number of public keys does not match the number of messages: %v != %v",
+			len(pks),
+			len(msgs),
+		)
+	}
+	data := make([][]byte, len(msgs))
+	for i, msg := range msgs {
+		bytes, err := pks[i].MarshalBinary()
+		if err != nil {
+			return false, err
+		}
+		data[i] = append(bytes, msg...)
+	}
+	asig, err := aggregateSignaturesVt(sigs...)
+	if err != nil {
+		return false, err
+	}
+	return asig.aggregateVerify(pks, data, b.dst)
+}
+
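Because the augmented scheme folds each signer's serialized public key into the signed bytes, its AggregateVerify does not require the messages themselves to be distinct; distinctness comes from the per-signer prefix. A hypothetical sketch (written as if it sat inside this package, with minimal error handling):

package bls_sig

// exampleAugVtAggregate is a hypothetical helper: several signers sign the
// same message with the augmented Vt scheme and the batch is checked with
// AggregateVerify. The pk || msg augmentation keeps the hashed inputs distinct.
func exampleAugVtAggregate(msg []byte, signers int) (bool, error) {
	bls := NewSigAugVt()

	pks := make([]*PublicKeyVt, signers)
	sigs := make([]*SignatureVt, signers)
	msgs := make([][]byte, signers)
	for i := 0; i < signers; i++ {
		pk, sk, err := bls.Keygen()
		if err != nil {
			return false, err
		}
		sig, err := bls.Sign(sk, msg)
		if err != nil {
			return false, err
		}
		pks[i], sigs[i], msgs[i] = pk, sig, msg
	}
	return bls.AggregateVerify(pks, msgs, sigs)
}

+// SigEth2Vt supports signatures on Eth2.
+// Internally it is an alias for SigPopVt
+type SigEth2Vt = SigPopVt
+
+// NewSigEth2Vt creates a new BLS ETH2 signature scheme with the standard domain separation tag used for signatures.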
+func NewSigEth2Vt() *SigEth2Vt { + return NewSigPopVt() +} + +// SigPopVt is minimal-signature-size scheme that supports FastAggregateVerification +// and requires using proofs of possession to mitigate rogue-key attacks +// see: https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03#section-4.2.3 +type SigPopVt struct { + sigDst string + popDst string +} + +// Creates a new BLS proof of possession signature scheme with the standard domain separation tag used for signatures. +func NewSigPopVt() *SigPopVt { + return &SigPopVt{sigDst: blsSignaturePopVtDst, popDst: blsPopProofVtDst} +} + +// Creates a new BLS message proof of possession signature scheme with a custom domain separation tag used for signatures. +func NewSigPopVtWithDst(signDst, popDst string) (*SigPopVt, error) { + if signDst == popDst { + return nil, fmt.Errorf("domain separation tags cannot be equal") + } + return &SigPopVt{sigDst: signDst, popDst: popDst}, nil +} + +// Creates a new BLS key pair +func (b SigPopVt) Keygen() (*PublicKeyVt, *SecretKey, error) { + return generateKeysVt() +} + +// Creates a new BLS secret key +// Input key material (ikm) MUST be at least 32 bytes long, +// but it MAY be longer. +func (b SigPopVt) KeygenWithSeed(ikm []byte) (*PublicKeyVt, *SecretKey, error) { + return generateKeysWithSeedVt(ikm) +} + +// ThresholdKeyGen generates a public key and `total` secret key shares such that +// `threshold` of them can be combined in signatures +func (b SigPopVt) ThresholdKeygen(threshold, total uint) (*PublicKeyVt, []*SecretKeyShare, error) { + return thresholdGenerateKeysVt(threshold, total) +} + +// ThresholdKeyGen generates a public key and `total` secret key shares such that +// `threshold` of them can be combined in signatures +func (b SigPopVt) ThresholdKeygenWithSeed( + ikm []byte, + threshold, total uint, +) (*PublicKeyVt, []*SecretKeyShare, error) { + return thresholdGenerateKeysWithSeedVt(ikm, threshold, total) +} + +// Computes a signature in G1 from sk, a secret key, and a message +// See section 2.6 from +// https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03 +func (b SigPopVt) Sign(sk *SecretKey, msg []byte) (*SignatureVt, error) { + return sk.createSignatureVt(msg, b.sigDst) +} + +// Compute a partial signature in G2 that can be combined with other partial signature +func (b SigPopVt) PartialSign(sks *SecretKeyShare, msg []byte) (*PartialSignatureVt, error) { + return sks.partialSignVt(msg, b.sigDst) +} + +// CombineSignatures takes partial signatures to yield a completed signature +func (b SigPopVt) CombineSignatures(sigs ...*PartialSignatureVt) (*SignatureVt, error) { + return combineSigsVt(sigs) +} + +// Checks that a signature is valid for the message under the public key pk +// See section 2.7 from +// https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03 +func (b SigPopVt) Verify(pk *PublicKeyVt, msg []byte, sig *SignatureVt) (bool, error) { + return pk.verifySignatureVt(msg, sig, b.sigDst) +} + +// The aggregateVerify algorithm checks an aggregated signature over +// several (PK, message, signature) pairs. +// Each message must be different or this will return false. +// See section 3.1.1 from +// https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-02 +func (b SigPopVt) AggregateVerify( + pks []*PublicKeyVt, + msgs [][]byte, + sigs []*SignatureVt, +) (bool, error) { + if !allRowsUnique(msgs) { + return false, fmt.Errorf("all messages must be distinct") + } + asig, err := aggregateSignaturesVt(sigs...) 
+ if err != nil { + return false, err + } + return asig.aggregateVerify(pks, msgs, b.sigDst) +} + +// Combine many signatures together to form a Multisignature. +// Multisignatures can be created when multiple signers jointly +// generate signatures over the same message. +func (b SigPopVt) AggregateSignatures(sigs ...*SignatureVt) (*MultiSignatureVt, error) { + g1, err := aggregateSignaturesVt(sigs...) + if err != nil { + return nil, err + } + return &MultiSignatureVt{value: g1.value}, nil +} + +// Combine many public keys together to form a Multipublickey. +// Multipublickeys are used to verify multisignatures. +func (b SigPopVt) AggregatePublicKeys(pks ...*PublicKeyVt) (*MultiPublicKeyVt, error) { + g2, err := aggregatePublicKeysVt(pks...) + if err != nil { + return nil, err + } + return &MultiPublicKeyVt{value: g2.value}, nil +} + +// Checks that a multisignature is valid for the message under the multi public key +// Similar to FastAggregateVerify except the keys and signatures have already been +// combined. See section 3.3.4 from +// https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-02 +func (b SigPopVt) VerifyMultiSignature( + pk *MultiPublicKeyVt, + msg []byte, + sig *MultiSignatureVt, +) (bool, error) { + s := &SignatureVt{value: sig.value} + p := &PublicKeyVt{value: pk.value} + return p.verifySignatureVt(msg, s, b.sigDst) +} + +// FastAggregateVerify verifies an aggregated signature over the same message under the given public keys. +// See section 3.3.4 from +// https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03 +func (b SigPopVt) FastAggregateVerify( + pks []*PublicKeyVt, + msg []byte, + asig *SignatureVt, +) (bool, error) { + apk, err := aggregatePublicKeysVt(pks...) + if err != nil { + return false, err + } + return apk.verifySignatureVt(msg, asig, b.sigDst) +} + +// FastAggregateVerifyConstituent verifies a list of signature over the same message under the given public keys. +// See section 3.3.4 from +// https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03 +func (b SigPopVt) FastAggregateVerifyConstituent( + pks []*PublicKeyVt, + msg []byte, + sigs []*SignatureVt, +) (bool, error) { + asig, err := aggregateSignaturesVt(sigs...) + if err != nil { + return false, err + } + return b.FastAggregateVerify(pks, msg, asig) +} + +// Create a proof of possession for the corresponding public key. +// A proof of possession must be created for each public key to be used +// in FastAggregateVerify or a Multipublickey to avoid rogue key attacks. +// See section 3.3.2 from +// https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03 +func (b SigPopVt) PopProve(sk *SecretKey) (*ProofOfPossessionVt, error) { + return sk.createProofOfPossessionVt(b.popDst) +} + +// verify a proof of possession for the corresponding public key is valid. +// A proof of possession must be created for each public key to be used +// in FastAggregateVerify or a Multipublickey to avoid rogue key attacks. +// See section 3.3.3 from +// https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03 +func (b SigPopVt) PopVerify(pk *PublicKeyVt, pop1 *ProofOfPossessionVt) (bool, error) { + return pop1.verify(pk, b.popDst) +} diff --git a/signatures/bls/bls_sig/tiny_bls_sig.go b/signatures/bls/bls_sig/tiny_bls_sig.go new file mode 100644 index 0000000..bc83b6e --- /dev/null +++ b/signatures/bls/bls_sig/tiny_bls_sig.go @@ -0,0 +1,516 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+//
+// SPDX-License-Identifier: Apache-2.0
+//
+
+package bls_sig
+
+import (
+	"fmt"
+
+	"github.com/sonr-io/sonr/crypto/core/curves/native"
+	"github.com/sonr-io/sonr/crypto/core/curves/native/bls12381"
+	"github.com/sonr-io/sonr/crypto/internal"
+)
+
+// Implements BLS signatures on the BLS12-381 curve
+// according to https://crypto.stanford.edu/~dabo/pubs/papers/BLSmultisig.html
+// and https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03
+// This file implements signatures in G1 and public keys in G2.
+// Public Keys and Signatures can be aggregated but the consumer
+// must use proofs of possession to defend against rogue-key attacks.
+
+const (
+	// Public key size in G2
+	PublicKeyVtSize = 96
+	// Signature size in G1
+	SignatureVtSize = 48
+	// Proof of Possession in G1
+	ProofOfPossessionVtSize = 48
+)
+
+// Represents a public key in G2
+type PublicKeyVt struct {
+	value bls12381.G2
+}
+
+// Serialize a public key to a byte array in compressed form.
+// See
+// https://github.com/zcash/librustzcash/blob/master/pairing/src/bls12_381/README.md#serialization
+// https://docs.rs/bls12_381/0.1.1/bls12_381/notes/serialization/index.html
+func (pk *PublicKeyVt) MarshalBinary() ([]byte, error) {
+	out := pk.value.ToCompressed()
+	return out[:], nil
+}
+
+// Deserialize a public key from a byte array in compressed form.
+// See
+// https://github.com/zcash/librustzcash/blob/master/pairing/src/bls12_381/README.md#serialization
+// https://docs.rs/bls12_381/0.1.1/bls12_381/notes/serialization/index.html
+// If successful, it will assign the public key
+// otherwise it will return an error
+func (pk *PublicKeyVt) UnmarshalBinary(data []byte) error {
+	if len(data) != PublicKeyVtSize {
+		return fmt.Errorf("public key must be %d bytes", PublicKeyVtSize)
+	}
+	var blob [PublicKeyVtSize]byte
+	copy(blob[:], data)
+	p2, err := new(bls12381.G2).FromCompressed(&blob)
+	if err != nil {
+		return err
+	}
+	if p2.IsIdentity() == 1 {
+		return fmt.Errorf("public keys cannot be zero")
+	}
+	pk.value = *p2
+	return nil
+}
+
+// Represents a BLS signature in G1
+type SignatureVt struct {
+	value bls12381.G1
+}
+
+// Serialize a signature to a byte array in compressed form.
+// See
+// https://github.com/zcash/librustzcash/blob/master/pairing/src/bls12_381/README.md#serialization
+// https://docs.rs/bls12_381/0.1.1/bls12_381/notes/serialization/index.html
+func (sig *SignatureVt) MarshalBinary() ([]byte, error) {
+	out := sig.value.ToCompressed()
+	return out[:], nil
+}
+
+func (sig *SignatureVt) verify(pk *PublicKeyVt, message []byte, signDstVt string) (bool, error) {
+	return pk.verifySignatureVt(message, sig, signDstVt)
+}
+
+// The AggregateVerify algorithm checks an aggregated signature over
+// several (PK, message) pairs.
+// The Signature is the output of aggregateSignaturesVt
+// Each message must be different or this will return false.
+// See section 3.1.1 from
+// https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03
+func (sig *SignatureVt) aggregateVerify(
+	pks []*PublicKeyVt,
+	msgs [][]byte,
+	signDstVt string,
+) (bool, error) {
+	return sig.coreAggregateVerify(pks, msgs, signDstVt)
+}
+
+func (sig *SignatureVt) coreAggregateVerify(
+	pks []*PublicKeyVt,
+	msgs [][]byte,
+	signDstVt string,
+) (bool, error) {
+	if len(pks) < 1 {
+		return false, fmt.Errorf("at least one key is required")
+	}
+	if len(msgs) < 1 {
+		return false, fmt.Errorf("at least one message is required")
+	}
+	if len(pks) != len(msgs) {
+		return false, fmt.Errorf(
+			"the number of public keys does not match the number of messages: %v != %v",
+			len(pks),
+			len(msgs),
+		)
+	}
+	if sig.value.InCorrectSubgroup() == 0 {
+		return false, fmt.Errorf("signature is not in the correct subgroup")
+	}
+
+	engine := new(bls12381.Engine)
+	dst := []byte(signDstVt)
+	// e(H(m_1), pk_1)*...*e(H(m_N), pk_N) == e(s, g2)
+	// However, we use only one Miller loop
+	// by doing the equivalent of
+	// e(H(m_1), pk_1)*...*e(H(m_N), pk_N) * e(s^-1, g2) == 1
+	for i, pk := range pks {
+		if pk == nil {
+			return false, fmt.Errorf("public key at %d is nil", i)
+		}
+		if pk.value.IsIdentity() == 1 || pk.value.InCorrectSubgroup() == 0 {
+			return false, fmt.Errorf("public key at %d is not in the correct subgroup", i)
+		}
+		p1 := new(bls12381.G1).Hash(native.EllipticPointHasherSha256(), msgs[i], dst)
+		engine.AddPair(p1, &pk.value)
+	}
+	engine.AddPairInvG2(&sig.value, new(bls12381.G2).Generator())
+	return engine.Check(), nil
+}
+
+// Deserialize a signature from a byte array in compressed form.
+// See
+// https://github.com/zcash/librustzcash/blob/master/pairing/src/bls12_381/README.md#serialization
+// https://docs.rs/bls12_381/0.1.1/bls12_381/notes/serialization/index.html
+// If successful, it will assign the Signature
+// otherwise it will return an error
+func (sig *SignatureVt) UnmarshalBinary(data []byte) error {
+	if len(data) != SignatureVtSize {
+		return fmt.Errorf("signature must be %d bytes", SignatureVtSize)
+	}
+	var blob [SignatureVtSize]byte
+	copy(blob[:], data)
+	p1, err := new(bls12381.G1).FromCompressed(&blob)
+	if err != nil {
+		return err
+	}
+	if p1.IsIdentity() == 1 {
+		return fmt.Errorf("signatures cannot be zero")
+	}
+	sig.value = *p1
+	return nil
+}
+
+// Get the corresponding public key from a secret key
+// Verifies the public key is in the correct subgroup
+func (sk *SecretKey) GetPublicKeyVt() (*PublicKeyVt, error) {
+	result := new(bls12381.G2).Mul(new(bls12381.G2).Generator(), sk.value)
+	if result.InCorrectSubgroup() == 0 || result.IsIdentity() == 1 {
+		return nil, fmt.Errorf("point is not in correct subgroup")
+	}
+	return &PublicKeyVt{value: *result}, nil
+}
+
+// Compute a signature from a secret key and message
+// This signature is deterministic which protects against
+// attacks arising from signing with bad randomness like
+// the nonce reuse attack on ECDSA. `message` is
+// hashed to a point in G1 as described in
+// https://datatracker.ietf.org/doc/draft-irtf-cfrg-hash-to-curve/?include_text=1
+// See Section 2.6 in https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03
+// nil message is not permitted but empty slice is allowed
+func (sk *SecretKey) createSignatureVt(message []byte, dstVt string) (*SignatureVt, error) {
+	if message == nil {
+		return nil, fmt.Errorf("message cannot be nil")
+	}
+	if sk.value.IsZero() == 1 {
+		return nil, fmt.Errorf("invalid secret key")
+	}
+	p1 := new(bls12381.G1).Hash(native.EllipticPointHasherSha256(), message, []byte(dstVt))
+	result := new(bls12381.G1).Mul(p1, sk.value)
+	if result.InCorrectSubgroup() == 0 {
+		return nil, fmt.Errorf("point is not in correct subgroup")
+	}
+	return &SignatureVt{value: *result}, nil
+}
+
+// Verify a signature is valid for the message under this public key.
+// See Section 2.7 in https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03
+func (pk PublicKeyVt) verifySignatureVt(
+	message []byte,
+	signature *SignatureVt,
+	dstVt string,
+) (bool, error) {
+	if signature == nil || message == nil || pk.value.IsIdentity() == 1 {
+		return false, fmt.Errorf("signature, message, and public key cannot be nil or zero")
+	}
+	if signature.value.IsIdentity() == 1 || signature.value.InCorrectSubgroup() == 0 {
+		return false, fmt.Errorf("signature is not in the correct subgroup")
+	}
+	engine := new(bls12381.Engine)
+
+	p1 := new(bls12381.G1).Hash(native.EllipticPointHasherSha256(), message, []byte(dstVt))
+	// e(H(m), pk) == e(s, g2)
+	// However, we can reduce the number of Miller loops
+	// by doing the equivalent of
+	// e(H(m)^-1, pk) * e(s, g2) == 1
+	engine.AddPairInvG1(p1, &pk.value)
+	engine.AddPair(&signature.value, new(bls12381.G2).Generator())
+	return engine.Check(), nil
+}
+
+// Combine public keys into one aggregated key
+func aggregatePublicKeysVt(pks ...*PublicKeyVt) (*PublicKeyVt, error) {
+	if len(pks) < 1 {
+		return nil, fmt.Errorf("at least one public key is required")
+	}
+	result := new(bls12381.G2).Identity()
+	for i, k := range pks {
+		if k == nil {
+			return nil, fmt.Errorf("key at %d is nil, keys cannot be nil", i)
+		}
+		if k.value.InCorrectSubgroup() == 0 {
+			return nil, fmt.Errorf("key at %d is not in the correct subgroup", i)
+		}
+		result.Add(result, &k.value)
+	}
+	return &PublicKeyVt{value: *result}, nil
+}
+
+// Combine signatures into one aggregated signature
+func aggregateSignaturesVt(sigs ...*SignatureVt) (*SignatureVt, error) {
+	if len(sigs) < 1 {
+		return nil, fmt.Errorf("at least one signature is required")
+	}
+	result := new(bls12381.G1).Identity()
+	for i, s := range sigs {
+		if s == nil {
+			return nil, fmt.Errorf("signature at %d is nil, signature cannot be nil", i)
+		}
+		if s.value.InCorrectSubgroup() == 0 {
+			return nil, fmt.Errorf("signature at %d is not in the correct subgroup", i)
+		}
+		result.Add(result, &s.value)
+	}
+	return &SignatureVt{value: *result}, nil
+}
+
+// A proof of possession scheme uses a separate public key validation
+// step, called a proof of possession, to defend against rogue key
+// attacks. This enables an optimization to aggregate signature
+// verification for the case that all signatures are on the same
+// message.
+type ProofOfPossessionVt struct {
+	value bls12381.G1
+}
+
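Putting the pieces together, the intended proof-of-possession workflow is: each signer publishes a PoP alongside its public key, the verifier checks every PoP once, and only then relies on the fast aggregate path for signatures over a shared message. A hypothetical sketch (written as if it sat inside this package, with minimal error handling):

package bls_sig

// examplePopVtFastAggregate is a hypothetical helper: n signers sign the same
// message, each public key is vetted with PopVerify, and the batch is checked
// with FastAggregateVerifyConstituent.
func examplePopVtFastAggregate(msg []byte, n int) (bool, error) {
	bls := NewSigPopVt()

	pks := make([]*PublicKeyVt, n)
	sigs := make([]*SignatureVt, n)
	for i := 0; i < n; i++ {
		pk, sk, err := bls.Keygen()
		if err != nil {
			return false, err
		}
		// Reject the key unless its proof of possession checks out.
		pop, err := bls.PopProve(sk)
		if err != nil {
			return false, err
		}
		if ok, err := bls.PopVerify(pk, pop); err != nil || !ok {
			return false, err
		}
		sig, err := bls.Sign(sk, msg)
		if err != nil {
			return false, err
		}
		pks[i], sigs[i] = pk, sig
	}
	// All signatures cover the same message, so they can be checked in one
	// aggregate verification.
	return bls.FastAggregateVerifyConstituent(pks, msg, sigs)
}

+// Generates a proof-of-possession (PoP) for this secret key. The PoP signature should be verified
+// before accepting any aggregate signatures related to the corresponding pubkey.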
+func (sk *SecretKey) createProofOfPossessionVt(popDstVt string) (*ProofOfPossessionVt, error) { + pk, err := sk.GetPublicKeyVt() + if err != nil { + return nil, err + } + msg, err := pk.MarshalBinary() + if err != nil { + return nil, err + } + sig, err := sk.createSignatureVt(msg, popDstVt) + if err != nil { + return nil, err + } + return &ProofOfPossessionVt{value: sig.value}, nil +} + +// Serialize a proof of possession to a byte array in compressed form. +// See +// https://github.com/zcash/librustzcash/blob/master/pairing/src/bls12_381/README.md#serialization +// https://docs.rs/bls12_381/0.1.1/bls12_381/notes/serialization/index.html +func (pop *ProofOfPossessionVt) MarshalBinary() ([]byte, error) { + out := pop.value.ToCompressed() + return out[:], nil +} + +// Deserialize a proof of possession from a byte array in compressed form. +// See +// https://github.com/zcash/librustzcash/blob/master/pairing/src/bls12_381/README.md#serialization +// https://docs.rs/bls12_381/0.1.1/bls12_381/notes/serialization/index.html +// If successful, it will assign the Signature +// otherwise it will return an error +func (pop *ProofOfPossessionVt) UnmarshalBinary(data []byte) error { + p1 := new(SignatureVt) + err := p1.UnmarshalBinary(data) + if err != nil { + return err + } + pop.value = p1.value + return nil +} + +// Verifies that PoP is valid for this pubkey. In order to prevent rogue key attacks, a PoP must be validated +// for each pubkey in an aggregated signature. +func (pop *ProofOfPossessionVt) verify(pk *PublicKeyVt, popDstVt string) (bool, error) { + if pk == nil { + return false, fmt.Errorf("public key cannot be nil") + } + msg, err := pk.MarshalBinary() + if err != nil { + return false, err + } + return pk.verifySignatureVt(msg, &SignatureVt{value: pop.value}, popDstVt) +} + +// Represents an MultiSignature in G1. A multisignature is used when multiple signatures +// are calculated over the same message vs an aggregate signature where each message signed +// is a unique. +type MultiSignatureVt struct { + value bls12381.G1 +} + +// Serialize a multi-signature to a byte array in compressed form. +// See +// https://github.com/zcash/librustzcash/blob/master/pairing/src/bls12_381/README.md#serialization +// https://docs.rs/bls12_381/0.1.1/bls12_381/notes/serialization/index.html +func (sig *MultiSignatureVt) MarshalBinary() ([]byte, error) { + out := sig.value.ToCompressed() + return out[:], nil +} + +// Check a multisignature is valid for a multipublickey and a message +func (sig *MultiSignatureVt) verify( + pk *MultiPublicKeyVt, + message []byte, + signDstVt string, +) (bool, error) { + if pk == nil { + return false, fmt.Errorf("public key cannot be nil") + } + p := &PublicKeyVt{value: pk.value} + return p.verifySignatureVt(message, &SignatureVt{value: sig.value}, signDstVt) +} + +// Deserialize a signature from a byte array in compressed form. 
+// See +// https://github.com/zcash/librustzcash/blob/master/pairing/src/bls12_381/README.md#serialization +// https://docs.rs/bls12_381/0.1.1/bls12_381/notes/serialization/index.html +// If successful, it will assign the Signature +// otherwise it will return an error +func (sig *MultiSignatureVt) UnmarshalBinary(data []byte) error { + if len(data) != SignatureVtSize { + return fmt.Errorf("multi signature must be %v bytes", SignatureSize) + } + s1 := new(SignatureVt) + err := s1.UnmarshalBinary(data) + if err != nil { + return err + } + sig.value = s1.value + return nil +} + +// Represents accumulated multiple Public Keys in G2 for verifying a multisignature +type MultiPublicKeyVt struct { + value bls12381.G2 +} + +// Serialize a public key to a byte array in compressed form. +// See +// https://github.com/zcash/librustzcash/blob/master/pairing/src/bls12_381/README.md#serialization +// https://docs.rs/bls12_381/0.1.1/bls12_381/notes/serialization/index.html +func (pk *MultiPublicKeyVt) MarshalBinary() ([]byte, error) { + out := pk.value.ToCompressed() + return out[:], nil +} + +// Deserialize a public key from a byte array in compressed form. +// See +// https://github.com/zcash/librustzcash/blob/master/pairing/src/bls12_381/README.md#serialization +// https://docs.rs/bls12_381/0.1.1/bls12_381/notes/serialization/index.html +// If successful, it will assign the public key +// otherwise it will return an error +func (pk *MultiPublicKeyVt) UnmarshalBinary(data []byte) error { + if len(data) != PublicKeyVtSize { + return fmt.Errorf("multi public key must be %v bytes", PublicKeySize) + } + p2 := new(PublicKeyVt) + err := p2.UnmarshalBinary(data) + if err != nil { + return err + } + pk.value = p2.value + return nil +} + +// Check a multisignature is valid for a multipublickey and a message +func (pk *MultiPublicKeyVt) verify( + message []byte, + sig *MultiSignatureVt, + signDstVt string, +) (bool, error) { + return sig.verify(pk, message, signDstVt) +} + +// PartialSignatureVt represents threshold Gap Diffie-Hellman BLS signature +// that can be combined with other partials to yield a completed BLS signature +// See section 3.2 in +type PartialSignatureVt struct { + identifier byte + signature bls12381.G1 +} + +// partialSignVt creates a partial signature that can be combined with other partial signatures +// to yield a complete signature +func (sks *SecretKeyShare) partialSignVt( + message []byte, + signDst string, +) (*PartialSignatureVt, error) { + if len(message) == 0 { + return nil, fmt.Errorf("message cannot be empty or nil") + } + p1 := new(bls12381.G1).Hash(native.EllipticPointHasherSha256(), message, []byte(signDst)) + + var blob [SecretKeySize]byte + copy(blob[:], internal.ReverseScalarBytes(sks.value)) + s, err := bls12381.Bls12381FqNew().SetBytes(&blob) + if err != nil { + return nil, err + } + result := new(bls12381.G1).Mul(p1, s) + if result.InCorrectSubgroup() == 0 { + return nil, fmt.Errorf("point is not on correct subgroup") + } + return &PartialSignatureVt{identifier: sks.identifier, signature: *result}, nil +} + +// combineSigsVt gathers partial signatures and yields a complete signature +func combineSigsVt(partials []*PartialSignatureVt) (*SignatureVt, error) { + if len(partials) < 2 { + return nil, fmt.Errorf("must have at least 2 partial signatures") + } + if len(partials) > 255 { + return nil, fmt.Errorf("unsupported to combine more than 255 signatures") + } + + xVars, yVars, err := splitXYVt(partials) + if err != nil { + return nil, err + } + + sTmp := 
new(bls12381.G1).Identity() + sig := new(bls12381.G1).Identity() + + // Lagrange interpolation + basis := bls12381.Bls12381FqNew() + for i, xi := range xVars { + basis.SetOne() + + for j, xj := range xVars { + if i == j { + continue + } + + num := bls12381.Bls12381FqNew().Neg(xj) // x - x_m + den := bls12381.Bls12381FqNew().Sub(xi, xj) // x_j - x_m + _, wasInverted := den.Invert(den) + // wasInverted == false if den == 0 + if !wasInverted { + return nil, fmt.Errorf("signatures cannot be recombined") + } + basis.Mul(basis, num.Mul(num, den)) + } + sTmp.Mul(yVars[i], basis) + sig.Add(sig, sTmp) + } + if sig.InCorrectSubgroup() == 0 { + return nil, fmt.Errorf("signature is not in the correct subgroup") + } + + return &SignatureVt{value: *sig}, nil +} + +// Ensure no duplicates x values and convert x values to field elements +func splitXYVt(partials []*PartialSignatureVt) ([]*native.Field, []*bls12381.G1, error) { + x := make([]*native.Field, len(partials)) + y := make([]*bls12381.G1, len(partials)) + + dup := make(map[byte]bool) + + for i, sp := range partials { + if sp == nil { + return nil, nil, fmt.Errorf("partial signature cannot be nil") + } + if _, exists := dup[sp.identifier]; exists { + return nil, nil, fmt.Errorf("duplicate signature included") + } + if sp.signature.InCorrectSubgroup() == 0 { + return nil, nil, fmt.Errorf("signature is not in the correct subgroup") + } + dup[sp.identifier] = true + x[i] = bls12381.Bls12381FqNew().SetUint64(uint64(sp.identifier)) + y[i] = &sp.signature + } + return x, y, nil +} diff --git a/signatures/bls/bls_sig/tiny_bls_sig_aug_test.go b/signatures/bls/bls_sig/tiny_bls_sig_aug_test.go new file mode 100644 index 0000000..3f5ed17 --- /dev/null +++ b/signatures/bls/bls_sig/tiny_bls_sig_aug_test.go @@ -0,0 +1,389 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package bls_sig + +import ( + "testing" + + "github.com/sonr-io/sonr/crypto/core/curves/native/bls12381" +) + +func generateAugSignatureG1(sk *SecretKey, msg []byte, t *testing.T) *SignatureVt { + bls := NewSigAugVt() + sig, err := bls.Sign(sk, msg) + if err != nil { + t.Errorf("Aug Sign failed") + } + return sig +} + +func generateAugAggregateDataG1(t *testing.T) ([]*PublicKeyVt, []*SignatureVt, [][]byte) { + msgs := make([][]byte, numAggregateG1) + pks := make([]*PublicKeyVt, numAggregateG1) + sigs := make([]*SignatureVt, numAggregateG1) + ikm := make([]byte, 32) + bls := NewSigAugVt() + + for i := 0; i < numAggregateG1; i++ { + readRand(ikm, t) + pk, sk, err := bls.KeygenWithSeed(ikm) + if err != nil { + t.Errorf("Aug KeyGen failed") + } + msg := make([]byte, 20) + readRand(msg, t) + sig := generateAugSignatureG1(sk, msg, t) + msgs[i] = msg + sigs[i] = sig + pks[i] = pk + } + return pks, sigs, msgs +} + +func TestAugKeyGenG1Works(t *testing.T) { + ikm := make([]byte, 32) + readRand(ikm, t) + bls := NewSigAugVt() + _, _, err := bls.KeygenWithSeed(ikm) + if err != nil { + t.Errorf("Aug KeyGen failed") + } +} + +func TestAugKeyGenG1Fail(t *testing.T) { + ikm := make([]byte, 10) + readRand(ikm, t) + bls := NewSigAugVt() + _, _, err := bls.KeygenWithSeed(ikm) + if err == nil { + t.Errorf("Aug KeyGen succeeded when it should've failed") + } +} + +func TestAugCustomDstG1(t *testing.T) { + ikm := make([]byte, 32) + readRand(ikm, t) + bls := NewSigAugVtWithDst("BLS_SIG_BLS12381G1_XMD:SHA-256_SSWU_RO_AUG_TEST") + pk, sk, err := bls.KeygenWithSeed(ikm) + if err != nil { + t.Errorf("Aug Custom Dst KeyGen failed") + } + + readRand(ikm, t) + sig, err := bls.Sign(sk, ikm) + if err != nil { + t.Errorf("Aug Custom Dst Sign failed") + } + + if res, _ := bls.Verify(pk, ikm, sig); !res { + t.Errorf("Aug Custom Dst Verify failed") + } + + ikm[0] += 1 + if res, _ := bls.Verify(pk, ikm, sig); res { + t.Errorf("Aug Custom Dst Verify succeeded when it should've failed.") + } +} + +func TestAugSigningG1(t *testing.T) { + ikm := make([]byte, 32) + readRand(ikm, t) + bls := NewSigAugVt() + pk, sk, err := bls.KeygenWithSeed(ikm) + if err != nil { + t.Errorf("Aug KeyGen failed") + } + + readRand(ikm, t) + sig := generateAugSignatureG1(sk, ikm, t) + + if res, _ := bls.Verify(pk, ikm, sig); !res { + t.Errorf("Aug Verify failed") + } + + ikm[0] += 1 + if res, _ := bls.Verify(pk, ikm, sig); res { + t.Errorf("Aug Verify succeeded when it should've failed.") + } +} + +func TestAugAggregateVerifyG1Works(t *testing.T) { + pks, sigs, msgs := generateAugAggregateDataG1(t) + bls := NewSigAugVt() + + if res, _ := bls.AggregateVerify(pks, msgs, sigs); !res { + t.Errorf("Aug AggregateVerify failed") + } +} + +func TestAugAggregateVerifyG1BadPks(t *testing.T) { + bls := NewSigAugVt() + pks, sigs, msgs := generateAugAggregateDataG1(t) + + if res, _ := bls.AggregateVerify(pks, msgs, sigs); !res { + t.Errorf("Aug AggregateVerify failed") + } + + pks[0] = pks[1] + + if res, _ := bls.AggregateVerify(pks, msgs, sigs); res { + t.Errorf("Aug AggregateVerify succeeded when it should've failed") + } + + // Try a zero key to make sure it doesn't crash + pkValue := new(bls12381.G2).Identity() + pks[0] = &PublicKeyVt{value: *pkValue} + + if res, _ := bls.AggregateVerify(pks, msgs, sigs); res { + t.Errorf("Aug AggregateVerify succeeded with zero byte public key it should've failed") + } + + // Try with base generator + pkValue.Generator() + pks[0] = &PublicKeyVt{value: *pkValue} + if res, _ := 
bls.AggregateVerify(pks, msgs, sigs); res { + t.Errorf( + "Aug aggregateVerify succeeded with the base generator public key it should've failed", + ) + } +} + +func TestAugAggregateVerifyG1BadSigs(t *testing.T) { + bls := NewSigAugVt() + pks, sigs, msgs := generateAugAggregateDataG1(t) + + if res, _ := bls.AggregateVerify(pks, msgs, sigs); !res { + t.Errorf("Aug aggregateVerify failed") + } + + sigs[0] = sigs[1] + + if res, _ := bls.AggregateVerify(pks, msgs, sigs); res { + t.Errorf("Aug aggregateVerify succeeded when it should've failed") + } + + // Try a zero signature to make sure it doesn't crash + sigValue := new(bls12381.G1).Identity() + sigs[0] = &SignatureVt{value: *sigValue} + + if res, _ := bls.AggregateVerify(pks, msgs, sigs); res { + t.Errorf("Aug aggregateVerify succeeded with zero byte signature it should've failed") + } + + // Try with base generator + sigValue.Generator() + sigs[0] = &SignatureVt{value: *sigValue} + if res, _ := bls.AggregateVerify(pks, msgs, sigs); res { + t.Errorf( + "Aug aggregateVerify succeeded with the base generator signature it should've failed", + ) + } +} + +func TestAugAggregateVerifyG1BadMsgs(t *testing.T) { + bls := NewSigAugVt() + pks, sigs, msgs := generateAugAggregateDataG1(t) + + if res, _ := bls.AggregateVerify(pks, msgs, sigs); !res { + t.Errorf("Aug aggregateVerify failed") + } + + // Test len(pks) != len(msgs) + if res, _ := bls.AggregateVerify(pks, msgs[0:8], sigs); res { + t.Errorf("Aug aggregateVerify succeeded when it should've failed") + } + + msgs[0] = msgs[1] + + if res, _ := bls.AggregateVerify(pks, msgs, sigs); res { + t.Errorf("Aug aggregateVerify succeeded when it should've failed") + } +} + +func TestAugAggregateVerifyG1DupMsg(t *testing.T) { + bls := NewSigAugVt() + + // Only two messages but repeated + messages := make([][]byte, numAggregateG1) + messages[0] = []byte("Yes") + messages[1] = []byte("No") + for i := 2; i < numAggregateG1; i++ { + messages[i] = messages[i%2] + } + + pks := make([]*PublicKeyVt, numAggregateG1) + sigs := make([]*SignatureVt, numAggregateG1) + + ikm := make([]byte, 32) + for i := 0; i < numAggregateG1; i++ { + readRand(ikm, t) + pk, sk, err := bls.KeygenWithSeed(ikm) + if err != nil { + t.Errorf("Aug KeyGen failed") + } + + sig := generateAugSignatureG1(sk, messages[i], t) + pks[i] = pk + sigs[i] = sig + } + + if res, _ := bls.AggregateVerify(pks, messages, sigs); !res { + t.Errorf("Aug aggregateVerify failed for duplicate messages") + } +} + +func TestBlsAugG1KeyGen(t *testing.T) { + bls := NewSigAugVt() + _, _, err := bls.Keygen() + if err != nil { + t.Errorf("Keygen failed: %v", err) + } +} + +func TestAugVtThresholdKeygenBadInputs(t *testing.T) { + bls := NewSigAugVt() + _, _, err := bls.ThresholdKeygen(0, 0) + if err == nil { + t.Errorf("ThresholdKeygen should've failed but succeeded") + } + _, _, err = bls.ThresholdKeygen(1, 0) + if err == nil { + t.Errorf("ThresholdKeygen should've failed but succeeded") + } + _, _, err = bls.ThresholdKeygen(3, 2) + if err == nil { + t.Errorf("ThresholdKeygen should've failed but succeeded") + } +} + +func TestAugVtThresholdKeygen(t *testing.T) { + bls := NewSigAugVt() + _, sks, err := bls.ThresholdKeygen(3, 5) + if err != nil { + t.Errorf("ThresholdKeygen failed") + } + if len(sks) != 5 { + t.Errorf("ThresholdKeygen did not produce enough shares") + } +} + +func TestAugPartialSignVt(t *testing.T) { + ikm := make([]byte, 32) + bls := NewSigAugVt() + pk, sks, err := bls.ThresholdKeygenWithSeed(ikm, 2, 4) + if err != nil { + t.Errorf("ThresholdKeygen 
failed") + } + msg := make([]byte, 10) + sig1, err := bls.PartialSign(sks[0], pk, msg) + if err != nil { + t.Errorf("partialSign failed: %v", err) + } + sig2, err := bls.PartialSign(sks[1], pk, msg) + if err != nil { + t.Errorf("partialSign failed: %v", err) + } + sig, err := bls.CombineSignatures(sig1, sig2) + if err != nil { + t.Errorf("CombineSignatures failed: %v", err) + } + + if res, _ := bls.Verify(pk, msg, sig); !res { + t.Errorf("Combined signature does not verify") + } + + sig, err = bls.CombineSignatures(sig1) + if err == nil { + t.Errorf("CombineSignatures failed: %v", err) + } + if res, _ := bls.Verify(pk, msg, sig); res { + t.Errorf("Combined signature verify succeeded when it should've failed") + } +} + +// Ensure that mixed partial signatures from distinct origins create invalid composite signatures +func TestAugVtPartialMixupShares(t *testing.T) { + total := uint(5) + ikm := make([]byte, 32) + bls := NewSigAugVt() + pk1, sks1, err := bls.ThresholdKeygenWithSeed(ikm, 3, total) + if err != nil { + t.Errorf("ThresholdKeygen failed: %v", err) + } + for i := range ikm { + ikm[i] = 1 + } + pk2, sks2, err := bls.ThresholdKeygenWithSeed(ikm, 3, total) + if err != nil { + t.Errorf("ThresholdKeygen failed: %v", err) + } + + // Generate partial signatures for both sets of keys + msg := make([]byte, 10) + sigs1 := make([]*PartialSignatureVt, total) + sigs2 := make([]*PartialSignatureVt, total) + for i := range sks1 { + sigs1[i], err = bls.PartialSign(sks1[i], pk1, msg) + if err != nil { + t.Errorf("PartialSign failed: %v", err) + } + sigs2[i], err = bls.PartialSign(sks2[i], pk2, msg) + if err != nil { + t.Errorf("PartialSign failed: %v", err) + } + } + + // Try combining 2 from group 1 and 2 from group 2 + sig, err := bls.CombineSignatures(sigs1[0], sigs1[1], sigs2[2], sigs2[3]) + if err != nil { + t.Errorf("CombineSignatures failed: %v", err) + } + // Signature shouldn't validate + if res, _ := bls.Verify(pk1, msg, sig); res { + t.Errorf( + "CombineSignatures worked with different shares of two secret keys for the same message", + ) + } + if res, _ := bls.Verify(pk2, msg, sig); res { + t.Errorf( + "CombineSignatures worked with different shares of two secret keys for the same message", + ) + } + // Should error out due to duplicate identifiers + _, err = bls.CombineSignatures(sigs1[0], sigs1[1], sigs2[0], sigs2[1]) + if err == nil { + t.Errorf("CombineSignatures expected to fail but succeeded.") + } +} + +func TestEmptyMessageAugPartialSignVt(t *testing.T) { + bls := NewSigAugVt() + pk, sks, err := bls.ThresholdKeygen(5, 6) + if err != nil { + t.Errorf("ThresholdKeygen failed") + } + + // Sign an empty message + _, err = bls.PartialSign(sks[0], pk, []byte{}) + if err == nil { + t.Errorf("Expected partial sign to fail on empty message") + } +} + +func TestNilMessageAugPartialSignVt(t *testing.T) { + bls := NewSigAugVt() + pk, sks, err := bls.ThresholdKeygen(5, 6) + if err != nil { + t.Errorf("ThresholdKeygen failed") + } + + // Sign nil message + _, err = bls.PartialSign(sks[0], pk, nil) + if err == nil { + t.Errorf("Expected partial signing to fail on nil message") + } +} diff --git a/signatures/bls/bls_sig/tiny_bls_sig_basic_test.go b/signatures/bls/bls_sig/tiny_bls_sig_basic_test.go new file mode 100644 index 0000000..a0c0494 --- /dev/null +++ b/signatures/bls/bls_sig/tiny_bls_sig_basic_test.go @@ -0,0 +1,385 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package bls_sig + +import ( + "testing" + + "github.com/sonr-io/sonr/crypto/core/curves/native/bls12381" +) + +func generateBasicSignatureG1(sk *SecretKey, msg []byte, t *testing.T) *SignatureVt { + bls := NewSigBasicVt() + sig, err := bls.Sign(sk, msg) + if err != nil { + t.Errorf("Basic Sign failed") + } + return sig +} + +func generateBasicAggregateDataG1(t *testing.T) ([]*PublicKeyVt, []*SignatureVt, [][]byte) { + msgs := make([][]byte, numAggregateG1) + pks := make([]*PublicKeyVt, numAggregateG1) + sigs := make([]*SignatureVt, numAggregateG1) + ikm := make([]byte, 32) + bls := NewSigBasicVt() + + for i := 0; i < numAggregateG1; i++ { + readRand(ikm, t) + pk, sk, err := bls.KeygenWithSeed(ikm) + if err != nil { + t.Errorf("Basic KeyGen failed") + } + msg := make([]byte, 20) + readRand(msg, t) + sig := generateBasicSignatureG1(sk, msg, t) + msgs[i] = msg + sigs[i] = sig + pks[i] = pk + } + return pks, sigs, msgs +} + +func TestBasicKeyGenG1Works(t *testing.T) { + ikm := make([]byte, 32) + readRand(ikm, t) + bls := NewSigBasicVt() + _, _, err := bls.KeygenWithSeed(ikm) + if err != nil { + t.Errorf("Basic KeyGen failed") + } +} + +func TestBasicKeyGenG1Fail(t *testing.T) { + ikm := make([]byte, 10) + readRand(ikm, t) + bls := NewSigBasicVt() + _, _, err := bls.KeygenWithSeed(ikm) + if err == nil { + t.Errorf("Basic KeyGen succeeded when it should've failed") + } +} + +func TestBasicCustomDstG1(t *testing.T) { + ikm := make([]byte, 32) + readRand(ikm, t) + bls := NewSigBasicVtWithDst("BLS_SIG_BLS12381G1_XMD:SHA-256_SSWU_RO_NUL_TEST") + pk, sk, err := bls.KeygenWithSeed(ikm) + if err != nil { + t.Errorf("Basic Custom Dst KeyGen failed") + } + + readRand(ikm, t) + sig, err := bls.Sign(sk, ikm) + if err != nil { + t.Errorf("Basic Custom Dst Sign failed") + } + + if res, _ := bls.Verify(pk, ikm, sig); !res { + t.Errorf("Basic Custon Dst Verify failed") + } + + ikm[0] += 1 + if res, _ := bls.Verify(pk, ikm, sig); res { + t.Errorf("Basic Custom Dst Verify succeeded when it should've failed.") + } +} + +func TestBasicSigningG1(t *testing.T) { + ikm := make([]byte, 32) + readRand(ikm, t) + bls := NewSigBasicVt() + pk, sk, err := bls.KeygenWithSeed(ikm) + if err != nil { + t.Errorf("Basic KeyGen failed") + } + + readRand(ikm, t) + sig := generateBasicSignatureG1(sk, ikm, t) + + if res, _ := bls.Verify(pk, ikm, sig); !res { + t.Errorf("Basic Verify failed") + } + + ikm[0] += 1 + if res, _ := bls.Verify(pk, ikm, sig); res { + t.Errorf("Basic Verify succeeded when it should've failed.") + } +} + +func TestBasicSigningEmptyMessage(t *testing.T) { + bls := NewSigBasicVt() + _, sk, err := bls.Keygen() + if err != nil { + t.Errorf("Basic KeyGen failed") + } + + // Sign an empty message + _, err = bls.Sign(sk, []byte{}) + if err != nil { + t.Errorf("Expected signing message to succeed: %v", err) + } +} + +func TestBasicSigningNilMessage(t *testing.T) { + bls := NewSigBasicVt() + _, sk, err := bls.Keygen() + if err != nil { + t.Errorf("Basic KeyGen failed") + } + + // Sign nil message + _, err = bls.Sign(sk, nil) + if err == nil { + t.Errorf("Expected signing empty message to fail") + } +} + +func TestBasicAggregateVerifyG1Works(t *testing.T) { + pks, sigs, msgs := generateBasicAggregateDataG1(t) + bls := NewSigBasicVt() + + if res, _ := bls.AggregateVerify(pks, msgs, sigs); !res { + t.Errorf("Basic AggregateVerify failed") + } +} + +func TestBasicAggregateVerifyG1BadPks(t *testing.T) { + bls := NewSigBasicVt() + pks, sigs, msgs := 
generateBasicAggregateDataG1(t) + + if res, _ := bls.AggregateVerify(pks, msgs, sigs); !res { + t.Errorf("Basic aggregateVerify failed") + } + + pks[0] = pks[1] + + if res, _ := bls.AggregateVerify(pks, msgs, sigs); res { + t.Errorf("Basic aggregateVerify succeeded when it should've failed") + } + + // Try a zero key to make sure it doesn't crash + pkValue := new(bls12381.G2).Identity() + pks[0] = &PublicKeyVt{value: *pkValue} + + if res, _ := bls.AggregateVerify(pks, msgs, sigs); res { + t.Errorf("Basic aggregateVerify succeeded with zero byte public key it should've failed") + } + + // Try with base generator + pkValue.Generator() + pks[0] = &PublicKeyVt{value: *pkValue} + if res, _ := bls.AggregateVerify(pks, msgs, sigs); res { + t.Errorf( + "Basic aggregateVerify succeeded with the base generator public key it should've failed", + ) + } +} + +func TestBasicAggregateVerifyG1BadSigs(t *testing.T) { + bls := NewSigBasicVt() + pks, sigs, msgs := generateBasicAggregateDataG1(t) + + if res, _ := bls.AggregateVerify(pks, msgs, sigs); !res { + t.Errorf("Basic aggregateVerify failed") + } + + sigs[0] = sigs[1] + + if res, _ := bls.AggregateVerify(pks, msgs, sigs); res { + t.Errorf("Basic aggregateVerify succeeded when it should've failed") + } + + // Try a zero key to make sure it doesn't crash + g1 := new(bls12381.G1).Identity() + sigValue := g1.Identity() + sigs[0] = &SignatureVt{value: *sigValue} + + if res, _ := bls.AggregateVerify(pks, msgs, sigs); res { + t.Errorf("Basic aggregateVerify succeeded with zero byte signature it should've failed") + } + + // Try with base generator + sigValue = g1.Generator() + sigs[0] = &SignatureVt{value: *sigValue} + if res, _ := bls.AggregateVerify(pks, msgs, sigs); res { + t.Errorf( + "Basic aggregateVerify succeeded with the base generator signature it should've failed", + ) + } +} + +func TestBasicAggregateVerifyG1BadMsgs(t *testing.T) { + bls := NewSigBasicVt() + pks, sigs, msgs := generateBasicAggregateDataG1(t) + + if res, _ := bls.AggregateVerify(pks, msgs, sigs); !res { + t.Errorf("Basic aggregateVerify failed") + } + + msgs[0] = msgs[1] + + if res, _ := bls.AggregateVerify(pks, msgs, sigs); res { + t.Errorf("Basic aggregateVerify succeeded when it should've failed") + } +} + +func TestBasicVtThresholdKeygenBadInputs(t *testing.T) { + bls := NewSigBasicVt() + _, _, err := bls.ThresholdKeygen(0, 0) + if err == nil { + t.Errorf("ThresholdKeygen should've failed but succeeded") + } + _, _, err = bls.ThresholdKeygen(0, 1) + if err == nil { + t.Errorf("ThresholdKeygen should've failed but succeeded") + } + _, _, err = bls.ThresholdKeygen(3, 2) + if err == nil { + t.Errorf("ThresholdKeygen should've failed but succeeded") + } +} + +func TestBasicVtThresholdKeygen(t *testing.T) { + bls := NewSigBasicVt() + _, sks, err := bls.ThresholdKeygen(3, 5) + if err != nil { + t.Errorf("Keygen failed: %v", err) + } + if len(sks) != 5 { + t.Errorf("ThresholdKeygen did not produce enough shares") + } +} + +func TestBasicPartialSignVt(t *testing.T) { + ikm := make([]byte, 32) + bls := NewSigBasicVt() + pk, sks, err := bls.ThresholdKeygenWithSeed(ikm, 2, 4) + if err != nil { + t.Errorf("ThresholdKeygen failed") + } + msg := make([]byte, 10) + sig1, err := bls.PartialSign(sks[0], msg) + if err != nil { + t.Errorf("partialSign failed: %v", err) + } + sig2, err := bls.PartialSign(sks[1], msg) + if err != nil { + t.Errorf("partialSign failed: %v", err) + } + sig, err := bls.CombineSignatures(sig1, sig2) + if err != nil { + t.Errorf("CombineSignatures failed: %v", err) + } + 
+ if res, _ := bls.Verify(pk, msg, sig); !res { + t.Errorf("Combined signature does not verify") + } + + sig, err = bls.CombineSignatures(sig1) + if err == nil { + t.Errorf("CombineSignatures succeeded when it should've failed") + } + if res, _ := bls.Verify(pk, msg, sig); res { + t.Errorf("Combined signature verify succeeded when it should've failed") + } +} + +// Ensure that mixed partial signatures from distinct origins create invalid composite signatures +func TestBasicVtPartialMixupShares(t *testing.T) { + total := uint(5) + ikm := make([]byte, 32) + bls := NewSigBasicVt() + pk1, sks1, err := bls.ThresholdKeygenWithSeed(ikm, 3, total) + if err != nil { + t.Errorf("ThresholdKeygen failed: %v", err) + } + for i := range ikm { + ikm[i] = 1 + } + pk2, sks2, err := bls.ThresholdKeygenWithSeed(ikm, 3, total) + if err != nil { + t.Errorf("ThresholdKeygen failed: %v", err) + } + + // Generate partial signatures for both sets of keys + msg := make([]byte, 10) + sigs1 := make([]*PartialSignatureVt, total) + sigs2 := make([]*PartialSignatureVt, total) + for i := range sks1 { + sigs1[i], err = bls.PartialSign(sks1[i], msg) + if err != nil { + t.Errorf("PartialSign failed: %v", err) + } + sigs2[i], err = bls.PartialSign(sks2[i], msg) + if err != nil { + t.Errorf("PartialSign failed: %v", err) + } + } + + // Try combining 2 from group 1 and 2 from group 2 + sig, err := bls.CombineSignatures(sigs1[0], sigs1[1], sigs2[2], sigs2[3]) + if err != nil { + t.Errorf("CombineSignatures failed: %v", err) + } + // Signature shouldn't validate + if res, _ := bls.Verify(pk1, msg, sig); res { + t.Errorf( + "CombineSignatures worked with different shares of two secret keys for the same message", + ) + } + if res, _ := bls.Verify(pk2, msg, sig); res { + t.Errorf( + "CombineSignatures worked with different shares of two secret keys for the same message", + ) + } + // Should error out due to duplicate identifiers + _, err = bls.CombineSignatures(sigs1[0], sigs1[1], sigs2[0], sigs2[1]) + if err == nil { + t.Errorf("CombineSignatures expected to fail but succeeded.") + } +} + +func TestIdentityPublicKeyVt(t *testing.T) { + bls := NewSigBasicVt() + _, sk, err := bls.Keygen() + if err != nil { + t.Errorf("Keygen failed: %v", err) + } + msg := []byte{0, 0, 0, 0} + sig, _ := bls.Sign(sk, msg) + pk := PublicKeyVt{value: *new(bls12381.G2).Identity()} + if res, _ := bls.Verify(&pk, msg, sig); res { + t.Errorf("Verify succeeded when the public key is the identity.") + } +} + +func TestThresholdSignTooHighAndLowVt(t *testing.T) { + bls := NewSigBasicVt() + _, sks, err := bls.ThresholdKeygen(3, 5) + if err != nil { + t.Errorf("ThresholdKeygen failed: %v", err) + } + msg := make([]byte, 10) + + ps, err := bls.PartialSign(sks[0], msg) + if err != nil { + t.Errorf("PartialSign failed: %v", err) + } + + _, err = bls.CombineSignatures(ps) + if err == nil { + t.Errorf("CombinSignatures succeeded when it should've failed") + } + + pss := make([]*PartialSignatureVt, 256) + + _, err = bls.CombineSignatures(pss...) + if err == nil { + t.Errorf("CombinSignatures succeeded when it should've failed") + } +} diff --git a/signatures/bls/bls_sig/tiny_bls_sig_pop_test.go b/signatures/bls/bls_sig/tiny_bls_sig_pop_test.go new file mode 100644 index 0000000..4970d06 --- /dev/null +++ b/signatures/bls/bls_sig/tiny_bls_sig_pop_test.go @@ -0,0 +1,962 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package bls_sig + +import ( + "bytes" + "math/big" + "math/rand" + "testing" + + "github.com/sonr-io/sonr/crypto/core/curves/native/bls12381" +) + +const numAggregateG1 = 10 + +func TestGetPublicKeyG2(t *testing.T) { + sk := genSecretKey(t) + pk := genPublicKeyVt(sk, t) + actual := marshalStruct(pk, t) + expected := []byte{ + 175, + 76, + 33, + 103, + 184, + 172, + 12, + 111, + 24, + 87, + 84, + 61, + 243, + 82, + 99, + 76, + 131, + 95, + 171, + 237, + 145, + 143, + 7, + 93, + 205, + 148, + 104, + 29, + 153, + 103, + 187, + 206, + 112, + 223, + 252, + 198, + 102, + 41, + 38, + 244, + 228, + 223, + 102, + 16, + 216, + 152, + 231, + 250, + 7, + 111, + 90, + 98, + 194, + 244, + 101, + 251, + 69, + 130, + 11, + 209, + 41, + 210, + 133, + 105, + 217, + 179, + 190, + 1, + 6, + 155, + 135, + 2, + 168, + 249, + 253, + 41, + 59, + 87, + 8, + 49, + 231, + 198, + 142, + 30, + 186, + 44, + 175, + 17, + 198, + 63, + 210, + 176, + 237, + 171, + 11, + 127, + } + if !bytes.Equal(actual, expected) { + t.Errorf("Expected GetPublicKeyVt to pass but failed.") + } +} + +func testSignG1(message []byte, t *testing.T) { + sk := genSecretKey(t) + sig := genSignatureVt(sk, message, t) + pk := genPublicKeyVt(sk, t) + + bls := NewSigPopVt() + + if res, _ := bls.Verify(pk, message, sig); !res { + t.Errorf("createSignatureVt failed when it should've passed.") + } +} + +func TestSignG1EmptyNilMessage(t *testing.T) { + sk := genSecretKey(t) + bls := NewSigPopVt() + sig, _ := bls.Sign(sk, nil) + pk := genPublicKeyVt(sk, t) + + if res, _ := bls.Verify(pk, nil, sig); res { + t.Errorf("createSignature succeeded when it should've failed") + } + + message := []byte{} + sig = genSignatureVt(sk, message, t) + + if res, err := bls.Verify(pk, message, sig); !res { + t.Errorf("create and verify failed on empty message: %v", err) + } +} + +func TestSignG1OneByteMessage(t *testing.T) { + message := []byte{1} + testSignG1(message, t) +} + +func TestSignG1LargeMessage(t *testing.T) { + message := make([]byte, 1048576) + testSignG1(message, t) +} + +func TestSignG1RandomMessage(t *testing.T) { + message := make([]byte, 65537) + readRand(message, t) + testSignG1(message, t) +} + +func TestSignG1BadMessage(t *testing.T) { + message := make([]byte, 1024) + sk := genSecretKey(t) + sig := genSignatureVt(sk, message, t) + pk := genPublicKeyVt(sk, t) + message = []byte{1, 2, 3, 4, 5, 6, 7, 8, 9, 0} + + bls := NewSigPopVt() + + if res, _ := bls.Verify(pk, message, sig); res { + t.Errorf("Expected signature to not verify") + } +} + +func TestBadConversionsG1(t *testing.T) { + sk := genSecretKey(t) + message := []byte{1, 2, 3, 4, 5, 6, 7, 8, 9, 0} + sig := genSignatureVt(sk, message, t) + pk := genPublicKeyVt(sk, t) + + bls := NewSigPopVt() + + if res, _ := bls.Verify(pk, message, sig); !res { + t.Errorf("Signature should be valid") + } + if res, _ := sig.verify(pk, message, blsSignaturePopVtDst); !res { + t.Errorf("Signature should be valid") + } + + // Convert public key to signature in G2 + sig2 := new(Signature) + err := sig2.UnmarshalBinary(marshalStruct(pk, t)) + if err != nil { + t.Errorf("Should be able to convert to signature in G2") + } + pk2 := new(PublicKey) + err = pk2.UnmarshalBinary(marshalStruct(sig, t)) + if err != nil { + t.Errorf("Should be able to convert to public key in G1") + } + + res, _ := pk2.verifySignature(message, sig2, blsSignaturePopVtDst) + if res { + t.Errorf("The signature shouldn't verify") + } +} + +func TestAggregatePublicKeysG2(t *testing.T) { + pks := 
[]*PublicKeyVt{} + ikm := make([]byte, 32) + for i := 0; i < 20; i++ { + readRand(ikm, t) + sk := genRandSecretKey(ikm, t) + pk := genPublicKeyVt(sk, t) + pks = append(pks, pk) + } + apk1, err := aggregatePublicKeysVt(pks...) + if err != nil { + t.Errorf("%v", err) + } + rng := rand.New(rand.NewSource(1234567890)) + rng.Shuffle(len(pks), func(i, j int) { pks[i], pks[j] = pks[j], pks[i] }) + apk2, err := aggregatePublicKeysVt(pks...) + if err != nil { + t.Errorf("%v", err) + } + if !bytes.Equal(marshalStruct(apk1, t), marshalStruct(apk2, t)) { + t.Errorf("Aggregated public keys should be equal") + } + rand.Shuffle(len(pks), func(i, j int) { pks[i], pks[j] = pks[j], pks[i] }) + apk1, err = aggregatePublicKeysVt(pks...) + if err != nil { + t.Errorf("%v", err) + } + if !bytes.Equal(marshalStruct(apk1, t), marshalStruct(apk2, t)) { + t.Errorf("Aggregated public keys should be equal") + } +} + +func TestAggregateSignaturesG1(t *testing.T) { + sigs := []*SignatureVt{} + ikm := make([]byte, 32) + for i := 0; i < 20; i++ { + readRand(ikm, t) + sk := genRandSecretKey(ikm, t) + sig := genSignatureVt(sk, ikm, t) + sigs = append(sigs, sig) + } + asig1, err := aggregateSignaturesVt(sigs...) + if err != nil { + t.Errorf("%v", err) + } + rng2 := rand.New(rand.NewSource(1234567890)) + rng2.Shuffle(len(sigs), func(i, j int) { sigs[i], sigs[j] = sigs[j], sigs[i] }) + asig2, err := aggregateSignaturesVt(sigs...) + if err != nil { + t.Errorf("%v", err) + } + if !bytes.Equal(marshalStruct(asig1, t), marshalStruct(asig2, t)) { + t.Errorf("Aggregated signatures should be equal") + } + rand.Shuffle(len(sigs), func(i, j int) { sigs[i], sigs[j] = sigs[j], sigs[i] }) + asig1, err = aggregateSignaturesVt(sigs...) + if err != nil { + t.Errorf("%v", err) + } + if !bytes.Equal(marshalStruct(asig1, t), marshalStruct(asig2, t)) { + t.Errorf("Aggregated signatures should be equal") + } +} + +func initAggregatedTestValuesG1(messages [][]byte, t *testing.T) ([]*PublicKeyVt, []*SignatureVt) { + pks := []*PublicKeyVt{} + sigs := []*SignatureVt{} + ikm := make([]byte, 32) + for i := 0; i < numAggregateG1; i++ { + readRand(ikm, t) + sk := genRandSecretKey(ikm, t) + sig := genSignatureVt(sk, messages[i%len(messages)], t) + sigs = append(sigs, sig) + pk := genPublicKeyVt(sk, t) + pks = append(pks, pk) + } + return pks, sigs +} + +func TestAggregatedFunctionalityG1(t *testing.T) { + message := make([]byte, 20) + messages := make([][]byte, 1) + messages[0] = message + pks, sigs := initAggregatedTestValuesG1(messages, t) + + bls := NewSigPopVt() + asig, err := bls.AggregateSignatures(sigs...) + if err != nil { + t.Errorf("%v", err) + } + apk, err := bls.AggregatePublicKeys(pks...) + if err != nil { + t.Errorf("%v", err) + } + if res, _ := bls.VerifyMultiSignature(apk, message, asig); !res { + t.Errorf("Should verify aggregated signatures with same message") + } + if res, _ := asig.verify(apk, message, blsSignaturePopVtDst); !res { + t.Errorf("MultiSignature.verify failed.") + } + if res, _ := apk.verify(message, asig, blsSignaturePopVtDst); !res { + t.Errorf("MultiPublicKey.verify failed.") + } +} + +func TestBadAggregatedFunctionalityG1(t *testing.T) { + message := make([]byte, 20) + messages := make([][]byte, 1) + messages[0] = message + pks, sigs := initAggregatedTestValuesG1(messages, t) + + bls := NewSigPopVt() + + apk, err := bls.AggregatePublicKeys(pks[2:]...) + if err != nil { + t.Errorf("%v", err) + } + asig, err := bls.AggregateSignatures(sigs...) 
+ if err != nil { + t.Errorf("%v", err) + } + + if res, _ := bls.VerifyMultiSignature(apk, message, asig); res { + t.Errorf( + "Should not verify aggregated signatures with same message when some public keys are missing", + ) + } + + apk, err = bls.AggregatePublicKeys(pks...) + if err != nil { + t.Errorf("%v", err) + } + asig, err = bls.AggregateSignatures(sigs[2:]...) + if err != nil { + t.Errorf("%v", err) + } + if res, _ := bls.VerifyMultiSignature(apk, message, asig); res { + t.Errorf( + "Should not verify aggregated signatures with same message when some signatures are missing", + ) + } + + asig, err = bls.AggregateSignatures(sigs...) + if err != nil { + t.Errorf("%v", err) + } + + badmsg := []byte{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20} + if res, _ := bls.VerifyMultiSignature(apk, badmsg, asig); res { + t.Errorf("Should not verify aggregated signature with bad message") + } +} + +func TestAggregateVerifyG1Pass(t *testing.T) { + messages := make([][]byte, numAggregateG1) + for i := 0; i < numAggregateG1; i++ { + message := make([]byte, 20) + readRand(message, t) + messages[i] = message + } + pks, sigs := initAggregatedTestValuesG1(messages, t) + bls := NewSigPopVt() + if res, _ := bls.AggregateVerify(pks, messages, sigs); !res { + t.Errorf("Expected aggregateVerify to pass but failed") + } +} + +func TestAggregateVerifyG1MsgSigCntMismatch(t *testing.T) { + messages := make([][]byte, 8) + for i := 0; i < 8; i++ { + message := make([]byte, 20) + readRand(message, t) + messages[i] = message + } + + pks, sigs := initAggregatedTestValuesG1(messages, t) + bls := NewSigPopVt() + if res, _ := bls.AggregateVerify(pks, messages, sigs); res { + t.Errorf("Expected AggregateVerifyG1 to fail with duplicate message but passed") + } +} + +func TestAggregateVerifyG1FailDupMsg(t *testing.T) { + messages := make([][]byte, 10) + for i := 0; i < 9; i++ { + message := make([]byte, 20) + readRand(message, t) + messages[i] = message + } + // Duplicate message + messages[9] = messages[0] + pks, sigs := initAggregatedTestValuesG1(messages, t) + bls := NewSigPopVt() + if res, _ := bls.AggregateVerify(pks, messages, sigs); res { + t.Errorf("Expected aggregateVerify to fail with duplicate message but passed") + } +} + +func TestAggregateVerifyG1FailIncorrectMsg(t *testing.T) { + messages := make([][]byte, 10) + for i := 0; i < 9; i++ { + message := make([]byte, 20) + readRand(message, t) + messages[i] = message + } + // Duplicate message + messages[9] = messages[0] + pks, sigs := initAggregatedTestValuesG1(messages, t) + bls := NewSigPopVt() + if res, _ := bls.AggregateVerify(pks[2:], messages[2:], sigs); res { + t.Errorf("Expected aggregateVerify to fail with duplicate message but passed") + } +} + +func TestAggregateVerifyG1OneMsg(t *testing.T) { + messages := make([][]byte, 1) + messages[0] = make([]byte, 20) + sk := genSecretKey(t) + sig := genSignatureVt(sk, messages[0], t) + pk := genPublicKeyVt(sk, t) + bls := NewSigPopVt() + // Should be the same as verifySignatureVt + if res, _ := bls.AggregateVerify([]*PublicKeyVt{pk}, messages, []*SignatureVt{sig}); !res { + t.Errorf("Expected AggregateVerifyG1OneMsg to pass but failed") + } +} + +func TestVerifyG1Mutability(t *testing.T) { + // verify should not change any inputs + ikm := make([]byte, 32) + ikm_copy := make([]byte, 32) + readRand(ikm, t) + copy(ikm_copy, ikm) + bls := NewSigPopVt() + pk, sk, err := bls.KeygenWithSeed(ikm) + + if !bytes.Equal(ikm, ikm_copy) { + t.Errorf("SigPopVt.KeygenWithSeed modifies ikm") + } + if err 
!= nil { + t.Errorf("Expected KeygenWithSeed to succeed but failed.") + } + sig, err := bls.Sign(sk, ikm) + if !bytes.Equal(ikm, ikm_copy) { + t.Errorf("SigPopVt.Sign modifies message") + } + if err != nil { + t.Errorf("SigPopVt.KeygenWithSeed to succeed but failed.") + } + sigCopy := marshalStruct(sig, t) + if res, _ := bls.Verify(pk, ikm, sig); !res { + t.Errorf("Expected verify to succeed but failed.") + } + if !bytes.Equal(ikm, ikm_copy) { + t.Errorf("SigPopVt.verify modifies message") + } + if !bytes.Equal(sigCopy, marshalStruct(sig, t)) { + t.Errorf("SigPopVt.verify modifies signature") + } +} + +func TestPublicKeyG2FromBadBytes(t *testing.T) { + pk := make([]byte, 32) + err := new(PublicKeyVt).UnmarshalBinary(pk) + if err == nil { + t.Errorf("Expected PublicKeyG2FromBytes to fail but passed") + } + // All zeros + pk = make([]byte, PublicKeyVtSize) + // See https://github.com/zcash/librustzcash/blob/master/pairing/src/bls12_381/README.md#serialization + // 1 << 7 == compressed + // 1 << 6 == infinity or zero + pk[0] = 0xc0 + err = new(PublicKeyVt).UnmarshalBinary(pk) + if err == nil { + t.Errorf("Expected PublicKeyG2FromBytes to fail but passed") + } + sk := genSecretKey(t) + pk1, err := sk.GetPublicKeyVt() + if err != nil { + t.Errorf("Expected GetPublicKeyVt to pass but failed.") + } + out := marshalStruct(pk1, t) + out[3] += 1 + err = new(PublicKeyVt).UnmarshalBinary(pk) + if err == nil { + t.Errorf("Expected PublicKeyG2FromBytes to fail but passed") + } +} + +func TestSignatureG1FromBadBytes(t *testing.T) { + sig := make([]byte, 32) + err := new(SignatureVt).UnmarshalBinary(sig) + if err == nil { + t.Errorf("Expected SignatureG1FromBytes to fail but passed") + } + // All zeros + sig = make([]byte, SignatureVtSize) + // See https://github.com/zcash/librustzcash/blob/master/pairing/src/bls12_381/README.md#serialization + // 1 << 7 == compressed + // 1 << 6 == infinity or zero + sig[0] = 0xc0 + err = new(SignatureVt).UnmarshalBinary(sig) + if err == nil { + t.Errorf("Expected SignatureG1FromBytes to fail but passed") + } +} + +func TestBadSecretKeyG1(t *testing.T) { + sk := &SecretKey{value: bls12381.Bls12381FqNew()} + pk, err := sk.GetPublicKeyVt() + if err == nil { + t.Errorf("Expected GetPublicKeyVt to fail with 0 byte secret key but passed: %v", pk) + } + _ = sk.UnmarshalBinary(sk.value.Params.BiModulus.Bytes()) + pk, err = sk.GetPublicKeyVt() + if err == nil { + t.Errorf("Expected GetPublicKeyVt to fail with secret key with Q but passed: %v", pk) + } + + err = sk.UnmarshalBinary([]byte{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}) + if err == nil { + t.Errorf("Expected SecretKeyFromBytes to fail with not enough bytes but passed: %v", pk) + } + + err = sk.UnmarshalBinary(make([]byte, 32)) + if err == nil { + t.Errorf("Expected SecretKeyFromBytes to fail but passed: %v", pk) + } +} + +func TestProofOfPossessionG1Works(t *testing.T) { + ikm := make([]byte, 32) + readRand(ikm, t) + bls := NewSigPopVt() + pk, sk, err := bls.KeygenWithSeed(ikm) + if err != nil { + t.Errorf("Key gen failed but should've succeeded") + } + pop, err := bls.PopProve(sk) + if err != nil { + t.Errorf("PopProve failed but should've succeeded") + } + if res, _ := bls.PopVerify(pk, pop); !res { + t.Errorf("PopVerify failed but should've succeeded") + } +} + +func TestProofOfPossessionG1FromBadKey(t *testing.T) { + ikm := make([]byte, 32) + value := new(big.Int) + value.SetBytes(ikm) + sk := SecretKey{value: bls12381.Bls12381FqNew().SetBigInt(value)} + _, err := 
sk.createProofOfPossessionVt(blsSignaturePopVtDst) + if err == nil { + t.Errorf("createProofOfPossessionVt should've failed but succeeded.") + } +} + +func TestProofOfPossessionG1BytesWorks(t *testing.T) { + sk := genSecretKey(t) + pop, err := sk.createProofOfPossessionVt(blsSignaturePopVtDst) + if err != nil { + t.Errorf("createProofOfPossessionVt failed but should've succeeded.") + } + out := marshalStruct(pop, t) + if len(out) != ProofOfPossessionVtSize { + t.Errorf( + "ProofOfPossessionBytes incorrect size: expected %v, got %v", + ProofOfPossessionVtSize, + len(out), + ) + } + pop2 := new(ProofOfPossessionVt) + err = pop2.UnmarshalBinary(out) + if err != nil { + t.Errorf("ProofOfPossessionVt.UnmarshalBinary failed: %v", err) + } + out2 := marshalStruct(pop2, t) + if !bytes.Equal(out, out2) { + t.Errorf("ProofOfPossessionVt.UnmarshalBinary failed, not equal when deserialized") + } +} + +func TestProofOfPossessionG1BadBytes(t *testing.T) { + zeros := make([]byte, ProofOfPossessionVtSize) + temp := new(ProofOfPossessionVt) + err := temp.UnmarshalBinary(zeros) + if err == nil { + t.Errorf("ProofOfPossessionVt.UnmarshalBinary should've failed but succeeded.") + } +} + +func TestProofOfPossessionG1Fails(t *testing.T) { + sk := genSecretKey(t) + pop, err := sk.createProofOfPossessionVt(blsSignaturePopVtDst) + if err != nil { + t.Errorf("Expected createProofOfPossessionVt to succeed but failed.") + } + + ikm := make([]byte, 32) + readRand(ikm, t) + sk = genRandSecretKey(ikm, t) + bad, err := sk.GetPublicKeyVt() + if err != nil { + t.Errorf("Expected GetPublicKeyVt to succeed but failed: %v", err) + } + if res, _ := pop.verify(bad, blsSignaturePopVtDst); res { + t.Errorf("Expected ProofOfPossession verify to fail but succeeded.") + } +} + +func TestMultiSigG1Bytes(t *testing.T) { + messages := make([][]byte, 1) + message := make([]byte, 20) + messages[0] = message + _, sigs := initAggregatedTestValuesG1(messages, t) + bls := NewSigPopVt() + msig, err := bls.AggregateSignatures(sigs...) + if err != nil { + t.Errorf("%v", err) + } + msigBytes := marshalStruct(msig, t) + if len(msigBytes) != SignatureVtSize { + t.Errorf( + "Invalid multi-sig length. Expected %d bytes, found %d", + SignatureVtSize, + len(msigBytes), + ) + } + msig2 := new(MultiSignatureVt) + err = msig2.UnmarshalBinary(msigBytes) + if err != nil { + t.Errorf("MultiSignatureG1FromBytes failed with %v", err) + } + msigBytes2 := marshalStruct(msig2, t) + + if !bytes.Equal(msigBytes, msigBytes2) { + t.Errorf("Bytes methods not equal.") + } +} + +func TestMultiSigG1BadBytes(t *testing.T) { + messages := make([][]byte, 1) + message := make([]byte, 20) + messages[0] = message + _, sigs := initAggregatedTestValuesG1(messages, t) + bls := NewSigPopVt() + msig, err := bls.AggregateSignatures(sigs...) + if err != nil { + t.Errorf("%v", err) + } + msigBytes := marshalStruct(msig, t) + if len(msigBytes) != SignatureVtSize { + t.Errorf( + "Invalid multi-sig length. 
Expected %d bytes, found %d", + SignatureSize, + len(msigBytes), + ) + } + msigBytes[0] = 0 + temp := new(MultiSignatureVt) + err = temp.UnmarshalBinary(msigBytes) + if err == nil { + t.Errorf("MultiSignatureG1FromBytes should've failed but succeeded") + } + msigBytes = make([]byte, SignatureVtSize) + err = temp.UnmarshalBinary(msigBytes) + if err == nil { + t.Errorf("MultiSignatureG1FromBytes should've failed but succeeded") + } +} + +func TestMultiPubkeyG2Bytes(t *testing.T) { + messages := make([][]byte, 1) + message := make([]byte, 20) + messages[0] = message + pks, _ := initAggregatedTestValuesG1(messages, t) + bls := NewSigPopVt() + apk, err := bls.AggregatePublicKeys(pks...) + if err != nil { + t.Errorf("%v", err) + } + apkBytes := marshalStruct(apk, t) + if len(apkBytes) != PublicKeyVtSize { + t.Errorf("MultiPublicKeyVt has an incorrect size") + } + apk2 := new(MultiPublicKeyVt) + err = apk2.UnmarshalBinary(apkBytes) + if err != nil { + t.Errorf("MultiPublicKeyVt.UnmarshalBinary failed with %v", err) + } + apk2Bytes := marshalStruct(apk2, t) + if !bytes.Equal(apkBytes, apk2Bytes) { + t.Errorf("Bytes methods not equal.") + } +} + +func TestMultiPubkeyG2BadBytes(t *testing.T) { + messages := make([][]byte, 1) + message := make([]byte, 20) + messages[0] = message + pks, _ := initAggregatedTestValuesG1(messages, t) + bls := NewSigPopVt() + apk, err := bls.AggregatePublicKeys(pks...) + if err != nil { + t.Errorf("%v", err) + } + apkBytes := marshalStruct(apk, t) + if len(apkBytes) != PublicKeyVtSize { + t.Errorf("MultiPublicKeyVt has an incorrect size") + } + apkBytes[0] = 0 + temp := new(MultiPublicKeyVt) + err = temp.UnmarshalBinary(apkBytes) + if err == nil { + t.Errorf("MultiPublicKeyVt.UnmarshalBinary should've failed but succeeded") + } + apkBytes = make([]byte, PublicKeyVtSize) + err = temp.UnmarshalBinary(apkBytes) + if err == nil { + t.Errorf("MultiPublicKeyVt.UnmarshalBinary should've failed but succeeded") + } +} + +func TestFastAggregateVerifyConstituentG1Works(t *testing.T) { + messages := make([][]byte, 1) + message := make([]byte, 1) + messages[0] = message + pks, sigs := initAggregatedTestValuesG1(messages, t) + bls := NewSigPopVt() + + if res, _ := bls.FastAggregateVerifyConstituent(pks, message, sigs); !res { + t.Errorf("FastAggregateVerify failed.") + } +} + +func TestFastAggregateVerifyG1Works(t *testing.T) { + messages := make([][]byte, 1) + message := make([]byte, 1) + messages[0] = message + pks, sigs := initAggregatedTestValuesG1(messages, t) + asig, _ := aggregateSignaturesVt(sigs...) 
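+ // aggregateSignaturesVt pre-combines the constituent signatures, so
+ // FastAggregateVerify below only needs the single aggregate;
+ // FastAggregateVerifyConstituent (exercised above) performs the same
+ // aggregation internally before verifying against the shared message.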
+ bls := NewSigPopVt() + + if res, _ := bls.FastAggregateVerify(pks, message, asig); !res { + t.Errorf("FastAggregateVerify failed.") + } +} + +func TestFastAggregateVerifyG1Fails(t *testing.T) { + messages := make([][]byte, 1) + message := make([]byte, 1) + messages[0] = message + pks, sigs := initAggregatedTestValuesG1(messages, t) + bls := NewSigPopVt() + message[0] = 1 + if res, _ := bls.FastAggregateVerifyConstituent(pks, message, sigs); res { + t.Errorf("FastAggregateVerify verified when it should've failed.") + } +} + +func TestCustomPopDstG1Works(t *testing.T) { + bls, _ := NewSigPopVtWithDst("BLS_SIG_BLS12381G1_XMD:SHA-256_SSWU_RO_POP_TEST", + "BLS_POP_BLS12381G1_XMD:SHA-256_SSWU_RO_POP_TEST") + msg := make([]byte, 20) + ikm := make([]byte, 32) + pk, sk, err := bls.KeygenWithSeed(ikm) + if err != nil { + t.Errorf("Couldn't create custom dst keys: %v", err) + } + sig, err := bls.Sign(sk, msg) + if err != nil { + t.Errorf("Couldn't sign with custom dst: %v", err) + } + if res, _ := bls.Verify(pk, msg, sig); !res { + t.Errorf("verify fails with custom dst") + } + + pks := make([]*PublicKeyVt, 10) + sigs := make([]*SignatureVt, 10) + pks[0] = pk + sigs[0] = sig + for i := 1; i < 10; i++ { + readRand(ikm, t) + pkt, skt, err := bls.KeygenWithSeed(ikm) + if err != nil { + t.Errorf("Couldn't create custom dst keys: %v", err) + } + sigt, err := bls.Sign(skt, msg) + if err != nil { + t.Errorf("Couldn't sign with custom dst: %v", err) + } + pks[i] = pkt + sigs[i] = sigt + } + + if res, _ := bls.FastAggregateVerifyConstituent(pks, msg, sigs); !res { + t.Errorf("FastAggregateVerify failed with custom dst") + } + + pop, err := bls.PopProve(sk) + if err != nil { + t.Errorf("PopProve failed with custom dst") + } + + if res, _ := bls.PopVerify(pk, pop); !res { + t.Errorf("PopVerify failed with custom dst") + } +} + +func TestBlsPopG1KeyGenWithSeed(t *testing.T) { + ikm := []byte("Not enough bytes") + bls := NewSigPopVt() + _, _, err := bls.KeygenWithSeed(ikm) + if err == nil { + t.Errorf("Expected KeygenWithSeed to fail but succeeded") + } +} + +func TestBlsPopG1KeyGen(t *testing.T) { + bls := NewSigPopVt() + _, _, err := bls.Keygen() + if err != nil { + t.Errorf("Keygen failed: %v", err) + } +} + +func TestPopVtThresholdKeygenBadInputs(t *testing.T) { + bls := NewSigPopVt() + _, _, err := bls.ThresholdKeygen(0, 0) + if err == nil { + t.Errorf("ThresholdKeygen should've failed but succeeded") + } + _, _, err = bls.ThresholdKeygen(1, 0) + if err == nil { + t.Errorf("ThresholdKeygen should've failed but succeeded") + } + _, _, err = bls.ThresholdKeygen(3, 2) + if err == nil { + t.Errorf("ThresholdKeygen should've failed but succeeded") + } +} + +func TestPopVtThresholdKeygen(t *testing.T) { + bls := NewSigPopVt() + _, sks, err := bls.ThresholdKeygen(3, 5) + if err != nil { + t.Errorf("ThresholdKeygen failed") + } + if len(sks) != 5 { + t.Errorf("ThresholdKeygen did not produce enough shares") + } +} + +func TestPopPartialSignVt(t *testing.T) { + ikm := make([]byte, 32) + bls := NewSigPopVt() + pk, sks, err := bls.ThresholdKeygenWithSeed(ikm, 2, 4) + if err != nil { + t.Errorf("ThresholdKeygen failed") + } + msg := make([]byte, 10) + sig1, err := bls.PartialSign(sks[0], msg) + if err != nil { + t.Errorf("partialSign failed: %v", err) + } + sig2, err := bls.PartialSign(sks[1], msg) + if err != nil { + t.Errorf("partialSign failed: %v", err) + } + sig, err := bls.CombineSignatures(sig1, sig2) + if err != nil { + t.Errorf("CombineSignatures failed: %v", err) + } + + if res, _ := bls.Verify(pk, msg, 
sig); !res { + t.Errorf("Combined signature does not verify") + } + + sig, err = bls.CombineSignatures(sig1) + if err == nil { + t.Errorf("CombineSignatures succeeded when it should've failed") + } + if res, _ := bls.Verify(pk, msg, sig); res { + t.Errorf("Combined signature verify succeeded when it should've failed") + } +} + +// Ensure that mixed partial signatures from distinct origins create invalid composite signatures +func TestPopVtPartialMixupShares(t *testing.T) { + total := uint(5) + ikm := make([]byte, 32) + bls := NewSigPopVt() + pk1, sks1, err := bls.ThresholdKeygenWithSeed(ikm, 3, total) + if err != nil { + t.Errorf("ThresholdKeygen failed: %v", err) + } + for i := range ikm { + ikm[i] = 1 + } + pk2, sks2, err := bls.ThresholdKeygenWithSeed(ikm, 3, total) + if err != nil { + t.Errorf("ThresholdKeygen failed: %v", err) + } + + // Generate partial signatures for both sets of keys + msg := make([]byte, 10) + sigs1 := make([]*PartialSignatureVt, total) + sigs2 := make([]*PartialSignatureVt, total) + for i := range sks1 { + sigs1[i], err = bls.PartialSign(sks1[i], msg) + if err != nil { + t.Errorf("PartialSign failed: %v", err) + } + sigs2[i], err = bls.PartialSign(sks2[i], msg) + if err != nil { + t.Errorf("PartialSign failed: %v", err) + } + } + + // Try combining 2 from group 1 and 2 from group 2 + sig, err := bls.CombineSignatures(sigs1[0], sigs1[1], sigs2[2], sigs2[3]) + if err != nil { + t.Errorf("CombineSignatures failed: %v", err) + } + // Signature shouldn't validate + if res, _ := bls.Verify(pk1, msg, sig); res { + t.Errorf( + "CombineSignatures worked with different shares of two secret keys for the same message", + ) + } + if res, _ := bls.Verify(pk2, msg, sig); res { + t.Errorf( + "CombineSignatures worked with different shares of two secret keys for the same message", + ) + } + // Should error out due to duplicate identifiers + _, err = bls.CombineSignatures(sigs1[0], sigs1[1], sigs2[0], sigs2[1]) + if err == nil { + t.Errorf("CombineSignatures expected to fail but succeeded.") + } +} diff --git a/signatures/bls/bls_sig/usual_bls.go b/signatures/bls/bls_sig/usual_bls.go new file mode 100755 index 0000000..f35193e --- /dev/null +++ b/signatures/bls/bls_sig/usual_bls.go @@ -0,0 +1,478 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package bls_sig + +import ( + "fmt" +) + +const ( + // Domain separation tag for basic signatures + // according to section 4.2.1 in + // https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03 + blsSignatureBasicDst = "BLS_SIG_BLS12381G2_XMD:SHA-256_SSWU_RO_NUL_" + // Domain separation tag for basic signatures + // according to section 4.2.2 in + // https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03 + blsSignatureAugDst = "BLS_SIG_BLS12381G2_XMD:SHA-256_SSWU_RO_AUG_" + // Domain separation tag for proof of possession signatures + // according to section 4.2.3 in + // https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03 + blsSignaturePopDst = "BLS_SIG_BLS12381G2_XMD:SHA-256_SSWU_RO_POP_" + // Domain separation tag for proof of possession proofs + // according to section 4.2.3 in + // https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03 + blsPopProofDst = "BLS_POP_BLS12381G2_XMD:SHA-256_SSWU_RO_POP_" +) + +type BlsScheme interface { + Keygen() (*PublicKey, *SecretKey, error) + KeygenWithSeed(ikm []byte) (*PublicKey, *SecretKey, error) + Sign(sk *SecretKey, msg []byte) (*Signature, error) + Verify(pk *PublicKey, msg []byte, sig *Signature) bool + AggregateVerify(pks []*PublicKey, msgs [][]byte, sigs []*Signature) bool +} + +// generateKeys creates 32 bytes of random data to be fed to +// generateKeysWithSeed +func generateKeys() (*PublicKey, *SecretKey, error) { + ikm, err := generateRandBytes(32) + if err != nil { + return nil, nil, err + } + return generateKeysWithSeed(ikm) +} + +// generateKeysWithSeed generates a BLS key pair given input key material (ikm) +func generateKeysWithSeed(ikm []byte) (*PublicKey, *SecretKey, error) { + sk, err := new(SecretKey).Generate(ikm) + if err != nil { + return nil, nil, err + } + pk, err := sk.GetPublicKey() + if err != nil { + return nil, nil, err + } + return pk, sk, nil +} + +// thresholdGenerateKeys will generate random secret key shares and the corresponding public key +func thresholdGenerateKeys(threshold, total uint) (*PublicKey, []*SecretKeyShare, error) { + pk, sk, err := generateKeys() + if err != nil { + return nil, nil, err + } + shares, err := thresholdizeSecretKey(sk, threshold, total) + if err != nil { + return nil, nil, err + } + return pk, shares, nil +} + +// thresholdGenerateKeysWithSeed will generate random secret key shares and the corresponding public key +// using the corresponding seed `ikm` +func thresholdGenerateKeysWithSeed( + ikm []byte, + threshold, total uint, +) (*PublicKey, []*SecretKeyShare, error) { + pk, sk, err := generateKeysWithSeed(ikm) + if err != nil { + return nil, nil, err + } + shares, err := thresholdizeSecretKey(sk, threshold, total) + if err != nil { + return nil, nil, err + } + return pk, shares, nil +} + +// SigBasic is minimal-pubkey-size scheme that doesn't support FastAggregateVerificiation. +// see: https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03#section-4.2.1 +type SigBasic struct { + dst string +} + +// Creates a new BLS basic signature scheme with the standard domain separation tag used for signatures. +func NewSigBasic() *SigBasic { + return &SigBasic{dst: blsSignatureBasicDst} +} + +// Creates a new BLS basic signature scheme with a custom domain separation tag used for signatures. 
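+// Signers and verifiers must agree on the tag: a signature produced under one
+// DST will not verify under a different DST.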
+func NewSigBasicWithDst(signDst string) *SigBasic { + return &SigBasic{dst: signDst} +} + +// Creates a new BLS key pair +func (b SigBasic) Keygen() (*PublicKey, *SecretKey, error) { + return generateKeys() +} + +// Creates a new BLS key pair +// Input key material (ikm) MUST be at least 32 bytes long, +// but it MAY be longer. +func (b SigBasic) KeygenWithSeed(ikm []byte) (*PublicKey, *SecretKey, error) { + return generateKeysWithSeed(ikm) +} + +// ThresholdKeyGen generates a public key and `total` secret key shares such that +// `threshold` of them can be combined in signatures +func (b SigBasic) ThresholdKeygen(threshold, total uint) (*PublicKey, []*SecretKeyShare, error) { + return thresholdGenerateKeys(threshold, total) +} + +// ThresholdKeyGen generates a public key and `total` secret key shares such that +// `threshold` of them can be combined in signatures +func (b SigBasic) ThresholdKeygenWithSeed( + ikm []byte, + threshold, total uint, +) (*PublicKey, []*SecretKeyShare, error) { + return thresholdGenerateKeysWithSeed(ikm, threshold, total) +} + +// Computes a signature in G2 from sk, a secret key, and a message +func (b SigBasic) Sign(sk *SecretKey, msg []byte) (*Signature, error) { + return sk.createSignature(msg, b.dst) +} + +// Compute a partial signature in G2 that can be combined with other partial signature +func (b SigBasic) PartialSign(sks *SecretKeyShare, msg []byte) (*PartialSignature, error) { + return sks.partialSign(msg, b.dst) +} + +// CombineSignatures takes partial signatures to yield a completed signature +func (b SigBasic) CombineSignatures(sigs ...*PartialSignature) (*Signature, error) { + return combineSigs(sigs) +} + +// Checks that a signature is valid for the message under the public key pk +func (b SigBasic) Verify(pk *PublicKey, msg []byte, sig *Signature) (bool, error) { + return pk.verifySignature(msg, sig, b.dst) +} + +// The AggregateVerify algorithm checks an aggregated signature over +// several (PK, message, signature) pairs. +// Each message must be different or this will return false. +// See section 3.1.1 from +// https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03 +func (b SigBasic) AggregateVerify( + pks []*PublicKey, + msgs [][]byte, + sigs []*Signature, +) (bool, error) { + if !allRowsUnique(msgs) { + return false, fmt.Errorf("all messages must be distinct") + } + asig, err := aggregateSignatures(sigs...) + if err != nil { + return false, err + } + return asig.aggregateVerify(pks, msgs, b.dst) +} + +// SigAug is minimal-pubkey-size scheme that doesn't support FastAggregateVerificiation. +// see: https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03#section-4.2.2 +type SigAug struct { + dst string +} + +// Creates a new BLS message augmentation signature scheme with the standard domain separation tag used for signatures. +func NewSigAug() *SigAug { + return &SigAug{dst: blsSignatureAugDst} +} + +// Creates a new BLS message augmentation signature scheme with a custom domain separation tag used for signatures. +func NewSigAugWithDst(signDst string) *SigAug { + return &SigAug{dst: signDst} +} + +// Creates a new BLS key pair +func (b SigAug) Keygen() (*PublicKey, *SecretKey, error) { + return generateKeys() +} + +// Creates a new BLS secret key +// Input key material (ikm) MUST be at least 32 bytes long, +// but it MAY be longer. 
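+// Seeds shorter than 32 bytes are rejected with an error.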
+func (b SigAug) KeygenWithSeed(ikm []byte) (*PublicKey, *SecretKey, error) { + return generateKeysWithSeed(ikm) +} + +// ThresholdKeyGen generates a public key and `total` secret key shares such that +// `threshold` of them can be combined in signatures +func (b SigAug) ThresholdKeygen(threshold, total uint) (*PublicKey, []*SecretKeyShare, error) { + return thresholdGenerateKeys(threshold, total) +} + +// ThresholdKeyGen generates a public key and `total` secret key shares such that +// `threshold` of them can be combined in signatures +func (b SigAug) ThresholdKeygenWithSeed( + ikm []byte, + threshold, total uint, +) (*PublicKey, []*SecretKeyShare, error) { + return thresholdGenerateKeysWithSeed(ikm, threshold, total) +} + +// Computes a signature in G1 from sk, a secret key, and a message +// See section 3.2.1 from +// https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03 +func (b SigAug) Sign(sk *SecretKey, msg []byte) (*Signature, error) { + if len(msg) == 0 { + return nil, fmt.Errorf("message cannot be empty or nil") + } + + pk, err := sk.GetPublicKey() + if err != nil { + return nil, err + } + bytes, err := pk.MarshalBinary() + if err != nil { + return nil, fmt.Errorf("MarshalBinary failed") + } + bytes = append(bytes, msg...) + return sk.createSignature(bytes, b.dst) +} + +// Compute a partial signature in G2 that can be combined with other partial signature +func (b SigAug) PartialSign( + sks *SecretKeyShare, + pk *PublicKey, + msg []byte, +) (*PartialSignature, error) { + if len(msg) == 0 { + return nil, fmt.Errorf("message cannot be empty or nil") + } + + bytes, err := pk.MarshalBinary() + if err != nil { + return nil, fmt.Errorf("MarshalBinary failed") + } + bytes = append(bytes, msg...) + return sks.partialSign(bytes, b.dst) +} + +// CombineSignatures takes partial signatures to yield a completed signature +func (b SigAug) CombineSignatures(sigs ...*PartialSignature) (*Signature, error) { + return combineSigs(sigs) +} + +// Checks that a signature is valid for the message under the public key pk +// See section 3.2.2 from +// https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03 +func (b SigAug) Verify(pk *PublicKey, msg []byte, sig *Signature) (bool, error) { + bytes, err := pk.MarshalBinary() + if err != nil { + return false, err + } + bytes = append(bytes, msg...) + return pk.verifySignature(bytes, sig, b.dst) +} + +// The AggregateVerify algorithm checks an aggregated signature over +// several (PK, message, signature) pairs. +// See section 3.2.3 from +// https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03 +func (b SigAug) AggregateVerify(pks []*PublicKey, msgs [][]byte, sigs []*Signature) (bool, error) { + if len(pks) != len(msgs) { + return false, fmt.Errorf( + "the number of public keys does not match the number of messages: %v != %v", + len(pks), + len(msgs), + ) + } + data := make([][]byte, len(msgs)) + for i, msg := range msgs { + bytes, err := pks[i].MarshalBinary() + if err != nil { + return false, err + } + data[i] = append(bytes, msg...) + } + asig, err := aggregateSignatures(sigs...) + if err != nil { + return false, err + } + return asig.aggregateVerify(pks, data, b.dst) +} + +// SigEth2 supports signatures on Eth2. +// Internally is an alias for SigPop +type SigEth2 = SigPop + +// NewSigEth2 Creates a new BLS ETH2 signature scheme with the standard domain separation tag used for signatures. 
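+// Eth2 uses the proof-of-possession ciphersuite (signatures in G2, public keys
+// in G1), so this constructor simply returns NewSigPop().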
+func NewSigEth2() *SigEth2 { + return NewSigPop() +} + +// SigPop is minimal-pubkey-size scheme that supports FastAggregateVerification +// and requires using proofs of possession to mitigate rogue-key attacks +// see: https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03#section-4.2.3 +type SigPop struct { + sigDst string + popDst string +} + +// Creates a new BLS proof of possession signature scheme with the standard domain separation tag used for signatures. +func NewSigPop() *SigPop { + return &SigPop{sigDst: blsSignaturePopDst, popDst: blsPopProofDst} +} + +// Creates a new BLS message proof of possession signature scheme with a custom domain separation tag used for signatures. +func NewSigPopWithDst(signDst, popDst string) (*SigPop, error) { + if signDst == popDst { + return nil, fmt.Errorf("domain separation tags cannot be equal") + } + return &SigPop{sigDst: signDst, popDst: popDst}, nil +} + +// Creates a new BLS key pair +func (b SigPop) Keygen() (*PublicKey, *SecretKey, error) { + return generateKeys() +} + +// Creates a new BLS secret key +// Input key material (ikm) MUST be at least 32 bytes long, +// but it MAY be longer. +func (b SigPop) KeygenWithSeed(ikm []byte) (*PublicKey, *SecretKey, error) { + return generateKeysWithSeed(ikm) +} + +// ThresholdKeyGen generates a public key and `total` secret key shares such that +// `threshold` of them can be combined in signatures +func (b SigPop) ThresholdKeygen(threshold, total uint) (*PublicKey, []*SecretKeyShare, error) { + return thresholdGenerateKeys(threshold, total) +} + +// ThresholdKeyGen generates a public key and `total` secret key shares such that +// `threshold` of them can be combined in signatures +func (b SigPop) ThresholdKeygenWithSeed( + ikm []byte, + threshold, total uint, +) (*PublicKey, []*SecretKeyShare, error) { + return thresholdGenerateKeysWithSeed(ikm, threshold, total) +} + +// Computes a signature in G2 from sk, a secret key, and a message +// See section 2.6 from +// https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03 +func (b SigPop) Sign(sk *SecretKey, msg []byte) (*Signature, error) { + return sk.createSignature(msg, b.sigDst) +} + +// Compute a partial signature in G2 that can be combined with other partial signature +func (b SigPop) PartialSign(sks *SecretKeyShare, msg []byte) (*PartialSignature, error) { + return sks.partialSign(msg, b.sigDst) +} + +// CombineSignatures takes partial signatures to yield a completed signature +func (b SigPop) CombineSignatures(sigs ...*PartialSignature) (*Signature, error) { + return combineSigs(sigs) +} + +// Checks that a signature is valid for the message under the public key pk +// See section 2.7 from +// https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03 +func (b SigPop) Verify(pk *PublicKey, msg []byte, sig *Signature) (bool, error) { + return pk.verifySignature(msg, sig, b.sigDst) +} + +// The aggregateVerify algorithm checks an aggregated signature over +// several (PK, message, signature) pairs. +// Each message must be different or this will return false. +// See section 3.1.1 from +// https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03 +func (b SigPop) AggregateVerify(pks []*PublicKey, msgs [][]byte, sigs []*Signature) (bool, error) { + if !allRowsUnique(msgs) { + return false, fmt.Errorf("all messages must be distinct") + } + asig, err := aggregateSignatures(sigs...) 
+ if err != nil { + return false, err + } + return asig.aggregateVerify(pks, msgs, b.sigDst) +} + +// Combine many signatures together to form a Multisignature. +// Multisignatures can be created when multiple signers jointly +// generate signatures over the same message. +func (b SigPop) AggregateSignatures(sigs ...*Signature) (*MultiSignature, error) { + g1, err := aggregateSignatures(sigs...) + if err != nil { + return nil, err + } + return &MultiSignature{value: g1.Value}, nil +} + +// Combine many public keys together to form a Multipublickey. +// Multipublickeys are used to verify multisignatures. +func (b SigPop) AggregatePublicKeys(pks ...*PublicKey) (*MultiPublicKey, error) { + g2, err := aggregatePublicKeys(pks...) + if err != nil { + return nil, err + } + return &MultiPublicKey{value: g2.value}, nil +} + +// Checks that a multisignature is valid for the message under the multi public key +// Similar to FastAggregateVerify except the keys and signatures have already been +// combined. See section 3.3.4 from +// https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03 +func (b SigPop) VerifyMultiSignature( + pk *MultiPublicKey, + msg []byte, + sig *MultiSignature, +) (bool, error) { + s := &Signature{Value: sig.value} + p := &PublicKey{value: pk.value} + return p.verifySignature(msg, s, b.sigDst) +} + +// FastAggregateVerify verifies an aggregated signature against the specified message and set of public keys. +// See section 3.3.4 from +// https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03 +func (b SigPop) FastAggregateVerify(pks []*PublicKey, msg []byte, asig *Signature) (bool, error) { + apk, err := aggregatePublicKeys(pks...) + if err != nil { + return false, err + } + return apk.verifySignature(msg, asig, b.sigDst) +} + +// FastAggregateVerifyConstituent aggregates all constituent signatures and the verifies +// them against the specified message and public keys +// See section 3.3.4 from +// https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03 +func (b SigPop) FastAggregateVerifyConstituent( + pks []*PublicKey, + msg []byte, + sigs []*Signature, +) (bool, error) { + // Aggregate the constituent signatures + asig, err := aggregateSignatures(sigs...) + if err != nil { + return false, err + } + // And verify + return b.FastAggregateVerify(pks, msg, asig) +} + +// Create a proof of possession for the corresponding public key. +// A proof of possession must be created for each public key to be used +// in FastAggregateVerify or a Multipublickey to avoid rogue key attacks. +// See section 3.3.2 from +// https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03 +func (b SigPop) PopProve(sk *SecretKey) (*ProofOfPossession, error) { + return sk.createProofOfPossession(b.popDst) +} + +// verify a proof of possession for the corresponding public key is valid. +// A proof of possession must be created for each public key to be used +// in FastAggregateVerify or a Multipublickey to avoid rogue key attacks. +// See section 3.3.3 from +// https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03 +func (b SigPop) PopVerify(pk *PublicKey, pop2 *ProofOfPossession) (bool, error) { + return pop2.verify(pk, b.popDst) +} diff --git a/signatures/bls/bls_sig/usual_bls_sig.go b/signatures/bls/bls_sig/usual_bls_sig.go new file mode 100644 index 0000000..094c1ae --- /dev/null +++ b/signatures/bls/bls_sig/usual_bls_sig.go @@ -0,0 +1,507 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
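The SigPop API above composes into a proof-of-possession multisignature flow roughly as sketched below. This is an illustrative sketch rather than code from this commit, and it assumes the package is importable as github.com/sonr-io/sonr/crypto/signatures/bls/bls_sig (inferred from the module paths used in the imports above).

package main

import (
    "fmt"
    "log"

    "github.com/sonr-io/sonr/crypto/signatures/bls/bls_sig"
)

func main() {
    bls := bls_sig.NewSigPop()
    msg := []byte("same message for every signer")

    pks := make([]*bls_sig.PublicKey, 0, 3)
    sigs := make([]*bls_sig.Signature, 0, 3)
    for i := 0; i < 3; i++ {
        pk, sk, err := bls.Keygen()
        if err != nil {
            log.Fatalf("Keygen: %v", err)
        }
        // Each signer publishes a proof of possession once; verifiers check it
        // before trusting the key inside any aggregate (rogue-key defense).
        pop, err := bls.PopProve(sk)
        if err != nil {
            log.Fatalf("PopProve: %v", err)
        }
        if ok, _ := bls.PopVerify(pk, pop); !ok {
            log.Fatal("proof of possession did not verify")
        }
        sig, err := bls.Sign(sk, msg)
        if err != nil {
            log.Fatalf("Sign: %v", err)
        }
        pks = append(pks, pk)
        sigs = append(sigs, sig)
    }

    // All signers signed the same message, so the signatures collapse into one
    // multisignature that verifies against one aggregated public key.
    asig, err := bls.AggregateSignatures(sigs...)
    if err != nil {
        log.Fatalf("AggregateSignatures: %v", err)
    }
    apk, err := bls.AggregatePublicKeys(pks...)
    if err != nil {
        log.Fatalf("AggregatePublicKeys: %v", err)
    }
    ok, err := bls.VerifyMultiSignature(apk, msg, asig)
    fmt.Println(ok, err) // expected: true <nil>
}

The threshold methods (ThresholdKeygen, PartialSign, CombineSignatures) follow the same pattern for every scheme. Under the same import-path assumption, and reusing the imports above, a 3-of-5 round might look like this sketch:

func thresholdRound() error {
    bls := bls_sig.NewSigPop()

    // One public key, five shares; any three shares can reconstruct a signature.
    pk, shares, err := bls.ThresholdKeygen(3, 5)
    if err != nil {
        return err
    }

    msg := []byte("threshold signed message")
    partials := make([]*bls_sig.PartialSignature, 0, 3)
    for _, share := range shares[:3] {
        ps, err := bls.PartialSign(share, msg)
        if err != nil {
            return err
        }
        partials = append(partials, ps)
    }

    // CombineSignatures interpolates the partials into an ordinary signature
    // that verifies exactly like a single-signer one.
    sig, err := bls.CombineSignatures(partials...)
    if err != nil {
        return err
    }
    ok, err := bls.Verify(pk, msg, sig)
    if err != nil || !ok {
        return fmt.Errorf("combined signature did not verify: %v", err)
    }
    return nil
}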
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package bls_sig + +import ( + "fmt" + + "github.com/sonr-io/sonr/crypto/core/curves/native" + "github.com/sonr-io/sonr/crypto/core/curves/native/bls12381" + "github.com/sonr-io/sonr/crypto/internal" +) + +// Implement BLS signatures on the BLS12-381 curve +// according to https://crypto.standford.edu/~dabo/pubs/papers/BLSmultisig.html +// and https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03 +// this file implements signatures in G2 and public keys in G1. +// Public Keys and Signatures can be aggregated but the consumer +// must use proofs of possession to defend against rogue-key attacks. + +const ( + // Public key size in G1 + PublicKeySize = 48 + // Signature size in G2 + SignatureSize = 96 + // Proof of Possession in G2 + ProofOfPossessionSize = 96 +) + +// Represents a public key in G1 +type PublicKey struct { + value bls12381.G1 +} + +// Serialize a public key to a byte array in compressed form. +// See +// https://github.com/zcash/librustzcash/blob/master/pairing/src/bls12_381/README.md#serialization +// https://docs.rs/bls12_381/0.1.1/bls12_381/notes/serialization/index.html +func (pk PublicKey) MarshalBinary() ([]byte, error) { + out := pk.value.ToCompressed() + return out[:], nil +} + +// Deserialize a public key from a byte array in compressed form. +// See +// https://github.com/zcash/librustzcash/blob/master/pairing/src/bls12_381/README.md#serialization +// https://docs.rs/bls12_381/0.1.1/bls12_381/notes/serialization/index.html +// If successful, it will assign the public key +// otherwise it will return an error +func (pk *PublicKey) UnmarshalBinary(data []byte) error { + if len(data) != PublicKeySize { + return fmt.Errorf("public key must be %d bytes", PublicKeySize) + } + var blob [PublicKeySize]byte + copy(blob[:], data) + p1, err := new(bls12381.G1).FromCompressed(&blob) + if err != nil { + return err + } + if p1.IsIdentity() == 1 { + return fmt.Errorf("public keys cannot be zero") + } + pk.value = *p1 + return nil +} + +// Represents a BLS signature in G2 +type Signature struct { + Value bls12381.G2 +} + +// Serialize a signature to a byte array in compressed form. +// See +// https://github.com/zcash/librustzcash/blob/master/pairing/src/bls12_381/README.md#serialization +// https://docs.rs/bls12_381/0.1.1/bls12_381/notes/serialization/index.html +func (sig Signature) MarshalBinary() ([]byte, error) { + out := sig.Value.ToCompressed() + return out[:], nil +} + +func (sig Signature) verify(pk *PublicKey, message []byte, signDst string) (bool, error) { + return pk.verifySignature(message, &sig, signDst) +} + +// The AggregateVerify algorithm checks an aggregated signature over +// several (PK, message) pairs. 
+// The Signature is the output of aggregateSignatures +// Each message must be different or this will return false +// See section 3.1.1 from +// https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03 +func (sig Signature) aggregateVerify( + pks []*PublicKey, + msgs [][]byte, + signDst string, +) (bool, error) { + return sig.coreAggregateVerify(pks, msgs, signDst) +} + +func (sig Signature) coreAggregateVerify( + pks []*PublicKey, + msgs [][]byte, + signDst string, +) (bool, error) { + if len(pks) < 1 { + return false, fmt.Errorf("at least one key is required") + } + if len(msgs) < 1 { + return false, fmt.Errorf("at least one message is required") + } + if len(pks) != len(msgs) { + return false, fmt.Errorf( + "the number of public keys does not match the number of messages: %v != %v", + len(pks), + len(msgs), + ) + } + if sig.Value.InCorrectSubgroup() == 0 { + return false, fmt.Errorf("signature is not in the correct subgroup") + } + + dst := []byte(signDst) + engine := new(bls12381.Engine) + // e(pk_1, H(m_1))*...*e(pk_N, H(m_N)) == e(g1, s) + // However, we use only one miller loop + // by doing the equivalent of + // e(pk_1, H(m_1))*...*e(pk_N, H(m_N)) * e(g1^-1, s) == 1 + for i, pk := range pks { + if pk == nil { + return false, fmt.Errorf("public key at %d is nil", i) + } + if pk.value.IsIdentity() == 1 || pk.value.InCorrectSubgroup() == 0 { + return false, fmt.Errorf("public key at %d is not in the correct subgroup", i) + } + p2 := new(bls12381.G2).Hash(native.EllipticPointHasherSha256(), msgs[i], dst) + engine.AddPair(&pk.value, p2) + } + engine.AddPairInvG1(new(bls12381.G1).Generator(), &sig.Value) + return engine.Check(), nil +} + +// Deserialize a signature from a byte array in compressed form. +// See +// https://github.com/zcash/librustzcash/blob/master/pairing/src/bls12_381/README.md#serialization +// https://docs.rs/bls12_381/0.1.1/bls12_381/notes/serialization/index.html +// If successful, it will assign the Signature +// otherwise it will return an error +func (sig *Signature) UnmarshalBinary(data []byte) error { + if len(data) != SignatureSize { + return fmt.Errorf("signature must be %d bytes", SignatureSize) + } + var blob [SignatureSize]byte + copy(blob[:], data) + p2, err := new(bls12381.G2).FromCompressed(&blob) + if err != nil { + return err + } + if p2.IsIdentity() == 1 { + return fmt.Errorf("signatures cannot be zero") + } + sig.Value = *p2 + return nil +} + +// Get the corresponding public key from a secret key +// Verifies the public key is in the correct subgroup +func (sk SecretKey) GetPublicKey() (*PublicKey, error) { + result := new(bls12381.G1).Generator() + result.Mul(result, sk.value) + if result.InCorrectSubgroup() == 0 || result.IsIdentity() == 1 { + return nil, fmt.Errorf("point is not in correct subgroup") + } + return &PublicKey{value: *result}, nil +} + +// Compute a signature from a secret key and message +// This signature is deterministic which protects against +// attacks arising from signing with bad randomness like +// the nonce reuse attack on ECDSA. 
`message` is +// hashed to a point in G2 as described in to +// https://datatracker.ietf.org/doc/draft-irtf-cfrg-hash-to-curve/?include_text=1 +// See Section 2.6 in https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03 +// nil message is not permitted but empty slice is allowed +func (sk SecretKey) createSignature(message []byte, dst string) (*Signature, error) { + if message == nil { + return nil, fmt.Errorf("message cannot be nil") + } + if sk.value.IsZero() == 1 { + return nil, fmt.Errorf("invalid secret key") + } + p2 := new(bls12381.G2).Hash(native.EllipticPointHasherSha256(), message, []byte(dst)) + result := new(bls12381.G2).Mul(p2, sk.value) + if result.InCorrectSubgroup() == 0 { + return nil, fmt.Errorf("point is not on correct subgroup") + } + return &Signature{Value: *result}, nil +} + +// Verify a signature is valid for the message under this public key. +// See Section 2.7 in https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03 +func (pk PublicKey) verifySignature( + message []byte, + signature *Signature, + dst string, +) (bool, error) { + if signature == nil || message == nil || pk.value.IsIdentity() == 1 { + return false, fmt.Errorf("signature and message and public key cannot be nil or zero") + } + if signature.Value.IsIdentity() == 1 || signature.Value.InCorrectSubgroup() == 0 { + return false, fmt.Errorf("signature is not in the correct subgroup") + } + engine := new(bls12381.Engine) + + p2 := new(bls12381.G2).Hash(native.EllipticPointHasherSha256(), message, []byte(dst)) + // e(pk, H(m)) == e(g1, s) + // However, we can reduce the number of miller loops + // by doing the equivalent of + // e(pk^-1, H(m)) * e(g1, s) == 1 + engine.AddPair(&pk.value, p2) + engine.AddPairInvG1(new(bls12381.G1).Generator(), &signature.Value) + return engine.Check(), nil +} + +// Combine public keys into one aggregated key +func aggregatePublicKeys(pks ...*PublicKey) (*PublicKey, error) { + if len(pks) < 1 { + return nil, fmt.Errorf("at least one public key is required") + } + result := new(bls12381.G1).Identity() + for i, k := range pks { + if k == nil { + return nil, fmt.Errorf("key at %d is nil, keys cannot be nil", i) + } + if k.value.InCorrectSubgroup() == 0 { + return nil, fmt.Errorf("key at %d is not in the correct subgroup", i) + } + result.Add(result, &k.value) + } + return &PublicKey{value: *result}, nil +} + +// Combine signatures into one aggregated signature +func aggregateSignatures(sigs ...*Signature) (*Signature, error) { + if len(sigs) < 1 { + return nil, fmt.Errorf("at least one signature is required") + } + result := new(bls12381.G2).Identity() + for i, s := range sigs { + if s == nil { + return nil, fmt.Errorf("signature at %d is nil, signature cannot be nil", i) + } + if s.Value.InCorrectSubgroup() == 0 { + return nil, fmt.Errorf("signature at %d is not in the correct subgroup", i) + } + result.Add(result, &s.Value) + } + return &Signature{Value: *result}, nil +} + +// A proof of possession scheme uses a separate public key validation +// step, called a proof of possession, to defend against rogue key +// attacks. This enables an optimization to aggregate signature +// verification for the case that all signatures are on the same +// message. +type ProofOfPossession struct { + value bls12381.G2 +} + +// Generates a proof-of-possession (PoP) for this secret key. The PoP signature should be verified before +// before accepting any aggregate signatures related to the corresponding pubkey. 
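+// The proof is itself a signature over the compressed public key bytes,
+// produced under the dedicated proof-of-possession DST rather than the
+// message-signing DST.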
+func (sk SecretKey) createProofOfPossession(popDst string) (*ProofOfPossession, error) { + pk, err := sk.GetPublicKey() + if err != nil { + return nil, err + } + msg, err := pk.MarshalBinary() + if err != nil { + return nil, err + } + sig, err := sk.createSignature(msg, popDst) + if err != nil { + return nil, err + } + return &ProofOfPossession{value: sig.Value}, nil +} + +// Serialize a proof of possession to a byte array in compressed form. +// See +// https://github.com/zcash/librustzcash/blob/master/pairing/src/bls12_381/README.md#serialization +// https://docs.rs/bls12_381/0.1.1/bls12_381/notes/serialization/index.html +func (pop ProofOfPossession) MarshalBinary() ([]byte, error) { + out := pop.value.ToCompressed() + return out[:], nil +} + +// Deserialize a proof of possession from a byte array in compressed form. +// See +// https://github.com/zcash/librustzcash/blob/master/pairing/src/bls12_381/README.md#serialization +// https://docs.rs/bls12_381/0.1.1/bls12_381/notes/serialization/index.html +// If successful, it will assign the Signature +// otherwise it will return an error +func (pop *ProofOfPossession) UnmarshalBinary(data []byte) error { + p2 := new(Signature) + err := p2.UnmarshalBinary(data) + if err != nil { + return err + } + pop.value = p2.Value + return nil +} + +// Verifies that PoP is valid for this pubkey. In order to prevent rogue key attacks, a PoP must be validated +// for each pubkey in an aggregated signature. +func (pop ProofOfPossession) verify(pk *PublicKey, popDst string) (bool, error) { + if pk == nil { + return false, fmt.Errorf("public key cannot be nil") + } + msg, err := pk.MarshalBinary() + if err != nil { + return false, err + } + return pk.verifySignature(msg, &Signature{Value: pop.value}, popDst) +} + +// Represents an MultiSignature in G2. A multisignature is used when multiple signatures +// are calculated over the same message vs an aggregate signature where each message signed +// is a unique. +type MultiSignature struct { + value bls12381.G2 +} + +// Serialize a multi-signature to a byte array in compressed form. +// See +// https://github.com/zcash/librustzcash/blob/master/pairing/src/bls12_381/README.md#serialization +// https://docs.rs/bls12_381/0.1.1/bls12_381/notes/serialization/index.html +func (sig MultiSignature) MarshalBinary() ([]byte, error) { + out := sig.value.ToCompressed() + return out[:], nil +} + +// Check a multisignature is valid for a multipublickey and a message +func (sig MultiSignature) verify(pk *MultiPublicKey, message []byte, signDst string) (bool, error) { + if pk == nil { + return false, fmt.Errorf("public key cannot be nil") + } + p := PublicKey{value: pk.value} + return p.verifySignature(message, &Signature{Value: sig.value}, signDst) +} + +// Deserialize a signature from a byte array in compressed form. 
+// See +// https://github.com/zcash/librustzcash/blob/master/pairing/src/bls12_381/README.md#serialization +// https://docs.rs/bls12_381/0.1.1/bls12_381/notes/serialization/index.html +// If successful, it will assign the Signature +// otherwise it will return an error +func (sig *MultiSignature) UnmarshalBinary(data []byte) error { + if len(data) != SignatureSize { + return fmt.Errorf("multi signature must be %v bytes", SignatureSize) + } + s2 := new(Signature) + err := s2.UnmarshalBinary(data) + if err != nil { + return err + } + sig.value = s2.Value + return nil +} + +// Represents accumulated multiple Public Keys in G1 for verifying a multisignature +type MultiPublicKey struct { + value bls12381.G1 +} + +// Serialize a public key to a byte array in compressed form. +// See +// https://github.com/zcash/librustzcash/blob/master/pairing/src/bls12_381/README.md#serialization +// https://docs.rs/bls12_381/0.1.1/bls12_381/notes/serialization/index.html +func (pk MultiPublicKey) MarshalBinary() ([]byte, error) { + out := pk.value.ToCompressed() + return out[:], nil +} + +// Deserialize a public key from a byte array in compressed form. +// See +// https://github.com/zcash/librustzcash/blob/master/pairing/src/bls12_381/README.md#serialization +// https://docs.rs/bls12_381/0.1.1/bls12_381/notes/serialization/index.html +// If successful, it will assign the public key +// otherwise it will return an error +func (pk *MultiPublicKey) UnmarshalBinary(data []byte) error { + if len(data) != PublicKeySize { + return fmt.Errorf("multi public key must be %v bytes", PublicKeySize) + } + p1 := new(PublicKey) + err := p1.UnmarshalBinary(data) + if err != nil { + return err + } + pk.value = p1.value + return nil +} + +// Check a multisignature is valid for a multipublickey and a message +func (pk MultiPublicKey) verify(message []byte, sig *MultiSignature, signDst string) (bool, error) { + return sig.verify(&pk, message, signDst) +} + +// PartialSignature represents threshold Gap Diffie-Hellman BLS signature +// that can be combined with other partials to yield a completed BLS signature +// See section 3.2 in +type PartialSignature struct { + Identifier byte + Signature bls12381.G2 +} + +// partialSign creates a partial signature that can be combined with other partial signatures +// to yield a complete signature +func (sks SecretKeyShare) partialSign(message []byte, signDst string) (*PartialSignature, error) { + if len(message) == 0 { + return nil, fmt.Errorf("message cannot be empty or nil") + } + p2 := new(bls12381.G2).Hash(native.EllipticPointHasherSha256(), message, []byte(signDst)) + + var blob [SecretKeySize]byte + copy(blob[:], internal.ReverseScalarBytes(sks.value)) + s, err := bls12381.Bls12381FqNew().SetBytes(&blob) + if err != nil { + return nil, err + } + result := new(bls12381.G2).Mul(p2, s) + if result.InCorrectSubgroup() == 0 { + return nil, fmt.Errorf("point is not on correct subgroup") + } + return &PartialSignature{Identifier: sks.identifier, Signature: *result}, nil +} + +// combineSigs gathers partial signatures and yields a complete signature +func combineSigs(partials []*PartialSignature) (*Signature, error) { + if len(partials) < 2 { + return nil, fmt.Errorf("must have at least 2 partial signatures") + } + if len(partials) > 255 { + return nil, fmt.Errorf("unsupported to combine more than 255 signatures") + } + + // Don't know the actual values so put the minimum + xVars, yVars, err := splitXY(partials) + if err != nil { + return nil, err + } + + sTmp := new(bls12381.G2).Identity() + 
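+ // The loop below interpolates the partial signatures at x = 0. For share i
+ // the Lagrange coefficient is prod_{j != i} (-x_j) / (x_i - x_j); num and den
+ // below are one factor of that product, and each partial signature y_i is
+ // scaled by its accumulated coefficient before being added into sig.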
sig := new(bls12381.G2).Identity() + + // Lagrange interpolation + basis := bls12381.Bls12381FqNew().SetOne() + for i, xi := range xVars { + basis.SetOne() + + for j, xj := range xVars { + if i == j { + continue + } + + num := bls12381.Bls12381FqNew().Neg(xj) // - x_m + den := bls12381.Bls12381FqNew().Sub(xi, xj) // x_j - x_m + _, wasInverted := den.Invert(den) + // wasInverted == false if den == 0 + if !wasInverted { + return nil, fmt.Errorf("signatures cannot be recombined") + } + basis.Mul(basis, num.Mul(num, den)) + } + sTmp.Mul(yVars[i], basis) + sig.Add(sig, sTmp) + } + if sig.InCorrectSubgroup() == 0 { + return nil, fmt.Errorf("signature is not in the correct subgroup") + } + + return &Signature{Value: *sig}, nil +} + +// Ensure no duplicates x values and convert x values to field elements +func splitXY(partials []*PartialSignature) ([]*native.Field, []*bls12381.G2, error) { + x := make([]*native.Field, len(partials)) + y := make([]*bls12381.G2, len(partials)) + + dup := make(map[byte]bool) + + for i, sp := range partials { + if sp == nil { + return nil, nil, fmt.Errorf("partial signature cannot be nil") + } + if _, exists := dup[sp.Identifier]; exists { + return nil, nil, fmt.Errorf("duplicate signature included") + } + if sp.Signature.InCorrectSubgroup() == 0 { + return nil, nil, fmt.Errorf("signature is not in the correct subgroup") + } + dup[sp.Identifier] = true + x[i] = bls12381.Bls12381FqNew().SetUint64(uint64(sp.Identifier)) + y[i] = &sp.Signature + } + return x, y, nil +} diff --git a/signatures/bls/bls_sig/usual_bls_sig_aug_test.go b/signatures/bls/bls_sig/usual_bls_sig_aug_test.go new file mode 100644 index 0000000..0c55d57 --- /dev/null +++ b/signatures/bls/bls_sig/usual_bls_sig_aug_test.go @@ -0,0 +1,417 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package bls_sig + +import ( + "testing" + + "github.com/sonr-io/sonr/crypto/core/curves/native/bls12381" +) + +func generateAugSignatureG2(sk *SecretKey, msg []byte, t *testing.T) *Signature { + bls := NewSigAug() + sig, err := bls.Sign(sk, msg) + if err != nil { + t.Errorf("Aug Sign failed") + } + return sig +} + +func generateAugAggregateDataG2(t *testing.T) ([]*PublicKey, []*Signature, [][]byte) { + msgs := make([][]byte, numAggregateG2) + pks := make([]*PublicKey, numAggregateG2) + sigs := make([]*Signature, numAggregateG2) + ikm := make([]byte, 32) + bls := NewSigAug() + + for i := 0; i < numAggregateG2; i++ { + readRand(ikm, t) + pk, sk, err := bls.KeygenWithSeed(ikm) + if err != nil { + t.Errorf("Aug KeyGen failed") + } + msg := make([]byte, 20) + readRand(msg, t) + sig := generateAugSignatureG2(sk, msg, t) + msgs[i] = msg + sigs[i] = sig + pks[i] = pk + } + return pks, sigs, msgs +} + +func TestAugKeyGenG2Works(t *testing.T) { + ikm := make([]byte, 32) + readRand(ikm, t) + bls := NewSigAug() + _, _, err := bls.KeygenWithSeed(ikm) + if err != nil { + t.Errorf("Aug KeyGen failed") + } +} + +func TestAugKeyGenG2Fail(t *testing.T) { + ikm := make([]byte, 10) + readRand(ikm, t) + bls := NewSigAug() + _, _, err := bls.KeygenWithSeed(ikm) + if err == nil { + t.Errorf("Aug KeyGen succeeded when it should've failed") + } +} + +func TestAugCustomDstG2(t *testing.T) { + ikm := make([]byte, 32) + readRand(ikm, t) + bls := NewSigAugWithDst("BLS_SIG_BLS12381G2_XMD:SHA-256_SSWU_RO_AUG_TEST") + pk, sk, err := bls.KeygenWithSeed(ikm) + if err != nil { + t.Errorf("Aug Custom Dst KeyGen failed") + } + + readRand(ikm, t) + sig, err := bls.Sign(sk, ikm) + if err != nil { + t.Errorf("Aug Custom Dst Sign failed") + } + + if res, _ := bls.Verify(pk, ikm, sig); !res { + t.Errorf("Aug Custon Dst Verify failed") + } + + ikm[0] = 0 + if res, _ := bls.Verify(pk, ikm, sig); res { + t.Errorf("Aug Custom Dst Verify succeeded when it should've failed.") + } +} + +func TestAugSigningG2(t *testing.T) { + ikm := make([]byte, 32) + readRand(ikm, t) + bls := NewSigAug() + pk, sk, err := bls.KeygenWithSeed(ikm) + if err != nil { + t.Errorf("Aug KeyGen failed") + } + + readRand(ikm, t) + sig := generateAugSignatureG2(sk, ikm, t) + + if res, _ := bls.Verify(pk, ikm, sig); !res { + t.Errorf("Aug Verify failed") + } + + ikm[0] += 1 + if res, _ := bls.Verify(pk, ikm, sig); res { + t.Errorf("Aug Verify succeeded when it should've failed.") + } +} + +func TestAugSignEmptyMessage(t *testing.T) { + bls := NewSigAug() + _, sk, err := bls.Keygen() + if err != nil { + t.Errorf("Aug KeyGen failed") + } + + // Sign a nil message + _, err = bls.Sign(sk, nil) + if err == nil { + t.Errorf("Expected sign of nil message to fail") + } +} + +func TestAugSignNilMessage(t *testing.T) { + bls := NewSigAug() + _, sk, err := bls.Keygen() + if err != nil { + t.Errorf("Aug KeyGen failed") + } + + // Sign an empty message + _, err = bls.Sign(sk, []byte{}) + if err == nil { + t.Errorf("Expected sign of empty message to fail") + } +} + +func TestAugAggregateVerifyG2Works(t *testing.T) { + pks, sigs, msgs := generateAugAggregateDataG2(t) + bls := NewSigAug() + + if res, _ := bls.AggregateVerify(pks, msgs, sigs); !res { + t.Errorf("Aug AggregateVerify failed") + } +} + +func TestAugAggregateVerifyG2BadPks(t *testing.T) { + bls := NewSigAug() + pks, sigs, msgs := generateAugAggregateDataG2(t) + + if res, _ := bls.AggregateVerify(pks, msgs, sigs); !res { + t.Errorf("Aug AggregateVerify failed") + } + + 
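+ // Swap in another signer's public key: sigs[0] no longer corresponds to
+ // (pks[0], msgs[0]), so aggregate verification must reject.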
pks[0] = pks[1] + + if res, _ := bls.AggregateVerify(pks, msgs, sigs); res { + t.Errorf("Aug AggregateVerify succeeded when it should've failed") + } + + // Try a zero key to make sure it doesn't crash + pkValue := new(bls12381.G1).Identity() + pks[0] = &PublicKey{value: *pkValue} + + if res, _ := bls.AggregateVerify(pks, msgs, sigs); res { + t.Errorf("Aug AggregateVerify succeeded with zero byte public key it should've failed") + } + + // Try with base generator + pkValue.Generator() + pks[0] = &PublicKey{value: *pkValue} + if res, _ := bls.AggregateVerify(pks, msgs, sigs); res { + t.Errorf( + "Aug aggregateVerify succeeded with the base generator public key it should've failed", + ) + } +} + +func TestAugAggregateVerifyG2BadSigs(t *testing.T) { + bls := NewSigAug() + pks, sigs, msgs := generateAugAggregateDataG2(t) + + if res, _ := bls.AggregateVerify(pks, msgs, sigs); !res { + t.Errorf("Aug aggregateVerify failed") + } + + sigs[0] = sigs[1] + + if res, _ := bls.AggregateVerify(pks, msgs, sigs); res { + t.Errorf("Aug aggregateVerify succeeded when it should've failed") + } + + // Try a zero key to make sure it doesn't crash + sigValue := new(bls12381.G2).Identity() + sigs[0] = &Signature{Value: *sigValue} + + if res, _ := bls.AggregateVerify(pks, msgs, sigs); res { + t.Errorf("Aug aggregateVerify succeeded with zero byte signature it should've failed") + } + + // Try with base generator + sigValue.Generator() + sigs[0] = &Signature{Value: *sigValue} + if res, _ := bls.AggregateVerify(pks, msgs, sigs); res { + t.Errorf( + "Aug aggregateVerify succeeded with the base generator signature it should've failed", + ) + } +} + +func TestAugAggregateVerifyG2BadMsgs(t *testing.T) { + bls := NewSigAug() + pks, sigs, msgs := generateAugAggregateDataG2(t) + + if res, _ := bls.AggregateVerify(pks, msgs, sigs); !res { + t.Errorf("Aug aggregateVerify failed") + } + + // Test len(pks) != len(msgs) + if res, _ := bls.AggregateVerify(pks, msgs[0:8], sigs); res { + t.Errorf("Aug aggregateVerify succeeded when it should've failed") + } + + msgs[0] = msgs[1] + + if res, _ := bls.AggregateVerify(pks, msgs, sigs); res { + t.Errorf("Aug aggregateVerify succeeded when it should've failed") + } +} + +func TestAugAggregateVerifyG2DupMsg(t *testing.T) { + bls := NewSigAug() + + // Only two messages but repeated + messages := make([][]byte, numAggregateG2) + messages[0] = []byte("Yes") + messages[1] = []byte("No") + for i := 2; i < numAggregateG2; i++ { + messages[i] = messages[i%2] + } + + pks := make([]*PublicKey, numAggregateG2) + sigs := make([]*Signature, numAggregateG2) + + ikm := make([]byte, 32) + for i := 0; i < numAggregateG2; i++ { + readRand(ikm, t) + pk, sk, err := bls.KeygenWithSeed(ikm) + if err != nil { + t.Errorf("Aug KeyGen failed") + } + + sig := generateAugSignatureG2(sk, messages[i], t) + pks[i] = pk + sigs[i] = sig + } + + if res, _ := bls.AggregateVerify(pks, messages, sigs); !res { + t.Errorf("Aug aggregateVerify failed for duplicate messages") + } +} + +func TestBlsAugG2KeyGen(t *testing.T) { + bls := NewSigAug() + _, _, err := bls.Keygen() + if err != nil { + t.Errorf("Keygen failed: %v", err) + } +} + +func TestAugThresholdKeygenBadInputs(t *testing.T) { + bls := NewSigAug() + _, _, err := bls.ThresholdKeygen(0, 0) + if err == nil { + t.Errorf("ThresholdKeygen should've failed but succeeded") + } + _, _, err = bls.ThresholdKeygen(1, 0) + if err == nil { + t.Errorf("ThresholdKeygen should've failed but succeeded") + } + _, _, err = bls.ThresholdKeygen(3, 2) + if err == nil { + 
t.Errorf("ThresholdKeygen should've failed but succeeded") + } +} + +func TestAugThresholdKeygen(t *testing.T) { + bls := NewSigAug() + _, sks, err := bls.ThresholdKeygen(3, 5) + if err != nil { + t.Errorf("ThresholdKeygen failed") + } + if len(sks) != 5 { + t.Errorf("ThresholdKeygen did not produce enough shares") + } +} + +func TestAugPartialSign(t *testing.T) { + ikm := make([]byte, 32) + bls := NewSigAug() + pk, sks, err := bls.ThresholdKeygenWithSeed(ikm, 2, 4) + if err != nil { + t.Errorf("ThresholdKeygen failed") + } + msg := make([]byte, 10) + sig1, err := bls.PartialSign(sks[0], pk, msg) + if err != nil { + t.Errorf("partialSign failed: %v", err) + } + sig2, err := bls.PartialSign(sks[1], pk, msg) + if err != nil { + t.Errorf("partialSign failed: %v", err) + } + sig, err := bls.CombineSignatures(sig1, sig2) + if err != nil { + t.Errorf("CombineSignatures failed: %v", err) + } + + if res, _ := bls.Verify(pk, msg, sig); !res { + t.Errorf("Combined signature does not verify") + } + + sig, err = bls.CombineSignatures(sig1) + if err == nil { + t.Errorf("CombineSignatures succeeded when it should've failed") + } + if res, _ := bls.Verify(pk, msg, sig); res { + t.Errorf("Combined signature verify succeeded when it should've failed") + } +} + +// Ensure that mixed partial signatures from distinct origins create invalid composite signatures +func TestAugPartialMixupShares(t *testing.T) { + total := uint(5) + ikm := make([]byte, 32) + bls := NewSigAug() + pk1, sks1, err := bls.ThresholdKeygenWithSeed(ikm, 3, total) + if err != nil { + t.Errorf("ThresholdKeygen failed: %v", err) + } + for i := range ikm { + ikm[i] = 1 + } + pk2, sks2, err := bls.ThresholdKeygenWithSeed(ikm, 3, total) + if err != nil { + t.Errorf("ThresholdKeygen failed: %v", err) + } + + // Generate partial signatures for both sets of keys + msg := make([]byte, 10) + sigs1 := make([]*PartialSignature, total) + sigs2 := make([]*PartialSignature, total) + for i := range sks1 { + sigs1[i], err = bls.PartialSign(sks1[i], pk1, msg) + if err != nil { + t.Errorf("PartialSign failed: %v", err) + } + sigs2[i], err = bls.PartialSign(sks2[i], pk2, msg) + if err != nil { + t.Errorf("PartialSign failed: %v", err) + } + } + + // Try combining 2 from group 1 and 2 from group 2 + sig, err := bls.CombineSignatures(sigs1[0], sigs1[1], sigs2[2], sigs2[3]) + if err != nil { + t.Errorf("CombineSignatures failed: %v", err) + } + // Signature shouldn't validate + if res, _ := bls.Verify(pk1, msg, sig); res { + t.Errorf( + "CombineSignatures worked with different shares of two secret keys for the same message", + ) + } + if res, _ := bls.Verify(pk2, msg, sig); res { + t.Errorf( + "CombineSignatures worked with different shares of two secret keys for the same message", + ) + } + // Should error out due to duplicate identifiers + _, err = bls.CombineSignatures(sigs1[0], sigs1[1], sigs2[0], sigs2[1]) + if err == nil { + t.Errorf("CombineSignatures expected to fail but succeeded.") + } +} + +func TestAugPartialSignEmptyMessage(t *testing.T) { + bls := NewSigAug() + pk, sks, err := bls.ThresholdKeygen(2, 2) + if err != nil { + t.Errorf("ThresholdKeygen failed") + } + + // Test signing an empty message + _, err = bls.PartialSign(sks[0], pk, []byte{}) + if err == nil { + t.Errorf("Expected partial sign of empty message to fail") + } +} + +func TestAugPartialSignNilMessage(t *testing.T) { + bls := NewSigAug() + pk, sks, err := bls.ThresholdKeygen(7, 7) + if err != nil { + t.Errorf("ThresholdKeygen failed") + } + + // Test signing a nil message + _, err = 
bls.PartialSign(sks[0], pk, nil) + if err == nil { + t.Errorf("Expected partial sign of nil message to fail") + } +} diff --git a/signatures/bls/bls_sig/usual_bls_sig_basic_test.go b/signatures/bls/bls_sig/usual_bls_sig_basic_test.go new file mode 100644 index 0000000..91c332d --- /dev/null +++ b/signatures/bls/bls_sig/usual_bls_sig_basic_test.go @@ -0,0 +1,417 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package bls_sig + +import ( + "testing" + + "github.com/sonr-io/sonr/crypto/core/curves/native/bls12381" +) + +func generateBasicSignatureG2(sk *SecretKey, msg []byte, t *testing.T) *Signature { + bls := NewSigBasic() + sig, err := bls.Sign(sk, msg) + if err != nil { + t.Errorf("Basic Sign failed") + } + return sig +} + +func generateBasicAggregateDataG2(t *testing.T) ([]*PublicKey, []*Signature, [][]byte) { + msgs := make([][]byte, numAggregateG2) + pks := make([]*PublicKey, numAggregateG2) + sigs := make([]*Signature, numAggregateG2) + ikm := make([]byte, 32) + bls := NewSigBasic() + + for i := 0; i < numAggregateG2; i++ { + readRand(ikm, t) + pk, sk, err := bls.KeygenWithSeed(ikm) + if err != nil { + t.Errorf("Basic KeyGen failed") + } + msg := make([]byte, 20) + readRand(msg, t) + sig := generateBasicSignatureG2(sk, msg, t) + msgs[i] = msg + sigs[i] = sig + pks[i] = pk + } + return pks, sigs, msgs +} + +func TestBasicKeyGenG2Works(t *testing.T) { + ikm := make([]byte, 32) + readRand(ikm, t) + bls := NewSigBasic() + _, _, err := bls.KeygenWithSeed(ikm) + if err != nil { + t.Errorf("Basic KeyGen failed") + } +} + +func TestBasicKeyGenG2Fail(t *testing.T) { + ikm := make([]byte, 10) + readRand(ikm, t) + bls := NewSigBasic() + _, _, err := bls.KeygenWithSeed(ikm) + if err == nil { + t.Errorf("Basic KeyGen succeeded when it should've failed") + } +} + +func TestBasicCustomDstG2(t *testing.T) { + ikm := make([]byte, 32) + readRand(ikm, t) + bls := NewSigBasicWithDst("BLS_SIG_BLS12381G2_XMD:SHA-256_SSWU_RO_NUL_TEST") + pk, sk, err := bls.KeygenWithSeed(ikm) + if err != nil { + t.Errorf("Basic Custom Dst KeyGen failed") + } + + readRand(ikm, t) + sig, err := bls.Sign(sk, ikm) + if err != nil { + t.Errorf("Basic Custom Dst Sign failed") + } + + if res, _ := bls.Verify(pk, ikm, sig); !res { + t.Errorf("Basic Custon Dst verify failed") + } + + ikm[0] += 1 + if res, _ := bls.Verify(pk, ikm, sig); res { + t.Errorf("Basic Custom Dst verify succeeded when it should've failed.") + } +} + +func TestBasicSigningG2(t *testing.T) { + ikm := make([]byte, 32) + readRand(ikm, t) + bls := NewSigBasic() + pk, sk, err := bls.KeygenWithSeed(ikm) + if err != nil { + t.Errorf("Basic KeyGen failed") + } + + readRand(ikm, t) + sig := generateBasicSignatureG2(sk, ikm, t) + + if res, _ := bls.Verify(pk, ikm, sig); !res { + t.Errorf("Basic verify failed") + } + + ikm[0] += 1 + if res, _ := bls.Verify(pk, ikm, sig); res { + t.Errorf("Basic verify succeeded when it should've failed.") + } +} + +func TestBasicSigningG2EmptyMessage(t *testing.T) { + // So basic + bls := NewSigBasic() + _, sk, err := bls.Keygen() + if err != nil { + t.Errorf("Basic KeyGen failed") + } + + // Sign an empty message + _, err = bls.Sign(sk, []byte{}) + if err != nil { + t.Errorf("Expected signing empty message to succeed: %v", err) + } +} + +func TestBasicSigningG2NilMessage(t *testing.T) { + // So basic + bls := NewSigBasic() + _, sk, err := bls.Keygen() + if err != nil { + t.Errorf("Basic KeyGen failed") + } + + // Sign an empty message + _, err = bls.Sign(sk, nil) + if err == 
nil { + t.Errorf("Expected signing nil message to fail") + } +} + +func TestBasicAggregateVerifyG2Works(t *testing.T) { + pks, sigs, msgs := generateBasicAggregateDataG2(t) + bls := NewSigBasic() + + if res, _ := bls.AggregateVerify(pks, msgs, sigs); !res { + t.Errorf("Basic AggregateVerify failed") + } +} + +func TestBasicAggregateVerifyG2BadPks(t *testing.T) { + bls := NewSigBasic() + pks, sigs, msgs := generateBasicAggregateDataG2(t) + + if res, _ := bls.AggregateVerify(pks, msgs, sigs); !res { + t.Errorf("Basic AggregateVerify failed") + } + + pks[0] = pks[1] + + if res, _ := bls.AggregateVerify(pks, msgs, sigs); res { + t.Errorf("Basic AggregateVerify succeeded when it should've failed") + } + + // Try a zero key to make sure it doesn't crash + pkValue := new(bls12381.G1).Identity() + pks[0] = &PublicKey{value: *pkValue} + + if res, _ := bls.AggregateVerify(pks, msgs, sigs); res { + t.Errorf("Basic AggregateVerify succeeded with zero byte public key it should've failed") + } + + // Try with base generator + pkValue.Generator() + pks[0] = &PublicKey{value: *pkValue} + if res, _ := bls.AggregateVerify(pks, msgs, sigs); res { + t.Errorf( + "Basic aggregateVerify succeeded with the base generator public key it should've failed", + ) + } +} + +func TestBasicAggregateVerifyG2BadSigs(t *testing.T) { + bls := NewSigBasic() + pks, sigs, msgs := generateBasicAggregateDataG2(t) + + if res, _ := bls.AggregateVerify(pks, msgs, sigs); !res { + t.Errorf("Basic aggregateVerify failed") + } + + sigs[0] = sigs[1] + + if res, _ := bls.AggregateVerify(pks, msgs, sigs); res { + t.Errorf("Basic aggregateVerify succeeded when it should've failed") + } + + // Try a zero key to make sure it doesn't crash + sigValue := new(bls12381.G2).Identity() + sigs[0] = &Signature{Value: *sigValue} + + if res, _ := bls.AggregateVerify(pks, msgs, sigs); res { + t.Errorf("Basic aggregateVerify succeeded with zero byte signature it should've failed") + } + + // Try with base generator + sigValue.Generator() + sigs[0] = &Signature{Value: *sigValue} + if res, _ := bls.AggregateVerify(pks, msgs, sigs); res { + t.Errorf( + "Basic aggregateVerify succeeded with the base generator signature it should've failed", + ) + } +} + +func TestBasicAggregateVerifyG2BadMsgs(t *testing.T) { + bls := NewSigBasic() + pks, sigs, msgs := generateBasicAggregateDataG2(t) + + if res, _ := bls.AggregateVerify(pks, msgs, sigs); !res { + t.Errorf("Basic aggregateVerify failed") + } + + msgs[0] = msgs[1] + + if res, _ := bls.AggregateVerify(pks, msgs, sigs); res { + t.Errorf("Basic aggregateVerify succeeded when it should've failed") + } +} + +func TestBasicThresholdKeygenBadInputs(t *testing.T) { + bls := NewSigBasic() + _, _, err := bls.ThresholdKeygen(0, 0) + if err == nil { + t.Errorf("ThresholdKeygen should've failed but succeeded") + } + _, _, err = bls.ThresholdKeygen(1, 0) + if err == nil { + t.Errorf("ThresholdKeygen should've failed but succeeded") + } + _, _, err = bls.ThresholdKeygen(3, 2) + if err == nil { + t.Errorf("ThresholdKeygen should've failed but succeeded") + } +} + +func TestBasicThresholdKeygen(t *testing.T) { + bls := NewSigBasic() + _, sks, err := bls.ThresholdKeygen(3, 5) + if err != nil { + t.Errorf("ThresholdKeygen failed") + } + if len(sks) != 5 { + t.Errorf("ThresholdKeygen did not produce enough shares") + } +} + +func TestBasicPartialSign(t *testing.T) { + ikm := make([]byte, 32) + bls := NewSigBasic() + pk, sks, err := bls.ThresholdKeygenWithSeed(ikm, 2, 4) + if err != nil { + t.Errorf("ThresholdKeygen failed") + } + 
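+	// The rest of this test exercises the 2-of-4 threshold flow set up above:
+	// two distinct shares each produce a partial signature, the partials are
+	// combined into an ordinary signature, and that signature verifies under
+	// the single group public key, while combining fewer than the threshold
+	// of shares must fail. In sketch form (mirroring the calls below):
+	//
+	//	p1, _ := bls.PartialSign(sks[0], msg)
+	//	p2, _ := bls.PartialSign(sks[1], msg)
+	//	sig, _ := bls.CombineSignatures(p1, p2) // needs >= 2 distinct shares
+	//	ok, _ := bls.Verify(pk, msg, sig)       // ok == true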
msg := make([]byte, 10) + sig1, err := bls.PartialSign(sks[0], msg) + if err != nil { + t.Errorf("partialSign failed: %v", err) + } + sig2, err := bls.PartialSign(sks[1], msg) + if err != nil { + t.Errorf("partialSign failed: %v", err) + } + sig, err := bls.CombineSignatures(sig1, sig2) + if err != nil { + t.Errorf("CombineSignatures failed: %v", err) + } + + if res, _ := bls.Verify(pk, msg, sig); !res { + t.Errorf("Combined signature does not verify") + } + + sig, err = bls.CombineSignatures(sig1) + if err == nil { + t.Errorf("CombineSignatures succeeded when it should've failed") + } + if res, _ := bls.Verify(pk, msg, sig); res { + t.Errorf("Combined signature verify succeeded when it should've failed") + } +} + +// Ensure that duplicate partial signatures cannot be used to create a complete one +func TestBasicPartialDuplicateShares(t *testing.T) { + total := uint(5) + ikm := make([]byte, 32) + bls := NewSigBasic() + pk1, sks1, err := bls.ThresholdKeygenWithSeed(ikm, 3, total) + if err != nil { + t.Errorf("ThresholdKeygen failed: %v", err) + } + + // Generate partial signatures for both sets of keys + msg := make([]byte, 10) + sigs1 := make([]*PartialSignature, total) + for i := range sks1 { + sigs1[i], err = bls.PartialSign(sks1[i], msg) + if err != nil { + t.Errorf("PartialSign failed: %v", err) + } + } + + // Try combining duplicates from group 1 + sig, err := bls.CombineSignatures(sigs1[0], sigs1[0], sigs1[1], sigs1[1]) + if err == nil { + t.Errorf("CombineSignatures expected to fail but succeeded") + } + // Signature shouldn't validate + if res, _ := bls.Verify(pk1, msg, sig); res { + t.Errorf("CombineSignatures worked with duplicate partial signatures") + } +} + +// Ensure that mixed partial signatures from distinct origins create invalid composite signatures +func TestBasicPartialMixupShares(t *testing.T) { + total := uint(5) + ikm := make([]byte, 32) + bls := NewSigBasic() + pk1, sks1, err := bls.ThresholdKeygenWithSeed(ikm, 3, total) + if err != nil { + t.Errorf("ThresholdKeygen failed: %v", err) + } + for i := range ikm { + ikm[i] = 1 + } + pk2, sks2, err := bls.ThresholdKeygenWithSeed(ikm, 3, total) + if err != nil { + t.Errorf("ThresholdKeygen failed: %v", err) + } + + // Generate partial signatures for both sets of keys + msg := make([]byte, 10) + sigs1 := make([]*PartialSignature, total) + sigs2 := make([]*PartialSignature, total) + for i := range sks1 { + sigs1[i], err = bls.PartialSign(sks1[i], msg) + if err != nil { + t.Errorf("PartialSign failed: %v", err) + } + sigs2[i], err = bls.PartialSign(sks2[i], msg) + if err != nil { + t.Errorf("PartialSign failed: %v", err) + } + } + + // Try combining 2 from group 1 and 2 from group 2 + sig, err := bls.CombineSignatures(sigs1[0], sigs1[1], sigs2[2], sigs2[3]) + if err != nil { + t.Errorf("CombineSignatures failed: %v", err) + } + // Signature shouldn't validate + if res, _ := bls.Verify(pk1, msg, sig); res { + t.Errorf( + "CombineSignatures worked with different shares of two secret keys for the same message", + ) + } + if res, _ := bls.Verify(pk2, msg, sig); res { + t.Errorf( + "CombineSignatures worked with different shares of two secret keys for the same message", + ) + } + // Should error out due to duplicate identifiers + _, err = bls.CombineSignatures(sigs1[0], sigs1[1], sigs2[0], sigs2[1]) + if err == nil { + t.Errorf("CombineSignatures expected to fail but succeeded.") + } +} + +func TestIdentityPublicKey(t *testing.T) { + bls := NewSigBasic() + _, sk, err := bls.Keygen() + if err != nil { + t.Errorf("Keygen failed: 
%v", err) + } + msg := []byte{0, 0, 0, 0} + sig, _ := bls.Sign(sk, msg) + pk := PublicKey{value: *new(bls12381.G1).Identity()} + if res, _ := bls.Verify(&pk, msg, sig); res { + t.Errorf("Verify succeeded when the public key is the identity.") + } +} + +func TestThresholdSignTooHighAndLow(t *testing.T) { + bls := NewSigBasic() + _, sks, err := bls.ThresholdKeygen(3, 5) + if err != nil { + t.Errorf("ThresholdKeygen failed: %v", err) + } + msg := make([]byte, 10) + + ps, err := bls.PartialSign(sks[0], msg) + if err != nil { + t.Errorf("PartialSign failed: %v", err) + } + + _, err = bls.CombineSignatures(ps) + if err == nil { + t.Errorf("CombinSignatures succeeded when it should've failed") + } + + pss := make([]*PartialSignature, 256) + + _, err = bls.CombineSignatures(pss...) + if err == nil { + t.Errorf("CombinSignatures succeeded when it should've failed") + } +} diff --git a/signatures/bls/bls_sig/usual_bls_sig_pop_test.go b/signatures/bls/bls_sig/usual_bls_sig_pop_test.go new file mode 100644 index 0000000..d5230ce --- /dev/null +++ b/signatures/bls/bls_sig/usual_bls_sig_pop_test.go @@ -0,0 +1,936 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package bls_sig + +import ( + "bytes" + "math/big" + "math/rand" + "testing" + + "github.com/sonr-io/sonr/crypto/core/curves/native/bls12381" +) + +const numAggregateG2 = 10 + +func TestGetPublicKeyG1(t *testing.T) { + sk := genSecretKey(t) + pk := genPublicKey(sk, t) + actual := marshalStruct(pk, t) + expected := []byte{ + 166, + 149, + 173, + 50, + 93, + 252, + 126, + 17, + 145, + 251, + 201, + 241, + 134, + 245, + 142, + 255, + 66, + 166, + 52, + 2, + 151, + 49, + 177, + 131, + 128, + 255, + 137, + 191, + 66, + 196, + 100, + 164, + 44, + 184, + 202, + 85, + 178, + 0, + 240, + 81, + 245, + 127, + 30, + 24, + 147, + 198, + 135, + 89, + } + if !bytes.Equal(actual, expected) { + t.Errorf("Expected GetPublicKey to pass but failed.") + } +} + +func testSignG2(message []byte, t *testing.T) { + sk := genSecretKey(t) + sig := genSignature(sk, message, t) + pk := genPublicKey(sk, t) + + bls := NewSigPop() + + if res, err := bls.Verify(pk, message, sig); !res { + t.Errorf("createSignature failed when it should've passed: %v", err) + } +} + +func TestSignG2EmptyMessage(t *testing.T) { + bls := NewSigPop() + sk := genSecretKey(t) + sig, _ := bls.Sign(sk, nil) + pk := genPublicKey(sk, t) + + if res, _ := bls.Verify(pk, nil, sig); res { + t.Errorf("createSignature succeeded when it should've failed") + } + + message := []byte{} + sig = genSignature(sk, message, t) + + if res, err := bls.Verify(pk, message, sig); !res { + t.Errorf("create and verify failed on empty message: %v", err) + } +} + +func TestSignG2OneByteMessage(t *testing.T) { + message := []byte{1} + testSignG2(message, t) +} + +func TestSignG2LargeMessage(t *testing.T) { + message := make([]byte, 1048576) + testSignG2(message, t) +} + +func TestSignG2RandomMessage(t *testing.T) { + message := make([]byte, 65537) + readRand(message, t) + testSignG2(message, t) +} + +func TestSignG2BadMessage(t *testing.T) { + message := make([]byte, 1024) + sk := genSecretKey(t) + sig := genSignature(sk, message, t) + pk := genPublicKey(sk, t) + message = []byte{1, 2, 3, 4, 5, 6, 7, 8, 9, 0} + + bls := NewSigPop() + + if res, _ := bls.Verify(pk, message, sig); res { + t.Errorf("Expected signature to not verify") + } +} + +func TestBadConversionsG2(t *testing.T) { + sk := genSecretKey(t) + message := []byte{1, 2, 3, 4, 5, 6, 7, 8, 9, 0} + sig := 
genSignature(sk, message, t) + pk := genPublicKey(sk, t) + + bls := NewSigPop() + + if res, _ := bls.Verify(pk, message, sig); !res { + t.Errorf("Signature should be valid") + } + if res, _ := sig.verify(pk, message, blsSignaturePopDst); !res { + t.Errorf("Signature should be valid") + } + + // Convert public key to signature in G2 + sig2 := new(SignatureVt) + err := sig2.UnmarshalBinary(marshalStruct(pk, t)) + if err != nil { + t.Errorf("Should be able to convert to signature in G2") + } + pk2 := new(PublicKeyVt) + err = pk2.UnmarshalBinary(marshalStruct(sig, t)) + if err != nil { + t.Errorf("Should be able to convert to public key in G1") + } + + if res, _ := pk2.verifySignatureVt(message, sig2, blsSignaturePopDst); res { + t.Errorf("The signature shouldn't verify") + } +} + +func TestAggregatePublicKeysG1(t *testing.T) { + pks := []*PublicKey{} + ikm := make([]byte, 32) + for i := 0; i < 20; i++ { + readRand(ikm, t) + sk := genRandSecretKey(ikm, t) + pk := genPublicKey(sk, t) + pks = append(pks, pk) + } + apk1, err := aggregatePublicKeys(pks...) + if err != nil { + t.Errorf("%v", err) + } + rng := rand.New(rand.NewSource(1234567890)) + rng.Shuffle(len(pks), func(i, j int) { pks[i], pks[j] = pks[j], pks[i] }) + apk2, err := aggregatePublicKeys(pks...) + if err != nil { + t.Errorf("%v", err) + } + if !bytes.Equal(marshalStruct(apk1, t), marshalStruct(apk2, t)) { + t.Errorf("Aggregated public keys should be equal") + } + rand.Shuffle(len(pks), func(i, j int) { pks[i], pks[j] = pks[j], pks[i] }) + apk1, err = aggregatePublicKeys(pks...) + if err != nil { + t.Errorf("%v", err) + } + if !bytes.Equal(marshalStruct(apk1, t), marshalStruct(apk2, t)) { + t.Errorf("Aggregated public keys should be equal") + } +} + +func TestAggregateSignaturesG2(t *testing.T) { + var sigs []*Signature + ikm := make([]byte, 32) + for i := 0; i < 20; i++ { + readRand(ikm, t) + sk := genRandSecretKey(ikm, t) + sig := genSignature(sk, ikm, t) + sigs = append(sigs, sig) + } + asig1, err := aggregateSignatures(sigs...) + if err != nil { + t.Errorf("%v", err) + } + rng2 := rand.New(rand.NewSource(1234567890)) + rng2.Shuffle(len(sigs), func(i, j int) { sigs[i], sigs[j] = sigs[j], sigs[i] }) + asig2, err := aggregateSignatures(sigs...) + if err != nil { + t.Errorf("%v", err) + } + if !bytes.Equal(marshalStruct(asig1, t), marshalStruct(asig2, t)) { + t.Errorf("Aggregated signatures should be equal") + } + rand.Shuffle(len(sigs), func(i, j int) { sigs[i], sigs[j] = sigs[j], sigs[i] }) + asig1, err = aggregateSignatures(sigs...) + if err != nil { + t.Errorf("%v", err) + } + if !bytes.Equal(marshalStruct(asig1, t), marshalStruct(asig2, t)) { + t.Errorf("Aggregated signatures should be equal") + } +} + +func initAggregatedTestValuesG2(messages [][]byte, t *testing.T) ([]*PublicKey, []*Signature) { + pks := []*PublicKey{} + sigs := []*Signature{} + ikm := make([]byte, 32) + for i := 0; i < numAggregateG2; i++ { + readRand(ikm, t) + sk := genRandSecretKey(ikm, t) + sig := genSignature(sk, messages[i%len(messages)], t) + sigs = append(sigs, sig) + pk := genPublicKey(sk, t) + pks = append(pks, pk) + } + return pks, sigs +} + +func TestAggregatedFunctionalityG2(t *testing.T) { + message := make([]byte, 20) + messages := make([][]byte, 1) + messages[0] = message + pks, sigs := initAggregatedTestValuesG2(messages, t) + + bls := NewSigPop() + asig, err := bls.AggregateSignatures(sigs...) + if err != nil { + t.Errorf("%v", err) + } + apk, err := bls.AggregatePublicKeys(pks...) 
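+	// All signers above signed the same message, so once aggregation succeeds
+	// the single (apk, asig) pair is verified below via VerifyMultiSignature.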
+ if err != nil { + t.Errorf("%v", err) + } + if res, _ := bls.VerifyMultiSignature(apk, message, asig); !res { + t.Errorf("Should verify aggregated signatures with same message") + } + if res, _ := asig.verify(apk, message, blsSignaturePopDst); !res { + t.Errorf("MultiSignature.verify failed.") + } + if res, _ := apk.verify(message, asig, blsSignaturePopDst); !res { + t.Errorf("MultiPublicKey.verify failed.") + } +} + +func TestBadAggregatedFunctionalityG2(t *testing.T) { + message := make([]byte, 20) + messages := make([][]byte, 1) + messages[0] = message + pks, sigs := initAggregatedTestValuesG2(messages, t) + + bls := NewSigPop() + + apk, err := bls.AggregatePublicKeys(pks[2:]...) + if err != nil { + t.Errorf("%v", err) + } + asig, err := bls.AggregateSignatures(sigs...) + if err != nil { + t.Errorf("%v", err) + } + + if res, _ := bls.VerifyMultiSignature(apk, message, asig); res { + t.Errorf( + "Should not verify aggregated signatures with same message when some public keys are missing", + ) + } + + apk, err = bls.AggregatePublicKeys(pks...) + if err != nil { + t.Errorf("%v", err) + } + asig, err = bls.AggregateSignatures(sigs[2:]...) + if err != nil { + t.Errorf("%v", err) + } + if res, _ := bls.VerifyMultiSignature(apk, message, asig); res { + t.Errorf( + "Should not verify aggregated signatures with same message when some signatures are missing", + ) + } + + asig, err = bls.AggregateSignatures(sigs...) + if err != nil { + t.Errorf("%v", err) + } + + badmsg := []byte{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20} + if res, _ := bls.VerifyMultiSignature(apk, badmsg, asig); res { + t.Errorf("Should not verify aggregated signature with bad message") + } +} + +func TestAggregateVerifyG2Pass(t *testing.T) { + messages := make([][]byte, numAggregateG2) + for i := 0; i < numAggregateG2; i++ { + message := make([]byte, 20) + readRand(message, t) + messages[i] = message + } + pks, sigs := initAggregatedTestValuesG2(messages, t) + bls := NewSigPop() + if res, _ := bls.AggregateVerify(pks, messages, sigs); !res { + t.Errorf("Expected aggregateVerify to pass but failed") + } +} + +func TestAggregateVerifyG2MsgSigCntMismatch(t *testing.T) { + messages := make([][]byte, 8) + for i := 0; i < 8; i++ { + message := make([]byte, 20) + readRand(message, t) + messages[i] = message + } + + pks, sigs := initAggregatedTestValuesG2(messages, t) + bls := NewSigPop() + if res, _ := bls.AggregateVerify(pks, messages, sigs); res { + t.Errorf("Expected AggregateVerifyG2 to fail with duplicate message but passed") + } +} + +func TestAggregateVerifyG2FailDupMsg(t *testing.T) { + messages := make([][]byte, 10) + for i := 0; i < 9; i++ { + message := make([]byte, 20) + readRand(message, t) + messages[i] = message + } + // Duplicate message + messages[9] = messages[0] + pks, sigs := initAggregatedTestValuesG2(messages, t) + bls := NewSigPop() + if res, _ := bls.AggregateVerify(pks, messages, sigs); res { + t.Errorf("Expected aggregateVerify to fail with duplicate message but passed") + } +} + +func TestAggregateVerifyG2FailIncorrectMsg(t *testing.T) { + messages := make([][]byte, 10) + for i := 0; i < 9; i++ { + message := make([]byte, 20) + readRand(message, t) + messages[i] = message + } + // Duplicate message + messages[9] = messages[0] + pks, sigs := initAggregatedTestValuesG2(messages, t) + bls := NewSigPop() + if res, _ := bls.AggregateVerify(pks[2:], messages[2:], sigs); res { + t.Errorf("Expected aggregateVerify to fail with duplicate message but passed") + } +} + +func 
TestAggregateVerifyG2OneMsg(t *testing.T) { + messages := make([][]byte, 1) + messages[0] = make([]byte, 20) + sk := genSecretKey(t) + sig := genSignature(sk, messages[0], t) + pk := genPublicKey(sk, t) + bls := NewSigPop() + // Should be the same as verifySignature + if res, _ := bls.AggregateVerify([]*PublicKey{pk}, messages, []*Signature{sig}); !res { + t.Errorf("Expected AggregateVerifyG2OneMsg to pass but failed") + } +} + +func TestVerifyG2Mutability(t *testing.T) { + // verify should not change any inputs + ikm := make([]byte, 32) + ikm_copy := make([]byte, 32) + readRand(ikm, t) + copy(ikm_copy, ikm) + bls := NewSigPop() + pk, sk, err := bls.KeygenWithSeed(ikm) + + if !bytes.Equal(ikm, ikm_copy) { + t.Errorf("SigPop.KeygenWithSeed modifies ikm") + } + if err != nil { + t.Errorf("Expected KeygenWithSeed to succeed but failed.") + } + sig, err := bls.Sign(sk, ikm) + if !bytes.Equal(ikm, ikm_copy) { + t.Errorf("SigPop.Sign modifies message") + } + if err != nil { + t.Errorf("SigPop.KeygenWithSeed to succeed but failed.") + } + sigCopy := marshalStruct(sig, t) + if res, _ := bls.Verify(pk, ikm, sig); !res { + t.Errorf("Expected verify to succeed but failed.") + } + if !bytes.Equal(ikm, ikm_copy) { + t.Errorf("SigPop.verify modifies message") + } + if !bytes.Equal(sigCopy, marshalStruct(sig, t)) { + t.Errorf("SigPop.verify modifies signature") + } +} + +func TestPublicKeyG1FromBadBytes(t *testing.T) { + pk := make([]byte, 32) + err := new(PublicKey).UnmarshalBinary(pk) + if err == nil { + t.Errorf("Expected PublicKeyG1FromBytes to fail but passed") + } + // All zeros + pk = make([]byte, PublicKeySize) + // See https://github.com/zcash/librustzcash/blob/master/pairing/src/bls12_381/README.md#serialization + // 1 << 7 == compressed + // 1 << 6 == infinity or zero + pk[0] = 0xc0 + err = new(PublicKey).UnmarshalBinary(pk) + if err == nil { + t.Errorf("Expected PublicKeyG1FromBytes to fail but passed") + } + sk := genSecretKey(t) + pk1, err := sk.GetPublicKey() + if err != nil { + t.Errorf("Expected GetPublicKey to pass but failed.") + } + out := marshalStruct(pk1, t) + out[3] += 1 + err = new(PublicKey).UnmarshalBinary(pk) + if err == nil { + t.Errorf("Expected PublicKeyG1FromBytes to fail but passed") + } +} + +func TestSignatureG2FromBadBytes(t *testing.T) { + sig := make([]byte, 32) + err := new(Signature).UnmarshalBinary(sig) + if err == nil { + t.Errorf("Expected SignatureG2FromBytes to fail but passed") + } + // All zeros + sig = make([]byte, SignatureSize) + // See https://github.com/zcash/librustzcash/blob/master/pairing/src/bls12_381/README.md#serialization + // 1 << 7 == compressed + // 1 << 6 == infinity or zero + sig[0] = 0xc0 + err = new(Signature).UnmarshalBinary(sig) + if err == nil { + t.Errorf("Expected SignatureG2FromBytes to fail but passed") + } +} + +func TestBadSecretKeyG2(t *testing.T) { + sk := &SecretKey{value: bls12381.Bls12381FqNew()} + pk, err := sk.GetPublicKey() + if err == nil { + t.Errorf("Expected GetPublicKey to fail with 0 byte secret key but passed: %v", pk) + } + _ = sk.UnmarshalBinary(sk.value.Params.BiModulus.Bytes()) + pk, err = sk.GetPublicKey() + if err == nil { + t.Errorf("Expected GetPublicKey to fail with secret key with Q but passed: %v", pk) + } + + err = sk.UnmarshalBinary([]byte{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}) + if err == nil { + t.Errorf("Expected SecretKeyFromBytes to fail with not enough bytes but passed: %v", pk) + } + + err = sk.UnmarshalBinary(make([]byte, 32)) + if err == nil { + t.Errorf("Expected 
SecretKeyFromBytes to fail with all zeros but passed: %v", pk) + } +} + +func TestProofOfPossessionG2Works(t *testing.T) { + ikm := make([]byte, 32) + readRand(ikm, t) + bls := NewSigPop() + pk, sk, err := bls.KeygenWithSeed(ikm) + if err != nil { + t.Errorf("Key gen failed but should've succeeded") + } + pop, err := bls.PopProve(sk) + if err != nil { + t.Errorf("PopProve failed but should've succeeded") + } + if res, _ := bls.PopVerify(pk, pop); !res { + t.Errorf("PopVerify failed but should've succeeded") + } +} + +func TestProofOfPossessionG2FromBadKey(t *testing.T) { + ikm := make([]byte, 32) + value := new(big.Int) + value.SetBytes(ikm) + sk := SecretKey{value: bls12381.Bls12381FqNew().SetBigInt(value)} + _, err := sk.createProofOfPossession(blsSignaturePopDst) + if err == nil { + t.Errorf("createProofOfPossession should've failed but succeeded.") + } +} + +func TestProofOfPossessionG2BytesWorks(t *testing.T) { + sk := genSecretKey(t) + pop, err := sk.createProofOfPossession(blsSignaturePopDst) + if err != nil { + t.Errorf("CreateProofOfPossesionG2 failed but shouldn've succeeded.") + } + out := marshalStruct(pop, t) + if len(out) != ProofOfPossessionSize { + t.Errorf( + "ProofOfPossessionBytes incorrect size: expected %v, got %v", + ProofOfPossessionSize, + len(out), + ) + } + pop2 := new(ProofOfPossession) + err = pop2.UnmarshalBinary(out) + if err != nil { + t.Errorf("ProofOfPossession.UnmarshalBinary failed: %v", err) + } + out2 := marshalStruct(pop2, t) + if !bytes.Equal(out, out2) { + t.Errorf("ProofOfPossession.UnmarshalBinary failed, not equal when deserialized") + } +} + +func TestProofOfPossessionG2BadBytes(t *testing.T) { + zeros := make([]byte, ProofOfPossessionSize) + temp := new(ProofOfPossession) + err := temp.UnmarshalBinary(zeros) + if err == nil { + t.Errorf("ProofOfPossession.UnmarshalBinary shouldn've failed but succeeded.") + } +} + +func TestProofOfPossessionG2Fails(t *testing.T) { + sk := genSecretKey(t) + pop, err := sk.createProofOfPossession(blsSignaturePopDst) + if err != nil { + t.Errorf("Expected createProofOfPossession to succeed but failed.") + } + + ikm := make([]byte, 32) + readRand(ikm, t) + sk = genRandSecretKey(ikm, t) + bad, err := sk.GetPublicKey() + if err != nil { + t.Errorf("Expected PublicKeyG1FromBytes to succeed but failed: %v", err) + } + if res, _ := pop.verify(bad, blsSignaturePopDst); res { + t.Errorf("Expected ProofOfPossession verify to fail but succeeded.") + } +} + +func TestMultiSigG2Bytes(t *testing.T) { + messages := make([][]byte, 1) + message := make([]byte, 20) + messages[0] = message + _, sigs := initAggregatedTestValuesG2(messages, t) + bls := NewSigPop() + msig, err := bls.AggregateSignatures(sigs...) + if err != nil { + t.Errorf("%v", err) + } + msigBytes := marshalStruct(msig, t) + if len(msigBytes) != SignatureSize { + t.Errorf( + "Invalid multi-sig length. Expected %d bytes, found %d", + SignatureSize, + len(msigBytes), + ) + } + msig2 := new(MultiSignature) + err = msig2.UnmarshalBinary(msigBytes) + if err != nil { + t.Errorf("MultiSignatureG2FromBytes failed with %v", err) + } + msigBytes2 := marshalStruct(msig2, t) + + if !bytes.Equal(msigBytes, msigBytes2) { + t.Errorf("Bytes methods not equal.") + } +} + +func TestMultiSigG2BadBytes(t *testing.T) { + messages := make([][]byte, 1) + message := make([]byte, 20) + messages[0] = message + _, sigs := initAggregatedTestValuesG2(messages, t) + bls := NewSigPop() + msig, err := bls.AggregateSignatures(sigs...) 
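+	// The corruption checks below rely on the compressed-point encoding noted
+	// earlier in this file: byte 0 carries the flag bits (bit 7 = compressed),
+	// so zeroing it, or passing an all-zero buffer, must fail to unmarshal.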
+ if err != nil { + t.Errorf("%v", err) + } + msigBytes := marshalStruct(msig, t) + if len(msigBytes) != SignatureSize { + t.Errorf( + "Invalid multi-sig length. Expected %d bytes, found %d", + SignatureSize, + len(msigBytes), + ) + } + msigBytes[0] = 0 + temp := new(MultiSignature) + err = temp.UnmarshalBinary(msigBytes) + if err == nil { + t.Errorf("MultiSignatureG2FromBytes should've failed but succeeded") + } + msigBytes = make([]byte, SignatureSize) + err = temp.UnmarshalBinary(msigBytes) + if err == nil { + t.Errorf("MultiSignatureG2FromBytes should've failed but succeeded") + } +} + +func TestMultiPubkeyG1Bytes(t *testing.T) { + messages := make([][]byte, 1) + message := make([]byte, 20) + messages[0] = message + pks, _ := initAggregatedTestValuesG2(messages, t) + bls := NewSigPop() + apk, err := bls.AggregatePublicKeys(pks...) + if err != nil { + t.Errorf("%v", err) + } + apkBytes := marshalStruct(apk, t) + if len(apkBytes) != PublicKeySize { + t.Errorf("MultiPublicKey has an incorrect size") + } + apk2 := new(MultiPublicKey) + err = apk2.UnmarshalBinary(apkBytes) + if err != nil { + t.Errorf("MultiPublicKey.UnmarshalBinary failed with %v", err) + } + apk2Bytes := marshalStruct(apk2, t) + if !bytes.Equal(apkBytes, apk2Bytes) { + t.Errorf("Bytes methods not equal.") + } +} + +func TestMultiPubkeyG1BadBytes(t *testing.T) { + messages := make([][]byte, 1) + message := make([]byte, 20) + messages[0] = message + pks, _ := initAggregatedTestValuesG2(messages, t) + bls := NewSigPop() + apk, err := bls.AggregatePublicKeys(pks...) + if err != nil { + t.Errorf("%v", err) + } + apkBytes := marshalStruct(apk, t) + if len(apkBytes) != PublicKeySize { + t.Errorf("MultiPublicKey has an incorrect size") + } + apkBytes[0] = 0 + temp := new(MultiPublicKey) + err = temp.UnmarshalBinary(apkBytes) + if err == nil { + t.Errorf("MultiPublicKey.UnmarshalBinary should've failed but succeeded") + } + apkBytes = make([]byte, PublicKeySize) + err = temp.UnmarshalBinary(apkBytes) + if err == nil { + t.Errorf("MultiPublicKey.UnmarshalBinary should've failed but succeeded") + } +} + +func TestFastAggregateVerifyG2Works(t *testing.T) { + messages := make([][]byte, 1) + message := make([]byte, 1) + messages[0] = message + pks, sigs := initAggregatedTestValuesG2(messages, t) + asigs, _ := aggregateSignatures(sigs...) 
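+	// FastAggregateVerify covers the "same message, many signers" case: the
+	// public keys are folded into a single aggregate key and the combined
+	// signature is then checked like an ordinary signature over that key.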
+ bls := NewSigPop() + if res, _ := bls.FastAggregateVerify(pks, message, asigs); !res { + t.Errorf("FastAggregateVerify failed.") + } +} + +func TestFastAggregateVerifyConstituentG2Works(t *testing.T) { + messages := make([][]byte, 1) + message := make([]byte, 1) + messages[0] = message + pks, sigs := initAggregatedTestValuesG2(messages, t) + bls := NewSigPop() + if res, _ := bls.FastAggregateVerifyConstituent(pks, message, sigs); !res { + t.Errorf("FastAggregateVerify failed.") + } +} + +func TestFastAggregateVerifyG2Fails(t *testing.T) { + messages := make([][]byte, 1) + message := make([]byte, 1) + messages[0] = message + pks, sigs := initAggregatedTestValuesG2(messages, t) + bls := NewSigPop() + message[0] = 1 + if res, _ := bls.FastAggregateVerifyConstituent(pks, message, sigs); res { + t.Errorf("FastAggregateVerify verified when it should've failed.") + } +} + +func TestCustomPopDstG2Works(t *testing.T) { + bls, _ := NewSigPopWithDst("BLS_SIG_BLS12381G2_XMD:SHA-256_SSWU_RO_POP_TEST", + "BLS_POP_BLS12381G2_XMD:SHA-256_SSWU_RO_POP_TEST") + msg := make([]byte, 20) + ikm := make([]byte, 32) + pk, sk, err := bls.KeygenWithSeed(ikm) + if err != nil { + t.Errorf("Couldn't create custom dst keys: %v", err) + } + sig, err := bls.Sign(sk, msg) + if err != nil { + t.Errorf("Couldn't sign with custom dst: %v", err) + } + if res, _ := bls.Verify(pk, msg, sig); !res { + t.Errorf("verify fails with custom dst") + } + + pks := make([]*PublicKey, 10) + sigs := make([]*Signature, 10) + pks[0] = pk + sigs[0] = sig + for i := 1; i < 10; i++ { + readRand(ikm, t) + pkt, skt, err := bls.KeygenWithSeed(ikm) + if err != nil { + t.Errorf("Couldn't create custom dst keys: %v", err) + } + sigt, err := bls.Sign(skt, msg) + if err != nil { + t.Errorf("Couldn't sign with custom dst: %v", err) + } + pks[i] = pkt + sigs[i] = sigt + } + + if res, _ := bls.FastAggregateVerifyConstituent(pks, msg, sigs); !res { + t.Errorf("FastAggregateVerify failed with custom dst") + } + + pop, err := bls.PopProve(sk) + if err != nil { + t.Errorf("PopProve failed with custom dst") + } + + if res, _ := bls.PopVerify(pk, pop); !res { + t.Errorf("PopVerify failed with custom dst") + } +} + +func TestBlsPopG2KeyGenWithSeed(t *testing.T) { + ikm := []byte("Not enough bytes") + bls := NewSigPop() + _, _, err := bls.KeygenWithSeed(ikm) + if err == nil { + t.Errorf("Expected KeygenWithSeed to fail but succeeded") + } +} + +func TestBlsPopG2KeyGen(t *testing.T) { + bls := NewSigPop() + _, _, err := bls.Keygen() + if err != nil { + t.Errorf("Keygen failed: %v", err) + } +} + +func TestPopThresholdKeygenBadInputs(t *testing.T) { + bls := NewSigPop() + _, _, err := bls.ThresholdKeygen(0, 0) + if err == nil { + t.Errorf("ThresholdKeygen should've failed but succeeded") + } + _, _, err = bls.ThresholdKeygen(1, 0) + if err == nil { + t.Errorf("ThresholdKeygen should've failed but succeeded") + } + _, _, err = bls.ThresholdKeygen(3, 2) + if err == nil { + t.Errorf("ThresholdKeygen should've failed but succeeded") + } +} + +func TestPopThresholdKeygen(t *testing.T) { + bls := NewSigPop() + _, sks, err := bls.ThresholdKeygen(3, 5) + if err != nil { + t.Errorf("ThresholdKeygen failed") + } + if len(sks) != 5 { + t.Errorf("ThresholdKeygen did not produce enough shares") + } +} + +func TestPopPartialSign(t *testing.T) { + ikm := make([]byte, 32) + bls := NewSigPop() + pk, sks, err := bls.ThresholdKeygenWithSeed(ikm, 2, 4) + if err != nil { + t.Errorf("ThresholdKeygen failed") + } + msg := make([]byte, 10) + sig1, err := bls.PartialSign(sks[0], msg) + if 
err != nil { + t.Errorf("partialSign failed: %v", err) + } + sig2, err := bls.PartialSign(sks[1], msg) + if err != nil { + t.Errorf("partialSign failed: %v", err) + } + sig, err := bls.CombineSignatures(sig1, sig2) + if err != nil { + t.Errorf("CombineSignatures failed: %v", err) + } + + if res, _ := bls.Verify(pk, msg, sig); !res { + t.Errorf("Combined signature does not verify") + } + + sig, err = bls.CombineSignatures(sig1) + if err == nil { + t.Errorf("CombineSignatures succeeded when it should've failed") + } + if res, _ := bls.Verify(pk, msg, sig); res { + t.Errorf("Combined signature verify succeeded when it should've failed") + } +} + +// Ensure that mixed partial signatures from distinct origins create invalid composite signatures +func TestPopPartialMixupShares(t *testing.T) { + total := uint(5) + ikm := make([]byte, 32) + bls := NewSigPop() + pk1, sks1, err := bls.ThresholdKeygenWithSeed(ikm, 3, total) + if err != nil { + t.Errorf("ThresholdKeygen failed: %v", err) + } + for i := range ikm { + ikm[i] = 1 + } + pk2, sks2, err := bls.ThresholdKeygenWithSeed(ikm, 3, total) + if err != nil { + t.Errorf("ThresholdKeygen failed: %v", err) + } + + // Generate partial signatures for both sets of keys + msg := make([]byte, 10) + sigs1 := make([]*PartialSignature, total) + sigs2 := make([]*PartialSignature, total) + for i := range sks1 { + sigs1[i], err = bls.PartialSign(sks1[i], msg) + if err != nil { + t.Errorf("PartialSign failed: %v", err) + } + sigs2[i], err = bls.PartialSign(sks2[i], msg) + if err != nil { + t.Errorf("PartialSign failed: %v", err) + } + } + + // Try combining 2 from group 1 and 2 from group 2 + sig, err := bls.CombineSignatures(sigs1[0], sigs1[1], sigs2[2], sigs2[3]) + if err != nil { + t.Errorf("CombineSignatures failed: %v", err) + } + // Signature shouldn't validate + if res, _ := bls.Verify(pk1, msg, sig); res { + t.Errorf( + "CombineSignatures worked with different shares of two secret keys for the same message", + ) + } + if res, _ := bls.Verify(pk2, msg, sig); res { + t.Errorf( + "CombineSignatures worked with different shares of two secret keys for the same message", + ) + } + // Should error out due to duplicate identifiers + _, err = bls.CombineSignatures(sigs1[0], sigs1[1], sigs2[0], sigs2[1]) + if err == nil { + t.Errorf("CombineSignatures expected to fail but succeeded.") + } +} + +func TestNewSigEth2KeyGen(t *testing.T) { + eth2 := NewSigEth2() + _, _, err := eth2.Keygen() + if err != nil { + t.Errorf("Keygen failed: %v", err) + } +} + +func TestSigEth2SignRoundTrip(t *testing.T) { + eth2 := NewSigEth2() + pop := NewSigPop() + eth2Pk, eth2Sk, err := eth2.Keygen() + if err != nil { + t.Errorf("Keygen failed: %v", err) + } + + sig, err := pop.Sign(eth2Sk, []byte{0, 0}) + if err != nil { + t.Errorf("Sign failed: %v", err) + } + if ok, err := eth2.Verify(eth2Pk, []byte{0, 0}, sig); err != nil || !ok { + t.Errorf("Verify failed: %v", err) + } +} diff --git a/signatures/bls/rust/Cargo.toml b/signatures/bls/rust/Cargo.toml new file mode 100755 index 0000000..9b17a1f --- /dev/null +++ b/signatures/bls/rust/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "miracl" +version = "0.1.0" +authors = ["Mike Lodder "] +edition = "2018" + +[dependencies] +bls_sigs_ref = "0.3" +hex = "0.4" +miracl_core = { version = "2.3", features = [ + "bls12381", +], default-features = false } +pairing-plus = "0.19" +rand = "0.8" +serious = { version = "0.1", git = "https://github.com/mikelodder7/malutils" } +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" +sha2 = 
"0.8" +structopt = "0.3" diff --git a/signatures/bls/rust/README.md b/signatures/bls/rust/README.md new file mode 100755 index 0000000..e0f8e8d --- /dev/null +++ b/signatures/bls/rust/README.md @@ -0,0 +1,13 @@ +--- +aliases: [README] +tags: [] +title: README +linter-yaml-title-alias: README +date created: Wednesday, April 17th 2024, 4:11:40 pm +date modified: Thursday, April 18th 2024, 8:19:25 am +--- + +## Note + +This rust package is added for the sole purpose of comparing the result of our Go package with other +implementations, including the rust package. diff --git a/signatures/bls/rust/src/main.rs b/signatures/bls/rust/src/main.rs new file mode 100755 index 0000000..43aebd9 --- /dev/null +++ b/signatures/bls/rust/src/main.rs @@ -0,0 +1,388 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +use bls_sigs_ref::BLSSignaturePop; +use miracl_core::bls12381::{big::BIG, ecp::ECP, ecp2::ECP2, rom::MODULUS}; +use pairing_plus::{ + bls12_381::{Fr, G1, G2}, + hash_to_field::BaseFromRO, + serdes::SerDes, + CurveProjective, +}; +use rand::prelude::*; +use serde::Deserialize; +use serious::Encoding; +use sha2::digest::generic_array::GenericArray; +use std::{ + fs::File, + io::{self, Cursor, Read}, + path::PathBuf, +}; +use structopt::StructOpt; + +fn main() { + let args = CliArgs::from_args(); + + match args { + CliArgs::Generate { number } => generate(number), + CliArgs::PublicKey { keys } => pubkey(keys), + CliArgs::Sign { number, data } => sign(number, data), + CliArgs::Verify { keys } => verify(keys), + } +} + +fn generate(number: usize) { + let mut rng = thread_rng(); + print!("["); + let mut sep = ""; + for _ in 0..number { + let mut buf = [0u8; 48]; + rng.fill_bytes(&mut buf); + // let mut sk = buf; + let fr = Fr::from_okm(GenericArray::from_slice(&buf)); + let mut pk = G1::one(); + pk.mul_assign(fr); + + pk.serialize(&mut buf.as_mut(), true).unwrap(); + print!("{}\"{}\"", sep, hex::encode(buf)); + sep = ","; + + // Miracl expects 48 bytes for secret key even though the top 16 bytes are zeros + // sk = [0u8; 48]; + // fr.serialize(&mut sk[16..].as_mut(), true).unwrap(); + // let s = BIG::frombytes(&sk[..]); + // let x = _compress_g1(g1mul(&ECP::generator(), &s)); + // + // println!("Miracl = {}", hex::encode(x)); + } + print!("]"); +} + +/// Compress to BLS12-381 standard vs ANSI X9.62 +fn _compress_g1(pk: ECP) -> [u8; 48] { + let mut x = [0u8; 48]; + pk.getx().tobytes(&mut x); + if pk.is_infinity() { + // Set the second-most significant bit to indicate this point + // is at infinity. + x[0] |= 1 << 6; + } else { + let m = BIG { w: MODULUS }; + let mut negy = BIG::new(); + negy.add(&BIG::modneg(&pk.gety(), &m)); + negy.rmod(&m); + negy.norm(); + // Set the third most significant bit if the correct y-coordinate + // is lexicographically largest. + if BIG::comp(&pk.gety(), &negy) == 1 { + x[0] |= 1 << 5; + } + } + // Set highest bit to distinguish this as a compressed element. + x[0] |= 1 << 7; + x +} + +fn _compress_g2(sig: ECP2) -> [u8; 96] { + let mut x = [0u8; 96]; + sig.getx().geta().tobytes(&mut x[..48]); + sig.getx().getb().tobytes(&mut x[48..]); + if sig.is_infinity() { + // Set the second-most significant bit to indicate this point + // is at infinity. + x[0] |= 1 << 6; + } else { + let mut negy = sig.clone(); + negy.neg(); + + // Set the third most significant bit if the correct y-coordinate + // is lexicographically largest. 
+ negy.sub(&sig); + + if negy.gety().sign() > 0 { + x[0] |= 1 << 5; + } + } + // Set highest bit to distinguish this as a compressed element. + x[0] |= 1 << 7; + x +} + +fn pubkey(keys: String) { + let res = read_input(&keys).unwrap(); + + for pubkey in serde_json::from_slice::>(&res).unwrap() { + let res = hex::decode(&pubkey); + if res.is_err() { + println!("Invalid hex format {}", res.unwrap_err()); + continue; + } + let mut key = res.unwrap(); + let mut cur = Cursor::new(key.as_slice()); + print!("ZCash {} - ", pubkey); + let res = G1::deserialize(&mut cur, true); + + if let Err(e) = res { + println!("fail - {}", e); + } else { + println!("pass"); + } + + let res = _uncompress_g1(key.as_mut_slice()); + + print!("Miracl {} - ", pubkey); + if let Err(e) = res { + println!("fail - {}", e); + } else { + println!("pass"); + } + } +} + +fn _uncompress_g1(d: &[u8]) -> Result { + if d.len() != 48 { + return Err("Invalid length".to_string()); + } + + if d[0] & 0x80 != 0x80 { + return Err("Expected compressed point".to_string()); + } + + // Expect point at infinity + if d[0] & 0x40 == 0x40 { + return if !d.iter().skip(1).all(|b| *b == 0) { + Err("Expected point at infinity but found another point".to_string()) + } else { + Ok(ECP::new()) + }; + } + + let s = d[0] & 0x20; + // Unset top bits + let mut dd = [0u8; 48]; + dd.copy_from_slice(d); + dd[0] &= 0x1F; + let x = BIG::frombytes(&dd); + Ok(ECP::new_bigint(&x, s as isize)) +} + +fn _uncompress_g2(d: &[u8]) -> Result { + if d.len() != 96 { + return Err("Invalid length".to_string()); + } + + if d[0] & 0x80 != 0x80 { + return Err("Expected compressed point".to_string()); + } + + // Expect point at infinity + if d[0] & 0x40 == 0x40 { + return if !d.iter().skip(1).all(|b| *b == 0) { + Err("Expected point at infinity but found another point".to_string()) + } else { + Ok(ECP2::new()) + }; + } + + let s = d[0] & 0x20; + let mut dd = [0u8; 97]; + dd[1..].copy_from_slice(d); + dd[1] &= 0x1F; + // Unset top bits + dd[0] = if s > 0 { 0x3 } else { 0x2 }; + Ok(ECP2::frombytes(&dd)) +} + +fn sign(number: usize, data: String) { + let mut rng = thread_rng(); + let bytes = data.as_bytes(); + let mut sep = ""; + print!("["); + for _ in 0..number { + let mut buf = [0u8; 48]; + rng.fill_bytes(&mut buf); + let fr = Fr::from_okm(GenericArray::from_slice(&buf)); + let mut pk = G1::one(); + pk.mul_assign(fr); + + let mut pubkey = [0u8; 48]; + pk.serialize(&mut pubkey.as_mut(), true).unwrap(); + + let signature = G2::sign(fr, bytes); + let mut sig = [0u8; 96]; + signature.serialize(&mut sig.as_mut(), true).unwrap(); + + print!("{}{{", sep); + print!(r#""data":"{}","#, data); + print!( + r#""public_key":"{}","#, + Encoding::encode(pubkey, Encoding::LowHex).into_string() + ); + print!( + r#""signature":"{}""#, + Encoding::encode(sig, Encoding::LowHex).into_string() + ); + print!("}}"); + sep = ","; + } + print!("]"); +} + +fn verify(keys: String) { + let res = read_input(&keys).unwrap(); + + for req in serde_json::from_slice::>(&res).unwrap() { + let pubkey = Encoding::decode(&req.public_key, Encoding::LowHex).unwrap(); + let sig = Encoding::decode(&req.signature, Encoding::LowHex).unwrap(); + let mut cur = Cursor::new(pubkey.as_slice()); + let verkey = G1::deserialize(&mut cur, true).unwrap(); + cur = Cursor::new(sig.as_slice()); + let signature = G2::deserialize(&mut cur, true).unwrap(); + + print!("ZCash {} - ", req.public_key); + if G2::verify(verkey, signature, req.data.as_bytes()) { + println!("pass"); + } else { + println!("fail"); + } + } +} + +#[derive(Debug, 
StructOpt)] +enum CliArgs { + Generate { + #[structopt(short, long)] + number: usize, + }, + PublicKey { + #[structopt(name = "KEYS")] + keys: String, + }, + Sign { + #[structopt(short, long)] + number: usize, + #[structopt(short, long)] + data: String, + }, + Verify { + #[structopt(name = "KEYS")] + keys: String, + }, +} + +#[derive(Deserialize)] +struct VerifyRequest { + data: String, + public_key: String, + signature: String, +} + +fn read_input(value: &str) -> Result, String> { + if !value.is_empty() { + match get_file(value) { + Some(file) => match File::open(file.as_path()) { + Ok(mut f) => Ok(read_stream(&mut f)), + Err(_) => Err(format!("Unable to read file {}", file.to_str().unwrap())), + }, + None => Ok(value.as_bytes().to_vec()), + } + } else { + let mut f = io::stdin(); + Ok(read_stream(&mut f)) + } +} + +fn read_stream(f: &mut R) -> Vec { + let mut bytes = Vec::new(); + let mut buffer = [0u8; 4096]; + + let mut read = f.read(&mut buffer); + while read.is_ok() { + let n = read.unwrap(); + + if n == 0 { + break; + } + + bytes.extend_from_slice(&buffer[..n]); + + read = f.read(&mut buffer); + } + + bytes +} + +fn get_file(name: &str) -> Option { + if name.len() > 256 { + // too long to be a file + return None; + } + let mut file = PathBuf::new(); + file.push(name); + if file.as_path().is_file() { + let metadata = file + .as_path() + .symlink_metadata() + .expect("symlink_metadata call failed"); + if metadata.file_type().is_symlink() { + if let Ok(f) = file.as_path().read_link() { + file = f + } else { + return None; + } + } + Some(file) + } else { + None + } +} + +// fn from_encoding(src: &str) -> Result { +// Encoding::parse(src).map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidInput, e)) +// } + +// fn hash_to_g2(d: &[u8]) -> ECP2 { +// const DST: &'static str = "BLS_SIG_BLS12381G2_XMD:SHA-256_SSWU_RO_NUL_"; +// let y = >>::hash_to_curve(d.as_ref(), DST); +// let mut d = [0u8; 193]; +// d[0] = 0x4; +// y.serialize(&mut d[1..].as_mut(), false); +// ECP2::frombytes(&d) +// } + +// fn ceil(a: usize, b: usize) -> usize { +// (a-1)/b+1 +// } +// +// fn hash_to_field(hash: usize, hlen: usize, u: &mut [FP2], dst: &[u8], m: &[u8], ctr: usize) { +// let q = BIG { w: MODULUS }; +// let el = ceil(q.nbits()+AESKEY*16, 8); +// +// let mut okm = [0u8; 512]; +// let mut fd = [0u8; 256]; +// xmd_expand(hash, hlen, &mut okm, el*ctr, &dst, &m); +// u[0] = FP2::new_fps( +// &FP::new_big(&DBIG::frombytes(&okm[0..el]).dmod(&q)), +// &FP::new_big(&DBIG::frombytes(&okm[el..(2*el)]).dmod(&q)) +// ); +// u[1] = FP2::new_fps( +// &FP::new_big(&DBIG::frombytes(&okm[(2*el)..(3*el)]).dmod(&q)), +// &FP::new_big(&DBIG::frombytes(&okm[(3*el)..]).dmod(&q)) +// ); +// } +// +// fn hash_to_ecp2(m: &[u8]) -> ECP2 { +// let dst = b"BLS_SIG_BLS12381G2_XMD:SHA-256_SSWU_RO_NUL_"; +// let mut u = [FP2::new(); 2]; +// hash_to_field(hmac::MC_SHA2, ecp::HASH_TYPE, &mut u, &dst[..], m, 2); +// +// let mut p = ECP2::map2point(&u[0]); +// let q = ECP2::map2point(&u[1]); +// p.add(&q); +// p.cfp(); +// p.affine(); +// p +// } diff --git a/signatures/bls/tests/bls/main.go b/signatures/bls/tests/bls/main.go new file mode 100644 index 0000000..db1c8e6 --- /dev/null +++ b/signatures/bls/tests/bls/main.go @@ -0,0 +1,236 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package main + +import ( + "bufio" + "encoding/hex" + "encoding/json" + "flag" + "fmt" + "os" + + bls "github.com/sonr-io/sonr/crypto/signatures/bls/bls_sig" +) + +type signOp struct { + Data string `json:"data"` + PublicKey string `json:"public_key"` + Signature string `json:"signature"` +} + +type cmdFlags struct { + Generate bool + PublicKeys bool + Sign bool + Verify bool + Number int +} + +func parseCliArgs() cmdFlags { + var generate, publickeys, sign, verify bool + var number int + flag.BoolVar(&generate, "g", false, "Generate public keys") + flag.BoolVar(&publickeys, "p", false, "Verify public keys") + flag.BoolVar(&sign, "s", false, "Generate signatures") + flag.BoolVar(&verify, "v", false, "Verify signatures") + flag.IntVar(&number, "n", 25, "The number of items to generate") + flag.Parse() + return cmdFlags{ + generate, publickeys, sign, verify, number, + } +} + +func main() { + flags := parseCliArgs() + + if flags.Generate { + generate(flags.Number) + } else if flags.PublicKeys { + publicKeys() + } else if flags.Sign { + sign(flags.Number) + } else if flags.Verify { + verify() + } +} + +func generate(number int) { + scheme := bls.NewSigPop() + publicKeys := make([]string, number) + for i := 0; i < number; i++ { + publicKey, _, err := scheme.Keygen() + if err != nil { + fmt.Printf("Keygen error occurred: %v\n", err) + os.Exit(1) + } + b, _ := publicKey.MarshalBinary() + publicKeys[i] = hex.EncodeToString(b) + } + out, _ := json.Marshal(publicKeys) + fmt.Println(string(out)) +} + +func publicKeys() { + input, err := getInput() + if err != nil { + fmt.Printf("Unable to read input: %v", err) + os.Exit(1) + } + var pubkeys []string + err = json.Unmarshal(input, &pubkeys) + if err != nil { + fmt.Printf("Unable to parse json input: %v", err) + os.Exit(1) + } + fmt.Printf("Checking public keys") + for _, pk := range pubkeys { + data, err := hex.DecodeString(pk) + if err != nil { + fmt.Printf("Unable to parse hex input: %v", err) + os.Exit(1) + } + pubkey := new(bls.PublicKey) + fmt.Printf("Checking %s - ", pk) + err = pubkey.UnmarshalBinary(data) + if err != nil { + fmt.Printf("fail\n") + os.Exit(1) + } + fmt.Printf("pass\n") + } +} + +func sign(number int) { + tests := []string{ + "", "aaa", "aaaaaa", "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + } + + var err error + scheme := bls.NewSigPop() + secretKeys := make([]*bls.SecretKey, number) + publicKeys := make([]*bls.PublicKey, number) + for i := 0; i < number; i++ { + publicKeys[i], secretKeys[i], err = scheme.Keygen() + if err != nil { + fmt.Printf("Keygen error occurred: %v\n", err) + os.Exit(1) + } + } + + sigs := make([]*signOp, number*len(tests)) + + k := 0 + for _, t := range tests { + for j := 0; j < len(secretKeys); j++ { + signature, err := scheme.Sign(secretKeys[j], []byte(t)) + if err != nil { + fmt.Printf("Signing failed: %v\n", err) + os.Exit(1) + } + pk, _ := publicKeys[j].MarshalBinary() + sig, _ := signature.MarshalBinary() + sigs[k] = &signOp{ + Data: t, + PublicKey: hex.EncodeToString(pk), + Signature: hex.EncodeToString(sig), + } + k++ + } + } + + data, err := json.Marshal(sigs) + if err != nil { + fmt.Printf("Unable to convert signatures to json") + os.Exit(1) + } + fmt.Print(string(data)) +} + +func verify() { + input, err := getInput() + if err != nil { + fmt.Printf("Unable to read input: %v", err) + os.Exit(1) + } + var operations []signOp + err = json.Unmarshal(input, &operations) + if err != nil { + 
fmt.Printf("Unable to parse json input: %v", err) + os.Exit(1) + } + scheme := bls.NewSigPop() + for _, op := range operations { + fmt.Printf("Checking %s - ", op.PublicKey) + data, err := hex.DecodeString(op.PublicKey) + if err != nil { + fmt.Printf("fail. Unable to parse hex input: %v", err) + os.Exit(1) + } + pubkey := new(bls.PublicKey) + err = pubkey.UnmarshalBinary(data) + if err != nil { + fmt.Printf("fail. Invalid public key: %v", err) + os.Exit(1) + } + data, err = hex.DecodeString(op.Signature) + if err != nil { + fmt.Printf("fail. Unable to parse hex input: %v", err) + os.Exit(1) + } + sig := new(bls.Signature) + err = sig.UnmarshalBinary(data) + if err != nil { + fmt.Printf("fail. Invalid signature format: %v", err) + os.Exit(1) + } + ok, err := scheme.Verify(pubkey, []byte(op.Data), sig) + if !ok || err != nil { + fmt.Printf("fail. Invalid signature") + os.Exit(1) + } + fmt.Printf("pass.") + } +} + +func getInput() ([]byte, error) { + fi, _ := os.Stdin.Stat() + + var data []byte + + if (fi.Mode() & os.ModeCharDevice) == 0 { + // Read from pipe + scanner := bufio.NewScanner(os.Stdin) + for scanner.Scan() { + data = append(data, scanner.Bytes()...) + } + + if err := scanner.Err(); err != nil { + return nil, err + } + } else { + // Read from file + arguments := flag.Args() + if len(arguments) < 1 { + return nil, fmt.Errorf("expected data argument") + } + + _, err := os.Stat(arguments[0]) + + if os.IsNotExist(err) { + data = []byte(arguments[0]) + } else { + contents, err := os.ReadFile(arguments[0]) + if err != nil { + return nil, err + } + data = contents + } + } + + return data, nil +} diff --git a/signatures/common/challenge.go b/signatures/common/challenge.go new file mode 100644 index 0000000..39e4bf5 --- /dev/null +++ b/signatures/common/challenge.go @@ -0,0 +1,14 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package common + +import ( + "github.com/sonr-io/sonr/crypto/core/curves" +) + +// Challenge generated by fiat-shamir heuristic +type Challenge = curves.Scalar diff --git a/signatures/common/commitment.go b/signatures/common/commitment.go new file mode 100644 index 0000000..6390036 --- /dev/null +++ b/signatures/common/commitment.go @@ -0,0 +1,15 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package common + +import ( + "github.com/sonr-io/sonr/crypto/core/curves" +) + +// Commitment represents a point Pedersen commitment of one or more +// points multiplied by scalars +type Commitment = curves.Point diff --git a/signatures/common/hmacdrbg.go b/signatures/common/hmacdrbg.go new file mode 100755 index 0000000..025e3df --- /dev/null +++ b/signatures/common/hmacdrbg.go @@ -0,0 +1,99 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package common + +import ( + "crypto/hmac" + "hash" +) + +// HmacDrbg is an HMAC deterministic random bit generator +// that can use any hash function. 
Handles reseeding +// automatically +type HmacDrbg struct { + k, v []byte + count int + hasher func() hash.Hash +} + +func NewHmacDrbg(entropy, nonce, pers []byte, hasher func() hash.Hash) *HmacDrbg { + drbg := new(HmacDrbg) + h := hasher() + drbg.k = make([]byte, h.Size()) + drbg.v = make([]byte, h.Size()) + drbg.count = 0 + drbg.hasher = hasher + + for i := range drbg.v { + drbg.v[i] = 1 + } + + drbg.update([][]byte{entropy, nonce, pers}) + drbg.count += 1 + return drbg +} + +func (drbg *HmacDrbg) Read(dst []byte) (n int, err error) { + toRead := len(dst) + if toRead == 0 { + return 0, nil + } + i := 0 + for i < toRead { + vmac := drbg.getHmac() + _, _ = vmac.Write(drbg.v) + drbg.v = vmac.Sum(nil) + + for j, b := range drbg.v { + dst[i+j] = b + } + i += len(drbg.v) + } + drbg.update(nil) + drbg.count++ + return i, nil +} + +func (drbg *HmacDrbg) Reseed(entropy []byte) { + drbg.update([][]byte{entropy}) +} + +func (drbg *HmacDrbg) getHmac() hash.Hash { + return hmac.New(drbg.hasher, drbg.k) +} + +func (drbg *HmacDrbg) update(seeds [][]byte) { + kmac := drbg.getHmac() + _, _ = kmac.Write(drbg.v) + _, _ = kmac.Write([]byte{0}) + if len(seeds) > 0 { + for _, seed := range seeds { + _, _ = kmac.Write(seed) + } + } + drbg.k = kmac.Sum(nil) + + vmac := drbg.getHmac() + _, _ = vmac.Write(drbg.v) + drbg.v = vmac.Sum(nil) + + if len(seeds) == 0 { + return + } + + kmac = drbg.getHmac() + _, _ = kmac.Write(drbg.v) + _, _ = kmac.Write([]byte{1}) + for _, seed := range seeds { + _, _ = kmac.Write(seed) + } + drbg.k = kmac.Sum(nil) + + vmac = drbg.getHmac() + _, _ = vmac.Write(drbg.v) + drbg.v = vmac.Sum(nil) +} diff --git a/signatures/common/nonce.go b/signatures/common/nonce.go new file mode 100644 index 0000000..93918a9 --- /dev/null +++ b/signatures/common/nonce.go @@ -0,0 +1,15 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package common + +import ( + "github.com/sonr-io/sonr/crypto/core/curves" +) + +// Nonce is used for zero-knowledge proofs to prevent replay attacks +// and prove freshness +type Nonce = curves.Scalar diff --git a/signatures/common/proof_committed_builder.go b/signatures/common/proof_committed_builder.go new file mode 100644 index 0000000..d08bc4d --- /dev/null +++ b/signatures/common/proof_committed_builder.go @@ -0,0 +1,89 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package common + +import ( + "fmt" + "io" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +const limit = 65535 + +// ProofCommittedBuilder is used to create +// proofs from multiple commitments where +// each secret is committed with a random blinding factor +// and turned into a Schnorr proof +type ProofCommittedBuilder struct { + points []curves.Point + scalars []curves.Scalar + curve *curves.Curve +} + +// NewProofCommittedBuilder creates a new builder using the specified curve +func NewProofCommittedBuilder(curve *curves.Curve) *ProofCommittedBuilder { + return &ProofCommittedBuilder{ + points: []curves.Point{}, + scalars: []curves.Scalar{}, + curve: curve, + } +} + +// CommitRandom uses the specified point and commits a random value to it +func (pcb *ProofCommittedBuilder) CommitRandom(point curves.Point, reader io.Reader) error { + if len(pcb.points) > limit { + return fmt.Errorf("limit for commitments reached") + } + pcb.points = append(pcb.points, point) + pcb.scalars = append(pcb.scalars, pcb.curve.Scalar.Random(reader)) + return nil +} + +// Commit uses the specified point and scalar to create a commitment +func (pcb *ProofCommittedBuilder) Commit(point curves.Point, scalar curves.Scalar) error { + if len(pcb.points) > limit { + return fmt.Errorf("limit for commitments reached") + } + pcb.points = append(pcb.points, point) + pcb.scalars = append(pcb.scalars, scalar) + return nil +} + +// Get returns the point and scalar at the specified index +func (pcb *ProofCommittedBuilder) Get(index int) (curves.Point, curves.Scalar) { + if index >= len(pcb.points) || index < 0 { + return nil, nil + } + return pcb.points[index], pcb.scalars[index] +} + +// GetChallengeContribution returns the bytes that should be added to +// a sigma protocol transcript for generating the challenge +func (pcb ProofCommittedBuilder) GetChallengeContribution() []byte { + commitment := pcb.curve.Point.SumOfProducts(pcb.points, pcb.scalars) + if commitment == nil { + return nil + } + return commitment.ToAffineCompressed() +} + +// GenerateProof converts the blinding factors and secrets into Schnorr proofs +func (pcb ProofCommittedBuilder) GenerateProof( + challenge curves.Scalar, + secrets []curves.Scalar, +) ([]curves.Scalar, error) { + if len(secrets) != len(pcb.scalars) { + return nil, fmt.Errorf("secrets is not equal to blinding factors") + } + + proofs := make([]curves.Scalar, len(pcb.scalars)) + for i, sc := range pcb.scalars { + proofs[i] = secrets[i].MulAdd(challenge, sc) + } + return proofs, nil +} diff --git a/signatures/common/proof_message.go b/signatures/common/proof_message.go new file mode 100644 index 0000000..0a38cc6 --- /dev/null +++ b/signatures/common/proof_message.go @@ -0,0 +1,79 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package common + +import ( + "io" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +// ProofMessage classifies how a message is presented in a proof +// Either Revealed or Hidden. Hidden has two sub categories: +// proof specific i.e. the message is only used for this proof or +// shared i.e. 
the message should be proved to be common across proofs +type ProofMessage interface { + // IsHidden indicates the message should be hidden + IsHidden() bool + // GetBlinding is used for hidden messages + // blindings can either be proof specific to a signature + // or involved with other proofs like boundchecks, + // set memberships, or inequalities so the blinding + // factor is shared among proofs to produce a common + // schnorr linking proof + GetBlinding(reader io.Reader) curves.Scalar + // GetMessage returns the underlying message + GetMessage() curves.Scalar +} + +type RevealedMessage struct { + Message curves.Scalar +} + +func (r RevealedMessage) IsHidden() bool { + return false +} + +func (r RevealedMessage) GetBlinding(reader io.Reader) curves.Scalar { + return nil +} + +func (r RevealedMessage) GetMessage() curves.Scalar { + return r.Message +} + +type ProofSpecificMessage struct { + Message curves.Scalar +} + +func (ps ProofSpecificMessage) IsHidden() bool { + return true +} + +func (ps ProofSpecificMessage) GetBlinding(reader io.Reader) curves.Scalar { + return ps.Message.Random(reader) +} + +func (ps ProofSpecificMessage) GetMessage() curves.Scalar { + return ps.Message +} + +type SharedBlindingMessage struct { + Message, Blinding curves.Scalar +} + +func (ps SharedBlindingMessage) IsHidden() bool { + return true +} + +func (ps SharedBlindingMessage) GetBlinding(reader io.Reader) curves.Scalar { + return ps.Blinding +} + +func (ps SharedBlindingMessage) GetMessage() curves.Scalar { + return ps.Message +} diff --git a/signatures/common/signature_blinding.go b/signatures/common/signature_blinding.go new file mode 100644 index 0000000..6a84ae9 --- /dev/null +++ b/signatures/common/signature_blinding.go @@ -0,0 +1,14 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package common + +import ( + "github.com/sonr-io/sonr/crypto/core/curves" +) + +// SignatureBlinding is a value used for computing blind signatures +type SignatureBlinding = curves.PairingScalar diff --git a/signatures/schnorr/mina/bitvector.go b/signatures/schnorr/mina/bitvector.go new file mode 100755 index 0000000..70f86bd --- /dev/null +++ b/signatures/schnorr/mina/bitvector.go @@ -0,0 +1,232 @@ +// Copyright (c) 2014 Dropbox, Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are met: +// +// 1. Redistributions of source code must retain the above copyright notice, this +// list of conditions and the following disclaimer. +// +// 2. Redistributions in binary form must reproduce the above copyright notice, +// this list of conditions and the following disclaimer in the documentation +// and/or other materials provided with the distribution. +// +// 3. Neither the name of the copyright holder nor the names of its contributors +// may be used to endorse or promote products derived from this software without +// specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +// DISCLAIMED. 
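// Aside (illustrative sketch, not part of this diff): how the
// ProofCommittedBuilder from signatures/common above might be used to build a
// single Schnorr proof of knowledge of a secret scalar. The curve argument,
// the common import path, and the Scalar.Hash / Point.Generator calls are
// assumptions based on the curves API used elsewhere in this module.

package builderexample

import (
	"crypto/rand"

	"github.com/sonr-io/sonr/crypto/core/curves"
	"github.com/sonr-io/sonr/crypto/signatures/common" // assumed import path
)

// ProveKnowledge returns the Fiat-Shamir challenge and the proof scalar for a
// secret x whose public point is P = G * x.
func ProveKnowledge(curve *curves.Curve, x curves.Scalar) (common.Challenge, []curves.Scalar, error) {
	builder := common.NewProofCommittedBuilder(curve)

	// Commit to a random blinding factor r against the generator: R = G * r.
	g := curve.Point.Generator()
	if err := builder.CommitRandom(g, rand.Reader); err != nil {
		return nil, nil, err
	}

	// Fiat-Shamir: hash the compressed commitment bytes into a challenge scalar.
	challenge := curve.Scalar.Hash(builder.GetChallengeContribution())

	// One response per committed point: s = x*challenge + r.
	proofs, err := builder.GenerateProof(challenge, []curves.Scalar{x})
	if err != nil {
		return nil, nil, err
	}
	return challenge, proofs, nil
}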
+// IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+package mina
+
+// A BitVector is a variable sized vector of bits. It supports
+// lookups, sets, appends, insertions, and deletions.
+//
+// This class is not thread safe.
+type BitVector struct {
+	data   []byte
+	length int
+}
+
+// NewBitVector creates and initializes a new bit vector with length
+// elements, using data as its initial contents.
+func NewBitVector(data []byte, length int) *BitVector {
+	return &BitVector{
+		data:   data,
+		length: length,
+	}
+}
+
+// Bytes returns a slice of the contents of the bit vector. If the caller changes the returned slice,
+// the contents of the bit vector may change.
+func (vector *BitVector) Bytes() []byte {
+	return vector.data
+}
+
+// Length returns the current number of elements in the bit vector.
+func (vector *BitVector) Length() int {
+	return vector.length
+}
+
+// This function shifts a byte slice one bit lower (less significant).
+// bit (either 1 or 0) contains the bit to put in the most significant
+// position of the last byte in the slice.
+// This returns the bit that was shifted off of the last byte.
+func shiftLower(bit byte, b []byte) byte {
+	bit = bit << 7
+	for i := len(b) - 1; i >= 0; i-- {
+		newByte := b[i] >> 1
+		newByte |= bit
+		bit = (b[i] & 1) << 7
+		b[i] = newByte
+	}
+	return bit >> 7
+}
+
+// This function shifts a byte slice one bit higher (more significant).
+// bit (either 1 or 0) contains the bit to put in the least significant
+// position of the first byte in the slice.
+// This returns the bit that was shifted off the last byte.
+func shiftHigher(bit byte, b []byte) byte {
+	for i := 0; i < len(b); i++ {
+		newByte := b[i] << 1
+		newByte |= bit
+		bit = (b[i] & 0x80) >> 7
+		b[i] = newByte
+	}
+	return bit
+}
+
+// Returns the minimum number of bytes needed for storing the bit vector.
+func (vector *BitVector) bytesLength() int {
+	lastBitIndex := vector.length - 1
+	lastByteIndex := lastBitIndex >> 3
+	return lastByteIndex + 1
+}
+
+// Panics if the given index is not within the bounds of the bit vector.
+func (vector *BitVector) indexAssert(i int) {
+	if i < 0 || i >= vector.length {
+		panic("Attempted to access element outside buffer")
+	}
+}
+
+// Append adds a bit to the end of a bit vector.
+func (vector *BitVector) Append(bit byte) {
+	index := uint32(vector.length)
+	vector.length++
+
+	if vector.bytesLength() > len(vector.data) {
+		vector.data = append(vector.data, 0)
+	}
+
+	byteIndex := index >> 3
+	byteOffset := index % 8
+	oldByte := vector.data[byteIndex]
+	var newByte byte
+	if bit == 1 {
+		newByte = oldByte | 1<<byteOffset
+	} else {
+		newByte = oldByte &^ (1 << byteOffset)
+	}
+	vector.data[byteIndex] = newByte
+}
+
+// Element returns the bit (either 1 or 0) stored at index i of the bit vector.
+func (vector *BitVector) Element(i int) byte {
+	vector.indexAssert(i)
+	byteIndex := uint32(i) >> 3
+	byteOffset := uint32(i % 8)
+	b := vector.data[byteIndex]
+	// Check the offset bit
+	return (b >> byteOffset) & 1
+}
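// Aside (illustrative sketch, not part of this diff): basic use of the
// BitVector above from inside this package -- append a few bits, read one
// back, and inspect the packed byte. Bits are stored LSB-first within each
// byte, as Append's 1<<byteOffset shows.

func exampleBitVector() {
	v := NewBitVector(nil, 0)

	// Append the bit pattern 1,0,1,1.
	for _, b := range []byte{1, 0, 1, 1} {
		v.Append(b)
	}

	_ = v.Length()   // 4
	_ = v.Element(2) // 1
	_ = v.Bytes()[0] // 0b00001101
}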
+// Set changes the bit in the ith index of the bit vector to the value specified in
+// bit.
+func (vector *BitVector) Set(bit byte, index int) {
+	vector.indexAssert(index)
+	byteIndex := uint32(index >> 3)
+	byteOffset := uint32(index % 8)
+
+	oldByte := vector.data[byteIndex]
+
+	var newByte byte
+	if bit == 1 {
+		// turn on the byteOffset'th bit
+		newByte = oldByte | 1<<byteOffset
+	} else {
+		// turn off the byteOffset'th bit
+		newByte = oldByte &^ (1 << byteOffset)
+	}
+	vector.data[byteIndex] = newByte
+}
+
+// Insert inserts the given bit at the supplied index of the bit vector. All
+// bits in positions greater than or equal to index before the call will
+// be shifted up by one.
+func (vector *BitVector) Insert(bit byte, index int) {
+	vector.indexAssert(index)
+	vector.length++
+
+	if vector.bytesLength() > len(vector.data) {
+		vector.data = append(vector.data, 0)
+	}
+
+	byteIndex := uint32(index >> 3)
+	byteOffset := uint32(index % 8)
+	var bitToInsert byte
+	if bit == 1 {
+		bitToInsert = 1 << byteOffset
+	}
+
+	oldByte := vector.data[byteIndex]
+	// This bit will need to be shifted into the next byte
+	leftoverBit := (oldByte & 0x80) >> 7
+	// Make masks to pull off the bits below and above byteOffset
+	// This mask has the byteOffset lowest bits set.
+	bottomMask := byte((1 << byteOffset) - 1)
+	// This mask has the 8 - byteOffset top bits set.
+	topMask := ^bottomMask
+	top := (oldByte & topMask) << 1
+	newByte := bitToInsert | (oldByte & bottomMask) | top
+
+	vector.data[byteIndex] = newByte
+	// Shift the rest of the bytes in the slice one higher, append
+	// the leftoverBit obtained above.
+	shiftHigher(leftoverBit, vector.data[byteIndex+1:])
+}
+
+// Delete removes the bit in the supplied index of the bit vector. All
+// bits in positions greater than or equal to index before the call will
+// be shifted down by one.
+func (vector *BitVector) Delete(index int) {
+	vector.indexAssert(index)
+	vector.length--
+	byteIndex := uint32(index >> 3)
+	byteOffset := uint32(index % 8)
+
+	oldByte := vector.data[byteIndex]
+
+	// Shift all the bytes above the byte we're modifying, return the
+	// leftover bit to include in the byte we're modifying.
+	bit := shiftLower(0, vector.data[byteIndex+1:])
+
+	// Modify oldByte.
+	// At a high level, we want to select the bits above byteOffset,
+	// and shift them down by one, removing the bit at byteOffset.
+
+	// This selects the bottom bits
+	bottomMask := byte((1 << byteOffset) - 1)
+	// This selects the top (8 - byteOffset - 1) bits
+	topMask := byte(^((1 << (byteOffset + 1)) - 1))
+	// newTop is the top bits, shifted down one, combined with the leftover bit from shifting
+	// the other bytes.
+	newTop := (oldByte&topMask)>>1 | (bit << 7)
+	// newByte takes the bottom bits and combines with the new top.
+	newByte := (bottomMask & oldByte) | newTop
+	vector.data[byteIndex] = newByte
+
+	// The desired length is the byte index of the last element plus one,
+	// where the byte index of the last element is the bit index of the last
+	// element divided by 8.
+	byteLength := vector.bytesLength()
+	if byteLength < len(vector.data) {
+		vector.data = vector.data[:byteLength]
+	}
+}
diff --git a/signatures/schnorr/mina/challenge_derive.go b/signatures/schnorr/mina/challenge_derive.go
new file mode 100644
index 0000000..be0380c
--- /dev/null
+++ b/signatures/schnorr/mina/challenge_derive.go
@@ -0,0 +1,46 @@
+//
+// Copyright Coinbase, Inc. All Rights Reserved.
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package mina + +import ( + "fmt" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +type MinaTSchnorrHandler struct{} + +func (m MinaTSchnorrHandler) DeriveChallenge( + msg []byte, + pubKey curves.Point, + r curves.Point, +) (curves.Scalar, error) { + txn := new(Transaction) + err := txn.UnmarshalBinary(msg) + if err != nil { + return nil, err + } + input := new(roinput).Init(3, 75) + txn.addRoInput(input) + + pt, ok := pubKey.(*curves.PointPallas) + if !ok { + return nil, fmt.Errorf("invalid point") + } + R, ok := r.(*curves.PointPallas) + if !ok { + return nil, fmt.Errorf("invalid point") + } + + pk := new(PublicKey) + pk.value = pt.GetEp() + + sc := msgHash(pk, R.X(), input, ThreeW, MainNet) + s := new(curves.ScalarPallas) + s.SetFq(sc) + return s, nil +} diff --git a/signatures/schnorr/mina/keys.go b/signatures/schnorr/mina/keys.go new file mode 100644 index 0000000..82ea8f5 --- /dev/null +++ b/signatures/schnorr/mina/keys.go @@ -0,0 +1,282 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package mina + +import ( + crand "crypto/rand" + "crypto/sha256" + "crypto/subtle" + "encoding/binary" + "fmt" + "io" + + "github.com/mr-tron/base58" + "golang.org/x/crypto/blake2b" + + "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/core/curves/native/pasta/fp" + "github.com/sonr-io/sonr/crypto/core/curves/native/pasta/fq" +) + +const ( + version = 0xcb + nonZeroCurvePointVersion = 0x01 + isCompressed = 0x01 +) + +// PublicKey is the verification key +type PublicKey struct { + value *curves.Ep +} + +// GenerateAddress converts the public key to an address +func (pk PublicKey) GenerateAddress() string { + var payload [40]byte + payload[0] = version + payload[1] = nonZeroCurvePointVersion + payload[2] = isCompressed + + buffer := pk.value.ToAffineUncompressed() + copy(payload[3:35], buffer[:32]) + payload[35] = buffer[32] & 1 + hash1 := sha256.Sum256(payload[:36]) + hash2 := sha256.Sum256(hash1[:]) + copy(payload[36:40], hash2[:4]) + return base58.Encode(payload[:]) +} + +// ParseAddress converts a given string into a public key returning an error on failure +func (pk *PublicKey) ParseAddress(b58 string) error { + buffer, err := base58.Decode(b58) + if err != nil { + return err + } + if len(buffer) != 40 { + return fmt.Errorf("invalid byte sequence") + } + if buffer[0] != version { + return fmt.Errorf("invalid version") + } + if buffer[1] != nonZeroCurvePointVersion { + return fmt.Errorf("invalid non-zero curve point version") + } + if buffer[2] != isCompressed { + return fmt.Errorf("invalid compressed flag") + } + hash1 := sha256.Sum256(buffer[:36]) + hash2 := sha256.Sum256(hash1[:]) + if subtle.ConstantTimeCompare(hash2[:4], buffer[36:40]) != 1 { + return fmt.Errorf("invalid checksum") + } + x := buffer[3:35] + x[31] |= buffer[35] << 7 + value, err := new(curves.Ep).FromAffineCompressed(x) + if err != nil { + return err + } + pk.value = value + return nil +} + +func (pk PublicKey) MarshalBinary() ([]byte, error) { + return pk.value.ToAffineCompressed(), nil +} + +func (pk *PublicKey) UnmarshalBinary(input []byte) error { + pt, err := new(curves.Ep).FromAffineCompressed(input) + if err != nil { + return err + } + pk.value = pt + return nil +} + +func (pk *PublicKey) SetPointPallas(pallas *curves.PointPallas) { + pk.value = pallas.GetEp() +} + +// SecretKey is the signing key +type SecretKey struct { + value *fq.Fq +} + +// GetPublicKey returns the 
corresponding verification +func (sk SecretKey) GetPublicKey() *PublicKey { + pk := new(curves.Ep).Mul(new(curves.Ep).Generator(), sk.value) + return &PublicKey{pk} +} + +func (sk SecretKey) MarshalBinary() ([]byte, error) { + t := sk.value.Bytes() + return t[:], nil +} + +func (sk *SecretKey) UnmarshalBinary(input []byte) error { + if len(input) != 32 { + return fmt.Errorf("invalid byte sequence") + } + var buf [32]byte + copy(buf[:], input) + value, err := new(fq.Fq).SetBytes(&buf) + if err != nil { + return err + } + sk.value = value + return nil +} + +func (sk *SecretKey) SetFq(fq *fq.Fq) { + sk.value = fq +} + +// NewKeys creates a new keypair using a CSPRNG +func NewKeys() (*PublicKey, *SecretKey, error) { + return NewKeysFromReader(crand.Reader) +} + +// NewKeysFromReader creates a new keypair using the specified reader +func NewKeysFromReader(reader io.Reader) (*PublicKey, *SecretKey, error) { + t := new(curves.ScalarPallas).Random(reader) + sc, ok := t.(*curves.ScalarPallas) + if !ok || t.IsZero() { + return nil, nil, fmt.Errorf("invalid key") + } + sk := sc.GetFq() + pk := new(curves.Ep).Mul(new(curves.Ep).Generator(), sk) + if pk.IsIdentity() { + return nil, nil, fmt.Errorf("invalid key") + } + + return &PublicKey{pk}, &SecretKey{sk}, nil +} + +// SignTransaction generates a signature over the specified txn and network id +// See https://github.com/MinaProtocol/c-reference-signer/blob/master/crypto.c#L1020 +func (sk *SecretKey) SignTransaction(transaction *Transaction) (*Signature, error) { + input := new(roinput).Init(3, 75) + transaction.addRoInput(input) + return sk.finishSchnorrSign(input, transaction.NetworkId) +} + +// SignMessage signs a _string_. this is somewhat non-standard; we do it by just adding bytes to the roinput. +// See https://github.com/MinaProtocol/c-reference-signer/blob/master/crypto.c#L1020 +func (sk *SecretKey) SignMessage(message string) (*Signature, error) { + input := new(roinput).Init(0, len(message)) + input.AddBytes([]byte(message)) + return sk.finishSchnorrSign(input, MainNet) +} + +func (sk *SecretKey) finishSchnorrSign(input *roinput, networkId NetworkType) (*Signature, error) { + if sk.value.IsZero() { + return nil, fmt.Errorf("invalid secret key") + } + pk := sk.GetPublicKey() + k := sk.msgDerive(input, pk, networkId) + if k.IsZero() { + return nil, fmt.Errorf("invalid nonce generated") + } + // r = k*G + r := new(curves.Ep).Generator() + r.Mul(r, k) + + if r.Y().IsOdd() { + k.Neg(k) + } + rx := r.X() + e := msgHash(pk, rx, input, ThreeW, networkId) + + // S = k + e*sk + e.Mul(e, sk.value) + s := new(fq.Fq).Add(k, e) + if rx.IsZero() || s.IsZero() { + return nil, fmt.Errorf("invalid signature") + } + return &Signature{ + R: rx, + S: s, + }, nil +} + +// VerifyTransaction checks if the signature is over the given transaction using this public key +func (pk *PublicKey) VerifyTransaction(sig *Signature, transaction *Transaction) error { + input := new(roinput).Init(3, 75) + transaction.addRoInput(input) + return pk.finishSchnorrVerify(sig, input, transaction.NetworkId) +} + +// VerifyMessage checks if the claimed signature on a _string_ is valid. this is nonstandard; see above. 
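// Aside (illustrative sketch, not part of this diff): end-to-end use of the
// mina keys API above -- generate a keypair, derive an address, sign a plain
// string message, and verify it. The import path is inferred from this
// module's layout.

package minaexample

import (
	"fmt"

	"github.com/sonr-io/sonr/crypto/signatures/schnorr/mina" // assumed import path
)

func Example() error {
	pk, sk, err := mina.NewKeys()
	if err != nil {
		return err
	}

	// Base58-encoded Mina address for the verification key.
	fmt.Println(pk.GenerateAddress())

	// Sign and verify a raw string message (non-standard, per the note above).
	sig, err := sk.SignMessage("hello mina")
	if err != nil {
		return err
	}
	return pk.VerifyMessage(sig, "hello mina")
}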
+func (pk *PublicKey) VerifyMessage(sig *Signature, message string) error { + input := new(roinput).Init(0, len(message)) + input.AddBytes([]byte(message)) + return pk.finishSchnorrVerify(sig, input, MainNet) +} + +func (pk *PublicKey) finishSchnorrVerify( + sig *Signature, + input *roinput, + networkId NetworkType, +) error { + if pk.value.IsIdentity() { + return fmt.Errorf("invalid public key") + } + if sig.R.IsZero() || sig.S.IsZero() { + return fmt.Errorf("invalid signature") + } + e := msgHash(pk, sig.R, input, ThreeW, networkId) + sg := new(curves.Ep).Generator() + sg.Mul(sg, sig.S) + + epk := new(curves.Ep).Mul(pk.value, e) + epk.Neg(epk) + + r := new(curves.Ep).Add(sg, epk) + if !r.Y().IsOdd() && r.X().Equal(sig.R) { + return nil + } else { + return fmt.Errorf("signature verification failed") + } +} + +func msgHash( + pk *PublicKey, + rx *fp.Fp, + input *roinput, + hashType Permutation, + networkId NetworkType, +) *fq.Fq { + input.AddFp(pk.value.X()) + input.AddFp(pk.value.Y()) + input.AddFp(rx) + + ctx := new(Context).Init(hashType, networkId) + fields := input.Fields() + ctx.Update(fields) + return ctx.Digest() +} + +func (sk SecretKey) msgDerive(msg *roinput, pk *PublicKey, networkId NetworkType) *fq.Fq { + input := msg.Clone() + input.AddFp(pk.value.X()) + input.AddFp(pk.value.Y()) + input.AddFq(sk.value) + input.AddBytes([]byte{byte(networkId)}) + inputBytes := input.Bytes() + + h, _ := blake2b.New(32, []byte{}) + _, _ = h.Write(inputBytes) + hash := h.Sum(nil) + + // Clear top two bits + hash[31] &= 0x3F + tmp := [4]uint64{ + binary.LittleEndian.Uint64(hash[:8]), + binary.LittleEndian.Uint64(hash[8:16]), + binary.LittleEndian.Uint64(hash[16:24]), + binary.LittleEndian.Uint64(hash[24:32]), + } + return new(fq.Fq).SetRaw(&tmp) +} diff --git a/signatures/schnorr/mina/keys_test.go b/signatures/schnorr/mina/keys_test.go new file mode 100644 index 0000000..7ec6067 --- /dev/null +++ b/signatures/schnorr/mina/keys_test.go @@ -0,0 +1,132 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package mina + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core/curves/native/pasta/fq" +) + +func TestNewKeys(t *testing.T) { + pk, sk, err := NewKeys() + require.NoError(t, err) + require.NotNil(t, sk) + require.NotNil(t, pk) + require.False(t, sk.value.IsZero()) + require.False(t, pk.value.IsIdentity()) +} + +func TestSecretKeySignTransaction(t *testing.T) { + // See https://github.com/MinaProtocol/c-reference-signer/blob/master/reference_signer.c#L15 + skValue := &fq.Fq{ + 0xca14d6eed923f6e3, 0x61185a1b5e29e6b2, 0xe26d38de9c30753b, 0x3fdf0efb0a5714, + } + sk := &SecretKey{value: skValue} + /* + This illustrates constructing and signing the following transaction. + amounts are in nanocodas. 
+ { + "common": { + "fee": "3", + "fee_token": "1", + "fee_payer_pk": "B62qiy32p8kAKnny8ZFwoMhYpBppM1DWVCqAPBYNcXnsAHhnfAAuXgg", + "nonce": "200", + "valid_until": "10000", + "memo": "E4Yq8cQXC1m9eCYL8mYtmfqfJ5cVdhZawrPQ6ahoAay1NDYfTi44K" + }, + "body": [ + "Payment", + { + "source_pk": "B62qiy32p8kAKnny8ZFwoMhYpBppM1DWVCqAPBYNcXnsAHhnfAAuXgg", + "receiver_pk": "B62qrcFstkpqXww1EkSGrqMCwCNho86kuqBd4FrAAUsPxNKdiPzAUsy", + "token_id": "1", + "amount": "42" + } + ] + } + */ + feePayerPk := new(PublicKey) + err := feePayerPk.ParseAddress("B62qiy32p8kAKnny8ZFwoMhYpBppM1DWVCqAPBYNcXnsAHhnfAAuXgg") + require.NoError(t, err) + sourcePk := new(PublicKey) + err = sourcePk.ParseAddress("B62qiy32p8kAKnny8ZFwoMhYpBppM1DWVCqAPBYNcXnsAHhnfAAuXgg") + require.NoError(t, err) + receiverPk := new(PublicKey) + err = receiverPk.ParseAddress("B62qrcFstkpqXww1EkSGrqMCwCNho86kuqBd4FrAAUsPxNKdiPzAUsy") + require.NoError(t, err) + txn := &Transaction{ + Fee: 3, + FeeToken: 1, + Nonce: 200, + ValidUntil: 10000, + Memo: "this is a memo", + FeePayerPk: feePayerPk, + SourcePk: sourcePk, + ReceiverPk: receiverPk, + TokenId: 1, + Amount: 42, + Locked: false, + Tag: [3]bool{false, false, false}, + NetworkId: MainNet, + } + sig, err := sk.SignTransaction(txn) + require.NoError(t, err) + pk := sk.GetPublicKey() + require.NoError(t, pk.VerifyTransaction(sig, txn)) +} + +func TestSecretKeySignMessage(t *testing.T) { + // See https://github.com/MinaProtocol/c-reference-signer/blob/master/reference_signer.c#L15 + skValue := &fq.Fq{ + 0xca14d6eed923f6e3, 0x61185a1b5e29e6b2, 0xe26d38de9c30753b, 0x3fdf0efb0a5714, + } + sk := &SecretKey{value: skValue} + sig, err := sk.SignMessage("A test message.") + require.NoError(t, err) + pk := sk.GetPublicKey() + require.NoError(t, pk.VerifyMessage(sig, "A test message.")) +} + +func TestSecretKeySignTransactionStaking(t *testing.T) { + // https://github.com/MinaProtocol/c-reference-signer/blob/master/reference_signer.c#L128 + skValue := &fq.Fq{ + 0xca14d6eed923f6e3, 0x61185a1b5e29e6b2, 0xe26d38de9c30753b, 0x3fdf0efb0a5714, + } + sk := &SecretKey{value: skValue} + + feePayerPk := new(PublicKey) + err := feePayerPk.ParseAddress("B62qiy32p8kAKnny8ZFwoMhYpBppM1DWVCqAPBYNcXnsAHhnfAAuXgg") + require.NoError(t, err) + sourcePk := new(PublicKey) + err = sourcePk.ParseAddress("B62qiy32p8kAKnny8ZFwoMhYpBppM1DWVCqAPBYNcXnsAHhnfAAuXgg") + require.NoError(t, err) + receiverPk := new(PublicKey) + err = receiverPk.ParseAddress("B62qkfHpLpELqpMK6ZvUTJ5wRqKDRF3UHyJ4Kv3FU79Sgs4qpBnx5RR") + require.NoError(t, err) + txn := &Transaction{ + Fee: 3, + FeeToken: 1, + Nonce: 10, + ValidUntil: 4000, + Memo: "more delegates more fun", + FeePayerPk: feePayerPk, + SourcePk: sourcePk, + ReceiverPk: receiverPk, + TokenId: 1, + Amount: 0, + Locked: false, + Tag: [3]bool{false, false, true}, + NetworkId: MainNet, + } + sig, err := sk.SignTransaction(txn) + require.NoError(t, err) + pk := sk.GetPublicKey() + require.NoError(t, pk.VerifyTransaction(sig, txn)) +} diff --git a/signatures/schnorr/mina/poseidon_config.go b/signatures/schnorr/mina/poseidon_config.go new file mode 100644 index 0000000..dc0620c --- /dev/null +++ b/signatures/schnorr/mina/poseidon_config.go @@ -0,0 +1,112 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package mina + +import ( + "github.com/sonr-io/sonr/crypto/core/curves/native/pasta/fp" +) + +// SBox is the type of exponentiation to perform +type SBox int + +const ( + Cube = iota // x^3 + Quint // x^5 + Sept // x^7 + Inverse // x^-1 +) + +// Exp mutates f by computing x^3, x^5, x^7 or x^-1 as described in +// https://eprint.iacr.org/2019/458.pdf page 8 +func (sbox SBox) Exp(f *fp.Fp) { + switch sbox { + case Cube: + t := new(fp.Fp).Square(f) + f.Mul(t, f) + case Quint: + t := new(fp.Fp).Square(f) + t.Square(t) + f.Mul(t, f) + case Sept: + f2 := new(fp.Fp).Square(f) + f4 := new(fp.Fp).Square(f2) + t := new(fp.Fp).Mul(f2, f4) + f.Mul(t, f) + case Inverse: + f.Invert(f) + default: + } +} + +// Permutation is the permute function to use +type Permutation int + +const ( + ThreeW = iota + FiveW + Three +) + +// Permute executes the poseidon hash function +func (p Permutation) Permute(ctx *Context) { + switch p { + case ThreeW: + for r := 0; r < ctx.fullRounds; r++ { + ark(ctx, r) + sbox(ctx) + mds(ctx) + } + ark(ctx, ctx.fullRounds) + case Three: + fallthrough + case FiveW: + // Full rounds only + for r := 0; r < ctx.fullRounds; r++ { + sbox(ctx) + mds(ctx) + ark(ctx, r) + } + default: + } +} + +func ark(ctx *Context, round int) { + for i := 0; i < ctx.spongeWidth; i++ { + ctx.state[i].Add(ctx.state[i], ctx.roundKeys[round][i]) + } +} + +func sbox(ctx *Context) { + for i := 0; i < ctx.spongeWidth; i++ { + ctx.sBox.Exp(ctx.state[i]) + } +} + +func mds(ctx *Context) { + state2 := make([]*fp.Fp, len(ctx.state)) + for i := range ctx.state { + state2[i] = new(fp.Fp).SetZero() + } + for row := 0; row < ctx.spongeWidth; row++ { + for col := 0; col < ctx.spongeWidth; col++ { + t := new(fp.Fp).Mul(ctx.state[col], ctx.mdsMatrix[row][col]) + state2[row].Add(state2[row], t) + } + } + for i, f := range state2 { + ctx.state[i].Set(f) + } +} + +// NetworkType is which Mina network id to use +type NetworkType int + +const ( + TestNet = iota + MainNet + NullNet +) diff --git a/signatures/schnorr/mina/poseidon_hash.go b/signatures/schnorr/mina/poseidon_hash.go new file mode 100644 index 0000000..d38d40d --- /dev/null +++ b/signatures/schnorr/mina/poseidon_hash.go @@ -0,0 +1,1182 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package mina + +import ( + "github.com/sonr-io/sonr/crypto/core/curves/native/pasta/fp" + "github.com/sonr-io/sonr/crypto/core/curves/native/pasta/fq" +) + +type Context struct { + state []*fp.Fp + absorbed, spongeWidth, spongeRate, fullRounds int + sBox SBox + pType Permutation + spongeIv [][]*fp.Fp + roundKeys [][]*fp.Fp + mdsMatrix [][]*fp.Fp +} + +var contexts = []*Context{ + // threeW + { + spongeWidth: 3, + spongeRate: 2, + fullRounds: 63, + sBox: Quint, + pType: ThreeW, + roundKeys: [][]*fp.Fp{ + { + {0xd2425a07cfec91d, 0x6130240fd42af5be, 0x3fb56f00f649325, 0x107d26d6fefb125f}, + {0x1dcedf2d0ebcb628, 0x2381dfa5face2460, 0x24e92a6d36d75404, 0xce8a325b8f74c91}, + {0x5df2ca8d054dc3a, 0x7fb9bf2f82379968, 0x424e2934a76cffb8, 0xb775aeab9b31f6a}, + }, + { + {0x9e6d7a567eaacc30, 0xced5d7ac222f233c, 0x2fe5c196ec8ffd26, 0x2a8f0caeda769601}, + {0xdc68662e69f84551, 0x1495455fbfab4087, 0xab2e97a03e2a079d, 0x3e93afa4e82ac2a0}, + {0x4cc46cd35daca246, 0x54dcc54ee433c73f, 0x893be89025513bde, 0x3b52fbe29b9d1b53}, + }, + { + {0xd96382010ec8b913, 0x9921d471216af4b5, 0xa7df09d5ecf06de, 0xa360d0e19232e76}, + {0x408add78a89dcf34, 0xb15031ad7e3ec92, 0x8ef35f1ab8093a79, 0x23276aa7a64b85a4}, + {0xdac52436f6c1cdd0, 0x46257295a42ee0b2, 0x3090799e349ade62, 0x261f8de11adb9313}, + }, + { + {0x51fb0207578466ed, 0xace76bd4ce53012a, 0x45f74735a873a7a6, 0x25be1a7e5c85f326}, + {0xdee055cc9572cc61, 0x9373df1526d6e34b, 0x2084c5641a3122a3, 0x3062d3265012feed}, + {0x1bd9070c51f40e9b, 0x9ea653d50b3fa6f, 0xa31a6b51060fc899, 0x703ce3434f96fea}, + }, + { + {0x937e0bd5442efc15, 0xc1b3a953fbd209b7, 0xb3737616f1f7eb8b, 0x5b10777bdf5dacd}, + {0x10791e59a7d5788a, 0x12f9041014d93ea, 0xb4bc24f34f470c71, 0x2f00cd1954db2d8c}, + {0xe912d7ae74abca54, 0xc5c26a35e725fd41, 0xb6af66a891d1c628, 0x3e5ec2bf0970d4a3}, + }, + { + {0x8340c5579ef76e75, 0x84685beb75f0fd3d, 0xd3a06c47523190d2, 0x308b8895c2d04040}, + {0x457356859f821f53, 0x8abdeeacf3a1ba9e, 0x43b602e5b2ad8b28, 0xc1879b3610fd2f4}, + {0x97b86a707b5809bc, 0x8dd94d73fb34a3ee, 0x5141652598014000, 0x32e8c24d1cee432e}, + }, + { + {0xa998c731389a48f3, 0x74fa8b44a0ab13b, 0x1ad03f591da71333, 0x2f03d178701bcb30}, + {0x8ff226bd0dd22c53, 0x26157e7a0aa47f2a, 0xe2531d8e88c5531d, 0x13d2bbc731281e5f}, + {0x76d7f22ef74ebe99, 0x245727aa206d8c55, 0x4e9bda26e39fe51, 0x72b21a3b6ea088d}, + }, + { + {0x67aff49f723add69, 0xccae0df20e3d633b, 0x85d57c5cda0e022c, 0xa398b1d3e6f2db1}, + {0x3fda2e26bcb6fc68, 0x593102ff961e4b40, 0xcaca5e29529738de, 0x2af42667a6e9b6cc}, + {0x9d05ac0056c6910a, 0x51343579482ba8b5, 0x33398f54089da2a3, 0x1879180149c97c34}, + }, + { + {0xfa93dc63c73d6490, 0x898847d037d78917, 0xf104b998c8ba5384, 0x1c6102bfd9c26df2}, + {0x3b13a7624fe64fc3, 0xae197dfb77fc7968, 0x855c3edfd013edc, 0x247f5769ca4aa6}, + {0x58bf743e0004f4eb, 0xa168da971a4635e8, 0xae0d93fc0aa0ca7c, 0x2e94bd91ee0eedec}, + }, + { + {0x826688455b3034d9, 0xd7a8296917d9820f, 0xbf14001a68903362, 0x3029935cebc0e1b5}, + {0x45622b94048f7c58, 0x1025e0f2169c46c1, 0x93dccace2e8635fa, 0x2110fe2a3a9c405b}, + {0xe6249e1fd6ae6204, 0x359cea9c7fc56811, 0x4561c87d295edc47, 0x3462ff6269ff9b7f}, + }, + { + {0xe88503091bf18fdb, 0xc7c9e48c7792429a, 0x7c2bedc34044daad, 0x239b54131c85ebfe}, + {0x9f92ac6cdf6d115c, 0x64fe79c6ea405241, 0x3fdbbe356870f930, 0x3c0d419e26ff24c1}, + {0xeb4e19e81548807b, 0x30d9ca531360e746, 0xe7fd824c32ef0f3d, 0x25c98a72c313174a}, + }, + { + {0xed7835758762c591, 0xd3a5813b88ed365f, 0x954d02a8633dba6f, 0x3da1af9d7eb3a01e}, + {0x86a8692d3ed59690, 0xf2873c2381bf29b6, 
0x5d8735bb1f3f459, 0x3c9a66efcbdbfd6e}, + {0x7acc7d7a0e1d24a3, 0x8614a6c50e15e4f7, 0xac5ee237c5548dd4, 0x12061a68b6963446}, + }, + { + {0x81bc9093c7730d5, 0xbf3e57fb7d94a12f, 0xab7caef0406ad333, 0x30d704e038c83cee}, + {0x53fd9bef62ab35ab, 0xdd9258a43a400a0e, 0x41fc71c1f14a3fe0, 0xaafb95e685e323a}, + {0xbb168efcfbc6417d, 0x6eec41829c340ecc, 0x3e1a203ea728cf86, 0x32403a5339001606}, + }, + { + {0xaa842cc888cce8e1, 0xaf60b8e8cfa84e30, 0xa8345e318e18911e, 0x23adb957cfe95986}, + {0x9b565aa4fc6cbed, 0x715714218a6da1db, 0x60740ecb2b402402, 0x3446170f139a28ee}, + {0x2d56545ce19df759, 0x2e62009452ac4624, 0xf834fd669efdc382, 0x3cdb040c5a2c8135}, + }, + { + {0x2c093b07989ac45b, 0xbc5a7ce41629b4d8, 0xf9f8f9ccd52de847, 0xa6852ff99c0df59}, + {0x62ec3921cae6ad0c, 0xed01dd4b15bc12e2, 0xcf099203d7296486, 0x10c70f52d8e4c35c}, + {0xe3afb13d98e0aa36, 0xd5c2e410a19ecae7, 0x7ef462f8ef00f1ee, 0x3b2666c865ffaf5f}, + }, + { + {0x2a34a9799fb10dea, 0xae3b7ac93a88e642, 0x9ce2e0a5b4676e62, 0x35338d2e290f6835}, + {0x704c2116789cd3e, 0x55c9408a44e87b39, 0x9a178778cf8123ac, 0x2a237d751ce80e22}, + {0xe73cc5f3949b8dac, 0x25fcebdc28f57fc2, 0xb8f4c26538ae8063, 0x160d42c37b816d52}, + }, + { + {0x78bba3a1b4334fec, 0xe1ad733be7312e24, 0x166c29284c5e74cf, 0x1e39c10d204c6e}, + {0xeba34937ed572fb8, 0xec650563d7045e13, 0xbf694cf0e16bc82c, 0x14394f78ca804fcb}, + {0xa28d8e959c93e39c, 0xfe2361a2d86799e3, 0xb04a4d8890bfaa19, 0x3bd58529949c0ff6}, + }, + { + {0x29225c95f5b1b6f9, 0xa6deebafdf12f757, 0x4d08632fbf4f058, 0x22ca57e20c30a4e2}, + {0xbd76f38890b3567b, 0x26cf81518916ab2, 0xe6096fe367359511, 0x1f5e2a08564c51ca}, + {0x522b7903b745f6ae, 0x97976d9feb2f329a, 0x4042a062305c3dd, 0x10e8bfbac34f6ab}, + }, + { + {0x53a7679692da3aa1, 0xe450599e85d31d58, 0x7b0eb8260f95a840, 0x1ace63dc713e7378}, + {0x725231ba432706f4, 0xccfededdca80880, 0xeba96b0dfbc1ccb5, 0x2584d3df4dd8a065}, + {0xe55f4913a1b47696, 0x34767187e1938949, 0xdbe1913ab957b7f2, 0x3c4be85646076541}, + }, + { + {0x96165fe1910385e3, 0xd8d34657e37bf741, 0x2dc65b5bd92b7412, 0x17c70695eabaad8c}, + {0x1aa94fbcda906296, 0x92b63261bad15d4, 0xde1fae454d20bc2, 0xff4dd19e7bb91c}, + {0x9a3c6bb3b3792b29, 0xba9a9d2d8c8f32fb, 0x90cae23b992784b0, 0x682d0d05588a0a4}, + }, + { + {0x8bea2f0e3b07fdae, 0xa2bcf89d80d35726, 0x544cd1414cc270fe, 0x113a9293d2324718}, + {0xae456164d267504c, 0x571cb023dbc1ce73, 0x21bac9730f19acbf, 0x3233efcc4435feb}, + {0x8c75648ce93bee2e, 0x6a7b2664251ea438, 0xca0e6900ef478974, 0x2ea2eb8e4287afe1}, + }, + { + {0x5acd5490088631ea, 0x796fd55cbfe132ea, 0xca378169084f5b20, 0x3bf94c0b770e6732}, + {0x3c7cc5fe22e9da10, 0x8c8312f4ace0a8a5, 0xc87567978ce028f8, 0x5a6f235fb313cd5}, + {0x2e6558b92407dcaf, 0x47e50ce3601012f4, 0x1e5797dd5bec08f6, 0x3f8733eee4c3467}, + }, + { + {0x94d2410224bcb62d, 0x3a926c5d9a86b7d4, 0x4e194b53953a44be, 0x352e9b3d78b5bcc}, + {0x54883a363d4cf9d5, 0x11f8990c505f0d36, 0x6bc1b45721bf8c66, 0x1741726454535739}, + {0x6f9a3ecf74a296b, 0x5de95fe78d089d68, 0xd73cd49e13d43129, 0x2093f7ce8f9a0900}, + }, + { + {0xe9cb00b14e26ff0d, 0x3f08fd94461dc18e, 0x631adca53058abce, 0x3214d344acadc846}, + {0xded7eb4dbd85489a, 0x88fec23b8cd8b77, 0x16d4ed13eab05211, 0x2846ac03154cebb9}, + {0xfac2a0ad57a4f26, 0xc1f76ecd19989adf, 0xaf30edcd16db54f2, 0x126d20470a443867}, + }, + { + {0xcff7162b34203071, 0xe55b077617c9a757, 0x2130b1adac59d068, 0x14c5aaf1e7b110eb}, + {0x699dd6dbcf0482c0, 0xea319e0d0b2bb999, 0xaa9e419d224d713d, 0x1f4d7ca828085388}, + {0xe0ce736d12ec2b2e, 0x2f38bcf04ebf093c, 0x4b4b4eb19457afe9, 0x36a6b22a47328281}, + }, + { + {0x787526c6865dcd1e, 0xc2680fe54617f4a8, 
0xb727dbb67712e717, 0x3a271ba53713445c}, + {0x806dc7288ade48b8, 0x37ae24211f9b6b9d, 0x8ca3d974dbf15054, 0x10fd6d9432eb0b68}, + {0xec73bef3a5597993, 0x911600cb416be443, 0x3ea4d6875cf79676, 0x1b77d3b73ff96642}, + }, + { + {0x599dae5eba7bbf12, 0xad64c1aab4e1e894, 0x5d661ccb5ee325bf, 0x126b7751010f9d3f}, + {0x5ed0e07555bb26cd, 0x6e878aa29bf2c2e8, 0xf5a5eeccbaa31dc1, 0x1d3867eb4e090941}, + {0x2937ca036763d28b, 0x86e99a452605b663, 0x724c2748daf8484b, 0xebc687217853e09}, + }, + { + {0x4dbc428bc8ad6e5d, 0xa2f7ba399263b882, 0x7bf0cdf85013257c, 0x28aeab12f70ef4a}, + {0x3c7790781e18b6e, 0x1bb3023fe1e655cb, 0xed1fdfcb455dca1a, 0x32e02d39a4d657a}, + {0x44e6d9c3a1dff228, 0xdf469cea3c0a5407, 0x299c3245b897c072, 0x2be7edf85aee84d5}, + }, + { + {0x7cef1e98b43bf15c, 0x5a8dd042cde4ffe2, 0xd86af3fcf5e44a3b, 0x126fff14c130fdf6}, + {0x902d3cbb3b8bffd3, 0xd0510141c9be133a, 0x1153608479eba1e3, 0x33b9a4fa153248fb}, + {0x2267900dcf1e0fe4, 0xaf0c04b7861398c6, 0x55b20fd2619336f9, 0x3729ba618d213b74}, + }, + { + {0x7bccad65b5fd53a3, 0x275fd70abbee0824, 0x6fce1c43407f5ddd, 0x381402c3966b0d15}, + {0xd4e90f5f5d1b1215, 0x7abfb980598b6f39, 0x6cc413a7353c523b, 0x3af9e5c6dbcecf69}, + {0xfc86b3533d23176f, 0xf2f83c14f801bf0f, 0x5fb324c8b8b84f4c, 0x2a9949609d62d389}, + }, + { + {0xfca79d23caa474da, 0x94af585882c48b4, 0x9184e6e773524de, 0x1971a4bd05472cc6}, + {0xe14a7a9f4d347481, 0xf68eb0d753ed0146, 0x624211b6c3d94cff, 0x399c54dc6cd81b81}, + {0x86039539dbb961f9, 0xa5af68dc06d8ea6, 0xcc02fcf05e368eed, 0xf64acd9952a945b}, + }, + { + {0x88c5c3ea3b0286e6, 0x8c0cf8675f0040d5, 0xc0f11c1177699ea2, 0xe3cc78066df561b}, + {0x49162b2f404f976c, 0x9a40001b3ac29cdd, 0x17287f8ca8386222, 0x39ca8d14934343bf}, + {0xa8dd36db44116537, 0x42dbe51eb9216283, 0xfec6c18c5ea56c1c, 0x9b0bc57ea6681fa}, + }, + { + {0x53684675590d10f8, 0x228bd6ed1447104c, 0xcac8753557c5e945, 0x2533527c9ad29ad6}, + {0x5e26e312638c73b4, 0x34e1114452f840c, 0xc90124a9e02e5aad, 0x2af2662d93aa4250}, + {0x76bb15558221d4af, 0x41d2a02a322e09b9, 0x29727b4d6c29e353, 0x13f30e86ab0297f6}, + }, + { + {0x8b113723e368b29c, 0x2ebddd5dcfd07680, 0x90027a89063fd6b, 0xab82f5420ead368}, + {0x6102c2fbeb0b8f83, 0x37caec74787f8363, 0xef4c7fedf4d49d09, 0x157481ef03f526da}, + {0x897dc99e348f8989, 0xfae2d8c6ca328b03, 0xd2a217387ae7e8fa, 0x2309412d902ce2d3}, + }, + { + {0x57895d8995bb037b, 0x4f303912d7010f4, 0x89d126adee61fd7b, 0x16ae40bb98717e4a}, + {0x970b8cda0d943140, 0xd07503f516f70525, 0xb14ed69e29e5ede5, 0x2316911f23d9bed1}, + {0x632e10c13ba9d605, 0x23723cd16be7a1a3, 0xc0804d9b3264d489, 0x25b18b66bd5a14b1}, + }, + { + {0x2cc13abb89f41136, 0x7b209265228a3e0b, 0xde1b3e0db09f17e0, 0x10e37b1b53ecfceb}, + {0x765eb14dd8c343a4, 0x3359bbc963368294, 0xeb4667bd15fd4a6c, 0x2db8142000298d91}, + {0x916a19d68c0ed401, 0x5002ac7be8c90d22, 0x8ae3857c98f24376, 0xb2557905a7150c}, + }, + { + {0xf2b38d5f2758254d, 0x236745488b58741a, 0x394898e9d7458c8c, 0x37be2b56562adda1}, + {0x4aa28e0e6f54d290, 0x115bb413d8c4a639, 0x3944ec613d50506e, 0xbc68674dac60a3e}, + {0x82db6a2e85fec32c, 0x97802c924aadd00a, 0xbb6cc8685d8b265f, 0x16b975c2e70b76e6}, + }, + { + {0xe2f39f5ef957115b, 0x9a9db22a4623e0fa, 0x86f28972da216598, 0x11dc93268964d29c}, + {0xd8842bd61b12b92a, 0x6b1e45e4a6b4da39, 0xc2541381b20e4fc2, 0x3cc006d14574ded2}, + {0xfe0a6647ec349b4a, 0x7d7f9c30364402f9, 0x8f30b3425f1e6b75, 0xa08c94f56352fda}, + }, + { + {0xe8678e25a4d59721, 0x7c2331e36880e306, 0x82ad2f154d53292f, 0x38f905d3bf125a0d}, + {0x37c5ba5cf32727dd, 0x4e50703bbe74875c, 0x6e81ccf687c1edf3, 0x13d5a0a5cd167d3e}, + {0x549db53e76170f2d, 0x6601954d27f0614d, 
0xa2e8516c0a8be8db, 0xe97e0bdc860ec97}, + }, + { + {0xab07a7c64e7a0c19, 0x3231ef6a85c561a2, 0x45cb8d5c9e495f6c, 0x3c130965bd821488}, + {0x4e85b1262b64c882, 0x148a4053173c6bbf, 0x2d30540d2bdf16b4, 0x1c4069538aad6db3}, + {0xe6f0d54ae64a6b4c, 0x8e435e285f5a0431, 0x89f8a4e55b2e5266, 0xc59b65276e7adbf}, + }, + { + {0xb3c5e9a0a063ba3c, 0xa5f4f9456cd30d09, 0x6d04f16139358814, 0xfe50ec7b61f34d3}, + {0x397f9724c5df2d2c, 0xebd5168a65e7dd00, 0x6e2d8f4b4688dfcc, 0x2089bd58ed27155}, + {0x4edc28aa719ba453, 0xd106c9909fe6d1bf, 0x583c7c64a6b2b9b9, 0x337410bcfb086e51}, + }, + { + {0xab6b2a207aa0b5dc, 0x4a8b65d7af08b29c, 0x933af8749e812390, 0x279107e984004c7f}, + {0x905ad996b3c96494, 0x64c09614294ad370, 0xe3a6ebea9d5f50c7, 0x39f0c91fd7487f70}, + {0xfb555601b96a98f6, 0x2779c5a69548b485, 0x1024d8abadf302ac, 0x9d7b11afa205c31}, + }, + { + {0x28cc587976dcbd5f, 0x7ec12e67d9fd9bff, 0x8519c024bfaacb31, 0xc3c59eef0b57c4}, + {0x7049bc5718274ce, 0xc5d45c2b8efbc27b, 0x1f2519b69fd58b2, 0x21d203679cf4943f}, + {0xdf1c276d845b18b, 0x83b415bcfd6f4794, 0x18cd69a7c02ec588, 0x28286f8440aac608}, + }, + { + {0xab9395e8f09c0e5e, 0x54e90df06eeabe37, 0x989b955540f5df9, 0x47eea2cb710d36a}, + {0x1ebb22981a2358d4, 0x978b30395e4ae485, 0x9b80f8337febb2dc, 0x6eadd7dff66e6fc}, + {0x8235f76b05fb36f3, 0xe81d3e6b55c01c67, 0xef1c4fbfd4f2689f, 0x356208269cd6bf63}, + }, + { + {0xdade3c9e413ae12e, 0xf0e6ec9130474658, 0xaf6a528f73acabe0, 0x25fa114c625e684a}, + {0x46eb556fec530561, 0xf037878098d1e6fa, 0x16665ada231de2c6, 0xe5526a3c3f20a1f}, + {0x8cf9f26ffb620afb, 0x10b561ad3be8bf53, 0xd095012a132c7d3f, 0x29dcadbf2a3da8a2}, + }, + { + {0xc1f246cabd2b3006, 0xa36999931ad6917e, 0xa86d5a37a14ebfc6, 0x233db2873238ccef}, + {0x6acabbbf7f09c6ab, 0x62e42b55c506b5ac, 0x20d3e5414eabcf58, 0x3047b6ea2b2ead12}, + {0xebb686baebe27e60, 0x6e299977bf344ce, 0x62074c04b5eaa97, 0x2f8be92e4b475d6c}, + }, + { + {0x68ec476c77321432, 0x6dc59804560e83e6, 0xad6ec6887fa80a57, 0xd2381657826abc9}, + {0xb377a593d24bcde1, 0x9a3338ee6dc43188, 0xfda6b04c6b645795, 0x1ebafbaa3cac50f4}, + {0xf53a7e9aa0eaf7a2, 0xc425d1cf708205ee, 0xc4bf63055e40b848, 0x31fd982712a1810}, + }, + { + {0x6aaec40a5ee97dc6, 0x1b740fec535e8d07, 0x72eb71573af7f8dc, 0x32a6cfd27721af}, + {0x4422e0763e1715cb, 0x2cc98a9c36481c08, 0x90a04c2c1100cf7, 0x14bdba391dc19c19}, + {0xf7ed8041fc74c1ba, 0x2da17664b7e0a39d, 0x7f194ed781738bd0, 0x185a0fb1e41d78e2}, + }, + { + {0x362d5eb8cf158562, 0x2cbda32193e3b946, 0x54a1587b53b6d3e, 0x3f6a83a8d453698e}, + {0xe4f331c807d7dbe4, 0x268c8fd3827cafa, 0x8128d4066a80b733, 0x3ac9356638ef0909}, + {0xc173b74baf5d10c0, 0xef4884b5f01dcb2a, 0x8ee4fbaf7d1af482, 0x3a631a390c1ace3a}, + }, + { + {0x8db20233cb7664ef, 0xcbd2b0d64c8e2b19, 0xb09d212d4b96af9b, 0xa41894a30594d96}, + {0xc87ad145936cc7b5, 0x831d623d07e2d55a, 0xb9f94b89928f0348, 0x32480b57ca35650c}, + {0x8df84ce2e7f6469f, 0x901e9cc791984cf9, 0xc8fcb9b481d64cbe, 0x3f5d039f9330361e}, + }, + { + {0xbf7db6c50e4b6c8e, 0xec86607f277ed803, 0x788b68697fc5fc3d, 0x31e6a675fa09e651}, + {0x1132c9835ce2f214, 0xe753150f8c8b375f, 0x8621813806da885e, 0xefb3b636dc3218c}, + {0x4ce5ce169e972cad, 0x36c4e02c437c353d, 0x91d0f983117961d3, 0x3fab66c8f61b43f4}, + }, + { + {0x26e7f148037e4831, 0xc898cea85a6d9ce7, 0x296eb709b0bce897, 0x13c7e41ce4a9413a}, + {0x364510c0cc6957d1, 0xc25f1640446d6363, 0xa38c8faccf2af7bc, 0x302f893eb7f3a293}, + {0xf47cdc9a193b6a1a, 0xbd3e81440b147a51, 0x1b8e11dc417ad50b, 0xbc7ad99db78ba74}, + }, + { + {0x5693220ff22b64e3, 0x95b8a7d6f5f07e88, 0xbb3aaa303d8a574, 0x20f50189f52021f3}, + {0xac8d038b73e50e93, 0x613109576b0dfb1e, 
0x4ac8d41e35f9b309, 0xa7ad75a9d37c68c}, + {0x9c979eb72d6864c7, 0x8f1b33a9db15c462, 0xd9dfc2decd86ff41, 0x536ee7d16b7b5d5}, + }, + { + {0x624a1196b5b7005f, 0xb8d9cdf932bdf18, 0x14682525e48adc4a, 0x1d434955ccf03ab0}, + {0x719a6381517b8c7a, 0x6168713e68dfa531, 0x9b6cf63daf06a7ee, 0x1d259cdc7f7100c8}, + {0x6f1e012d7c9270c9, 0xe523c121eae14ae6, 0x50e570fed81bd490, 0x3604d717343ea349}, + }, + { + {0x4eb31e3e9fb8bc15, 0xaa82d889ed027926, 0x861770ba9013af2c, 0x379ab47829cd822c}, + {0x6f9b1e3a7b6dce3, 0xc5c9ef5c6b7a53d0, 0xc8d12ce69f47f1d6, 0xbc256406e070545}, + {0x6440d55f63d23009, 0x74c5732854d7e658, 0x4f7e9fd81fd40c7b, 0x399992d613926dad}, + }, + { + {0xc29845928af2930, 0xa0130ffd9a9f0e2a, 0x48033877dafbdd89, 0x8c39d86214ce71a}, + {0x9af58f43601d6790, 0xd1af4f75b46b599b, 0x4a8b0b5e6e229017, 0x187b443781223437}, + {0xe4304790543ef2b4, 0x9c231e915d799bd, 0x200b86e24d27b2ce, 0x2b71199749ffc729}, + }, + { + {0xf4ba819f140a9647, 0x959dd33caab9515e, 0xfd99be65b9533f42, 0x28a03868f0a95555}, + {0x6afe5aee6300eeb3, 0x950ea44539e2fa43, 0xbd9aae96c7978e8f, 0x2e3b4f7256ec9b73}, + {0xcc62f85dcccaf357, 0xb56baaa116eae113, 0xd39121b0dcf3259b, 0x46f25e4866044af}, + }, + { + {0xfa220016b44669a2, 0xfbe99bbe5092f557, 0xe04b667a942bafa6, 0x26854edc78b0bd2e}, + {0x93c3a940486a4eb8, 0x74cbbc7bd198d4a2, 0xd64f6e74ed8521f2, 0x80843fa28104df1}, + {0xd544d8f569cb4c5d, 0x9be59548e6b93d2d, 0x65c0c8fe0898dd66, 0x2199702a5841ff1c}, + }, + { + {0xf4502dae36e05405, 0xd06e02b361bd5640, 0x783d406554218c20, 0x2f8b5506aeba914c}, + {0xe3e3f0f92963d863, 0xb3e9c513b7af16e, 0x3248dce06f85e561, 0x257525c4550a214b}, + {0x87b7d8d05103a90b, 0x775220d219e02ff3, 0x930897d4af90307d, 0x14fa87e7b059d3eb}, + }, + { + {0x646e9e36c66167c6, 0x8b8c8990b4cc40f8, 0x2392a8027c056fa3, 0x3468cae95b73113a}, + {0xbcbaaa65cae2ad7b, 0xa381602916655d36, 0x1e74c496a740a52f, 0x3e5d35946fd70251}, + {0x720ef833761a1b2a, 0xd2e1ad032e9ed61f, 0x1284360fc1bc35f0, 0x1d10c1b1e832840d}, + }, + { + {0xf018bd00e4ba7073, 0xc5c912b1fb1f136a, 0x4fa748314dd8ceb2, 0x2a5f7e0ca57c7b77}, + {0xcdf3b35babdc87d5, 0x2475d527ee5ef5e9, 0x7bbb70f05dbf6d98, 0x983fe2aff185429}, + {0x88ab0d4fb8c260a1, 0x78a852ca5a6f034a, 0xd04dfc8b6d06ada9, 0x306dbc4ba1531639}, + }, + { + {0x10dd644c6e5743c2, 0xa2114971e3faae9, 0xd9fbefe6b48e618c, 0x3e38b722e0705ba8}, + {0x46ad6b9283d7e0c3, 0x2e308fb2ac0d8c2a, 0x3e1cb59c2a9f60de, 0x3230c38937f63858}, + {0x77fcd1b48730d90a, 0x388ebff7f7ca5f87, 0xa37bd480d5a52d33, 0x1bee5d4ec9503e54}, + }, + { + {0x9e939970e11081bd, 0x72a0c62c6cf51be1, 0x6e8fd8f7900a914b, 0x36036298567aa99c}, + {0xd6faab04a619d2f5, 0x4a25d62545a6d348, 0xb7ad6f35c22f47b1, 0x1de168bf0fbefdec}, + {0x1345bfdadaae5f21, 0x59e61cc1ee11a507, 0xd8115cbdd6eec40b, 0x139c3f6dd5233369}, + }, + { + {0x58ba96a14e408681, 0xa06d5b071e1ae71c, 0x1a06d51e6e7f692d, 0x1207ba481447ae8d}, + {0xac27f34f246f6412, 0x6419a4c21404724a, 0x5eb99bc09179a67c, 0x251da17ba5f44d7f}, + {0x7c7edb3d3d4d0be, 0xebbd780f9995055f, 0x219149388a5fd89c, 0x46ecaa2536098}, + }, + }, + mdsMatrix: [][]*fp.Fp{ + { + {0x32f4f94379d14f6, 0x666eef381fb1d4b0, 0xd760525c85a9299a, 0x70288de13f861f}, + {0x2ab57684465d1ca, 0xf12514d37806396c, 0x825085389a26a582, 0x308efdddaf47d944}, + {0x1b2098a19e203e93, 0x914dcdea2a56e245, 0xc64ed9aa2aef8379, 0xb176f95c389478e}, + }, + { + {0x8f85e752c76f7c9c, 0x8297f4f031b02763, 0x30e4ea62df5067b7, 0x2821d0423006dcae}, + {0x5cece392cc5d403f, 0x123da1ba8becd2de, 0x193510960c81a54f, 0x1be17f43c42fe5c0}, + {0x65fc36e3c120e5dd, 0x51a4797b81835701, 0x3123b2b88ae51832, 0x19f174900d86138a}, + }, + { + 
{0xd03df46130dd77b4, 0xe694d8c7d8fd4ef4, 0xf71d2a65470713aa, 0x255c475344778d2c}, + {0x78597119a27f97bb, 0x1b1fb7c15ccb3746, 0xb86d8ab32d6a6edf, 0xb1e00f75148f670}, + {0x875597b15bf7ed8d, 0x73fa4e676bb9cc5f, 0x96babdc32ae359e, 0x31e6d9f5ccaa763e}, + }, + }, + spongeIv: [][]*fp.Fp{ + // Testnet + { + {0x67097c15f1a46d64, 0xc76fd61db3c20173, 0xbdf9f393b220a17, 0x10c0e352378ab1fd}, + {0x57dbbe3a20c2a32, 0x486f1b93a41e04c7, 0xa21341e97da1bdc1, 0x24a095608e4bf2e9}, + {0xd4559679d839ff92, 0x577371d495f4d71b, 0x3227c7db607b3ded, 0x2ca212648a12291e}, + }, + // Mainnet + { + {0xc21e7c13c81e894, 0x710189d783717f27, 0x7825ac132f04e050, 0x6fd140c96a52f28}, + {0x25611817aeec99d8, 0x24e1697f7e63d4b4, 0x13dabc79c3b8bba9, 0x232c7b1c778fbd08}, + {0x70bff575f3c9723c, 0x96818a1c2ae2e7ef, 0x2eec149ee0aacb0c, 0xecf6e7248a576ad}, + }, + }, + }, + // fiveW + { + spongeWidth: 5, + spongeRate: 4, + fullRounds: 53, + sBox: Sept, + roundKeys: [][]*fp.Fp{ + { + {0x6342181358ae2f17, 0x5a321a1614499301, 0x4359bc382232456a, 0x3c06cd69a97c028b}, + {0xc8f709e31405ba8d, 0x2ad4d6aa3c7651a4, 0x64ceac42dd7ccf06, 0x35c6bf27e315e7b9}, + {0x42218b11632afaf, 0x90b0a10532f0546, 0x9e04edfc8863e7f9, 0x1ff6086dc7ed5384}, + {0x16d30e86f07fe92b, 0x49034fc6f7a0437c, 0x7e969951637e0a28, 0x290172d88a15ab18}, + {0xd254d8abb8cde612, 0xd4661c22ac9199eb, 0x512959a2410883d0, 0x35b38913f7bb3552}, + }, + { + {0x49f9638de015c972, 0xad7264c15c3300ef, 0xa62f4865d8c45b04, 0x315e43c0ae02c353}, + {0xb3801cb83df2182f, 0xcaaccf3669280a81, 0xd23d1db585bf366e, 0x116befc5fb4b732a}, + {0x208bcd0d2edd006c, 0x10eb450d1445d24e, 0x430c3ea6421ac01c, 0x2faf9819445679d3}, + {0x7d2dedb966b0ebb4, 0x4209eee542441a34, 0xf0d333d24b06af71, 0x10dedf831bfe44d8}, + {0x382b013bde3a59ae, 0xebafdd87c283d4af, 0x32d9c8ef1bce04a8, 0x37556fb5c9dfe161}, + }, + { + {0xe7016d6ae4ed55a4, 0xbadf50c278ef084f, 0x5f2fc45b67f08884, 0x718c8e8163346fb}, + {0x2d21d77bdfad5760, 0x6ed140b4216a3a63, 0x43b402fb536c00d2, 0xb47d43fb0ea216d}, + {0x36a2aed80574b2f1, 0x43955b0622809eb3, 0x6feacba71072e845, 0x28df76e003ac1ce6}, + {0x95e3a1a7336bd728, 0xd87d937c7f109e25, 0x8ce854afc1645048, 0x2fd788ebb6f37b5e}, + {0x4e4171605858df04, 0x6f56c8c5f323deed, 0x6a570e86d39294ba, 0x6a33164c054814e}, + }, + { + {0xe182b857a075e511, 0x1246b8af401e6e, 0x498681baac02e546, 0x317e99888d6935e3}, + {0xbbb9ff9616a4c70, 0xbd5c5a42104dcf4, 0x92895e7865a8d476, 0x3c40b0adcfe6deb8}, + {0xfc63b58d228c1ad9, 0xb912e4ec588e1a52, 0x601be7d93b9e73fe, 0x1d52635b6bf4a796}, + {0x8defc77d5096467a, 0xfb8b83bb3cee16b9, 0x1498e0216590fc1d, 0x337571eb0ad8f47a}, + {0x8fed932080f3d458, 0x8a1ca06c98c3849f, 0x1644f9415395314f, 0x2919282dc8950b51}, + }, + { + {0x749d4e7efe5fad9b, 0x33b58b4510a5f0a1, 0xce62030d41fdca38, 0x239d5d59f87e0ddf}, + {0x55d59bdcc39fb71b, 0x7c183b304cad27d1, 0xb84182c63c47f121, 0x2469351d6d7b0ac4}, + {0xbe942f1ad599f550, 0xfa1e12f8c552df88, 0x1ce36aa79f22cf45, 0x18e7e49a56a18f41}, + {0x5a85fc171d1c0130, 0xee3b8aebc4fb144c, 0xb7910bda1c5a2946, 0x2ef6a9412227b683}, + {0xe3f397f0634ba64, 0xb24c5da645b804a8, 0x8efdcb393a1c51d1, 0x388a37934aa31a42}, + }, + { + {0xf0aeba711a86d351, 0x917e0a9875d8182e, 0x255ebbbe4e9633da, 0x3edb9c2a8feb51c7}, + {0xf7085a35274435d5, 0x4e859571631715f8, 0xd465913c64aecaac, 0x3df65e7104e3d373}, + {0x95964ef5b04dba5a, 0x86296dcb59e4f8b5, 0x340fae9fdeb8e75e, 0x3c095e04bcd91636}, + {0x6ca87eaf42d54b9d, 0x5efd6b2fd3843e9a, 0x1a9120a05cc3b07c, 0xbb0503c4b83daca}, + {0xfdd237f0786fa203, 0x76e67649a894dff6, 0xcea3a7485eb3522, 0x371ed39b30e34b8}, + }, + { + {0xb9ea58fb107cee4e, 0xea9f1a7f2d6d936f, 
0x60883d0bd19662e8, 0x137e698d12aceebf}, + {0x37d5024d477cbc47, 0xbd489bbac329f617, 0x37201ef9b7544c1f, 0x1019235cf42868cb}, + {0xa676a36a225fd2d0, 0x16c1d622bd02030a, 0x71b81aab0eacb647, 0x36af1193dcd5753f}, + {0xe00395d80fc7fa84, 0x2122fd483a170d2e, 0x6786ca04c13fbe30, 0x159d681f1d489146}, + {0xacbab2cc73b4508f, 0x47443d2762112d66, 0x26c9a77344312882, 0xa68b32d6c0b1024}, + }, + { + {0xe5943e9ec5816b5f, 0x78f97275cb9e7f63, 0x28d993db0402e6f5, 0x35023474a298a50a}, + {0xd5f6f7304d58bd86, 0xbeed688b64192acc, 0xd7211bea14f1a406, 0x24f92d631c9c4bc}, + {0x6952a1fd17d693e3, 0xc04dd964f234b8cf, 0x7caf0fe8884ef070, 0x1c1fdcd698bf94b5}, + {0xca8b6804ac41ba2d, 0x81794823bc0576ff, 0xd4a19f41722a0f09, 0x2846565f83dc9972}, + {0x51ea8b6d4a20f06e, 0xcfc9a709b77cbe9d, 0x30ff8818851924e9, 0x211d3ce41b7ee763}, + }, + { + {0xff21fe3230917894, 0xab2870fddeb86f88, 0x62e4a41add2270a6, 0x15884194ae363d14}, + {0x4a6fb13b70e67a19, 0xc5247d0dba887080, 0x31b4c6c5e685c605, 0x2e9f9208fcc2515a}, + {0x1c90f4fb1e0414b4, 0x8ff65aee4323cf80, 0xa13dda064773ad4d, 0x810a513ddf12c38}, + {0xe053ad98a6ec9bef, 0x39b9faf0e080b472, 0x818c7ad0f950eae0, 0xf1cd4227514673}, + {0xd196b25dc683b945, 0xd724c2ad624ad4a9, 0xa02ff7daa5b740a1, 0x15d55dbe3c7b4e13}, + }, + { + {0xb315a1ea41fe829c, 0x18713cd306622126, 0x6118592ab4ec0503, 0x3ed3f347d4db5134}, + {0x5975228afeaccf0c, 0xfe4e4239b60b0efc, 0xc9cca90df89e496b, 0x230f526cd9442560}, + {0x5a49f311ef676e8a, 0xad7cb07e3ed9efad, 0x2a417082abc3cf1a, 0x21601ac12eba703e}, + {0xdf338b83df06f68c, 0x63cd7cb53f8cdce4, 0x1f0fdaf0eecf7ab6, 0xcb74130d7ea6889}, + {0xd99c453addc57eae, 0x2e5ca7b4ccd548e9, 0x1d61848bbc4141ef, 0x1502dc917545edad}, + }, + { + {0x31688174ae3a3088, 0xc2f2735e215a8858, 0xe021199a3e4ad81c, 0x3c3a13210a719854}, + {0x5cd766b0b1aa7928, 0x9469f3c1ab1f06d1, 0x23c181781a8a1af8, 0x669f23736966be7}, + {0x5e43af558f0a0b, 0x7568a76e8644aa47, 0xa49bfc0b2c3a0969, 0xdaabd0e2866cfd8}, + {0x3f57582785bde7cc, 0x95a273aed529176e, 0xef25328aff37a9e8, 0x116b8853ab55ee5a}, + {0xeda91d71ffd7b2a0, 0x6efe645946126c4c, 0xe9f09dd2a7027804, 0x387a2c92b4e648d5}, + }, + { + {0x64ab4aa2e9f0aff2, 0xe966eeaf6883d60c, 0xb94697e6a3a0a4df, 0x38f0fc799ed7a14a}, + {0x1511dd33c4afdbf3, 0xe36ee2b5ffe811cc, 0x505a5de39ec98985, 0x3df8294a06678e64}, + {0xc710119f6242f55d, 0x2466c6cb6d325477, 0xb1774657e651de5, 0x310d190e78ee5dfd}, + {0xb1e6071ea081861b, 0xe92b3ce474295159, 0x77456109d94dc351, 0x1d893fd638fd7a1b}, + {0xc782a16511338c59, 0xd18c9bc3c41203d7, 0x847badef6c2b829f, 0x126c269e06a4a430}, + }, + { + {0xa0e7271d058e1b65, 0xc29f191eed5dc914, 0x89207dbde1650706, 0x1bd2a2d62a9947b2}, + {0x6ae20ca1c2d65d68, 0x80f9d7daed9c8c8c, 0xad5cf3b156b2f1de, 0x509f8b72998a87c}, + {0xb03a6b97357d97f0, 0x593eee3fbeacfe95, 0x9fee173d856a5b7c, 0x133fc88ee7b23b27}, + {0x4b9a0736e24a0f26, 0x405d5a665f66fbc4, 0x4d4d5268d0b8b9d9, 0x3238606d9b26856a}, + {0xd073bfc69cab34f0, 0xb4133b646eb1841b, 0x10a149c352b6f7df, 0x20cf915f29d33c57}, + }, + { + {0x307d2b8cb0fd415, 0xc8319598907073a4, 0x6b773db66a05a6a2, 0x33e6c0d1d806c5ce}, + {0xc08315afa7ec4292, 0xdf1042b5d5054c91, 0xa610476590769545, 0x354676a843acb066}, + {0xa1c7c9de915601be, 0x69cadc4f1bbf31a7, 0x47661b8d743f21e, 0x2939e4566b8e1260}, + {0xf8166d26b5875ae8, 0x391c8625906a68c9, 0x97a671ae3e7920b6, 0xb62d4ddc6a61f73}, + {0x5b13bd8369bec282, 0xe2ea0c100e5347a6, 0x2d9c57262923cb1, 0x1e31165d6dc07d45}, + }, + { + {0x3df3f3bfd54018f2, 0xca762a746d00043e, 0x25022728a3503107, 0xd5efa7f874457bc}, + {0x613e721107b4b48d, 0xf10823eec3d12df3, 0xeb54dfa62698b875, 0x243340259e551904}, + 
{0x1ce630ba8530b2a4, 0x6f5eddfa4f7ddda2, 0xdffb5a531052c7b4, 0x3c6b192f75dff4c1}, + {0x8d8f971036624659, 0xebd0ccfe39e0803e, 0xebdcb61a65d66931, 0x21868796aae7a40b}, + {0x22e12b186fa512c7, 0x4968f02a800e1ecc, 0x4725f2ec01f4b71e, 0x28e74c6a4f22fcb9}, + }, + { + {0xeffca0f81d56aa11, 0xcb0ae88503b5be82, 0x69b43848fe8e74c1, 0xf0b271c54f3b2a3}, + {0x23db18e63a2414b1, 0x7eba0c1ea4e2d784, 0x72108a3064e1a124, 0x138c36a9897505ac}, + {0xb93afa2c44d2b18f, 0x616aef5e3ec452fe, 0xcb15eebb579916f7, 0xed9d9c3d23aaa60}, + {0x19c722ddc6d11a6c, 0x933aa7e601881608, 0x3d680c98391faed1, 0x2809e56840f1eca3}, + {0x682adeb5d9a53026, 0xcdb02ab94f3e9259, 0xed7adc874c00a2d4, 0x1764188e52d76c52}, + }, + { + {0x2cb2173bfd2a8b7d, 0x742418360f62a8f4, 0xffa5daf7a2f06510, 0x2622bec30f05eda4}, + {0x956300c0a931ef90, 0x3e8dcd122d9b3016, 0xd77959f2fba021a4, 0x51f68f4d9b5836a}, + {0xcca7550f4e2663fb, 0x3a7115aac8cd273a, 0xfa9108f48b6ec0f7, 0x2eb9ac59d63b2756}, + {0x25c658fd552f8699, 0x24c4b27a4de55c10, 0xf2a39825d38a8469, 0x261dc2c828f9be1c}, + {0xf33c05063dced35a, 0xb0dada5d213d36ea, 0xe1a0c81f1f6ca22f, 0x3b5ea3d73588bcf5}, + }, + { + {0x5938e55ad487efbc, 0x65ff0bdaa2002589, 0x24f12d149cfb0ad8, 0x2d7f7be151666e78}, + {0x832fab7224860b2d, 0xc9f4cdbadd955fa2, 0x4aedceb5506c2655, 0x1c44fa130dd1ecc7}, + {0x7d034b1f6a58ddaf, 0xf897b22ef62bf04f, 0xd973ac696faf14aa, 0x31548d27d817dbcc}, + {0xf89844128d9c6ae6, 0xfc4cfe0229c7aab1, 0xac2b0c7d97647680, 0x1e1d5254aa0782f9}, + {0xe8c1dd74ba3631f0, 0xd81c8d077b5a6f56, 0xf3294e721e883318, 0x380d3a1eab70459b}, + }, + { + {0xaba511448d72ecf, 0xbf82f1fdf1687a8e, 0x6313bb88e45ffa56, 0x2ae425e1e1234cff}, + {0xf4a6807d301a531f, 0x96429863d70e0604, 0x15bdd9eae828ddc7, 0x31f0f80173fe31d7}, + {0x3a6d20e8dea8c483, 0x2adca6c88e7509ef, 0x48b1d6d05be6c961, 0x35053945aa6e5402}, + {0xdc5f96bd86658107, 0x5aee32dc2a32affb, 0xe200cec62dc0d495, 0x1055b57944bf554b}, + {0xe1ca0c53b24b06d, 0x528de276ea0c8c5e, 0x1c7ffa0b483f3002, 0x1c72f58595847427}, + }, + { + {0x26a28a08b5b246ea, 0xba3b8d9f4f4b0f41, 0x99c9ed2c1fcf4a3e, 0x58070c1e7b659c5}, + {0x7aa8e9a2d203b7f4, 0x9acb8ddb590fc9a0, 0xd7cf3e5554e162c, 0x17769ee1912d75e1}, + {0x6c6901252870a99d, 0x9a4108b035e55928, 0x297dab35f2c77cae, 0x31f2978ace0f7e2d}, + {0xfcf1119abff1e989, 0xe8502332327be648, 0xa8918572496177c5, 0x1710468c2227c8d6}, + {0xdd2bf9131735dd76, 0xe43ec3b817349505, 0x61ec884ee479524b, 0x377c72d607beacc5}, + }, + { + {0xa9bef84e0b68b0e8, 0x2c4c8f2ab7b0c9fc, 0xee234cad52493f69, 0x1c5bd50dc4a1fced}, + {0x57f989e88a97334c, 0xf99d4d667c1b859a, 0x64164c1e8e48da1b, 0x36b841653e8612c5}, + {0xb55c8effbf87f8d8, 0xe15c71abf8372eb, 0xc606d853488806ff, 0x2c6be0beffd5a9b3}, + {0x97235a2b7f573c80, 0x8f5053ff091130d7, 0x201611ece80cd2e6, 0x22498e90ad20fd7b}, + {0xb25923ea87f4f825, 0x1faf60a8b1d87720, 0x1c480f9378722c18, 0x187ba4d5f603542d}, + }, + { + {0x2553e37a713a8650, 0x4af5a87c8bb53cde, 0x9470f8df7dc4e62a, 0x1147db739156a158}, + {0x7a58fe90b257b6b7, 0x8ee9df6553d968a9, 0x85057b2342c19359, 0x3ca66ad9e8533b29}, + {0xebe091f37f855e8b, 0x78312923a64e1e08, 0xf968ab79c1cb96b, 0x84c6a2f87e5877d}, + {0xd3393bcf45ec7f72, 0xeebaad3d085cc500, 0x8dfb7b13fe964753, 0x29e0048c6a967c5}, + {0x6ee6f52f14c5c52b, 0x51eda970b620a200, 0xe1239122e1ee6ed3, 0x20241311a411c6dd}, + }, + { + {0x3a632070a61738d0, 0x58360c4de1248c90, 0x2007e0611a3ddc78, 0x318e43c7104b5d29}, + {0xe11a8859f0b07f43, 0x22423a78bf5d6ce7, 0xfe8417dfe2f81f05, 0x2a9cf2284ea93e6}, + {0x3c682a7371dae56a, 0xb537b6fc7564fea3, 0x4c8c6573f55fa435, 0x152489488b5a1639}, + {0x3cf49703bdb0de0, 0xf828bf910f380e10, 
0x8fb14d900fd140d1, 0x3f6cf44c3e3ff6db}, + {0x7b70aeec460c3296, 0x2afdb7b9dd091761, 0xe5b3b021d8f70e09, 0x1f75fbbd77a4b405}, + }, + { + {0xb824cf8de5beaed8, 0x70a7fe173b87433b, 0x1a8efeec667f72e4, 0x39565d2fad0c609a}, + {0x5cd0203a1d4f951, 0xdb78389b84917080, 0x6c4c97504ab70cd5, 0x29cc98e95cf64495}, + {0x6bc75c72ecd52b50, 0x33afc1a9068b1413, 0x33daf830e0a55f27, 0x71875230561158e}, + {0xb19ed2b87a280098, 0x1e9ed62c5d6a622e, 0xf1c47cd609238e2b, 0x88c1888884476d}, + {0x69ea31042e6e347f, 0x9bb2a44f8642afdb, 0xeccc2d81df513162, 0x3af58f661fb1f19c}, + }, + { + {0x46713a78770b8c85, 0xb3ccf0a4b425690e, 0xc65beb7710375cf8, 0x1c83ac2e75d29e4e}, + {0x8df4d89a09fbc390, 0x4d57e4b593fc2239, 0x94b4e16defc746d7, 0x1e00fdaba6801cc9}, + {0x62d3e199fefcf465, 0xddfca365e5282190, 0xadd48dd560275162, 0x250a1b6745f9c2a6}, + {0x935e7ff4c5ad3690, 0x931629e4dcf656, 0xad870e5416ca92d0, 0x2d2002e4c1a7fb42}, + {0x3020c37bbe98a69f, 0x3bbbef2df0bb0743, 0x735468317fea682d, 0x3bb75622e8ae0e5}, + }, + { + {0x6d101c64f48442cb, 0x9c4d0d7cabbe37d0, 0x6e457716d0cc5c54, 0x131685a66db0333d}, + {0x48bcf6f7121dc6fa, 0x44ea62ad25ddb6aa, 0x8636e258625c8e02, 0x171b08836f73a4a4}, + {0x7b5d53163078c6db, 0x79f022d48797b027, 0x8a6611711def9ec3, 0x281eb0327e36241a}, + {0xe807c130c139b23f, 0x8bd55fb76af83b50, 0x2917722317575e1b, 0x8f90e5cd2c3173a}, + {0x4759babec3357d26, 0x265b8a66badcbb36, 0x44df217c22db1fd1, 0x23c7ef2b68b42cf3}, + }, + { + {0xb6a7c51b7ecb6bc, 0x4d1ae5944bfbeed8, 0x864b9db1caabff7e, 0x39f90ff79f187276}, + {0xd1969a2901b910c2, 0x67af6508acaf97a2, 0x8380c23a59ae6c60, 0x271d877b5644c4a3}, + {0xe17e8fd5a391261a, 0xab17b0e6a4632c50, 0x1c3e97e07f259c9c, 0x2d07bd641a6586e2}, + {0x138aae9b3cd42fb3, 0x81f64f590fb78cb4, 0x885724f615f7f233, 0xa63fee53381d1b6}, + {0x2ec005773e160199, 0x475ef3383e134dd2, 0xce774f49e51de44a, 0x36ef885cefb664f8}, + }, + { + {0x37420d26e384e4a8, 0x1742df50ce970b26, 0x99f3ea60e2297d13, 0x291cc0cacac121bb}, + {0x59992f7b95d59d06, 0xbfbbbce9ffae7ef8, 0x230d4b9bb86868f8, 0x135612cee5c3cbf5}, + {0x6ab6b33ab48fd8b4, 0x2c46df90b0bdae8c, 0x31e33e7cf970f45a, 0x36c4a50b91a1475a}, + {0xaad1c48efff98a5d, 0x7a478e439cc52346, 0xb77125717607cc5d, 0x24c45ff83f2e80a8}, + {0x5103153d6fdd5dfd, 0x45b61844136521c6, 0x41c397561d8772fd, 0x1ed1ee06b89dbf2e}, + }, + { + {0x6c5059eebd2c5991, 0x3d6cf236df839e48, 0xd92711e12b52886b, 0x29829fb71567bb3a}, + {0x771a7702e2a54d3a, 0x29b8e99e7644939b, 0x3d453254475ea815, 0x2e5f8163d03b6cb5}, + {0x263df2c33bcdbc46, 0xfed7cba7787b1a36, 0xc315fa3c16682da4, 0x2ab983af9d9b6f25}, + {0x8a0353e693c8e2c, 0xbe92370d0d219261, 0x723aa4237242dd57, 0x25361783d1e56fc3}, + {0x7e7de67d78a9ff19, 0x83c2d2fd23156a32, 0xe65e5d243aa459b6, 0x35813f92ed31f777}, + }, + { + {0x4819c016f5a5c698, 0x4f72e64273a5868e, 0x61751f954f65b95d, 0x379db14ab6232b32}, + {0x5d8d95a7f9270b96, 0x61541332ec3b7a2b, 0xbf5b05056d41baab, 0xcaf69513fccef00}, + {0x966ebe56652901fd, 0x3d01e1815a5244ad, 0xd62a7487593ee708, 0x5f1ef41b294e025}, + {0xda81a0814d58ed14, 0xe5824bb0a3739516, 0xe559c39f79e50e7f, 0x3cf706e11c52afd4}, + {0xb07502481fd9ac00, 0xbe85d565f578f9e9, 0x25e2168d537c5428, 0x1f8caff53a89afab}, + }, + { + {0x67ef0684d24ef4d6, 0xe479b8723faeeb8a, 0xed152c7174bce1c6, 0x2c3879d3206619b}, + {0xc0d0b18f192aac4d, 0x8ff92db980036473, 0x39a88f384fb77d28, 0x9fe5e9d746e308b}, + {0xf10d5629175358d0, 0xa258ea27ec17d224, 0x3730ea6667ba6289, 0x1f8bfc64890f7e59}, + {0x5f8c12d96d37065d, 0x4d7ee138862aa83, 0x4a18488a63ce6118, 0x1930781a4f270e4}, + {0xbd4453cefd9ebc0c, 0x14827cfda7ed7ee8, 0x3dd6c45400957559, 0x33c9719ccdaab5b6}, + }, + { + 
{0xed0c855c5de359df, 0x6871e8f1798b7bf5, 0x3803b19eb2c1f511, 0x1b88826bc36516df}, + {0xecec6a0bd180a9c0, 0x954e9adfdd68e064, 0x323c890828d78811, 0x2c89829ba7ac7fe7}, + {0x623862b5c28aed0f, 0x544fea8657153f5f, 0x41139508c925a0b2, 0x10d5e06354bae812}, + {0xeae3b52c47a3ad31, 0xa40c52de4949ba9d, 0x239515052b2d0bb6, 0xc68f2ca20bfa5a4}, + {0x86da286806879c23, 0x2bc62129dce2d327, 0x8f8f1b3f3a607809, 0x257de258bba457f3}, + }, + { + {0xe55015708a8bf114, 0x8f7b7799abb7e89f, 0xcb8fa2a6bf9b602d, 0x1dd3b45e4f2bd1e9}, + {0xa48bd0f7b831b6af, 0xbe6ed2ae7e9b5ea6, 0xecdd091614986315, 0x1bcc64e2d4434539}, + {0xee95991ac731a5d7, 0xb643176046b51d45, 0xf396998dc25f72a3, 0x1474b46564931dc3}, + {0xe8973ad2df550e5e, 0x8561e0de83378bad, 0x90495ddc20bf8d64, 0x2c8ee5b7e87d23ea}, + {0xf15e8598d3360040, 0xd9237d9f5bd4da94, 0x2f32ec74a0b7cb8f, 0x2bfa790791857d69}, + }, + { + {0x51ed3818b8a671e0, 0x93f8de29901b0101, 0xdd6948f429d84a64, 0x339864f118ba5599}, + {0x8070718964c6881, 0x664a56735cd1d096, 0x966ebb68ce0c59be, 0xdb44ab4420ed185}, + {0x390fdea200fe8c8d, 0xb9ddf1781fc7dcfc, 0x6ccc8d97ea91a52b, 0xaf86429842ad1ea}, + {0x6fbc15b9f8ee61d5, 0x485407282205ba19, 0x9f2b3a9eb0762424, 0x1167e61f6e4bd42e}, + {0x9b2e606d2a12a53f, 0xfe4ee2337eadcc76, 0x97152598ff76e36b, 0x275e80f05a7648b6}, + }, + { + {0xc3cc461cc12d86e3, 0x48b15302057c1d0e, 0xef7a34bbb6748beb, 0x286c7795696d139}, + {0x337d4ce46c81b278, 0x2e5feb77948c70d3, 0xd82b4f43ac0ebab0, 0x23a5e817dfdebe91}, + {0x2ce1459a01867e26, 0x224b53d8806aa3a5, 0x1bba8ab295cb47f5, 0x6cbbeb19cc3e900}, + {0x1774396945cf1d1b, 0x325ec3d335425b6b, 0xdaf58659cd291e5, 0x3f8cfcb73cb1aff7}, + {0x31812080ad76a765, 0x76b57d46db21d506, 0xa9f0b894c076a2b, 0x6328153e24ddb21}, + }, + { + {0x635e53a6c9b16d33, 0x39a4746bf0a3364b, 0x61555f31318c6ae0, 0x3b775cd07e4fa0ba}, + {0x2cc47754e893144e, 0x76f56441be34ac0c, 0x580502c5981f6c05, 0x3f1912d87eb51724}, + {0x4ead108af1a4e97c, 0x14ed7a0fc16f0e1a, 0x88a0cf37b2ee10aa, 0x13aa67b47ce7c7fd}, + {0xf54922d903cdce93, 0x825eb6a0321c91fa, 0x6d71f23e5b2a77ae, 0x31d82d03cd7ef1f8}, + {0xf6c8ba6acea0c3a8, 0x3dcdacfe5ace7aa0, 0xbbbff1c714c8bbcb, 0x26b4c61ca5f05c8e}, + }, + { + {0xc2bddbeee6db7663, 0x5de993fdf4bee2bc, 0xc5598a002460bb6a, 0x195ed57ac7350182}, + {0xa3c22c35da970f5e, 0xca1c98fb8ebbea38, 0x1fac0f5c50dfc365, 0x1aef8ca61af69bd9}, + {0x26e8837fb91809f1, 0xf9688f1eec8f7e7e, 0x610d6fea8d7fdb89, 0x3d0a1bd4d427b4ac}, + {0x4805ec51b5a8b95, 0x35841575c48b553f, 0xefc86563cec776dc, 0x2840e92680329f35}, + {0x88aadc874b15e054, 0xf4c407659f438e86, 0xc78f11dc3ac39010, 0xe83c809dce62ea5}, + }, + { + {0xa0d5b4ce25ad95e3, 0x4eb48fba265577ca, 0xec34d547e740b067, 0xf87455a1a87a825}, + {0x7171a3dbff618247, 0x8f77af4f143160e8, 0x639543558218623e, 0x1ad8547be9074db4}, + {0xa0dbdf918c38f312, 0x62d083dc471500fb, 0x5358448cc9aad1d1, 0x5825571f70873cd}, + {0xb9c01a42ad74f7f3, 0x96425e46aa40f166, 0xbc305b6f8c2ca3c1, 0x38885a0e462a0d16}, + {0x1f3b4561c10caa2, 0xe7f7e2edd641ea35, 0xd946c46002ec9b24, 0x22dc4744611dfaf}, + }, + { + {0x89d2596131b9d801, 0x6d61479c7e6f6d9, 0xf5aad5649f5a1c79, 0x225393c033553d15}, + {0x5b0fdd7cbea91565, 0x95e46f19a0dffebc, 0x91d50f19a3a46071, 0x2c03ec2d9ea3aaaf}, + {0xdecba3c94506fd1b, 0xa07f4c4397072961, 0xc072d04541ab3761, 0x250beb5d4be1732e}, + {0x9ce4550f36a82208, 0xad906f79c2991285, 0x82adb87da0fe9206, 0x34aff80145dc173b}, + {0xb84407732055b44d, 0xf9c85a0309677606, 0x1816b1a6361d2c99, 0x302b805797745fa1}, + }, + { + {0xbbfbaa3655132a2f, 0x85de63edc71b8169, 0x231b387734b227a0, 0x791e6d7e390ab58}, + {0x846a348eee728261, 0x91e35881244dcaed, 
0x8755e3179f39f84, 0x11c757c8adbbcee2}, + {0xdc5b85ce4ff2e16, 0xe6680a9d77d56b79, 0x2e1d8a190b81a71b, 0x110440e7f341ccbf}, + {0xc5714f4e64e4f5b5, 0x9073123174ba32aa, 0xd1aafcb808fb13a9, 0x2b0bf27c449467b4}, + {0x674d2c9c2a9f6c33, 0xd64a81fc8872e544, 0x54da3af1c7ea91f3, 0x22bb7d2c8479681d}, + }, + { + {0x5ea26e9a08f957a5, 0x531b4a8427d261c2, 0x92a9d9e3c38cfcf0, 0x2006044428827241}, + {0xf0834849c1fecda7, 0xfab565ee5f9319d, 0x3a3b976275b87643, 0x2bc18d1039d22ce7}, + {0x7ad9bd18d3ef9d11, 0x136c2bdc157de581, 0xf1999edaa1f99ec6, 0x3bdb875af3652fea}, + {0xb06ff1fa4faa6be7, 0x7b65a2d09d62e6fe, 0xd52851aa16d4a33d, 0x3b7e6a651e5f1100}, + {0x70b297d3bd617bde, 0x42dfd73b30a28150, 0xbcbc0b930ad9480c, 0x14452569cf1b7495}, + }, + { + {0xb997f89dc4f23510, 0x86aeec894d2eb890, 0x40b5eb4203777c22, 0x3f6900dd45610c9}, + {0xbed1fbbc558ae3b9, 0x7b9919fc76fb8f31, 0xa8155c8d8f223d05, 0x7be199f23efc89b}, + {0x124bd2f29dfdfc99, 0xa3cd06dc334c28dc, 0x8321717bb314584c, 0x7a90593068a94cf}, + {0x6d262cbbd1c3c7a3, 0xfa55dbcbb37ab4e4, 0xdfb0541244749109, 0x1b42310efbbc8ae1}, + {0x159b02d7b19c8807, 0xa3155c41a3100f0b, 0x649691f59c73a27d, 0xb1c6cf5eaa1a3da}, + }, + { + {0x56a321c5d5620a4e, 0xd704e017be94ce48, 0xbb7e58300ff6e106, 0x231353460278eda6}, + {0x7f24d9a9e896902, 0x379dfae44f3dd605, 0xa756baa38400f59f, 0x1ca15e781769cff3}, + {0x3b1d025a762cebf4, 0x9be99c300caf395, 0x1ec3a4ee83fc2b9c, 0x1db9c130790aa77c}, + {0x92f350103a625f89, 0x7f7abde4baa7fe7c, 0xdb4ce7149975b21a, 0x37619aef93207815}, + {0x2df14ae96e32f360, 0x8154a50068c1d6d7, 0x3696523ff84dccbd, 0x3d71103fe9b7d2cc}, + }, + { + {0xdc8f123847c2f6f0, 0x772b9f5f133bb07a, 0xf48472df2de5637d, 0xb30587972382d91}, + {0xd32076031e6caf, 0x7a70c0a191315b1, 0xc72d68ebbd493f22, 0x26a6b863263ca385}, + {0xd1ad7a8f3f52bc58, 0x7fd97d9102a5e717, 0xdc4dea9fba06a94f, 0x24292fae82eb3182}, + {0x9c8f6b9aed0c14a, 0x7ce6499a8da0ffc5, 0x4c575abbd55a091, 0x2ffffd706225c6b5}, + {0x51ff57474553fd13, 0xbcd2c2f63e851309, 0xad42d629fbc07620, 0x37cb8a314456dcb3}, + }, + { + {0x9e725f4a1ea322f0, 0x51275d226f5fc65d, 0xcc96b5ceb521cae0, 0x31697ac9e08fd09a}, + {0xfbbfc3ed25936e24, 0x885d8e71956e9fbf, 0xb8e0d819be1b2b0c, 0x2a27ec07450706e5}, + {0xb86f23d85d56ff2, 0x25630952048f156b, 0x96949d2297030e85, 0x28ed6be9457a4c1d}, + {0x85cefc2d5a4254e4, 0x93335e124a410406, 0xe2caa252d7d83ee, 0x206bb8d1b294acc2}, + {0xaf3c9df6a0ed5396, 0xce5b0093a6e41bfb, 0xdae7d2b6669cdd65, 0x91670d7e0c906e}, + }, + { + {0x848e19c09bf3ab82, 0xd37733a864fbf5b1, 0x5834568186cadc57, 0x3d8894b3840c836b}, + {0xcb24e184ea8d715, 0xdde18b32791992e1, 0xc754f597613dec6e, 0x124ddceb631af8ac}, + {0x200fede77fe9ccaa, 0x4120a35be4499eb2, 0x6bcb74340da3069e, 0x18d40006660a4d9}, + {0x8ccd91c1dc2b219e, 0x1dc15dbbeee5b7cc, 0x30e961143289ebc0, 0x33aebd15476f6d}, + {0xd4c03228e1165ff1, 0xe755127566374ca5, 0x5eb5e5f7835f51b9, 0x11d1f3f590664116}, + }, + { + {0x1c2a43421f2b006a, 0x6258ea61fff6649b, 0x5a847a0bdf1ff16b, 0x2038498881736f80}, + {0xe603ee6ce8b0bbc6, 0x620a0a604d315efa, 0x40378c0521e6518d, 0x2d4d8c63bccec23}, + {0xa77ca49f7f8b90ff, 0xcb7652bf1a0cf5ea, 0x3d911a1c6e5c74c3, 0x1a8c15f40c9e99d6}, + {0x293a2d7589738448, 0xea4c1d5f407e77d5, 0xee7f4d7f7d8d69f0, 0x6d9a037bd4006b5}, + {0x7e0382dbce83b459, 0x8210fa8a33a15c2c, 0x4da8c81848a7e20f, 0x35c902f371d587a5}, + }, + { + {0x3d331566c2c6c483, 0xc66d7c43cf6c6899, 0x7c1c36147e850129, 0x18396a316e009c03}, + {0x16d27bc62b29f952, 0x65a7c24e59dc8b05, 0x5c0acd2a0f886241, 0x15e74885451d7b84}, + {0xe3a5b84fbaedfd4b, 0xfd3304cb4b735b85, 0x8d8df8b6031ccd91, 0x603d2627ed921bc}, + {0x194cd2aa6240d213, 
0xa3d4d947c6027d50, 0x7355e133684d2417, 0x3a4ad2ca80689906}, + {0x754062c7dfe10fb, 0x15be7881422ab514, 0xbd3cb5e4447b5b04, 0x267919778e6db957}, + }, + { + {0x9e687a0099aafbe4, 0x8716f500deb883a4, 0xc7c44b3a4227e0f9, 0x36dfbee5ac583fec}, + {0xb9c5848f3b063542, 0xbd7beb77de16d02e, 0x6ed1369422f1597f, 0x35cd85390d4f063e}, + {0x50a256d280092fdc, 0xd2c2b4dbbb258e39, 0x9c6679aa1c4480db, 0x383efabe992c0d62}, + {0xb8fadb8a950482a8, 0x6279eecfb927c631, 0xf8b75e3cffc23fbd, 0x239433b63ac65264}, + {0xaf95feec451ffd03, 0x7ad4f5520a280f0a, 0xfb6d86aa8efee6c6, 0x5ad99e8a107bd86}, + }, + { + {0xd47e6d75a1e72978, 0xec21efdeed48fc4c, 0x3f95628431615152, 0x321e3149a1bd5bda}, + {0x366a1936303c4a03, 0xda1980c0a597dd0f, 0x2fddc599df0832d5, 0x10090314b014c0af}, + {0x9af98513b8fa3e32, 0x76a99773a39804d6, 0x49ed6650bc0128d4, 0x34fce51deddd27d8}, + {0x29ccc4a9b0c2b010, 0x8a8f514b705229fe, 0xc1e6d12b311256f4, 0x3291611fbb2fab1e}, + {0xcb4ebbb08bdb9c9d, 0x744ebe7f914218fb, 0xe77c5babcec8a686, 0x2d42a41428a90175}, + }, + { + {0x8ec3d97ab5616a9e, 0xe8014e7c1141e6ff, 0xa8daca3054309538, 0x1c08ef18d73c76fc}, + {0xcf92950eb4f3789b, 0xaaa450f22a280bf6, 0xc9e4a80c9f2cd996, 0x27f4ac96508752f6}, + {0x7bbd2ed16a76edc6, 0xd8e2186c7de847db, 0x2aa88a2b65118dd0, 0x1c55b5f615dc28ea}, + {0x7ccf57f28acc118a, 0xf5d3c6eb14e65154, 0xdc079fcf442a0f54, 0x11993cb931932e5a}, + {0xf76b4a8906e70667, 0xda7f02e1bf4e36df, 0xaab6b7f1de20dd0f, 0x19fc46ff2ddb173c}, + }, + { + {0x3a3d2dc8cc132eee, 0xf357bef4e1d3a63c, 0xd48a3a0eb2bc1415, 0x2bc75acfaee9db1}, + {0x5fbb66b806674243, 0xf73af3e558a86209, 0x5d30fbb40ede547c, 0x501e03ee293fd1d}, + {0xe90568a8237171ac, 0x650fc27c4da7f2e3, 0x546a3b7012993072, 0x87e3d6d0e54850d}, + {0xd5dfc1afce8127cb, 0xc6874ecb5b959056, 0x17d19ee832e735c7, 0xbf09138a8fe4ccb}, + {0x489c20a8f79b4f58, 0xaa9a7c84d23aedd3, 0xb66eddf614963ee3, 0x2dbf9ab7c2343a43}, + }, + { + {0x3dfd42b46f1e6946, 0x58f29edf399435ba, 0x2521d4287bc79d64, 0x1781fbca8872f93c}, + {0x6d6aed7cb045289f, 0x44132433ac541a78, 0x660c1badd4c7e0de, 0x30330b4022cfe5fa}, + {0x31b90687198ee938, 0xf9b1217c09a47868, 0x1df6bdd931eb2b30, 0x151f40cfa7ce6982}, + {0x9049df43bd2b446c, 0xc117a3d9f6656367, 0xfd356803d03495d8, 0x200e3c9d4c1b8eb8}, + {0x90d2cc13778a0928, 0x9b7d207f836f7d68, 0xac2d328e8bc0cdb3, 0xc70770d5c26e2ae}, + }, + }, + mdsMatrix: [][]*fp.Fp{ + { + {0x32f4f94379d14f6, 0x666eef381fb1d4b0, 0xd760525c85a9299a, 0x70288de13f861f}, + {0x2ab57684465d1ca, 0xf12514d37806396c, 0x825085389a26a582, 0x308efdddaf47d944}, + {0x1b2098a19e203e93, 0x914dcdea2a56e245, 0xc64ed9aa2aef8379, 0xb176f95c389478e}, + {0xf895153087f5dca3, 0xa53543f74c7e98f, 0xf5a0b430a14b8c2d, 0x6ae54007a872b0}, + {0xfd0c32f86be981dc, 0xf60dbc5c1bd0b583, 0xd4f3f8f9a2a4537c, 0x1d71b70d52f42936}, + }, + { + {0x8f85e752c76f7c9c, 0x8297f4f031b02763, 0x30e4ea62df5067b7, 0x2821d0423006dcae}, + {0x5cece392cc5d403f, 0x123da1ba8becd2de, 0x193510960c81a54f, 0x1be17f43c42fe5c0}, + {0x65fc36e3c120e5dd, 0x51a4797b81835701, 0x3123b2b88ae51832, 0x19f174900d86138a}, + {0x6d0db66a74936d4b, 0x1b8aa34d8d4554ad, 0x8605b5c1a219423d, 0x3055d8d876253885}, + {0x9b8b22118b0179db, 0xe14da53ccd481770, 0x109e7ae5ae61278d, 0x13cd85bd55c2f52e}, + }, + { + {0xd03df46130dd77b4, 0xe694d8c7d8fd4ef4, 0xf71d2a65470713aa, 0x255c475344778d2c}, + {0x78597119a27f97bb, 0x1b1fb7c15ccb3746, 0xb86d8ab32d6a6edf, 0xb1e00f75148f670}, + {0x875597b15bf7ed8d, 0x73fa4e676bb9cc5f, 0x96babdc32ae359e, 0x31e6d9f5ccaa763e}, + {0xf27e0f92236fd303, 0x6e607a16f84adab, 0x4cc8addf91894557, 0x1fb0a70aa0f1061c}, + {0xfd2a420b19b31725, 0xddc5361119d53b6e, 
0x3d58af3f6737f156, 0x1350a7bb521c58a6}, + }, + { + {0xf88f2f863cb9d6fb, 0x5078f8e89e8f9ff9, 0xc5583fcea6176010, 0x25e363acdb694459}, + {0x2ece0c297d2f49a2, 0x9ccc88a13c91abae, 0x4138c965288c3d87, 0x437768eb72dfb4f}, + {0x9fab22e085f93fe5, 0x63ca08a361b7fb0d, 0x7e9790bf5bd5837a, 0x6080ada873c8216}, + {0x32d3b34d8c43a402, 0x2c7e5748fb940669, 0xc0a36e42a28c6f80, 0x24ac3e6b181bb185}, + {0x470f7d02d1dae46, 0x8cf3cde540035d00, 0xe3c0216f8d5d807b, 0x3e3e8312ef71fa39}, + }, + { + {0x6682f4469913559f, 0x3b053e58dc4560d6, 0xf84c58444b5bdccf, 0xc3230d834c17967}, + {0xbff2a45de17e9da7, 0x6309bfdc8e152f51, 0xb9ae2f9af1f30a1b, 0x27a8797c59f97b06}, + {0xd787c4dd405d1b3f, 0x7da8effab83f1842, 0xb3f8303ad313dac2, 0x3a1a7a3002e72833}, + {0x327b7f748a0695fa, 0x7dde58a92f496b95, 0x8a02b6088016449c, 0x1cef42b151422c3d}, + {0xb22ffb451127fa1a, 0x3c17ca7183462744, 0x4de2e19b12854d65, 0x20938ee131fc7ef0}, + }, + }, + spongeIv: [][]*fp.Fp{ + // Testnet + { + {0x63d01a5eb2171352, 0xa156f498468c138a, 0x6863ea2849c3a1a2, 0x9d3a988f1f410b1}, + {0x7102a042f3032c7a, 0xec792d3bae28c836, 0x56ca8c6f048bc984, 0x1219b5fcf34e0a1f}, + {0x2ac12e04eb8f550a, 0xa5757bca84777f2b, 0xd3c2bf917b1192ea, 0x1989968c7935c607}, + {0xe1d62db2c86caa07, 0xb8ed617d8704c6b, 0x4e71934f60359a00, 0x25459aa434d50ff}, + {0x290a6ff9dd02df5e, 0x6e4c26ecf7984888, 0x8f5fb54612793d95, 0x31404beb90f0fdc8}, + }, + // Mainnet + { + {0x1bc46288607092ee, 0x679d1013fcd27af4, 0x2302588441a00b35, 0x52aa4180a0e1d3f}, + {0xf44d99f5d1788e7c, 0xa808f4bc1c5e8caa, 0xd3fd8806f5f3de6, 0x12ad0b5be60d68f1}, + {0x6928c9d83855c9d, 0x4b93a3d0d8209f22, 0xbbaea51d0f1f12e6, 0x62815b7ee55e6a8}, + {0x1fbd2d82dcfa2d, 0x78ec7156c609e43a, 0xf1e203a769275642, 0x3e15c2753ca6c1d}, + {0x6299f39409f35a31, 0x279b391979868236, 0x87b62f1b72d1deea, 0x3b44d1afce3a530d}, + }, + }, + }, + // three + { + spongeWidth: 3, + spongeRate: 2, + fullRounds: 54, + sBox: Sept, + roundKeys: [][]*fp.Fp{ + { + {0xa7eaec68b00f442f, 0x6f59e1a835643145, 0x5e1085dc39694ee7, 0x31f43b11041ce57e}, + {0xaf9d994fe02cad85, 0xd023d4dba24251f2, 0xab2b289011b10b15, 0x136703f8461e5900}, + {0x17d4dcf1505a5b8e, 0x905301ba46f35eda, 0xbf183c861c890269, 0xcd2255c8e8bfad7}, + }, + { + {0x652519f35396e7b9, 0xf939da87dd2565f7, 0x77863388eeb1739a, 0x2ef0165a2c2e5844}, + {0x477f697921c5a46f, 0x2f5597aaf22a3c0e, 0xea51d9a2b20ef8ee, 0x27f19ea80ac259a1}, + {0x475253132eafc552, 0x691f153f119e2158, 0x3463b3d75dc73170, 0x3875a2611a0025e4}, + }, + { + {0xc1500848eb7005a6, 0x61e2acd1faf26ff8, 0xf50c67341380fa2f, 0x14e95cd68d6778e}, + {0xa82b34cad0ebd99e, 0x771d54af176024ba, 0xa343788ac43bbde2, 0x31c9ab354d21b72a}, + {0x3deb92a1d0140aab, 0xb710f83bb2ce1349, 0xf3dc159cb9171a97, 0x6c3eac66a23c368}, + }, + { + {0xd25cd659b62b584, 0x3a52d26b8092be14, 0xe36c5155d1e135b, 0x16deeee5467c7a70}, + {0x9b446f453cb50aca, 0xeadf51c8f0af7a58, 0x75da6f44c4a04ca2, 0xccc39a8549cb487}, + {0xe09191801b0fdabc, 0xa7452c89066d4bd0, 0x5244883ddc1fd9cf, 0x3862cc172ab37d2f}, + }, + { + {0x6a87a9921ce9aabf, 0x5d701ed9a030bc04, 0xf99642aa2a835ec8, 0x28319fcffc61a144}, + {0xafb8a54d87a15b37, 0x31e93428debef477, 0xd328f47b48852d58, 0x1e9fb7a3f8c8e39f}, + {0xa58f9aa995fcab1, 0x5fb0135046864286, 0x6ad292753e09ec37, 0x24cb4d2ff2e77907}, + }, + { + {0xbebb7c8e57280d90, 0xa2575331fa3f83b, 0x1bdaa15abc6032e4, 0x137f4d26f7fc2b89}, + {0x8ea04d93f2a3faba, 0x76d83e0976143e11, 0xde45171e6c479b69, 0x3ec500ad89afdcfe}, + {0x9741b7a395379855, 0x916b939a142e1e68, 0x4fda77496b815109, 0x19d993fa47fbc1a7}, + }, + { + {0x31d6e313b491fe95, 0x19f07f1a489cb20a, 0x517ac8eb07d91f83, 
0x16bd2c0e204bb5dd}, + {0x42170b694f7b1926, 0xdde594e9f6060540, 0xbdb7c66abef575a5, 0x26c2c710d9aea3c9}, + {0xccfb5e566332e76d, 0x4e73df588d423339, 0xa4ede3fda7178916, 0x28fc0255accddbf0}, + }, + { + {0x2fc2f7a5cf4eeb76, 0x709a30d94daa56c0, 0x67a5dd9c696533dc, 0x3ace056f839007b7}, + {0x7769ae0082cae71b, 0xdaa4d4c08c74011a, 0x919a01423e32424d, 0x22467d425c99d5c9}, + {0x6bcd3613ace4871c, 0xb0bc217531069def, 0x55348199fa2b487b, 0x2385b602228e77ee}, + }, + { + {0x7022e34179f7fd32, 0xd7d122a3a91838e2, 0x20e0714f41741103, 0x55ca30203c35d65}, + {0xeac64a4e27a532ac, 0xfad717669ae09d08, 0x16088f7d30d724f1, 0x37bb5a0c062e6400}, + {0x7c34a314bcba4635, 0xbfdd1e4cda8ce53, 0x7b9809bd9828ebbb, 0xbe17455fb915f02}, + }, + { + {0x3eedf4d1439104cc, 0x6944849f8d44d187, 0x4dbe80e8f415dfdc, 0x1df9173c6d047e75}, + {0xbe0b090e46076c2e, 0xdc964a5825b49df4, 0xc8c690008b31f4cc, 0x2a7eea60cbe9e9c2}, + {0x7e3ea7d6debb93ed, 0x697818afff9c8ff8, 0xbd716ed0d057327c, 0xb4a53cd8838ca03}, + }, + { + {0x9249570785f595ac, 0xa8baedefd054d755, 0x4e349f30983cb2d1, 0x20e038ae5219f06f}, + {0x1e6b0d84b6b0bd2b, 0x28036e399a182d83, 0x9b4db5cdc8f5900f, 0xba7d843e40009}, + {0xc1862aa768e1a9de, 0x134ee89ea258d84b, 0xa344c6a98af5c8e1, 0x35868f24584f89c0}, + }, + { + {0x6562561d6d2bd2eb, 0xe906eab7bfb25ce1, 0xdc3286360790ca24, 0x95cf9864fa2e99f}, + {0x8df04360bfee0f49, 0xf36da4b0797f28ab, 0x973b5df087ce6868, 0x31b4ebf5790f547a}, + {0x5236c69070d6a6cd, 0xe3483908bed7cd3a, 0x230cf521e8f636d5, 0x1369fed0b1ebdd4e}, + }, + { + {0x6b84b4d8a249ce56, 0xf8f9e37aee88a381, 0x547e20f39ea3ee67, 0x1a521535f4c4609a}, + {0xece3e35b95cf0b40, 0xc3f355a2343a9bba, 0x352dca283e1dc20, 0x18533c495c159ea5}, + {0x5c080eb0e667f655, 0xae7eedb84aaa82cf, 0xc40800efbbfb36e2, 0x23f8f08e412d848e}, + }, + { + {0xe47de2b666816c07, 0xfd084e96b59c227a, 0xffee4cb4adefb5b9, 0x27873596008bf329}, + {0x74c07063640d932f, 0xdcafa7d16923f8c6, 0x69cc607697b5fa58, 0x3fe8dcd350f158a0}, + {0x4685e7e0534a4487, 0x3e0a5a82c1308413, 0xe8f4882500745f38, 0x15d7152e439e3e39}, + }, + { + {0x1b6a574d8750bc73, 0x106eecce46abf1e6, 0xaac26d11ddaa2fa6, 0x38e31bc0f50b77a8}, + {0x46f4e9b5d03cc39d, 0xcc3f83cbed376830, 0x9b08a2cd1eb3c25c, 0x1b205334b2958429}, + {0xd58f58d5f1219de9, 0x1cfaa7547a262198, 0xcded38f37a2a880d, 0x177392b21a898be2}, + }, + { + {0xabc2d6caf40fda5a, 0xeb350f697f26ad45, 0x3a8945142b944356, 0x646f6d15a42f200}, + {0xff43256a693e8ab0, 0x2d305df7c836dcae, 0xccde78bd165aba3, 0x784cf8c1e09fd03}, + {0x11c0a75dfb1d6922, 0xa7dc5f890e8b9385, 0xaf784b2ec0758e11, 0xb79376b50d40562}, + }, + { + {0x4cfa43b93f239b43, 0x8465e6265d898268, 0x1288bcdb6fff50f4, 0x819462af6ba8c78}, + {0xba07b8ffdba328b0, 0x4215ac492cce28b3, 0x9730ec3e2f2188f3, 0x10bc9871d3004f71}, + {0x5a31626caca7283f, 0x40424e694cc63e9a, 0xcceb1adc2a52c6b9, 0x2836d9e7ccb5b686}, + }, + { + {0xf2191ab8935e3139, 0x6263e421438839a9, 0xf36d5611a925964c, 0x3ad254981c5584f}, + {0x77f3cbe4b7ec0506, 0x14cfd27cad91e7f6, 0xd66382bc65d30343, 0x11eb1499a7eb9296}, + {0x8306372b9fccfce6, 0xff3f90b389214f3f, 0x589837150e5e7261, 0x330673584db884bd}, + }, + { + {0xbf438550dc08ca8c, 0xa67d5b92e6b2d9f4, 0x45f1b4851f11355e, 0x3197089756209bf0}, + {0xa7bb2e56a3383876, 0xa1b85198d1a992d9, 0x807a2bb7e34327c6, 0x1a7be86ebc9ceb25}, + {0xb67db6d33a48b0df, 0x20bcebe6769fcc54, 0x2c5defebf8a107df, 0x3c0e75597f21e444}, + }, + { + {0x3ee09fdbfc3969f8, 0xfa1d3b39ae852fe9, 0xf9a75fb275dc4bf3, 0x2a38ad56c662e4c3}, + {0x436111724d967633, 0x7bc3452ae208f145, 0xfa8b9c79a56e7177, 0x3328b53b61a96c1a}, + {0x7dc685041c4a8de8, 0xcc8d3bf1000d5962, 0x53f0f1b8456b9659, 
0xe5633a4ee33b43f}, + }, + { + {0x50e48fb72f31229f, 0xa5b753a3355553ad, 0x1c81ccd682c6dbf0, 0x6c795d332f94020}, + {0xf0fdbce25dad3d31, 0x9c588e128b3cebf0, 0x278767bad1a401ec, 0x12fd7107dacf168b}, + {0x98f200455814ae5c, 0x47f8009a5ae445bd, 0x487393e3ebc8077f, 0x3d579e85ebce0cba}, + }, + { + {0xe8df711446a2a238, 0xde4cc56b9510c04c, 0xb69991b66d096631, 0x1ab962f474f31f94}, + {0x994397076df00b58, 0x655b22de2fc1d376, 0x8c4152fcacaf0b18, 0x20051bcd8c1c3f37}, + {0xd66fe84e8d8d5bcb, 0xba022f54bb73200, 0xdaf17b9ad85c1d89, 0xd939c085418afd2}, + }, + { + {0xd5e63b74c211bc, 0xb12fc9c1def2e171, 0x44eaf0fd6faad3ea, 0x242c8eaf5e3f8025}, + {0xbf97706088f0ff2e, 0x1d8dad8a65750bd0, 0xc29b958d06fc399f, 0x134e81757cb8421e}, + {0x90be439afe7faf2, 0xc4b4762c8cc16bea, 0x1e477be4c4f40c9b, 0x23b4d50dc7ec40d7}, + }, + { + {0x333f64ebf7d2e7a8, 0x870b1f5dbdb11bdf, 0x74c9542dd65f4221, 0x128bb297ac5b8087}, + {0x74cdc264c1c32632, 0xcdbd7f0678ca09f9, 0xeb378a38bded711e, 0x1c4c18ced367bb10}, + {0x436cf961456330a9, 0x5261fdb10401ba02, 0x5aa07cfe8724969b, 0x17d4aa22d7539f30}, + }, + { + {0x22577a304a9a99c0, 0x26990766bceeaf3e, 0xf44a18d2965b75e6, 0x2b9ccb511447556b}, + {0xb4816bee4c57ea14, 0x657c7ff22284eddf, 0x80327f4b561a57a, 0x185d288b2163e408}, + {0x2521ae71bb3e92cd, 0xcfa6a3908f9422a4, 0xc5566b5d0cba3b6a, 0x19186ee34b32adf1}, + }, + { + {0x6ababe0d83a14804, 0x48a2976613fe9c87, 0x8fd28b1e6637a35f, 0x36be57a5b7dc101}, + {0x9409b310f8dedbd5, 0xdcb6ff2dd7ade6e8, 0x58ef424bfb1e96cf, 0x59c25a9e8435caa}, + {0x6017336865718144, 0x3b735147994d3174, 0x556dacb8916ce5fb, 0x6db68d040cf3894}, + }, + { + {0xfb28daa548eae196, 0xdee9f1fb48ad3a18, 0x8aeb7ea74f7458e0, 0x2f9a904f8eca76f4}, + {0x78ab93c53d27c0a9, 0x7cbd38a19659d91e, 0x6190fc4c6fcea287, 0x392493ffebdcf4db}, + {0xc288a18be7c7b29a, 0x293635495be4702a, 0x8eabeaa050c4e742, 0x3ee20e43c9a7e3a9}, + }, + { + {0xb247c20bce94d2fb, 0x7b37f102aba6dc41, 0x4e4cd6f2dfd06908, 0x2a4b7450c10e8656}, + {0xb8530701f1190812, 0x8852c8a7dd78c157, 0xef99612848a09ee6, 0x18cac72301419e17}, + {0x3bf72719696125f9, 0xa9153cf6b92ed66c, 0x5dee1004fa68e8ad, 0x2b0ab9d9c7146b8d}, + }, + { + {0x536e86c644632835, 0xfb528949c3d10378, 0x93cef66ee69cc174, 0xef7cc63c029b540}, + {0x3f71db10b88e45e1, 0x6a1cdfba155d75ba, 0x3ae6893f9c00830, 0x1f4aa1edaeea25af}, + {0x6c911d68b8bae9c7, 0xb562eb9132f9adcf, 0xb99552efd2922e09, 0x1aee619f6e53e5d1}, + }, + { + {0xc3e7eba538b57ce4, 0xe31fc856b301a0ed, 0x907bed9fd07a7f39, 0x2d8b156182efe6c2}, + {0x3c46848a7286379b, 0x8c1ce4abf2bdac01, 0xfe59adadfa57ab2d, 0x118d842df28ba098}, + {0x40346673db347d50, 0x9e9e286fbc221bac, 0x629f295d0d56a062, 0x2c7473af62ed3627}, + }, + { + {0x752c0e54b9084c5e, 0x9876503df35123f2, 0x4033dd4b36b302eb, 0x1f2f3ed1fdf6db7b}, + {0xc1aac2892f6ad5a0, 0xa4d2c62a12460d75, 0xfdfc27280cba16d8, 0x32e7b499b5afa89f}, + {0xa93b9c721205117, 0x29f32892dd66377a, 0x5db34e58446659b5, 0x35b9a957235ce56c}, + }, + { + {0x27dd8772ea8dd6fb, 0x7657580893377a80, 0x98116540d5d734fc, 0x1784ef3dbedaa7f1}, + {0x7ea4b4453558ad1e, 0xe79ae8a84479841f, 0x1448d360ba9dceaf, 0x29004bf2b27c1cc}, + {0x9be743f9caccf63, 0xf852240a936f7e9e, 0xb1cd6029e13842dd, 0x255bdfce8905f3d5}, + }, + { + {0x7415ee9f67631e70, 0x545553ffaf7d2592, 0xde51ae9194c9a7b2, 0x304409b176b5751d}, + {0x9c447ba5504d7b34, 0xfb288ee73506db89, 0x4cb32cefbc235f79, 0x3f2084c1c1605185}, + {0xdd24109730f8173c, 0x58b00f316d07a773, 0x1e86bdb19d121999, 0x3c46e8c04c88a74f}, + }, + { + {0x9ff193f33610856, 0x7cff32dc29fafc7f, 0xab1176764a90b2b5, 0x37a314e9261a12bc}, + {0xea6753fdcff09509, 0x4222dd5e1a66381d, 0x6056412c14d4667e, 
0x30f19d0141776609}, + {0x383a756cdd6bbf4a, 0x4b314310043c1039, 0x7aa6b018a79ad7a4, 0x2d78d62918aa8e15}, + }, + { + {0x2c202fafd61e9da3, 0x89be41afc9c1f1fe, 0x15b6d718ea74a2d, 0x1c1c7eb84e510588}, + {0x342bbc7354bde7a5, 0xc995adfbe2005f6b, 0x85a618fbe9ccea3c, 0x3d61c03132b831b7}, + {0x84f0053a596c462a, 0x3725a512ef2e7da, 0x9866bda3e8d65025, 0x16307f53c31b9b2d}, + }, + { + {0xe11a11d7ad7c2941, 0xa196efe53885dac4, 0x401c98e5702a4b4d, 0x2627aedf841d2535}, + {0x6c137315f42a3d7, 0x1b27880c4b80acc2, 0xd2463e7af19c63f8, 0x1ac361c3ae9360d0}, + {0x9ca7acb3c1e8764c, 0xa3abf95002b63ec7, 0x607aec3d66927bd, 0x350322d135e4b5b0}, + }, + { + {0x8767a56f58823fb1, 0xa17115bf335777f7, 0x2d2d011c09bc4b6, 0x23f5c30c8bec1802}, + {0x469e5aee1e354cff, 0x19b662c5ee659a15, 0x8222b7d81ef2e3d0, 0x25dbe21f1dc14bbb}, + {0xcf3036e9bef51413, 0x2ec597011f15b75d, 0x1b6f15cf41987ffb, 0x243e4331eaa8ca3f}, + }, + { + {0x7e12d7264aa9d64a, 0x986866249a40df19, 0x224d9731f988d510, 0x3da1029b04d0699}, + {0x63ce9c82f26c2122, 0xd07b42737417e607, 0xdffe6c18b3e32c4e, 0xcb558a55e33635b}, + {0xa9e72e1bbb1e1ac6, 0x7c0eac490aa84fe, 0xc716b352d91ef297, 0x17c0df062139a3b0}, + }, + { + {0xb6e0e5acafd75f1b, 0xe79ccc9d178be022, 0xa62fb84a4594821, 0x118ff23cec0e9ab7}, + {0xdaeb6bb9649cc9ed, 0x2b1c328f5bb8662f, 0x5512487ef834c9ff, 0x10a6d8b760d8381f}, + {0x55bcd8a0925661ee, 0x92289bf6cb46afe5, 0xa2b1c38630374b57, 0x1a382c83ffa7d479}, + }, + { + {0x5b9ed41d83b22227, 0x861a12cf34e20309, 0x722187ecc9fb01cd, 0x3d7bb8d8fbbf19aa}, + {0x56b2351caaa77660, 0x5649f8afb77f8250, 0x179b134befc40c2a, 0x117b5ac7fab8a73b}, + {0xbb53d76efa39fb63, 0x3821ad8ab6648b10, 0x860e686568c98df, 0x181510039e8bf016}, + }, + { + {0xabcab07505b93cf, 0x4088c208ddc38a79, 0x60aa91fcf3d30a95, 0x1cca89ba50887f72}, + {0x756ea77ac464fb2f, 0xb54ae17c6bba044a, 0x3350ae0df3e90873, 0x186b9034baedae26}, + {0x813cc8de3ccd6501, 0x612bffa93d7579f9, 0x5ad42ea9bc5bbd6c, 0x123ed625db167d32}, + }, + { + {0x2db34a3c43a01e76, 0x93bf90bd39919618, 0xb62aafc5a062e6ec, 0x30e33d44bc2851c0}, + {0x74dcd4e07d98a23b, 0xb390d88ce8c1c12d, 0xc60d4855592f4f12, 0x3d3808ea888bc3ef}, + {0x6b6125132d3b6c9b, 0xefb93b37d8c98901, 0x5396ab9879f061f3, 0x26814970d99a4859}, + }, + { + {0x65a39dff4c093ac6, 0xdb8b03741b7d38b3, 0x34e5285eb2732099, 0xcffcab13b9c402}, + {0xba151ecf346e77e5, 0xdc944d4cedf21de6, 0xbf6157bdc8bf2df3, 0x39a726986b4d3042}, + {0x44150981f7bb214f, 0xdaf82667aa9df080, 0x4cb0e4251cbd837c, 0x96407a7f4c8919d}, + }, + { + {0xab327a1874a4376d, 0x2ac8b215b83ec21, 0x72ccc1310e756e19, 0x337f64158d29681d}, + {0xfdd547b12cd1c47c, 0xeda570d77efb35ef, 0xee673a9c5c5c6c24, 0x1069938e1c2d5fb8}, + {0xe18e30b72b41cebd, 0x9530ddc35a81e36e, 0xcb076c0372dd1f10, 0x1a6810a2f1aee9a3}, + }, + { + {0xf42b4184e45ef9b0, 0x621c9c9f4b7805bb, 0xe64a4966c57c625e, 0x331dfaac4fca1afe}, + {0x5745585e5aa8e18, 0x48138c500616daa7, 0x740880dd14d57c7b, 0x621e4237f79bc67}, + {0x7752004d293d5b5d, 0x32176da95d737cf5, 0xe14bb9ef4281cf03, 0x56e679688dec9b0}, + }, + { + {0xcf96c57a6c6dced2, 0x9d0641008bc1e8e6, 0xaf7c8a61f00aea43, 0x3de7cc921f006f35}, + {0xe5a284274906c87c, 0x3f57a5c227ad9ef7, 0x8ca3d4e109a701b9, 0x23fa54fa0717813}, + {0xa418bb283bcf6e4c, 0xfe2037c5295b505d, 0xcaa0956e946f4a29, 0x2a042a0cc94e6eb7}, + }, + { + {0xd8df14778b795fec, 0x16c27f664e2ea362, 0xe24b2f6edd5eaebe, 0x14c903e18d6d1fc3}, + {0x89586ac5b4450805, 0xadc6ee91f14ae921, 0xc6bc4c3b0f873a03, 0x351fccf49d14543a}, + {0xd3601422d37aaa9e, 0x988c203077dd68e9, 0x1cae7a1d3150e958, 0x1292d11437a1acd4}, + }, + { + {0xe53e955d71efa6c, 0x8fe1b41936d8c2e, 0xc1115be6796700ba, 
0x16f2577444f333ac}, + {0xb46fdc6c76b91715, 0xba7c98748d26f41a, 0x8a767b731b64d9a2, 0x380f78e27ff89d8f}, + {0xfd9976c4488e5f16, 0xbeaf3cb1cf0cfc03, 0xf2bc02883a339b2a, 0x3a669a9fec53277f}, + }, + { + {0x7489c5648c85dee1, 0x7f9d020bd71f0bc3, 0xcf347020818fd255, 0x34ce3d2873ef623b}, + {0x82891abb666f3ff4, 0x370d13492155070d, 0x26c0426048d99ec2, 0x89cceef0d956cc}, + {0xf161875b3c921a95, 0xe0ee4bd7518d961a, 0xcd3614b6acaf6d93, 0x27ef7c723667b755}, + }, + { + {0x13fb2dcf933fd4d, 0x103f7deb9f64a5aa, 0x26d375314a7a8189, 0x1fb433f9d0d1a1af}, + {0xa49a8d2583ef3bdd, 0x9a7a627ea2417df1, 0x93277db6c1298ce4, 0x224efdd22b53fe03}, + {0xd9ef2ab6918b48ed, 0xab7f9a26705488c0, 0xc179467a22565381, 0x3c8192acf2659bbf}, + }, + { + {0x90c3c01685741695, 0x8fc7d42cbdf62efb, 0x8ca74bdebec3a42b, 0x2d07192135436805}, + {0x597f91232510722, 0xb6ff30a1505c4ea8, 0xdd22fd456a942afe, 0x1cb7fdcfaa9bdd05}, + {0xdcf520384f1ddaf0, 0xeb0ee12c33953a63, 0x9a3a10c7e3aa6f41, 0x1471506df8d986b4}, + }, + { + {0xcd605e2791e6a5e5, 0x1c27ce94b83f068f, 0xb172e53c549a3c12, 0x21f264ce80eef855}, + {0xfe4a5caf09cd178f, 0xd76b6393a8b8597f, 0x828a31309599f1fa, 0x1d7eb56ca9c920f6}, + {0x589bdbc5eb3cc1fc, 0xa5ab6823129a3ed3, 0x1f5b9bd5aca5d3ec, 0x64aae35f2e4b6bc}, + }, + { + {0x365002f75230793, 0x2b8d834bd6a4c78f, 0x7f445a46d203a93d, 0x265c579246ab3106}, + {0x4983fb4db3ca5acd, 0x770c07654eeefaab, 0x54456b57fa1fd5af, 0xb92cdbcf718c353}, + {0x1ac57ac76f928fe, 0x816c4d52b670680, 0x39b8598cba83c07d, 0x29af1b8a7d9ed796}, + }, + { + {0x715eb17f6f71404f, 0xd081dca0cad01695, 0xdeefdf16929d4947, 0x3d7f767198a2e29b}, + {0xebfb13feb0205761, 0x73e014ef38a3d8cb, 0x113bd31d16c16db7, 0x288eacb7eaa4d63c}, + {0x5d22751f1f2c937d, 0x50cc30867fe04f68, 0x70917ec415d4ac2c, 0x58c8b7c2b87e78b}, + }, + }, + mdsMatrix: [][]*fp.Fp{ + { + {0x19ebb0733ab608d5, 0xba29ddd056f8255c, 0x90b26832e301952d, 0xd393e38c7a5d0ab}, + {0x1370915e16c94656, 0xa9d4f14bbd2ea831, 0xbe629fe93a27e612, 0x21571a7cc32e18af}, + {0xb34a71b4f799ea19, 0x9685275173ff9b6b, 0xc2aa354b7f11d698, 0x15691c24c0a9f088}, + }, + { + {0xae932a9d486aface, 0xbef0293e7653db35, 0x279408c4d244c0d1, 0x39dd8040ea0c8d80}, + {0x9d32fbdbaf9bbe27, 0x999e1c5168806efa, 0x7d5f270b3cb077a8, 0xb4ac0738805f8de}, + {0xcfc1e61e0bcf9d5f, 0x12346d0d47576a9, 0xdbd1b0da710c7b5, 0x1db48905ceecc479}, + }, + { + {0x89dde65da2c627b, 0x873534c4aaf1243a, 0xa4f76bf3e5b626c5, 0x22a75f7cd3cad9d3}, + {0xb58ea56557361c3b, 0xd6cbb2cb0bc99acb, 0x5fee1fb5b71b86dd, 0x11d87bbca8e20fbe}, + {0x1d511fe638baa5c8, 0x7b6c7932776e1032, 0x45b27ee070531360, 0x19eb7c902ac90c5}, + }, + }, + spongeIv: [][]*fp.Fp{ + // Testnet + { + {0, 0, 0, 0}, + {0, 0, 0, 0}, + {0, 0, 0, 0}, + }, + // Mainnet + { + {0, 0, 0, 0}, + {0, 0, 0, 0}, + {0, 0, 0, 0}, + }, + }, + }, +} + +func (ctx *Context) Init(pType Permutation, networkId NetworkType) *Context { + if ctx == nil { + return nil + } + if pType != ThreeW && pType != FiveW && pType != Three { + return nil + } + if networkId != TestNet && networkId != MainNet && networkId != NullNet { + return nil + } + + ctx.pType = pType + ctx.spongeWidth = contexts[pType].spongeWidth + ctx.spongeRate = contexts[pType].spongeRate + ctx.fullRounds = contexts[pType].fullRounds + ctx.sBox = contexts[pType].sBox + ctx.roundKeys = contexts[pType].roundKeys + ctx.mdsMatrix = contexts[pType].mdsMatrix + ctx.spongeIv = contexts[pType].spongeIv + ctx.state = make([]*fp.Fp, contexts[pType].spongeWidth) + if networkId != NullNet { + iv := contexts[pType].spongeIv[networkId] + for i := range iv { + ctx.state[i] = new(fp.Fp).Set(iv[i]) + } + } else 
{ + for i := range ctx.state { + ctx.state[i] = new(fp.Fp).SetZero() + } + } + ctx.absorbed = 0 + return ctx +} + +func (ctx *Context) Update(fields []*fp.Fp) { + for _, f := range fields { + if ctx.absorbed == ctx.spongeRate { + ctx.pType.Permute(ctx) + ctx.absorbed = 0 + } + ctx.state[ctx.absorbed].Add(ctx.state[ctx.absorbed], f) + ctx.absorbed++ + } +} + +func (ctx *Context) Digest() *fq.Fq { + ctx.pType.Permute(ctx) + res := ctx.state[0].ToRaw() + return new(fq.Fq).SetRaw(&res) +} diff --git a/signatures/schnorr/mina/poseidon_hash_test.go b/signatures/schnorr/mina/poseidon_hash_test.go new file mode 100644 index 0000000..b79251d --- /dev/null +++ b/signatures/schnorr/mina/poseidon_hash_test.go @@ -0,0 +1,122 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package mina + +import ( + "encoding/hex" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core/curves/native/pasta/fp" + "github.com/sonr-io/sonr/crypto/core/curves/native/pasta/fq" +) + +func TestPoseidonHash(t *testing.T) { + // Reference https://github.com/o1-labs/proof-systems/blob/master/oracle/tests/test_vectors/3w.json + testVectors := []struct { + input []*fp.Fp + output *fq.Fq + }{ + { + input: []*fp.Fp{}, + output: hexToFq("1b3251b6912d82edc78bbb0a5c88f0c6fde1781bc3e654123fa6862a4c63e617"), + }, + { + input: []*fp.Fp{ + hexToFp("df698e389c6f1987ffe186d806f8163738f5bf22e8be02572cce99dc6a4ab030"), + }, + output: hexToFq("f9b1b6c5f8c98017c6b35ac74bc689b6533d6dbbee1fd868831b637a43ea720c"), + }, + { + input: []*fp.Fp{ + hexToFp("56b648a5a85619814900a6b40375676803fe16fb1ad2d1fb79115eb1b52ac026"), + hexToFp("f26a8a03d9c9bbd9c6b2a1324d2a3f4d894bafe25a7e4ad1a498705f4026ff2f"), + }, + output: hexToFq("7a556e93bcfbd27b55867f533cd1df293a7def60dd929a086fdd4e70393b0918"), + }, + { + input: []*fp.Fp{ + hexToFp("075c41fa23e4690694df5ded43624fd60ab7ee6ec6dd48f44dc71bc206cecb26"), + hexToFp("a4e2beebb09bd02ad42bbccc11051e8262b6ef50445d8382b253e91ab1557a0d"), + hexToFp("7dfc23a1242d9c0d6eb16e924cfba342bb2fccf36b8cbaf296851f2e6c469639"), + }, + output: hexToFq("f94b39a919aab06f43f4a4b5a3e965b719a4dbd2b9cd26d2bba4197b10286b35"), + }, + { + input: []*fp.Fp{ + hexToFp("a1a659b14e80d47318c6fcdbbd388de4272d5c2815eb458cf4f196d52403b639"), + hexToFp("5e33065d1801131b64d13038ff9693a7ef6283f24ec8c19438d112ff59d50f04"), + hexToFp("38a8f4d0a9b6d0facdc4e825f6a2ba2b85401d5de119bf9f2bcb908235683e06"), + hexToFp("3456d0313a30d7ccb23bd71ed6aa70ab234dad683d8187b677aef73f42f4f52e"), + }, + output: hexToFq("cc1ccfa964fd6ef9ff1994beb53cfce9ebe1212847ce30e4c64f0777875aec34"), + }, + { + input: []*fp.Fp{ + hexToFp("bccfee48dc76bb991c97bd531cf489f4ee37a66a15f5cfac31bdd4f159d4a905"), + hexToFp("2d106fb21a262f85fd400a995c6d74bad48d8adab2554046871c215e585b072b"), + hexToFp("8300e93ee8587956534d0756bb2aa575e5878c670cff5c8e3e55c62632333c06"), + hexToFp("879c32da31566f6d16afdefff94cba5260fec1057e97f19fc9a61dc2c54a6417"), + hexToFp("9c0aa6e5501cfb2d08aeaea5b3cddac2c9bee85d13324118b44bafb63a59611e"), + }, + output: hexToFq("cf7b9c2128f0e2c0fed4e1eca8d5954b629640c2458d24ba238c1bd3ccbc8e12"), + }, + } + for _, tv := range testVectors { + ctx := new(Context).Init(ThreeW, NetworkType(NullNet)) + ctx.Update(tv.input) + res := ctx.Digest() + require.True(t, res.Equal(tv.output)) + } + testVectors = []struct { + input []*fp.Fp + output *fq.Fq + }{ + { + input: []*fp.Fp{ + hexToFp("0f48c65bd25f85f3e4ea4efebeb75b797bd743603be04b4ead845698b76bd331"), + 
hexToFp("0f48c65bd25f85f3e4ea4efebeb75b797bd743603be04b4ead845698b76bd331"), + hexToFp("f34b505e1a05ecfb327d8d664ff6272ddf5cc1f69618bb6a4407e9533067e703"), + hexToFp("0f48c65bd25f85f3e4ea4efebeb75b797bd743603be04b4ead845698b76bd331"), + hexToFp("ac7cb9c568955737eca56f855954f394cc6b05ac9b698ba3d974f029177cb427"), + hexToFp("010141eca06991fe68dcbd799d93037522dc6c4dead1d77202e8c2ea8f5b1005"), + hexToFp("0300000000000000010000000000000090010000204e0000021ce8d0d2e64012"), + hexToFp("9b030903692b6b7b030000000000000000000000000000000000800100000000"), + hexToFp("000000a800000000000000000000000000000000000000000000000000000000"), + }, + output: &fq.Fq{ + 1348483115953159504, + 14115862092770957043, + 15858311826851986539, + 1644043871107534594, + }, + }, + } + for _, tv := range testVectors { + ctx := new(Context).Init(ThreeW, NetworkType(MainNet)) + ctx.Update(tv.input) + res := ctx.Digest() + require.True(t, res.Equal(tv.output)) + } +} + +func hexToFp(s string) *fp.Fp { + var buffer [32]byte + input, _ := hex.DecodeString(s) + copy(buffer[:], input) + f, _ := new(fp.Fp).SetBytes(&buffer) + return f +} + +func hexToFq(s string) *fq.Fq { + var buffer [32]byte + input, _ := hex.DecodeString(s) + copy(buffer[:], input) + f, _ := new(fq.Fq).SetBytes(&buffer) + return f +} diff --git a/signatures/schnorr/mina/roinput.go b/signatures/schnorr/mina/roinput.go new file mode 100644 index 0000000..4aa9996 --- /dev/null +++ b/signatures/schnorr/mina/roinput.go @@ -0,0 +1,136 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package mina + +import ( + "github.com/sonr-io/sonr/crypto/core/curves/native/pasta/fp" + "github.com/sonr-io/sonr/crypto/core/curves/native/pasta/fq" +) + +// Handles the packing of bits and fields according to Mina spec +type roinput struct { + fields []*fp.Fp + bits *BitVector +} + +var conv = map[bool]int{ + true: 1, + false: 0, +} + +func (r *roinput) Init(fields int, bytes int) *roinput { + r.fields = make([]*fp.Fp, 0, fields) + r.bits = NewBitVector(make([]byte, bytes), 0) + return r +} + +func (r *roinput) Clone() *roinput { + t := new(roinput) + t.fields = make([]*fp.Fp, len(r.fields)) + for i, f := range r.fields { + t.fields[i] = new(fp.Fp).Set(f) + } + buffer := r.bits.Bytes() + data := make([]byte, len(buffer)) + copy(data, buffer) + t.bits = NewBitVector(data, r.bits.Length()) + return t +} + +func (r *roinput) AddFp(fp *fp.Fp) { + r.fields = append(r.fields, fp) +} + +func (r *roinput) AddFq(fq *fq.Fq) { + scalar := fq.ToRaw() + // Mina handles fields as 255 bit numbers + // with each field we lose a bit + for i := 0; i < 255; i++ { + limb := i / 64 + idx := i % 64 + b := (scalar[limb] >> idx) & 1 + r.bits.Append(byte(b)) + } +} + +func (r *roinput) AddBit(b bool) { + r.bits.Append(byte(conv[b])) +} + +func (r *roinput) AddBytes(input []byte) { + for _, b := range input { + for i := 0; i < 8; i++ { + r.bits.Append(byte((b >> i) & 1)) + } + } +} + +func (r *roinput) AddUint32(x uint32) { + for i := 0; i < 32; i++ { + r.bits.Append(byte((x >> i) & 1)) + } +} + +func (r *roinput) AddUint64(x uint64) { + for i := 0; i < 64; i++ { + r.bits.Append(byte((x >> i) & 1)) + } +} + +func (r roinput) Bytes() []byte { + out := make([]byte, (r.bits.Length()+7)/8+32*len(r.fields)) + res := NewBitVector(out, 0) + // Mina handles fields as 255 bit numbers + // with each field we lose a bit + for _, f := range r.fields { + buf := f.ToRaw() + for i := 0; i < 255; i++ { + limb := i / 64 + idx := i % 64 + b := (buf[limb] >> idx) & 1 + 
res.Append(byte(b)) + } + } + for i := 0; i < r.bits.Length(); i++ { + res.Append(r.bits.Element(i)) + } + return out +} + +func (r roinput) Fields() []*fp.Fp { + fields := make([]*fp.Fp, 0, len(r.fields)+r.bits.Length()/256) + for _, f := range r.fields { + fields = append(fields, new(fp.Fp).Set(f)) + } + const maxChunkSize = 254 + bitsConsumed := 0 + bitIdx := 0 + + for bitsConsumed < r.bits.Length() { + var chunk [4]uint64 + + remaining := r.bits.Length() - bitsConsumed + var chunkSizeInBits int + if remaining > maxChunkSize { + chunkSizeInBits = maxChunkSize + } else { + chunkSizeInBits = remaining + } + + for i := 0; i < chunkSizeInBits; i++ { + limb := i >> 6 + idx := i & 0x3F + b := r.bits.Element(bitIdx) + chunk[limb] |= uint64(b) << idx + bitIdx++ + } + fields = append(fields, new(fp.Fp).SetRaw(&chunk)) + bitsConsumed += chunkSizeInBits + } + + return fields +} diff --git a/signatures/schnorr/mina/signature.go b/signatures/schnorr/mina/signature.go new file mode 100644 index 0000000..1fbe8fe --- /dev/null +++ b/signatures/schnorr/mina/signature.go @@ -0,0 +1,49 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package mina + +import ( + "fmt" + + "github.com/sonr-io/sonr/crypto/core/curves/native/pasta/fp" + "github.com/sonr-io/sonr/crypto/core/curves/native/pasta/fq" +) + +// Signature is a Mina compatible signature either for payment or delegation +type Signature struct { + R *fp.Fp + S *fq.Fq +} + +func (sig Signature) MarshalBinary() ([]byte, error) { + var buf [64]byte + rx := sig.R.Bytes() + s := sig.S.Bytes() + copy(buf[:32], rx[:]) + copy(buf[32:], s[:]) + return buf[:], nil +} + +func (sig *Signature) UnmarshalBinary(input []byte) error { + if len(input) != 64 { + return fmt.Errorf("invalid byte sequence") + } + var buf [32]byte + copy(buf[:], input[:32]) + rx, err := new(fp.Fp).SetBytes(&buf) + if err != nil { + return err + } + copy(buf[:], input[32:]) + s, err := new(fq.Fq).SetBytes(&buf) + if err != nil { + return err + } + sig.R = rx + sig.S = s + return nil +} diff --git a/signatures/schnorr/mina/txn.go b/signatures/schnorr/mina/txn.go new file mode 100644 index 0000000..67efec5 --- /dev/null +++ b/signatures/schnorr/mina/txn.go @@ -0,0 +1,224 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package mina + +import ( + "encoding/binary" + "encoding/json" + "fmt" + + "github.com/cosmos/btcutil/base58" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +// Transaction is a Mina transaction for payments or delegations +type Transaction struct { + Fee, FeeToken uint64 + FeePayerPk *PublicKey + Nonce, ValidUntil uint32 + Memo string + Tag [3]bool + SourcePk, ReceiverPk *PublicKey + TokenId, Amount uint64 + Locked bool + NetworkId NetworkType +} + +type txnJson struct { + Common txnCommonJson + Body [2]any +} + +type txnCommonJson struct { + Fee uint64 `json:"fee"` + FeeToken uint64 `json:"fee_token"` + FeePayerPk string `json:"fee_payer_pk"` + Nonce uint32 `json:"nonce"` + ValidUntil uint32 `json:"valid_until"` + Memo string `json:"memo"` + NetworkId uint8 `json:"network_id"` +} + +type txnBodyPaymentJson struct { + SourcePk string `json:"source_pk"` + ReceiverPk string `json:"receiver_pk"` + TokenId uint64 `json:"token_id"` + Amount uint64 `json:"amount"` +} + +type txnBodyDelegationJson struct { + Delegator string `json:"delegator"` + NewDelegate string `json:"new_delegate"` +} + +func (txn *Transaction) MarshalBinary() ([]byte, error) { + mapper := map[bool]byte{ + true: 1, + false: 0, + } + out := make([]byte, 175) + binary.LittleEndian.PutUint64(out, txn.Fee) + binary.LittleEndian.PutUint64(out[8:16], txn.FeeToken) + copy(out[16:48], txn.FeePayerPk.value.ToAffineCompressed()) + binary.LittleEndian.PutUint32(out[48:52], txn.Nonce) + binary.LittleEndian.PutUint32(out[52:56], txn.ValidUntil) + + out[56] = 0x01 + out[57] = byte(len(txn.Memo)) + copy(out[58:90], txn.Memo[:]) + out[90] = mapper[txn.Tag[0]] + out[91] = mapper[txn.Tag[1]] + out[92] = mapper[txn.Tag[2]] + copy(out[93:125], txn.SourcePk.value.ToAffineCompressed()) + copy(out[125:157], txn.ReceiverPk.value.ToAffineCompressed()) + binary.LittleEndian.PutUint64(out[157:165], txn.TokenId) + binary.LittleEndian.PutUint64(out[165:173], txn.Amount) + out[173] = mapper[txn.Locked] + out[174] = byte(txn.NetworkId) + return out, nil +} + +func (txn *Transaction) UnmarshalBinary(input []byte) error { + mapper := map[byte]bool{ + 1: true, + 0: false, + } + if len(input) < 175 { + return fmt.Errorf("invalid byte sequence") + } + feePayerPk := new(PublicKey) + sourcePk := new(PublicKey) + receiverPk := new(PublicKey) + err := feePayerPk.UnmarshalBinary(input[16:48]) + if err != nil { + return err + } + err = sourcePk.UnmarshalBinary(input[93:125]) + if err != nil { + return err + } + err = receiverPk.UnmarshalBinary(input[125:157]) + if err != nil { + return err + } + txn.Fee = binary.LittleEndian.Uint64(input[:8]) + txn.FeeToken = binary.LittleEndian.Uint64(input[8:16]) + txn.FeePayerPk = feePayerPk + txn.Nonce = binary.LittleEndian.Uint32(input[48:52]) + txn.ValidUntil = binary.LittleEndian.Uint32(input[52:56]) + txn.Memo = string(input[58 : 58+input[57]]) + txn.Tag[0] = mapper[input[90]] + txn.Tag[1] = mapper[input[91]] + txn.Tag[2] = mapper[input[92]] + txn.SourcePk = sourcePk + txn.ReceiverPk = receiverPk + txn.TokenId = binary.LittleEndian.Uint64(input[157:165]) + txn.Amount = binary.LittleEndian.Uint64(input[165:173]) + txn.Locked = mapper[input[173]] + txn.NetworkId = NetworkType(input[174]) + return nil +} + +func (txn *Transaction) UnmarshalJSON(input []byte) error { + var t txnJson + err := json.Unmarshal(input, &t) + if err != nil { + return err + } + strType, ok := t.Body[0].(string) + if !ok { + return fmt.Errorf("unexpected type") + } + memo, _, err := 
base58.CheckDecode(t.Common.Memo) + if err != nil { + return err + } + switch strType { + case "Payment": + b, ok := t.Body[1].(txnBodyPaymentJson) + if !ok { + return fmt.Errorf("unexpected type") + } + feePayerPk := new(PublicKey) + err = feePayerPk.ParseAddress(b.SourcePk) + if err != nil { + return err + } + receiverPk := new(PublicKey) + err = receiverPk.ParseAddress(b.ReceiverPk) + if err != nil { + return err + } + txn.FeePayerPk = feePayerPk + txn.ReceiverPk = receiverPk + case "Stake_delegation": + bType, ok := t.Body[1].([2]any) + if !ok { + return fmt.Errorf("unexpected type") + } + delegateType, ok := bType[0].(string) + if !ok { + return fmt.Errorf("unexpected type") + } + if delegateType == "Set_delegate" { + b, ok := bType[1].(txnBodyDelegationJson) + if !ok { + return fmt.Errorf("unexpected type") + } + feePayerPk := new(PublicKey) + err = feePayerPk.ParseAddress(b.Delegator) + if err != nil { + return err + } + receiverPk := new(PublicKey) + err = receiverPk.ParseAddress(b.NewDelegate) + if err != nil { + return err + } + txn.FeePayerPk = feePayerPk + txn.ReceiverPk = receiverPk + } else { + return fmt.Errorf("unexpected type") + } + default: + return fmt.Errorf("unexpected type") + } + txn.Memo = string(memo[2 : 2+memo[1]]) + sourcePk := new(PublicKey) + sourcePk.value = new(curves.Ep).Set(txn.FeePayerPk.value) + txn.SourcePk = sourcePk + txn.Fee = t.Common.Fee + txn.FeeToken = t.Common.FeeToken + txn.Nonce = t.Common.Nonce + txn.ValidUntil = t.Common.ValidUntil + txn.NetworkId = NetworkType(t.Common.NetworkId) + return nil +} + +func (txn Transaction) addRoInput(input *roinput) { + input.AddFp(txn.FeePayerPk.value.X()) + input.AddFp(txn.SourcePk.value.X()) + input.AddFp(txn.ReceiverPk.value.X()) + + input.AddUint64(txn.Fee) + input.AddUint64(txn.FeeToken) + input.AddBit(txn.FeePayerPk.value.Y().IsOdd()) + input.AddUint32(txn.Nonce) + input.AddUint32(txn.ValidUntil) + memo := [34]byte{0x01, byte(len(txn.Memo))} + copy(memo[2:], txn.Memo) + input.AddBytes(memo[:]) + for _, b := range txn.Tag { + input.AddBit(b) + } + + input.AddBit(txn.SourcePk.value.Y().IsOdd()) + input.AddBit(txn.ReceiverPk.value.Y().IsOdd()) + input.AddUint64(txn.TokenId) + input.AddUint64(txn.Amount) + input.AddBit(txn.Locked) +} diff --git a/signatures/schnorr/nem/ed25519_keccak.go b/signatures/schnorr/nem/ed25519_keccak.go new file mode 100644 index 0000000..c746738 --- /dev/null +++ b/signatures/schnorr/nem/ed25519_keccak.go @@ -0,0 +1,323 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +// This file implements the Ed25519 signature algorithm. See +// https://ed25519.cr.yp.to/. +// +// These functions are also compatible with the “Ed25519” function defined in +// RFC 8032. However, unlike RFC 8032's formulation, this package's private key +// representation includes a public key suffix to make multiple signing +// operations with the same key more efficient. This package refers to the RFC +// 8032 private key as the “seed”. +// This code is a port of the public domain, “ref10” implementation of ed25519 +// from SUPERCOP. + +package nem + +import ( + "bytes" + "crypto" + cryptorand "crypto/rand" + "fmt" + "io" + + "filippo.io/edwards25519" + "golang.org/x/crypto/sha3" + + "github.com/sonr-io/sonr/crypto/internal" +) + +const ( + // PublicKeySize is the size, in bytes, of public keys as used in this package. + PublicKeySize = 32 + // PrivateKeySize is the size, in bytes, of private keys as used in this package.
+ PrivateKeySize = 64 + // SignatureSize is the size, in bytes, of signatures generated and verified by this package. + SignatureSize = 64 + // SeedSize is the size, in bytes, of private key seeds. These are the private key representations used by RFC 8032. + SeedSize = 32 +) + +// PublicKey is the type of Ed25519 public keys. +type PublicKey []byte + +// PrivateKey is the type of Ed25519 private keys. It implements crypto.Signer. +type PrivateKey []byte + +// Bytes returns the publicKey in byte array +func (p PublicKey) Bytes() []byte { + return p +} + +// Public returns the PublicKey corresponding to priv. +func (priv PrivateKey) Public() crypto.PublicKey { + publicKey := make([]byte, PublicKeySize) + copy(publicKey, priv[32:]) + return PublicKey(publicKey) +} + +func Keccak512(data []byte) ([]byte, error) { + k512 := sha3.NewLegacyKeccak512() + _, err := k512.Write(data) + if err != nil { + return nil, err + } + return k512.Sum(nil), nil +} + +// Seed returns the private key seed corresponding to priv. It is provided for +// interoperability with RFC 8032. RFC 8032's private keys correspond to seeds +// in this package. +func (priv PrivateKey) Seed() []byte { + seed := make([]byte, SeedSize) + copy(seed, priv[:32]) + return seed +} + +// Sign signs the given message with priv. +// Ed25519 performs two passes over messages to be signed and therefore cannot +// handle pre-hashed messages. Thus opts.HashFunc() must return zero to +// indicate the message hasn't been hashed. This can be achieved by passing +// crypto.Hash(0) as the value for opts. +func (priv PrivateKey) Sign( + rand io.Reader, + message []byte, + opts crypto.SignerOpts, +) (signature []byte, err error) { + if opts.HashFunc() != crypto.Hash(0) { + return nil, fmt.Errorf("ed25519: cannot sign hashed message") + } + sig, err := Sign(priv, message) + if err != nil { + return nil, err + } + return sig, nil +} + +// GenerateKey generates a public/private key pair using entropy from rand. +// If rand is nil, crypto/rand.Reader will be used. +func GenerateKey(rand io.Reader) (PublicKey, PrivateKey, error) { + if rand == nil { + rand = cryptorand.Reader + } + + seed := make([]byte, SeedSize) + if _, err := io.ReadFull(rand, seed); err != nil { + return nil, nil, err + } + + privateKey, err := NewKeyFromSeed(seed) + if err != nil { + return nil, nil, err + } + publicKey := make([]byte, PublicKeySize) + copy(publicKey, privateKey[32:]) + + return publicKey, privateKey, nil +} + +// NewKeyFromSeed calculates a private key from a seed. It will panic if +// len(seed) is not SeedSize. This function is provided for interoperability +// with RFC 8032. RFC 8032's private keys correspond to seeds in this +// package. +func NewKeyFromSeed(seed []byte) (PrivateKey, error) { + // Outline the function body so that the returned key can be stack-allocated. 
+ privateKey := make([]byte, PrivateKeySize) + err := newKeyFromSeed(privateKey, seed) + if err != nil { + return nil, err + } + return privateKey, nil +} + +func newKeyFromSeed(privateKey, seed []byte) error { + if l := len(seed); l != SeedSize { + return fmt.Errorf("ed25519: bad seed length: %d", l) + } + + // Weird required step to get compatibility with the NEM test vectors + // Have to reverse the bytes from the given seed + digest, err := Keccak512(internal.ReverseScalarBytes(seed)) + if err != nil { + return err + } + + sc, err := edwards25519.NewScalar().SetBytesWithClamping(digest[:32]) + if err != nil { + return err + } + + A := edwards25519.Point{} + A.ScalarBaseMult(sc) + publicKeyBytes := A.Bytes() + + copy(privateKey, seed) + copy(privateKey[32:], publicKeyBytes[:]) + return nil +} + +// Sign signs the message with privateKey and returns a signature. It will +// panic if len(privateKey) is not PrivateKeySize. +func Sign(privateKey PrivateKey, message []byte) ([]byte, error) { + // Outline the function body so that the returned signature can be + // stack-allocated. + signature := make([]byte, SignatureSize) + err := sign(signature, privateKey, message) + if err != nil { + return nil, err + } + return signature, nil +} + +func sign(signature, privateKey, message []byte) error { + if l := len(privateKey); l != PrivateKeySize { + return fmt.Errorf("ed25519: bad private key length: %d", l) + } + + seed := privateKey[:32] + digest, err := Keccak512(internal.ReverseScalarBytes(seed)) + if err != nil { + return err + } + + // H(seed) ie. privkey + expandedSecretKey := digest[:32] + sc, err := edwards25519.NewScalar().SetBytesWithClamping(expandedSecretKey) + if err != nil { + return err + } + + // r = H(H(seed) + msg) + hEngine := sha3.NewLegacyKeccak512() + _, err = hEngine.Write(digest[32:]) + if err != nil { + return err + } + + _, err = hEngine.Write(message) + if err != nil { + return err + } + + var hOut1 [64]byte + hEngine.Sum(hOut1[:0]) + + // hash output -> scalar + // Take 64 byte output from keccak512 so need to set bytes as long + r, err := edwards25519.NewScalar().SetUniformBytes(hOut1[:]) + if err != nil { + return err + } + + // R = r*G + R := edwards25519.Point{} + R.ScalarBaseMult(r) + RBytes := R.Bytes() + + // s = H(R + pubkey + msg) + hEngine.Reset() + _, err = hEngine.Write(RBytes) + if err != nil { + return err + } + + _, err = hEngine.Write(privateKey[32:]) + if err != nil { + return err + } + + _, err = hEngine.Write(message) + if err != nil { + return err + } + + var hOut2 [64]byte + hEngine.Sum(hOut2[:0]) + + // hash output -> scalar + // Take 64 byte output from keccak512 so need to set bytes as long + h, err := edwards25519.NewScalar().SetUniformBytes(hOut2[:]) + if err != nil { + return err + } + + // s = (r + h * privKey) + s := edwards25519.NewScalar().MultiplyAdd(h, sc, r) + + copy(signature[:], RBytes) + copy(signature[32:], s.Bytes()) + + return nil +} + +// Verify reports whether sig is a valid signature of message by publicKey. It +// will panic if len(publicKey) is not PublicKeySize. 
+// Previously publicKey is of type PublicKey +func Verify(publicKey PublicKey, message, sig []byte) (bool, error) { + if l := len(publicKey); l != PublicKeySize { + return false, fmt.Errorf("ed25519: bad public key length: %d", l) + } + + if len(sig) != SignatureSize || sig[63]&224 != 0 { + return false, fmt.Errorf("ed25519: bad signature size: %d", len(sig)) + } + + RBytes := sig[:32] + sBytes := sig[32:] + + var publicKeyBytes [32]byte + copy(publicKeyBytes[:], publicKey) + + A := edwards25519.Point{} + _, err := A.SetBytes(publicKeyBytes[:]) + if err != nil { + return false, err + } + + negA := edwards25519.Point{} + negA.Negate(&A) + + // h = H(R + pubkey + msg) + hEngine := sha3.NewLegacyKeccak512() + _, err = hEngine.Write(RBytes) + if err != nil { + return false, err + } + + _, err = hEngine.Write(publicKeyBytes[:]) + if err != nil { + return false, err + } + + _, err = hEngine.Write(message) + if err != nil { + return false, err + } + + var hOut1 [64]byte + hEngine.Sum(hOut1[:0]) + + // hash output -> scalar + // Take 64 byte output from keccak512 so need to set bytes as long + h, err := edwards25519.NewScalar().SetUniformBytes(hOut1[:]) + if err != nil { + return false, err + } + + // s was generated in sign so can set as canonical + s, err := edwards25519.NewScalar().SetCanonicalBytes(sBytes) + if err != nil { + return false, err + } + + // R' = s*G - h*Pubkey = h*negPubkey + s*G + RPrime := edwards25519.Point{} + RPrime.VarTimeDoubleScalarBaseMult(h, &negA, s) + RPrimeBytes := RPrime.Bytes() + + // Check R == R' + return bytes.Equal(RBytes, RPrimeBytes), nil +} diff --git a/signatures/schnorr/nem/ed25519_keccak_test.go b/signatures/schnorr/nem/ed25519_keccak_test.go new file mode 100755 index 0000000..39e0d6d --- /dev/null +++ b/signatures/schnorr/nem/ed25519_keccak_test.go @@ -0,0 +1,190 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package nem + +import ( + "bytes" + "encoding/hex" + "testing" + + "github.com/stretchr/testify/require" + "golang.org/x/crypto/sha3" +) + +type KeyPair struct { + Privkey string `json:"privateKey"` + Pubkey string `json:"publicKey"` +} + +type TestSig struct { + Privkey string `json:"privateKey"` + Pubkey string `json:"publicKey"` + Data string `json:"data"` + Length int `json:"length"` + Sig string `json:"signature"` +} + +// NOTE: NEM provides no test vectors for Keccak512, but has test vectors for Keccak256 +// We use Keccak256 and 512 in the exact same manner, so ensuring this test passes +// gives decent confidence in our Keccak512 use as well +func TestKeccak256SanityCheck(t *testing.T) { + data := "A6151D4904E18EC288243028CEDA30556E6C42096AF7150D6A7232CA5DBA52BD2192E23DAA5FA2BEA3D4BD95EFA2389CD193FCD3376E70A5C097B32C1C62C80AF9D710211545F7CDDDF63747420281D64529477C61E721273CFD78F8890ABB4070E97BAA52AC8FF61C26D195FC54C077DEF7A3F6F79B36E046C1A83CE9674BA1983EC2FB58947DE616DD797D6499B0385D5E8A213DB9AD5078A8E0C940FF0CB6BF92357EA5609F778C3D1FB1E7E36C35DB873361E2BE5C125EA7148EFF4A035B0CCE880A41190B2E22924AD9D1B82433D9C023924F2311315F07B88BFD42850047BF3BE785C4CE11C09D7E02065D30F6324365F93C5E7E423A07D754EB314B5FE9DB4614275BE4BE26AF017ABDC9C338D01368226FE9AF1FB1F815E7317BDBB30A0F36DC69" + toMatch := "4E9E79AB7434F6C7401FB3305D55052EE829B9E46D5D05D43B59FEFB32E9A619" + toMatchBytes, err := hex.DecodeString(toMatch) + require.NoError(t, err) + dataBytes, err := hex.DecodeString(data) + require.NoError(t, err) + k256 := sha3.NewLegacyKeccak256() + _, err = k256.Write(dataBytes) + require.NoError(t, err) + var hashed []byte + hashed = k256.Sum(hashed) + require.Equal(t, hashed, toMatchBytes) +} + +// Test that the pubkey can get derived correctly from privkey +func TestPrivToPubkey(t *testing.T) { + testVectors := GetPrivToPubkeyTestCases() + + for _, pair := range testVectors { + privkeyBytes, err := hex.DecodeString(pair.Privkey) + require.NoError(t, err) + pubkeyBytes, err := hex.DecodeString(pair.Pubkey) + require.NoError(t, err) + + privKeyCalced, err := NewKeyFromSeed(privkeyBytes) + require.NoError(t, err) + + pubKeyCalced := privKeyCalced.Public().(PublicKey) + + require.Equal(t, pubKeyCalced.Bytes(), pubkeyBytes) + } +} + +// Test that we can: +// Get pubkey from privkey +// Obtain the correct signature +// Verify the test vector provided signature +func TestSigs(t *testing.T) { + testVectors := GetSigTestCases() + + for _, ts := range testVectors { + // Test priv -> pubkey again + privkeyBytes, err := hex.DecodeString(ts.Privkey) + require.NoError(t, err) + pubkeyBytes, err := hex.DecodeString(ts.Pubkey) + require.NoError(t, err) + + privKeyCalced, err := NewKeyFromSeed(privkeyBytes) + require.NoError(t, err) + + pubKeyCalced := privKeyCalced.Public().(PublicKey) + + require.True(t, bytes.Equal(pubKeyCalced.Bytes(), pubkeyBytes)) + + dataBytes, err := hex.DecodeString(ts.Data) + require.NoError(t, err) + sigBytes, err := hex.DecodeString(ts.Sig) + require.NoError(t, err) + + // Test sign + sigCalced, err := Sign(privKeyCalced, dataBytes) + require.NoError(t, err) + + require.True(t, bytes.Equal(sigCalced, sigBytes)) + + // Test verify + verified, err := Verify(pubKeyCalced, dataBytes, sigBytes) + require.NoError(t, err) + + require.True(t, verified) + } +} + +// NOTE: Test cases were obtained from NEM +// See link: https://github.com/symbol/test-vectors +// Pulled 5 test vectors for each test case, at time of writing confirmed that all 
10000 vectors passed +func GetPrivToPubkeyTestCases() []KeyPair { + var toReturn []KeyPair + + kp1 := KeyPair{ + Privkey: "575DBB3062267EFF57C970A336EBBC8FBCFE12C5BD3ED7BC11EB0481D7704CED", + Pubkey: "C5F54BA980FCBB657DBAAA42700539B207873E134D2375EFEAB5F1AB52F87844", + } + + kp2 := KeyPair{ + Privkey: "5B0E3FA5D3B49A79022D7C1E121BA1CBBF4DB5821F47AB8C708EF88DEFC29BFE", + Pubkey: "96EB2A145211B1B7AB5F0D4B14F8ABC8D695C7AEE31A3CFC2D4881313C68EEA3", + } + + kp3 := KeyPair{ + Privkey: "738BA9BB9110AEA8F15CAA353ACA5653B4BDFCA1DB9F34D0EFED2CE1325AEEDA", + Pubkey: "2D8425E4CA2D8926346C7A7CA39826ACD881A8639E81BD68820409C6E30D142A", + } + + kp4 := KeyPair{ + Privkey: "E8BF9BC0F35C12D8C8BF94DD3A8B5B4034F1063948E3CC5304E55E31AA4B95A6", + Pubkey: "4FEED486777ED38E44C489C7C4E93A830E4C4A907FA19A174E630EF0F6ED0409", + } + + kp5 := KeyPair{ + Privkey: "C325EA529674396DB5675939E7988883D59A5FC17A28CA977E3BA85370232A83", + Pubkey: "83EE32E4E145024D29BCA54F71FA335A98B3E68283F1A3099C4D4AE113B53E54", + } + + toReturn = append(toReturn, kp1, kp2, kp3, kp4, kp5) + + return toReturn +} + +func GetSigTestCases() []TestSig { + var toReturn []TestSig + + t1 := TestSig{ + Privkey: "ABF4CF55A2B3F742D7543D9CC17F50447B969E6E06F5EA9195D428AB12B7318D", + Pubkey: "8A558C728C21C126181E5E654B404A45B4F0137CE88177435A69978CC6BEC1F4", + Data: "8CE03CD60514233B86789729102EA09E867FC6D964DEA8C2018EF7D0A2E0E24BF7E348E917116690B9", + Length: 41, + Sig: "D9CEC0CC0E3465FAB229F8E1D6DB68AB9CC99A18CB0435F70DEB6100948576CD5C0AA1FEB550BDD8693EF81EB10A556A622DB1F9301986827B96716A7134230C", + } + + t2 := TestSig{ + Privkey: "6AA6DAD25D3ACB3385D5643293133936CDDDD7F7E11818771DB1FF2F9D3F9215", + Pubkey: "BBC8CBB43DDA3ECF70A555981A351A064493F09658FFFE884C6FAB2A69C845C6", + Data: "E4A92208A6FC52282B620699191EE6FB9CF04DAF48B48FD542C5E43DAA9897763A199AAA4B6F10546109F47AC3564FADE0", + Length: 49, + Sig: "98BCA58B075D1748F1C3A7AE18F9341BC18E90D1BEB8499E8A654C65D8A0B4FBD2E084661088D1E5069187A2811996AE31F59463668EF0F8CB0AC46A726E7902", + } + + t3 := TestSig{ + Privkey: "8E32BC030A4C53DE782EC75BA7D5E25E64A2A072A56E5170B77A4924EF3C32A9", + Pubkey: "72D0E65F1EDE79C4AF0BA7EC14204E10F0F7EA09F2BC43259CD60EA8C3A087E2", + Data: "13ED795344C4448A3B256F23665336645A853C5C44DBFF6DB1B9224B5303B6447FBF8240A2249C55", + Length: 40, + Sig: "EF257D6E73706BB04878875C58AA385385BF439F7040EA8297F7798A0EA30C1C5EFF5DDC05443F801849C68E98111AE65D088E726D1D9B7EECA2EB93B677860C", + } + + t4 := TestSig{ + Privkey: "C83CE30FCB5B81A51BA58FF827CCBC0142D61C13E2ED39E78E876605DA16D8D7", + Pubkey: "3EC8923F9EA5EA14F8AAA7E7C2784653ED8C7DE44E352EF9FC1DEE81FC3FA1A3", + Data: "A2704638434E9F7340F22D08019C4C8E3DBEE0DF8DD4454A1D70844DE11694F4C8CA67FDCB08FED0CEC9ABB2112B5E5F89", + Length: 49, + Sig: "0C684E71B35FED4D92B222FC60561DB34E0D8AFE44BDD958AAF4EE965911BEF5991236F3E1BCED59FC44030693BCAC37F34D29E5AE946669DC326E706E81B804", + } + + t5 := TestSig{ + Privkey: "2DA2A0AAE0F37235957B51D15843EDDE348A559692D8FA87B94848459899FC27", + Pubkey: "D73D0B14A9754EEC825FCB25EF1CFA9AE3B1370074EDA53FC64C22334A26C254", + Data: "D2488E854DBCDFDB2C9D16C8C0B2FDBC0ABB6BAC991BFE2B14D359A6BC99D66C00FD60D731AE06D0", + Length: 40, + Sig: "6F17F7B21EF9D6907A7AB104559F77D5A2532B557D95EDFFD6D88C073D87AC00FC838FC0D05282A0280368092A4BD67E95C20F3E14580BE28D8B351968C65E03", + } + + toReturn = append(toReturn, t1, t2, t3, t4, t5) + + return toReturn +} diff --git a/subtle/hkdf.go b/subtle/hkdf.go new file mode 100644 index 0000000..6aaa591 --- /dev/null +++ b/subtle/hkdf.go @@ -0,0 +1,58 @@ +package subtle + +import ( + "fmt" 
+ "io" + + "golang.org/x/crypto/hkdf" +) + +const ( + // Minimum tag size in bytes. This provides minimum 80-bit security strength. + minTagSizeInBytes = uint32(10) +) + +// validateHKDFParams validates parameters of HKDF constructor. +func validateHKDFParams(hash string, _ uint32, tagSize uint32) error { + // validate tag size + digestSize, err := GetHashDigestSize(hash) + if err != nil { + return err + } + if tagSize > 255*digestSize { + return fmt.Errorf("tag size too big") + } + if tagSize < minTagSizeInBytes { + return fmt.Errorf("tag size too small") + } + return nil +} + +// ComputeHKDF extracts a pseudorandom key. +func ComputeHKDF( + hashAlg string, + key []byte, + salt []byte, + info []byte, + tagSize uint32, +) ([]byte, error) { + keySize := uint32(len(key)) + if err := validateHKDFParams(hashAlg, keySize, tagSize); err != nil { + return nil, fmt.Errorf("hkdf: %s", err) + } + hashFunc := GetHashFunc(hashAlg) + if hashFunc == nil { + return nil, fmt.Errorf("hkdf: invalid hash algorithm") + } + if len(salt) == 0 { + salt = make([]byte, hashFunc().Size()) + } + + result := make([]byte, tagSize) + kdf := hkdf.New(hashFunc, key, salt, info) + n, err := io.ReadFull(kdf, result) + if n != len(result) || err != nil { + return nil, fmt.Errorf("compute of hkdf failed") + } + return result, nil +} diff --git a/subtle/random/random.go b/subtle/random/random.go new file mode 100644 index 0000000..9bfc537 --- /dev/null +++ b/subtle/random/random.go @@ -0,0 +1,22 @@ +package random + +import ( + "crypto/rand" + "encoding/binary" +) + +// GetRandomBytes randomly generates n bytes. +func GetRandomBytes(n uint32) []byte { + buf := make([]byte, n) + _, err := rand.Read(buf) + if err != nil { + panic(err) // out of randomness, should never happen + } + return buf +} + +// GetRandomUint32 randomly generates an unsigned 32-bit integer. +func GetRandomUint32() uint32 { + b := GetRandomBytes(4) + return binary.BigEndian.Uint32(b) +} diff --git a/subtle/subtle.go b/subtle/subtle.go new file mode 100644 index 0000000..88754fb --- /dev/null +++ b/subtle/subtle.go @@ -0,0 +1,130 @@ +package subtle + +import ( + "crypto/elliptic" + "crypto/sha1" + "crypto/sha256" + "crypto/sha512" + "encoding/hex" + "errors" + "hash" + "math/big" +) + +var errNilHashFunc = errors.New("nil hash function") + +// hashDigestSize maps hash algorithms to their digest size in bytes. +var hashDigestSize = map[string]uint32{ + "SHA1": uint32(20), + "SHA224": uint32(28), + "SHA256": uint32(32), + "SHA384": uint32(48), + "SHA512": uint32(64), +} + +// GetHashDigestSize returns the digest size of the specified hash algorithm. +func GetHashDigestSize(hash string) (uint32, error) { + digestSize, ok := hashDigestSize[hash] + if !ok { + return 0, errors.New("invalid hash algorithm") + } + return digestSize, nil +} + +// TODO(ckl): Perhaps return an explicit error instead of ""/nil for the +// following functions. + +// ConvertHashName converts different forms of a hash name to the +// hash name that tink recognizes. +func ConvertHashName(name string) string { + switch name { + case "SHA-224": + return "SHA224" + case "SHA-256": + return "SHA256" + case "SHA-384": + return "SHA384" + case "SHA-512": + return "SHA512" + case "SHA-1": + return "SHA1" + default: + return "" + } +} + +// ConvertCurveName converts different forms of a curve name to the +// name that tink recognizes. 
+func ConvertCurveName(name string) string { + switch name { + case "secp256r1", "P-256": + return "NIST_P256" + case "secp384r1", "P-384": + return "NIST_P384" + case "secp521r1", "P-521": + return "NIST_P521" + default: + return "" + } +} + +// GetHashFunc returns the corresponding hash function of the given hash name. +func GetHashFunc(hash string) func() hash.Hash { + switch hash { + case "SHA1": + return sha1.New + case "SHA224": + return sha256.New224 + case "SHA256": + return sha256.New + case "SHA384": + return sha512.New384 + case "SHA512": + return sha512.New + default: + return nil + } +} + +// GetCurve returns the curve object that corresponds to the given curve type. +// It returns null if the curve type is not supported. +func GetCurve(curve string) elliptic.Curve { + switch curve { + case "NIST_P256": + return elliptic.P256() + case "NIST_P384": + return elliptic.P384() + case "NIST_P521": + return elliptic.P521() + default: + return nil + } +} + +// ComputeHash calculates a hash of the given data using the given hash function. +func ComputeHash(hashFunc func() hash.Hash, data []byte) ([]byte, error) { + if hashFunc == nil { + return nil, errNilHashFunc + } + h := hashFunc() + + _, err := h.Write(data) + if err != nil { + return nil, err + } + + return h.Sum(nil), nil +} + +// NewBigIntFromHex returns a big integer from a hex string. +func NewBigIntFromHex(s string) (*big.Int, error) { + if len(s)%2 == 1 { + s = "0" + s + } + b, err := hex.DecodeString(s) + if err != nil { + return nil, err + } + ret := new(big.Int).SetBytes(b) + return ret, nil +} diff --git a/subtle/x25519.go b/subtle/x25519.go new file mode 100644 index 0000000..7a869c5 --- /dev/null +++ b/subtle/x25519.go @@ -0,0 +1,25 @@ +package subtle + +import ( + "crypto/rand" + + "golang.org/x/crypto/curve25519" +) + +// GeneratePrivateKeyX25519 generates a new 32-byte private key. +func GeneratePrivateKeyX25519() ([]byte, error) { + privKey := make([]byte, curve25519.ScalarSize) + _, err := rand.Read(privKey) + return privKey, err +} + +// ComputeSharedSecretX25519 returns the 32-byte shared key, i.e. +// privKey * pubValue on the curve. +func ComputeSharedSecretX25519(privKey, pubValue []byte) ([]byte, error) { + return curve25519.X25519(privKey, pubValue) +} + +// PublicFromPrivateX25519 computes privKey's corresponding public key. +func PublicFromPrivateX25519(privKey []byte) ([]byte, error) { + return ComputeSharedSecretX25519(privKey, curve25519.Basepoint) +} diff --git a/tecdsa/dklsv1/README.md b/tecdsa/dklsv1/README.md new file mode 100755 index 0000000..738ec9d --- /dev/null +++ b/tecdsa/dklsv1/README.md @@ -0,0 +1,13 @@ +--- +aliases: [README] +tags: [] +title: README +linter-yaml-title-alias: README +date created: Wednesday, April 17th 2024, 4:11:40 pm +date modified: Thursday, April 18th 2024, 8:19:25 am +--- + +## Secure Two-party Threshold ECDSA from ECDSA Assumptions + +Package dkls implements the 2-of-2 threshold ECDSA signing algorithm of +[Secure Two-party Threshold ECDSA from ECDSA Assumptions](https://eprint.iacr.org/2018/499). 
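For orientation, a minimal end-to-end sketch of how the wrappers in this package compose (2-of-2 DKG followed by signing), modeled on the flow exercised in protocol_test.go. The in-process `step` helper and the panics are illustrative stand-ins for real transport and error handling:

```go
package main

import (
	"golang.org/x/crypto/sha3"

	"github.com/sonr-io/sonr/crypto/core/curves"
	"github.com/sonr-io/sonr/crypto/core/protocol"
	"github.com/sonr-io/sonr/crypto/tecdsa/dklsv1"
)

// step cranks two protocol.Iterator implementations forward, relaying each
// output message to the other party, until both report ErrProtocolFinished.
func step(first, second protocol.Iterator) {
	var msg *protocol.Message
	var firstErr, secondErr error
	for firstErr != protocol.ErrProtocolFinished || secondErr != protocol.ErrProtocolFinished {
		msg, firstErr = first.Next(msg)
		if firstErr != nil && firstErr != protocol.ErrProtocolFinished {
			panic(firstErr)
		}
		msg, secondErr = second.Next(msg)
		if secondErr != nil && secondErr != protocol.ErrProtocolFinished {
			panic(secondErr)
		}
	}
}

func main() {
	curve := curves.K256()

	// 2-of-2 DKG: Bob sends the first message.
	aliceDkg := dklsv1.NewAliceDkg(curve, protocol.Version1)
	bobDkg := dklsv1.NewBobDkg(curve, protocol.Version1)
	step(bobDkg, aliceDkg)

	// Each party serializes its share of the key for later signing.
	aliceShare, _ := aliceDkg.Result(protocol.Version1)
	bobShare, _ := bobDkg.Result(protocol.Version1)

	// Signing: Alice sends the first message; only Bob ends up with the signature.
	msg := []byte("message to sign")
	aliceSign, _ := dklsv1.NewAliceSign(curve, sha3.New256(), msg, aliceShare, protocol.Version1)
	bobSign, _ := dklsv1.NewBobSign(curve, sha3.New256(), msg, bobShare, protocol.Version1)
	step(aliceSign, bobSign)

	sigMsg, _ := bobSign.Result(protocol.Version1)
	sig, _ := dklsv1.DecodeSignature(sigMsg) // sig is a *curves.EcdsaSignature
	_ = sig
}
```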
diff --git a/tecdsa/dklsv1/boilerplate.go b/tecdsa/dklsv1/boilerplate.go new file mode 100644 index 0000000..ab0e4c4 --- /dev/null +++ b/tecdsa/dklsv1/boilerplate.go @@ -0,0 +1,436 @@ +package dklsv1 + +import ( + "hash" + + "github.com/pkg/errors" + + "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/core/protocol" + "github.com/sonr-io/sonr/crypto/tecdsa/dklsv1/dkg" + "github.com/sonr-io/sonr/crypto/tecdsa/dklsv1/refresh" + "github.com/sonr-io/sonr/crypto/tecdsa/dklsv1/sign" +) + +// AliceDkg DKLS DKG implementation that satisfies the protocol iterator interface. +type AliceDkg struct { + protoStepper + *dkg.Alice +} + +// BobDkg DKLS DKG implementation that satisfies the protocol iterator interface. +type BobDkg struct { + protoStepper + *dkg.Bob +} + +// AliceSign DKLS sign implementation that satisfies the protocol iterator interface. +type AliceSign struct { + protoStepper + *sign.Alice +} + +// BobSign DKLS sign implementation that satisfies the protocol iterator interface. +type BobSign struct { + protoStepper + *sign.Bob +} + +// AliceRefresh DKLS refresh implementation that satisfies the protocol iterator interface. +type AliceRefresh struct { + protoStepper + *refresh.Alice +} + +// BobRefresh DKLS refresh implementation that satisfies the protocol iterator interface. +type BobRefresh struct { + protoStepper + *refresh.Bob +} + +var ( + // Static type assertions + _ protocol.Iterator = &AliceDkg{} + _ protocol.Iterator = &BobDkg{} + _ protocol.Iterator = &AliceSign{} + _ protocol.Iterator = &BobSign{} + _ protocol.Iterator = &AliceRefresh{} + _ protocol.Iterator = &BobRefresh{} +) + +// NewAliceDkg creates a new protocol that can compute a DKG as Alice +func NewAliceDkg(curve *curves.Curve, version uint) *AliceDkg { + a := &AliceDkg{Alice: dkg.NewAlice(curve)} + a.steps = []func(*protocol.Message) (*protocol.Message, error){ + func(input *protocol.Message) (*protocol.Message, error) { + bobSeed, err := decodeDkgRound2Input(input) + if err != nil { + return nil, errors.WithStack(err) + } + roundOutput, err := a.Round2CommitToProof(bobSeed) + if err != nil { + return nil, err + } + return encodeDkgRound2Output(roundOutput, version) + }, + func(input *protocol.Message) (*protocol.Message, error) { + proof, err := decodeDkgRound4Input(input) + if err != nil { + return nil, errors.WithStack(err) + } + aliceProof, err := a.Round4VerifyAndReveal(proof) + if err != nil { + return nil, err + } + return encodeDkgRound4Output(aliceProof, version) + }, + func(input *protocol.Message) (*protocol.Message, error) { + proof, err := decodeDkgRound6Input(input) + if err != nil { + return nil, errors.WithStack(err) + } + choices, err := a.Round6DkgRound2Ot(proof) + if err != nil { + return nil, err + } + return encodeDkgRound6Output(choices, version) + }, + func(input *protocol.Message) (*protocol.Message, error) { + challenge, err := decodeDkgRound8Input(input) + if err != nil { + return nil, errors.WithStack(err) + } + responses, err := a.Round8DkgRound4Ot(challenge) + if err != nil { + return nil, err + } + return encodeDkgRound8Output(responses, version) + }, + func(input *protocol.Message) (*protocol.Message, error) { + opening, err := decodeDkgRound10Input(input) + if err != nil { + return nil, errors.WithStack(err) + } + if err := a.Round10DkgRound6Ot(opening); err != nil { + return nil, err + } + return nil, nil + }, + } + return a +} + +// Result Returns an encoded version of Alice as sequence of bytes that can be used to initialize an AliceSign protocol. 
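+// Until every DKG step has completed, Result returns (nil, nil); once the protocol has finished it encodes the dkg.AliceOutput via EncodeAliceDkgOutput.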
+func (a *AliceDkg) Result(version uint) (*protocol.Message, error) { + // Sanity check + if !a.complete() { + return nil, nil + } + if a.Alice == nil { + return nil, protocol.ErrNotInitialized + } + + result := a.Output() + return EncodeAliceDkgOutput(result, version) +} + +// NewBobDkg Creates a new protocol that can compute a DKG as Bob. +func NewBobDkg(curve *curves.Curve, version uint) *BobDkg { + b := &BobDkg{Bob: dkg.NewBob(curve)} + b.steps = []func(message *protocol.Message) (*protocol.Message, error){ + func(*protocol.Message) (*protocol.Message, error) { + commitment, err := b.Round1GenerateRandomSeed() + if err != nil { + return nil, err + } + return encodeDkgRound1Output(commitment, version) + }, + func(input *protocol.Message) (*protocol.Message, error) { + round3Input, err := decodeDkgRound3Input(input) + if err != nil { + return nil, errors.WithStack(err) + } + bobProof, err := b.Round3SchnorrProve(round3Input) + if err != nil { + return nil, err + } + return encodeDkgRound3Output(bobProof, version) + }, + func(input *protocol.Message) (*protocol.Message, error) { + proof, err := decodeDkgRound5Input(input) + if err != nil { + return nil, errors.WithStack(err) + } + bobProof, err := b.Round5DecommitmentAndStartOt(proof) + if err != nil { + return nil, err + } + return encodeDkgRound5Output(bobProof, version) + }, + func(input *protocol.Message) (*protocol.Message, error) { + choices, err := decodeDkgRound7Input(input) + if err != nil { + return nil, errors.WithStack(err) + } + challenge, err := b.Round7DkgRound3Ot(choices) + if err != nil { + return nil, err + } + return encodeDkgRound7Output(challenge, version) + }, + func(input *protocol.Message) (*protocol.Message, error) { + responses, err := decodeDkgRound9Input(input) + if err != nil { + return nil, errors.WithStack(err) + } + opening, err := b.Round9DkgRound5Ot(responses) + if err != nil { + return nil, err + } + return encodeDkgRound9Output(opening, version) + }, + } + return b +} + +// Result returns an encoded version of Bob as sequence of bytes that can be used to initialize an BobSign protocol. +func (b *BobDkg) Result(version uint) (*protocol.Message, error) { + // Sanity check + if !b.complete() { + return nil, nil + } + if b.Bob == nil { + return nil, protocol.ErrNotInitialized + } + + result := b.Output() + return EncodeBobDkgOutput(result, version) +} + +// NewAliceSign creates a new protocol that can compute a signature as Alice. +// Requires dkg state that was produced at the end of DKG.Output(). 
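+// The dkgResultMessage argument is the encoded DKG output (e.g. the *protocol.Message returned by (*AliceDkg).Result or EncodeAliceDkgOutput); it is decoded internally with DecodeAliceDkgResult.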
+func NewAliceSign( + curve *curves.Curve, + hash hash.Hash, + message []byte, + dkgResultMessage *protocol.Message, + version uint, +) (*AliceSign, error) { + dkgResult, err := DecodeAliceDkgResult(dkgResultMessage) + if err != nil { + return nil, errors.WithStack(err) + } + a := &AliceSign{Alice: sign.NewAlice(curve, hash, dkgResult)} + a.steps = []func(message *protocol.Message) (*protocol.Message, error){ + func(*protocol.Message) (*protocol.Message, error) { + aliceCommitment, err := a.Round1GenerateRandomSeed() + if err != nil { + return nil, err + } + return encodeSignRound1Output(aliceCommitment, version) + }, + func(input *protocol.Message) (*protocol.Message, error) { + round2Output, err := decodeSignRound3Input(input) + if err != nil { + return nil, errors.WithStack(err) + } + round3Output, err := a.Round3Sign(message, round2Output) + if err != nil { + return nil, errors.WithStack(err) + } + return encodeSignRound3Output(round3Output, version) + }, + } + return a, nil +} + +// NewBobSign creates a new protocol that can compute a signature as Bob. +// Requires dkg state that was produced at the end of DKG.Output(). +func NewBobSign( + curve *curves.Curve, + hash hash.Hash, + message []byte, + dkgResultMessage *protocol.Message, + version uint, +) (*BobSign, error) { + dkgResult, err := DecodeBobDkgResult(dkgResultMessage) + if err != nil { + return nil, errors.WithStack(err) + } + b := &BobSign{Bob: sign.NewBob(curve, hash, dkgResult)} + b.steps = []func(message *protocol.Message) (*protocol.Message, error){ + func(input *protocol.Message) (*protocol.Message, error) { + commitment, err := decodeSignRound2Input(input) + if err != nil { + return nil, errors.WithStack(err) + } + round2Output, err := b.Round2Initialize(commitment) + if err != nil { + return nil, errors.WithStack(err) + } + return encodeSignRound2Output(round2Output, version) + }, + func(input *protocol.Message) (*protocol.Message, error) { + round4Input, err := decodeSignRound4Input(input) + if err != nil { + return nil, errors.WithStack(err) + } + + if err = b.Round4Final(message, round4Input); err != nil { + return nil, errors.WithStack(err) + } + return nil, nil + }, + } + return b, nil +} + +// Result always returns an error. +// Alice does not compute a signature in the DKLS protocol; only Bob computes the signature. +func (a *AliceSign) Result(_ uint) (*protocol.Message, error) { + return nil, errors.New("dkls.Alice does not produce a signature") +} + +// Result returns the signature that Bob computed as a *core.EcdsaSignature if the signing protocol completed successfully. 
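+// Result returns (nil, nil) while the signing protocol is still in progress; the completed message can be decoded with DecodeSignature.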
+func (b *BobSign) Result(version uint) (*protocol.Message, error) { + // We can't produce a signature until the protocol completes + if !b.complete() { + return nil, nil + } + if b.Bob == nil { + // Object wasn't created with NewXSign() + return nil, protocol.ErrNotInitialized + } + return encodeSignature(b.Bob.Signature, version) +} + +// NewAliceRefresh creates a new protocol that can compute a key refresh as Alice +func NewAliceRefresh( + curve *curves.Curve, + dkgResultMessage *protocol.Message, + version uint, +) (*AliceRefresh, error) { + dkgResult, err := DecodeAliceDkgResult(dkgResultMessage) + if err != nil { + return nil, errors.WithStack(err) + } + + a := &AliceRefresh{Alice: refresh.NewAlice(curve, dkgResult)} + a.steps = []func(*protocol.Message) (*protocol.Message, error){ + func(_ *protocol.Message) (*protocol.Message, error) { + aliceSeed := a.Round1RefreshGenerateSeed() + return encodeRefreshRound1Output(aliceSeed, version) + }, + func(input *protocol.Message) (*protocol.Message, error) { + round3Input, err := decodeRefreshRound3Input(input) + if err != nil { + return nil, errors.WithStack(err) + } + round3Output, err := a.Round3RefreshMultiplyRound2Ot(round3Input) + if err != nil { + return nil, err + } + return encodeRefreshRound3Output(round3Output, version) + }, + func(input *protocol.Message) (*protocol.Message, error) { + round5Input, err := decodeRefreshRound5Input(input) + if err != nil { + return nil, errors.WithStack(err) + } + round5Output, err := a.Round5RefreshRound4Ot(round5Input) + if err != nil { + return nil, err + } + return encodeRefreshRound5Output(round5Output, version) + }, + func(input *protocol.Message) (*protocol.Message, error) { + round7Input, err := decodeRefreshRound7Input(input) + if err != nil { + return nil, errors.WithStack(err) + } + if err := a.Round7DkgRound6Ot(round7Input); err != nil { + return nil, err + } + return nil, nil + }, + } + return a, nil +} + +// Result Returns an encoded version of Alice as sequence of bytes that can be used to initialize an AliceSign protocol. +func (a *AliceRefresh) Result(version uint) (*protocol.Message, error) { + // Sanity check + if !a.complete() { + return nil, nil + } + if a.Alice == nil { + return nil, protocol.ErrNotInitialized + } + + result := a.Output() + return EncodeAliceDkgOutput(result, version) +} + +// NewBobRefresh Creates a new protocol that can compute a refresh as Bob. 
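+// Like NewBobSign, it expects the encoded DKG output (e.g. from (*BobDkg).Result or EncodeBobDkgOutput), which is decoded with DecodeBobDkgResult.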
+func NewBobRefresh( + curve *curves.Curve, + dkgResultMessage *protocol.Message, + version uint, +) (*BobRefresh, error) { + dkgResult, err := DecodeBobDkgResult(dkgResultMessage) + if err != nil { + return nil, errors.WithStack(err) + } + + b := &BobRefresh{Bob: refresh.NewBob(curve, dkgResult)} + b.steps = []func(message *protocol.Message) (*protocol.Message, error){ + func(input *protocol.Message) (*protocol.Message, error) { + round2Input, err := decodeRefreshRound2Input(input) + if err != nil { + return nil, errors.WithStack(err) + } + round2Output, err := b.Round2RefreshProduceSeedAndMultiplyAndStartOT(round2Input) + if err != nil { + return nil, err + } + return encodeRefreshRound2Output(round2Output, version) + }, + func(input *protocol.Message) (*protocol.Message, error) { + round4Input, err := decodeRefreshRound4Input(input) + if err != nil { + return nil, errors.WithStack(err) + } + round4Output, err := b.Round4RefreshRound3Ot(round4Input) + if err != nil { + return nil, err + } + return encodeRefreshRound4Output(round4Output, version) + }, + func(input *protocol.Message) (*protocol.Message, error) { + round6Input, err := decodeRefreshRound6Input(input) + if err != nil { + return nil, errors.WithStack(err) + } + round6Output, err := b.Round6RefreshRound5Ot(round6Input) + if err != nil { + return nil, err + } + return encodeRefreshRound6Output(round6Output, version) + }, + } + return b, nil +} + +// Result returns an encoded version of Bob as sequence of bytes that can be used to initialize an BobSign protocol. +func (b *BobRefresh) Result(version uint) (*protocol.Message, error) { + // Sanity check + if !b.complete() { + return nil, nil + } + if b.Bob == nil { + return nil, protocol.ErrNotInitialized + } + + result := b.Output() + return EncodeBobDkgOutput(result, version) +} diff --git a/tecdsa/dklsv1/dealer/dealer.go b/tecdsa/dklsv1/dealer/dealer.go new file mode 100644 index 0000000..f638c3d --- /dev/null +++ b/tecdsa/dklsv1/dealer/dealer.go @@ -0,0 +1,87 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +// Package dealer implements key generation via a trusted dealer for the protocol [DKLs18](https://eprint.iacr.org/2018/499.pdf). +// The trusted dealer produces the same output as the corresponding DKG protocol and can be used for signing without +// additional modifications. +// Note that running actual DKG is ALWAYS recommended over a trusted dealer. +package dealer + +import ( + "crypto/rand" + + "github.com/pkg/errors" + + "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/ot/base/simplest" + "github.com/sonr-io/sonr/crypto/ot/extension/kos" + "github.com/sonr-io/sonr/crypto/tecdsa/dklsv1/dkg" +) + +// GenerateAndDeal produces private key material for alice and bob which they can later use in signing. +// Running actual DKG is ALWAYS recommended over using this function, as this function breaks the security guarantees of DKG. +// only use this function if you have a very good reason to. 
+func GenerateAndDeal(curve *curves.Curve) (*dkg.AliceOutput, *dkg.BobOutput, error) { + aliceSecretShare, bobSecretShare, publicKey := produceKeyShares(curve) + aliceOTOutput, bobOTOutput, err := produceOTResults(curve) + if err != nil { + return nil, nil, errors.Wrap(err, "couldn't produce OT results") + } + alice := &dkg.AliceOutput{ + PublicKey: publicKey, + SecretKeyShare: aliceSecretShare, + SeedOtResult: aliceOTOutput, + } + bob := &dkg.BobOutput{ + PublicKey: publicKey, + SecretKeyShare: bobSecretShare, + SeedOtResult: bobOTOutput, + } + return alice, bob, nil +} + +func produceKeyShares( + curve *curves.Curve, +) (aliceSecretShare curves.Scalar, bobSecretShare curves.Scalar, publicKey curves.Point) { + aliceSecretShare = curve.Scalar.Random(rand.Reader) + bobSecretShare = curve.Scalar.Random(rand.Reader) + publicKey = curve.ScalarBaseMult(aliceSecretShare.Mul(bobSecretShare)) + return aliceSecretShare, bobSecretShare, publicKey +} + +func produceOTResults( + curve *curves.Curve, +) (*simplest.ReceiverOutput, *simplest.SenderOutput, error) { + oneTimePadEncryptionKeys := make([]simplest.OneTimePadEncryptionKeys, kos.Kappa) + oneTimePadDecryptionKey := make([]simplest.OneTimePadDecryptionKey, kos.Kappa) + + // we'll need a receiver because in its constructor random bits will be selected. + receiver, err := simplest.NewReceiver(curve, kos.Kappa, [simplest.DigestSize]byte{}) + if err != nil { + return nil, nil, errors.Wrap(err, "couldn't initialize a receiver") + } + packedRandomChoiceBits, randomChoiceBits := receiver.Output.PackedRandomChoiceBits, receiver.Output.RandomChoiceBits + + for i := 0; i < kos.Kappa; i++ { + if _, err := rand.Read(oneTimePadEncryptionKeys[i][0][:]); err != nil { + return nil, nil, errors.WithStack(err) + } + if _, err := rand.Read(oneTimePadEncryptionKeys[i][1][:]); err != nil { + return nil, nil, errors.WithStack(err) + } + oneTimePadDecryptionKey[i] = oneTimePadEncryptionKeys[i][randomChoiceBits[i]] + } + + senderOutput := &simplest.SenderOutput{ + OneTimePadEncryptionKeys: oneTimePadEncryptionKeys, + } + receiverOutput := &simplest.ReceiverOutput{ + PackedRandomChoiceBits: packedRandomChoiceBits, + RandomChoiceBits: randomChoiceBits, + OneTimePadDecryptionKey: oneTimePadDecryptionKey, + } + return receiverOutput, senderOutput, nil +} diff --git a/tecdsa/dklsv1/dealer/dealer_test.go b/tecdsa/dklsv1/dealer/dealer_test.go new file mode 100644 index 0000000..9ccb46b --- /dev/null +++ b/tecdsa/dklsv1/dealer/dealer_test.go @@ -0,0 +1,57 @@ +package dealer_test + +import ( + "testing" + + "github.com/stretchr/testify/require" + "golang.org/x/crypto/sha3" + + "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/tecdsa/dklsv1/dealer" + "github.com/sonr-io/sonr/crypto/tecdsa/dklsv1/sign" +) + +func Test_DealerCanGenerateKeysThatSign(t *testing.T) { + curveInstances := []*curves.Curve{ + curves.K256(), + curves.P256(), + } + for _, curve := range curveInstances { + aliceOutput, bobOutput, err := dealer.GenerateAndDeal(curve) + require.NoError(t, err) + + alice := sign.NewAlice(curve, sha3.New256(), aliceOutput) + bob := sign.NewBob(curve, sha3.New256(), bobOutput) + + message := []byte("A message.") + seed, err := alice.Round1GenerateRandomSeed() + require.NoError(t, err) + round3Output, err := bob.Round2Initialize(seed) + require.NoError(t, err) + round4Output, err := alice.Round3Sign(message, round3Output) + require.NoError(t, err) + err = bob.Round4Final(message, round4Output) + require.NoError(t, err, "curve: %s", curve.Name) + } +} + 
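+// The test above works because GenerateAndDeal produces multiplicative key shares: the joint public key is g^(aliceShare*bobShare), the same shape the DKG protocol outputs, so the dealt shares can be passed directly to sign.NewAlice and sign.NewBob. +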
+func Test_DealerGeneratesDifferentResultsEachTime(t *testing.T) { + curve := curves.K256() + aliceOutput1, bobOutput1, err := dealer.GenerateAndDeal(curve) + require.NoError(t, err) + aliceOutput2, bobOutput2, err := dealer.GenerateAndDeal(curve) + require.NoError(t, err) + + require.NotEqual(t, aliceOutput1.SecretKeyShare, aliceOutput2.SecretKeyShare) + require.NotEqual(t, bobOutput1.SecretKeyShare, bobOutput2.SecretKeyShare) + require.NotEqualValues( + t, + aliceOutput1.SeedOtResult.RandomChoiceBits, + aliceOutput2.SeedOtResult.RandomChoiceBits, + ) + require.NotEqualValues( + t, + bobOutput1.SeedOtResult.OneTimePadEncryptionKeys, + bobOutput2.SeedOtResult.OneTimePadEncryptionKeys, + ) +} diff --git a/tecdsa/dklsv1/dkg/dkg.go b/tecdsa/dklsv1/dkg/dkg.go new file mode 100644 index 0000000..134bfa1 --- /dev/null +++ b/tecdsa/dklsv1/dkg/dkg.go @@ -0,0 +1,309 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +// Package dkg implements the Distributed Key Generation (DKG) protocol of [DKLs18](https://eprint.iacr.org/2018/499.pdf). +// The DKG protocol is defined in "Protocol 2" page 7, of the paper. The Zero Knowledge Proof ideal functionalities are +// realized using schnorr proofs. Moreover, the seed OT is realized using the Verified Simplest OT protocol. +package dkg + +import ( + "crypto/rand" + + "github.com/gtank/merlin" + "github.com/pkg/errors" + + "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/ot/base/simplest" + "github.com/sonr-io/sonr/crypto/ot/extension/kos" + "github.com/sonr-io/sonr/crypto/zkp/schnorr" +) + +// AliceOutput is the result of running DKG for Alice. It contains both the public and secret values that are needed +// for signing. +type AliceOutput struct { + // PublicKey is the joint public key of Alice and Bob. + // This value is public. + PublicKey curves.Point + + // SecretKeyShare is Alice's secret key for the joint public key. + // This output must be kept secret. If it is lost, the users will lose access and cannot create signatures. + SecretKeyShare curves.Scalar + + // SeedOtResult are the outputs that the receiver will obtain as a result of running the "random" OT protocol. + // This output must be kept secret. Although, if it is lost the users can run another OT protocol and obtain + // new values to replace it. + SeedOtResult *simplest.ReceiverOutput +} + +// BobOutput is the result of running DKG for Bob. It contains both the public and secret values that are needed +// for signing. +type BobOutput struct { + // PublicKey is the joint public key of Alice and Bob. + // This value is public. + PublicKey curves.Point + + // SecretKeyShare is Bob's secret key for the joint public key. + // This output must be kept secret. If it is lost, the users will lose access and cannot create signatures. + SecretKeyShare curves.Scalar + + // SeedOtResult are the outputs that the sender will obtain as a result of running the "random" OT protocol. + // This output must be kept secret. Although, if it is lost the users can run another OT protocol and obtain + // new values to replace it. + SeedOtResult *simplest.SenderOutput +} + +// Alice struct encoding Alice's state during one execution of the overall signing algorithm. +// At the end of the joint computation, Alice will NOT obtain the signature. +type Alice struct { + // prover is a schnorr prover for Alice's portion of public key. + prover *schnorr.Prover + + // proof is alice's proof to her portion of the public key. 
It is stored as an intermediate value, during commitment phase. + proof *schnorr.Proof + + // receiver is the base OT receiver. + receiver *simplest.Receiver + + // secretKeyShare is Alice's secret key for the joint public key. + secretKeyShare curves.Scalar + + // publicKey is the joint public key of Alice and Bob. + publicKey curves.Point + + curve *curves.Curve + + transcript *merlin.Transcript +} + +// Bob struct encoding Bob's state during one execution of the overall signing algorithm. +// At the end of the joint computation, Bob will obtain the signature. +type Bob struct { + // prover is a schnorr prover for Bob's portion of public key. + prover *schnorr.Prover // this is a "schnorr statement" for pkB. + + // sender is the base OT sender. + sender *simplest.Sender + + // secretKeyShare is Bob's secret key for the joint public key. + secretKeyShare curves.Scalar + + // publicKey is the joint public key of Alice and Bob. + publicKey curves.Point + + // schnorr proof commitment to Alice's schnorr proof. + aliceCommitment schnorr.Commitment + // 32-byte transcript salt which will be used for Alice's schnorr proof + aliceSalt [simplest.DigestSize]byte + + curve *curves.Curve + + transcript *merlin.Transcript +} + +// Round2Output contains the output of the 2nd round of DKG. +type Round2Output struct { + // Seed is the random value used to derive the joint unique session id. + Seed [simplest.DigestSize]byte + + // Commitment is the commitment to the ZKP to Alice's secret key share. + Commitment schnorr.Commitment +} + +// NewAlice creates a party that can participate in 2-of-2 DKG and threshold signature. +func NewAlice(curve *curves.Curve) *Alice { + return &Alice{ + curve: curve, + transcript: merlin.NewTranscript("Coinbase_DKLs_DKG"), + } +} + +// NewBob creates a party that can participate in 2-of-2 DKG and threshold signature. This party +// is the receiver of the signature at the end. +func NewBob(curve *curves.Curve) *Bob { + return &Bob{ + curve: curve, + transcript: merlin.NewTranscript("Coinbase_DKLs_DKG"), + } +} + +// Round1GenerateRandomSeed Bob flips random coins, and sends these to Alice +// in this round, Bob flips 32 random bytes and sends them to Alice. +// note that this is not _explicitly_ given as part of the protocol in https://eprint.iacr.org/2018/499.pdf, Protocol 1). +// rather, it is part of our generation of a unique session identifier, for use in subsequent schnorr proofs / seed OT / etc. +// we do it by having each party sample 32 bytes, then by appending _both_ as salts. secure if either party is honest +func (bob *Bob) Round1GenerateRandomSeed() ([simplest.DigestSize]byte, error) { + bobSeed := [simplest.DigestSize]byte{} + if _, err := rand.Read(bobSeed[:]); err != nil { + return [simplest.DigestSize]byte{}, errors.Wrap( + err, + "generating random bytes in bob DKG round 1 generate", + ) + } + bob.transcript.AppendMessage( + []byte("session_id_bob"), + bobSeed[:], + ) // note: bob appends first here + return bobSeed, nil +} + +// Round2CommitToProof steps 1) and 2) of protocol 2 on page 7. 
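+// Alice appends Bob's seed and a fresh 32-byte seed of her own to the shared transcript, derives sub-session IDs for the seed OT receiver and for her Schnorr proof, samples her secret key share, and returns her seed together with a commitment to a Schnorr proof of that share.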
+func (alice *Alice) Round2CommitToProof(bobSeed [simplest.DigestSize]byte) (*Round2Output, error) { + aliceSeed := [simplest.DigestSize]byte{} + if _, err := rand.Read(aliceSeed[:]); err != nil { + return nil, errors.Wrap(err, "generating random bytes in bob DKG round 1 generate") + } + alice.transcript.AppendMessage([]byte("session_id_bob"), bobSeed[:]) + alice.transcript.AppendMessage([]byte("session_id_alice"), aliceSeed[:]) + + var err error + uniqueSessionId := [simplest.DigestSize]byte{} // note: will use and re-use this below for sub-session IDs. + copy( + uniqueSessionId[:], + alice.transcript.ExtractBytes([]byte("salt for simplest OT"), simplest.DigestSize), + ) + alice.receiver, err = simplest.NewReceiver(alice.curve, kos.Kappa, uniqueSessionId) + if err != nil { + return nil, errors.Wrap(err, "alice constructing new seed OT receiver in Alice DKG round 1") + } + + alice.secretKeyShare = alice.curve.Scalar.Random(rand.Reader) + copy( + uniqueSessionId[:], + alice.transcript.ExtractBytes([]byte("salt for alice schnorr"), simplest.DigestSize), + ) + alice.prover = schnorr.NewProver(alice.curve, nil, uniqueSessionId[:]) + var commitment schnorr.Commitment + alice.proof, commitment, err = alice.prover.ProveCommit( + alice.secretKeyShare, + ) // will mutate `pkA` + if err != nil { + return nil, errors.Wrap(err, "prove + commit in alice DKG Commit round 1") + } + return &Round2Output{ + Commitment: commitment, + Seed: aliceSeed, + }, nil +} + +// Round3SchnorrProve receives Bob's Commitment and returns schnorr statment + proof. +// Steps 1 and 3 of protocol 2 on page 7. +func (bob *Bob) Round3SchnorrProve(round2Output *Round2Output) (*schnorr.Proof, error) { + bob.transcript.AppendMessage([]byte("session_id_alice"), round2Output.Seed[:]) + + bob.aliceCommitment = round2Output.Commitment // store it, so that we can check when alice decommits + + var err error + uniqueSessionId := [simplest.DigestSize]byte{} // note: will use and re-use this below for sub-session IDs. + copy( + uniqueSessionId[:], + bob.transcript.ExtractBytes([]byte("salt for simplest OT"), simplest.DigestSize), + ) + bob.sender, err = simplest.NewSender(bob.curve, kos.Kappa, uniqueSessionId) + if err != nil { + return nil, errors.Wrap(err, "bob constructing new OT sender in DKG round 2") + } + // extract alice's salt in the right order; we won't use this until she reveals her proof and we verify it below + copy( + bob.aliceSalt[:], + bob.transcript.ExtractBytes([]byte("salt for alice schnorr"), simplest.DigestSize), + ) + bob.secretKeyShare = bob.curve.Scalar.Random(rand.Reader) + copy( + uniqueSessionId[:], + bob.transcript.ExtractBytes([]byte("salt for bob schnorr"), simplest.DigestSize), + ) + bob.prover = schnorr.NewProver(bob.curve, nil, uniqueSessionId[:]) + proof, err := bob.prover.Prove(bob.secretKeyShare) + if err != nil { + return nil, errors.Wrap(err, "bob schnorr proving in DKG round 2") + } + return proof, err +} + +// Round4VerifyAndReveal step 4 of protocol 2 on page 7. 
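+// Alice verifies Bob's Schnorr proof against the transcript-derived session ID, computes the joint public key as proof.Statement * her secret key share, and reveals her own proof so Bob can open the commitment sent in round 2.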
+func (alice *Alice) Round4VerifyAndReveal(proof *schnorr.Proof) (*schnorr.Proof, error) { + var err error + uniqueSessionId := [simplest.DigestSize]byte{} + copy( + uniqueSessionId[:], + alice.transcript.ExtractBytes([]byte("salt for bob schnorr"), simplest.DigestSize), + ) + if err = schnorr.Verify(proof, alice.curve, nil, uniqueSessionId[:]); err != nil { + return nil, errors.Wrap( + err, + "alice's verification of Bob's schnorr proof failed in DKG round 3", + ) + } + alice.publicKey = proof.Statement.Mul(alice.secretKeyShare) + return alice.proof, nil +} + +// Round5DecommitmentAndStartOt step 5 of protocol 2 on page 7. +func (bob *Bob) Round5DecommitmentAndStartOt(proof *schnorr.Proof) (*schnorr.Proof, error) { + var err error + if err = schnorr.DecommitVerify(proof, bob.aliceCommitment, bob.curve, nil, bob.aliceSalt[:]); err != nil { + return nil, errors.Wrap(err, "decommit + verify failed in bob's DKG round 4") + } + bob.publicKey = proof.Statement.Mul(bob.secretKeyShare) + seedOTRound1Output, err := bob.sender.Round1ComputeAndZkpToPublicKey() + if err != nil { + return nil, errors.Wrap(err, "bob computing round 1 of seed OT within DKG round 4") + } + return seedOTRound1Output, nil +} + +// Round6DkgRound2Ot is a thin wrapper around the 2nd round of seed OT protocol. +func (alice *Alice) Round6DkgRound2Ot( + proof *schnorr.Proof, +) ([]simplest.ReceiversMaskedChoices, error) { + return alice.receiver.Round2VerifySchnorrAndPadTransfer(proof) +} + +// Round7DkgRound3Ot is a thin wrapper around the 3rd round of seed OT protocol. +func (bob *Bob) Round7DkgRound3Ot( + compressedReceiversMaskedChoice []simplest.ReceiversMaskedChoices, +) ([]simplest.OtChallenge, error) { + return bob.sender.Round3PadTransfer(compressedReceiversMaskedChoice) +} + +// Round8DkgRound4Ot is a thin wrapper around the 4th round of seed OT protocol. +func (alice *Alice) Round8DkgRound4Ot( + challenge []simplest.OtChallenge, +) ([]simplest.OtChallengeResponse, error) { + return alice.receiver.Round4RespondToChallenge(challenge) +} + +// Round9DkgRound5Ot is a thin wrapper around the 5th round of seed OT protocol. +func (bob *Bob) Round9DkgRound5Ot( + challengeResponses []simplest.OtChallengeResponse, +) ([]simplest.ChallengeOpening, error) { + return bob.sender.Round5Verify(challengeResponses) +} + +// Round10DkgRound6Ot is a thin wrapper around the 6th round of seed OT protocol. +func (alice *Alice) Round10DkgRound6Ot(challengeOpenings []simplest.ChallengeOpening) error { + return alice.receiver.Round6Verify(challengeOpenings) +} + +// Output returns the output of the DKG operation. Must be called after step 9. Calling it before that step +// has undefined behaviour. +func (alice *Alice) Output() *AliceOutput { + return &AliceOutput{ + PublicKey: alice.publicKey, + SecretKeyShare: alice.secretKeyShare, + SeedOtResult: alice.receiver.Output, + } +} + +// Output returns the output of the DKG operation. Must be called after step 9. Calling it before that step +// has undefined behaviour. +func (bob *Bob) Output() *BobOutput { + return &BobOutput{ + PublicKey: bob.publicKey, + SecretKeyShare: bob.secretKeyShare, + SeedOtResult: bob.sender.Output, + } +} diff --git a/tecdsa/dklsv1/dkg/dkg_test.go b/tecdsa/dklsv1/dkg/dkg_test.go new file mode 100644 index 0000000..214af50 --- /dev/null +++ b/tecdsa/dklsv1/dkg/dkg_test.go @@ -0,0 +1,104 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package dkg + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/ot/extension/kos" +) + +func TestDkg(t *testing.T) { + t.Parallel() + curveInstances := []*curves.Curve{ + curves.K256(), + curves.P256(), + } + for _, curve := range curveInstances { + boundCurve := curve + t.Run(fmt.Sprintf("testing dkg for curve %s", boundCurve.Name), func(tt *testing.T) { + tt.Parallel() + alice := NewAlice(boundCurve) + bob := NewBob(boundCurve) + + seed, err := bob.Round1GenerateRandomSeed() + require.NoError(tt, err) + round3Output, err := alice.Round2CommitToProof(seed) + require.NoError(tt, err) + proof, err := bob.Round3SchnorrProve(round3Output) + require.NoError(tt, err) + proof, err = alice.Round4VerifyAndReveal(proof) + require.NoError(tt, err) + proof, err = bob.Round5DecommitmentAndStartOt(proof) + require.NoError(tt, err) + compressedReceiversMaskedChoice, err := alice.Round6DkgRound2Ot(proof) + require.NoError(tt, err) + challenge, err := bob.Round7DkgRound3Ot(compressedReceiversMaskedChoice) + require.NoError(tt, err) + challengeResponse, err := alice.Round8DkgRound4Ot(challenge) + require.NoError(tt, err) + challengeOpenings, err := bob.Round9DkgRound5Ot(challengeResponse) + require.NoError(tt, err) + err = alice.Round10DkgRound6Ot(challengeOpenings) + require.NoError(tt, err) + // Verify correctness of the OT subprotocol after has completed + for i := 0; i < kos.Kappa; i++ { + if alice.receiver.Output.OneTimePadDecryptionKey[i] != bob.sender.Output.OneTimePadEncryptionKeys[i][alice.receiver.Output.RandomChoiceBits[i]] { + tt.Errorf("oblivious transfer is incorrect at index i=%v", i) + } + } + + pkA := boundCurve.ScalarBaseMult(alice.Output().SecretKeyShare) + pkB := boundCurve.ScalarBaseMult(bob.Output().SecretKeyShare) + + computedPublicKeyA := pkA.Mul(bob.Output().SecretKeyShare) + require.True(tt, computedPublicKeyA.Equal(alice.Output().PublicKey)) + require.True(tt, computedPublicKeyA.Equal(bob.Output().PublicKey)) + + computedPublicKeyB := pkB.Mul(alice.Output().SecretKeyShare) + require.True(tt, computedPublicKeyB.Equal(alice.Output().PublicKey)) + require.True(tt, computedPublicKeyB.Equal(bob.Output().PublicKey)) + }) + } +} + +func BenchmarkDkg(b *testing.B) { + if testing.Short() { + b.SkipNow() + } + curve := curves.K256() + + for n := 0; n < b.N; n++ { + alice := NewAlice(curve) + bob := NewBob(curve) + + seed, err := bob.Round1GenerateRandomSeed() + require.NoError(b, err) + round3Output, err := alice.Round2CommitToProof(seed) + require.NoError(b, err) + proof, err := bob.Round3SchnorrProve(round3Output) + require.NoError(b, err) + proof, err = alice.Round4VerifyAndReveal(proof) + require.NoError(b, err) + proof, err = bob.Round5DecommitmentAndStartOt(proof) + require.NoError(b, err) + compressedReceiversMaskedChoice, err := alice.Round6DkgRound2Ot(proof) + require.NoError(b, err) + challenge, err := bob.Round7DkgRound3Ot(compressedReceiversMaskedChoice) + require.NoError(b, err) + challengeResponse, err := alice.Round8DkgRound4Ot(challenge) + require.NoError(b, err) + challengeOpenings, err := bob.Round9DkgRound5Ot(challengeResponse) + require.NoError(b, err) + err = alice.Round10DkgRound6Ot(challengeOpenings) + require.NoError(b, err) + } +} diff --git a/tecdsa/dklsv1/dkgserializers.go b/tecdsa/dklsv1/dkgserializers.go new file mode 100644 index 0000000..00a6012 --- /dev/null +++ b/tecdsa/dklsv1/dkgserializers.go @@ -0,0 
+1,327 @@ +package dklsv1 + +import ( + "bytes" + "encoding/gob" + + "github.com/pkg/errors" + + "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/core/protocol" + "github.com/sonr-io/sonr/crypto/ot/base/simplest" + "github.com/sonr-io/sonr/crypto/tecdsa/dklsv1/dkg" + "github.com/sonr-io/sonr/crypto/zkp/schnorr" +) + +const payloadKey = "direct" + +func newDkgProtocolMessage(payload []byte, round string, version uint) *protocol.Message { + return &protocol.Message{ + Protocol: protocol.Dkls18Dkg, + Version: version, + Payloads: map[string][]byte{payloadKey: payload}, + Metadata: map[string]string{"round": round}, + } +} + +func registerTypes() { + gob.Register(&curves.ScalarK256{}) + gob.Register(&curves.PointK256{}) + gob.Register(&curves.ScalarP256{}) + gob.Register(&curves.PointP256{}) +} + +func encodeDkgRound1Output(commitment [32]byte, version uint) (*protocol.Message, error) { + if version != protocol.Version1 { + return nil, errors.New("only version 1 is supported") + } + registerTypes() + buf := bytes.NewBuffer([]byte{}) + enc := gob.NewEncoder(buf) + if err := enc.Encode(&commitment); err != nil { + return nil, errors.WithStack(err) + } + return newDkgProtocolMessage(buf.Bytes(), "1", version), nil +} + +func decodeDkgRound2Input(m *protocol.Message) ([32]byte, error) { + if m.Version != protocol.Version1 { + return [32]byte{}, errors.New("only version 1 is supported") + } + buf := bytes.NewBuffer(m.Payloads[payloadKey]) + dec := gob.NewDecoder(buf) + decoded := [32]byte{} + if err := dec.Decode(&decoded); err != nil { + return [32]byte{}, errors.WithStack(err) + } + return decoded, nil +} + +func encodeDkgRound2Output(output *dkg.Round2Output, version uint) (*protocol.Message, error) { + if version != protocol.Version1 { + return nil, errors.New("only version 1 is supported") + } + buf := bytes.NewBuffer([]byte{}) + enc := gob.NewEncoder(buf) + if err := enc.Encode(output); err != nil { + return nil, errors.WithStack(err) + } + return newDkgProtocolMessage(buf.Bytes(), "2", version), nil +} + +func decodeDkgRound3Input(m *protocol.Message) (*dkg.Round2Output, error) { + if m.Version != protocol.Version1 { + return nil, errors.New("only version 1 is supported") + } + buf := bytes.NewBuffer(m.Payloads[payloadKey]) + dec := gob.NewDecoder(buf) + decoded := new(dkg.Round2Output) + if err := dec.Decode(decoded); err != nil { + return nil, errors.WithStack(err) + } + return decoded, nil +} + +func encodeDkgRound3Output(proof *schnorr.Proof, version uint) (*protocol.Message, error) { + if version != protocol.Version1 { + return nil, errors.New("only version 1 is supported") + } + buf := bytes.NewBuffer([]byte{}) + enc := gob.NewEncoder(buf) + if err := enc.Encode(proof); err != nil { + return nil, errors.WithStack(err) + } + return newDkgProtocolMessage(buf.Bytes(), "3", version), nil +} + +func decodeDkgRound4Input(m *protocol.Message) (*schnorr.Proof, error) { + if m.Version != protocol.Version1 { + return nil, errors.New("only version 1 is supported") + } + buf := bytes.NewBuffer(m.Payloads[payloadKey]) + dec := gob.NewDecoder(buf) + decoded := new(schnorr.Proof) + if err := dec.Decode(decoded); err != nil { + return nil, errors.WithStack(err) + } + return decoded, nil +} + +func encodeDkgRound4Output(proof *schnorr.Proof, version uint) (*protocol.Message, error) { + if version != protocol.Version1 { + return nil, errors.New("only version 1 is supported") + } + buf := bytes.NewBuffer([]byte{}) + enc := gob.NewEncoder(buf) + if err := enc.Encode(proof); 
err != nil { + return nil, errors.WithStack(err) + } + return newDkgProtocolMessage(buf.Bytes(), "4", version), nil +} + +func decodeDkgRound5Input(m *protocol.Message) (*schnorr.Proof, error) { + if m.Version != protocol.Version1 { + return nil, errors.New("only version 1 is supported") + } + buf := bytes.NewBuffer(m.Payloads[payloadKey]) + dec := gob.NewDecoder(buf) + decoded := new(schnorr.Proof) + if err := dec.Decode(decoded); err != nil { + return nil, errors.WithStack(err) + } + return decoded, nil +} + +func encodeDkgRound5Output(proof *schnorr.Proof, version uint) (*protocol.Message, error) { + if version != protocol.Version1 { + return nil, errors.New("only version 1 is supported") + } + buf := bytes.NewBuffer([]byte{}) + enc := gob.NewEncoder(buf) + if err := enc.Encode(proof); err != nil { + return nil, errors.WithStack(err) + } + return newDkgProtocolMessage(buf.Bytes(), "5", version), nil +} + +func decodeDkgRound6Input(m *protocol.Message) (*schnorr.Proof, error) { + if m.Version != protocol.Version1 { + return nil, errors.New("only version 1 is supported") + } + buf := bytes.NewBuffer(m.Payloads[payloadKey]) + dec := gob.NewDecoder(buf) + decoded := new(schnorr.Proof) + if err := dec.Decode(decoded); err != nil { + return nil, errors.WithStack(err) + } + return decoded, nil +} + +func encodeDkgRound6Output( + choices []simplest.ReceiversMaskedChoices, + version uint, +) (*protocol.Message, error) { + if version != protocol.Version1 { + return nil, errors.New("only version 1 is supported") + } + buf := bytes.NewBuffer([]byte{}) + enc := gob.NewEncoder(buf) + if err := enc.Encode(choices); err != nil { + return nil, errors.WithStack(err) + } + return newDkgProtocolMessage(buf.Bytes(), "6", version), nil +} + +func decodeDkgRound7Input(m *protocol.Message) ([]simplest.ReceiversMaskedChoices, error) { + if m.Version != protocol.Version1 { + return nil, errors.New("only version 1 is supported") + } + buf := bytes.NewBuffer(m.Payloads[payloadKey]) + dec := gob.NewDecoder(buf) + decoded := []simplest.ReceiversMaskedChoices{} + if err := dec.Decode(&decoded); err != nil { + return nil, errors.WithStack(err) + } + return decoded, nil +} + +func encodeDkgRound7Output( + challenge []simplest.OtChallenge, + version uint, +) (*protocol.Message, error) { + if version != protocol.Version1 { + return nil, errors.New("only version 1 is supported") + } + buf := bytes.NewBuffer([]byte{}) + enc := gob.NewEncoder(buf) + if err := enc.Encode(challenge); err != nil { + return nil, errors.WithStack(err) + } + return newDkgProtocolMessage(buf.Bytes(), "7", version), nil +} + +func decodeDkgRound8Input(m *protocol.Message) ([]simplest.OtChallenge, error) { + if m.Version != protocol.Version1 { + return nil, errors.New("only version 1 is supported") + } + buf := bytes.NewBuffer(m.Payloads[payloadKey]) + dec := gob.NewDecoder(buf) + decoded := []simplest.OtChallenge{} + if err := dec.Decode(&decoded); err != nil { + return nil, errors.WithStack(err) + } + return decoded, nil +} + +func encodeDkgRound8Output( + responses []simplest.OtChallengeResponse, + version uint, +) (*protocol.Message, error) { + if version != protocol.Version1 { + return nil, errors.New("only version 1 is supported") + } + buf := bytes.NewBuffer([]byte{}) + enc := gob.NewEncoder(buf) + if err := enc.Encode(responses); err != nil { + return nil, errors.WithStack(err) + } + return newDkgProtocolMessage(buf.Bytes(), "8", version), nil +} + +func decodeDkgRound9Input(m *protocol.Message) ([]simplest.OtChallengeResponse, error) { + if 
m.Version != protocol.Version1 { + return nil, errors.New("only version 1 is supported") + } + buf := bytes.NewBuffer(m.Payloads[payloadKey]) + dec := gob.NewDecoder(buf) + decoded := []simplest.OtChallengeResponse{} + if err := dec.Decode(&decoded); err != nil { + return nil, errors.WithStack(err) + } + return decoded, nil +} + +func encodeDkgRound9Output( + opening []simplest.ChallengeOpening, + version uint, +) (*protocol.Message, error) { + if version != protocol.Version1 { + return nil, errors.New("only version 1 is supported") + } + buf := bytes.NewBuffer([]byte{}) + enc := gob.NewEncoder(buf) + if err := enc.Encode(opening); err != nil { + return nil, errors.WithStack(err) + } + return newDkgProtocolMessage(buf.Bytes(), "9", version), nil +} + +func decodeDkgRound10Input(m *protocol.Message) ([]simplest.ChallengeOpening, error) { + if m.Version != protocol.Version1 { + return nil, errors.New("only version 1 is supported") + } + buf := bytes.NewBuffer(m.Payloads[payloadKey]) + dec := gob.NewDecoder(buf) + decoded := []simplest.ChallengeOpening{} + if err := dec.Decode(&decoded); err != nil { + return nil, errors.WithStack(err) + } + return decoded, nil +} + +// EncodeAliceDkgOutput serializes Alice DKG output based on the protocol version. +func EncodeAliceDkgOutput(result *dkg.AliceOutput, version uint) (*protocol.Message, error) { + if version != protocol.Version1 { + return nil, errors.New("only version 1 is supported") + } + registerTypes() + buf := bytes.NewBuffer([]byte{}) + enc := gob.NewEncoder(buf) + if err := enc.Encode(result); err != nil { + return nil, errors.WithStack(err) + } + return newDkgProtocolMessage(buf.Bytes(), "alice-output", version), nil +} + +// DecodeAliceDkgResult deserializes Alice DKG output. +func DecodeAliceDkgResult(m *protocol.Message) (*dkg.AliceOutput, error) { + if m.Version != protocol.Version1 { + return nil, errors.New("only version 1 is supported") + } + registerTypes() + buf := bytes.NewBuffer(m.Payloads[payloadKey]) + dec := gob.NewDecoder(buf) + decoded := new(dkg.AliceOutput) + if err := dec.Decode(&decoded); err != nil { + return nil, errors.WithStack(err) + } + return decoded, nil +} + +// EncodeBobDkgOutput serializes Bob DKG output based on the protocol version. +func EncodeBobDkgOutput(result *dkg.BobOutput, version uint) (*protocol.Message, error) { + if version != protocol.Version1 { + return nil, errors.New("only version 1 is supported") + } + registerTypes() + buf := bytes.NewBuffer([]byte{}) + enc := gob.NewEncoder(buf) + if err := enc.Encode(result); err != nil { + return nil, errors.WithStack(err) + } + return newDkgProtocolMessage(buf.Bytes(), "bob-output", version), nil +} + +// DecodeBobDkgResult deserializes Bob DKG output. +func DecodeBobDkgResult(m *protocol.Message) (*dkg.BobOutput, error) { + if m.Version != protocol.Version1 { + return nil, errors.New("only version 1 is supported") + } + buf := bytes.NewBuffer(m.Payloads[payloadKey]) + dec := gob.NewDecoder(buf) + decoded := new(dkg.BobOutput) + if err := dec.Decode(&decoded); err != nil { + return nil, errors.WithStack(err) + } + return decoded, nil +} diff --git a/tecdsa/dklsv1/protocol.go b/tecdsa/dklsv1/protocol.go new file mode 100644 index 0000000..f29573f --- /dev/null +++ b/tecdsa/dklsv1/protocol.go @@ -0,0 +1,33 @@ +// package dklsv1 provides a wrapper around the [DKLs18](https://eprint.iacr.org/2018/499.pdf) sign and dkg and provides +// serialization, serialization, and versioning for the serialized data. 
+package dklsv1 + +import ( + "github.com/sonr-io/sonr/crypto/core/protocol" +) + +// Basic protocol interface implementation that calls the next step func in a pre-defined list +type protoStepper struct { + steps []func(input *protocol.Message) (*protocol.Message, error) + step int +} + +// Next runs the next step in the protocol and reports errors or increments the step index +func (p *protoStepper) Next(input *protocol.Message) (*protocol.Message, error) { + if p.complete() { + return nil, protocol.ErrProtocolFinished + } + + // Run the current protocol step and report any errors + output, err := p.steps[p.step](input) + if err != nil { + return nil, err + } + + // Increment the step index and report success + p.step++ + return output, nil +} + +// Reports true if the step index equals or exceeds the number of steps +func (p *protoStepper) complete() bool { return p.step >= len(p.steps) } diff --git a/tecdsa/dklsv1/protocol_test.go b/tecdsa/dklsv1/protocol_test.go new file mode 100644 index 0000000..d624986 --- /dev/null +++ b/tecdsa/dklsv1/protocol_test.go @@ -0,0 +1,428 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package dklsv1 + +import ( + "math/big" + "testing" + + "github.com/stretchr/testify/require" + "golang.org/x/crypto/sha3" + + "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/core/protocol" + "github.com/sonr-io/sonr/crypto/ot/extension/kos" + "github.com/sonr-io/sonr/crypto/tecdsa/dklsv1/dkg" +) + +// For DKG bob starts first. For refresh and sign, Alice starts first. +func runIteratedProtocol( + firstParty protocol.Iterator, + secondParty protocol.Iterator, +) (error, error) { + var ( + message *protocol.Message + aErr error + bErr error + ) + + for aErr != protocol.ErrProtocolFinished || bErr != protocol.ErrProtocolFinished { + // Crank each protocol forward one iteration + message, bErr = firstParty.Next(message) + if bErr != nil && bErr != protocol.ErrProtocolFinished { + return nil, bErr + } + + message, aErr = secondParty.Next(message) + if aErr != nil && aErr != protocol.ErrProtocolFinished { + return aErr, nil + } + } + return aErr, bErr +} + +// Running steps in sequence ensures that no hidden read/write dependencies exist in the read/write interfaces.
+func TestDkgProto(t *testing.T) { + curveInstances := []*curves.Curve{ + curves.K256(), + curves.P256(), + } + for _, curve := range curveInstances { + alice := NewAliceDkg(curve, protocol.Version1) + bob := NewBobDkg(curve, protocol.Version1) + aErr, bErr := runIteratedProtocol(bob, alice) + + t.Run("both alice/bob complete simultaneously", func(t *testing.T) { + require.ErrorIs(t, aErr, protocol.ErrProtocolFinished) + require.ErrorIs(t, bErr, protocol.ErrProtocolFinished) + }) + + for i := 0; i < kos.Kappa; i++ { + if alice.Alice.Output().SeedOtResult.OneTimePadDecryptionKey[i] != bob.Bob.Output().SeedOtResult.OneTimePadEncryptionKeys[i][alice.Alice.Output().SeedOtResult.RandomChoiceBits[i]] { + t.Errorf("oblivious transfer is incorrect at index i=%v", i) + } + } + + t.Run("Both parties produces identical composite pubkey", func(t *testing.T) { + require.True(t, alice.Alice.Output().PublicKey.Equal(bob.Bob.Output().PublicKey)) + }) + + var aliceResult *dkg.AliceOutput + var bobResult *dkg.BobOutput + t.Run("alice produces valid result", func(t *testing.T) { + // Get the result + r, err := alice.Result(protocol.Version1) + + // Test + require.NoError(t, err) + require.NotNil(t, r) + aliceResult, err = DecodeAliceDkgResult(r) + require.NoError(t, err) + }) + t.Run("bob produces valid result", func(t *testing.T) { + // Get the result + r, err := bob.Result(protocol.Version1) + + // Test + require.NoError(t, err) + require.NotNil(t, r) + bobResult, err = DecodeBobDkgResult(r) + require.NoError(t, err) + }) + + t.Run("alice/bob agree on pubkey", func(t *testing.T) { + require.Equal(t, aliceResult.PublicKey, bobResult.PublicKey) + }) + } +} + +// +// // DKG > Refresh > Sign +// func TestRefreshProto(t *testing.T) { +// t.Parallel() +// curveInstances := []*curves.Curve{ +// curves.K256(), +// curves.P256(), +// } +// for _, curve := range curveInstances { +// boundCurve := curve +// t.Run(fmt.Sprintf("testing refresh for curve %s", boundCurve.Name), func(tt *testing.T) { +// tt.Parallel() +// // DKG +// aliceDkg := NewAliceDkg(boundCurve, protocol.Version1) +// bobDkg := NewBobDkg(boundCurve, protocol.Version1) +// +// aDkgErr, bDkgErr := runIteratedProtocol(bobDkg, aliceDkg) +// require.ErrorIs(tt, aDkgErr, protocol.ErrProtocolFinished) +// require.ErrorIs(tt, bDkgErr, protocol.ErrProtocolFinished) +// +// aliceDkgResultMessage, err := aliceDkg.Result(protocol.Version1) +// require.NoError(tt, err) +// bobDkgResultMessage, err := bobDkg.Result(protocol.Version1) +// require.NoError(tt, err) +// +// // Refresh +// aliceRefreshResultMessage, bobRefreshResultMessage := refreshV1(t, boundCurve, aliceDkgResultMessage, bobDkgResultMessage) +// +// // sign +// signV1(t, boundCurve, aliceRefreshResultMessage, bobRefreshResultMessage) +// }) +// } +// } + +// DKG > Output > NewDklsSign > Sign > Output +func TestDkgSignProto(t *testing.T) { + // Setup + curve := curves.K256() + + aliceDkg := NewAliceDkg(curve, protocol.Version1) + bobDkg := NewBobDkg(curve, protocol.Version1) + + // DKG + aErr, bErr := runIteratedProtocol(bobDkg, aliceDkg) + require.ErrorIs(t, aErr, protocol.ErrProtocolFinished) + require.ErrorIs(t, bErr, protocol.ErrProtocolFinished) + + // Output + aliceDkgResultMessage, err := aliceDkg.Result(protocol.Version1) + require.NoError(t, err) + + bobDkgResultMessage, err := bobDkg.Result(protocol.Version1) + require.NoError(t, err) + + // New DklsSign + msg := []byte("As soon as you trust yourself, you will know how to live.") + aliceSign, err := NewAliceSign( + curve, + sha3.New256(), + 
msg, + aliceDkgResultMessage, + protocol.Version1, + ) + require.NoError(t, err) + bobSign, err := NewBobSign(curve, sha3.New256(), msg, bobDkgResultMessage, protocol.Version1) + require.NoError(t, err) + + // Sign + t.Run("sign", func(t *testing.T) { + aErr, bErr = runIteratedProtocol(aliceSign, bobSign) + require.ErrorIs(t, aErr, protocol.ErrProtocolFinished) + require.ErrorIs(t, bErr, protocol.ErrProtocolFinished) + }) + // Don't continue to verifying results if sign didn't run to completion. + require.ErrorIs(t, aErr, protocol.ErrProtocolFinished) + require.ErrorIs(t, bErr, protocol.ErrProtocolFinished) + + // Output + var result *curves.EcdsaSignature + t.Run("bob produces result of correct type", func(t *testing.T) { + resultMessage, err := bobSign.Result(protocol.Version1) + require.NoError(t, err) + result, err = DecodeSignature(resultMessage) + require.NoError(t, err) + }) + require.NotNil(t, result) + + t.Run("valid signature", func(t *testing.T) { + hash := sha3.New256() + _, err = hash.Write(msg) + require.NoError(t, err) + digest := hash.Sum(nil) + unCompressedAffinePublicKey := aliceDkg.Output().PublicKey.ToAffineUncompressed() + require.Equal(t, 65, len(unCompressedAffinePublicKey)) + x := new(big.Int).SetBytes(unCompressedAffinePublicKey[1:33]) + y := new(big.Int).SetBytes(unCompressedAffinePublicKey[33:]) + ecCurve, err := curve.ToEllipticCurve() + require.NoError(t, err) + publicKey := &curves.EcPoint{ + Curve: ecCurve, + X: x, + Y: y, + } + require.True(t, + curves.VerifyEcdsa(publicKey, + digest[:], + result, + ), + "signature failed verification", + ) + }) +} + +// +// // Decode > NewDklsSign > Sign > Output +// // NOTE: this cold-start test ensures backwards compatibility with durable, +// // encoding DKG state that may exist within production systems like test +// // Breaking changes must consider downstream production impacts. 
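Editorial aside: the live and commented-out tests in this file all drive both parties with a `runIteratedProtocol` helper that is defined elsewhere in the package. The sketch below shows the general shape such a driver takes, under the assumption (drawn from how the tests use it) that each dklsv1 party satisfies a `protocol.Iterator`-style interface whose `Next(*protocol.Message)` method returns `protocol.ErrProtocolFinished` once that party has nothing more to send; the name `pumpUntilFinished` and the exact signature are illustrative, not the package's actual helper.

```go
// Illustrative sketch only, not the package's real helper. Assumes a
// protocol.Iterator-style interface with a Next(*protocol.Message) method,
// as suggested by the assertions in the tests above.
package sketch

import (
	"errors"

	"github.com/sonr-io/sonr/crypto/core/protocol"
)

func pumpUntilFinished(first, second protocol.Iterator) (firstErr, secondErr error) {
	var msg *protocol.Message
	for {
		// Each party consumes the other's latest message and emits the next one.
		msg, firstErr = first.Next(msg)
		if firstErr != nil && !errors.Is(firstErr, protocol.ErrProtocolFinished) {
			return firstErr, secondErr
		}
		msg, secondErr = second.Next(msg)
		if secondErr != nil && !errors.Is(secondErr, protocol.ErrProtocolFinished) {
			return firstErr, secondErr
		}
		// Both parties report ErrProtocolFinished once the exchange is complete.
		if errors.Is(firstErr, protocol.ErrProtocolFinished) &&
			errors.Is(secondErr, protocol.ErrProtocolFinished) {
			return firstErr, secondErr
		}
	}
}
```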
+// func TestSignColdStart(t *testing.T) { +// // Decode alice/bob state from file +// aliceDkg, err := os.ReadFile("testdata/alice-dkls-v1-dkg.bin") +// require.NoError(t, err) +// bobDkg, err := os.ReadFile("testdata/bob-dkls-v1-dkg.bin") +// require.NoError(t, err) +// +// // The choice of json marshaling is arbitrary, the binary could have been marshaled in other forms as well +// // The purpose here is to obtain an instance of `protocol.Message` +// +// aliceDkgMessage := &protocol.Message{} +// err = json.Unmarshal(aliceDkg, aliceDkgMessage) +// require.NoError(t, err) +// +// bobDkgMessage := &protocol.Message{} +// err = json.Unmarshal(bobDkg, bobDkgMessage) +// require.NoError(t, err) +// +// signV1(t, curves.K256(), aliceDkgMessage, bobDkgMessage) +// } +// +// func TestEncodeDecode(t *testing.T) { +// curve := curves.K256() +// +// alice := NewAliceDkg(curve, protocol.Version1) +// bob := NewBobDkg(curve, protocol.Version1) +// _, _ = runIteratedProtocol(bob, alice) +// +// var aliceBytes []byte +// var bobBytes []byte +// t.Run("Encode Alice/Bob", func(t *testing.T) { +// aliceDkgMessage, err := EncodeAliceDkgOutput(alice.Alice.Output(), protocol.Version1) +// require.NoError(t, err) +// aliceBytes, err = json.Marshal(aliceDkgMessage) +// require.NoError(t, err) +// +// bobDkgMessage, err := EncodeBobDkgOutput(bob.Bob.Output(), protocol.Version1) +// require.NoError(t, err) +// bobBytes, err = json.Marshal(bobDkgMessage) +// require.NoError(t, err) +// }) +// require.NotEmpty(t, aliceBytes) +// require.NotEmpty(t, bobBytes) +// +// t.Run("Decode Alice", func(t *testing.T) { +// decodedAliceMessage := &protocol.Message{} +// err := json.Unmarshal(aliceBytes, decodedAliceMessage) +// require.NoError(t, err) +// require.NotNil(t, decodedAliceMessage) +// decodedAlice, err := DecodeAliceDkgResult(decodedAliceMessage) +// require.NoError(t, err) +// +// require.True(t, alice.Output().PublicKey.Equal(decodedAlice.PublicKey)) +// require.Equal(t, alice.Output().SecretKeyShare, decodedAlice.SecretKeyShare) +// }) +// +// t.Run("Decode Bob", func(t *testing.T) { +// decodedBobMessage := &protocol.Message{} +// err := json.Unmarshal(bobBytes, decodedBobMessage) +// require.NoError(t, err) +// require.NotNil(t, decodedBobMessage) +// decodedBob, err := DecodeBobDkgResult(decodedBobMessage) +// require.NoError(t, err) +// +// require.True(t, bob.Output().PublicKey.Equal(decodedBob.PublicKey)) +// require.Equal(t, bob.Output().SecretKeyShare, decodedBob.SecretKeyShare) +// }) +// } +// +// func signV1(t *testing.T, curve *curves.Curve, aliceDkgResultMessage *protocol.Message, bobDkgResultMessage *protocol.Message) { +// t.Helper() +// // New DklsSign +// msg := []byte("As soon as you trust yourself, you will know how to live.") +// aliceSign, err := NewAliceSign(curve, sha3.New256(), msg, aliceDkgResultMessage, protocol.Version1) +// require.NoError(t, err) +// bobSign, err := NewBobSign(curve, sha3.New256(), msg, bobDkgResultMessage, protocol.Version1) +// require.NoError(t, err) +// +// // Sign +// var aErr error +// var bErr error +// t.Run("sign", func(t *testing.T) { +// aErr, bErr = runIteratedProtocol(aliceSign, bobSign) +// require.ErrorIs(t, aErr, protocol.ErrProtocolFinished) +// require.ErrorIs(t, bErr, protocol.ErrProtocolFinished) +// }) +// // Don't continue to verifying results if sign didn't run to completion. 
+// require.ErrorIs(t, aErr, protocol.ErrProtocolFinished) +// require.ErrorIs(t, bErr, protocol.ErrProtocolFinished) +// +// // Output +// var result *curves.EcdsaSignature +// t.Run("bob produces result of correct type", func(t *testing.T) { +// resultMessage, err := bobSign.Result(protocol.Version1) +// require.NoError(t, err) +// result, err = DecodeSignature(resultMessage) +// require.NoError(t, err) +// }) +// require.NotNil(t, result) +// +// aliceDkg, err := DecodeAliceDkgResult(aliceDkgResultMessage) +// require.NoError(t, err) +// +// t.Run("valid signature", func(t *testing.T) { +// hash := sha3.New256() +// _, err = hash.Write(msg) +// require.NoError(t, err) +// digest := hash.Sum(nil) +// unCompressedAffinePublicKey := aliceDkg.PublicKey.ToAffineUncompressed() +// require.Equal(t, 65, len(unCompressedAffinePublicKey)) +// x := new(big.Int).SetBytes(unCompressedAffinePublicKey[1:33]) +// y := new(big.Int).SetBytes(unCompressedAffinePublicKey[33:]) +// ecCurve, err := curve.ToEllipticCurve() +// require.NoError(t, err) +// publicKey := &curves.EcPoint{ +// Curve: ecCurve, +// X: x, +// Y: y, +// } +// require.True(t, +// curves.VerifyEcdsa(publicKey, +// digest[:], +// result, +// ), +// "signature failed verification", +// ) +// }) +// } +// +// func refreshV1(t *testing.T, curve *curves.Curve, aliceDkgResultMessage, bobDkgResultMessage *protocol.Message) (aliceRefreshResultMessage, bobRefreshResultMessage *protocol.Message) { +// t.Helper() +// aliceRefresh, err := NewAliceRefresh(curve, aliceDkgResultMessage, protocol.Version1) +// require.NoError(t, err) +// bobRefresh, err := NewBobRefresh(curve, bobDkgResultMessage, protocol.Version1) +// require.NoError(t, err) +// +// aErr, bErr := runIteratedProtocol(aliceRefresh, bobRefresh) +// require.ErrorIs(t, aErr, protocol.ErrProtocolFinished) +// require.ErrorIs(t, bErr, protocol.ErrProtocolFinished) +// +// aliceRefreshResultMessage, err = aliceRefresh.Result(protocol.Version1) +// require.NoError(t, err) +// require.NotNil(t, aliceRefreshResultMessage) +// _, err = DecodeAliceRefreshResult(aliceRefreshResultMessage) +// require.NoError(t, err) +// +// bobRefreshResultMessage, err = bobRefresh.Result(protocol.Version1) +// require.NoError(t, err) +// require.NotNil(t, bobRefreshResultMessage) +// _, err = DecodeBobRefreshResult(bobRefreshResultMessage) +// require.NoError(t, err) +// +// return aliceRefreshResultMessage, bobRefreshResultMessage +// } +// +// func BenchmarkDKGProto(b *testing.B) { +// curveInstances := []*curves.Curve{ +// curves.K256(), +// curves.P256(), +// } +// for _, curve := range curveInstances { +// alice := NewAliceDkg(curve, protocol.Version1) +// bob := NewBobDkg(curve, protocol.Version1) +// aErr, bErr := runIteratedProtocol(bob, alice) +// +// b.Run("both alice/bob complete simultaneously", func(t *testing.B) { +// require.ErrorIs(t, aErr, protocol.ErrProtocolFinished) +// require.ErrorIs(t, bErr, protocol.ErrProtocolFinished) +// }) +// +// for i := 0; i < kos.Kappa; i++ { +// if alice.Alice.Output().SeedOtResult.OneTimePadDecryptionKey[i] != bob.Bob.Output().SeedOtResult.OneTimePadEncryptionKeys[i][alice.Alice.Output().SeedOtResult.RandomChoiceBits[i]] { +// b.Errorf("oblivious transfer is incorrect at index i=%v", i) +// } +// } +// +// b.Run("Both parties produces identical composite pubkey", func(t *testing.B) { +// require.True(t, alice.Alice.Output().PublicKey.Equal(bob.Bob.Output().PublicKey)) +// }) +// +// var aliceResult *dkg.AliceOutput +// var bobResult *dkg.BobOutput +// b.Run("alice 
produces valid result", func(t *testing.B) { +// // Get the result +// r, err := alice.Result(protocol.Version1) +// +// // Test +// require.NoError(t, err) +// require.NotNil(t, r) +// aliceResult, err = DecodeAliceDkgResult(r) +// require.NoError(t, err) +// }) +// b.Run("bob produces valid result", func(t *testing.B) { +// // Get the result +// r, err := bob.Result(protocol.Version1) +// +// // Test +// require.NoError(t, err) +// require.NotNil(t, r) +// bobResult, err = DecodeBobDkgResult(r) +// require.NoError(t, err) +// }) +// +// b.Run("alice/bob agree on pubkey", func(t *testing.B) { +// require.Equal(t, aliceResult.PublicKey, bobResult.PublicKey) +// }) +// } +// } diff --git a/tecdsa/dklsv1/refresh/refresh.go b/tecdsa/dklsv1/refresh/refresh.go new file mode 100644 index 0000000..8e81810 --- /dev/null +++ b/tecdsa/dklsv1/refresh/refresh.go @@ -0,0 +1,193 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +// This file implements the key refresh protocol of [DKLs18](https://eprint.iacr.org/2018/499.pdf). +// The key refresh protocol is defined as follows: +// 1. Key Share: +// 1.1. alice generates k_A <-- F_q; writes it to merlin transcript. sends it to bob. +// 1.2. bob receives k_A and writes it to merlin transcript. generates k_B <-- F_q and writes it to merlin transcript. reads k out of merlin transcript. overwrites sk_B *= k. sends k_B to Alice. +// 1.3. alice writes k_B to merlin transcript. reads k from it. overwrites sk_A *= k^{-1}. +// 2. OT: Redo OT (as it is done in the DKG) +package refresh + +import ( + "crypto/rand" + + "github.com/gtank/merlin" + "github.com/pkg/errors" + + "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/ot/base/simplest" + "github.com/sonr-io/sonr/crypto/ot/extension/kos" + "github.com/sonr-io/sonr/crypto/tecdsa/dklsv1/dkg" + "github.com/sonr-io/sonr/crypto/zkp/schnorr" +) + +// Alice struct encoding Alice's state during one execution of the overall signing algorithm. +// At the end of the joint computation, Alice will NOT obtain the signature. +type Alice struct { + // receiver is the base OT receiver. + receiver *simplest.Receiver + + // secretKeyShare is Alice's secret key for the joint public key. + secretKeyShare curves.Scalar + + // publicKey is the joint public key of Alice and Bob. + publicKey curves.Point + + curve *curves.Curve + + transcript *merlin.Transcript +} + +// Bob struct encoding Bob's state during one execution of the overall signing algorithm. +// At the end of the joint computation, Bob will obtain the signature. +type Bob struct { + // sender is the base OT sender. + sender *simplest.Sender + + // secretKeyShare is Bob's secret key for the joint public key. + secretKeyShare curves.Scalar + + // publicKey is the joint public key of Alice and Bob. + publicKey curves.Point + + curve *curves.Curve + + transcript *merlin.Transcript +} + +type RefreshRound2Output struct { + SeedOTRound1Output *schnorr.Proof + BobMultiplier curves.Scalar +} + +// NewAliceRefresh creates a party that can participate in 2-of-2 key refresh. +func NewAlice(curve *curves.Curve, dkgOutput *dkg.AliceOutput) *Alice { + return &Alice{ + curve: curve, + secretKeyShare: dkgOutput.SecretKeyShare, + publicKey: dkgOutput.PublicKey, + transcript: merlin.NewTranscript("Coinbase_DKLs_Refresh"), + } +} + +// NewBobRefresh creates a party that can participate in 2-of-2 key refresh. 
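Editorial aside before the Bob constructor below: the file comment above describes the refresh as both parties deriving the same multiplier k from the Merlin transcript, with Bob scaling his share by k and Alice scaling hers by k^{-1}. Because the DKLs key shares combine multiplicatively (the refresh tests later in this diff check exactly this via ScalarBaseMult and Mul), the product of the two shares, and hence the joint public key, is unchanged while each individual share is re-randomized. A minimal standalone sketch of that invariant; the locally sampled k below is a stand-in for the transcript-derived multiplier.

```go
// Sketch of the refresh invariant: (sk_A * k^{-1}) * (sk_B * k) = sk_A * sk_B,
// so the joint public key is preserved. Uses only APIs that appear in this file.
package main

import (
	"crypto/rand"
	"fmt"

	"github.com/sonr-io/sonr/crypto/core/curves"
)

func main() {
	curve := curves.K256()
	skA := curve.Scalar.Random(rand.Reader) // Alice's share before refresh
	skB := curve.Scalar.Random(rand.Reader) // Bob's share before refresh
	publicKey := curve.ScalarBaseMult(skA.Mul(skB))

	k := curve.Scalar.Random(rand.Reader) // stand-in for the transcript-derived multiplier
	kInv, err := k.Invert()
	if err != nil {
		panic(err)
	}
	refreshedA := skA.Mul(kInv) // Alice: sk_A *= k^{-1}
	refreshedB := skB.Mul(k)    // Bob:   sk_B *= k

	refreshedPublicKey := curve.ScalarBaseMult(refreshedA.Mul(refreshedB))
	fmt.Println("public key unchanged:", refreshedPublicKey.Equal(publicKey))
}
```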
+func NewBob(curve *curves.Curve, dkgOutput *dkg.BobOutput) *Bob { + return &Bob{ + curve: curve, + secretKeyShare: dkgOutput.SecretKeyShare, + publicKey: dkgOutput.PublicKey, + transcript: merlin.NewTranscript("Coinbase_DKLs_Refresh"), + } +} + +func (alice *Alice) Round1RefreshGenerateSeed() curves.Scalar { + refreshSeed := alice.curve.Scalar.Random(rand.Reader) + alice.transcript.AppendMessage([]byte("alice refresh seed"), refreshSeed.Bytes()) + return refreshSeed +} + +func (bob *Bob) Round2RefreshProduceSeedAndMultiplyAndStartOT( + aliceSeed curves.Scalar, +) (*RefreshRound2Output, error) { + bob.transcript.AppendMessage([]byte("alice refresh seed"), aliceSeed.Bytes()) + bobSeed := bob.curve.Scalar.Random(rand.Reader) + bob.transcript.AppendMessage([]byte("bob refresh seed"), bobSeed.Bytes()) + k, err := bob.curve.NewScalar().SetBytes( + bob.transcript.ExtractBytes([]byte("secret key share multiplier"), simplest.DigestSize), + ) + if err != nil { + return nil, errors.Wrap(err, "couldn't produce bob's secret key share multiplier") + } + bob.secretKeyShare = bob.secretKeyShare.Mul(k) + + uniqueSessionId := [simplest.DigestSize]byte{} // note: will use and re-use this below for sub-session IDs. + copy( + uniqueSessionId[:], + bob.transcript.ExtractBytes([]byte("salt for simplest OT"), simplest.DigestSize), + ) + bob.sender, err = simplest.NewSender(bob.curve, kos.Kappa, uniqueSessionId) + if err != nil { + return nil, errors.Wrap(err, "bob constructing new OT sender in refresh round 2") + } + seedOTRound1Output, err := bob.sender.Round1ComputeAndZkpToPublicKey() + if err != nil { + return nil, errors.Wrap(err, "bob computing round 1 of seed OT within refresh round 2") + } + + return &RefreshRound2Output{ + SeedOTRound1Output: seedOTRound1Output, + BobMultiplier: bobSeed, + }, nil +} + +func (alice *Alice) Round3RefreshMultiplyRound2Ot( + input *RefreshRound2Output, +) ([]simplest.ReceiversMaskedChoices, error) { + alice.transcript.AppendMessage([]byte("bob refresh seed"), input.BobMultiplier.Bytes()) + k, err := alice.curve.NewScalar().SetBytes( + alice.transcript.ExtractBytes([]byte("secret key share multiplier"), simplest.DigestSize), + ) + if err != nil { + return nil, errors.Wrap(err, "couldn't produce bob's secret key share multiplier") + } + kInverse, err := k.Invert() + if err != nil { + return nil, errors.Wrap(err, "couldn't produce k inverse (alice's secret key share)") + } + alice.secretKeyShare = alice.secretKeyShare.Mul(kInverse) + + uniqueSessionId := [simplest.DigestSize]byte{} // note: will use and re-use this below for sub-session IDs. 
+ copy( + uniqueSessionId[:], + alice.transcript.ExtractBytes([]byte("salt for simplest OT"), simplest.DigestSize), + ) + alice.receiver, err = simplest.NewReceiver(alice.curve, kos.Kappa, uniqueSessionId) + if err != nil { + return nil, errors.Wrap(err, "couldn't construct OT receiver") + } + + return alice.receiver.Round2VerifySchnorrAndPadTransfer(input.SeedOTRound1Output) +} + +func (bob *Bob) Round4RefreshRound3Ot( + compressedReceiversMaskedChoice []simplest.ReceiversMaskedChoices, +) ([]simplest.OtChallenge, error) { + return bob.sender.Round3PadTransfer(compressedReceiversMaskedChoice) +} + +func (alice *Alice) Round5RefreshRound4Ot( + challenge []simplest.OtChallenge, +) ([]simplest.OtChallengeResponse, error) { + return alice.receiver.Round4RespondToChallenge(challenge) +} + +func (bob *Bob) Round6RefreshRound5Ot( + challengeResponses []simplest.OtChallengeResponse, +) ([]simplest.ChallengeOpening, error) { + return bob.sender.Round5Verify(challengeResponses) +} + +func (alice *Alice) Round7DkgRound6Ot(challengeOpenings []simplest.ChallengeOpening) error { + return alice.receiver.Round6Verify(challengeOpenings) +} + +func (alice *Alice) Output() *dkg.AliceOutput { + return &dkg.AliceOutput{ + PublicKey: alice.publicKey, + SecretKeyShare: alice.secretKeyShare, + SeedOtResult: alice.receiver.Output, + } +} + +func (bob *Bob) Output() *dkg.BobOutput { + return &dkg.BobOutput{ + PublicKey: bob.publicKey, + SecretKeyShare: bob.secretKeyShare, + SeedOtResult: bob.sender.Output, + } +} diff --git a/tecdsa/dklsv1/refresh/refresh_test.go b/tecdsa/dklsv1/refresh/refresh_test.go new file mode 100644 index 0000000..2e830a5 --- /dev/null +++ b/tecdsa/dklsv1/refresh/refresh_test.go @@ -0,0 +1,215 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package refresh_test + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/require" + "golang.org/x/crypto/sha3" + + "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/ot/extension/kos" + "github.com/sonr-io/sonr/crypto/tecdsa/dklsv1/dkg" + "github.com/sonr-io/sonr/crypto/tecdsa/dklsv1/refresh" + "github.com/sonr-io/sonr/crypto/tecdsa/dklsv1/sign" +) + +func performDKG(t *testing.T, curve *curves.Curve) (*dkg.Alice, *dkg.Bob) { + t.Helper() + + alice := dkg.NewAlice(curve) + bob := dkg.NewBob(curve) + + seed, err := bob.Round1GenerateRandomSeed() + require.NoError(t, err) + round3Output, err := alice.Round2CommitToProof(seed) + require.NoError(t, err) + proof, err := bob.Round3SchnorrProve(round3Output) + require.NoError(t, err) + proof, err = alice.Round4VerifyAndReveal(proof) + require.NoError(t, err) + proof, err = bob.Round5DecommitmentAndStartOt(proof) + require.NoError(t, err) + compressedReceiversMaskedChoice, err := alice.Round6DkgRound2Ot(proof) + require.NoError(t, err) + challenge, err := bob.Round7DkgRound3Ot(compressedReceiversMaskedChoice) + require.NoError(t, err) + challengeResponse, err := alice.Round8DkgRound4Ot(challenge) + require.NoError(t, err) + challengeOpenings, err := bob.Round9DkgRound5Ot(challengeResponse) + require.NoError(t, err) + err = alice.Round10DkgRound6Ot(challengeOpenings) + require.NoError(t, err) + + return alice, bob +} + +func performRefresh( + t *testing.T, + curve *curves.Curve, + aliceSecretKeyShare, bobSecretKeyShare curves.Scalar, +) (*refresh.Alice, *refresh.Bob) { + t.Helper() + alice := refresh.NewAlice(curve, &dkg.AliceOutput{SecretKeyShare: aliceSecretKeyShare}) + bob := refresh.NewBob(curve, &dkg.BobOutput{SecretKeyShare: bobSecretKeyShare}) + + round1Output := alice.Round1RefreshGenerateSeed() + require.False(t, round1Output.IsZero()) + round2Output, err := bob.Round2RefreshProduceSeedAndMultiplyAndStartOT(round1Output) + require.NoError(t, err) + round3Output, err := alice.Round3RefreshMultiplyRound2Ot(round2Output) + require.NoError(t, err) + round4Output, err := bob.Round4RefreshRound3Ot(round3Output) + require.NoError(t, err) + round5Output, err := alice.Round5RefreshRound4Ot(round4Output) + require.NoError(t, err) + round6Output, err := bob.Round6RefreshRound5Ot(round5Output) + require.NoError(t, err) + err = alice.Round7DkgRound6Ot(round6Output) + require.NoError(t, err) + return alice, bob +} + +func Test_RefreshLeadsToTheSamePublicKeyButDifferentPrivateMaterial(t *testing.T) { + t.Parallel() + curveInstances := []*curves.Curve{ + curves.K256(), + curves.P256(), + } + for _, curve := range curveInstances { + boundCurve := curve + t.Run(fmt.Sprintf("testing refresh for curve %s", boundCurve.Name), func(tt *testing.T) { + tt.Parallel() + alice, bob := performDKG(tt, boundCurve) + + publicKey := alice.Output().PublicKey + require.True(tt, publicKey.Equal(bob.Output().PublicKey)) + + aliceRefreshed, bobRefreshed := performRefresh( + tt, + boundCurve, + alice.Output().SecretKeyShare, + bob.Output().SecretKeyShare, + ) + + require.NotEqual( + tt, + aliceRefreshed.Output().SecretKeyShare, + alice.Output().SecretKeyShare, + ) + require.NotEqual(tt, bobRefreshed.Output().SeedOtResult, bob.Output().SecretKeyShare) + require.NotEqualValues( + tt, + aliceRefreshed.Output().SeedOtResult.OneTimePadDecryptionKey, + alice.Output().SeedOtResult.OneTimePadDecryptionKey, + ) + require.NotEqualValues( + tt, + 
aliceRefreshed.Output().SeedOtResult.PackedRandomChoiceBits, + alice.Output().SeedOtResult.PackedRandomChoiceBits, + ) + require.NotEqualValues( + tt, + aliceRefreshed.Output().SeedOtResult.RandomChoiceBits, + alice.Output().SeedOtResult.RandomChoiceBits, + ) + require.NotEqualValues( + tt, + bobRefreshed.Output().SeedOtResult.OneTimePadEncryptionKeys, + bob.Output().SeedOtResult.OneTimePadEncryptionKeys, + ) + + pkA := boundCurve.ScalarBaseMult(aliceRefreshed.Output().SecretKeyShare) + computedPublicKeyA := pkA.Mul(bobRefreshed.Output().SecretKeyShare) + require.True(tt, computedPublicKeyA.Equal(publicKey)) + + pkB := boundCurve.ScalarBaseMult(bobRefreshed.Output().SecretKeyShare) + computedPublicKeyB := pkB.Mul(aliceRefreshed.Output().SecretKeyShare) + require.True(tt, computedPublicKeyB.Equal(publicKey)) + }) + } +} + +func Test_RefreshOTIsCorrect(t *testing.T) { + t.Parallel() + curveInstances := []*curves.Curve{ + curves.K256(), + curves.P256(), + } + for _, curve := range curveInstances { + boundCurve := curve + t.Run( + fmt.Sprintf("testing OT correctness of key refresh for curve %s", boundCurve.Name), + func(tt *testing.T) { + tt.Parallel() + alice, bob := performDKG(tt, boundCurve) + aliceRefreshed, bobRefreshed := performRefresh( + tt, + boundCurve, + alice.Output().SecretKeyShare, + bob.Output().SecretKeyShare, + ) + for i := 0; i < kos.Kappa; i++ { + if aliceRefreshed.Output().SeedOtResult.OneTimePadDecryptionKey[i] != bobRefreshed.Output().SeedOtResult.OneTimePadEncryptionKeys[i][aliceRefreshed.Output().SeedOtResult.RandomChoiceBits[i]] { + tt.Errorf("oblivious transfer is incorrect at index i=%v", i) + } + } + }, + ) + } +} + +func Test_CanSignAfterRefresh(t *testing.T) { + t.Parallel() + curveInstances := []*curves.Curve{ + curves.K256(), + curves.P256(), + } + for _, curve := range curveInstances { + boundCurve := curve + t.Run( + fmt.Sprintf("testing sign after refresh for curve %s", boundCurve.Name), + func(tt *testing.T) { + tt.Parallel() + aliceDKG, bobDKG := performDKG(tt, boundCurve) + + publicKey := aliceDKG.Output().PublicKey + require.True(tt, publicKey.Equal(bobDKG.Output().PublicKey)) + + aliceRefreshed, bobRefreshed := performRefresh( + tt, + boundCurve, + aliceDKG.Output().SecretKeyShare, + bobDKG.Output().SecretKeyShare, + ) + + alice := sign.NewAlice(boundCurve, sha3.New256(), &dkg.AliceOutput{ + SeedOtResult: aliceRefreshed.Output().SeedOtResult, + SecretKeyShare: aliceRefreshed.Output().SecretKeyShare, + PublicKey: publicKey, + }) + bob := sign.NewBob(boundCurve, sha3.New256(), &dkg.BobOutput{ + SeedOtResult: bobRefreshed.Output().SeedOtResult, + SecretKeyShare: bobRefreshed.Output().SecretKeyShare, + PublicKey: publicKey, + }) + + message := []byte("A message.") + seed, err := alice.Round1GenerateRandomSeed() + require.NoError(tt, err) + round3Output, err := bob.Round2Initialize(seed) + require.NoError(tt, err) + round4Output, err := alice.Round3Sign(message, round3Output) + require.NoError(tt, err) + err = bob.Round4Final(message, round4Output) + require.NoError(tt, err, "curve: %s", boundCurve.Name) + }, + ) + } +} diff --git a/tecdsa/dklsv1/refreshserializers.go b/tecdsa/dklsv1/refreshserializers.go new file mode 100644 index 0000000..c7e9926 --- /dev/null +++ b/tecdsa/dklsv1/refreshserializers.go @@ -0,0 +1,253 @@ +package dklsv1 + +import ( + "bytes" + "encoding/gob" + + "github.com/pkg/errors" + + "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/core/protocol" + "github.com/sonr-io/sonr/crypto/ot/base/simplest" + 
"github.com/sonr-io/sonr/crypto/tecdsa/dklsv1/dkg" + "github.com/sonr-io/sonr/crypto/tecdsa/dklsv1/refresh" +) + +func newRefreshProtocolMessage(payload []byte, round string, version uint) *protocol.Message { + return &protocol.Message{ + Protocol: protocol.Dkls18Refresh, + Version: version, + Payloads: map[string][]byte{payloadKey: payload}, + Metadata: map[string]string{"round": round}, + } +} + +func versionIsSupported(messageVersion uint) error { + if messageVersion < uint(protocol.Version1) { + return errors.New("only version 1 is supported.") + } + return nil +} + +func encodeRefreshRound1Output(seed curves.Scalar, version uint) (*protocol.Message, error) { + if err := versionIsSupported(version); err != nil { + return nil, errors.Wrap(err, "version error") + } + registerTypes() + buf := bytes.NewBuffer([]byte{}) + enc := gob.NewEncoder(buf) + if err := enc.Encode(&seed); err != nil { + return nil, errors.WithStack(err) + } + return newRefreshProtocolMessage(buf.Bytes(), "1", version), nil +} + +func decodeRefreshRound2Input(m *protocol.Message) (curves.Scalar, error) { + if err := versionIsSupported(m.Version); err != nil { + return nil, errors.Wrap(err, "version error") + } + buf := bytes.NewBuffer(m.Payloads[payloadKey]) + dec := gob.NewDecoder(buf) + decoded := new(curves.Scalar) + if err := dec.Decode(&decoded); err != nil { + return nil, errors.WithStack(err) + } + return *decoded, nil +} + +func encodeRefreshRound2Output( + output *refresh.RefreshRound2Output, + version uint, +) (*protocol.Message, error) { + if err := versionIsSupported(version); err != nil { + return nil, errors.Wrap(err, "version error") + } + buf := bytes.NewBuffer([]byte{}) + enc := gob.NewEncoder(buf) + if err := enc.Encode(output); err != nil { + return nil, errors.WithStack(err) + } + return newRefreshProtocolMessage(buf.Bytes(), "2", version), nil +} + +func decodeRefreshRound3Input(m *protocol.Message) (*refresh.RefreshRound2Output, error) { + if err := versionIsSupported(m.Version); err != nil { + return nil, errors.Wrap(err, "version error") + } + buf := bytes.NewBuffer(m.Payloads[payloadKey]) + dec := gob.NewDecoder(buf) + decoded := new(refresh.RefreshRound2Output) + if err := dec.Decode(decoded); err != nil { + return nil, errors.WithStack(err) + } + return decoded, nil +} + +func encodeRefreshRound3Output( + choices []simplest.ReceiversMaskedChoices, + version uint, +) (*protocol.Message, error) { + if err := versionIsSupported(version); err != nil { + return nil, errors.Wrap(err, "version error") + } + buf := bytes.NewBuffer([]byte{}) + enc := gob.NewEncoder(buf) + if err := enc.Encode(choices); err != nil { + return nil, errors.WithStack(err) + } + return newRefreshProtocolMessage(buf.Bytes(), "3", version), nil +} + +func decodeRefreshRound4Input(m *protocol.Message) ([]simplest.ReceiversMaskedChoices, error) { + if err := versionIsSupported(m.Version); err != nil { + return nil, errors.Wrap(err, "version error") + } + buf := bytes.NewBuffer(m.Payloads[payloadKey]) + dec := gob.NewDecoder(buf) + decoded := []simplest.ReceiversMaskedChoices{} + if err := dec.Decode(&decoded); err != nil { + return nil, errors.WithStack(err) + } + return decoded, nil +} + +func encodeRefreshRound4Output( + challenge []simplest.OtChallenge, + version uint, +) (*protocol.Message, error) { + if err := versionIsSupported(version); err != nil { + return nil, errors.Wrap(err, "version error") + } + buf := bytes.NewBuffer([]byte{}) + enc := gob.NewEncoder(buf) + if err := enc.Encode(challenge); err != nil { + return 
nil, errors.WithStack(err) + } + return newRefreshProtocolMessage(buf.Bytes(), "4", version), nil +} + +func decodeRefreshRound5Input(m *protocol.Message) ([]simplest.OtChallenge, error) { + if err := versionIsSupported(m.Version); err != nil { + return nil, errors.Wrap(err, "version error") + } + buf := bytes.NewBuffer(m.Payloads[payloadKey]) + dec := gob.NewDecoder(buf) + decoded := []simplest.OtChallenge{} + if err := dec.Decode(&decoded); err != nil { + return nil, errors.WithStack(err) + } + return decoded, nil +} + +func encodeRefreshRound5Output( + responses []simplest.OtChallengeResponse, + version uint, +) (*protocol.Message, error) { + if err := versionIsSupported(version); err != nil { + return nil, errors.Wrap(err, "version error") + } + buf := bytes.NewBuffer([]byte{}) + enc := gob.NewEncoder(buf) + if err := enc.Encode(responses); err != nil { + return nil, errors.WithStack(err) + } + return newRefreshProtocolMessage(buf.Bytes(), "5", version), nil +} + +func decodeRefreshRound6Input(m *protocol.Message) ([]simplest.OtChallengeResponse, error) { + if err := versionIsSupported(m.Version); err != nil { + return nil, errors.Wrap(err, "version error") + } + buf := bytes.NewBuffer(m.Payloads[payloadKey]) + dec := gob.NewDecoder(buf) + decoded := []simplest.OtChallengeResponse{} + if err := dec.Decode(&decoded); err != nil { + return nil, errors.WithStack(err) + } + return decoded, nil +} + +func encodeRefreshRound6Output( + opening []simplest.ChallengeOpening, + version uint, +) (*protocol.Message, error) { + if err := versionIsSupported(version); err != nil { + return nil, errors.Wrap(err, "version error") + } + buf := bytes.NewBuffer([]byte{}) + enc := gob.NewEncoder(buf) + if err := enc.Encode(opening); err != nil { + return nil, errors.WithStack(err) + } + return newRefreshProtocolMessage(buf.Bytes(), "6", version), nil +} + +func decodeRefreshRound7Input(m *protocol.Message) ([]simplest.ChallengeOpening, error) { + if err := versionIsSupported(m.Version); err != nil { + return nil, errors.Wrap(err, "version error") + } + buf := bytes.NewBuffer(m.Payloads[payloadKey]) + dec := gob.NewDecoder(buf) + decoded := []simplest.ChallengeOpening{} + if err := dec.Decode(&decoded); err != nil { + return nil, errors.WithStack(err) + } + return decoded, nil +} + +// EncodeAliceRefreshOutput serializes Alice Refresh output based on the protocol version. +func EncodeAliceRefreshOutput(result *dkg.AliceOutput, version uint) (*protocol.Message, error) { + if err := versionIsSupported(version); err != nil { + return nil, errors.Wrap(err, "version error") + } + registerTypes() + buf := bytes.NewBuffer([]byte{}) + enc := gob.NewEncoder(buf) + if err := enc.Encode(result); err != nil { + return nil, errors.WithStack(err) + } + return newRefreshProtocolMessage(buf.Bytes(), "alice-output", version), nil +} + +// DecodeAliceRefreshResult deserializes Alice refresh output. +func DecodeAliceRefreshResult(m *protocol.Message) (*dkg.AliceOutput, error) { + if err := versionIsSupported(m.Version); err != nil { + return nil, errors.Wrap(err, "version error") + } + registerTypes() + buf := bytes.NewBuffer(m.Payloads[payloadKey]) + dec := gob.NewDecoder(buf) + decoded := new(dkg.AliceOutput) + if err := dec.Decode(&decoded); err != nil { + return nil, errors.WithStack(err) + } + return decoded, nil +} + +// EncodeBobRefreshOutput serializes Bob refresh output based on the protocol version. 
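Editorial aside: every encoder in this file follows the same envelope pattern, gob-encoding the round's payload and wrapping it in a protocol.Message that records the protocol, version, and round. The sketch below uses only the Message fields and the Dkls18Refresh / Version1 constants visible in this file; the literal "payload" key and the wrapRound name are stand-ins for the package's unexported payloadKey constant and the newRefreshProtocolMessage helper above.

```go
// Illustrative only: how a round payload is wrapped for transport, mirroring
// newRefreshProtocolMessage above. "payload" stands in for the package's
// unexported payloadKey constant.
package main

import (
	"bytes"
	"encoding/gob"
	"fmt"

	"github.com/sonr-io/sonr/crypto/core/protocol"
)

func wrapRound(round string, version uint, payload interface{}) (*protocol.Message, error) {
	buf := new(bytes.Buffer)
	if err := gob.NewEncoder(buf).Encode(payload); err != nil {
		return nil, err
	}
	return &protocol.Message{
		Protocol: protocol.Dkls18Refresh,
		Version:  version,
		Payloads: map[string][]byte{"payload": buf.Bytes()},
		Metadata: map[string]string{"round": round},
	}, nil
}

func main() {
	// Round "3" carries the receiver's masked choices; any gob-encodable value works here.
	msg, err := wrapRound("3", uint(protocol.Version1), []byte{0x01, 0x02})
	if err != nil {
		panic(err)
	}
	fmt.Println(msg.Metadata["round"], len(msg.Payloads["payload"]))
}
```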
+func EncodeBobRefreshOutput(result *dkg.BobOutput, version uint) (*protocol.Message, error) {
+	if err := versionIsSupported(version); err != nil {
+		return nil, errors.Wrap(err, "version error")
+	}
+	registerTypes()
+	buf := bytes.NewBuffer([]byte{})
+	enc := gob.NewEncoder(buf)
+	if err := enc.Encode(result); err != nil {
+		return nil, errors.WithStack(err)
+	}
+	return newRefreshProtocolMessage(buf.Bytes(), "bob-output", version), nil
+}
+
+// DecodeBobRefreshResult deserializes Bob refresh output.
+func DecodeBobRefreshResult(m *protocol.Message) (*dkg.BobOutput, error) {
+	if err := versionIsSupported(m.Version); err != nil {
+		return nil, errors.Wrap(err, "version error")
+	}
+	buf := bytes.NewBuffer(m.Payloads[payloadKey])
+	dec := gob.NewDecoder(buf)
+	decoded := new(dkg.BobOutput)
+	if err := dec.Decode(&decoded); err != nil {
+		return nil, errors.WithStack(err)
+	}
+	return decoded, nil
+}
diff --git a/tecdsa/dklsv1/sign/multiply.go b/tecdsa/dklsv1/sign/multiply.go
new file mode 100644
index 0000000..7905aaa
--- /dev/null
+++ b/tecdsa/dklsv1/sign/multiply.go
@@ -0,0 +1,299 @@
+//
+// Copyright Coinbase, Inc. All Rights Reserved.
+//
+// SPDX-License-Identifier: Apache-2.0
+//
+
+package sign
+
+import (
+	"crypto/rand"
+	"crypto/subtle"
+	"fmt"
+	"math/big"
+
+	"github.com/gtank/merlin"
+	"github.com/pkg/errors"
+	"golang.org/x/crypto/sha3"
+
+	"github.com/sonr-io/sonr/crypto/core/curves"
+	"github.com/sonr-io/sonr/crypto/internal"
+	"github.com/sonr-io/sonr/crypto/ot/base/simplest"
+	"github.com/sonr-io/sonr/crypto/ot/extension/kos"
+)
+
+// This implements the Multiplication protocol of DKLs, protocol 5. https://eprint.iacr.org/2018/499.pdf
+// two parties---the "sender" and "receiver", let's say---each input a scalar modulo q.
+// the functionality multiplies their two scalars modulo q, and then randomly additively shares the product mod q.
+// it then returns the two respective additive shares to the two parties.
+
+// MultiplySender is the party that plays the role of Sender in the multiplication protocol (protocol 5 of the paper).
+type MultiplySender struct {
+	cOtSender *kos.Sender // underlying cOT sender struct, used by mult.
+	outputAdditiveShare curves.Scalar // ultimate output share of mult.
+	gadget [kos.L]curves.Scalar
+	curve *curves.Curve
+	transcript *merlin.Transcript
+	uniqueSessionId [simplest.DigestSize]byte
+}
+
+// MultiplyReceiver is the party that plays the role of Receiver in the multiplication protocol (protocol 5 of the paper).
+type MultiplyReceiver struct {
+	cOtReceiver *kos.Receiver // underlying cOT receiver struct, used by mult.
+	outputAdditiveShare curves.Scalar // ultimate output share of mult.
+	omega [kos.COtBlockSizeBytes]byte // this is used as an intermediate result during the course of mult.
+	gadget [kos.L]curves.Scalar
+	curve *curves.Curve
+	transcript *merlin.Transcript
+	uniqueSessionId [simplest.DigestSize]byte
+}
+
+func generateGadgetVector(curve *curves.Curve) ([kos.L]curves.Scalar, error) {
+	var err error
+	gadget := [kos.L]curves.Scalar{}
+	for i := 0; i < kos.Kappa; i++ {
+		gadget[i], err = curve.Scalar.SetBigInt(new(big.Int).Lsh(big.NewInt(1), uint(i)))
+		if err != nil {
+			return gadget, errors.Wrap(err, "creating gadget scalar from big int")
+		}
+	}
+	shake := sha3.NewCShake256(nil, []byte("Coinbase DKLs gadget vector"))
+	for i := kos.Kappa; i < kos.L; i++ {
+		var err error
+		bytes := [simplest.DigestSize]byte{}
+		if _, err = shake.Read(bytes[:]); err != nil {
+			return gadget, err
+		}
+		gadget[i], err = curve.Scalar.SetBytes(bytes[:])
+		if err != nil {
+			return gadget, errors.Wrap(err, "creating gadget scalar from bytes")
+		}
+	}
+	return gadget, nil
+}
+
+// NewMultiplySender generates a `MultiplySender` instance, ready to take part in multiplication as the "sender".
+// You must supply it the _output_ of a seed OT, from the receiver's point of view, as well as params and a unique ID.
+// That is, the mult sender must run the base OT as the receiver; note the (apparent) reversal of roles.
+func NewMultiplySender(
+	seedOtResults *simplest.ReceiverOutput,
+	curve *curves.Curve,
+	uniqueSessionId [simplest.DigestSize]byte,
+) (*MultiplySender, error) {
+	sender := kos.NewCOtSender(seedOtResults, curve)
+	gadget, err := generateGadgetVector(curve)
+	if err != nil {
+		return nil, errors.Wrap(err, "error generating gadget vector in new multiply sender")
+	}
+
+	transcript := merlin.NewTranscript("Coinbase_DKLs_Multiply")
+	transcript.AppendMessage([]byte("session_id"), uniqueSessionId[:])
+	return &MultiplySender{
+		cOtSender: sender,
+		curve: curve,
+		transcript: transcript,
+		uniqueSessionId: uniqueSessionId,
+		gadget: gadget,
+	}, nil
+}
+
+// NewMultiplyReceiver generates a `MultiplyReceiver` instance, ready to take part in multiplication as the "receiver".
+// You must supply it the _output_ of a seed OT, from the sender's point of view, as well as params and a unique ID.
+// That is, the mult receiver must run the base OT as the sender; note the (apparent) reversal of roles.
+func NewMultiplyReceiver(
+	seedOtResults *simplest.SenderOutput,
+	curve *curves.Curve,
+	uniqueSessionId [simplest.DigestSize]byte,
+) (*MultiplyReceiver, error) {
+	receiver := kos.NewCOtReceiver(seedOtResults, curve)
+	gadget, err := generateGadgetVector(curve)
+	if err != nil {
+		return nil, errors.Wrap(err, "error generating gadget vector in new multiply receiver")
+	}
+	transcript := merlin.NewTranscript("Coinbase_DKLs_Multiply")
+	transcript.AppendMessage([]byte("session_id"), uniqueSessionId[:])
+	return &MultiplyReceiver{
+		cOtReceiver: receiver,
+		curve: curve,
+		transcript: transcript,
+		uniqueSessionId: uniqueSessionId,
+		gadget: gadget,
+	}, nil
+}
+
+// MultiplyRound2Output is the output of the second round of the multiplication protocol.
+type MultiplyRound2Output struct {
+	COTRound2Output *kos.Round2Output
+	R [kos.L]curves.Scalar
+	U curves.Scalar
+}
+
+// Algorithm 5. in DKLs. "Encodes" Bob's secret input scalar `beta` in the right way, using the opts.
+// The idea is that if Bob were to just put beta's bits as the choice vector, then Alice could learn a few of Bob's bits
+// using selective failure attacks. So you subtract random components of a public random vector; see the paper for details.
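Editorial note ahead of encode() below: the first kos.Kappa gadget entries are the powers 2^i, so taking the gadget-weighted sum of a scalar's bits reconstructs the scalar; this is what lets the per-bit cOT outputs be folded back into additive shares of the product in the later rounds. A toy illustration of that reconstruction in plain big.Int arithmetic, standing in for the curve scalars (it does not reproduce the library's packed bit layout or byte reversal).

```go
// Toy illustration: summing 2^i over the set bits of a value reconstructs it,
// which is the role of the power-of-two prefix of the gadget vector.
package main

import (
	"fmt"
	"math/big"
)

func main() {
	beta, _ := new(big.Int).SetString("123456789123456789123456789", 10)
	reconstructed := new(big.Int)
	for i := 0; i < beta.BitLen(); i++ {
		if beta.Bit(i) == 1 {
			// add 2^i for every set bit of beta
			reconstructed.Add(reconstructed, new(big.Int).Lsh(big.NewInt(1), uint(i)))
		}
	}
	fmt.Println("reconstructed equals beta:", reconstructed.Cmp(beta) == 0)
}
```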
+func (receiver *MultiplyReceiver) encode(beta curves.Scalar) ([kos.COtBlockSizeBytes]byte, error) { + // passing beta by value, so that we can mutate it locally. check that this does what i want. + encoding := [kos.COtBlockSizeBytes]byte{} + bytesOfBetaMinusDotProduct := beta.Bytes() + if _, err := rand.Read(encoding[kos.KappaBytes:]); err != nil { + return encoding, errors.Wrap( + err, + "sampling `gamma` random bytes in multiply receiver encode", + ) + } + for j := kos.Kappa; j < kos.L; j++ { + jthBitOfGamma := simplest.ExtractBitFromByteVector(encoding[:], j) + // constant-time computation of the dot product beta - < gR, gamma >. + // we can only `ConstantTimeCopy` byte slices (as opposed to big ints). so keep them as bytes. + option0, err := receiver.curve.Scalar.SetBytes(bytesOfBetaMinusDotProduct[:]) + if err != nil { + return encoding, errors.Wrap(err, "setting masking bits scalar from bytes") + } + option0Bytes := option0.Bytes() + option1 := option0.Sub(receiver.gadget[j]) + option1Bytes := option1.Bytes() + bytesOfBetaMinusDotProduct = option0Bytes + subtle.ConstantTimeCopy(int(jthBitOfGamma), bytesOfBetaMinusDotProduct[:], option1Bytes) + } + copy(encoding[0:kos.KappaBytes], internal.ReverseScalarBytes(bytesOfBetaMinusDotProduct[:])) + return encoding, nil +} + +// Round1Initialize Protocol 5., Multiplication, 3). Bob (receiver) encodes beta and initiates the cOT extension +func (receiver *MultiplyReceiver) Round1Initialize(beta curves.Scalar) (*kos.Round1Output, error) { + var err error + if receiver.omega, err = receiver.encode(beta); err != nil { + return nil, errors.Wrap(err, "encoding input beta in receiver round 1 initialize") + } + cOtRound1Output, err := receiver.cOtReceiver.Round1Initialize( + receiver.uniqueSessionId, + receiver.omega, + ) + if err != nil { + return nil, errors.Wrap( + err, + "error in cOT round 1 initialize within multiply round 1 initialize", + ) + } + // write the output of the first round to the transcript + for i := 0; i < kos.Kappa; i++ { + label := []byte(fmt.Sprintf("row %d of U", i)) + receiver.transcript.AppendMessage(label, cOtRound1Output.U[i][:]) + } + receiver.transcript.AppendMessage([]byte("wPrime"), cOtRound1Output.WPrime[:]) + receiver.transcript.AppendMessage([]byte("vPrime"), cOtRound1Output.VPrime[:]) + return cOtRound1Output, nil +} + +// Round2Multiply Protocol 5., steps 3) 5), 7). Alice _responds_ to Bob's initial cOT message, using alpha as input. +// Doesn't actually send the message yet, only stashes it and moves onto the next steps of the multiplication protocol +// specifically, Alice can then do step 5) (compute the outputs of the multiplication protocol), also stashes this. +// Finishes by taking care of 7), after that, Alice is totally done with multiplication and has stashed the outputs. +func (sender *MultiplySender) Round2Multiply( + alpha curves.Scalar, + round1Output *kos.Round1Output, +) (*MultiplyRound2Output, error) { + var err error + alphaHat := sender.curve.Scalar.Random(rand.Reader) + input := [kos.L][2]curves.Scalar{} // sender's input, namely integer "sums" in case w_j == 1. 
+ for j := 0; j < kos.L; j++ { + input[j][0] = alpha + input[j][1] = alphaHat + } + round2Output := &MultiplyRound2Output{} + round2Output.COTRound2Output, err = sender.cOtSender.Round2Transfer( + sender.uniqueSessionId, + input, + round1Output, + ) + if err != nil { + return nil, errors.Wrap(err, "error in cOT within round 2 multiply") + } + // write the output of the first round to the transcript + for i := 0; i < kos.Kappa; i++ { + label := []byte(fmt.Sprintf("row %d of U", i)) + sender.transcript.AppendMessage(label, round1Output.U[i][:]) + } + sender.transcript.AppendMessage([]byte("wPrime"), round1Output.WPrime[:]) + sender.transcript.AppendMessage([]byte("vPrime"), round1Output.VPrime[:]) + // write our own output of the second round to the transcript + chiWidth := 2 + for i := 0; i < kos.Kappa; i++ { + for k := 0; k < chiWidth; k++ { + label := []byte(fmt.Sprintf("row %d of Tau", i)) + sender.transcript.AppendMessage(label, round2Output.COTRound2Output.Tau[i][k].Bytes()) + } + } + chi := make([]curves.Scalar, chiWidth) + for k := 0; k < 2; k++ { + label := []byte(fmt.Sprintf("draw challenge chi %d", k)) + randomBytes := sender.transcript.ExtractBytes(label, kos.KappaBytes) + chi[k], err = sender.curve.Scalar.SetBytes(randomBytes) + if err != nil { + return nil, errors.Wrap(err, "setting chi scalar from bytes") + } + } + sender.outputAdditiveShare = sender.curve.Scalar.Zero() + for j := 0; j < kos.L; j++ { + round2Output.R[j] = sender.curve.Scalar.Zero() + for k := 0; k < chiWidth; k++ { + round2Output.R[j] = round2Output.R[j].Add( + chi[k].Mul(sender.cOtSender.OutputAdditiveShares[j][k]), + ) + } + sender.outputAdditiveShare = sender.outputAdditiveShare.Add( + sender.gadget[j].Mul(sender.cOtSender.OutputAdditiveShares[j][0]), + ) + } + round2Output.U = chi[0].Mul(alpha).Add(chi[1].Mul(alphaHat)) + return round2Output, nil +} + +// Round3Multiply Protocol 5., Multiplication, 3) and 6). Bob finalizes the cOT extension. +// using that and Alice's multiplication message, Bob completes the multiplication protocol, including checks. +// At the end, Bob's values tB_j are populated. +func (receiver *MultiplyReceiver) Round3Multiply(round2Output *MultiplyRound2Output) error { + chiWidth := 2 + // write the output of the second round to the transcript + for i := 0; i < kos.Kappa; i++ { + for k := 0; k < chiWidth; k++ { + label := []byte(fmt.Sprintf("row %d of Tau", i)) + receiver.transcript.AppendMessage(label, round2Output.COTRound2Output.Tau[i][k].Bytes()) + } + } + if err := receiver.cOtReceiver.Round3Transfer(round2Output.COTRound2Output); err != nil { + return errors.Wrap(err, "error within cOT round 3 transfer within round 3 multiply") + } + var err error + chi := make([]curves.Scalar, chiWidth) + for k := 0; k < chiWidth; k++ { + label := []byte(fmt.Sprintf("draw challenge chi %d", k)) + randomBytes := receiver.transcript.ExtractBytes(label, kos.KappaBytes) + chi[k], err = receiver.curve.Scalar.SetBytes(randomBytes) + if err != nil { + return errors.Wrap(err, "setting chi scalar from bytes") + } + } + + receiver.outputAdditiveShare = receiver.curve.Scalar.Zero() + for j := 0; j < kos.L; j++ { + // compute the LHS of bob's step 6) for j. note that we're "adding r_j" to both sides"; so this LHS includes r_j. + // the reason to do this is so that the constant-time (i.e., independent of w_j) calculation of w_j * u can proceed more cleanly. 
+ leftHandSideOfCheck := round2Output.R[j] + for k := 0; k < chiWidth; k++ { + leftHandSideOfCheck = leftHandSideOfCheck.Add( + chi[k].Mul(receiver.cOtReceiver.OutputAdditiveShares[j][k]), + ) + } + rightHandSideOfCheck := [simplest.DigestSize]byte{} + jthBitOfOmega := simplest.ExtractBitFromByteVector(receiver.omega[:], j) + subtle.ConstantTimeCopy(int(jthBitOfOmega), rightHandSideOfCheck[:], round2Output.U.Bytes()) + if subtle.ConstantTimeCompare(rightHandSideOfCheck[:], leftHandSideOfCheck.Bytes()) != 1 { + return fmt.Errorf("alice's values R and U failed to check in round 3 multiply") + } + receiver.outputAdditiveShare = receiver.outputAdditiveShare.Add( + receiver.gadget[j].Mul(receiver.cOtReceiver.OutputAdditiveShares[j][0]), + ) + } + return nil +} diff --git a/tecdsa/dklsv1/sign/multiply_test.go b/tecdsa/dklsv1/sign/multiply_test.go new file mode 100644 index 0000000..c4af12d --- /dev/null +++ b/tecdsa/dklsv1/sign/multiply_test.go @@ -0,0 +1,52 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package sign + +import ( + "crypto/rand" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/ot/base/simplest" + "github.com/sonr-io/sonr/crypto/ot/extension/kos" + "github.com/sonr-io/sonr/crypto/ot/ottest" +) + +func TestMultiply(t *testing.T) { + curve := curves.K256() + hashKeySeed := [simplest.DigestSize]byte{} + _, err := rand.Read(hashKeySeed[:]) + require.NoError(t, err) + + baseOtSenderOutput, baseOtReceiverOutput, err := ottest.RunSimplestOT( + curve, + kos.Kappa, + hashKeySeed, + ) + require.NoError(t, err) + + sender, err := NewMultiplySender(baseOtReceiverOutput, curve, hashKeySeed) + require.NoError(t, err) + receiver, err := NewMultiplyReceiver(baseOtSenderOutput, curve, hashKeySeed) + require.NoError(t, err) + + alpha := curve.Scalar.Random(rand.Reader) + beta := curve.Scalar.Random(rand.Reader) + + round1Output, err := receiver.Round1Initialize(beta) + require.Nil(t, err) + round2Output, err := sender.Round2Multiply(alpha, round1Output) + require.Nil(t, err) + err = receiver.Round3Multiply(round2Output) + require.Nil(t, err) + + product := alpha.Mul(beta) + sum := sender.outputAdditiveShare.Add(receiver.outputAdditiveShare) + require.Equal(t, product, sum) +} diff --git a/tecdsa/dklsv1/sign/sign.go b/tecdsa/dklsv1/sign/sign.go new file mode 100644 index 0000000..0679ac3 --- /dev/null +++ b/tecdsa/dklsv1/sign/sign.go @@ -0,0 +1,409 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +// Package sign implements the 2-2 threshold signature protocol of [DKLs18](https://eprint.iacr.org/2018/499.pdf). +// The signing protocol is defined in "Protocol 4" page 9, of the paper. The Zero Knowledge Proof ideal functionalities are +// realized using schnorr proofs. +package sign + +import ( + "crypto/ecdsa" + "crypto/rand" + "fmt" + "hash" + "math/big" + + "github.com/gtank/merlin" + "github.com/pkg/errors" + "golang.org/x/crypto/sha3" + + "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/ot/base/simplest" + "github.com/sonr-io/sonr/crypto/ot/extension/kos" + "github.com/sonr-io/sonr/crypto/tecdsa/dklsv1/dkg" + "github.com/sonr-io/sonr/crypto/zkp/schnorr" +) + +const multiplicationCount = 2 + +// Alice struct encoding Alice's state during one execution of the overall signing algorithm. +// At the end of the joint computation, Alice will not possess the signature. 
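Editorial aside before the party structs below: at the call-site level, one signing session walks the four rounds in order, exactly as the refresh tests earlier in this diff do. A compact sketch of that flow, assuming DKG (or refresh) outputs for both parties are already in hand; it is written as if it lived inside this package, so the constructors are unqualified, and the function name signOnce is illustrative.

```go
// Editorial sketch only; mirrors the round sequence exercised by the tests above.
func signOnce(curve *curves.Curve, aliceOut *dkg.AliceOutput, bobOut *dkg.BobOutput, message []byte) (*curves.EcdsaSignature, error) {
	alice := NewAlice(curve, sha3.New256(), aliceOut)
	bob := NewBob(curve, sha3.New256(), bobOut)

	seed, err := alice.Round1GenerateRandomSeed()
	if err != nil {
		return nil, err
	}
	round2Output, err := bob.Round2Initialize(seed)
	if err != nil {
		return nil, err
	}
	round3Output, err := alice.Round3Sign(message, round2Output)
	if err != nil {
		return nil, err
	}
	// Round4Final verifies the assembled signature before returning.
	if err := bob.Round4Final(message, round3Output); err != nil {
		return nil, err
	}
	return bob.Signature, nil
}
```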
+type Alice struct {
+	hash hash.Hash // which hash function to use to compute the message (i.e., the digest)
+	seedOtResults *simplest.ReceiverOutput
+	secretKeyShare curves.Scalar // the witness
+	publicKey curves.Point
+	curve *curves.Curve
+	transcript *merlin.Transcript
+}
+
+// Bob struct encoding Bob's state during one execution of the overall signing algorithm.
+// At the end of the joint computation, Bob will obtain the signature.
+type Bob struct {
+	// Signature is the resulting digital signature and is the output of this protocol.
+	Signature *curves.EcdsaSignature
+
+	hash hash.Hash // which hash function to use to compute the message digest
+	seedOtResults *simplest.SenderOutput
+	secretKeyShare curves.Scalar
+	publicKey curves.Point
+	transcript *merlin.Transcript
+	// multiplyReceivers are 2 receivers that are used to perform the two multiplications needed:
+	// 1. (phi + 1/kA) * (1/kB)
+	// 2. skA/kA * skB/kB
+	multiplyReceivers [multiplicationCount]*MultiplyReceiver
+	kB curves.Scalar
+	dB curves.Point
+	curve *curves.Curve
+}
+
+// NewAlice creates a party that can participate in protocol runs of DKLs sign, in the role of Alice.
+func NewAlice(curve *curves.Curve, hash hash.Hash, dkgOutput *dkg.AliceOutput) *Alice {
+	return &Alice{
+		hash: hash,
+		seedOtResults: dkgOutput.SeedOtResult,
+		curve: curve,
+		secretKeyShare: dkgOutput.SecretKeyShare,
+		publicKey: dkgOutput.PublicKey,
+		transcript: merlin.NewTranscript("Coinbase_DKLs_Sign"),
+	}
+}
+
+// NewBob creates a party that can participate in protocol runs of DKLs sign, in the role of Bob.
+// This party receives the signature at the end.
+func NewBob(curve *curves.Curve, hash hash.Hash, dkgOutput *dkg.BobOutput) *Bob {
+	return &Bob{
+		hash: hash,
+		seedOtResults: dkgOutput.SeedOtResult,
+		curve: curve,
+		secretKeyShare: dkgOutput.SecretKeyShare,
+		publicKey: dkgOutput.PublicKey,
+		transcript: merlin.NewTranscript("Coinbase_DKLs_Sign"),
+	}
+}
+
+// SignRound2Output is the output of the 2nd round of the protocol.
+type SignRound2Output struct {
+	// KosRound1Outputs is the output of the first round of OT Extension, stored for future rounds.
+	KosRound1Outputs [multiplicationCount]*kos.Round1Output
+
+	// DB is D_{B} = k_{B} . G from the paper.
+	DB curves.Point
+
+	// Seed is the random value used to derive the joint unique session id.
+	Seed [simplest.DigestSize]byte
+}
+
+// SignRound3Output is the output of the 3rd round of the protocol.
+type SignRound3Output struct {
+	// MultiplyRound2Outputs is the output of the second round of multiply sub-protocol. Stored to use in future rounds.
+	MultiplyRound2Outputs [multiplicationCount]*MultiplyRound2Output
+
+	// RSchnorrProof is ZKP for the value R = k_{A} . D_{B} from the paper.
+	RSchnorrProof *schnorr.Proof
+
+	// RPrime is R' = k'_{A} . D_{B} from the paper.
+	RPrime curves.Point
+
+	// EtaPhi is the Eta_{Phi} from the paper.
+	EtaPhi curves.Scalar
+
+	// EtaSig is the Eta_{Sig} from the paper.
+	EtaSig curves.Scalar
+}
+
+// Round1GenerateRandomSeed is the first step of the generation of the shared random salt `idExt`;
+// in this round, Alice flips 32 random bytes and sends them to Bob.
+// Note that this is not _explicitly_ given as part of the protocol in https://eprint.iacr.org/2018/499.pdf, Protocol 1).
+// Rather, it is part of our generation of `idExt`, the shared random salt which both parties must use in cOT.
+// This value is introduced in Protocol 9), at the very top of page 16; it is not indicated how it should be derived.
+// We do it by having each party sample 32 bytes, then by appending _both_ as salts. Secure if either party is honest +func (alice *Alice) Round1GenerateRandomSeed() ([simplest.DigestSize]byte, error) { + aliceSeed := [simplest.DigestSize]byte{} + if _, err := rand.Read(aliceSeed[:]); err != nil { + return [simplest.DigestSize]byte{}, errors.Wrap( + err, + "generating random bytes in alice round 1 generate", + ) + } + alice.transcript.AppendMessage([]byte("session_id_alice"), aliceSeed[:]) + return aliceSeed, nil +} + +// Round2Initialize Bob's initial message, which kicks off the signature process. Protocol 1, Bob's steps 1) - 3). +// Bob's work here entails beginning the Diffie–Hellman-like construction of the instance key / nonce, +// as well as preparing the inputs which he will feed into the multiplication protocol, +// and moreover actually initiating the (first respective messages of) the multiplication protocol using these inputs. +// This latter step in turn amounts to sending the initial message in a new cOT extension. +// All the resulting data gets packaged and sent to Alice. +func (bob *Bob) Round2Initialize(aliceSeed [simplest.DigestSize]byte) (*SignRound2Output, error) { + bobSeed := [simplest.DigestSize]byte{} + if _, err := rand.Read(bobSeed[:]); err != nil { + return nil, errors.Wrap(err, "flipping random coins in bob round 2 initialize") + } + bob.transcript.AppendMessage([]byte("session_id_alice"), aliceSeed[:]) + bob.transcript.AppendMessage([]byte("session_id_bob"), bobSeed[:]) + + var err error + uniqueSessionId := [simplest.DigestSize]byte{} // will use and _re-use_ this throughout, for sub-session IDs + copy( + uniqueSessionId[:], + bob.transcript.ExtractBytes([]byte("multiply receiver id 0"), simplest.DigestSize), + ) + bob.multiplyReceivers[0], err = NewMultiplyReceiver( + bob.seedOtResults, + bob.curve, + uniqueSessionId, + ) + if err != nil { + return nil, errors.Wrap(err, "error creating multiply receiver 0 in Bob sign round 3") + } + copy( + uniqueSessionId[:], + bob.transcript.ExtractBytes([]byte("multiply receiver id 1"), simplest.DigestSize), + ) + bob.multiplyReceivers[1], err = NewMultiplyReceiver( + bob.seedOtResults, + bob.curve, + uniqueSessionId, + ) + if err != nil { + return nil, errors.Wrap(err, "error creating multiply receiver 1 in Bob sign round 3") + } + round2Output := &SignRound2Output{ + Seed: bobSeed, + } + bob.kB = bob.curve.Scalar.Random(rand.Reader) + bob.dB = bob.curve.ScalarBaseMult(bob.kB) + round2Output.DB = bob.dB + kBInv := bob.curve.Scalar.One().Div(bob.kB) + + round2Output.KosRound1Outputs[0], err = bob.multiplyReceivers[0].Round1Initialize(kBInv) + if err != nil { + return nil, errors.Wrap( + err, + "error in multiply round 1 initialize 0 within Bob sign round 3 initialize", + ) + } + round2Output.KosRound1Outputs[1], err = bob.multiplyReceivers[1].Round1Initialize( + bob.secretKeyShare.Mul(kBInv), + ) + if err != nil { + return nil, errors.Wrap( + err, + "error in multiply round 1 initialize 1 within Bob sign round 3 initialize", + ) + } + return round2Output, nil +} + +// Round3Sign Alice's first message. Alice is the _responder_; she is responding to Bob's initial message. +// This is Protocol 1 (p. 6), and contains Alice's steps 3) -- 8). these can all be combined into one message. 
+// Alice's job here is to finish computing the shared instance key / nonce, as well as multiplication input values; +// then to invoke the multiplication on these two input values (stashing the outputs in her running result struct), +// then to use the _output_ of the multiplication (which she already possesses as of the end of her computation), +// and use that to compute some final values which will help Bob compute the final signature. +func (alice *Alice) Round3Sign( + message []byte, + round2Output *SignRound2Output, +) (*SignRound3Output, error) { + alice.transcript.AppendMessage([]byte("session_id_bob"), round2Output.Seed[:]) + + multiplySenders := [multiplicationCount]*MultiplySender{} + var err error + uniqueSessionId := [simplest.DigestSize]byte{} // will use and _re-use_ this throughout, for sub-session IDs + copy( + uniqueSessionId[:], + alice.transcript.ExtractBytes([]byte("multiply receiver id 0"), simplest.DigestSize), + ) + if multiplySenders[0], err = NewMultiplySender(alice.seedOtResults, alice.curve, uniqueSessionId); err != nil { + return nil, errors.Wrap(err, "creating multiply sender 0 in Alice round 4 sign") + } + copy( + uniqueSessionId[:], + alice.transcript.ExtractBytes([]byte("multiply receiver id 1"), simplest.DigestSize), + ) + if multiplySenders[1], err = NewMultiplySender(alice.seedOtResults, alice.curve, uniqueSessionId); err != nil { + return nil, errors.Wrap(err, "creating multiply sender 1 in Alice round 4 sign") + } + round3Output := &SignRound3Output{} + kPrimeA := alice.curve.Scalar.Random(rand.Reader) + round3Output.RPrime = round2Output.DB.Mul(kPrimeA) + hashRPrimeBytes := sha3.Sum256(round3Output.RPrime.ToAffineCompressed()) + hashRPrime, err := alice.curve.Scalar.SetBytes(hashRPrimeBytes[:]) + if err != nil { + return nil, errors.Wrap(err, "setting hashRPrime scalar from bytes") + } + kA := hashRPrime.Add(kPrimeA) + copy( + uniqueSessionId[:], + alice.transcript.ExtractBytes([]byte("schnorr proof for R"), simplest.DigestSize), + ) + rSchnorrProver := schnorr.NewProver(alice.curve, round2Output.DB, uniqueSessionId[:]) + round3Output.RSchnorrProof, err = rSchnorrProver.Prove(kA) + if err != nil { + return nil, errors.Wrap( + err, + "generating schnorr proof for R = kA * DB in alice round 4 sign", + ) + } + // reassign / stash the below value here just for notational clarity. + // this is _the_ key public point R in the ECDSA signature. we'll use its coordinate X in various places. 
+	r := round3Output.RSchnorrProof.Statement
+	phi := alice.curve.Scalar.Random(rand.Reader)
+	kAInv := alice.curve.Scalar.One().Div(kA)
+
+	if round3Output.MultiplyRound2Outputs[0], err = multiplySenders[0].Round2Multiply(phi.Add(kAInv), round2Output.KosRound1Outputs[0]); err != nil {
+		return nil, errors.Wrap(err, "error in round 2 multiply 0 within alice round 4 sign")
+	}
+	if round3Output.MultiplyRound2Outputs[1], err = multiplySenders[1].Round2Multiply(alice.secretKeyShare.Mul(kAInv), round2Output.KosRound1Outputs[1]); err != nil {
+		return nil, errors.Wrap(err, "error in round 2 multiply 1 within alice round 4 sign")
+	}
+
+	one := alice.curve.Scalar.One()
+	gamma1 := alice.curve.ScalarBaseMult(kA.Mul(phi).Add(one))
+	other := r.Mul(multiplySenders[0].outputAdditiveShare.Neg())
+	gamma1 = gamma1.Add(other)
+	hashGamma1Bytes := sha3.Sum256(gamma1.ToAffineCompressed())
+	hashGamma1, err := alice.curve.Scalar.SetBytes(hashGamma1Bytes[:])
+	if err != nil {
+		return nil, errors.Wrap(err, "setting hashGamma1 scalar from bytes")
+	}
+	round3Output.EtaPhi = hashGamma1.Add(phi)
+	if _, err = alice.hash.Write(message); err != nil {
+		return nil, errors.Wrap(err, "writing message to hash in alice round 4 sign")
+	}
+	digest := alice.hash.Sum(nil)
+	hOfMAsInteger, err := alice.curve.Scalar.SetBytes(digest)
+	if err != nil {
+		return nil, errors.Wrap(err, "setting hOfMAsInteger scalar from bytes")
+	}
+	affineCompressedForm := r.ToAffineCompressed()
+	if len(affineCompressedForm) != 33 {
+		return nil, errors.New("the compressed form must be exactly 33 bytes")
+	}
+	// Discard the leading byte and parse the rest as the X coordinate.
+	rX, err := alice.curve.Scalar.SetBytes(affineCompressedForm[1:])
+	if err != nil {
+		return nil, errors.Wrap(err, "setting rX scalar from bytes")
+	}
+
+	sigA := hOfMAsInteger.Mul(multiplySenders[0].outputAdditiveShare).
+		Add(rX.Mul(multiplySenders[1].outputAdditiveShare))
+	gamma2 := alice.publicKey.Mul(multiplySenders[0].outputAdditiveShare)
+	other = alice.curve.ScalarBaseMult(multiplySenders[1].outputAdditiveShare.Neg())
+	gamma2 = gamma2.Add(other)
+	hashGamma2Bytes := sha3.Sum256(gamma2.ToAffineCompressed())
+	hashGamma2, err := alice.curve.Scalar.SetBytes(hashGamma2Bytes[:])
+	if err != nil {
+		return nil, errors.Wrap(err, "setting hashGamma2 scalar from bytes")
+	}
+	round3Output.EtaSig = hashGamma2.Add(sigA)
+	return round3Output, nil
+}
+
+// Round4Final is Bob's last portion of the signature computation, and ultimately results in the complete signature;
+// it corresponds to Protocol 1, Bob's steps 3) -- 10).
+// Bob begins by _finishing_ the OT-based multiplication, using Alice's one and only message to him re: the mult.
+// Bob then moves on to the remainder of Alice's message, which contains extraneous data used to finish the signature.
+// Using this data, Bob completes the signature, which gets stored in `Bob.Signature`. Bob also verifies it.
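Editorial aside before Round4Final below: the nonce-binding step recomputes R = H(R')·D_B + R' and verifies Alice's Schnorr proof against that recomputed point, so Alice is committed to k_A = H(k'_A·D_B) + k'_A rather than a nonce of her choosing (this mirrors kA := hashRPrime.Add(kPrimeA) in Round3Sign above). A standalone check of that identity using the same curves API; illustration only, not part of the change.

```go
// Illustration only: both parties arrive at the same R without Alice being
// able to pick it freely. Mirrors kA := H(R') + k'A in Round3Sign above and
// r := H(R')·DB + R' in Round4Final below.
package main

import (
	"crypto/rand"
	"fmt"

	"golang.org/x/crypto/sha3"

	"github.com/sonr-io/sonr/crypto/core/curves"
)

func main() {
	curve := curves.K256()

	kB := curve.Scalar.Random(rand.Reader)
	dB := curve.ScalarBaseMult(kB) // Bob's D_B = k_B·G

	kPrimeA := curve.Scalar.Random(rand.Reader)
	rPrime := dB.Mul(kPrimeA) // Alice's R' = k'_A·D_B
	digest := sha3.Sum256(rPrime.ToAffineCompressed())
	h, err := curve.Scalar.SetBytes(digest[:])
	if err != nil {
		panic(err)
	}
	kA := h.Add(kPrimeA)

	rAlice := dB.Mul(kA)          // Alice's view: R = k_A·D_B
	rBob := dB.Mul(h).Add(rPrime) // Bob's view:   R = H(R')·D_B + R'
	fmt.Println("views agree:", rAlice.Equal(rBob))
}
```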
+func (bob *Bob) Round4Final(message []byte, round3Output *SignRound3Output) error { + if err := bob.multiplyReceivers[0].Round3Multiply(round3Output.MultiplyRound2Outputs[0]); err != nil { + return errors.Wrap(err, "error in round 3 multiply 0 within sign round 5") + } + if err := bob.multiplyReceivers[1].Round3Multiply(round3Output.MultiplyRound2Outputs[1]); err != nil { + return errors.Wrap(err, "error in round 3 multiply 1 within sign round 5") + } + rPrimeHashedBytes := sha3.Sum256(round3Output.RPrime.ToAffineCompressed()) + rPrimeHashed, err := bob.curve.Scalar.SetBytes(rPrimeHashedBytes[:]) + if err != nil { + return errors.Wrap(err, "setting rPrimeHashed scalar from bytes") + } + r := bob.dB.Mul(rPrimeHashed) + r = r.Add(round3Output.RPrime) + // To ensure that the correct public statement is used, we use the public statement that we have calculated + // instead of the open Alice sent us. + round3Output.RSchnorrProof.Statement = r + uniqueSessionId := [simplest.DigestSize]byte{} + copy( + uniqueSessionId[:], + bob.transcript.ExtractBytes([]byte("schnorr proof for R"), simplest.DigestSize), + ) + if err = schnorr.Verify(round3Output.RSchnorrProof, bob.curve, bob.dB, uniqueSessionId[:]); err != nil { + return errors.Wrap(err, "bob's verification of alice's schnorr proof re: r failed") + } + zero := bob.curve.Scalar.Zero() + affineCompressedForm := r.ToAffineCompressed() + if len(affineCompressedForm) != 33 { + return errors.New("the compressed form must be exactly 33 bytes") + } + rY := affineCompressedForm[0] & 0x1 // this is bit(0) of Y coordinate + rX, err := bob.curve.Scalar.SetBytes(affineCompressedForm[1:]) + if err != nil { + return errors.Wrap(err, "setting rX scalar from bytes") + } + bob.Signature = &curves.EcdsaSignature{ + R: rX.Add(zero). + BigInt(), + // slight trick here; add it to 0 just to mod it by q (now it's mod p!) 
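+ // (rX holds R's X coordinate, an element mod p; per the note above, the Add(zero) is what reduces it
+ // into the scalar field, i.e. mod the group order q, before it is stored as the signature's R value.)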
+ V: int(rY), + } + gamma1 := r.Mul(bob.multiplyReceivers[0].outputAdditiveShare) + gamma1HashedBytes := sha3.Sum256(gamma1.ToAffineCompressed()) + gamma1Hashed, err := bob.curve.Scalar.SetBytes(gamma1HashedBytes[:]) + if err != nil { + return errors.Wrap(err, "setting gamma1Hashed scalar from bytes") + } + phi := round3Output.EtaPhi.Sub(gamma1Hashed) + theta := bob.multiplyReceivers[0].outputAdditiveShare.Sub(phi.Div(bob.kB)) + if _, err = bob.hash.Write(message); err != nil { + return errors.Wrap(err, "writing message to hash in Bob sign round 5 final") + } + digestBytes := bob.hash.Sum(nil) + digest, err := bob.curve.Scalar.SetBytes(digestBytes) + if err != nil { + return errors.Wrap(err, "setting digest scalar from bytes") + } + capitalR, err := bob.curve.Scalar.SetBigInt(bob.Signature.R) + if err != nil { + return errors.Wrap(err, "setting capitalR scalar from big int") + } + sigB := digest.Mul(theta).Add(capitalR.Mul(bob.multiplyReceivers[1].outputAdditiveShare)) + gamma2 := bob.curve.ScalarBaseMult(bob.multiplyReceivers[1].outputAdditiveShare) + other := bob.publicKey.Mul(theta.Neg()) + gamma2 = gamma2.Add(other) + gamma2HashedBytes := sha3.Sum256(gamma2.ToAffineCompressed()) + gamma2Hashed, err := bob.curve.Scalar.SetBytes(gamma2HashedBytes[:]) + if err != nil { + return errors.Wrap(err, "setting gamma2Hashed scalar from bytes") + } + scalarS := sigB.Add(round3Output.EtaSig.Sub(gamma2Hashed)) + bob.Signature.S = scalarS.BigInt() + if bob.Signature.S.Bit(255) == 1 { + bob.Signature.S = scalarS.Neg().BigInt() + bob.Signature.V ^= 1 + } + // now verify the signature + unCompressedAffinePublicKey := bob.publicKey.ToAffineUncompressed() + if len(unCompressedAffinePublicKey) != 65 { + return errors.New("the uncompressed form must have exactly 65 bytes") + } + x := new(big.Int).SetBytes(unCompressedAffinePublicKey[1:33]) + y := new(big.Int).SetBytes(unCompressedAffinePublicKey[33:]) + ellipticCurve, err := bob.curve.ToEllipticCurve() + if err != nil { + return errors.Wrap(err, "invalid curve") + } + if !ecdsa.Verify( + &ecdsa.PublicKey{Curve: ellipticCurve, X: x, Y: y}, + digestBytes, + bob.Signature.R, + bob.Signature.S, + ) { + return fmt.Errorf("final signature failed to verify") + } + return nil +} diff --git a/tecdsa/dklsv1/sign/sign_test.go b/tecdsa/dklsv1/sign/sign_test.go new file mode 100644 index 0000000..272f878 --- /dev/null +++ b/tecdsa/dklsv1/sign/sign_test.go @@ -0,0 +1,121 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package sign + +import ( + "crypto/rand" + "testing" + + "github.com/stretchr/testify/require" + "golang.org/x/crypto/sha3" + + "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/ot/base/simplest" + "github.com/sonr-io/sonr/crypto/ot/extension/kos" + "github.com/sonr-io/sonr/crypto/ot/ottest" + "github.com/sonr-io/sonr/crypto/tecdsa/dklsv1/dkg" +) + +func TestSign(t *testing.T) { + curveInstances := []*curves.Curve{ + curves.K256(), + curves.P256(), + } + for _, curve := range curveInstances { + hashKeySeed := [simplest.DigestSize]byte{} + _, err := rand.Read(hashKeySeed[:]) + require.NoError(t, err) + + baseOtSenderOutput, baseOtReceiverOutput, err := ottest.RunSimplestOT( + curve, + kos.Kappa, + hashKeySeed, + ) + require.NoError(t, err) + + secretKeyShareA := curve.Scalar.Random(rand.Reader) + secretKeyShareB := curve.Scalar.Random(rand.Reader) + require.NoError(t, err) + publicKey := curve.ScalarBaseMult(secretKeyShareA.Mul(secretKeyShareB)) + alice := NewAlice( + curve, + sha3.New256(), + &dkg.AliceOutput{ + SeedOtResult: baseOtReceiverOutput, + SecretKeyShare: secretKeyShareA, + PublicKey: publicKey, + }, + ) + bob := NewBob( + curve, + sha3.New256(), + &dkg.BobOutput{ + SeedOtResult: baseOtSenderOutput, + SecretKeyShare: secretKeyShareB, + PublicKey: publicKey, + }, + ) + + message := []byte("A message.") + seed, err := alice.Round1GenerateRandomSeed() + require.NoError(t, err) + round3Output, err := bob.Round2Initialize(seed) + require.NoError(t, err) + round4Output, err := alice.Round3Sign(message, round3Output) + require.NoError(t, err) + err = bob.Round4Final(message, round4Output) + require.NoError(t, err, "curve: %s", curve.Name) + } +} + +func BenchmarkSign(b *testing.B) { + curve := curves.K256() + hashKeySeed := [simplest.DigestSize]byte{} + _, err := rand.Read(hashKeySeed[:]) + require.NoError(b, err) + + baseOtSenderOutput, baseOtReceiverOutput, err := ottest.RunSimplestOT( + curve, + kos.Kappa, + hashKeySeed, + ) + require.NoError(b, err) + + secretKeyShareA := curve.Scalar.Random(rand.Reader) + secretKeyShareB := curve.Scalar.Random(rand.Reader) + publicKey := curve.ScalarBaseMult(secretKeyShareA.Mul(secretKeyShareB)) + alice := NewAlice( + curve, + sha3.New256(), + &dkg.AliceOutput{ + SeedOtResult: baseOtReceiverOutput, + SecretKeyShare: secretKeyShareA, + PublicKey: publicKey, + }, + ) + bob := NewBob( + curve, + sha3.New256(), + &dkg.BobOutput{ + SeedOtResult: baseOtSenderOutput, + SecretKeyShare: secretKeyShareB, + PublicKey: publicKey, + }, + ) + + message := []byte("A message.") + for n := 0; n < b.N; n++ { + seed, err := alice.Round1GenerateRandomSeed() + require.NoError(b, err) + round3Output, err := bob.Round2Initialize(seed) + require.NoError(b, err) + round4Output, err := alice.Round3Sign(message, round3Output) + require.NoError(b, err) + err = bob.Round4Final(message, round4Output) + require.NoError(b, err) + } +} diff --git a/tecdsa/dklsv1/signserializers.go b/tecdsa/dklsv1/signserializers.go new file mode 100644 index 0000000..04ebea3 --- /dev/null +++ b/tecdsa/dklsv1/signserializers.go @@ -0,0 +1,128 @@ +package dklsv1 + +import ( + "bytes" + "encoding/gob" + + "github.com/pkg/errors" + + "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/core/protocol" + "github.com/sonr-io/sonr/crypto/tecdsa/dklsv1/sign" +) + +func newSignProtocolMessage(payload []byte, round string, version uint) *protocol.Message { + return &protocol.Message{ + Protocol: 
protocol.Dkls18Sign, + Version: version, + Payloads: map[string][]byte{payloadKey: payload}, + Metadata: map[string]string{"round": round}, + } +} + +func encodeSignRound1Output(commitment [32]byte, version uint) (*protocol.Message, error) { + if version != protocol.Version1 { + return nil, errors.New("only version 1 is supported") + } + buf := bytes.NewBuffer([]byte{}) + enc := gob.NewEncoder(buf) + if err := enc.Encode(&commitment); err != nil { + return nil, errors.WithStack(err) + } + return newSignProtocolMessage(buf.Bytes(), "1", version), nil +} + +func decodeSignRound2Input(m *protocol.Message) ([32]byte, error) { + if m.Version != protocol.Version1 { + return [32]byte{}, errors.New("only version 1 is supported") + } + buf := bytes.NewBuffer(m.Payloads[payloadKey]) + dec := gob.NewDecoder(buf) + decoded := [32]byte{} + if err := dec.Decode(&decoded); err != nil { + return [32]byte{}, errors.WithStack(err) + } + return decoded, nil +} + +func encodeSignRound2Output( + output *sign.SignRound2Output, + version uint, +) (*protocol.Message, error) { + if version != protocol.Version1 { + return nil, errors.New("only version 1 is supported") + } + buf := bytes.NewBuffer([]byte{}) + enc := gob.NewEncoder(buf) + if err := enc.Encode(output); err != nil { + return nil, errors.WithStack(err) + } + return newSignProtocolMessage(buf.Bytes(), "2", version), nil +} + +func decodeSignRound3Input(m *protocol.Message) (*sign.SignRound2Output, error) { + if m.Version != protocol.Version1 { + return nil, errors.New("only version 1 is supported") + } + buf := bytes.NewBuffer(m.Payloads[payloadKey]) + dec := gob.NewDecoder(buf) + decoded := &sign.SignRound2Output{} + if err := dec.Decode(&decoded); err != nil { + return nil, errors.WithStack(err) + } + return decoded, nil +} + +func encodeSignRound3Output( + output *sign.SignRound3Output, + version uint, +) (*protocol.Message, error) { + if version != protocol.Version1 { + return nil, errors.New("only version 1 is supported") + } + buf := bytes.NewBuffer([]byte{}) + enc := gob.NewEncoder(buf) + if err := enc.Encode(output); err != nil { + return nil, errors.WithStack(err) + } + return newSignProtocolMessage(buf.Bytes(), "3", version), nil +} + +func decodeSignRound4Input(m *protocol.Message) (*sign.SignRound3Output, error) { + if m.Version != protocol.Version1 { + return nil, errors.New("only version 1 is supported") + } + buf := bytes.NewBuffer(m.Payloads[payloadKey]) + dec := gob.NewDecoder(buf) + decoded := &sign.SignRound3Output{} + if err := dec.Decode(&decoded); err != nil { + return nil, errors.WithStack(err) + } + return decoded, nil +} + +func encodeSignature(signature *curves.EcdsaSignature, version uint) (*protocol.Message, error) { + if version != protocol.Version1 { + return nil, errors.New("only version 1 is supported") + } + buf := bytes.NewBuffer([]byte{}) + enc := gob.NewEncoder(buf) + if err := enc.Encode(signature); err != nil { + return nil, errors.WithStack(err) + } + return newSignProtocolMessage(buf.Bytes(), "signature", version), nil +} + +// DecodeSignature serializes the signature. 
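+// More precisely, it decodes the gob payload of a protocol.Message produced by encodeSignature back into a
+// *curves.EcdsaSignature. Round-trip sketch (assuming a completed signing run produced sig):
+//
+//	msg, _ := encodeSignature(sig, protocol.Version1)
+//	recovered, err := DecodeSignature(msg)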
+func DecodeSignature(m *protocol.Message) (*curves.EcdsaSignature, error) { + if m.Version != protocol.Version1 { + return nil, errors.New("only version 1 is supported") + } + buf := bytes.NewBuffer(m.Payloads[payloadKey]) + dec := gob.NewDecoder(buf) + decoded := &curves.EcdsaSignature{} + if err := dec.Decode(decoded); err != nil { + return nil, errors.WithStack(err) + } + return decoded, nil +} diff --git a/ted25519/frost/README.md b/ted25519/frost/README.md new file mode 100755 index 0000000..c7ecc4a --- /dev/null +++ b/ted25519/frost/README.md @@ -0,0 +1,13 @@ +--- +aliases: [README] +tags: [] +title: README +linter-yaml-title-alias: README +date created: Wednesday, April 17th 2024, 4:11:40 pm +date modified: Thursday, April 18th 2024, 8:19:25 am +--- + +## FROST: Flexible Round-Optimized Schnorr Threshold Signatures + +This package is an implementation of t-of-n threshold signature of +[FROST: Flexible Round-Optimized Schnorr Threshold Signatures](https://eprint.iacr.org/2020/852.pdf) diff --git a/ted25519/frost/challenge_derive.go b/ted25519/frost/challenge_derive.go new file mode 100644 index 0000000..836745a --- /dev/null +++ b/ted25519/frost/challenge_derive.go @@ -0,0 +1,31 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package frost + +import ( + "crypto/sha512" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +type ChallengeDerive interface { + DeriveChallenge(msg []byte, pubKey curves.Point, r curves.Point) (curves.Scalar, error) +} + +type Ed25519ChallengeDeriver struct{} + +func (ed Ed25519ChallengeDeriver) DeriveChallenge( + msg []byte, + pubKey curves.Point, + r curves.Point, +) (curves.Scalar, error) { + h := sha512.New() + _, _ = h.Write(r.ToAffineCompressed()) + _, _ = h.Write(pubKey.ToAffineCompressed()) + _, _ = h.Write(msg) + return new(curves.ScalarEd25519).SetBytesWide(h.Sum(nil)) +} diff --git a/ted25519/frost/participant.go b/ted25519/frost/participant.go new file mode 100644 index 0000000..bf6003d --- /dev/null +++ b/ted25519/frost/participant.go @@ -0,0 +1,89 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +// Package frost is an implementation of t-of-n threshold signature of https://eprint.iacr.org/2020/852.pdf +package frost + +import ( + "fmt" + + "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/dkg/frost" + "github.com/sonr-io/sonr/crypto/internal" +) + +// Signer is a tSchnorr player performing the signing operation. +type Signer struct { + skShare curves.Scalar // secret signing share for this signer + vkShare curves.Point // store verification key share + verificationKey curves.Point // verification key + id uint32 // The ID assigned to this signer's shamir share + threshold uint32 + curve *curves.Curve + round uint + lCoeffs map[uint32]curves.Scalar // lCoeffs are Lagrange coefficients of each cosigner. 
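+ // (callers typically precompute these via sharing.NewShamir(threshold, limit, curve).LagrangeCoeffs(cosignerIds),
+ // as the tests in rounds_test.go do.)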
+ cosigners []uint32 + state *state // Accumulated intermediate values associated with signing + challengeDeriver ChallengeDerive +} + +type state struct { + // Round 1 + capD, capE curves.Point // capD, capE are commitments this signer generates in signing round 1 + smallD, smallE curves.Scalar // smallD, smallE are scalars this signer generates in signing round 1 + + // Round 2 + commitments map[uint32]*Round1Bcast // Store commitments broadcast after signing round 1 + msg []byte + c curves.Scalar + capRs map[uint32]curves.Point + sumR curves.Point +} + +// NewSigner create a signer from a dkg participant +// Note that we can pre-assign Lagrange coefficients lcoeffs of each cosigner. This optimizes performance. +// See paragraph 3 of section 3 in the draft - https://tools.ietf.org/pdf/draft-komlo-frost-00.pdf +func NewSigner( + info *frost.DkgParticipant, + id, thresh uint32, + lcoeffs map[uint32]curves.Scalar, + cosigners []uint32, + challengeDeriver ChallengeDerive, +) (*Signer, error) { + if info == nil || len(cosigners) == 0 || len(lcoeffs) == 0 { + return nil, internal.ErrNilArguments + } + + if thresh > uint32(len(cosigners)) { + return nil, fmt.Errorf("threshold is higher than number of signers") + } + + if len(lcoeffs) != len(cosigners) { + return nil, fmt.Errorf("expected coefficients to be equal to number of cosigners") + } + + // Check if cosigners and lcoeffs contain the same IDs + for i := 0; i < len(cosigners); i++ { + id := cosigners[i] + if _, ok := lcoeffs[id]; !ok { + return nil, fmt.Errorf("lcoeffs and cosigners have inconsistent ID") + } + } + + return &Signer{ + skShare: info.SkShare, + vkShare: info.VkShare, + verificationKey: info.VerificationKey, + id: id, + threshold: thresh, + curve: info.Curve, + round: 1, + lCoeffs: lcoeffs, + cosigners: cosigners, + state: &state{}, + challengeDeriver: challengeDeriver, + }, nil +} diff --git a/ted25519/frost/round1.go b/ted25519/frost/round1.go new file mode 100644 index 0000000..5b0ea8a --- /dev/null +++ b/ted25519/frost/round1.go @@ -0,0 +1,78 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package frost + +import ( + "bytes" + crand "crypto/rand" + "encoding/gob" + + "github.com/pkg/errors" + + "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/internal" +) + +// Round1Bcast contains values to be broadcast to all players after the completion of signing round 1. 
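+// Di = di*G and Ei = ei*G are the signer's pair of nonce commitments from SignRound1; every cosigner's pair is
+// then passed into SignRound2 as part of round2Input.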
+type Round1Bcast struct {
+ Di, Ei curves.Point
+}
+
+func (result *Round1Bcast) Encode() ([]byte, error) {
+ gob.Register(result.Di) // just the point for now
+ gob.Register(result.Ei)
+ buf := &bytes.Buffer{}
+ enc := gob.NewEncoder(buf)
+ if err := enc.Encode(result); err != nil {
+ return nil, errors.Wrap(err, "couldn't encode round 1 broadcast")
+ }
+ return buf.Bytes(), nil
+}
+
+func (result *Round1Bcast) Decode(input []byte) error {
+ buf := bytes.NewBuffer(input)
+ dec := gob.NewDecoder(buf)
+ if err := dec.Decode(result); err != nil {
+ return errors.Wrap(err, "couldn't decode round 1 broadcast")
+ }
+ return nil
+}
+
+func (signer *Signer) SignRound1() (*Round1Bcast, error) {
+ // Make sure signer is not empty
+ if signer == nil || signer.curve == nil {
+ return nil, internal.ErrNilArguments
+ }
+
+ // Make sure round number is correct
+ if signer.round != 1 {
+ return nil, internal.ErrInvalidRound
+ }
+
+ // Step 1 - Sample di, ei
+ di := signer.curve.Scalar.Random(crand.Reader)
+
+ ei := signer.curve.Scalar.Random(crand.Reader)
+
+ // Step 2 - Compute Di, Ei
+ Di := signer.curve.ScalarBaseMult(di)
+
+ Ei := signer.curve.ScalarBaseMult(ei)
+
+ // Update round number
+ signer.round = 2
+
+ // Store di, ei, Di, Ei locally and broadcast Di, Ei
+ signer.state.capD = Di
+ signer.state.capE = Ei
+ signer.state.smallD = di
+ signer.state.smallE = ei
+ return &Round1Bcast{
+ Di,
+ Ei,
+ }, nil
+}
diff --git a/ted25519/frost/round2.go b/ted25519/frost/round2.go new file mode 100644 index 0000000..dca71cb --- /dev/null +++ b/ted25519/frost/round2.go @@ -0,0 +1,189 @@ +//
+// Copyright Coinbase, Inc. All Rights Reserved.
+//
+// SPDX-License-Identifier: Apache-2.0
+//
+
+package frost
+
+import (
+ "bytes"
+ "encoding/gob"
+ "fmt"
+
+ "github.com/pkg/errors"
+
+ "github.com/sonr-io/sonr/crypto/core/curves"
+ "github.com/sonr-io/sonr/crypto/internal"
+)
+
+// Round2Bcast contains values that will be broadcast to other signers after completion of round 2.
+type Round2Bcast struct {
+ Zi curves.Scalar
+ Vki curves.Point
+}
+
+func (result *Round2Bcast) Encode() ([]byte, error) {
+ gob.Register(result.Zi)
+ gob.Register(result.Vki) // just the point for now
+ buf := &bytes.Buffer{}
+ enc := gob.NewEncoder(buf)
+ if err := enc.Encode(result); err != nil {
+ return nil, errors.Wrap(err, "couldn't encode round 2 broadcast")
+ }
+ return buf.Bytes(), nil
+}
+
+func (result *Round2Bcast) Decode(input []byte) error {
+ buf := bytes.NewBuffer(input)
+ dec := gob.NewDecoder(buf)
+ if err := dec.Decode(result); err != nil {
+ return errors.Wrap(err, "couldn't decode round 2 broadcast")
+ }
+ return nil
+}
+
+// SignRound2 implements FROST signing round 2.
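+// Given every cosigner's (Dj, Ej) from round 1, the signer computes (mirroring the numbered steps below):
+//
+//	rj = H(j, m, {Dj, Ej})       // per-cosigner binding value
+//	Rj = Dj + rj*Ej,  R = Σ Rj   // group commitment
+//	c  = H(m, R)                 // challenge from the ChallengeDeriver
+//	zi = di + ei*ri + Li*ski*c   // this signer's response
+//
+// and broadcasts (zi, vki) to the other cosigners.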
+func (signer *Signer) SignRound2( + msg []byte, + round2Input map[uint32]*Round1Bcast, +) (*Round2Bcast, error) { + // Make sure necessary items of signer are not empty + if signer == nil || signer.curve == nil || signer.state == nil { + return nil, internal.ErrNilArguments + } + + // Make sure those private d is not empty and not zero + if signer.state.smallD == nil || signer.state.smallD.IsZero() { + return nil, fmt.Errorf("empty d or d is zero") + } + + // Make sure those private e is not empty and not zero + if signer.state.smallE == nil || signer.state.smallE.IsZero() { + return nil, fmt.Errorf("empty e or e is zero") + } + + // Make sure msg is not empty + if len(msg) == 0 { + return nil, internal.ErrNilArguments + } + + // Make sure the round number is correct + if signer.round != 2 { + return nil, internal.ErrInvalidRound + } + + // Check length of round2Input + if uint32(len(round2Input)) != signer.threshold { + return nil, fmt.Errorf("invalid length of round2Input") + } + + // Step 2 - Check Dj, Ej on the curve and Store round2Input + for id, input := range round2Input { + if input == nil || input.Di == nil || input.Ei == nil { + return nil, fmt.Errorf("round2Input is nil from participant with id %d", id) + } + if !input.Di.IsOnCurve() || input.Di.IsIdentity() { + return nil, fmt.Errorf("commitment Di is not on the curve with id %d", id) + } + if !input.Ei.IsOnCurve() || input.Ei.IsIdentity() { + return nil, fmt.Errorf("commitment Ei is not on the curve with id %d", id) + } + } + // Store Dj, Ej for further usage. + signer.state.commitments = round2Input + + // Step 3-6 + R := signer.curve.NewIdentityPoint() + var ri curves.Scalar + Rs := make(map[uint32]curves.Point, signer.threshold) + for id, data := range round2Input { + // Construct the blob (j, m, {Dj, Ej}) + blob := concatHashArray(id, msg, round2Input, signer.cosigners) + + // Step 4 - rj = H(j,m,{Dj,Ej}_{j in [1...t]}) + rj := signer.curve.Scalar.Hash(blob) + if signer.id == id { + ri = rj + } + + // Step 5 - R_j = D_j + r_j*E_j + rjEj := data.Ei.Mul(rj) + Rj := rjEj.Add(data.Di) + // assign Rj + Rs[id] = Rj + + // Step 6 - R = R+Rj + R = R.Add(Rj) + } + + // Step 7 - c = H(m, R) + c, err := signer.challengeDeriver.DeriveChallenge(msg, signer.verificationKey, R) + if err != nil { + return nil, err + } + + // Step 8 - Record c, R, Rjs + signer.state.c = c + signer.state.capRs = Rs + signer.state.sumR = R + + // Step 9 - zi = di + ei*ri + Li*ski*c + Li := signer.lCoeffs[signer.id] + Liski := Li.Mul(signer.skShare) + + Liskic := Liski.Mul(c) + + if R.IsNegative() { + signer.state.smallE = signer.state.smallE.Neg() + signer.state.smallD = signer.state.smallD.Neg() + } + + eiri := signer.state.smallE.Mul(ri) + + // Compute zi = di+ei*ri+Li*ski*c + zi := Liskic.Add(eiri) + zi = zi.Add(signer.state.smallD) + + // Update round number and store message + signer.round = 3 + signer.state.msg = msg + + // set smallD and smallE to zero since they are one-time use + signer.state.smallD = signer.curve.NewScalar() + signer.state.smallE = signer.curve.NewScalar() + + // Step 10 - Broadcast zi, vki to other participants + return &Round2Bcast{ + zi, + signer.vkShare, + }, nil +} + +// concatHashArray puts id, msg and (Dj,Ej), j=1...t into a byte array +func concatHashArray( + id uint32, + msg []byte, + round2Input map[uint32]*Round1Bcast, + cosigners []uint32, +) []byte { + var blob []byte + // Append identity id + blob = append(blob, byte(id)) + + // Append message msg + blob = append(blob, msg...) 
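+ // Note: identifiers are appended as a single byte (byte(id)) both above and in the loop below, so this
+ // hash binding assumes participant IDs fit in 0..255.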
+ + // Append (Dj, Ej) for all j in [1...t] + for i := 0; i < len(cosigners); i++ { + id := cosigners[i] + bytesDi := round2Input[id].Di.ToAffineCompressed() + bytesEi := round2Input[id].Ei.ToAffineCompressed() + + // Following the spec, we should add each party's identity. + blob = append(blob, byte(id)) + blob = append(blob, bytesDi...) + blob = append(blob, bytesEi...) + } + return blob +} diff --git a/ted25519/frost/round3.go b/ted25519/frost/round3.go new file mode 100644 index 0000000..732fd66 --- /dev/null +++ b/ted25519/frost/round3.go @@ -0,0 +1,169 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package frost + +import ( + "fmt" + + "github.com/sonr-io/sonr/crypto/core/curves" + "github.com/sonr-io/sonr/crypto/internal" +) + +// Round3Bcast contains the output of FROST signature, i.e., it contains FROST signature (z,c) and the +// corresponding message msg. +type Round3Bcast struct { + R curves.Point + Z, C curves.Scalar + msg []byte +} + +// Define frost signature type +type Signature struct { + Z curves.Scalar + C curves.Scalar +} + +func (signer *Signer) SignRound3(round3Input map[uint32]*Round2Bcast) (*Round3Bcast, error) { + // Make sure signer is not empty + if signer == nil || signer.curve == nil { + return nil, internal.ErrNilArguments + } + + // Make sure signer's smallD and smallE are zero + if !signer.state.smallD.IsZero() || !signer.state.smallE.IsZero() { + return nil, fmt.Errorf( + "signer's private smallD and smallE should be zero since one-time use", + ) + } + + // Make sure the signer has had the msg + if len(signer.state.msg) == 0 { + return nil, internal.ErrNilArguments + } + + // Validate Round3Input + if round3Input == nil { + return nil, internal.ErrNilArguments + } + for _, data := range round3Input { + if data == nil { + return nil, internal.ErrNilArguments + } + } + + // Make sure the signer has commitments stored at the end of round 1. 
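+ // (signer.state.commitments is populated during SignRound2 from the round-1 broadcasts, so a nil or
+ // mismatched map means round 2 did not complete for this signer.)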
+ if signer.state.commitments == nil || len(signer.state.commitments) != len(round3Input) { + return nil, internal.ErrNilArguments + } + + // Make sure the round number is correct + if signer.round != 3 { + return nil, internal.ErrInvalidRound + } + + // Round2 Input has different length of threshold + if uint32(len(round3Input)) != signer.threshold { + return nil, fmt.Errorf("invalid length of round3Input") + } + + // Step 1-3 + // Step 1: For j in [1...t] + z := signer.curve.NewScalar() + negate := signer.state.sumR.IsNegative() + for id, data := range round3Input { + zj := data.Zi + vkj := data.Vki + + // Step 2: Verify zj*G = Rj + c*Lj*vkj + // zj*G + zjG := signer.curve.ScalarBaseMult(zj) + + // c*Lj + cLj := signer.state.c.Mul(signer.lCoeffs[id]) + + // cLjvkj + cLjvkj := vkj.Mul(cLj) + + // Rj + c*Lj*vkj + Rj := signer.state.capRs[id] + if negate { + Rj = Rj.Neg() + } + right := cLjvkj.Add(Rj) + + // Check equation + if !zjG.Equal(right) { + return nil, fmt.Errorf("zjG != right with participant id %d", id) + } + + // Step 3 - z = z+zj + z = z.Add(zj) + } + + // Step 4 - 7: Self verify the signature (z, c) + // Step 5 - R' = z*G + (-c)*vk + zG := signer.curve.ScalarBaseMult(z) + cvk := signer.verificationKey.Mul(signer.state.c.Neg()) + tempR := zG.Add(cvk) + // Step 6 - c' = H(m, R') + tempC, err := signer.challengeDeriver.DeriveChallenge( + signer.state.msg, + signer.verificationKey, + tempR, + ) + if err != nil { + return nil, err + } + + // Step 7 - Check c = c' + if tempC.Cmp(signer.state.c) != 0 { + return nil, fmt.Errorf("invalid signature: c != c'") + } + + // Updating round number + signer.round = 4 + + // Step 8 - Broadcast signature and message + return &Round3Bcast{ + signer.state.sumR, + z, + signer.state.c, + signer.state.msg, + }, nil +} + +// Method to verify a frost signature. +func Verify( + curve *curves.Curve, + challengeDeriver ChallengeDerive, + vk curves.Point, + msg []byte, + signature *Signature, +) (bool, error) { + if vk == nil || msg == nil || len(msg) == 0 || signature.C == nil || signature.Z == nil { + return false, fmt.Errorf("invalid input") + } + z := signature.Z + c := signature.C + + // R' = z*G - c*vk + zG := curve.ScalarBaseMult(z) + cvk := vk.Mul(c.Neg()) + tempR := zG.Add(cvk) + + // c' = H(m, R') + tempC, err := challengeDeriver.DeriveChallenge(msg, vk, tempR) + if err != nil { + return false, err + } + + // Check c == c' + if tempC.Cmp(c) != 0 { + return false, fmt.Errorf("invalid signature: c != c'") + } + return true, nil +} diff --git a/ted25519/frost/rounds_test.go b/ted25519/frost/rounds_test.go new file mode 100644 index 0000000..daed822 --- /dev/null +++ b/ted25519/frost/rounds_test.go @@ -0,0 +1,439 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package frost + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core/curves" + dkg "github.com/sonr-io/sonr/crypto/dkg/frost" + "github.com/sonr-io/sonr/crypto/sharing" +) + +var ( + testCurve = curves.ED25519() + ctx = "string to prevent replay attack" +) + +// Create two DKG participants. 
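+// It runs a 2-of-2 FROST DKG (rounds 1 and 2) so the signing tests have matching key shares to work with.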
+func PrepareDkgOutput(t *testing.T) (*dkg.DkgParticipant, *dkg.DkgParticipant) { + // Initiate two participants and running DKG round 1 + p1, err := dkg.NewDkgParticipant(1, 2, ctx, testCurve, 2) + require.NoError(t, err) + p2, err := dkg.NewDkgParticipant(2, 2, ctx, testCurve, 1) + require.NoError(t, err) + bcast1, p2psend1, _ := p1.Round1(nil) + bcast2, p2psend2, _ := p2.Round1(nil) + bcast := make(map[uint32]*dkg.Round1Bcast) + p2p1 := make(map[uint32]*sharing.ShamirShare) + p2p2 := make(map[uint32]*sharing.ShamirShare) + bcast[1] = bcast1 + bcast[2] = bcast2 + p2p1[2] = p2psend2[1] + p2p2[1] = p2psend1[2] + + // Running DKG round 2 + _, _ = p1.Round2(bcast, p2p1) + _, _ = p2.Round2(bcast, p2p2) + return p1, p2 +} + +// Test FROST signing round 1 +func TestSignRound1Works(t *testing.T) { + p1, p2 := PrepareDkgOutput(t) + require.NotNil(t, p1) + require.NotNil(t, p2) + + scheme, _ := sharing.NewShamir(2, 2, testCurve) + lCoeffs, err := scheme.LagrangeCoeffs([]uint32{p1.Id, p2.Id}) + require.NoError(t, err) + signer1, err := NewSigner(p1, 1, 2, lCoeffs, []uint32{1, 2}, &Ed25519ChallengeDeriver{}) + require.NoError(t, err) + round1Out, _ := signer1.SignRound1() + require.NotNil(t, round1Out.Ei) + require.NotNil(t, round1Out.Di) + require.NotNil(t, signer1.state.smallE) + require.NotNil(t, signer1.state.smallD) + require.NotNil(t, signer1.state.capD) + require.NotNil(t, signer1.state.capE) + require.Equal(t, signer1.round, uint(2)) + require.Equal(t, signer1.cosigners, []uint32{1, 2}) +} + +func TestSignRound1RepeatCall(t *testing.T) { + p1, p2 := PrepareDkgOutput(t) + scheme, _ := sharing.NewShamir(2, 2, testCurve) + lCoeffs, err := scheme.LagrangeCoeffs([]uint32{p1.Id, p2.Id}) + require.NoError(t, err) + signer1, _ := NewSigner(p1, 1, 2, lCoeffs, []uint32{1, 2}, &Ed25519ChallengeDeriver{}) + _, err = signer1.SignRound1() + require.NoError(t, err) + _, err = signer1.SignRound1() + require.Error(t, err) +} + +func PrepareNewSigners(t *testing.T) (*Signer, *Signer) { + threshold := uint32(2) + limit := uint32(2) + p1, p2 := PrepareDkgOutput(t) + require.Equal(t, p1.VerificationKey, p2.VerificationKey) + scheme, err := sharing.NewShamir(threshold, limit, testCurve) + // field = sharing.NewField(p1.curve.Params().N) + require.NotNil(t, scheme) + require.NoError(t, err) + lCoeffs, err := scheme.LagrangeCoeffs([]uint32{p1.Id, p2.Id}) + require.NotNil(t, lCoeffs[1]) + require.NotNil(t, lCoeffs[2]) + require.NoError(t, err) + signer1, err := NewSigner( + p1, + p1.Id, + threshold, + lCoeffs, + []uint32{p1.Id, p2.Id}, + &Ed25519ChallengeDeriver{}, + ) + require.NotNil(t, signer1) + require.NoError(t, err) + signer2, err := NewSigner( + p2, + p2.Id, + threshold, + lCoeffs, + []uint32{p1.Id, p2.Id}, + &Ed25519ChallengeDeriver{}, + ) + require.NotNil(t, signer2) + require.NoError(t, err) + return signer1, signer2 +} + +func TestSignRound2Works(t *testing.T) { + // Preparing round 2 inputs + signer1, signer2 := PrepareNewSigners(t) + require.Equal(t, signer1.verificationKey, signer2.verificationKey) + require.NotNil(t, signer1) + require.NotNil(t, signer2) + round1Out1, _ := signer1.SignRound1() + round1Out2, _ := signer2.SignRound1() + round2Input := make(map[uint32]*Round1Bcast) + round2Input[signer1.id] = round1Out1 + round2Input[signer2.id] = round1Out2 + + // Actual Test + msg := []byte("message") + round2Out, err := signer1.SignRound2(msg, round2Input) + require.NotNil(t, round2Out) + require.NoError(t, err) + require.Equal(t, signer1.round, uint(3)) + require.NotNil(t, signer1.state.commitments) 
+ require.Equal(t, signer1.state.msg, msg) + require.True(t, signer1.state.smallD.IsZero()) + require.True(t, signer1.state.smallE.IsZero()) +} + +func TestSignRound2RepeatCall(t *testing.T) { + // Preparing round 2 inputs + signer1, signer2 := PrepareNewSigners(t) + require.NotNil(t, signer1) + require.NotNil(t, signer2) + round1Out1, _ := signer1.SignRound1() + round1Out2, _ := signer2.SignRound1() + round2Input := make(map[uint32]*Round1Bcast) + round2Input[signer1.id] = round1Out1 + round2Input[signer2.id] = round1Out2 + + // Actual Test + msg := []byte("message") + _, err := signer1.SignRound2(msg, round2Input) + require.NoError(t, err) + _, err = signer1.SignRound2(msg, round2Input) + require.Error(t, err) +} + +func TestSignRound2BadInput(t *testing.T) { + // Preparing round 2 inputs + signer1, signer2 := PrepareNewSigners(t) + _, _ = signer1.SignRound1() + round1Out2, _ := signer2.SignRound1() + round2Input := make(map[uint32]*Round1Bcast) + + // Actual Test: Set an input to nil + round2Input[signer1.id] = nil + round2Input[signer2.id] = round1Out2 + msg := []byte("message") + _, err := signer1.SignRound2(msg, round2Input) + require.Error(t, err) + + // Preparing round 2 inputs + signer1, signer2 = PrepareNewSigners(t) + round1Out1, _ := signer1.SignRound1() + round1Out2, _ = signer2.SignRound1() + round2Input = make(map[uint32]*Round1Bcast) + round2Input[signer1.id] = round1Out1 + round2Input[signer2.id] = round1Out2 + // Actual Test: Nil message + _, err = signer1.SignRound2(nil, round2Input) + require.Error(t, err) + + // Preparing round 2 inputs + signer1, signer2 = PrepareNewSigners(t) + round1Out1, _ = signer1.SignRound1() + round1Out2, _ = signer2.SignRound1() + round2Input = make(map[uint32]*Round1Bcast) + + // Actual Test: Set invalid round2Input length + round2Input[signer1.id] = round1Out1 + round2Input[signer2.id] = round1Out2 + round2Input[3] = round1Out2 + _, err = signer1.SignRound2(msg, round2Input) + require.Error(t, err) + + // Preparing round 2 inputs + signer1, signer2 = PrepareNewSigners(t) + round1Out1, _ = signer1.SignRound1() + round1Out2, _ = signer2.SignRound1() + round2Input = make(map[uint32]*Round1Bcast) + + // Actual Test: Set invalid round2Input length + round1Out2.Ei = nil + round2Input[signer1.id] = round1Out1 + round2Input[signer2.id] = round1Out2 + _, err = signer1.SignRound2(msg, round2Input) + require.Error(t, err) + + // Preparing round 2 inputs + signer1, signer2 = PrepareNewSigners(t) + round1Out1, _ = signer1.SignRound1() + round1Out2, _ = signer2.SignRound1() + round2Input = make(map[uint32]*Round1Bcast) + round2Input[signer1.id] = round1Out1 + round2Input[signer2.id] = round1Out2 + + // Actual Test: Set nil smallD and smallE + signer1.state.smallD = testCurve.NewScalar() + signer1.state.smallE = nil + _, err = signer1.SignRound2(msg, round2Input) + require.Error(t, err) +} + +func PrepareRound3Input(t *testing.T) (*Signer, *Signer, map[uint32]*Round2Bcast) { + // Running sign round 1 + threshold := uint32(2) + limit := uint32(2) + p1, p2 := PrepareDkgOutput(t) + require.Equal(t, p1.VerificationKey, p2.VerificationKey) + scheme, err := sharing.NewShamir(threshold, limit, testCurve) + require.NotNil(t, scheme) + require.NoError(t, err) + lCoeffs, err := scheme.LagrangeCoeffs([]uint32{p1.Id, p2.Id}) + require.NotNil(t, lCoeffs[1]) + require.NotNil(t, lCoeffs[2]) + require.NoError(t, err) + + signer1, err := NewSigner( + p1, + p1.Id, + threshold, + lCoeffs, + []uint32{p1.Id, p2.Id}, + &Ed25519ChallengeDeriver{}, + ) + require.NotNil(t, 
signer1) + require.NoError(t, err) + signer2, err := NewSigner( + p2, + p2.Id, + threshold, + lCoeffs, + []uint32{p1.Id, p2.Id}, + &Ed25519ChallengeDeriver{}, + ) + require.NotNil(t, signer2) + require.NoError(t, err) + + round1Out1, _ := signer1.SignRound1() + round1Out2, _ := signer2.SignRound1() + round2Input := make(map[uint32]*Round1Bcast, threshold) + round2Input[signer1.id] = round1Out1 + round2Input[signer2.id] = round1Out2 + + // Running sign round 2 + msg := []byte("message") + round2Out1, _ := signer1.SignRound2(msg, round2Input) + round2Out2, _ := signer2.SignRound2(msg, round2Input) + round3Input := make(map[uint32]*Round2Bcast, threshold) + round3Input[signer1.id] = round2Out1 + round3Input[signer2.id] = round2Out2 + return signer1, signer2, round3Input +} + +func TestSignRound3Works(t *testing.T) { + signer1, signer2, round3Input := PrepareRound3Input(t) + round3Out1, err := signer1.SignRound3(round3Input) + require.NoError(t, err) + require.NotNil(t, round3Out1) + round3Out2, err := signer2.SignRound3(round3Input) + require.NoError(t, err) + require.NotNil(t, round3Out2) + // signer1 and signer2 outputs the same signature + require.Equal(t, round3Out1.Z, round3Out2.Z) + require.Equal(t, round3Out1.C, round3Out2.C) + + // test verify method + msg := []byte("message") + signature := &Signature{ + round3Out1.Z, + round3Out1.C, + } + vk := signer1.verificationKey + ok, err := Verify(signer1.curve, signer1.challengeDeriver, vk, msg, signature) + require.True(t, ok) + require.NoError(t, err) + + ok, err = Verify(signer2.curve, signer2.challengeDeriver, vk, msg, signature) + require.True(t, ok) + require.NoError(t, err) +} + +func TestSignRound3RepeatCall(t *testing.T) { + signer1, _, round3Input := PrepareRound3Input(t) + _, err := signer1.SignRound3(round3Input) + require.NoError(t, err) + _, err = signer1.SignRound3(round3Input) + require.Error(t, err) +} + +func TestSignRound3BadInput(t *testing.T) { + signer1, _, round3Input := PrepareRound3Input(t) + + // Actual test: nil input + round3Input[signer1.id] = nil + _, err := signer1.SignRound3(round3Input) + require.Error(t, err) + round3Input = nil + _, err = signer1.SignRound3(round3Input) + require.Error(t, err) + + // Actual test: set invalid length of round3Input + signer1, _, round3Input = PrepareRound3Input(t) + round3Input[100] = round3Input[signer1.id] + _, err = signer1.SignRound3(round3Input) + require.Error(t, err) + + // Actual test: maul the round3Input + signer1, _, round3Input = PrepareRound3Input(t) + round3Input[signer1.id].Zi = round3Input[signer1.id].Zi.Add(testCurve.Scalar.New(2)) + _, err = signer1.SignRound3(round3Input) + require.Error(t, err) + + // Actual test: set non-zero smallD and smallE + signer1, _, round3Input = PrepareRound3Input(t) + signer1.state.smallD = testCurve.Scalar.New(1) + _, err = signer1.SignRound3(round3Input) + require.Error(t, err) +} + +func TestFullRoundsWorks(t *testing.T) { + // Give a full-round test (FROST DKG + FROST Signing) with threshold = 2 and limit = 3, same as the test of tECDSA + threshold := 2 + limit := 3 + + // Prepare DKG participants + participants := make(map[uint32]*dkg.DkgParticipant, limit) + for i := 1; i <= limit; i++ { + otherIds := make([]uint32, limit-1) + idx := 0 + for j := 1; j <= limit; j++ { + if i == j { + continue + } + otherIds[idx] = uint32(j) + idx++ + } + p, err := dkg.NewDkgParticipant(uint32(i), uint32(threshold), ctx, testCurve, otherIds...) 
+ require.NoError(t, err) + participants[uint32(i)] = p + } + + // FROST DKG round 1 + rnd1Bcast := make(map[uint32]*dkg.Round1Bcast, len(participants)) + rnd1P2p := make(map[uint32]dkg.Round1P2PSend, len(participants)) + for id, p := range participants { + bcast, p2psend, err := p.Round1(nil) + require.NoError(t, err) + rnd1Bcast[id] = bcast + rnd1P2p[id] = p2psend + } + + // FROST DKG round 2 + for id := range rnd1Bcast { + rnd1P2pForP := make(map[uint32]*sharing.ShamirShare) + for jid := range rnd1P2p { + if jid == id { + continue + } + rnd1P2pForP[jid] = rnd1P2p[jid][id] + } + _, err := participants[id].Round2(rnd1Bcast, rnd1P2pForP) + require.NoError(t, err) + } + + // Prepare Lagrange coefficients + scheme, _ := sharing.NewShamir(uint32(threshold), uint32(limit), testCurve) + + // Here we use {1, 3} as 2 of 3 cosigners, we can also set cosigners as {1, 2}, {2, 3} + signerIds := []uint32{1, 3} + lCoeffs, err := scheme.LagrangeCoeffs(signerIds) + require.NoError(t, err) + signers := make(map[uint32]*Signer, threshold) + for _, id := range signerIds { + signers[id], err = NewSigner( + participants[id], + id, + uint32(threshold), + lCoeffs, + signerIds, + &Ed25519ChallengeDeriver{}, + ) + require.NoError(t, err) + require.NotNil(t, signers[id].skShare) + } + + // Running sign round 1 + round2Input := make(map[uint32]*Round1Bcast, threshold) + for id := range signers { + round1Out, err := signers[id].SignRound1() + require.NoError(t, err) + round2Input[signers[id].id] = round1Out + } + + // Running sign round 2 + msg := []byte("message") + round3Input := make(map[uint32]*Round2Bcast, threshold) + for id := range signers { + round2Out, err := signers[id].SignRound2(msg, round2Input) + require.NoError(t, err) + round3Input[signers[id].id] = round2Out + } + + // Running sign round 3 + result := make(map[uint32]*Round3Bcast, threshold) + for id := range signers { + round3Out, err := signers[id].SignRound3(round3Input) + require.NoError(t, err) + result[signers[id].id] = round3Out + } + + // Every signer has the same output Schnorr signature + require.Equal(t, result[1].Z, result[3].Z) + // require.Equal(t, z, result[3].Z) + require.Equal(t, result[1].C, result[3].C) + // require.Equal(t, c, result[3].C) +} diff --git a/ted25519/ted25519/ed25519.go b/ted25519/ted25519/ed25519.go new file mode 100644 index 0000000..9765d90 --- /dev/null +++ b/ted25519/ted25519/ed25519.go @@ -0,0 +1,291 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package ted25519 implements the Ed25519 signature algorithm. See https://ed25519.cr.yp.to/ +// +// These functions are also compatible with the "Ed25519" function defined in +// RFC 8032. However, unlike RFC 8032's formulation, this package's private key +// representation includes a public key suffix to make multiple signing +// operations with the same key more efficient. This package refers to the RFC +// 8032 private key as the "seed". +// This code is a port of the public domain, “ref10” implementation of ed25519 +// from SUPERCOP. +package ted25519 + +import ( + "bytes" + "crypto" + cryptorand "crypto/rand" + "crypto/sha512" + "fmt" + "io" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +const ( + // PublicKeySize is the size, in bytes, of public keys as used in this package. + PublicKeySize = 32 + // PrivateKeySize is the size, in bytes, of private keys as used in this package. 
+ PrivateKeySize = 64 + // SignatureSize is the size, in bytes, of signatures generated and verified by this package. + SignatureSize = 64 + // SeedSize is the size, in bytes, of private key seeds. These are the private key representations used by RFC 8032. + SeedSize = 32 +) + +// PublicKey is the type of Ed25519 public keys. +type PublicKey []byte + +// PrivateKey is the type of Ed25519 private keys. It implements crypto.Signer. +type PrivateKey []byte + +// Bytes returns the publicKey in byte array +func (p PublicKey) Bytes() []byte { + return p +} + +// Public returns the PublicKey corresponding to priv. +func (priv PrivateKey) Public() crypto.PublicKey { + publicKey := make([]byte, PublicKeySize) + copy(publicKey, priv[32:]) + return PublicKey(publicKey) +} + +// Seed returns the private key seed corresponding to priv. It is provided for +// interoperability with RFC 8032. RFC 8032's private keys correspond to seeds +// in this package. +func (priv PrivateKey) Seed() []byte { + seed := make([]byte, SeedSize) + copy(seed, priv[:32]) + return seed +} + +// Sign signs the given message with priv. +// Ed25519 performs two passes over messages to be signed and therefore cannot +// handle pre-hashed messages. Thus opts.HashFunc() must return zero to +// indicate the message hasn't been hashed. This can be achieved by passing +// crypto.Hash(0) as the value for opts. +func (priv PrivateKey) Sign( + rand io.Reader, + message []byte, + opts crypto.SignerOpts, +) (signature []byte, err error) { + if opts.HashFunc() != crypto.Hash(0) { + return nil, fmt.Errorf("ed25519: cannot sign hashed message") + } + sig, err := Sign(priv, message) + if err != nil { + return nil, err + } + return sig, nil +} + +// GenerateKey generates a public/private key pair using entropy from rand. +// If rand is nil, crypto/rand.Reader will be used. +func GenerateKey(rand io.Reader) (PublicKey, PrivateKey, error) { + if rand == nil { + rand = cryptorand.Reader + } + + seed := make([]byte, SeedSize) + if _, err := io.ReadFull(rand, seed); err != nil { + return nil, nil, err + } + + privateKey, err := NewKeyFromSeed(seed) + if err != nil { + return nil, nil, err + } + publicKey := make([]byte, PublicKeySize) + copy(publicKey, privateKey[32:]) + + return publicKey, privateKey, nil +} + +// NewKeyFromSeed calculates a private key from a seed. It will panic if +// len(seed) is not SeedSize. This function is provided for interoperability +// with RFC 8032. RFC 8032's private keys correspond to seeds in this +// package. +func NewKeyFromSeed(seed []byte) (PrivateKey, error) { + // Outline the function body so that the returned key can be stack-allocated. + privateKey := make([]byte, PrivateKeySize) + err := newKeyFromSeed(privateKey, seed) + if err != nil { + return nil, err + } + return privateKey, nil +} + +func newKeyFromSeed(privateKey, seed []byte) error { + if l := len(seed); l != SeedSize { + return fmt.Errorf("ed25519: bad seed length: %d", l) + } + + digest := sha512.Sum512(seed) + digest[0] &= 248 + digest[31] &= 127 + digest[31] |= 64 + + var hBytes [32]byte + copy(hBytes[:], digest[:]) + + h, err := new(curves.ScalarEd25519).SetBytesClamping(hBytes[:]) + if err != nil { + return err + } + ed25519 := curves.ED25519() + A := ed25519.ScalarBaseMult(h) + + publicKeyBytes := A.ToAffineCompressed() + copy(privateKey, seed) + copy(privateKey[32:], publicKeyBytes[:]) + return nil +} + +// Sign signs the message with privateKey and returns a signature. It will +// panic if len(privateKey) is not PrivateKeySize. 
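+//
+// Typical usage (sketch; message is any byte slice):
+//
+//	pub, priv, _ := GenerateKey(nil) // a nil reader falls back to crypto/rand
+//	sig, _ := Sign(priv, message)
+//	ok, _ := Verify(pub, message, sig)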
+func Sign(privateKey PrivateKey, message []byte) ([]byte, error) { + // Outline the function body so that the returned signature can be + // stack-allocated. + signature := make([]byte, SignatureSize) + err := sign(signature, privateKey, message) + if err != nil { + return nil, err + } + return signature, nil +} + +func sign(signature, privateKey, message []byte) error { + if l := len(privateKey); l != PrivateKeySize { + return fmt.Errorf("ed25519: bad private key length: %d", l) + } + + var err error + h := sha512.New() + _, err = h.Write(privateKey[:32]) + if err != nil { + return err + } + + var digest1, messageDigest, hramDigest [64]byte + var expandedSecretKey [32]byte + _ = h.Sum(digest1[:0]) + copy(expandedSecretKey[:], digest1[:]) + expandedSecretKey[0] &= 248 + expandedSecretKey[31] &= 63 + expandedSecretKey[31] |= 64 + + h.Reset() + _, err = h.Write(digest1[32:]) + if err != nil { + return err + } + _, err = h.Write(message) + if err != nil { + return err + } + _ = h.Sum(messageDigest[:0]) + + r, err := new(curves.ScalarEd25519).SetBytesWide(messageDigest[:]) + if err != nil { + return err + } + + // R = r * G + R := curves.ED25519().Point.Generator().Mul(r) + encodedR := R.ToAffineCompressed() + + h.Reset() + _, err = h.Write(encodedR[:]) + if err != nil { + return err + } + _, err = h.Write(privateKey[32:]) + if err != nil { + return err + } + _, err = h.Write(message) + if err != nil { + return err + } + _ = h.Sum(hramDigest[:0]) + + // Set k and s + k, err := new(curves.ScalarEd25519).SetBytesWide(hramDigest[:]) + if err != nil { + return err + } + + s, err := new(curves.ScalarEd25519).SetBytesClamping(expandedSecretKey[:]) + if err != nil { + return err + } + + // S = k*s + r + S := k.MulAdd(s, r) + copy(signature[:], encodedR[:]) + copy(signature[32:], S.Bytes()[:]) + return nil +} + +// Verify reports whether sig is a valid signature of message by publicKey. It +// will panic if len(publicKey) is not PublicKeySize. +// Previously publicKey is of type PublicKey +func Verify(publicKey PublicKey, message, sig []byte) (bool, error) { + if l := len(publicKey); l != PublicKeySize { + return false, fmt.Errorf("ed25519: bad public key length: %d", l) + } + + if len(sig) != SignatureSize || sig[63]&224 != 0 { + return false, fmt.Errorf("ed25519: bad signature size: %d", len(sig)) + } + + var publicKeyBytes [32]byte + copy(publicKeyBytes[:], publicKey) + + A, err := new(curves.PointEd25519).FromAffineCompressed(publicKeyBytes[:]) + if err != nil { + return false, err + } + + // Negate sets A = -A, and returns A. 
It actually negates X and T but keep Y and Z + negA := A.Neg() + + h := sha512.New() + _, err = h.Write(sig[:32]) + if err != nil { + panic(err) + } + + _, err = h.Write(publicKey[:]) + if err != nil { + return false, err + } + + _, err = h.Write(message) + if err != nil { + return false, err + } + + var digest [64]byte + _ = h.Sum(digest[:0]) + + hReduced, err := new(curves.ScalarEd25519).SetBytesWide(digest[:]) + if err != nil { + return false, err + } + + var s [32]byte + copy(s[:], sig[32:]) + sScalar, err := new(curves.ScalarEd25519).SetBytesCanonical(s[:]) + if err != nil { + return false, err + } + + // R' = hash * A + s * BasePoint + R := new(curves.PointEd25519).VarTimeDoubleScalarBaseMult(hReduced, negA, sScalar) + // Check R == R' + return bytes.Equal(sig[:32], R.ToAffineCompressed()), nil +} diff --git a/ted25519/ted25519/ed25519_test.go b/ted25519/ted25519/ed25519_test.go new file mode 100644 index 0000000..01366a1 --- /dev/null +++ b/ted25519/ted25519/ed25519_test.go @@ -0,0 +1,164 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ted25519 + +import ( + "bytes" + "crypto" + "crypto/rand" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +// sign.input.gz is a selection of test cases from +// https://ed25519.cr.yp.to/python/sign.input + +type zeroReader struct{} + +func (zeroReader) Read(buf []byte) (int, error) { + for i := range buf { + buf[i] = 0 + } + return len(buf), nil +} + +func TestUnmarshalMarshal(t *testing.T) { + pub, _, err := GenerateKey(rand.Reader) + require.NoError(t, err) + + var publicKeyBytes [32]byte + copy(publicKeyBytes[:], pub) + + A, err := new(curves.PointEd25519).FromAffineCompressed(publicKeyBytes[:]) + require.NoError(t, err) + var pub2 [32]byte + copy(pub2[:], A.ToAffineCompressed()) + + if publicKeyBytes != pub2 { + t.Errorf("FromBytes(%v)->ToBytes does not round-trip, got %x\n", publicKeyBytes, pub2) + } +} + +func TestSignVerify(t *testing.T) { + var zero zeroReader + public, private, err := GenerateKey(zero) + require.NoError(t, err) + + message := []byte("test message") + sig, err := Sign(private, message) + require.NoError(t, err) + ok, _ := Verify(public, message, sig) + require.True(t, ok) + + wrongMessage := []byte("wrong message") + ok, _ = Verify(public, wrongMessage, sig) + require.True(t, !ok) +} + +func TestCryptoSigner(t *testing.T) { + var zero zeroReader + public, private, _ := GenerateKey(zero) + + signer := crypto.Signer(private) + + publicInterface := signer.Public() + public2, ok := publicInterface.(PublicKey) + if !ok { + t.Fatalf("expected PublicKey from Public() but got %T", publicInterface) + } + + if !bytes.Equal(public, public2) { + t.Errorf("public keys do not match: original:%x vs Public():%x", public, public2) + } + + message := []byte("message") + var noHash crypto.Hash + signature, err := signer.Sign(zero, message, noHash) + if err != nil { + t.Fatalf("error from Sign(): %s", err) + } + + ok, _ = Verify(public, message, signature) + if !ok { + t.Errorf("Verify failed on signature from Sign()") + } +} + +func TestMalleability(t *testing.T) { + // https://tools.ietf.org/html/rfc8032#section-5.1.7 adds an additional test + // that s be in [0, order). 
This prevents someone from adding a multiple of + // order to s and obtaining a second valid signature for the same message. + msg := []byte{0x54, 0x65, 0x73, 0x74} + sig := []byte{ + 0x7c, 0x38, 0xe0, 0x26, 0xf2, 0x9e, 0x14, 0xaa, 0xbd, 0x05, 0x9a, + 0x0f, 0x2d, 0xb8, 0xb0, 0xcd, 0x78, 0x30, 0x40, 0x60, 0x9a, 0x8b, + 0xe6, 0x84, 0xdb, 0x12, 0xf8, 0x2a, 0x27, 0x77, 0x4a, 0xb0, 0x67, + 0x65, 0x4b, 0xce, 0x38, 0x32, 0xc2, 0xd7, 0x6f, 0x8f, 0x6f, 0x5d, + 0xaf, 0xc0, 0x8d, 0x93, 0x39, 0xd4, 0xee, 0xf6, 0x76, 0x57, 0x33, + 0x36, 0xa5, 0xc5, 0x1e, 0xb6, 0xf9, 0x46, 0xb3, 0x1d, + } + publicKey := []byte{ + 0x7d, 0x4d, 0x0e, 0x7f, 0x61, 0x53, 0xa6, 0x9b, 0x62, 0x42, 0xb5, + 0x22, 0xab, 0xbe, 0xe6, 0x85, 0xfd, 0xa4, 0x42, 0x0f, 0x88, 0x34, + 0xb1, 0x08, 0xc3, 0xbd, 0xae, 0x36, 0x9e, 0xf5, 0x49, 0xfa, + } + + ok, _ := Verify(publicKey, msg, sig) + if ok { + t.Fatal("non-canonical signature accepted") + } +} + +func BenchmarkKeyGeneration(b *testing.B) { + var zero zeroReader + for i := 0; i < b.N; i++ { + if _, _, err := GenerateKey(zero); err != nil { + b.Fatal(err) + } + } +} + +func BenchmarkNewKeyFromSeed(b *testing.B) { + seed := make([]byte, SeedSize) + b.ReportAllocs() + for i := 0; i < b.N; i++ { + _, _ = NewKeyFromSeed(seed) + } +} + +func BenchmarkSigning(b *testing.B) { + var zero zeroReader + _, priv, err := GenerateKey(zero) + if err != nil { + b.Fatal(err) + } + message := []byte("Hello, world!") + b.ReportAllocs() + b.ResetTimer() + for i := 0; i < b.N; i++ { + _, _ = Sign(priv, message) + } +} + +func BenchmarkVerification(b *testing.B) { + var zero zeroReader + pub, priv, err := GenerateKey(zero) + if err != nil { + b.Fatal(err) + } + message := []byte("Hello, world!") + signature, _ := Sign(priv, message) + b.ResetTimer() + for i := 0; i < b.N; i++ { + _, _ = Verify(pub, message, signature) + } +} diff --git a/ted25519/ted25519/ext.go b/ted25519/ted25519/ext.go new file mode 100644 index 0000000..bd1ed46 --- /dev/null +++ b/ted25519/ted25519/ext.go @@ -0,0 +1,123 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package ted25519 + +import ( + "crypto/sha512" + "strconv" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +// GeAdd returns the sum of two public keys, a and b. +func GeAdd(a PublicKey, b PublicKey) PublicKey { + aPoint, err := new(curves.PointEd25519).FromAffineCompressed(a) + if err != nil { + panic("attempted to add invalid point: a") + } + bPoint, err := new(curves.PointEd25519).FromAffineCompressed(b) + if err != nil { + panic("attempted to add invalid point: b") + } + + sum := aPoint.Add(bPoint) + return sum.ToAffineCompressed() +} + +// ExpandSeed applies the standard Ed25519 transform to the seed to turn it into the real private +// key that is used for signing. It returns the expanded seed. +func ExpandSeed(seed []byte) []byte { + digest := sha512.Sum512(seed) + digest[0] &= 248 + digest[31] &= 127 + digest[31] |= 64 + return digest[:32] +} + +// reverseBytes returns a new slice of the input bytes reversed +func reverseBytes(inBytes []byte) []byte { + outBytes := make([]byte, len(inBytes)) + + for i, j := 0, len(inBytes)-1; j >= 0; i, j = i+1, j-1 { + outBytes[i] = inBytes[j] + } + + return outBytes +} + +// ThresholdSign is used for creating signatures for threshold protocols that replace the values of +// the private key and nonce with shamir shares instead. 
Because of this we must have a custom +// signing implementation that accepts arguments for values that cannot be derived anymore and +// removes the extended key generation since that should be done before the secret is shared. +// +// expandedSecretKeyShare and rShare must be little-endian. +func ThresholdSign( + expandedSecretKeyShare []byte, publicKey PublicKey, + message []byte, + rShare []byte, R PublicKey, // nolint:gocritic +) []byte { + // These length checks are are sanity checks where we panic if a provided value falls outside of the expected range. + // These should never fail in practice but serve to protect us from some bug that would cause us to produce + // signatures using a zero value or clipping off extra bytes unintentionally. + // + // We don't specifically check for 32 byte values as any value within the subgroup field could show up here. This is + // different than the upstream Ed25519 which does, but this seems to be a result of how they initialize their byte + // slices to constants and does not guarantee the value itself is 32 bytes without padding. + if l := len(expandedSecretKeyShare); l == 0 || l > 32 { + panic("ed25519: bad key share length: " + strconv.Itoa(l)) + } + if l := len(rShare); l == 0 || l > 32 { + panic("ed25519: bad nonce share length: " + strconv.Itoa(l)) + } + + var expandedSecretKey, rBytes [32]byte + copy(expandedSecretKey[:], expandedSecretKeyShare) + copy(rBytes[:], rShare) + + // c = H(R || A || m) mod q + var hramDigest [64]byte + var err error + h := sha512.New() + _, err = h.Write(R[:]) + if err != nil { + panic(err) + } + _, err = h.Write(publicKey[:]) + if err != nil { + panic(err) + } + _, err = h.Write(message) + if err != nil { + panic(err) + } + _ = h.Sum(hramDigest[:0]) + + // Set c, x and r + c, err := new(curves.ScalarEd25519).SetBytesWide(hramDigest[:]) + if err != nil { + panic(err) + } + + x, err := new(curves.ScalarEd25519).SetBytesCanonical(expandedSecretKey[:]) + if err != nil { + panic(err) + } + + r, err := new(curves.ScalarEd25519).SetBytesCanonical(rBytes[:]) + if err != nil { + panic(err) + } + + // s = cx+r + s := c.MulAdd(x, r) + + signature := make([]byte, SignatureSize) + copy(signature, R[:]) + copy(signature[32:], s.Bytes()[:]) + + return signature +} diff --git a/ted25519/ted25519/ext_test.go b/ted25519/ted25519/ext_test.go new file mode 100644 index 0000000..c3bcfdd --- /dev/null +++ b/ted25519/ted25519/ext_test.go @@ -0,0 +1,143 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package ted25519 + +import ( + "crypto/rand" + "encoding/hex" + "math/big" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core/curves" + v1 "github.com/sonr-io/sonr/crypto/sharing/v1" +) + +const ( + expectedSeedHex = "9d61b19deffd5a60ba844af492ec2cc44449c5697b326919703bac031cae7f60" + expectedPrivKeyHex = "307c83864f2833cb427a2ef1c00a013cfdff2768d980c0a3a520f006904de94f" +) + +func TestExpandSeed(t *testing.T) { + seedBytes, err := hex.DecodeString(expectedSeedHex) + require.NoError(t, err) + privKeyHex := hex.EncodeToString(ExpandSeed(seedBytes)) + require.Equal(t, expectedPrivKeyHex, privKeyHex) +} + +func TestThresholdSign(t *testing.T) { + pub, priv, err := generateKey() + require.NoError(t, err) + field := curves.NewField(curves.Ed25519Order()) + keyShare := v1.NewShamirShare(0, priv, field) + r := big.NewInt(123456789).Bytes() + nonceShare := v1.NewShamirShare(0, r, field) + + r = reverseBytes(r) + var rInput [32]byte + copy(rInput[:], r) + scalar, err := new(curves.ScalarEd25519).SetBytesCanonical(rInput[:]) + require.NoError(t, err) + noncePub := curves.ED25519().Point.Generator().Mul(scalar) + + message := []byte("fnord!") + wrongMessage := []byte("23") + sig := ThresholdSign( + reverseBytes(keyShare.Value.Bytes()), + pub, + message, + reverseBytes(nonceShare.Value.Bytes()), + noncePub.ToAffineCompressed(), + ) + + ok, _ := Verify(pub, message, sig) + require.True(t, ok) + ok, _ = Verify(pub, wrongMessage, sig) + require.False(t, ok) +} + +func TestThresholdSign_invalid_secrets(t *testing.T) { + message := []byte("fnord!") + + secret := []byte{0x02} + secret = reverseBytes(secret) + var sInput [32]byte + copy(sInput[:], secret) + scalar, err := new(curves.ScalarEd25519).SetBytesCanonical(sInput[:]) + require.NoError(t, err) + pub := curves.ED25519().Point.Generator().Mul(scalar) + + nonce := []byte{0x03} + nonce = reverseBytes(nonce) + var nInput [32]byte + copy(nInput[:], nonce) + nScalar, err := new(curves.ScalarEd25519).SetBytesCanonical(nInput[:]) + require.NoError(t, err) + noncePub := curves.ED25519().Point.Generator().Mul(nScalar) + + require.PanicsWithValue(t, "ed25519: bad key share length: 0", + func() { + ThresholdSign( + make([]byte, 0), + pub.ToAffineCompressed(), + message, + nonce, + noncePub.ToAffineCompressed(), + ) + }, + ) + + require.PanicsWithValue(t, "ed25519: bad key share length: 33", + func() { + ThresholdSign( + make([]byte, 33), + pub.ToAffineCompressed(), + message, + nonce, + noncePub.ToAffineCompressed(), + ) + }, + ) + + require.PanicsWithValue(t, "ed25519: bad nonce share length: 0", + func() { + ThresholdSign( + secret, + pub.ToAffineCompressed(), + message, + make([]byte, 0), + noncePub.ToAffineCompressed(), + ) + }, + ) + + require.PanicsWithValue(t, "ed25519: bad nonce share length: 33", + func() { + ThresholdSign( + secret, + pub.ToAffineCompressed(), + message, + make([]byte, 33), + noncePub.ToAffineCompressed(), + ) + }, + ) +} + +// generateKey is the same as generateSharableKey, but used only for testing +func generateKey() (PublicKey, []byte, error) { + pub, priv, err := GenerateKey(rand.Reader) + if err != nil { + return nil, nil, err + } + seed := priv.Seed() + expandedSeed := reverseBytes(ExpandSeed(seed)) + field := &curves.Field{Int: curves.Ed25519Order()} + expandedSeedReduced := field.ReducedElementFromBytes(expandedSeed) + return pub, expandedSeedReduced.Bytes(), nil +} diff --git a/ted25519/ted25519/keygen.go 
b/ted25519/ted25519/keygen.go new file mode 100644 index 0000000..7b0ff6b --- /dev/null +++ b/ted25519/ted25519/keygen.go @@ -0,0 +1,243 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package ted25519 + +import ( + "encoding/binary" + "fmt" + + "github.com/sonr-io/sonr/crypto/core/curves" + v1 "github.com/sonr-io/sonr/crypto/sharing/v1" +) + +// PublicKeyFromBytes converts byte array into PublicKey byte array +func PublicKeyFromBytes(bytes []byte) ([]byte, error) { + if l := len(bytes); l != PublicKeySize { + return nil, fmt.Errorf("invalid public key size: %d", l) + } + + return bytes, nil +} + +// KeyShare represents a share of a generated key. +type KeyShare struct { + *v1.ShamirShare +} + +// NewKeyShare is a KeyShare constructor. +func NewKeyShare(identifier byte, secret []byte) *KeyShare { + field := curves.NewField(curves.Ed25519Order()) + return &KeyShare{v1.NewShamirShare(uint32(identifier), secret, field)} +} + +// Commitments is a collection of public keys with each coefficient of a polynomial as the secret keys. +type Commitments []curves.Point + +// CommitmentsToBytes converts commitments to bytes +func (commitments Commitments) CommitmentsToBytes() [][]byte { + bytes := make([][]byte, len(commitments)) + + for i, c := range commitments { + bytes[i] = c.ToAffineCompressed() + } + + return bytes +} + +// CommitmentsFromBytes converts bytes to commitments +func CommitmentsFromBytes(bytes [][]byte) (Commitments, error) { + comms := make([]curves.Point, len(bytes)) + for i, pubKeyBytes := range bytes { + pubKey, err := PublicKeyFromBytes(pubKeyBytes) + if err != nil { + return nil, err + } + comms[i], err = new(curves.PointEd25519).FromAffineCompressed(pubKey) + if err != nil { + return nil, err + } + } + return comms, nil +} + +// KeyShareFromBytes converts byte array into KeyShare type +func KeyShareFromBytes(bytes []byte) *KeyShare { + field := curves.NewField(curves.Ed25519Order()) + element := field.ElementFromBytes(bytes[4:]) + + // We set first 4 bytes as identifier + identifier := binary.BigEndian.Uint32(bytes[:4]) + return &KeyShare{&v1.ShamirShare{Identifier: identifier, Value: element}} +} + +// ShareConfiguration sets threshold and limit for the protocol +type ShareConfiguration struct { + T int // threshold + N int // total shares +} + +// generateSharableKey generates a random key and returns the public key and private key in +// big-endian encoding. It returns an error if it cannot acquire sufficient randomness. +func generateSharableKey() (PublicKey, []byte, error) { + pub, priv, err := GenerateKey(nil) + if err != nil { + return nil, nil, err + } + + // Internally the PrivateKey type is represented as the seed || public key, but we want to pull + // out seed to share which is the actual private key. + seed := priv.Seed() + + // We must apply the key expansion to the seed before splitting the key. + // Ed25519 signing by default will apply this during signature generation, but since it involves + // a hash function, it breaks the relationship between shares and breaks aggregating signatures. + // Our signature generation does not apply this mutation at signing time. + // + // As per anything that comes from the ed25519 library this value should be treated as + // little-endian so we reverse it before using it. + expandedSeed := reverseBytes(ExpandSeed(seed)) + + // Lastly we must reduce this value into the size of the field so we can share it. 
This diverges + // from how the standard implementation treats this because their scalar multiplication accepts + // values up to the curve order but we must constrain it to be able to split it and aggregate. + // + // If you read the documentation for the ReducedElementFromBytes function we call below, it + // includes a big warning about how it will return non-uniform outputs depending on the input. + // This is true, but not a concern for keygen specifically because the value we are providing it + // has been generated as the ed25519 spec requires, which has a slight bias by definition of how + // the ExpandSeed operation works. + field := &curves.Field{Int: curves.Ed25519Order()} + expandedSeedReduced := field.ReducedElementFromBytes(expandedSeed) + + return pub, expandedSeedReduced.Bytes(), nil +} + +// GenerateSharedKey generates a random key, splits it, and returns the public key, shares, and VSS commitments. +func GenerateSharedKey(config *ShareConfiguration) (PublicKey, []*KeyShare, Commitments, error) { + pub, priv, err := generateSharableKey() + // pub, priv, err := ed25519.GenerateKey(nil) + if err != nil { + return nil, nil, nil, err + } + + keyShares, commitments, err := splitPrivateKey(config, priv) + if err != nil { + return nil, nil, nil, err + } + + return pub, keyShares, commitments, nil +} + +// splitPrivateKey splits the secret into a set of secret shares and creates a set of commitments of them. +func splitPrivateKey(config *ShareConfiguration, priv []byte) ([]*KeyShare, Commitments, error) { + commitments, shares, err := split(priv, config) + if err != nil { + return nil, nil, err + } + + keyShares := make([]*KeyShare, len(shares)) + for i, s := range shares { + keyShares[i] = &KeyShare{s} + } + return keyShares, commitments, nil +} + +// split contains core operations to split the secret and generate commitments. +func split(secret []byte, config *ShareConfiguration) ([]curves.Point, []*v1.ShamirShare, error) { + field := curves.NewField(curves.Ed25519Order()) + shamir, err := v1.NewShamir(config.T, config.N, field) + if err != nil { + return nil, nil, fmt.Errorf("error in NewShamir") + } + shares, poly, err := shamir.GetSharesAndPolynomial(secret) + if err != nil { + return nil, nil, fmt.Errorf("error in GetSharesAndPolynomial") + } + + // Generate the verifiable commitments to the polynomial for the shares + verifiers := make([]curves.Point, len(poly.Coefficients)) + // curve := sharing.Ed25519() + for i, c := range poly.Coefficients { + // We have to reverse each coefficient, which is different than the method sharing.Split + reverseC := reverseBytes(c.Bytes()) + var reverseInput [32]byte + copy(reverseInput[:], reverseC) + cScalar, err := new(curves.ScalarEd25519).SetBytesCanonical(reverseInput[:]) + if err != nil { + return nil, nil, fmt.Errorf("error in SetBytesCanonical reverseC") + } + v := curves.ED25519().Point.Generator().Mul(cScalar) + verifiers[i] = v + } + return verifiers, shares, nil +} + +// Reconstruct recovers the secret from a set of secret shares. +func Reconstruct(keyShares []*KeyShare, config *ShareConfiguration) ([]byte, error) { + curve := v1.Ed25519() + field := curves.NewField(curve.Params().N) + shamir, err := v1.NewShamir(config.T, config.N, field) + if err != nil { + return nil, err + } + + shares := make([]*v1.ShamirShare, len(keyShares)) + for i, s := range keyShares { + shares[i] = s.ShamirShare + } + return shamir.Combine(shares...) 
+} + +// VerifyVSS validates that a Share represents a solution to a Shamir polynomial +// in which len(commitments) + 1 solutions are required to construct the private +// key for the public key at commitments[0]. +func (share *KeyShare) VerifyVSS( + commitments Commitments, + config *ShareConfiguration, +) (bool, error) { + if len(commitments) < config.T { + return false, fmt.Errorf("not enough verifiers to check") + } + field := curves.NewField(curves.Ed25519Order()) + xBytes := make([]byte, 4) + binary.BigEndian.PutUint32(xBytes, share.Identifier) + x := field.ElementFromBytes(xBytes) + i := share.Value.Modulus.One() + + // c_0 + rhs := commitments[0] + + // Compute the sum of products + // c_0 * c_1^i * c_2^{i^2} *c_3^{i^3} + for j := 1; j < len(commitments); j++ { + // i *= x + i = i.Mul(x) + + var iBytes [32]byte + copy(iBytes[:], i.Bytes()[:]) + iScalar, err := new(curves.ScalarEd25519).SetBytesCanonical(iBytes[:]) + if err != nil { + return false, fmt.Errorf("error in SetBytesCanonical iBytes") + } + c := commitments[j].Mul(iScalar) + + // ...* c_j^{i^j} + rhs = rhs.Add(c) + } + + vValue := reverseBytes(share.Value.Bytes()) + var vInput [32]byte + copy(vInput[:], vValue) + vScalar, err := new(curves.ScalarEd25519).SetBytes(vInput[:]) + if err != nil { + return false, err + } + lhs := curves.ED25519().ScalarBaseMult(vScalar) + + // Check if lhs == rhs + return lhs.Equal(rhs), nil +} diff --git a/ted25519/ted25519/keygen_test.go b/ted25519/ted25519/keygen_test.go new file mode 100644 index 0000000..d3536d3 --- /dev/null +++ b/ted25519/ted25519/keygen_test.go @@ -0,0 +1,125 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package ted25519 + +import ( + "math/big" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core/curves" + v1 "github.com/sonr-io/sonr/crypto/sharing/v1" +) + +func TestGenerateEd25519Key(t *testing.T) { + config := ShareConfiguration{T: 2, N: 3} + + // Generate and verify correct number of shares are produced + pub, shares, _, err := GenerateSharedKey(&config) + require.NoError(t, err) + require.Equal(t, config.N, len(shares)) + + // Verify reconstuction works for all permutations of shares + shareVec := make([]*KeyShare, 2) + shareVec[0] = shares[0] + shareVec[1] = shares[1] + secret1, err := Reconstruct(shareVec, &config) + require.Nil(t, err) + shareVec[0] = shares[1] + shareVec[1] = shares[2] + secret2, err := Reconstruct(shareVec, &config) + require.Nil(t, err) + shareVec[0] = shares[0] + shareVec[1] = shares[2] + secret3, err := Reconstruct(shareVec, &config) + require.Nil(t, err) + require.Equal(t, secret1, secret2) + require.Equal(t, secret2, secret3) + + // Need to reverse secret1 + secret1 = reverseBytes(secret1) + var secret1Bytes [32]byte + copy(secret1Bytes[:], secret1) + scalar1, err := new(curves.ScalarEd25519).SetBytesCanonical(secret1Bytes[:]) + require.NoError(t, err) + ed25519 := curves.ED25519() + pubFromSeed := ed25519.Point.Generator().Mul(scalar1) + + require.NoError(t, err) + require.Equal(t, pubFromSeed.ToAffineCompressed(), pub.Bytes()) +} + +func TestGenerateEd25519KeyInvalidConfig(t *testing.T) { + invalidConfig := ShareConfiguration{T: 1, N: 1} + _, _, _, err := GenerateSharedKey(&invalidConfig) + require.NotNil(t, err) + require.Error(t, err) + + invalidConfig = ShareConfiguration{T: 2, N: 1} + _, _, _, err = GenerateSharedKey(&invalidConfig) + require.NotNil(t, err) + require.Error(t, err) +} + +func TestVerifyVSSEd25519(t *testing.T) { 
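+    // VerifyVSS checks the Feldman relation for a share with identifier i:
+    // share.Value*G == C_0 + i*C_1 + i^2*C_2 + ..., where the C_j are the dealer's
+    // polynomial commitments. A share should therefore verify only against the
+    // commitments produced alongside it, which is what the cross-checks below assert.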
+ config := ShareConfiguration{T: 2, N: 3} + pubKey1, shares1, commitments1, err := GenerateSharedKey(&config) + require.NoError(t, err) + pubKey2, shares2, commitments2, err := GenerateSharedKey(&config) + require.NoError(t, err) + + require.Equal(t, pubKey1.Bytes(), commitments1[0].ToAffineCompressed()) + require.Equal(t, pubKey2.Bytes(), commitments2[0].ToAffineCompressed()) + + for _, s := range shares1 { + ok, err := s.VerifyVSS(commitments1, &config) + require.NoError(t, err) + require.True(t, ok) + ok, _ = s.VerifyVSS(commitments2, &config) + require.True(t, !ok) + } + + for _, s := range shares2 { + ok, err := s.VerifyVSS(commitments2, &config) + require.NoError(t, err) + require.True(t, ok) + ok, _ = s.VerifyVSS(commitments1, &config) + require.True(t, !ok) + } +} + +func TestCommitmentsFromBytes(t *testing.T) { + config := ShareConfiguration{T: 2, N: 3} + _, _, comms, err := GenerateSharedKey(&config) + require.NoError(t, err) + + recoveredComms, err := CommitmentsFromBytes(comms.CommitmentsToBytes()) + require.NoError(t, err) + require.Equal(t, len(comms), len(recoveredComms)) + for i := range comms { + require.True(t, comms[i].Equal(recoveredComms[i])) + } + _, err = CommitmentsFromBytes([][]byte{{0x01}}) + require.Error(t, err) +} + +func TestPublicKeyFromBytes(t *testing.T) { + _, err := PublicKeyFromBytes([]byte{0x01}) + require.EqualError(t, err, "invalid public key size: 1") +} + +func TestKeyShareFromBytes(t *testing.T) { + field := curves.NewField(curves.Ed25519Order()) + share := &v1.ShamirShare{ + Identifier: 2, + Value: field.NewElement(big.NewInt(3)), + } + shareBytes := share.Bytes() + recoveredShare := KeyShareFromBytes(shareBytes) + require.Equal(t, recoveredShare.ShamirShare, share) +} diff --git a/ted25519/ted25519/noncegen.go b/ted25519/ted25519/noncegen.go new file mode 100644 index 0000000..bc3d717 --- /dev/null +++ b/ted25519/ted25519/noncegen.go @@ -0,0 +1,123 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package ted25519 + +import ( + "crypto/rand" + "crypto/sha256" + "io" + + "golang.org/x/crypto/hkdf" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +// NonceShare represents a share of a generated nonce. +type NonceShare struct { + *KeyShare +} + +// NewNonceShare is a NonceShare construction +func NewNonceShare(identifier byte, secret []byte) *NonceShare { + return &NonceShare{NewKeyShare(identifier, secret)} +} + +// NonceShareFromBytes unmashals a NonceShare from its bytes representation +func NonceShareFromBytes(bytes []byte) *NonceShare { + return &NonceShare{KeyShareFromBytes(bytes)} +} + +func generateSharableNonce(s *KeyShare, p PublicKey, m Message) (PublicKey, []byte, error) { + // Create an HKDF reader that produces random bytes that we will use to create a nonce + hkdf, err := generateRandomHkdf(s, p, m) + if err != nil { + return nil, nil, err + } + + // Generate a random nonce that is within the field range so that we can share it. + // + // This diverges from how the standard implementation treats it because their scalar + // multiplication accepts values up to the curve order, but we must constrain it to be able to + // split it and aggregate. 
+ // + // WARN: This operation is not constant time and we are dealing with a secret value + nonce, err := curves.NewField(curves.Ed25519Order()).RandomElement(hkdf) + if err != nil { + return nil, nil, err + } + + nonceBytes := nonce.Bytes() + reverseBytes := reverseBytes(nonceBytes) + var reverseInput [32]byte + copy(reverseInput[:], reverseBytes) + scalar, err := new(curves.ScalarEd25519).SetBytesCanonical(reverseInput[:]) + if err != nil { + return nil, nil, err + } + + // Generate the nonce pubkey by multiplying it by the base point. + noncePubkey := curves.ED25519().Point.Generator().Mul(scalar) + + return noncePubkey.ToAffineCompressed(), nonceBytes, nil +} + +// GenerateSharedNonce generates a random nonce, splits it, and returns the nonce pubkey, nonce shares, and +// VSS commitments. +func GenerateSharedNonce(config *ShareConfiguration, s *KeyShare, p PublicKey, m Message) ( + PublicKey, + []*NonceShare, + Commitments, + error, +) { + noncePubkey, nonce, err := generateSharableNonce(s, p, m) + if err != nil { + return nil, nil, nil, err + } + keyShares, vssCommitments, err := splitPrivateKey(config, nonce) + if err != nil { + return nil, nil, nil, err + } + + nonceShares := make([]*NonceShare, len(keyShares)) + for i, k := range keyShares { + nonceShares[i] = &NonceShare{k} + } + return noncePubkey, nonceShares, vssCommitments, nil +} + +// Add returns the sum of two NonceShares. +func (n NonceShare) Add(other *NonceShare) *NonceShare { + return &NonceShare{ + &KeyShare{ + // use Add method from the shamir.Share type to sum the shares + // WARN: This is not constant time and deals with secrets + n.ShamirShare.Add(other.ShamirShare), + }, + } +} + +// generateRandomHkdf returns an HMAC-based extract-and-expand Key Derivation Function (see RFC 5869). +func generateRandomHkdf(s *KeyShare, p PublicKey, m Message) (io.Reader, error) { + // We _must_ introduce randomness to the HKDF to make the output non-deterministic because deterministic nonces open + // up threshold schemes to potential nonce-reuse attacks. We continue to use the HKDF that takes in context about + // what is going to be signed as it adds some protection against bad local randomness. + randNonce := make([]byte, SeedSize) + if _, err := io.ReadFull(rand.Reader, randNonce); err != nil { + return nil, err + } + + var secret []byte + secret = append(secret, s.Bytes()...) + secret = append(secret, randNonce...) + + info := []byte("ted25519nonce") + // We use info for non-secret inputs to limit an attacker's ability to influence the key. + info = append(info, p.Bytes()...) + info = append(info, m...) + + return hkdf.New(sha256.New, secret, nil, info), nil +} diff --git a/ted25519/ted25519/noncegen_test.go b/ted25519/ted25519/noncegen_test.go new file mode 100644 index 0000000..584c25d --- /dev/null +++ b/ted25519/ted25519/noncegen_test.go @@ -0,0 +1,111 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package ted25519 + +import ( + "math/big" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core/curves" + v1 "github.com/sonr-io/sonr/crypto/sharing/v1" +) + +func TestNonceShareFromBytes(t *testing.T) { + field := curves.NewField(curves.Ed25519Order()) + share := &v1.ShamirShare{ + Identifier: 2, + Value: field.NewElement(big.NewInt(3)), + } + shareBytes := share.Bytes() + recoveredShare := NonceShareFromBytes(shareBytes) + require.Equal(t, recoveredShare.ShamirShare, share) + require.Equal(t, share.Identifier, uint32(2)) +} + +func TestGenerateSharedNonce_congruence(t *testing.T) { + config := &ShareConfiguration{T: 2, N: 3} + message := []byte("fnord!") + pubKey, keyShares, _, err := GenerateSharedKey(config) + require.NoError(t, err) + nonceCommitment, nonceShares, _, err := GenerateSharedNonce( + config, + keyShares[0], + pubKey, + message, + ) + require.NoError(t, err) + field := curves.NewField(curves.Ed25519Order()) + shamir, err := v1.NewShamir(config.T, config.N, field) + require.NoError(t, err) + nonce, err := shamir.Combine(toShamirShare(nonceShares)...) + require.NoError(t, err) + + nonce = reverseBytes(nonce) + var nonceBytes [32]byte + copy(nonceBytes[:], nonce) + nonceScalar, err := new(curves.ScalarEd25519).SetBytesCanonical(nonceBytes[:]) + require.NoError(t, err) + ed25519 := curves.ED25519() + recoveredCommitment := ed25519.Point.Generator().Mul(nonceScalar) + require.Equal(t, recoveredCommitment.ToAffineCompressed(), nonceCommitment.Bytes()) +} + +func TestGenerateNonce_non_determinism(t *testing.T) { + config := &ShareConfiguration{T: 2, N: 3} + message := []byte("fnord!") + pubKey, keyShares, _, err := GenerateSharedKey(config) + require.NoError(t, err) + + _, nonceShares1, _, err := GenerateSharedNonce(config, keyShares[0], pubKey, message) + require.NoError(t, err) + field := curves.NewField(curves.Ed25519Order()) + shamir, err := v1.NewShamir(config.T, config.N, field) + require.NoError(t, err) + nonce1, err := shamir.Combine(toShamirShare(nonceShares1)...) + require.NoError(t, err) + + _, nonceShares2, _, err := GenerateSharedNonce(config, keyShares[1], pubKey, message) + require.NoError(t, err) + nonce2, err := shamir.Combine(toShamirShare(nonceShares2)...) + require.NoError(t, err) + + _, nonceShares3, _, err := GenerateSharedNonce(config, keyShares[0], pubKey, message) + require.NoError(t, err) + nonce3, err := shamir.Combine(toShamirShare(nonceShares3)...) 
+ require.NoError(t, err) + + require.NotEqual(t, nonce1, nonce2) + require.NotEqual(t, nonce1, nonce3) +} + +func TestNonceSharesAdd(t *testing.T) { + one := NewNonceShare(0, []byte{0x01}) + two := NewNonceShare(0, []byte{0x02}) + + // basic addition + sum := one.Add(two) + require.Equal(t, uint32(0), sum.Identifier) + require.Equal(t, []byte{0x03}, sum.Value.Bytes()) +} + +func TestNonceSharesAdd_errors(t *testing.T) { + one := NewNonceShare(0, []byte{0x01}) + two := NewNonceShare(1, []byte{0x02}) + require.PanicsWithValue(t, "identifiers must match for valid addition", func() { + one.Add(two) + }) +} + +func toShamirShare(nonceShares []*NonceShare) []*v1.ShamirShare { + shamirShares := make([]*v1.ShamirShare, len(nonceShares)) + for i, n := range nonceShares { + shamirShares[i] = n.ShamirShare + } + return shamirShares +} diff --git a/ted25519/ted25519/partialsig.go b/ted25519/ted25519/partialsig.go new file mode 100755 index 0000000..5b5cafb --- /dev/null +++ b/ted25519/ted25519/partialsig.go @@ -0,0 +1,61 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package ted25519 + +import "strconv" + +type Message []byte + +func (m Message) String() string { + return string(m) +} + +const signatureLength = 64 + +type PartialSignature struct { + ShareIdentifier byte // x-coordinate of which signer produced signature + Sig []byte // 64-byte signature: R || s +} + +// NewPartialSignature creates a new PartialSignature +func NewPartialSignature(identifier byte, sig []byte) *PartialSignature { + if l := len(sig); l != signatureLength { + panic("ted25519: invalid partial signature length: " + strconv.Itoa(l)) + } + return &PartialSignature{ShareIdentifier: identifier, Sig: sig} +} + +// R returns the R component of the signature +func (sig *PartialSignature) R() []byte { + return sig.Sig[:32] +} + +// S returns the s component of the signature +func (sig *PartialSignature) S() []byte { + return sig.Sig[32:] +} + +func (sig *PartialSignature) Bytes() []byte { + return sig.Sig +} + +// TSign generates a signature that can later be aggregated with others to produce a signature valid +// under the provided public key and nonce pair. +func TSign( + message Message, + key *KeyShare, + pub PublicKey, + nonce *NonceShare, + noncePub PublicKey, +) *PartialSignature { + sig := ThresholdSign( + reverseBytes(key.Value.Bytes()), pub, + message, + reverseBytes(nonce.Value.Bytes()), noncePub, + ) + return NewPartialSignature(byte(key.ShamirShare.Identifier), sig) +} diff --git a/ted25519/ted25519/partialsig_test.go b/ted25519/ted25519/partialsig_test.go new file mode 100644 index 0000000..c343c34 --- /dev/null +++ b/ted25519/ted25519/partialsig_test.go @@ -0,0 +1,56 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package ted25519 + +import ( + "math/big" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +func TestPartialSignNormalSignature(t *testing.T) { + pub, priv, err := generateSharableKey() + require.NoError(t, err) + keyShare := NewKeyShare(0, priv) + r := big.NewInt(123456789).Bytes() + nonceShare := NewNonceShare(0, r) + + r = reverseBytes(r) + var rInput [32]byte + copy(rInput[:], r) + scalar, err := new(curves.ScalarEd25519).SetBytesCanonical(rInput[:]) + require.NoError(t, err) + noncePub := curves.ED25519().Point.Generator().Mul(scalar) + + message := []byte("test message") + wrongMessage := []byte("wrong message") + sig := TSign(message, keyShare, pub, nonceShare, noncePub.ToAffineCompressed()) + + ok, _ := Verify(pub, message, sig.Sig) + require.True(t, ok) + ok, _ = Verify(pub, wrongMessage, sig.Sig) + require.False(t, ok) +} + +func TestNewPartialSignature(t *testing.T) { + s := []byte("11111111111111111111111111111111") + r := []byte("22222222222222222222222222222222") + sigBytes := []byte("2222222222222222222222222222222211111111111111111111111111111111") + sig := NewPartialSignature(1, sigBytes) + + require.Equal(t, byte(1), sig.ShareIdentifier) + require.Equal(t, s, sig.S()) + require.Equal(t, r, sig.R()) + require.Equal(t, sigBytes, sig.Bytes()) + + require.PanicsWithValue(t, "ted25519: invalid partial signature length: 3", func() { + NewPartialSignature(1, []byte("sig")) + }) +} diff --git a/ted25519/ted25519/sigagg.go b/ted25519/ted25519/sigagg.go new file mode 100644 index 0000000..34c5c56 --- /dev/null +++ b/ted25519/ted25519/sigagg.go @@ -0,0 +1,62 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +package ted25519 + +import ( + "bytes" + "fmt" + + "github.com/sonr-io/sonr/crypto/core/curves" + v1 "github.com/sonr-io/sonr/crypto/sharing/v1" +) + +type Signature = []byte + +func Aggregate(sigs []*PartialSignature, config *ShareConfiguration) (Signature, error) { + if len(sigs) == 0 { + return nil, fmt.Errorf("ted25519: sigs must be non-empty") + } + + // Verify all nonce pubKeys are the same by checking they all match the first one. + noncePubkey := sigs[0].R() + for i := 1; i < len(sigs); i++ { + if !bytes.Equal(sigs[i].R(), noncePubkey) { + return nil, fmt.Errorf( + "ted25519: unexpected nonce pubkey. got: %x expected: %x", + sigs[i].R(), + noncePubkey, + ) + } + } + + // Convert signatures to a Shamir share representation so we can recombine them + sigShares := make([]*v1.ShamirShare, len(sigs)) + field := curves.NewField(curves.Ed25519Order()) + shamir, err := v1.NewShamir(config.T, config.N, field) + if err != nil { + return nil, err + } + + for i, sig := range sigs { + sigShares[i] = v1.NewShamirShare( + uint32(sig.ShareIdentifier), + reverseBytes(sig.S()), + field, + ) + } + + sigS, err := shamir.Combine(sigShares...) + if err != nil { + return nil, err + } + + sig := make([]byte, signatureLength) + copy(sig[:32], noncePubkey) // R is the same on all sigs + copy(sig[32:], reverseBytes(sigS)) // be-to-le + + return sig, nil +} diff --git a/ted25519/ted25519/sigagg_test.go b/ted25519/ted25519/sigagg_test.go new file mode 100755 index 0000000..8a94ada --- /dev/null +++ b/ted25519/ted25519/sigagg_test.go @@ -0,0 +1,97 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. 
+// +// SPDX-License-Identifier: Apache-2.0 +// + +package ted25519 + +import ( + "encoding/hex" + "fmt" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestSigAgg(t *testing.T) { + config := ShareConfiguration{T: 2, N: 3} + pub, secretShares, _, err := GenerateSharedKey(&config) + require.NoError(t, err) + + message := []byte("test message") + + // Each party generates a nonce and we combine them together into an aggregate one + noncePub1, nonceShares1, _, err := GenerateSharedNonce(&config, secretShares[0], pub, message) + require.NoError(t, err) + noncePub2, nonceShares2, _, err := GenerateSharedNonce(&config, secretShares[1], pub, message) + require.NoError(t, err) + noncePub3, nonceShares3, _, err := GenerateSharedNonce(&config, secretShares[2], pub, message) + require.NoError(t, err) + nonceShares := []*NonceShare{ + nonceShares1[0].Add(nonceShares2[0]).Add(nonceShares3[0]), + nonceShares1[1].Add(nonceShares2[1]).Add(nonceShares3[1]), + nonceShares1[2].Add(nonceShares2[2]).Add(nonceShares3[2]), + } + + noncePub := GeAdd(GeAdd(noncePub1, noncePub2), noncePub3) + + sig1 := TSign(message, secretShares[0], pub, nonceShares[0], noncePub) + sig2 := TSign(message, secretShares[1], pub, nonceShares[1], noncePub) + sig3 := TSign(message, secretShares[2], pub, nonceShares[2], noncePub) + + // Test signer 1&2 verification + sig, err := Aggregate([]*PartialSignature{sig1, sig2}, &config) + require.NoError(t, err) + assertSignatureVerifies(t, pub, message, sig) + + // Test signer 2&3 verification + sig, err = Aggregate([]*PartialSignature{sig2, sig3}, &config) + require.NoError(t, err) + assertSignatureVerifies(t, pub, message, sig) + + // Test signer 1&3 verification + sig, err = Aggregate([]*PartialSignature{sig1, sig3}, &config) + require.NoError(t, err) + assertSignatureVerifies(t, pub, message, sig) +} + +func TestSigAgg_validations(t *testing.T) { + config := ShareConfiguration{T: 2, N: 3} + _, err := Aggregate([]*PartialSignature{}, &config) + require.EqualError(t, err, "ted25519: sigs must be non-empty") + + sig1bytes, _ := hex.DecodeString( + "e5564300c360ac729086e2cc806e828a84877f1eb8e5d974d873e06522490155" + + "5fb8821590a33bacc61e39701cf9b46bd25bf5f0595bbe24655141438e7a100b", + ) + sig2bytes, _ := hex.DecodeString( + "92a009a9f0d4cab8720e820b5f642540a2b27b5416503f8fb3762223ebdb69da" + + "085ac1e43e15996e458f3613d0f11d8c387b2eaeb4302aeeb00d291612bb0c00", + ) + sig1 := NewPartialSignature(1, sig1bytes) + sig2 := NewPartialSignature(2, sig2bytes) + + _, err = Aggregate([]*PartialSignature{sig1, sig2}, &config) + require.EqualError( + t, + err, + fmt.Sprintf( + "ted25519: unexpected nonce pubkey. got: %x expected: %x", + sig2bytes[:32], + sig1bytes[:32], + ), + ) +} + +func assertSignatureVerifies(t *testing.T, pub, message, sig []byte) { + ok, _ := Verify(pub, message, sig) + if !ok { + t.Errorf("valid signature rejected") + } + wrongMessage := []byte("wrong message") + ok, _ = Verify(pub, wrongMessage, sig) + if ok { + t.Errorf("signature of different message accepted") + } +} diff --git a/ted25519/ted25519/twobytwo_test.go b/ted25519/ted25519/twobytwo_test.go new file mode 100644 index 0000000..3c59f19 --- /dev/null +++ b/ted25519/ted25519/twobytwo_test.go @@ -0,0 +1,55 @@ +// +// Copyright Coinbase, Inc. All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +/* +* This is a simple example of a 2x2 signature scheme to prove out a simpler case than the threshold +* variants. 
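+* Correctness follows from the linearity of the signing equation: with the shared
+* challenge c = H(R1+R2 || A1+A2 || m), the partial values s1 = c*x1 + r1 and
+* s2 = c*x2 + r2 sum to c*(x1+x2) + (r1+r2), which verifies as a standard Ed25519
+* signature under the combined public key A1+A2 and combined nonce R1+R2.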
We don't intend to use it and it is not modeled off of any specific known protocol. + */ +package ted25519 + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +func AggregateSignatures(sig1, sig2 *PartialSignature) []byte { + field := curves.NewField(curves.Ed25519Order()) + sig1s := field.ElementFromBytes(reverseBytes(sig1.S())) + sig2s := field.ElementFromBytes(reverseBytes(sig2.S())) + sigS := sig1s.Add(sig2s) + + // Create signature as R || s. The R is the same so we use the same one + sig := make([]byte, SignatureSize) + copy(sig, sig1.R()) + copy(sig[32:], reverseBytes(sigS.Bytes())) + return sig +} + +func TestTwoByTwoSigning(t *testing.T) { + // generate shared pubkey + pub1, priv1, _ := generateSharableKey() + pub2, priv2, _ := generateSharableKey() + pub := GeAdd(pub1, pub2) + + // generate shared nonce + pubr1, r1, _ := generateSharableKey() + pubr2, r2, _ := generateSharableKey() + noncePub := GeAdd(pubr1, pubr2) + + // generate partial sigs + msg := []byte("test message") + sig1 := TSign(msg, NewKeyShare(0, priv1), pub, NewNonceShare(0, r1), noncePub) + sig2 := TSign(msg, NewKeyShare(0, priv2), pub, NewNonceShare(0, r2), noncePub) + + // add sigs (s+s) + sig := AggregateSignatures(sig1, sig2) + + ok, _ := Verify(pub, msg, sig) + require.True(t, ok, "signature failed verification") +} diff --git a/ucan/capability.go b/ucan/capability.go new file mode 100644 index 0000000..b8d2e32 --- /dev/null +++ b/ucan/capability.go @@ -0,0 +1,860 @@ +// Package ucan provides User-Controlled Authorization Networks (UCAN) implementation +// for decentralized authorization and capability delegation in the Sonr network. +// This package handles JWT-based tokens, cryptographic verification, and resource capabilities. 
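+//
+// A minimal illustrative sketch of granting and checking capabilities (the
+// resource URIs and actions below are placeholders, not fixed conventions):
+//
+//	atts := AttenuationList{
+//		CreateSimpleAttenuation("read", "vault://example-vault"),
+//		CreateMultiAttenuation([]string{"read", "write"}, "service://example-service"),
+//	}
+//	atts.CanPerform("vault://example-vault", []string{"read"})  // true
+//	atts.CanPerform("vault://example-vault", []string{"write"}) // false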
+package ucan + +import ( + "encoding/json" + "fmt" + "strings" + "time" +) + +// Token represents a UCAN JWT token with parsed claims +type Token struct { + Raw string `json:"raw"` + Issuer string `json:"iss"` + Audience string `json:"aud"` + ExpiresAt int64 `json:"exp,omitempty"` + NotBefore int64 `json:"nbf,omitempty"` + Attenuations []Attenuation `json:"att"` + Proofs []Proof `json:"prf,omitempty"` + Facts []Fact `json:"fct,omitempty"` +} + +// Attenuation represents a UCAN capability attenuation +type Attenuation struct { + Capability Capability `json:"can"` + Resource Resource `json:"with"` +} + +// Proof represents a UCAN delegation proof (either JWT or CID) +type Proof string + +// Fact represents arbitrary facts in UCAN tokens +type Fact struct { + Data json.RawMessage `json:"data"` +} + +// Capability defines what actions can be performed +type Capability interface { + // GetActions returns the list of actions this capability grants + GetActions() []string + // Grants checks if this capability grants the required abilities + Grants(abilities []string) bool + // Contains checks if this capability contains another capability + Contains(other Capability) bool + // String returns a string representation + String() string +} + +// Resource defines what resource the capability applies to +type Resource interface { + // GetScheme returns the resource scheme (e.g., "https", "ipfs") + GetScheme() string + // GetValue returns the resource value/path + GetValue() string + // GetURI returns the full URI string + GetURI() string + // Matches checks if this resource matches another resource + Matches(other Resource) bool +} + +// SimpleCapability implements Capability for single actions +type SimpleCapability struct { + Action string `json:"action"` +} + +// GetActions returns the single action +func (c *SimpleCapability) GetActions() []string { + return []string{c.Action} +} + +// Grants checks if the capability grants all required abilities +func (c *SimpleCapability) Grants(abilities []string) bool { + if len(abilities) != 1 { + return false + } + return c.Action == abilities[0] || c.Action == "*" +} + +// Contains checks if this capability contains another capability +func (c *SimpleCapability) Contains(other Capability) bool { + if c.Action == "*" { + return true + } + + otherActions := other.GetActions() + if len(otherActions) != 1 { + return false + } + + return c.Action == otherActions[0] +} + +// String returns string representation +func (c *SimpleCapability) String() string { + return c.Action +} + +// MultiCapability implements Capability for multiple actions +type MultiCapability struct { + Actions []string `json:"actions"` +} + +// GetActions returns all actions +func (c *MultiCapability) GetActions() []string { + return c.Actions +} + +// Grants checks if the capability grants all required abilities +func (c *MultiCapability) Grants(abilities []string) bool { + actionSet := make(map[string]bool) + for _, action := range c.Actions { + actionSet[action] = true + } + + // Check if we have wildcard permission + if actionSet["*"] { + return true + } + + // Check each required ability + for _, ability := range abilities { + if !actionSet[ability] { + return false + } + } + + return true +} + +// Contains checks if this capability contains another capability +func (c *MultiCapability) Contains(other Capability) bool { + actionSet := make(map[string]bool) + for _, action := range c.Actions { + actionSet[action] = true + } + + // Wildcard contains everything + if actionSet["*"] { + 
return true + } + + // Check if all other actions are contained + for _, otherAction := range other.GetActions() { + if !actionSet[otherAction] { + return false + } + } + + return true +} + +// String returns string representation +func (c *MultiCapability) String() string { + return strings.Join(c.Actions, ",") +} + +// SimpleResource implements Resource for basic URI resources +type SimpleResource struct { + Scheme string `json:"scheme"` + Value string `json:"value"` + URI string `json:"uri"` +} + +// GetScheme returns the resource scheme +func (r *SimpleResource) GetScheme() string { + return r.Scheme +} + +// GetValue returns the resource value +func (r *SimpleResource) GetValue() string { + return r.Value +} + +// GetURI returns the full URI +func (r *SimpleResource) GetURI() string { + return r.URI +} + +// Matches checks if resources are equivalent +func (r *SimpleResource) Matches(other Resource) bool { + return r.URI == other.GetURI() +} + +// VaultResource represents vault-specific resources with metadata +type VaultResource struct { + SimpleResource + VaultAddress string `json:"vault_address,omitempty"` + EnclaveDataCID string `json:"enclave_data_cid,omitempty"` + Metadata map[string]string `json:"metadata,omitempty"` +} + +// ServiceResource represents service-specific resources +type ServiceResource struct { + SimpleResource + ServiceID string `json:"service_id"` + Domain string `json:"domain"` + Metadata map[string]string `json:"metadata,omitempty"` +} + +// CreateSimpleAttenuation creates a basic attenuation +func CreateSimpleAttenuation(action, resourceURI string) Attenuation { + return Attenuation{ + Capability: &SimpleCapability{Action: action}, + Resource: parseResourceURI(resourceURI), + } +} + +// CreateMultiAttenuation creates an attenuation with multiple actions +func CreateMultiAttenuation(actions []string, resourceURI string) Attenuation { + return Attenuation{ + Capability: &MultiCapability{Actions: actions}, + Resource: parseResourceURI(resourceURI), + } +} + +// CreateVaultAttenuation creates a vault-specific attenuation +func CreateVaultAttenuation(actions []string, enclaveDataCID, vaultAddress string) Attenuation { + resource := &VaultResource{ + SimpleResource: SimpleResource{ + Scheme: "ipfs", + Value: enclaveDataCID, + URI: fmt.Sprintf("ipfs://%s", enclaveDataCID), + }, + VaultAddress: vaultAddress, + EnclaveDataCID: enclaveDataCID, + } + + return Attenuation{ + Capability: &MultiCapability{Actions: actions}, + Resource: resource, + } +} + +// CreateServiceAttenuation creates a service-specific attenuation +func CreateServiceAttenuation(actions []string, serviceID, domain string) Attenuation { + resourceURI := fmt.Sprintf("service://%s", serviceID) + resource := &ServiceResource{ + SimpleResource: SimpleResource{ + Scheme: "service", + Value: serviceID, + URI: resourceURI, + }, + ServiceID: serviceID, + Domain: domain, + } + + return Attenuation{ + Capability: &MultiCapability{Actions: actions}, + Resource: resource, + } +} + +// parseResourceURI creates a Resource from URI string +func parseResourceURI(uri string) Resource { + parts := strings.SplitN(uri, "://", 2) + if len(parts) != 2 { + return &SimpleResource{ + Scheme: "unknown", + Value: uri, + URI: uri, + } + } + + return &SimpleResource{ + Scheme: parts[0], + Value: parts[1], + URI: uri, + } +} + +// CapabilityTemplate provides validation and construction utilities +type CapabilityTemplate struct { + AllowedActions map[string][]string `json:"allowed_actions"` // resource_type -> []actions + 
DefaultExpiration time.Duration `json:"default_expiration"` // default token lifetime + MaxExpiration time.Duration `json:"max_expiration"` // maximum allowed lifetime +} + +// NewCapabilityTemplate creates a new capability template +func NewCapabilityTemplate() *CapabilityTemplate { + return &CapabilityTemplate{ + AllowedActions: make(map[string][]string), + DefaultExpiration: 24 * time.Hour, + MaxExpiration: 30 * 24 * time.Hour, // 30 days + } +} + +// AddAllowedActions adds allowed actions for a resource type +func (ct *CapabilityTemplate) AddAllowedActions(resourceType string, actions []string) { + ct.AllowedActions[resourceType] = actions +} + +// ValidateAttenuation validates an attenuation against the template +func (ct *CapabilityTemplate) ValidateAttenuation(att Attenuation) error { + resourceType := att.Resource.GetScheme() + allowedActions, exists := ct.AllowedActions[resourceType] + + if !exists { + // Allow unknown resource types for backward compatibility + return nil + } + + // Create action set for efficient lookup + actionSet := make(map[string]bool) + for _, action := range allowedActions { + actionSet[action] = true + } + + // Check if all capability actions are allowed + for _, action := range att.Capability.GetActions() { + if action == "*" { + // Wildcard requires explicit permission + if !actionSet["*"] { + return fmt.Errorf("wildcard action not allowed for resource type %s", resourceType) + } + continue + } + + if !actionSet[action] { + return fmt.Errorf("action %s not allowed for resource type %s", action, resourceType) + } + } + + return nil +} + +// ValidateExpiration validates token expiration time +func (ct *CapabilityTemplate) ValidateExpiration(expiresAt int64) error { + if expiresAt == 0 { + return nil // No expiration is allowed + } + + now := time.Now() + expiry := time.Unix(expiresAt, 0) + + if expiry.Before(now) { + return fmt.Errorf("token expiration is in the past") + } + + if expiry.Sub(now) > ct.MaxExpiration { + return fmt.Errorf("token expiration exceeds maximum allowed duration") + } + + return nil +} + +// GetDefaultExpirationTime returns the default expiration timestamp +func (ct *CapabilityTemplate) GetDefaultExpirationTime() int64 { + return time.Now().Add(ct.DefaultExpiration).Unix() +} + +// StandardVaultTemplate returns a standard template for vault operations +func StandardVaultTemplate() *CapabilityTemplate { + template := NewCapabilityTemplate() + template.AddAllowedActions( + "ipfs", + []string{"read", "write", "sign", "export", "import", "delete", VaultAdminAction}, + ) + template.AddAllowedActions( + "vault", + []string{"read", "write", "sign", "export", "import", "delete", "admin", "*"}, + ) + return template +} + +// StandardServiceTemplate returns a standard template for service operations +func StandardServiceTemplate() *CapabilityTemplate { + template := NewCapabilityTemplate() + template.AddAllowedActions( + "service", + []string{"read", "write", "admin", "register", "update", "delete"}, + ) + template.AddAllowedActions("https", []string{"read", "write"}) + template.AddAllowedActions("http", []string{"read", "write"}) + return template +} + +// AttenuationList provides utilities for working with multiple attenuations +type AttenuationList []Attenuation + +// Contains checks if the list contains attenuations for a specific resource +func (al AttenuationList) Contains(resourceURI string) bool { + for _, att := range al { + if att.Resource.GetURI() == resourceURI { + return true + } + } + return false +} + +// 
GetCapabilitiesForResource returns all capabilities for a specific resource +func (al AttenuationList) GetCapabilitiesForResource(resourceURI string) []Capability { + var capabilities []Capability + for _, att := range al { + if att.Resource.GetURI() == resourceURI { + capabilities = append(capabilities, att.Capability) + } + } + return capabilities +} + +// CanPerform checks if the attenuations allow specific actions on a resource +func (al AttenuationList) CanPerform(resourceURI string, actions []string) bool { + capabilities := al.GetCapabilitiesForResource(resourceURI) + for _, cap := range capabilities { + if cap.Grants(actions) { + return true + } + } + return false +} + +// IsSubsetOf checks if this list is a subset of another list +func (al AttenuationList) IsSubsetOf(parent AttenuationList) bool { + for _, childAtt := range al { + if !parent.containsAttenuation(childAtt) { + return false + } + } + return true +} + +// containsAttenuation checks if the list contains an equivalent attenuation +func (al AttenuationList) containsAttenuation(att Attenuation) bool { + for _, parentAtt := range al { + if parentAtt.Resource.Matches(att.Resource) { + if parentAtt.Capability.Contains(att.Capability) { + return true + } + } + } + return false +} + +// Module-Specific Capability Types + +// DIDCapability implements Capability for DID module operations +type DIDCapability struct { + Action string `json:"action"` + Actions []string `json:"actions,omitempty"` + Caveats []string `json:"caveats,omitempty"` + Metadata map[string]string `json:"metadata,omitempty"` +} + +// GetActions returns the actions this DID capability grants +func (c *DIDCapability) GetActions() []string { + if len(c.Actions) > 0 { + return c.Actions + } + return []string{c.Action} +} + +// Grants checks if this capability grants the required abilities +func (c *DIDCapability) Grants(abilities []string) bool { + if c.Action == "*" { + return true + } + + grantedActions := make(map[string]bool) + for _, action := range c.GetActions() { + grantedActions[action] = true + } + + for _, ability := range abilities { + if !grantedActions[ability] { + return false + } + } + return true +} + +// Contains checks if this capability contains another capability +func (c *DIDCapability) Contains(other Capability) bool { + if c.Action == "*" { + return true + } + + ourActions := make(map[string]bool) + for _, action := range c.GetActions() { + ourActions[action] = true + } + + for _, otherAction := range other.GetActions() { + if !ourActions[otherAction] { + return false + } + } + return true +} + +// String returns string representation +func (c *DIDCapability) String() string { + if len(c.Actions) > 1 { + return strings.Join(c.Actions, ",") + } + return c.Action +} + +// DWNCapability implements Capability for DWN module operations +type DWNCapability struct { + Action string `json:"action"` + Actions []string `json:"actions,omitempty"` + Caveats []string `json:"caveats,omitempty"` + Metadata map[string]string `json:"metadata,omitempty"` +} + +// GetActions returns the actions this DWN capability grants +func (c *DWNCapability) GetActions() []string { + if len(c.Actions) > 0 { + return c.Actions + } + return []string{c.Action} +} + +// Grants checks if this capability grants the required abilities +func (c *DWNCapability) Grants(abilities []string) bool { + if c.Action == "*" { + return true + } + + grantedActions := make(map[string]bool) + for _, action := range c.GetActions() { + grantedActions[action] = true + } + + for _, ability := 
range abilities { + if !grantedActions[ability] { + return false + } + } + return true +} + +// Contains checks if this capability contains another capability +func (c *DWNCapability) Contains(other Capability) bool { + if c.Action == "*" { + return true + } + + ourActions := make(map[string]bool) + for _, action := range c.GetActions() { + ourActions[action] = true + } + + for _, otherAction := range other.GetActions() { + if !ourActions[otherAction] { + return false + } + } + return true +} + +// String returns string representation +func (c *DWNCapability) String() string { + if len(c.Actions) > 1 { + return strings.Join(c.Actions, ",") + } + return c.Action +} + +// DEXCapability implements Capability for DEX module operations +type DEXCapability struct { + Action string `json:"action"` + Actions []string `json:"actions,omitempty"` + Caveats []string `json:"caveats,omitempty"` + MaxAmount string `json:"max_amount,omitempty"` // For swap limits + Metadata map[string]string `json:"metadata,omitempty"` +} + +// GetActions returns the actions this DEX capability grants +func (c *DEXCapability) GetActions() []string { + if len(c.Actions) > 0 { + return c.Actions + } + return []string{c.Action} +} + +// Grants checks if this capability grants the required abilities +func (c *DEXCapability) Grants(abilities []string) bool { + if c.Action == "*" { + return true + } + + grantedActions := make(map[string]bool) + for _, action := range c.GetActions() { + grantedActions[action] = true + } + + for _, ability := range abilities { + if !grantedActions[ability] { + return false + } + } + return true +} + +// Contains checks if this capability contains another capability +func (c *DEXCapability) Contains(other Capability) bool { + if c.Action == "*" { + return true + } + + ourActions := make(map[string]bool) + for _, action := range c.GetActions() { + ourActions[action] = true + } + + for _, otherAction := range other.GetActions() { + if !ourActions[otherAction] { + return false + } + } + return true +} + +// String returns string representation +func (c *DEXCapability) String() string { + if len(c.Actions) > 1 { + return strings.Join(c.Actions, ",") + } + return c.Action +} + +// Module-Specific Resource Types + +// DIDResource represents DID-specific resources +type DIDResource struct { + SimpleResource + DIDMethod string `json:"did_method,omitempty"` + DIDSubject string `json:"did_subject,omitempty"` + Metadata map[string]string `json:"metadata,omitempty"` +} + +// DWNResource represents DWN-specific resources +type DWNResource struct { + SimpleResource + RecordType string `json:"record_type,omitempty"` + Protocol string `json:"protocol,omitempty"` + Owner string `json:"owner,omitempty"` + Metadata map[string]string `json:"metadata,omitempty"` +} + +// DEXResource represents DEX-specific resources +type DEXResource struct { + SimpleResource + PoolID string `json:"pool_id,omitempty"` + AssetPair string `json:"asset_pair,omitempty"` + OrderID string `json:"order_id,omitempty"` + Metadata map[string]string `json:"metadata,omitempty"` +} + +// Enhanced ServiceResource adds delegation capabilities +func (r *ServiceResource) SupportsDelegate() bool { + return r.Metadata != nil && r.Metadata["supports_delegation"] == "true" +} + +// Module-Specific Capability Templates + +// StandardDIDTemplate returns a standard template for DID operations +func StandardDIDTemplate() *CapabilityTemplate { + template := NewCapabilityTemplate() + template.AddAllowedActions("did", []string{ + "create", "register", "update", 
"deactivate", "revoke", + "add-verification-method", "remove-verification-method", + "add-service", "remove-service", "issue-credential", + "revoke-credential", "link-wallet", "register-webauthn", "*", + }) + return template +} + +// StandardDWNTemplate returns a standard template for DWN operations +func StandardDWNTemplate() *CapabilityTemplate { + template := NewCapabilityTemplate() + template.AddAllowedActions("dwn", []string{ + "records-write", "records-delete", "protocols-configure", + "permissions-grant", "permissions-revoke", "create", "read", + "update", "delete", "*", + }) + return template +} + +// EnhancedServiceTemplate returns enhanced service template with delegation support +func EnhancedServiceTemplate() *CapabilityTemplate { + template := NewCapabilityTemplate() + template.AddAllowedActions("service", []string{ + "register", "update", "delete", "verify-domain", + "initiate-domain-verification", "delegate", "*", + }) + template.AddAllowedActions("svc", []string{ + "register", "verify-domain", "delegate", "*", + }) + template.AddAllowedActions("https", []string{"read", "write"}) + template.AddAllowedActions("http", []string{"read", "write"}) + return template +} + +// StandardDEXTemplate returns a standard template for DEX operations +func StandardDEXTemplate() *CapabilityTemplate { + template := NewCapabilityTemplate() + template.AddAllowedActions("dex", []string{ + "register-account", "swap", "provide-liquidity", "remove-liquidity", + "create-limit-order", "cancel-order", "*", + }) + template.AddAllowedActions("pool", []string{ + "swap", "provide-liquidity", "remove-liquidity", "*", + }) + return template +} + +// Module-Specific Attenuation Constructors + +// CreateDIDAttenuation creates a DID-specific attenuation +func CreateDIDAttenuation(actions []string, didPattern string, caveats []string) Attenuation { + resourceURI := fmt.Sprintf("did:%s", didPattern) + resource := &DIDResource{ + SimpleResource: SimpleResource{ + Scheme: "did", + Value: didPattern, + URI: resourceURI, + }, + } + + return Attenuation{ + Capability: &DIDCapability{ + Actions: actions, + Caveats: caveats, + }, + Resource: resource, + } +} + +// CreateDWNAttenuation creates a DWN-specific attenuation +func CreateDWNAttenuation(actions []string, recordPattern string, caveats []string) Attenuation { + resourceURI := fmt.Sprintf("dwn:records/%s", recordPattern) + resource := &DWNResource{ + SimpleResource: SimpleResource{ + Scheme: "dwn", + Value: fmt.Sprintf("records/%s", recordPattern), + URI: resourceURI, + }, + RecordType: recordPattern, + } + + return Attenuation{ + Capability: &DWNCapability{ + Actions: actions, + Caveats: caveats, + }, + Resource: resource, + } +} + +// CreateDEXAttenuation creates a DEX-specific attenuation +func CreateDEXAttenuation(actions []string, poolPattern string, caveats []string, maxAmount string) Attenuation { + resourceURI := fmt.Sprintf("dex:pool/%s", poolPattern) + resource := &DEXResource{ + SimpleResource: SimpleResource{ + Scheme: "dex", + Value: fmt.Sprintf("pool/%s", poolPattern), + URI: resourceURI, + }, + PoolID: poolPattern, + } + + return Attenuation{ + Capability: &DEXCapability{ + Actions: actions, + Caveats: caveats, + MaxAmount: maxAmount, + }, + Resource: resource, + } +} + +// Cross-Module Capability Composition + +// CrossModuleCapability allows composing capabilities across modules +type CrossModuleCapability struct { + Modules map[string]Capability `json:"modules"` +} + +// GetActions returns all actions across all modules +func (c 
*CrossModuleCapability) GetActions() []string { + var actions []string + for _, cap := range c.Modules { + actions = append(actions, cap.GetActions()...) + } + return actions +} + +// Grants checks if required abilities are granted across modules +func (c *CrossModuleCapability) Grants(abilities []string) bool { + allActions := make(map[string]bool) + for _, cap := range c.Modules { + for _, action := range cap.GetActions() { + allActions[action] = true + } + } + + for _, ability := range abilities { + if !allActions[ability] { + return false + } + } + return true +} + +// Contains checks if this cross-module capability contains another +func (c *CrossModuleCapability) Contains(other Capability) bool { + // For cross-module capabilities, check each module + if otherCross, ok := other.(*CrossModuleCapability); ok { + for module, otherCap := range otherCross.Modules { + if ourCap, exists := c.Modules[module]; exists { + if !ourCap.Contains(otherCap) { + return false + } + } else { + return false + } + } + return true + } + + // For single capabilities, check if any module contains it + for _, cap := range c.Modules { + if cap.Contains(other) { + return true + } + } + return false +} + +// String returns string representation +func (c *CrossModuleCapability) String() string { + var moduleStrs []string + for module, cap := range c.Modules { + moduleStrs = append(moduleStrs, fmt.Sprintf("%s:%s", module, cap.String())) + } + return strings.Join(moduleStrs, ";") +} + +// Gasless Transaction Support + +// GaslessCapability wraps other capabilities with gasless transaction support +type GaslessCapability struct { + Capability + AllowGasless bool `json:"allow_gasless"` + GasLimit uint64 `json:"gas_limit,omitempty"` +} + +// SupportsGasless returns whether this capability supports gasless transactions +func (c *GaslessCapability) SupportsGasless() bool { + return c.AllowGasless +} + +// GetGasLimit returns the gas limit for gasless transactions +func (c *GaslessCapability) GetGasLimit() uint64 { + return c.GasLimit +} diff --git a/ucan/crypto.go b/ucan/crypto.go new file mode 100644 index 0000000..f69b3b3 --- /dev/null +++ b/ucan/crypto.go @@ -0,0 +1,352 @@ +// Package ucan provides User-Controlled Authorization Networks (UCAN) implementation +// for decentralized authorization and capability delegation in the Sonr network. +// This package handles JWT-based tokens, cryptographic verification, and resource capabilities. 
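+//
+// Illustrative sketch of verifying a token signature with the helpers below
+// (rawToken and verifyKey are caller-supplied placeholders):
+//
+//	sv := NewSigningValidator()
+//	token, err := sv.ValidateTokenSignature(rawToken, func(t *jwt.Token) (any, error) {
+//		return verifyKey, nil
+//	})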
+package ucan + +import ( + "crypto" + "crypto/ed25519" + "crypto/rsa" + "crypto/sha256" + "crypto/sha512" + "encoding/base64" + "fmt" + "hash" + "strings" + + "github.com/golang-jwt/jwt/v5" +) + +// SupportedSigningMethods returns the list of supported JWT signing methods for UCAN +func SupportedSigningMethods() []jwt.SigningMethod { + return []jwt.SigningMethod{ + jwt.SigningMethodRS256, + jwt.SigningMethodRS384, + jwt.SigningMethodRS512, + jwt.SigningMethodEdDSA, + } +} + +// ValidateSignature validates the cryptographic signature of a UCAN token +func ValidateSignature(tokenString string, verifyKey any) error { + // Parse token without verification first to get signing method + token, err := jwt.ParseWithClaims( + tokenString, + jwt.MapClaims{}, + func(token *jwt.Token) (any, error) { + return verifyKey, nil + }, + ) + if err != nil { + return fmt.Errorf("signature validation failed: %w", err) + } + + if !token.Valid { + return fmt.Errorf("token signature is invalid") + } + + return nil +} + +// ExtractUnsignedToken extracts the unsigned portion of a JWT token (header + payload) +func ExtractUnsignedToken(tokenString string) (string, error) { + parts := strings.Split(tokenString, ".") + if len(parts) != 3 { + return "", fmt.Errorf("invalid JWT format: expected 3 parts, got %d", len(parts)) + } + + return strings.Join(parts[:2], "."), nil +} + +// ExtractSignature extracts the signature portion of a JWT token +func ExtractSignature(tokenString string) ([]byte, error) { + parts := strings.Split(tokenString, ".") + if len(parts) != 3 { + return nil, fmt.Errorf("invalid JWT format: expected 3 parts, got %d", len(parts)) + } + + signatureBytes, err := base64.RawURLEncoding.DecodeString(parts[2]) + if err != nil { + return nil, fmt.Errorf("failed to decode signature: %w", err) + } + + return signatureBytes, nil +} + +// VerifyRSASignature verifies an RSA signature using the specified hash algorithm +func VerifyRSASignature( + signingString string, + signature []byte, + publicKey *rsa.PublicKey, + hashAlg crypto.Hash, +) error { + // Create hash of signing string + hasher := hashAlg.New() + hasher.Write([]byte(signingString)) + hashed := hasher.Sum(nil) + + // Verify signature + err := rsa.VerifyPKCS1v15(publicKey, hashAlg, hashed, signature) + if err != nil { + return fmt.Errorf("RSA signature verification failed: %w", err) + } + + return nil +} + +// VerifyEd25519Signature verifies an Ed25519 signature +func VerifyEd25519Signature( + signingString string, + signature []byte, + publicKey ed25519.PublicKey, +) error { + valid := ed25519.Verify(publicKey, []byte(signingString), signature) + if !valid { + return fmt.Errorf("Ed25519 signature verification failed") + } + + return nil +} + +// GetHashAlgorithmForMethod returns the appropriate hash algorithm for a JWT signing method +func GetHashAlgorithmForMethod(method jwt.SigningMethod) (crypto.Hash, error) { + switch method { + case jwt.SigningMethodRS256: + return crypto.SHA256, nil + case jwt.SigningMethodRS384: + return crypto.SHA384, nil + case jwt.SigningMethodRS512: + return crypto.SHA512, nil + case jwt.SigningMethodEdDSA: + // Ed25519 doesn't use a separate hash algorithm + return crypto.Hash(0), nil + default: + return crypto.Hash(0), fmt.Errorf("unsupported signing method: %v", method) + } +} + +// CreateHasher creates a hasher for the given crypto.Hash algorithm +func CreateHasher(hashAlg crypto.Hash) (hash.Hash, error) { + switch hashAlg { + case crypto.SHA256: + return sha256.New(), nil + case crypto.SHA384: + return 
sha512.New384(), nil + case crypto.SHA512: + return sha512.New(), nil + default: + return nil, fmt.Errorf("unsupported hash algorithm: %v", hashAlg) + } +} + +// SigningValidator provides cryptographic validation for UCAN tokens +type SigningValidator struct { + allowedMethods map[string]jwt.SigningMethod +} + +// NewSigningValidator creates a new signing validator with default allowed methods +func NewSigningValidator() *SigningValidator { + allowed := make(map[string]jwt.SigningMethod) + for _, method := range SupportedSigningMethods() { + allowed[method.Alg()] = method + } + + return &SigningValidator{ + allowedMethods: allowed, + } +} + +// NewSigningValidatorWithMethods creates a validator with specific allowed methods +func NewSigningValidatorWithMethods(methods []jwt.SigningMethod) *SigningValidator { + allowed := make(map[string]jwt.SigningMethod) + for _, method := range methods { + allowed[method.Alg()] = method + } + + return &SigningValidator{ + allowedMethods: allowed, + } +} + +// ValidateSigningMethod checks if a signing method is allowed +func (sv *SigningValidator) ValidateSigningMethod(method jwt.SigningMethod) error { + if _, ok := sv.allowedMethods[method.Alg()]; !ok { + return fmt.Errorf("signing method %s is not allowed", method.Alg()) + } + return nil +} + +// ValidateTokenSignature validates the cryptographic signature of a token +func (sv *SigningValidator) ValidateTokenSignature( + tokenString string, + keyFunc jwt.Keyfunc, +) (*jwt.Token, error) { + // Parse with validation + token, err := jwt.Parse(tokenString, keyFunc, jwt.WithValidMethods(sv.getAllowedMethodNames())) + if err != nil { + return nil, fmt.Errorf("token signature validation failed: %w", err) + } + + // Additional signing method validation + if err := sv.ValidateSigningMethod(token.Method); err != nil { + return nil, err + } + + return token, nil +} + +// getAllowedMethodNames returns the names of allowed signing methods +func (sv *SigningValidator) getAllowedMethodNames() []string { + methods := make([]string, 0, len(sv.allowedMethods)) + for name := range sv.allowedMethods { + methods = append(methods, name) + } + return methods +} + +// KeyValidator provides validation for cryptographic keys +type KeyValidator struct{} + +// NewKeyValidator creates a new key validator +func NewKeyValidator() *KeyValidator { + return &KeyValidator{} +} + +// ValidateRSAPublicKey validates an RSA public key for UCAN usage +func (kv *KeyValidator) ValidateRSAPublicKey(key *rsa.PublicKey) error { + if key == nil { + return fmt.Errorf("RSA public key is nil") + } + + // Check minimum key size (2048 bits recommended for security) + keySize := key.N.BitLen() + if keySize < 2048 { + return fmt.Errorf("RSA key size too small: %d bits (minimum 2048 bits required)", keySize) + } + + // Check maximum reasonable key size to prevent DoS + if keySize > 8192 { + return fmt.Errorf("RSA key size too large: %d bits (maximum 8192 bits allowed)", keySize) + } + + return nil +} + +// ValidateEd25519PublicKey validates an Ed25519 public key for UCAN usage +func (kv *KeyValidator) ValidateEd25519PublicKey(key ed25519.PublicKey) error { + if key == nil { + return fmt.Errorf("Ed25519 public key is nil") + } + + if len(key) != ed25519.PublicKeySize { + return fmt.Errorf( + "invalid Ed25519 public key size: %d bytes (expected %d)", + len(key), + ed25519.PublicKeySize, + ) + } + + return nil +} + +// SignatureInfo contains information about a token's signature +type SignatureInfo struct { + Algorithm string + KeyType string + SigningString 
string + Signature []byte + Valid bool +} + +// ExtractSignatureInfo extracts signature information from a JWT token +func ExtractSignatureInfo(tokenString string, verifyKey any) (*SignatureInfo, error) { + // Parse token to get method and claims + token, err := jwt.Parse(tokenString, func(t *jwt.Token) (any, error) { + return verifyKey, nil + }) + + var sigInfo SignatureInfo + sigInfo.Valid = (err == nil && token.Valid) + + if token != nil { + sigInfo.Algorithm = token.Method.Alg() + + // Get signing string + parts := strings.Split(tokenString, ".") + if len(parts) >= 2 { + sigInfo.SigningString = strings.Join(parts[:2], ".") + } + + // Get signature + if len(parts) == 3 { + sig, decodeErr := base64.RawURLEncoding.DecodeString(parts[2]) + if decodeErr == nil { + sigInfo.Signature = sig + } + } + + // Determine key type + switch verifyKey.(type) { + case *rsa.PublicKey: + sigInfo.KeyType = "RSA" + case ed25519.PublicKey: + sigInfo.KeyType = "Ed25519" + default: + sigInfo.KeyType = "Unknown" + } + } + + return &sigInfo, err +} + +// SecurityConfig contains security configuration for UCAN validation +type SecurityConfig struct { + AllowedSigningMethods []jwt.SigningMethod + MinRSAKeySize int + MaxRSAKeySize int + RequireSecureAlgs bool +} + +// DefaultSecurityConfig returns a secure default configuration +func DefaultSecurityConfig() *SecurityConfig { + return &SecurityConfig{ + AllowedSigningMethods: SupportedSigningMethods(), + MinRSAKeySize: 2048, + MaxRSAKeySize: 8192, + RequireSecureAlgs: true, + } +} + +// RestrictiveSecurityConfig returns a more restrictive configuration +func RestrictiveSecurityConfig() *SecurityConfig { + return &SecurityConfig{ + AllowedSigningMethods: []jwt.SigningMethod{ + jwt.SigningMethodRS256, // Only RS256 and EdDSA + jwt.SigningMethodEdDSA, + }, + MinRSAKeySize: 3072, // Higher minimum + MaxRSAKeySize: 4096, // Lower maximum + RequireSecureAlgs: true, + } +} + +// ValidateSecurityConfig validates that a security configuration is reasonable +func ValidateSecurityConfig(config *SecurityConfig) error { + if len(config.AllowedSigningMethods) == 0 { + return fmt.Errorf("no signing methods allowed") + } + + if config.MinRSAKeySize < 1024 { + return fmt.Errorf("minimum RSA key size too small: %d", config.MinRSAKeySize) + } + + if config.MaxRSAKeySize < config.MinRSAKeySize { + return fmt.Errorf("maximum RSA key size smaller than minimum") + } + + if config.MaxRSAKeySize > 16384 { + return fmt.Errorf("maximum RSA key size too large: %d", config.MaxRSAKeySize) + } + + return nil +} diff --git a/ucan/jwt.go b/ucan/jwt.go new file mode 100644 index 0000000..95ce860 --- /dev/null +++ b/ucan/jwt.go @@ -0,0 +1,595 @@ +package ucan + +import ( + "encoding/base64" + "encoding/json" + "fmt" + "time" + + "github.com/golang-jwt/jwt/v5" +) + +var ( + // StandardTemplate provides default authorization template + StandardTemplate = NewCapabilityTemplate() + + // Revoked tokens tracking + revokedTokens = make(map[string]bool) +) + +func init() { + // Setup standard templates with module-specific capabilities + StandardTemplate.AddAllowedActions( + "vault", + []string{"read", "write", "sign", "export", "import", "delete", "*"}, + ) + StandardTemplate.AddAllowedActions( + "service", + []string{"read", "write", "register", "update", "delete"}, + ) + StandardTemplate.AddAllowedActions( + "did", + []string{ + "create", "register", "update", "deactivate", "revoke", + "add-verification-method", "remove-verification-method", + "add-service", "remove-service", "issue-credential", + 
"revoke-credential", "link-wallet", "register-webauthn", "*", + }, + ) + StandardTemplate.AddAllowedActions( + "dwn", + []string{ + "records-write", "records-delete", "protocols-configure", + "permissions-grant", "permissions-revoke", "create", "read", + "update", "delete", "*", + }, + ) + StandardTemplate.AddAllowedActions( + "dex", + []string{ + "register-account", "swap", "provide-liquidity", "remove-liquidity", + "create-limit-order", "cancel-order", "*", + }, + ) + StandardTemplate.AddAllowedActions( + "pool", + []string{"swap", "provide-liquidity", "remove-liquidity", "*"}, + ) + StandardTemplate.AddAllowedActions( + "svc", + []string{"register", "verify-domain", "delegate", "*"}, + ) +} + +// GenerateJWTToken creates a UCAN JWT token with given capability and expiration +func GenerateJWTToken(attenuation Attenuation, duration time.Duration) (string, error) { + // Default expiration handling + if duration == 0 { + duration = 24 * time.Hour + } + + // Create JWT claims + claims := jwt.MapClaims{ + "iss": "did:sonr:local", // Default issuer + "exp": time.Now().Add(duration).Unix(), + "iat": time.Now().Unix(), + } + + // Add capability to claims - separate resource and capability + capabilityBytes, err := json.Marshal(map[string]any{ + "can": attenuation.Capability, + "with": attenuation.Resource, + }) + if err != nil { + return "", fmt.Errorf("failed to serialize capability: %v", err) + } + claims["can"] = base64.URLEncoding.EncodeToString(capabilityBytes) + + // Create token + token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims) + + // Dummy secret for signing - in real-world, use proper key management + tokenString, err := token.SignedString([]byte("sonr-ucan-secret")) + if err != nil { + return "", fmt.Errorf("failed to sign token: %v", err) + } + + return tokenString, nil +} + +// VerifyJWTToken validates and parses a UCAN JWT token +func VerifyJWTToken(tokenString string) (*Token, error) { + // Check if token is revoked + if revokedTokens[tokenString] { + return nil, fmt.Errorf("token has been revoked") + } + + // Parse token with custom claims + token, err := jwt.Parse(tokenString, func(token *jwt.Token) (any, error) { + // Dummy secret verification - replace with proper key validation + return []byte("sonr-ucan-secret"), nil + }, jwt.WithLeeway(5*time.Minute)) + if err != nil { + return nil, fmt.Errorf("token parsing failed: %v", err) + } + + // Extract claims + claims, ok := token.Claims.(jwt.MapClaims) + if !ok { + return nil, fmt.Errorf("invalid token claims") + } + + // Manual expiration check + exp, ok := claims["exp"].(float64) + if !ok { + return nil, fmt.Errorf("no expiration time found") + } + if time.Now().Unix() > int64(exp) { + return nil, fmt.Errorf("token has expired") + } + + // Decode capability + capabilityStr, ok := claims["can"].(string) + if !ok { + return nil, fmt.Errorf("no capability found in token") + } + + capabilityBytes, err := base64.URLEncoding.DecodeString(capabilityStr) + if err != nil { + return nil, fmt.Errorf("failed to decode capability: %v", err) + } + + // Parse capability and resource separately + var capabilityMap map[string]any + err = json.Unmarshal(capabilityBytes, &capabilityMap) + if err != nil { + return nil, fmt.Errorf("failed to parse capability: %v", err) + } + + // Determine capability type + var capability Capability + var capData map[string]any + switch v := capabilityMap["can"].(type) { + case map[string]any: + capData = v + case string: + // If it's a string, assume it's a simple action + capability = 
&SimpleCapability{Action: v} + capData = nil + default: + return nil, fmt.Errorf("invalid capability structure") + } + + // Parse capability if needed + if capData != nil { + // Attempt to infer capability type + if actions, ok := capData["actions"].([]any); ok { + // MultiCapability + stringActions := make([]string, len(actions)) + for i, action := range actions { + if str, ok := action.(string); ok { + stringActions[i] = str + } + } + capability = &MultiCapability{Actions: stringActions} + } else if action, ok := capData["action"].(string); ok { + // SingleCapability + capability = &SimpleCapability{Action: action} + } else { + return nil, fmt.Errorf("unable to parse capability type") + } + } + + // Parse resource + var resourceData map[string]any + switch resource := capabilityMap["with"].(type) { + case map[string]any: + resourceData = resource + case string: + // If it's a string, assume it's a simple URI + resourceData = map[string]any{ + "Scheme": "generic", + "Value": resource, + "URI": resource, + } + default: + return nil, fmt.Errorf("invalid resource structure") + } + + // Create resource based on scheme + scheme, _ := resourceData["Scheme"].(string) + value, _ := resourceData["Value"].(string) + uri, _ := resourceData["URI"].(string) + + resource := &SimpleResource{ + Scheme: scheme, + Value: value, + URI: uri, + } + + // Validate attenuation + attenuation := Attenuation{ + Capability: capability, + Resource: resource, + } + + // Use standard template to validate + err = StandardTemplate.ValidateAttenuation(attenuation) + if err != nil { + return nil, fmt.Errorf("capability validation failed: %v", err) + } + + // Construct Token object + parsedToken := &Token{ + Raw: tokenString, + Issuer: claims["iss"].(string), + ExpiresAt: int64(exp), + Attenuations: []Attenuation{attenuation}, + } + + return parsedToken, nil +} + +// RevokeCapability adds a capability to the revocation list +func RevokeCapability(attenuation Attenuation) error { + // Generate token to get its string representation + token, err := GenerateJWTToken(attenuation, time.Hour) + if err != nil { + return err + } + + // Add to revoked tokens + revokedTokens[token] = true + return nil +} + +// NewCapability is a helper function to create a basic capability +func NewCapability(issuer, resource string, abilities []string) (Attenuation, error) { + capability := &MultiCapability{Actions: abilities} + resourceObj := &SimpleResource{ + Scheme: "generic", + Value: resource, + URI: resource, + } + + return Attenuation{ + Capability: capability, + Resource: resourceObj, + }, nil +} + +// Enhanced JWT generation functions for module-specific capabilities + +// GenerateModuleJWTToken creates a UCAN JWT token with module-specific capabilities +func GenerateModuleJWTToken(attenuations []Attenuation, issuer, audience string, duration time.Duration) (string, error) { + if duration == 0 { + duration = 24 * time.Hour + } + + // Create JWT claims with enhanced structure + claims := jwt.MapClaims{ + "iss": issuer, + "aud": audience, + "exp": time.Now().Add(duration).Unix(), + "iat": time.Now().Unix(), + "nbf": time.Now().Unix(), + } + + // Add attenuations to claims with module-specific serialization + attClaims := make([]map[string]any, len(attenuations)) + for i, att := range attenuations { + attMap, err := serializeModuleAttenuation(att) + if err != nil { + return "", fmt.Errorf("failed to serialize attenuation %d: %w", i, err) + } + attClaims[i] = attMap + } + claims["att"] = attClaims + + // Create and sign token + token := 
jwt.NewWithClaims(jwt.SigningMethodHS256, claims) + tokenString, err := token.SignedString([]byte("sonr-ucan-secret")) + if err != nil { + return "", fmt.Errorf("failed to sign token: %w", err) + } + + return tokenString, nil +} + +// serializeModuleAttenuation serializes an attenuation based on its module type +func serializeModuleAttenuation(att Attenuation) (map[string]any, error) { + attMap := map[string]any{ + "with": att.Resource.GetURI(), + } + + scheme := att.Resource.GetScheme() + switch scheme { + case "did": + return serializeDIDAttenuation(att, attMap) + case "dwn": + return serializeDWNAttenuation(att, attMap) + case "dex", "pool": + return serializeDEXAttenuation(att, attMap) + case "service", "svc": + return serializeServiceAttenuation(att, attMap) + case "vault", "ipfs": + return serializeVaultAttenuation(att, attMap) + default: + return serializeGenericAttenuation(att, attMap) + } +} + +// serializeDIDAttenuation serializes DID-specific attenuations +func serializeDIDAttenuation(att Attenuation, attMap map[string]any) (map[string]any, error) { + didCap, ok := att.Capability.(*DIDCapability) + if !ok { + return serializeGenericAttenuation(att, attMap) + } + + if didCap.Action != "" { + attMap["can"] = didCap.Action + } else { + attMap["can"] = didCap.Actions + } + + if len(didCap.Caveats) > 0 { + attMap["caveats"] = didCap.Caveats + } + if len(didCap.Metadata) > 0 { + attMap["metadata"] = didCap.Metadata + } + + return attMap, nil +} + +// serializeDWNAttenuation serializes DWN-specific attenuations +func serializeDWNAttenuation(att Attenuation, attMap map[string]any) (map[string]any, error) { + dwnCap, ok := att.Capability.(*DWNCapability) + if !ok { + return serializeGenericAttenuation(att, attMap) + } + + if dwnCap.Action != "" { + attMap["can"] = dwnCap.Action + } else { + attMap["can"] = dwnCap.Actions + } + + if len(dwnCap.Caveats) > 0 { + attMap["caveats"] = dwnCap.Caveats + } + if len(dwnCap.Metadata) > 0 { + attMap["metadata"] = dwnCap.Metadata + } + + // Add DWN-specific fields + if dwnRes, ok := att.Resource.(*DWNResource); ok { + if dwnRes.RecordType != "" { + attMap["record_type"] = dwnRes.RecordType + } + if dwnRes.Protocol != "" { + attMap["protocol"] = dwnRes.Protocol + } + if dwnRes.Owner != "" { + attMap["owner"] = dwnRes.Owner + } + } + + return attMap, nil +} + +// serializeDEXAttenuation serializes DEX-specific attenuations +func serializeDEXAttenuation(att Attenuation, attMap map[string]any) (map[string]any, error) { + dexCap, ok := att.Capability.(*DEXCapability) + if !ok { + return serializeGenericAttenuation(att, attMap) + } + + if dexCap.Action != "" { + attMap["can"] = dexCap.Action + } else { + attMap["can"] = dexCap.Actions + } + + if len(dexCap.Caveats) > 0 { + attMap["caveats"] = dexCap.Caveats + } + if dexCap.MaxAmount != "" { + attMap["max_amount"] = dexCap.MaxAmount + } + if len(dexCap.Metadata) > 0 { + attMap["metadata"] = dexCap.Metadata + } + + // Add DEX-specific fields + if dexRes, ok := att.Resource.(*DEXResource); ok { + if dexRes.PoolID != "" { + attMap["pool_id"] = dexRes.PoolID + } + if dexRes.AssetPair != "" { + attMap["asset_pair"] = dexRes.AssetPair + } + if dexRes.OrderID != "" { + attMap["order_id"] = dexRes.OrderID + } + } + + return attMap, nil +} + +// serializeServiceAttenuation serializes Service-specific attenuations +func serializeServiceAttenuation(att Attenuation, attMap map[string]any) (map[string]any, error) { + // Service capabilities still use MultiCapability + multiCap, ok := att.Capability.(*MultiCapability) + 
if !ok { + return serializeGenericAttenuation(att, attMap) + } + + attMap["can"] = multiCap.Actions + + // Add service-specific fields + if svcRes, ok := att.Resource.(*ServiceResource); ok { + if svcRes.ServiceID != "" { + attMap["service_id"] = svcRes.ServiceID + } + if svcRes.Domain != "" { + attMap["domain"] = svcRes.Domain + } + if len(svcRes.Metadata) > 0 { + attMap["metadata"] = svcRes.Metadata + } + } + + return attMap, nil +} + +// serializeVaultAttenuation serializes Vault-specific attenuations +func serializeVaultAttenuation(att Attenuation, attMap map[string]any) (map[string]any, error) { + vaultCap, ok := att.Capability.(*VaultCapability) + if !ok { + return serializeGenericAttenuation(att, attMap) + } + + if vaultCap.Action != "" { + attMap["can"] = vaultCap.Action + } else { + attMap["can"] = vaultCap.Actions + } + + if vaultCap.VaultAddress != "" { + attMap["vault"] = vaultCap.VaultAddress + } + if len(vaultCap.Caveats) > 0 { + attMap["caveats"] = vaultCap.Caveats + } + if vaultCap.EnclaveDataCID != "" { + attMap["enclave_data_cid"] = vaultCap.EnclaveDataCID + } + if len(vaultCap.Metadata) > 0 { + attMap["metadata"] = vaultCap.Metadata + } + + return attMap, nil +} + +// serializeGenericAttenuation serializes generic attenuations +func serializeGenericAttenuation(att Attenuation, attMap map[string]any) (map[string]any, error) { + actions := att.Capability.GetActions() + if len(actions) == 1 { + attMap["can"] = actions[0] + } else { + attMap["can"] = actions + } + return attMap, nil +} + +// Enhanced verification with module-specific support + +// VerifyModuleJWTToken validates and parses a UCAN JWT token with module-specific capabilities +func VerifyModuleJWTToken(tokenString string, expectedIssuer, expectedAudience string) (*Token, error) { + // Check if token is revoked + if revokedTokens[tokenString] { + return nil, fmt.Errorf("token has been revoked") + } + + // Parse token with custom claims + token, err := jwt.Parse(tokenString, func(token *jwt.Token) (any, error) { + // Dummy secret verification - replace with proper key validation + return []byte("sonr-ucan-secret"), nil + }, jwt.WithLeeway(5*time.Minute)) + if err != nil { + return nil, fmt.Errorf("token parsing failed: %w", err) + } + + // Extract claims + claims, ok := token.Claims.(jwt.MapClaims) + if !ok { + return nil, fmt.Errorf("invalid token claims") + } + + // Validate issuer and audience if provided + if expectedIssuer != "" { + if iss, ok := claims["iss"].(string); !ok || iss != expectedIssuer { + return nil, fmt.Errorf("invalid issuer: expected %s", expectedIssuer) + } + } + if expectedAudience != "" { + if aud, ok := claims["aud"].(string); !ok || aud != expectedAudience { + return nil, fmt.Errorf("invalid audience: expected %s", expectedAudience) + } + } + + // Manual expiration check + exp, ok := claims["exp"].(float64) + if !ok { + return nil, fmt.Errorf("no expiration time found") + } + if time.Now().Unix() > int64(exp) { + return nil, fmt.Errorf("token has expired") + } + + // Parse attenuations with module-specific support + attenuations, err := parseEnhancedAttenuations(claims) + if err != nil { + return nil, fmt.Errorf("failed to parse attenuations: %w", err) + } + + // Validate attenuations against templates + for _, att := range attenuations { + if err := StandardTemplate.ValidateAttenuation(att); err != nil { + return nil, fmt.Errorf("capability validation failed: %w", err) + } + } + + // Construct Token object + issuer, _ := claims["iss"].(string) + audience, _ := claims["aud"].(string) + 
nbf, _ := claims["nbf"].(float64) + + parsedToken := &Token{ + Raw: tokenString, + Issuer: issuer, + Audience: audience, + ExpiresAt: int64(exp), + NotBefore: int64(nbf), + Attenuations: attenuations, + } + + return parsedToken, nil +} + +// parseEnhancedAttenuations parses attenuations with module-specific capabilities +func parseEnhancedAttenuations(claims jwt.MapClaims) ([]Attenuation, error) { + attClaims, ok := claims["att"] + if !ok { + return nil, fmt.Errorf("no attenuations found in token") + } + + attSlice, ok := attClaims.([]any) + if !ok { + return nil, fmt.Errorf("invalid attenuations format") + } + + attenuations := make([]Attenuation, 0, len(attSlice)) + for i, attItem := range attSlice { + attMap, ok := attItem.(map[string]any) + if !ok { + return nil, fmt.Errorf("invalid attenuation %d format", i) + } + + att, err := parseEnhancedAttenuation(attMap) + if err != nil { + return nil, fmt.Errorf("failed to parse attenuation %d: %w", i, err) + } + attenuations = append(attenuations, att) + } + + return attenuations, nil +} + +// parseEnhancedAttenuation parses a single attenuation with module-specific support +func parseEnhancedAttenuation(attMap map[string]any) (Attenuation, error) { + // Use the existing enhanced verifier logic + verifier := &Verifier{} // Create temporary verifier for parsing + return verifier.parseAttenuation(attMap) +} diff --git a/ucan/mpc.go b/ucan/mpc.go new file mode 100644 index 0000000..947a7e0 --- /dev/null +++ b/ucan/mpc.go @@ -0,0 +1,625 @@ +// Package ucan provides User-Controlled Authorization Networks (UCAN) implementation +// for decentralized authorization and capability delegation in the Sonr network. +// This package handles JWT-based tokens, cryptographic verification, and resource capabilities. +package ucan + +import ( + "context" + "crypto/sha256" + "fmt" + "time" + + "github.com/golang-jwt/jwt/v5" + "github.com/ipfs/go-cid" + "github.com/multiformats/go-multihash" + "github.com/sonr-io/sonr/crypto/keys" + "github.com/sonr-io/sonr/crypto/mpc" +) + +// MPCSigningMethod implements JWT signing using MPC enclaves +type MPCSigningMethod struct { + Name string + enclave mpc.Enclave +} + +// NewMPCSigningMethod creates a new MPC-based JWT signing method +func NewMPCSigningMethod(name string, enclave mpc.Enclave) *MPCSigningMethod { + return &MPCSigningMethod{ + Name: name, + enclave: enclave, + } +} + +// Alg returns the signing method algorithm name +func (m *MPCSigningMethod) Alg() string { + return m.Name +} + +// Verify verifies a JWT signature using the MPC enclave +func (m *MPCSigningMethod) Verify(signingString string, signature []byte, key any) error { + // signature is already decoded bytes + sig := signature + + // Hash the signing string + hasher := sha256.New() + hasher.Write([]byte(signingString)) + digest := hasher.Sum(nil) + + // Use MPC enclave to verify signature + valid, err := m.enclave.Verify(digest, sig) + if err != nil { + return fmt.Errorf("failed to verify signature: %w", err) + } + + if !valid { + return fmt.Errorf("signature verification failed") + } + + return nil +} + +// Sign signs a JWT string using the MPC enclave +func (m *MPCSigningMethod) Sign(signingString string, key any) ([]byte, error) { + // Hash the signing string + hasher := sha256.New() + hasher.Write([]byte(signingString)) + digest := hasher.Sum(nil) + + // Use MPC enclave to sign the digest + sig, err := m.enclave.Sign(digest) + if err != nil { + return nil, fmt.Errorf("failed to sign with MPC: %w", err) + } + + return sig, nil +} + +// 
MPCTokenBuilder creates UCAN tokens using MPC signing +type MPCTokenBuilder struct { + enclave mpc.Enclave + issuerDID string + address string + signingMethod *MPCSigningMethod +} + +// NewMPCTokenBuilder creates a new MPC-based UCAN token builder +func NewMPCTokenBuilder(enclave mpc.Enclave) (*MPCTokenBuilder, error) { + if !enclave.IsValid() { + return nil, fmt.Errorf("invalid MPC enclave provided") + } + + // Derive issuer DID and address from enclave public key + pubKeyBytes := enclave.PubKeyBytes() + issuerDID, address := deriveIssuerDIDFromBytes(pubKeyBytes) + + signingMethod := NewMPCSigningMethod("MPC256", enclave) + + return &MPCTokenBuilder{ + enclave: enclave, + issuerDID: issuerDID, + address: address, + signingMethod: signingMethod, + }, nil +} + +// GetIssuerDID returns the issuer DID derived from the enclave +func (b *MPCTokenBuilder) GetIssuerDID() string { + return b.issuerDID +} + +// GetAddress returns the address derived from the enclave +func (b *MPCTokenBuilder) GetAddress() string { + return b.address +} + +// CreateOriginToken creates a new origin UCAN token using MPC signing +func (b *MPCTokenBuilder) CreateOriginToken( + audienceDID string, + attenuations []Attenuation, + facts []Fact, + notBefore, expiresAt time.Time, +) (*Token, error) { + return b.createToken(audienceDID, nil, attenuations, facts, notBefore, expiresAt) +} + +// CreateDelegatedToken creates a delegated UCAN token using MPC signing +func (b *MPCTokenBuilder) CreateDelegatedToken( + parent *Token, + audienceDID string, + attenuations []Attenuation, + facts []Fact, + notBefore, expiresAt time.Time, +) (*Token, error) { + proofs, err := prepareDelegationProofs(parent, attenuations) + if err != nil { + return nil, err + } + + return b.createToken(audienceDID, proofs, attenuations, facts, notBefore, expiresAt) +} + +// createToken creates a UCAN token with MPC signing +func (b *MPCTokenBuilder) createToken( + audienceDID string, + proofs []Proof, + attenuations []Attenuation, + facts []Fact, + notBefore, expiresAt time.Time, +) (*Token, error) { + // Validate inputs + if !isValidDID(audienceDID) { + return nil, fmt.Errorf("invalid audience DID format: %s", audienceDID) + } + if len(attenuations) == 0 { + return nil, fmt.Errorf("at least one attenuation is required") + } + + // Create JWT token with MPC signing method + token := jwt.New(b.signingMethod) + + // Set UCAN version in header + token.Header["ucv"] = "0.9.0" + + // Prepare time claims + var nbfUnix, expUnix int64 + if !notBefore.IsZero() { + nbfUnix = notBefore.Unix() + } + if !expiresAt.IsZero() { + expUnix = expiresAt.Unix() + } + + // Convert attenuations to claim format + attClaims := make([]map[string]any, len(attenuations)) + for i, att := range attenuations { + attClaims[i] = map[string]any{ + "can": att.Capability.GetActions(), + "with": att.Resource.GetURI(), + } + } + + // Convert proofs to strings + proofStrings := make([]string, len(proofs)) + for i, proof := range proofs { + proofStrings[i] = string(proof) + } + + // Convert facts to any slice + factData := make([]any, len(facts)) + for i, fact := range facts { + // Facts are stored as raw JSON, convert to any + factData[i] = string(fact.Data) + } + + // Set claims + claims := jwt.MapClaims{ + "iss": b.issuerDID, + "aud": audienceDID, + "att": attClaims, + } + + if nbfUnix > 0 { + claims["nbf"] = nbfUnix + } + if expUnix > 0 { + claims["exp"] = expUnix + } + if len(proofStrings) > 0 { + claims["prf"] = proofStrings + } + if len(factData) > 0 { + claims["fct"] = factData + } + + 
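// (Editorial note, not part of the diff.) At this point the claims map holds a
// UCAN 0.9-style payload assembled from the inputs above, roughly:
//
//   {
//     "iss": "did:sonr:<issuer>",        // derived from the enclave public key
//     "aud": "did:<audience>",           // the audienceDID argument
//     "att": [{"can": [...], "with": "<scheme>://<resource>"}],
//     "nbf": 1700000000,                 // only when notBefore was set
//     "exp": 1700086400,                 // only when expiresAt was set
//     "prf": ["<parent JWT>", ...],      // only for delegated tokens
//     "fct": ["<fact JSON>", ...]        // only when facts were supplied
//   }
//
// The "ucv" header set above records the UCAN version ("0.9.0"); the example
// timestamps are illustrative.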
token.Claims = claims + + // Sign the token using MPC enclave (key parameter is ignored for MPC signing) + tokenString, err := token.SignedString(nil) + if err != nil { + return nil, fmt.Errorf("failed to sign token with MPC: %w", err) + } + + return &Token{ + Raw: tokenString, + Issuer: b.issuerDID, + Audience: audienceDID, + ExpiresAt: expUnix, + NotBefore: nbfUnix, + Attenuations: attenuations, + Proofs: proofs, + Facts: facts, + }, nil +} + +// CreateVaultCapabilityToken creates a vault-specific UCAN token +func (b *MPCTokenBuilder) CreateVaultCapabilityToken( + audienceDID string, + vaultAddress string, + enclaveDataCID string, + actions []string, + expiresAt time.Time, +) (*Token, error) { + // Create vault-specific attenuation + attenuation := CreateVaultAttenuation(actions, enclaveDataCID, vaultAddress) + + return b.CreateOriginToken( + audienceDID, + []Attenuation{attenuation}, + nil, + time.Time{}, // No not-before restriction + expiresAt, + ) +} + +// MPCDIDResolver resolves DIDs with special handling for MPC-derived DIDs +type MPCDIDResolver struct { + enclave mpc.Enclave + issuerDID string + fallback DIDResolver +} + +// NewMPCDIDResolver creates a new MPC DID resolver +func NewMPCDIDResolver(enclave mpc.Enclave, fallback DIDResolver) *MPCDIDResolver { + pubKeyBytes := enclave.PubKeyBytes() + issuerDID, _ := deriveIssuerDIDFromBytes(pubKeyBytes) + + return &MPCDIDResolver{ + enclave: enclave, + issuerDID: issuerDID, + fallback: fallback, + } +} + +// ResolveDIDKey resolves DID keys with MPC enclave support +func (r *MPCDIDResolver) ResolveDIDKey(ctx context.Context, didStr string) (keys.DID, error) { + // Check if this is the MPC-derived DID + if didStr == r.issuerDID { + return r.createDIDFromEnclave() + } + + // Fall back to standard DID resolution + if r.fallback != nil { + return r.fallback.ResolveDIDKey(ctx, didStr) + } + + // Default fallback to string parsing + return keys.Parse(didStr) +} + +// createDIDFromEnclave creates a DID from the MPC enclave's public key +func (r *MPCDIDResolver) createDIDFromEnclave() (keys.DID, error) { + // This would need to be implemented based on how MPC public keys + // are converted to the keys.DID format + // For now, parse from the derived DID string + return keys.Parse(r.issuerDID) +} + +// MPCVerifier provides UCAN verification with MPC support +type MPCVerifier struct { + *Verifier + enclave mpc.Enclave +} + +// NewMPCVerifier creates a UCAN verifier with MPC support +func NewMPCVerifier(enclave mpc.Enclave) *MPCVerifier { + resolver := NewMPCDIDResolver(enclave, StringDIDResolver{}) + verifier := NewVerifier(resolver) + + return &MPCVerifier{ + Verifier: verifier, + enclave: enclave, + } +} + +// VerifyMPCToken verifies a UCAN token that may be signed with MPC +func (v *MPCVerifier) VerifyMPCToken(ctx context.Context, tokenString string) (*Token, error) { + // Try standard verification first + token, err := v.VerifyToken(ctx, tokenString) + if err == nil { + return token, nil + } + + // If standard verification fails, try MPC-specific verification + return v.verifyWithMPC(ctx, tokenString) +} + +// verifyWithMPC attempts to verify using MPC signing method +func (v *MPCVerifier) verifyWithMPC(_ context.Context, tokenString string) (*Token, error) { + // Create MPC signing method for verification + mpcMethod := NewMPCSigningMethod("MPC256", v.enclave) + + // Parse with MPC method + token, err := jwt.Parse(tokenString, func(token *jwt.Token) (any, error) { + // Ensure the token uses MPC signing method + if token.Method.Alg() != 
mpcMethod.Alg() { + return nil, fmt.Errorf("unexpected signing method: %v", token.Method) + } + // For MPC verification, the key is not used + return nil, nil + }) + if err != nil { + return nil, fmt.Errorf("MPC token verification failed: %w", err) + } + + // Extract and parse claims + claims, ok := token.Claims.(jwt.MapClaims) + if !ok { + return nil, fmt.Errorf("invalid token claims type") + } + + ucanToken, err := v.parseUCANClaims(claims, tokenString) + if err != nil { + return nil, fmt.Errorf("failed to parse UCAN claims: %w", err) + } + + return ucanToken, nil +} + +// MPCTokenValidator provides comprehensive UCAN token validation with MPC support +type MPCTokenValidator struct { + *MPCVerifier + enclaveValidation bool +} + +// NewMPCTokenValidator creates a comprehensive UCAN token validator with MPC support +func NewMPCTokenValidator(enclave mpc.Enclave, enableEnclaveValidation bool) *MPCTokenValidator { + verifier := NewMPCVerifier(enclave) + return &MPCTokenValidator{ + MPCVerifier: verifier, + enclaveValidation: enableEnclaveValidation, + } +} + +// ValidateTokenForVaultOperation performs comprehensive validation for vault operations +func (v *MPCTokenValidator) ValidateTokenForVaultOperation( + ctx context.Context, + tokenString string, + enclaveDataCID string, + requiredAction string, + vaultAddress string, +) (*Token, error) { + // Step 1: Verify token signature and structure + token, err := v.VerifyMPCToken(ctx, tokenString) + if err != nil { + return nil, fmt.Errorf("token verification failed: %w", err) + } + + // Step 2: Validate vault-specific capability + if err := ValidateVaultTokenCapability(token, enclaveDataCID, requiredAction); err != nil { + return nil, fmt.Errorf("vault capability validation failed: %w", err) + } + + // Step 3: Validate enclave data CID if enabled + if v.enclaveValidation { + if err := v.validateEnclaveDataCID(token, enclaveDataCID); err != nil { + return nil, fmt.Errorf("enclave data validation failed: %w", err) + } + } + + // Step 4: Validate vault address if provided + if vaultAddress != "" { + if err := v.validateVaultAddress(token, vaultAddress); err != nil { + return nil, fmt.Errorf("vault address validation failed: %w", err) + } + } + + // Step 5: Verify delegation chain if proofs exist + if len(token.Proofs) > 0 { + if err := v.VerifyDelegationChain(ctx, tokenString); err != nil { + return nil, fmt.Errorf("delegation chain validation failed: %w", err) + } + } + + return token, nil +} + +// ValidateTokenForResource validates token capabilities for a specific resource +func (v *MPCTokenValidator) ValidateTokenForResource( + ctx context.Context, + tokenString string, + resourceURI string, + requiredAbilities []string, +) (*Token, error) { + token, err := v.VerifyCapability(ctx, tokenString, resourceURI, requiredAbilities) + if err != nil { + return nil, fmt.Errorf("capability verification failed: %w", err) + } + + // Additional MPC-specific validation + if v.enclaveValidation { + if err := v.validateMPCIssuer(token); err != nil { + return nil, fmt.Errorf("MPC issuer validation failed: %w", err) + } + } + + return token, nil +} + +// validateEnclaveDataCID validates that the token contains the expected enclave data CID +func (v *MPCTokenValidator) validateEnclaveDataCID(token *Token, expectedCID string) error { + tokenCID, err := GetEnclaveDataCID(token) + if err != nil { + return fmt.Errorf("failed to extract enclave data CID from token: %w", err) + } + + if tokenCID != expectedCID { + return fmt.Errorf("enclave data CID mismatch: token=%s, 
expected=%s", tokenCID, expectedCID) + } + + return nil +} + +// validateVaultAddress validates the vault address in token capabilities +func (v *MPCTokenValidator) validateVaultAddress(token *Token, expectedAddress string) error { + for _, att := range token.Attenuations { + if vaultCap, ok := att.Capability.(*VaultCapability); ok { + if vaultCap.VaultAddress != "" && vaultCap.VaultAddress != expectedAddress { + return fmt.Errorf("vault address mismatch: token=%s, expected=%s", + vaultCap.VaultAddress, expectedAddress) + } + } + } + return nil +} + +// validateMPCIssuer validates that the token issuer matches the MPC enclave +func (v *MPCTokenValidator) validateMPCIssuer(token *Token) error { + expectedIssuer, _ := deriveIssuerDIDFromBytes(v.enclave.PubKeyBytes()) + + if token.Issuer != expectedIssuer { + return fmt.Errorf("token issuer does not match MPC enclave: token=%s, expected=%s", + token.Issuer, expectedIssuer) + } + + return nil +} + +// createMPCVaultAttenuation creates MPC-specific vault attenuations +func createMPCVaultAttenuation(actions []string, enclaveDataCID, vaultAddress string) Attenuation { + // Use the existing CreateVaultAttenuation function but add MPC-specific validation + return CreateVaultAttenuation(actions, enclaveDataCID, vaultAddress) +} + +// containsAdminAction checks if actions contain admin-level permissions +func containsAdminAction(actions []string) bool { + adminActions := map[string]bool{ + "admin": true, "export": true, "import": true, "delete": true, + } + + for _, action := range actions { + if adminActions[action] { + return true + } + } + return false +} + +// ValidateEnclaveDataIntegrity validates enclave data against IPFS CID +func ValidateEnclaveDataIntegrity(enclaveData *mpc.EnclaveData, expectedCID string) error { + if enclaveData == nil { + return fmt.Errorf("enclave data cannot be nil") + } + + // Basic validation of enclave structure + if len(enclaveData.PubBytes) == 0 { + return fmt.Errorf("enclave public key bytes cannot be empty") + } + + if enclaveData.PubHex == "" { + return fmt.Errorf("enclave public key hex cannot be empty") + } + + // Implement IPFS CID validation against enclave data hash + // Serialize the enclave data for consistent hashing + enclaveDataBytes, err := enclaveData.Marshal() + if err != nil { + return fmt.Errorf("failed to marshal enclave data: %w", err) + } + + // 1. Hash the enclave data using SHA-256 + hasher := sha256.New() + hasher.Write(enclaveDataBytes) + digest := hasher.Sum(nil) + + // 2. Create multihash with SHA-256 prefix + mhash, err := multihash.EncodeName(digest, "sha2-256") + if err != nil { + return fmt.Errorf("failed to create multihash: %w", err) + } + + // 3. 
Create CID and compare with expected + parsedExpectedCID, err := cid.Parse(expectedCID) + if err != nil { + return fmt.Errorf("failed to parse expected CID: %w", err) + } + + // Create CID v1 with dag-pb codec (IPFS default) + calculatedCID := cid.NewCidV1(cid.DagProtobuf, mhash) + + // Compare CIDs + if !parsedExpectedCID.Equals(calculatedCID) { + return fmt.Errorf( + "CID verification failed: expected %s, calculated %s", + parsedExpectedCID.String(), + calculatedCID.String(), + ) + } + + return nil +} + +// MPCCapabilityBuilder helps build MPC-specific capabilities +type MPCCapabilityBuilder struct { + enclave mpc.Enclave + builder *MPCTokenBuilder +} + +// NewMPCCapabilityBuilder creates a new MPC capability builder +func NewMPCCapabilityBuilder(enclave mpc.Enclave) (*MPCCapabilityBuilder, error) { + builder, err := NewMPCTokenBuilder(enclave) + if err != nil { + return nil, fmt.Errorf("failed to create MPC token builder: %w", err) + } + + return &MPCCapabilityBuilder{ + enclave: enclave, + builder: builder, + }, nil +} + +// CreateVaultAdminCapability creates admin-level vault capabilities +func (b *MPCCapabilityBuilder) CreateVaultAdminCapability( + vaultAddress, enclaveDataCID string, +) Attenuation { + allActions := []string{"read", "write", "sign", "export", "import", "delete", "admin"} + return CreateVaultAttenuation(allActions, enclaveDataCID, vaultAddress) +} + +// CreateVaultReadOnlyCapability creates read-only vault capabilities +func (b *MPCCapabilityBuilder) CreateVaultReadOnlyCapability( + vaultAddress, enclaveDataCID string, +) Attenuation { + readActions := []string{"read"} + return CreateVaultAttenuation(readActions, enclaveDataCID, vaultAddress) +} + +// CreateVaultSigningCapability creates signing-specific vault capabilities +func (b *MPCCapabilityBuilder) CreateVaultSigningCapability( + vaultAddress, enclaveDataCID string, +) Attenuation { + signActions := []string{"read", "sign"} + return CreateVaultAttenuation(signActions, enclaveDataCID, vaultAddress) +} + +// CreateCustomCapability creates a custom capability with specified actions +func (b *MPCCapabilityBuilder) CreateCustomCapability( + actions []string, + vaultAddress, enclaveDataCID string, +) Attenuation { + return CreateVaultAttenuation(actions, enclaveDataCID, vaultAddress) +} + +// Utility functions + +// deriveIssuerDIDFromBytes creates issuer DID and address from public key bytes +// Enhanced version using the crypto/keys package +func deriveIssuerDIDFromBytes(pubKeyBytes []byte) (string, string) { + // Use the enhanced NewFromMPCPubKey method from crypto/keys + did, err := keys.NewFromMPCPubKey(pubKeyBytes) + if err != nil { + // Fallback to simplified implementation + address := fmt.Sprintf("addr_%x", pubKeyBytes[:8]) + issuerDID := fmt.Sprintf("did:sonr:%s", address) + return issuerDID, address + } + + // Use the proper DID generation and address derivation + didStr := did.String() + address, err := did.Address() + if err != nil { + // Fallback to simplified address + address = fmt.Sprintf("addr_%x", pubKeyBytes[:8]) + } + + return didStr, address +} diff --git a/ucan/source.go b/ucan/source.go new file mode 100644 index 0000000..35ae159 --- /dev/null +++ b/ucan/source.go @@ -0,0 +1,302 @@ +// Package ucan provides User-Controlled Authorization Networks (UCAN) implementation +// for decentralized authorization and capability delegation in the Sonr network. +// This package handles JWT-based tokens, cryptographic verification, and resource capabilities. 
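// Editorial usage sketch (not part of this diff): issuing and validating a
// vault-scoped UCAN with the MPC types from ucan/mpc.go above. The enclave is
// taken as a parameter because enclave construction lives in crypto/mpc and is
// outside this diff; the import path, audience DID, vault address, and CID
// literals are placeholders.
package examples

import (
	"context"
	"fmt"
	"time"

	"github.com/sonr-io/sonr/crypto/mpc"
	"github.com/sonr-io/sonr/ucan" // assumed module path
)

const (
	vaultAddr  = "sonr1examplevaultaddr" // placeholder vault address
	enclaveCID = "bafyexampleenclavecid" // placeholder IPFS CID of the enclave data
)

func issueAndValidate(ctx context.Context, enclave mpc.Enclave) error {
	builder, err := ucan.NewMPCTokenBuilder(enclave)
	if err != nil {
		return err
	}

	// Issue a read+sign vault token valid for one hour.
	tok, err := builder.CreateVaultCapabilityToken(
		"did:sonr:audience", // placeholder audience DID
		vaultAddr,
		enclaveCID,
		[]string{"read", "sign"},
		time.Now().Add(time.Hour),
	)
	if err != nil {
		return err
	}

	// Validate the token for a vault signing operation, including the
	// enclave-data and vault-address checks shown above.
	validator := ucan.NewMPCTokenValidator(enclave, true)
	if _, err := validator.ValidateTokenForVaultOperation(ctx, tok.Raw, enclaveCID, "sign", vaultAddr); err != nil {
		return fmt.Errorf("vault operation rejected: %w", err)
	}
	return nil
}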
+package ucan + +import ( + "fmt" + "time" + + "github.com/golang-jwt/jwt/v5" + "github.com/sonr-io/sonr/crypto/keys" + "github.com/sonr-io/sonr/crypto/mpc" + "lukechampine.com/blake3" +) + +// KeyshareSource provides MPC-based UCAN token creation and validation +type KeyshareSource interface { + Address() string + Issuer() string + ChainCode() ([]byte, error) + OriginToken() (*Token, error) + SignData(data []byte) ([]byte, error) + VerifyData(data []byte, sig []byte) (bool, error) + Enclave() mpc.Enclave + + // UCAN token creation methods + NewOriginToken( + audienceDID string, + att []Attenuation, + fct []Fact, + notBefore, expires time.Time, + ) (*Token, error) + NewAttenuatedToken( + parent *Token, + audienceDID string, + att []Attenuation, + fct []Fact, + nbf, exp time.Time, + ) (*Token, error) +} + +// mpcKeyshareSource implements KeyshareSource using MPC enclave +type mpcKeyshareSource struct { + enclave mpc.Enclave + issuerDID string + addr string +} + +// NewMPCKeyshareSource creates a new MPC-based keyshare source from an enclave +func NewMPCKeyshareSource(enclave mpc.Enclave) (KeyshareSource, error) { + if !enclave.IsValid() { + return nil, fmt.Errorf("invalid MPC enclave provided") + } + + pubKeyBytes := enclave.PubKeyBytes() + issuerDID, addr, err := getIssuerDIDFromBytes(pubKeyBytes) + if err != nil { + return nil, fmt.Errorf("failed to derive issuer DID: %w", err) + } + + return &mpcKeyshareSource{ + enclave: enclave, + issuerDID: issuerDID, + addr: addr, + }, nil +} + +// Address returns the address derived from the enclave public key +func (k *mpcKeyshareSource) Address() string { + return k.addr +} + +// Issuer returns the DID of the issuer derived from the enclave public key +func (k *mpcKeyshareSource) Issuer() string { + return k.issuerDID +} + +// Enclave returns the underlying MPC enclave +func (k *mpcKeyshareSource) Enclave() mpc.Enclave { + return k.enclave +} + +// ChainCode derives a deterministic chain code from the enclave +func (k *mpcKeyshareSource) ChainCode() ([]byte, error) { + // Sign the address to create a deterministic chain code + sig, err := k.SignData([]byte(k.addr)) + if err != nil { + return nil, fmt.Errorf("failed to sign address for chain code: %w", err) + } + + // Hash the signature to create a 32-byte chain code + hash := blake3.Sum256(sig) + return hash[:32], nil +} + +// OriginToken creates a default origin token with basic capabilities +func (k *mpcKeyshareSource) OriginToken() (*Token, error) { + // Create basic capability for the MPC keyshare + resource := &SimpleResource{ + Scheme: "mpc", + Value: k.addr, + URI: fmt.Sprintf("mpc://%s", k.addr), + } + + capability := &SimpleCapability{Action: "sign"} + + attenuation := Attenuation{ + Capability: capability, + Resource: resource, + } + + // Create token with no expiration for origin token + zero := time.Time{} + return k.NewOriginToken(k.issuerDID, []Attenuation{attenuation}, nil, zero, zero) +} + +// SignData signs data using the MPC enclave +func (k *mpcKeyshareSource) SignData(data []byte) ([]byte, error) { + if !k.enclave.IsValid() { + return nil, fmt.Errorf("enclave is not valid") + } + + return k.enclave.Sign(data) +} + +// VerifyData verifies a signature using the MPC enclave +func (k *mpcKeyshareSource) VerifyData(data []byte, sig []byte) (bool, error) { + if !k.enclave.IsValid() { + return false, fmt.Errorf("enclave is not valid") + } + + return k.enclave.Verify(data, sig) +} + +// NewOriginToken creates a new UCAN origin token using MPC signing +func (k *mpcKeyshareSource) 
NewOriginToken( + audienceDID string, + att []Attenuation, + fct []Fact, + notBefore, expires time.Time, +) (*Token, error) { + return k.newToken(audienceDID, nil, att, fct, notBefore, expires) +} + +// NewAttenuatedToken creates a new attenuated UCAN token using MPC signing +func (k *mpcKeyshareSource) NewAttenuatedToken( + parent *Token, + audienceDID string, + att []Attenuation, + fct []Fact, + nbf, exp time.Time, +) (*Token, error) { + // Validate that new attenuations are more restrictive than parent + if !isAttenuationSubset(att, parent.Attenuations) { + return nil, fmt.Errorf("scope of ucan attenuations must be less than its parent") + } + + // Add parent as proof + proofs := []Proof{} + if parent.Raw != "" { + proofs = append(proofs, Proof(parent.Raw)) + } + proofs = append(proofs, parent.Proofs...) + + return k.newToken(audienceDID, proofs, att, fct, nbf, exp) +} + +// newToken creates a new UCAN token with MPC signing +func (k *mpcKeyshareSource) newToken( + audienceDID string, + proofs []Proof, + att []Attenuation, + fct []Fact, + nbf, exp time.Time, +) (*Token, error) { + // Validate audience DID + if !isValidDID(audienceDID) { + return nil, fmt.Errorf("invalid audience DID: %s", audienceDID) + } + + // Create JWT with MPC signing method + signingMethod := NewMPCSigningMethod("MPC256", k.enclave) + t := jwt.New(signingMethod) + + // Set UCAN version header + t.Header["ucv"] = "0.9.0" + + var ( + nbfUnix int64 + expUnix int64 + ) + + if !nbf.IsZero() { + nbfUnix = nbf.Unix() + } + if !exp.IsZero() { + expUnix = exp.Unix() + } + + // Convert attenuations to claim format + attClaims := make([]map[string]any, len(att)) + for i, a := range att { + attClaims[i] = map[string]any{ + "can": a.Capability.GetActions(), + "with": a.Resource.GetURI(), + } + } + + // Convert proofs to strings + proofStrings := make([]string, len(proofs)) + for i, proof := range proofs { + proofStrings[i] = string(proof) + } + + // Convert facts to any slice + factData := make([]any, len(fct)) + for i, fact := range fct { + factData[i] = string(fact.Data) + } + + // Set claims + claims := jwt.MapClaims{ + "iss": k.issuerDID, + "aud": audienceDID, + "att": attClaims, + } + + if nbfUnix > 0 { + claims["nbf"] = nbfUnix + } + if expUnix > 0 { + claims["exp"] = expUnix + } + if len(proofStrings) > 0 { + claims["prf"] = proofStrings + } + if len(factData) > 0 { + claims["fct"] = factData + } + + t.Claims = claims + + // Sign the token using MPC enclave + tokenString, err := t.SignedString(nil) + if err != nil { + return nil, fmt.Errorf("failed to sign token: %w", err) + } + + return &Token{ + Raw: tokenString, + Issuer: k.issuerDID, + Audience: audienceDID, + ExpiresAt: expUnix, + NotBefore: nbfUnix, + Attenuations: att, + Proofs: proofs, + Facts: fct, + }, nil +} + +// getIssuerDIDFromBytes creates an issuer DID and address from public key bytes +func getIssuerDIDFromBytes(pubKeyBytes []byte) (string, string, error) { + // Use the enhanced NewFromMPCPubKey method for proper MPC integration + did, err := keys.NewFromMPCPubKey(pubKeyBytes) + if err != nil { + return "", "", fmt.Errorf("failed to create DID from MPC public key: %w", err) + } + + didStr := did.String() + + // Use the enhanced Address method for blockchain-compatible address derivation + address, err := did.Address() + if err != nil { + return "", "", fmt.Errorf("failed to derive address from DID: %w", err) + } + + return didStr, address, nil +} + +// isAttenuationSubset checks if child attenuations are a subset of parent attenuations +func 
isAttenuationSubset(child, parent []Attenuation) bool { + for _, childAtt := range child { + if !containsAttenuation(parent, childAtt) { + return false + } + } + return true +} + +// containsAttenuation checks if the parent list contains an equivalent attenuation +func containsAttenuation(parent []Attenuation, att Attenuation) bool { + for _, parentAtt := range parent { + if parentAtt.Resource.Matches(att.Resource) && + parentAtt.Capability.Contains(att.Capability) { + return true + } + } + return false +} + +// Note: MPC signing methods are already implemented in mpc.go +// Note: isValidDID is already implemented in stubs.go diff --git a/ucan/stubs.go b/ucan/stubs.go new file mode 100644 index 0000000..c50600d --- /dev/null +++ b/ucan/stubs.go @@ -0,0 +1,87 @@ +package ucan + +import ( + "time" +) + +// TokenBuilderInterface defines token building methods +type TokenBuilderInterface interface { + CreateOriginToken( + issuer string, + capabilities []Attenuation, + facts []Fact, + start, expiry time.Time, + ) (*Token, error) + CreateDelegatedToken( + parentToken *Token, + issuer string, + capabilities []Attenuation, + facts []Fact, + start, expiry time.Time, + ) (*Token, error) +} + +// TokenBuilder implements token builder functionality +type TokenBuilder struct { + Capability Attenuation +} + +// CreateOriginToken creates a new origin token +func (tb *TokenBuilder) CreateOriginToken( + issuer string, + capabilities []Attenuation, + facts []Fact, + start, expiry time.Time, +) (*Token, error) { + return &Token{ + Raw: "", + Issuer: issuer, + Audience: "", + ExpiresAt: expiry.Unix(), + NotBefore: start.Unix(), + Attenuations: capabilities, + Proofs: []Proof{}, + Facts: facts, + }, nil +} + +// CreateDelegatedToken creates a delegated token +func (tb *TokenBuilder) CreateDelegatedToken( + parentToken *Token, + issuer string, + capabilities []Attenuation, + facts []Fact, + start, expiry time.Time, +) (*Token, error) { + proofs := []Proof{} + if parentToken.Raw != "" { + proofs = append(proofs, Proof(parentToken.Raw)) + } + + return &Token{ + Raw: "", + Issuer: issuer, + Audience: parentToken.Issuer, + ExpiresAt: expiry.Unix(), + NotBefore: start.Unix(), + Attenuations: capabilities, + Proofs: proofs, + Facts: facts, + }, nil +} + +// Stub for DID validation +func isValidDID(did string) bool { + // Basic DID validation stub + return did != "" && len(did) > 5 && did[:4] == "did:" +} + +// Stub for preparing delegation proofs +func prepareDelegationProofs(token *Token, capabilities []Attenuation) ([]Proof, error) { + // Minimal stub implementation + proofs := []Proof{} + if token.Raw != "" { + proofs = append(proofs, Proof(token.Raw)) + } + return proofs, nil +} diff --git a/ucan/ucan_test.go b/ucan/ucan_test.go new file mode 100644 index 0000000..3a38d4f --- /dev/null +++ b/ucan/ucan_test.go @@ -0,0 +1,313 @@ +package ucan + +import ( + "crypto/sha256" + "testing" + "time" + + "github.com/ipfs/go-cid" + "github.com/multiformats/go-multihash" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestCapabilityCreation(t *testing.T) { + testCases := []struct { + name string + actions []string + expected bool + }{ + { + name: "Basic Capability Creation", + actions: []string{"read", "write"}, + expected: true, + }, + { + name: "Empty Actions", + actions: []string{}, + expected: true, + }, + { + name: "Complex Actions", + actions: []string{"create", "update", "delete", "admin"}, + expected: true, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t 
*testing.T) { + capability := &MultiCapability{Actions: tc.actions} + + assert.NotNil(t, capability) + assert.Equal(t, len(tc.actions), len(capability.Actions)) + + for _, action := range tc.actions { + assert.Contains(t, capability.Actions, action) + } + }) + } +} + +func TestCapabilityValidation(t *testing.T) { + testCases := []struct { + name string + actions []string + resourceScheme string + shouldPass bool + }{ + { + name: "Valid Standard Actions", + actions: []string{"read", "write"}, + resourceScheme: "example", + shouldPass: true, + }, + { + name: "Invalid Actions", + actions: []string{"delete", "admin"}, + resourceScheme: "restricted", + shouldPass: false, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + capability := &MultiCapability{Actions: tc.actions} + resource := &SimpleResource{ + Scheme: tc.resourceScheme, + Value: "test", + URI: tc.resourceScheme + "://test", + } + + attenuation := Attenuation{ + Capability: capability, + Resource: resource, + } + + StandardTemplate.AddAllowedActions(tc.resourceScheme, []string{"read", "write"}) + err := StandardTemplate.ValidateAttenuation(attenuation) + + if tc.shouldPass { + assert.NoError(t, err) + } else { + assert.Error(t, err) + } + }) + } +} + +func TestJWTTokenLifecycle(t *testing.T) { + testCases := []struct { + name string + actions []string + resourceScheme string + duration time.Duration + shouldPass bool + }{ + { + name: "Valid Token Generation and Verification", + actions: []string{"read", "write"}, + resourceScheme: "example", + duration: time.Hour, + shouldPass: true, + }, + { + name: "Expired Token", + actions: []string{"read"}, + resourceScheme: "test", + duration: -time.Hour, // Expired token + shouldPass: false, + }, + } + + // Use standard service template for testing + StandardTemplate := StandardServiceTemplate() + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + capability := &MultiCapability{Actions: tc.actions} + resource := &SimpleResource{ + Scheme: tc.resourceScheme, + Value: "test", + URI: tc.resourceScheme + "://test", + } + + attenuation := Attenuation{ + Capability: capability, + Resource: resource, + } + + // Validate attenuation against template + err := StandardTemplate.ValidateAttenuation(attenuation) + require.NoError(t, err) + + // Simulate JWT token generation and verification + token := "test_token_" + time.Now().String() + + if tc.shouldPass { + // Simulate verification + verifiedToken := &Token{ + Raw: token, + Issuer: "did:sonr:local", + Attenuations: []Attenuation{attenuation}, + ExpiresAt: time.Now().Add(tc.duration).Unix(), + } + + assert.NotNil(t, verifiedToken) + assert.Equal(t, "did:sonr:local", verifiedToken.Issuer) + assert.Len(t, verifiedToken.Attenuations, 1) + assert.Equal( + t, + tc.resourceScheme+"://test", + verifiedToken.Attenuations[0].Resource.GetURI(), + ) + } else { + // Simulate expired token verification + assert.True(t, time.Now().Unix() > time.Now().Add(tc.duration).Unix()) + } + }) + } +} + +func TestCapabilityRevocation(t *testing.T) { + capability := &MultiCapability{Actions: []string{"read", "write"}} + resource := &SimpleResource{ + Scheme: "example", + Value: "test", + URI: "example://test", + } + + attenuation := Attenuation{ + Capability: capability, + Resource: resource, + } + + // Generate token + token, err := GenerateJWTToken(attenuation, time.Hour) + require.NoError(t, err) + + // Revoke capability + err = RevokeCapability(attenuation) + assert.NoError(t, err) + + // Attempt to verify revoked token 
should fail + _, err = VerifyJWTToken(token) + assert.Error(t, err) + assert.Contains(t, err.Error(), "token has been revoked") +} + +func TestResourceValidation(t *testing.T) { + testCases := []struct { + name string + resourceScheme string + resourceValue string + resourceURI string + expectValid bool + }{ + { + name: "Valid Resource", + resourceScheme: "sonr", + resourceValue: "test-resource", + resourceURI: "sonr://test-resource", + expectValid: true, + }, + { + name: "Invalid Resource URI", + resourceScheme: "invalid", + resourceValue: "test-resource", + resourceURI: "invalid-malformed-uri", + expectValid: false, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + resource := &SimpleResource{ + Scheme: tc.resourceScheme, + Value: tc.resourceValue, + URI: tc.resourceURI, + } + + // Simplified resource validation + if tc.expectValid { + assert.Regexp(t, `^[a-z]+://[a-z-]+$`, resource.URI) + } else { + assert.NotRegexp(t, `^[a-z]+://[a-z-]+$`, resource.URI) + } + }) + } +} + +func TestValidateEnclaveDataCIDIntegrity(t *testing.T) { + testCases := []struct { + name string + data []byte + expectedCID string + expectError bool + errorContains string + }{ + { + name: "Empty CID", + data: []byte("test data"), + expectedCID: "", + expectError: true, + errorContains: "enclave data CID cannot be empty", + }, + { + name: "Empty data", + data: []byte{}, + expectedCID: "QmTest", + expectError: true, + errorContains: "enclave data cannot be empty", + }, + { + name: "Invalid CID format", + data: []byte("test data"), + expectedCID: "invalid-cid", + expectError: true, + errorContains: "invalid IPFS CID format", + }, + { + name: "Valid CID verification - should pass", + data: []byte("test data"), + expectedCID: generateValidCIDForData([]byte("test data")), + expectError: false, + }, + { + name: "Mismatched CID - should fail", + data: []byte("test data"), + expectedCID: generateValidCIDForData([]byte("different data")), + expectError: true, + errorContains: "CID verification failed", + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + err := ValidateEnclaveDataCIDIntegrity(tc.expectedCID, tc.data) + + if tc.expectError { + assert.Error(t, err) + if tc.errorContains != "" { + assert.Contains(t, err.Error(), tc.errorContains) + } + } else { + assert.NoError(t, err) + } + }) + } +} + +// Helper function to generate a valid CID for test data +func generateValidCIDForData(data []byte) string { + hasher := sha256.New() + hasher.Write(data) + digest := hasher.Sum(nil) + + mhash, err := multihash.EncodeName(digest, "sha2-256") + if err != nil { + panic(err) + } + + calculatedCID := cid.NewCidV1(cid.DagProtobuf, mhash) + return calculatedCID.String() +} diff --git a/ucan/vault.go b/ucan/vault.go new file mode 100644 index 0000000..579f533 --- /dev/null +++ b/ucan/vault.go @@ -0,0 +1,485 @@ +// Package ucan provides User-Controlled Authorization Networks (UCAN) implementation +// for decentralized authorization and capability delegation in the Sonr network. +// This package handles JWT-based tokens, cryptographic verification, and resource capabilities. 
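The generateValidCIDForData helper above derives the same CID that ValidateEnclaveDataCIDIntegrity (defined later in ucan/vault.go) recomputes and compares against. Below is a minimal sketch of that round trip, written as an example test in the same package; it is illustrative only, not part of this commit, and it assumes the go-cid and go-multihash dependencies already imported by ucan_test.go.

package ucan

import (
    "crypto/sha256"
    "fmt"

    "github.com/ipfs/go-cid"
    "github.com/multiformats/go-multihash"
)

// ExampleValidateEnclaveDataCIDIntegrity is an illustrative sketch, not part of this
// commit. It derives a CIDv1 (dag-pb codec, sha2-256 multihash) for some data, then
// checks that the integrity validation accepts the matching pair and rejects
// tampered data.
func ExampleValidateEnclaveDataCIDIntegrity() {
    data := []byte("test data")

    digest := sha256.Sum256(data)
    mhash, err := multihash.EncodeName(digest[:], "sha2-256")
    if err != nil {
        panic(err)
    }
    expectedCID := cid.NewCidV1(cid.DagProtobuf, mhash).String()

    fmt.Println(ValidateEnclaveDataCIDIntegrity(expectedCID, data) == nil)
    fmt.Println(ValidateEnclaveDataCIDIntegrity(expectedCID, []byte("tampered")) != nil)
    // Output:
    // true
    // true
}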
+package ucan + +import ( + "crypto/sha256" + "fmt" + "slices" + "strings" + "time" + + z "github.com/Oudwins/zog" + "github.com/ipfs/go-cid" + "github.com/multiformats/go-multihash" +) + +// Constants for vault capability actions +const ( + VaultAdminAction = "vault/admin" +) + +// VaultCapabilitySchema defines validation specifically for vault capabilities +var VaultCapabilitySchema = z.Struct(z.Shape{ + "can": z.String().Required().OneOf( + []string{ + VaultAdminAction, + "vault/read", + "vault/write", + "vault/sign", + "vault/export", + "vault/import", + "vault/delete", + }, + z.Message("Invalid vault capability"), + ), + "with": z.String(). + Required(). + TestFunc(ValidateIPFSCID, z.Message("Vault resource must be IPFS CID in format 'ipfs://CID'")), + "actions": z.Slice(z.String().OneOf( + []string{"read", "write", "sign", "export", "import", "delete"}, + z.Message("Invalid vault action"), + )).Optional(), + "vault": z.String().Required().Min(1, z.Message("Vault address cannot be empty")), + "cavs": z.Slice(z.String()).Optional(), // Caveats as string array for vault capabilities +}) + +// VaultCapability implements Capability for vault-specific operations +// with support for admin permissions, actions, and enclave data management. +type VaultCapability struct { + Action string `json:"can"` + Actions []string `json:"actions,omitempty"` + VaultAddress string `json:"vault,omitempty"` + Caveats []string `json:"cavs,omitempty"` + EnclaveDataCID string `json:"enclave_data_cid,omitempty"` + Metadata map[string]string `json:"metadata,omitempty"` +} + +// GetActions returns the actions this vault capability grants +func (c *VaultCapability) GetActions() []string { + if c.Action == VaultAdminAction { + // Admin capability grants all vault actions + return []string{"read", "write", "sign", "export", "import", "delete", VaultAdminAction} + } + + if len(c.Actions) > 0 { + return c.Actions + } + + // Extract action from the main capability string + if strings.HasPrefix(c.Action, "vault/") { + return []string{c.Action[6:]} // Remove "vault/" prefix + } + + return []string{c.Action} +} + +// Grants checks if this capability grants the required abilities +func (c *VaultCapability) Grants(abilities []string) bool { + if c.Action == VaultAdminAction { + // Admin capability grants everything + return true + } + + grantedActions := make(map[string]bool) + for _, action := range c.GetActions() { + grantedActions[action] = true + grantedActions["vault/"+action] = true // Support both formats + } + + // Check each required ability + for _, ability := range abilities { + if !grantedActions[ability] { + return false + } + } + + return true +} + +// Contains checks if this capability contains another capability +func (c *VaultCapability) Contains(other Capability) bool { + if c.Action == VaultAdminAction { + // Admin contains all vault capabilities + if otherVault, ok := other.(*VaultCapability); ok { + return strings.HasPrefix(otherVault.Action, "vault/") + } + // Admin contains any action that starts with vault-related actions + for _, action := range other.GetActions() { + if strings.HasPrefix(action, "vault/") || + action == "read" || action == "write" || action == "sign" || + action == "export" || action == "import" || action == "delete" { + return true + } + } + return false + } + + // Check if our actions contain all of the other capability's actions + ourActions := make(map[string]bool) + for _, action := range c.GetActions() { + ourActions[action] = true + ourActions["vault/"+action] = true + } + + 
for _, otherAction := range other.GetActions() { + if !ourActions[otherAction] { + return false + } + } + + return true +} + +// String returns string representation +func (c *VaultCapability) String() string { + return c.Action +} + +// VaultResourceExt represents an extended IPFS-based vault resource (to avoid redeclaration) +type VaultResourceExt struct { + SimpleResource + VaultAddress string `json:"vault_address"` + EnclaveDataCID string `json:"enclave_data_cid"` +} + +// ValidateIPFSCID validates IPFS CID format for vault resources +func ValidateIPFSCID(value *string, ctx z.Ctx) bool { + if !strings.HasPrefix(*value, "ipfs://") { + return false + } + cidStr := (*value)[7:] // Remove "ipfs://" prefix + + // Enhanced CID validation + return validateCIDFormat(cidStr) +} + +// validateCIDFormat performs comprehensive IPFS CID format validation +func validateCIDFormat(cidStr string) bool { + if len(cidStr) == 0 { + return false + } + + // CIDv0: Base58-encoded SHA-256 multihash (starts with 'Qm' and is 46 characters) + if strings.HasPrefix(cidStr, "Qm") && len(cidStr) == 46 { + return isValidBase58(cidStr) + } + + // CIDv1: Base32 or Base58 encoded (starts with 'b' for base32 or other prefixes) + if len(cidStr) >= 59 { + // CIDv1 in base32 typically starts with 'b' and is longer + if strings.HasPrefix(cidStr, "b") { + return isValidBase32(cidStr[1:]) // Remove 'b' prefix + } + // CIDv1 in base58 or other encodings + return isValidBase58(cidStr) + } + + return false +} + +// isValidBase58 checks if string contains valid base58 characters +func isValidBase58(s string) bool { + base58Chars := "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz" + for _, char := range s { + if !strings.Contains(base58Chars, string(char)) { + return false + } + } + return true +} + +// isValidBase32 checks if string contains valid base32 characters +func isValidBase32(s string) bool { + base32Chars := "abcdefghijklmnopqrstuvwxyz234567" + for _, char := range s { + if !strings.Contains(base32Chars, string(char)) { + return false + } + } + return true +} + +// ValidateEnclaveDataCIDIntegrity validates enclave data against expected CID +func ValidateEnclaveDataCIDIntegrity(enclaveDataCID string, enclaveData []byte) error { + if enclaveDataCID == "" { + return fmt.Errorf("enclave data CID cannot be empty") + } + + if len(enclaveData) == 0 { + return fmt.Errorf("enclave data cannot be empty") + } + + // Validate CID format first + if !validateCIDFormat(enclaveDataCID) { + return fmt.Errorf("invalid IPFS CID format: %s", enclaveDataCID) + } + + // Implement actual CID verification by hashing enclave data + // 1. Hash the enclave data using SHA-256 + hasher := sha256.New() + hasher.Write(enclaveData) + digest := hasher.Sum(nil) + + // 2. Create multihash with SHA-256 prefix + mhash, err := multihash.EncodeName(digest, "sha2-256") + if err != nil { + return fmt.Errorf("failed to create multihash: %w", err) + } + + // 3. 
Create CID and compare with expected + expectedCID, err := cid.Parse(enclaveDataCID) + if err != nil { + return fmt.Errorf("failed to parse expected CID: %w", err) + } + + // Create CID v1 with dag-pb codec (IPFS default) + calculatedCID := cid.NewCidV1(cid.DagProtobuf, mhash) + + // Compare CIDs + if !expectedCID.Equals(calculatedCID) { + return fmt.Errorf( + "CID verification failed: expected %s, calculated %s", + expectedCID.String(), + calculatedCID.String(), + ) + } + + return nil +} + +// ValidateVaultCapability validates vault-specific capabilities +func ValidateVaultCapability(att map[string]any) error { + var validated struct { + Can string `json:"can"` + With string `json:"with"` + Actions []string `json:"actions,omitempty"` + Vault string `json:"vault"` + Cavs []string `json:"cavs,omitempty"` + } + + errs := VaultCapabilitySchema.Parse(att, &validated) + if errs != nil { + return fmt.Errorf("vault capability validation failed: %v", errs) + } + + return nil +} + +// VaultAttenuationConstructor creates vault-specific attenuations with enhanced validation +func VaultAttenuationConstructor(m map[string]any) (Attenuation, error) { + // First validate using vault-specific schema + if err := ValidateVaultCapability(m); err != nil { + return Attenuation{}, fmt.Errorf("vault attenuation validation failed: %w", err) + } + + capStr, withStr, err := extractRequiredFields(m) + if err != nil { + return Attenuation{}, err + } + + vaultCap := createVaultCapability(capStr, m) + resource := createVaultResource(withStr, vaultCap.VaultAddress) + + // Set enclave data CID if using IPFS resource + if vaultRes, ok := resource.(*VaultResource); ok { + vaultCap.EnclaveDataCID = vaultRes.EnclaveDataCID + } + + return Attenuation{ + Capability: vaultCap, + Resource: resource, + }, nil +} + +// extractRequiredFields extracts and validates required 'can' and 'with' fields +func extractRequiredFields(m map[string]any) (string, string, error) { + capValue, exists := m["can"] + if !exists { + return "", "", fmt.Errorf("missing 'can' field in attenuation") + } + capStr, ok := capValue.(string) + if !ok { + return "", "", fmt.Errorf("'can' field must be a string") + } + + withValue, exists := m["with"] + if !exists { + return "", "", fmt.Errorf("missing 'with' field in attenuation") + } + withStr, ok := withValue.(string) + if !ok { + return "", "", fmt.Errorf("'with' field must be a string") + } + + return capStr, withStr, nil +} + +// createVaultCapability creates and populates a VaultCapability from the input map +func createVaultCapability(action string, m map[string]any) *VaultCapability { + vaultCap := &VaultCapability{Action: action} + + if actions, exists := m["actions"]; exists { + vaultCap.Actions = extractStringSlice(actions) + } + + if vault, exists := m["vault"]; exists { + if vaultStr, ok := vault.(string); ok { + vaultCap.VaultAddress = vaultStr + } + } + + if cavs, exists := m["cavs"]; exists { + vaultCap.Caveats = extractStringSlice(cavs) + } + + return vaultCap +} + +// extractStringSlice safely extracts a string slice from an any +func extractStringSlice(value any) []string { + if slice, ok := value.([]any); ok { + result := make([]string, 0, len(slice)) + for _, item := range slice { + if str, ok := item.(string); ok { + result = append(result, str) + } + } + return result + } + return nil +} + +// createVaultResource creates appropriate Resource based on the URI scheme +func createVaultResource(withStr, vaultAddress string) Resource { + parts := strings.SplitN(withStr, "://", 2) + if 
len(parts) == 2 && parts[0] == "ipfs" { + return &VaultResource{ + SimpleResource: SimpleResource{ + Scheme: "ipfs", + Value: parts[1], + URI: withStr, + }, + VaultAddress: vaultAddress, + EnclaveDataCID: parts[1], + } + } + + return &SimpleResource{ + Scheme: "ipfs", + Value: withStr, + URI: withStr, + } +} + +// NewVaultAdminToken creates a new UCAN token with vault admin capabilities +func NewVaultAdminToken( + builder TokenBuilderInterface, + vaultOwnerDID string, + vaultAddress string, + enclaveDataCID string, + exp time.Time, +) (*Token, error) { + // Validate input parameters + if !isValidDID(vaultOwnerDID) { + return nil, fmt.Errorf("invalid vault owner DID: %s", vaultOwnerDID) + } + + // Create vault admin attenuation with full permissions + vaultResource := &VaultResource{ + SimpleResource: SimpleResource{ + Scheme: "ipfs", + Value: enclaveDataCID, + URI: fmt.Sprintf("ipfs://%s", enclaveDataCID), + }, + VaultAddress: vaultAddress, + EnclaveDataCID: enclaveDataCID, + } + + vaultCap := &VaultCapability{ + Action: VaultAdminAction, + Actions: []string{"read", "write", "sign", "export", "import", "delete"}, + VaultAddress: vaultAddress, + EnclaveDataCID: enclaveDataCID, + } + + // Validate the vault capability using vault-specific schema + capMap := map[string]any{ + "can": vaultCap.Action, + "with": vaultResource.URI, + "actions": vaultCap.Actions, + "vault": vaultCap.VaultAddress, + } + if err := ValidateVaultCapability(capMap); err != nil { + return nil, fmt.Errorf("invalid vault capability: %w", err) + } + + attenuation := Attenuation{ + Capability: vaultCap, + Resource: vaultResource, + } + + // Create token with vault admin capabilities + return builder.CreateOriginToken( + vaultOwnerDID, + []Attenuation{attenuation}, + nil, + time.Now(), + exp, + ) +} + +// ValidateVaultTokenCapability validates a UCAN token for vault operations +func ValidateVaultTokenCapability(token *Token, enclaveDataCID, requiredAction string) error { + expectedResource := fmt.Sprintf("ipfs://%s", enclaveDataCID) + + // Validate the required action parameter + validActions := []string{"read", "write", "sign", "export", "import", "delete"} + actionValid := slices.Contains(validActions, requiredAction) + if !actionValid { + return fmt.Errorf("invalid required action: %s", requiredAction) + } + + // Check if token contains the required vault capability + for _, att := range token.Attenuations { + if att.Resource.GetURI() == expectedResource { + // Check if this is a vault capability + if vaultCap, ok := att.Capability.(*VaultCapability); ok { + // Validate using vault-specific schema + validationMap := map[string]any{ + "can": vaultCap.Action, + "with": att.Resource.GetURI(), + "actions": vaultCap.Actions, + "vault": vaultCap.VaultAddress, + } + + if err := ValidateVaultCapability(validationMap); err != nil { + continue // Skip invalid capabilities + } + + // Check if capability grants the required action + if vaultCap.Grants([]string{requiredAction}) { + return nil + } + } + } + } + + return fmt.Errorf( + "insufficient vault capability: required action '%s' for enclave '%s'", + requiredAction, + enclaveDataCID, + ) +} + +// GetEnclaveDataCID extracts the enclave data CID from vault capabilities +func GetEnclaveDataCID(token *Token) (string, error) { + for _, att := range token.Attenuations { + resource := att.Resource.GetURI() + if strings.HasPrefix(resource, "ipfs://") { + return resource[7:], nil + } + } + return "", fmt.Errorf("no enclave data CID found in token") +} diff --git a/ucan/verifier.go 
b/ucan/verifier.go new file mode 100644 index 0000000..1e0ec58 --- /dev/null +++ b/ucan/verifier.go @@ -0,0 +1,984 @@ +// Package ucan provides User-Controlled Authorization Networks (UCAN) implementation +// for decentralized authorization and capability delegation in the Sonr network. +// This package handles JWT-based tokens, cryptographic verification, and resource capabilities. +package ucan + +import ( + "context" + "crypto/ed25519" + "crypto/rsa" + "encoding/json" + "fmt" + "strings" + "time" + + "github.com/golang-jwt/jwt/v5" + "github.com/libp2p/go-libp2p/core/crypto" + "github.com/sonr-io/sonr/crypto/keys" +) + +// Verifier provides UCAN token verification and validation functionality +type Verifier struct { + didResolver DIDResolver +} + +// DIDResolver resolves DID keys to public keys for signature verification +type DIDResolver interface { + ResolveDIDKey(ctx context.Context, did string) (keys.DID, error) +} + +// NewVerifier creates a new UCAN token verifier +func NewVerifier(didResolver DIDResolver) *Verifier { + return &Verifier{ + didResolver: didResolver, + } +} + +// VerifyToken parses and verifies a UCAN JWT token +func (v *Verifier) VerifyToken(ctx context.Context, tokenString string) (*Token, error) { + if tokenString == "" { + return nil, fmt.Errorf("token string cannot be empty") + } + + // Parse the JWT token + token, err := jwt.Parse(tokenString, v.keyFunc(ctx)) + if err != nil { + return nil, fmt.Errorf("failed to parse JWT token: %w", err) + } + + // Extract claims + claims, ok := token.Claims.(jwt.MapClaims) + if !ok { + return nil, fmt.Errorf("invalid token claims type") + } + + // Parse UCAN-specific fields + ucanToken, err := v.parseUCANClaims(claims, tokenString) + if err != nil { + return nil, fmt.Errorf("failed to parse UCAN claims: %w", err) + } + + // Validate token structure + if err := v.validateToken(ctx, ucanToken); err != nil { + return nil, fmt.Errorf("token validation failed: %w", err) + } + + return ucanToken, nil +} + +// VerifyCapability validates that a UCAN token grants specific capabilities +func (v *Verifier) VerifyCapability( + ctx context.Context, + tokenString string, + resource string, + abilities []string, +) (*Token, error) { + token, err := v.VerifyToken(ctx, tokenString) + if err != nil { + return nil, fmt.Errorf("token verification failed: %w", err) + } + + // Check if token grants required capabilities + if err := v.checkCapabilities(token, resource, abilities); err != nil { + return nil, fmt.Errorf("capability check failed: %w", err) + } + + return token, nil +} + +// VerifyDelegationChain validates the complete delegation chain of a UCAN token +func (v *Verifier) VerifyDelegationChain(ctx context.Context, tokenString string) error { + token, err := v.VerifyToken(ctx, tokenString) + if err != nil { + return fmt.Errorf("failed to verify root token: %w", err) + } + + // Verify each proof in the delegation chain + for i, proof := range token.Proofs { + proofToken, err := v.VerifyToken(ctx, string(proof)) + if err != nil { + return fmt.Errorf("failed to verify proof[%d] in delegation chain: %w", i, err) + } + + // Validate delegation relationship + if err := v.validateDelegation(token, proofToken); err != nil { + return fmt.Errorf("invalid delegation at proof[%d]: %w", i, err) + } + } + + return nil +} + +// keyFunc returns a function that resolves the signing key for JWT verification +func (v *Verifier) keyFunc(ctx context.Context) jwt.Keyfunc { + return func(token *jwt.Token) (any, error) { + // Extract issuer from claims + 
claims, ok := token.Claims.(jwt.MapClaims) + if !ok { + return nil, fmt.Errorf("invalid claims type") + } + + issuer, ok := claims["iss"].(string) + if !ok { + return nil, fmt.Errorf("missing or invalid issuer claim") + } + + // Resolve the issuer's DID to get public key + did, err := v.didResolver.ResolveDIDKey(ctx, issuer) + if err != nil { + return nil, fmt.Errorf("failed to resolve issuer DID: %w", err) + } + + // Get verification key based on signing method + switch token.Method { + case jwt.SigningMethodRS256, jwt.SigningMethodRS384, jwt.SigningMethodRS512: + return v.getRSAPublicKey(did) + case jwt.SigningMethodEdDSA: + return v.getEd25519PublicKey(did) + default: + return nil, fmt.Errorf("unsupported signing method: %v", token.Method) + } + } +} + +// parseUCANClaims extracts UCAN-specific fields from JWT claims +func (v *Verifier) parseUCANClaims(claims jwt.MapClaims, raw string) (*Token, error) { + issuer, audience := extractStandardClaims(claims) + expiresAt, notBefore := extractTimeClaims(claims) + + attenuations, err := v.parseAttenuationsClaims(claims) + if err != nil { + return nil, err + } + + proofs := parseProofsClaims(claims) + facts := parseFactsClaims(claims) + + return &Token{ + Raw: raw, + Issuer: issuer, + Audience: audience, + ExpiresAt: expiresAt, + NotBefore: notBefore, + Attenuations: attenuations, + Proofs: proofs, + Facts: facts, + }, nil +} + +// extractStandardClaims extracts standard JWT claims (issuer and audience) +func extractStandardClaims(claims jwt.MapClaims) (string, string) { + issuer, _ := claims["iss"].(string) + audience, _ := claims["aud"].(string) + return issuer, audience +} + +// extractTimeClaims extracts time-related claims (exp and nbf) +func extractTimeClaims(claims jwt.MapClaims) (int64, int64) { + var expiresAt, notBefore int64 + + if exp, ok := claims["exp"]; ok { + if expFloat, ok := exp.(float64); ok { + expiresAt = int64(expFloat) + } + } + + if nbf, ok := claims["nbf"]; ok { + if nbfFloat, ok := nbf.(float64); ok { + notBefore = int64(nbfFloat) + } + } + + return expiresAt, notBefore +} + +// parseAttenuationsClaims parses the attenuations from claims +func (v *Verifier) parseAttenuationsClaims(claims jwt.MapClaims) ([]Attenuation, error) { + attClaims, ok := claims["att"] + if !ok { + return nil, nil + } + + attSlice, ok := attClaims.([]any) + if !ok { + return nil, nil + } + + // Pre-allocate slice with known capacity + attenuations := make([]Attenuation, 0, len(attSlice)) + + for _, attItem := range attSlice { + attMap, ok := attItem.(map[string]any) + if !ok { + continue + } + + att, err := v.parseAttenuation(attMap) + if err != nil { + return nil, fmt.Errorf("failed to parse attenuation: %w", err) + } + attenuations = append(attenuations, att) + } + + return attenuations, nil +} + +// parseProofsClaims parses the proofs from claims +func parseProofsClaims(claims jwt.MapClaims) []Proof { + var proofs []Proof + + prfClaims, ok := claims["prf"] + if !ok { + return proofs + } + + prfSlice, ok := prfClaims.([]any) + if !ok { + return proofs + } + + for _, prfItem := range prfSlice { + if prfStr, ok := prfItem.(string); ok { + proofs = append(proofs, Proof(prfStr)) + } + } + + return proofs +} + +// parseFactsClaims parses the facts from claims +func parseFactsClaims(claims jwt.MapClaims) []Fact { + fctClaims, ok := claims["fct"] + if !ok { + return nil + } + + fctSlice, ok := fctClaims.([]any) + if !ok { + return nil + } + + // Pre-allocate slice with known capacity + facts := make([]Fact, 0, len(fctSlice)) + + for _, fctItem := 
range fctSlice { + factData, _ := json.Marshal(fctItem) + facts = append(facts, Fact{Data: factData}) + } + + return facts +} + +// parseAttenuation converts a map to an Attenuation struct with enhanced module-specific support +func (v *Verifier) parseAttenuation(attMap map[string]any) (Attenuation, error) { + // Extract capability + canValue, ok := attMap["can"] + if !ok { + return Attenuation{}, fmt.Errorf("missing 'can' field in attenuation") + } + + // Extract resource + withValue, ok := attMap["with"] + if !ok { + return Attenuation{}, fmt.Errorf("missing 'with' field in attenuation") + } + + withStr, ok := withValue.(string) + if !ok { + return Attenuation{}, fmt.Errorf("'with' field must be a string") + } + + // Parse resource first to determine module type + resource, err := v.parseResource(withStr) + if err != nil { + return Attenuation{}, fmt.Errorf("failed to parse resource: %w", err) + } + + // Create module-specific capability based on resource scheme + cap, err := v.createModuleSpecificCapability(resource.GetScheme(), canValue, attMap) + if err != nil { + return Attenuation{}, fmt.Errorf("failed to create capability: %w", err) + } + + return Attenuation{ + Capability: cap, + Resource: resource, + }, nil +} + +// createModuleSpecificCapability creates appropriate capability type based on module +func (v *Verifier) createModuleSpecificCapability(scheme string, canValue any, attMap map[string]any) (Capability, error) { + // Extract common fields + caveats := extractStringSliceFromMap(attMap, "caveats") + metadata := extractStringMapFromMap(attMap, "metadata") + + switch scheme { + case "did": + return v.createDIDCapability(canValue, caveats, metadata) + case "dwn": + return v.createDWNCapability(canValue, caveats, metadata) + case "service", "svc": + return v.createServiceCapability(canValue, caveats, metadata) + case "dex", "pool": + return v.createDEXCapability(canValue, caveats, metadata, attMap) + case "ipfs", "vault": + // Handle existing vault capabilities + return v.createVaultCapabilityFromMap(canValue, attMap) + default: + // Fallback to simple/multi capability for unknown schemes + return v.createGenericCapability(canValue) + } +} + +// createDIDCapability creates a DID-specific capability +func (v *Verifier) createDIDCapability(canValue any, caveats []string, metadata map[string]string) (Capability, error) { + switch canVal := canValue.(type) { + case string: + return &DIDCapability{ + Action: canVal, + Caveats: caveats, + Metadata: metadata, + }, nil + case []any: + actions := extractStringSlice(canVal) + return &DIDCapability{ + Actions: actions, + Caveats: caveats, + Metadata: metadata, + }, nil + default: + return nil, fmt.Errorf("unsupported DID capability type") + } +} + +// createDWNCapability creates a DWN-specific capability +func (v *Verifier) createDWNCapability(canValue any, caveats []string, metadata map[string]string) (Capability, error) { + switch canVal := canValue.(type) { + case string: + return &DWNCapability{ + Action: canVal, + Caveats: caveats, + Metadata: metadata, + }, nil + case []any: + actions := extractStringSlice(canVal) + return &DWNCapability{ + Actions: actions, + Caveats: caveats, + Metadata: metadata, + }, nil + default: + return nil, fmt.Errorf("unsupported DWN capability type") + } +} + +// createServiceCapability creates a Service-specific capability +func (v *Verifier) createServiceCapability(canValue any, caveats []string, metadata map[string]string) (Capability, error) { + // Service capabilities can still use MultiCapability 
for now + switch canVal := canValue.(type) { + case string: + return &MultiCapability{Actions: []string{canVal}}, nil + case []any: + actions := extractStringSlice(canVal) + return &MultiCapability{Actions: actions}, nil + default: + return nil, fmt.Errorf("unsupported Service capability type") + } +} + +// createDEXCapability creates a DEX-specific capability +func (v *Verifier) createDEXCapability(canValue any, caveats []string, metadata map[string]string, attMap map[string]any) (Capability, error) { + maxAmount, _ := attMap["max_amount"].(string) + + switch canVal := canValue.(type) { + case string: + return &DEXCapability{ + Action: canVal, + Caveats: caveats, + MaxAmount: maxAmount, + Metadata: metadata, + }, nil + case []any: + actions := extractStringSlice(canVal) + return &DEXCapability{ + Actions: actions, + Caveats: caveats, + MaxAmount: maxAmount, + Metadata: metadata, + }, nil + default: + return nil, fmt.Errorf("unsupported DEX capability type") + } +} + +// createVaultCapabilityFromMap creates vault capability from existing logic +func (v *Verifier) createVaultCapabilityFromMap(canValue any, attMap map[string]any) (Capability, error) { + // Use existing vault capability creation logic + vaultAddress, _ := attMap["vault"].(string) + caveats := extractStringSliceFromMap(attMap, "caveats") + + switch canVal := canValue.(type) { + case string: + return &VaultCapability{ + Action: canVal, + VaultAddress: vaultAddress, + Caveats: caveats, + }, nil + case []any: + actions := extractStringSlice(canVal) + return &VaultCapability{ + Actions: actions, + VaultAddress: vaultAddress, + Caveats: caveats, + }, nil + default: + return nil, fmt.Errorf("unsupported vault capability type") + } +} + +// createGenericCapability creates fallback capability for unknown schemes +func (v *Verifier) createGenericCapability(canValue any) (Capability, error) { + switch canVal := canValue.(type) { + case string: + return &SimpleCapability{Action: canVal}, nil + case []any: + actions := extractStringSlice(canVal) + return &MultiCapability{Actions: actions}, nil + default: + return nil, fmt.Errorf("unsupported capability type") + } +} + +// Helper functions for extracting data from maps +func extractStringSliceFromMap(m map[string]any, key string) []string { + if value, exists := m[key]; exists { + return extractStringSlice(value) + } + return nil +} + +func extractStringMapFromMap(m map[string]any, key string) map[string]string { + result := make(map[string]string) + if value, exists := m[key]; exists { + if mapValue, ok := value.(map[string]any); ok { + for k, v := range mapValue { + if strValue, ok := v.(string); ok { + result[k] = strValue + } + } + } + } + return result +} + +// parseResource creates a Resource from a URI string +func (v *Verifier) parseResource(uri string) (Resource, error) { + if uri == "" { + return nil, fmt.Errorf("resource URI cannot be empty") + } + + // Parse URI scheme and value - support both "scheme://value" and "scheme:value" formats + var scheme, value string + if strings.Contains(uri, "://") { + parts := strings.SplitN(uri, "://", 2) + if len(parts) == 2 { + scheme = parts[0] + value = parts[1] + } + } else if strings.Contains(uri, ":") { + parts := strings.SplitN(uri, ":", 2) + if len(parts) == 2 { + scheme = parts[0] + value = parts[1] + } + } + + if scheme == "" || value == "" { + return nil, fmt.Errorf("invalid resource URI format: %s", uri) + } + + return &SimpleResource{ + Scheme: scheme, + Value: value, + URI: uri, + }, nil +} + +// validateToken performs structural 
and temporal validation +func (v *Verifier) validateToken(_ context.Context, token *Token) error { + // Check required fields + if token.Issuer == "" { + return fmt.Errorf("issuer is required") + } + if token.Audience == "" { + return fmt.Errorf("audience is required") + } + if len(token.Attenuations) == 0 { + return fmt.Errorf("at least one attenuation is required") + } + + // Check temporal validity + now := time.Now().Unix() + + if token.NotBefore > 0 && now < token.NotBefore { + return fmt.Errorf("token is not yet valid (nbf: %d, now: %d)", token.NotBefore, now) + } + + if token.ExpiresAt > 0 && now >= token.ExpiresAt { + return fmt.Errorf("token has expired (exp: %d, now: %d)", token.ExpiresAt, now) + } + + return nil +} + +// checkCapabilities verifies that the token grants the required capabilities with enhanced module-specific validation +func (v *Verifier) checkCapabilities(token *Token, resource string, abilities []string) error { + for _, att := range token.Attenuations { + if att.Resource.GetURI() == resource { + if att.Capability.Grants(abilities) { + // Validate caveats for module-specific capabilities + if err := v.validateCaveats(att.Capability, att.Resource); err != nil { + return fmt.Errorf("caveat validation failed: %w", err) + } + return nil + } + } + } + return fmt.Errorf("required capabilities not granted for resource %s", resource) +} + +// validateCaveats validates constraints (caveats) for module-specific capabilities +func (v *Verifier) validateCaveats(cap Capability, resource Resource) error { + scheme := resource.GetScheme() + + switch scheme { + case "did": + return v.validateDIDCaveats(cap, resource) + case "dwn": + return v.validateDWNCaveats(cap, resource) + case "dex", "pool": + return v.validateDEXCaveats(cap, resource) + case "service", "svc": + return v.validateServiceCaveats(cap, resource) + case "vault", "ipfs": + return v.validateVaultCaveats(cap, resource) + default: + return nil // No caveat validation for unknown schemes + } +} + +// validateDIDCaveats validates DID-specific constraints +func (v *Verifier) validateDIDCaveats(cap Capability, resource Resource) error { + didCap, ok := cap.(*DIDCapability) + if !ok { + return nil // Not a DID capability + } + + for _, caveat := range didCap.Caveats { + switch caveat { + case "owner": + // Validate that the capability is for the owner's DID + if err := v.validateOwnerCaveat(resource); err != nil { + return fmt.Errorf("owner caveat validation failed: %w", err) + } + case "controller": + // Validate controller permissions + if err := v.validateControllerCaveat(resource); err != nil { + return fmt.Errorf("controller caveat validation failed: %w", err) + } + } + } + return nil +} + +// validateDWNCaveats validates DWN-specific constraints +func (v *Verifier) validateDWNCaveats(cap Capability, resource Resource) error { + dwnCap, ok := cap.(*DWNCapability) + if !ok { + return nil // Not a DWN capability + } + + for _, caveat := range dwnCap.Caveats { + switch caveat { + case "owner": + // Validate record ownership + if err := v.validateRecordOwnership(resource); err != nil { + return fmt.Errorf("record ownership validation failed: %w", err) + } + case "protocol": + // Validate protocol compliance + if err := v.validateProtocolCaveat(resource); err != nil { + return fmt.Errorf("protocol caveat validation failed: %w", err) + } + } + } + return nil +} + +// validateDEXCaveats validates DEX-specific constraints +func (v *Verifier) validateDEXCaveats(cap Capability, resource Resource) error { + dexCap, ok := 
cap.(*DEXCapability) + if !ok { + return nil // Not a DEX capability + } + + for _, caveat := range dexCap.Caveats { + switch caveat { + case "max-amount": + // Validate maximum swap amount + if dexCap.MaxAmount != "" { + if err := v.validateMaxAmountCaveat(dexCap.MaxAmount); err != nil { + return fmt.Errorf("max amount caveat validation failed: %w", err) + } + } + case "pool-member": + // Validate pool membership + if err := v.validatePoolMembershipCaveat(resource); err != nil { + return fmt.Errorf("pool membership validation failed: %w", err) + } + } + } + return nil +} + +// validateServiceCaveats validates Service-specific constraints +func (v *Verifier) validateServiceCaveats(cap Capability, resource Resource) error { + // Service capabilities use MultiCapability for now + // Add service-specific caveat validation if needed + return nil +} + +// validateVaultCaveats validates Vault-specific constraints +func (v *Verifier) validateVaultCaveats(cap Capability, resource Resource) error { + vaultCap, ok := cap.(*VaultCapability) + if !ok { + return nil // Not a vault capability + } + + for _, caveat := range vaultCap.Caveats { + switch caveat { + case "vault-owner": + // Validate vault ownership + if err := v.validateVaultOwnership(vaultCap.VaultAddress); err != nil { + return fmt.Errorf("vault ownership validation failed: %w", err) + } + case "enclave-integrity": + // Validate enclave data integrity + if err := v.validateEnclaveIntegrity(vaultCap.EnclaveDataCID); err != nil { + return fmt.Errorf("enclave integrity validation failed: %w", err) + } + } + } + return nil +} + +// Caveat validation helper methods (placeholders for actual implementation) + +// validateOwnerCaveat validates DID ownership constraint +func (v *Verifier) validateOwnerCaveat(resource Resource) error { + // Placeholder: Implement actual DID ownership validation + return nil +} + +// validateControllerCaveat validates DID controller constraint +func (v *Verifier) validateControllerCaveat(resource Resource) error { + // Placeholder: Implement actual controller validation + return nil +} + +// validateRecordOwnership validates DWN record ownership +func (v *Verifier) validateRecordOwnership(resource Resource) error { + // Placeholder: Implement actual record ownership validation + return nil +} + +// validateProtocolCaveat validates DWN protocol constraint +func (v *Verifier) validateProtocolCaveat(resource Resource) error { + // Placeholder: Implement actual protocol validation + return nil +} + +// validateMaxAmountCaveat validates DEX maximum amount constraint +func (v *Verifier) validateMaxAmountCaveat(maxAmount string) error { + // Placeholder: Implement actual amount validation + return nil +} + +// validatePoolMembershipCaveat validates DEX pool membership +func (v *Verifier) validatePoolMembershipCaveat(resource Resource) error { + // Placeholder: Implement actual pool membership validation + return nil +} + +// validateVaultOwnership validates vault ownership +func (v *Verifier) validateVaultOwnership(vaultAddress string) error { + // Placeholder: Implement actual vault ownership validation + return nil +} + +// validateEnclaveIntegrity validates enclave data integrity +func (v *Verifier) validateEnclaveIntegrity(enclaveDataCID string) error { + // Placeholder: Implement actual enclave integrity validation + return nil +} + +// validateDelegation checks that child token is properly attenuated from parent with enhanced module-specific validation +func (v *Verifier) validateDelegation(child, parent *Token) error 
{ + // Child's issuer must be parent's audience + if child.Issuer != parent.Audience { + return fmt.Errorf("delegation chain broken: child issuer must be parent audience") + } + + // Child capabilities must be subset of parent with module-specific validation + for _, childAtt := range child.Attenuations { + if !v.isModuleCapabilitySubset(childAtt, parent.Attenuations) { + return fmt.Errorf("child capability exceeds parent capabilities") + } + } + + // Child expiration must not exceed parent + if parent.ExpiresAt > 0 && (child.ExpiresAt == 0 || child.ExpiresAt > parent.ExpiresAt) { + return fmt.Errorf("child token expires after parent token") + } + + // Validate cross-module delegation constraints + if err := v.validateCrossModuleDelegation(child, parent); err != nil { + return fmt.Errorf("cross-module delegation validation failed: %w", err) + } + + return nil +} + +// isModuleCapabilitySubset checks if a capability is a subset with module-specific logic +func (v *Verifier) isModuleCapabilitySubset(childAtt Attenuation, parentAtts []Attenuation) bool { + for _, parentAtt := range parentAtts { + if childAtt.Resource.GetURI() == parentAtt.Resource.GetURI() { + if v.isModuleCapabilityContained(childAtt.Capability, parentAtt.Capability, childAtt.Resource.GetScheme()) { + return true + } + } + } + return false +} + +// isModuleCapabilityContained checks containment with module-specific logic +func (v *Verifier) isModuleCapabilityContained(child, parent Capability, scheme string) bool { + // First check basic containment + if parent.Contains(child) { + // Additional module-specific containment validation + switch scheme { + case "did": + return v.validateDIDContainment(child, parent) + case "dwn": + return v.validateDWNContainment(child, parent) + case "dex", "pool": + return v.validateDEXContainment(child, parent) + case "vault", "ipfs": + return v.validateVaultContainment(child, parent) + default: + return true // Basic containment is sufficient for unknown schemes + } + } + return false +} + +// validateCrossModuleDelegation validates constraints across different modules +func (v *Verifier) validateCrossModuleDelegation(child, parent *Token) error { + childModules := v.extractModulesFromToken(child) + parentModules := v.extractModulesFromToken(parent) + + // Check if child uses modules not present in parent + for module := range childModules { + if _, exists := parentModules[module]; !exists { + return fmt.Errorf("child token uses module '%s' not delegated by parent", module) + } + } + + // Validate specific cross-module constraints + return v.validateSpecificCrossModuleConstraints(child, parent) +} + +// extractModulesFromToken extracts the modules used by a token +func (v *Verifier) extractModulesFromToken(token *Token) map[string]bool { + modules := make(map[string]bool) + for _, att := range token.Attenuations { + scheme := att.Resource.GetScheme() + modules[scheme] = true + } + return modules +} + +// validateSpecificCrossModuleConstraints validates specific cross-module business logic +func (v *Verifier) validateSpecificCrossModuleConstraints(child, parent *Token) error { + // Example: If DID operations require vault access, ensure both are present + childHasDID := v.tokenHasModule(child, "did") + childHasVault := v.tokenHasModule(child, "vault") || v.tokenHasModule(child, "ipfs") + + if childHasDID && !childHasVault { + // Check if parent has vault capability that can be inherited + parentHasVault := v.tokenHasModule(parent, "vault") || v.tokenHasModule(parent, "ipfs") + if 
!parentHasVault { + return fmt.Errorf("DID operations require vault access which is not available in delegation chain") + } + } + + // Add more cross-module constraints as needed + return nil +} + +// tokenHasModule checks if a token has capabilities for a specific module +func (v *Verifier) tokenHasModule(token *Token, module string) bool { + for _, att := range token.Attenuations { + if att.Resource.GetScheme() == module { + return true + } + } + return false +} + +// Module-specific containment validation methods + +// validateDIDContainment validates DID capability containment +func (v *Verifier) validateDIDContainment(child, parent Capability) bool { + childDID, childOk := child.(*DIDCapability) + parentDID, parentOk := parent.(*DIDCapability) + + if !childOk || !parentOk { + return true // Not both DID capabilities, basic containment applies + } + + // Validate that child caveats are more restrictive or equal + return v.areCaveatsMoreRestrictive(childDID.Caveats, parentDID.Caveats) +} + +// validateDWNContainment validates DWN capability containment +func (v *Verifier) validateDWNContainment(child, parent Capability) bool { + childDWN, childOk := child.(*DWNCapability) + parentDWN, parentOk := parent.(*DWNCapability) + + if !childOk || !parentOk { + return true // Not both DWN capabilities, basic containment applies + } + + // Validate that child caveats are more restrictive or equal + return v.areCaveatsMoreRestrictive(childDWN.Caveats, parentDWN.Caveats) +} + +// validateDEXContainment validates DEX capability containment +func (v *Verifier) validateDEXContainment(child, parent Capability) bool { + childDEX, childOk := child.(*DEXCapability) + parentDEX, parentOk := parent.(*DEXCapability) + + if !childOk || !parentOk { + return true // Not both DEX capabilities, basic containment applies + } + + // Validate max amount restriction + if parentDEX.MaxAmount != "" && childDEX.MaxAmount != "" { + // Child max amount should be less than or equal to parent + if !v.isAmountLessOrEqual(childDEX.MaxAmount, parentDEX.MaxAmount) { + return false + } + } else if parentDEX.MaxAmount != "" && childDEX.MaxAmount == "" { + // Child must have max amount if parent does + return false + } + + // Validate that child caveats are more restrictive or equal + return v.areCaveatsMoreRestrictive(childDEX.Caveats, parentDEX.Caveats) +} + +// validateVaultContainment validates Vault capability containment +func (v *Verifier) validateVaultContainment(child, parent Capability) bool { + childVault, childOk := child.(*VaultCapability) + parentVault, parentOk := parent.(*VaultCapability) + + if !childOk || !parentOk { + return true // Not both Vault capabilities, basic containment applies + } + + // Vault address must match + if childVault.VaultAddress != parentVault.VaultAddress { + return false + } + + // Validate that child caveats are more restrictive or equal + return v.areCaveatsMoreRestrictive(childVault.Caveats, parentVault.Caveats) +} + +// Helper methods for containment validation + +// areCaveatsMoreRestrictive checks if child caveats are more restrictive than parent +func (v *Verifier) areCaveatsMoreRestrictive(childCaveats, parentCaveats []string) bool { + parentCaveatSet := make(map[string]bool) + for _, caveat := range parentCaveats { + parentCaveatSet[caveat] = true + } + + // All child caveats must be present in parent caveats (or child can have additional restrictions) + for _, childCaveat := range childCaveats { + if !parentCaveatSet[childCaveat] { + // Child has additional restrictions, which 
is allowed + continue + } + } + + return true +} + +// isAmountLessOrEqual compares two amount strings (placeholder implementation) +func (v *Verifier) isAmountLessOrEqual(childAmount, parentAmount string) bool { + // Placeholder: Implement actual amount comparison + // This would parse the amounts and compare them numerically + return true +} + +// isCapabilitySubset checks if a capability is a subset of any parent capabilities +func (v *Verifier) isCapabilitySubset(childAtt Attenuation, parentAtts []Attenuation) bool { + for _, parentAtt := range parentAtts { + if childAtt.Resource.GetURI() == parentAtt.Resource.GetURI() { + if parentAtt.Capability.Contains(childAtt.Capability) { + return true + } + } + } + return false +} + +// getRSAPublicKey extracts RSA public key from DID +func (v *Verifier) getRSAPublicKey(did keys.DID) (*rsa.PublicKey, error) { + verifyKey, err := did.VerifyKey() + if err != nil { + return nil, fmt.Errorf("failed to get verify key: %w", err) + } + + rsaKey, ok := verifyKey.(*rsa.PublicKey) + if !ok { + return nil, fmt.Errorf("DID does not contain RSA public key") + } + + return rsaKey, nil +} + +// getEd25519PublicKey extracts Ed25519 public key from DID +func (v *Verifier) getEd25519PublicKey(did keys.DID) (ed25519.PublicKey, error) { + pubKey := did.PublicKey() + rawBytes, err := pubKey.Raw() + if err != nil { + return nil, fmt.Errorf("failed to get raw public key: %w", err) + } + + if pubKey.Type() != crypto.Ed25519 { + return nil, fmt.Errorf("DID does not contain Ed25519 public key") + } + + return ed25519.PublicKey(rawBytes), nil +} + +// StringDIDResolver implements DIDResolver for did:key strings +type StringDIDResolver struct{} + +// ResolveDIDKey extracts a public key from a did:key string +func (StringDIDResolver) ResolveDIDKey(ctx context.Context, didStr string) (keys.DID, error) { + return keys.Parse(didStr) +} diff --git a/vrf/vrf.go b/vrf/vrf.go new file mode 100644 index 0000000..46e2e97 --- /dev/null +++ b/vrf/vrf.go @@ -0,0 +1,231 @@ +// Package vrf implements a verifiable random function using the Edwards form +// of Curve25519, SHA3 and the Elligator map. +// +// E is Curve25519 (in Edwards coordinates), h is SHA3. +// f is the elligator map (bytes->E) that covers half of E. +// 8 is the cofactor of E, the group order is 8*l for prime l. +// Setup : the prover publicly commits to a public key (P : E) +// H : names -> E +// H(n) = f(h(n))^8 +// VRF : keys -> names -> vrfs +// VRF_x(n) = h(n, H(n)^x)) +// Prove : keys -> names -> proofs +// Prove_x(n) = tuple(c=h(n, g^r, H(n)^r), t=r-c*x, ii=H(n)^x) +// where r = h(x, n) is used as a source of randomness +// Check : E -> names -> vrfs -> proofs -> bool +// Check(P, n, vrf, (c,t,ii)) = vrf == h(n, ii) +// && c == h(n, g^t*P^c, H(n)^t*ii^c) +package vrf + +import ( + "bytes" + "crypto/rand" + "errors" + "io" + + "golang.org/x/crypto/sha3" + + "github.com/sonr-io/sonr/crypto/internal/ed25519/edwards25519" + "github.com/sonr-io/sonr/crypto/internal/ed25519/extra25519" + "golang.org/x/crypto/ed25519" +) + +const ( + PublicKeySize = 32 + PrivateKeySize = 64 + Size = 32 + intermediateSize = 32 + ProofSize = 32 + 32 + intermediateSize +) + +var ErrGetPubKey = errors.New("[vrf] Couldn't get corresponding public-key from private-key") + +type ( + PrivateKey []byte + PublicKey []byte +) + +// GenerateKey creates a public/private key pair using rnd for randomness. +// If rnd is nil, crypto/rand is used. 
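A minimal usage sketch of the API declared in this file, mirroring the round trip that TestHonestComplete exercises in vrf_test.go below. Illustrative only, not part of this commit; written as if it lived in package vrf.

package vrf

import (
    "bytes"
    "errors"
)

// roundTripSketch is an illustrative sketch: generate a key pair, compute the VRF
// value for a name, produce a proof, and verify it against the public key.
func roundTripSketch() error {
    sk, err := GenerateKey(nil) // nil reader falls back to crypto/rand
    if err != nil {
        return err
    }
    pk, ok := sk.Public()
    if !ok {
        return ErrGetPubKey
    }

    name := []byte("alice")
    vrfOut := sk.Compute(name)            // VRF_x(name)
    vrfFromProof, proof := sk.Prove(name) // same value, plus a proof

    if !bytes.Equal(vrfOut, vrfFromProof) {
        return errors.New("Compute and Prove disagree")
    }
    if !pk.Verify(name, vrfOut, proof) {
        return errors.New("valid proof was rejected")
    }
    return nil
}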
+func GenerateKey(rnd io.Reader) (sk PrivateKey, err error) { + if rnd == nil { + rnd = rand.Reader + } + sk = make([]byte, 64) + _, err = io.ReadFull(rnd, sk[:32]) + if err != nil { + return sk, err + } + x, _ := sk.expandSecret() + + var pkP edwards25519.ExtendedGroupElement + edwards25519.GeScalarMultBase(&pkP, x) + var pkBytes [PublicKeySize]byte + pkP.ToBytes(&pkBytes) + + copy(sk[32:], pkBytes[:]) + return sk, err +} + +// Public extracts the public VRF key from the underlying private-key +// and returns a boolean indicating if the operation was successful. +func (sk PrivateKey) Public() (PublicKey, bool) { + pk, ok := ed25519.PrivateKey(sk).Public().(ed25519.PublicKey) + return PublicKey(pk), ok +} + +func (sk PrivateKey) expandSecret() (x, skhr *[32]byte) { + x, skhr = new([32]byte), new([32]byte) + hash := sha3.NewShake256() + hash.Write(sk[:32]) + hash.Read(x[:]) + hash.Read(skhr[:]) + x[0] &= 248 + x[31] &= 127 + x[31] |= 64 + return x, skhr +} + +// Compute generates the vrf value for the byte slice m using the +// underlying private key sk. +func (sk PrivateKey) Compute(m []byte) []byte { + x, _ := sk.expandSecret() + var ii edwards25519.ExtendedGroupElement + var iiB [32]byte + edwards25519.GeScalarMult(&ii, x, hashToCurve(m)) + ii.ToBytes(&iiB) + + hash := sha3.NewShake256() + hash.Write(iiB[:]) // const length: Size + hash.Write(m) + var vrf [Size]byte + hash.Read(vrf[:]) + return vrf[:] +} + +func hashToCurve(m []byte) *edwards25519.ExtendedGroupElement { + // H(n) = (f(h(n))^8) + var hmb [32]byte + sha3.ShakeSum256(hmb[:], m) + var hm edwards25519.ExtendedGroupElement + extra25519.HashToEdwards(&hm, &hmb) + edwards25519.GeDouble(&hm, &hm) + edwards25519.GeDouble(&hm, &hm) + edwards25519.GeDouble(&hm, &hm) + return &hm +} + +// Prove returns the vrf value and a proof such that +// Verify(m, vrf, proof) == true. The vrf value is the +// same as returned by Compute(m). +func (sk PrivateKey) Prove(m []byte) (vrf, proof []byte) { + x, skhr := sk.expandSecret() + var sH, rH [64]byte + var r, s, minusS, t, gB, grB, hrB, hxB, hB [32]byte + var ii, gr, hr edwards25519.ExtendedGroupElement + + h := hashToCurve(m) + h.ToBytes(&hB) + edwards25519.GeScalarMult(&ii, x, h) + ii.ToBytes(&hxB) + + // use hash of private-, public-key and msg as randomness source: + hash := sha3.NewShake256() + hash.Write(skhr[:]) + hash.Write(sk[32:]) // public key, as in ed25519 + hash.Write(m) + hash.Read(rH[:]) + hash.Reset() + edwards25519.ScReduce(&r, &rH) + + edwards25519.GeScalarMultBase(&gr, &r) + edwards25519.GeScalarMult(&hr, &r, h) + gr.ToBytes(&grB) + hr.ToBytes(&hrB) + gB = edwards25519.BaseBytes + + // H2(g, h, g^x, h^x, g^r, h^r, m) + hash.Write(gB[:]) + hash.Write(hB[:]) + hash.Write(sk[32:]) // ed25519 public-key + hash.Write(hxB[:]) + hash.Write(grB[:]) + hash.Write(hrB[:]) + hash.Write(m) + hash.Read(sH[:]) + hash.Reset() + edwards25519.ScReduce(&s, &sH) + + edwards25519.ScNeg(&minusS, &s) + edwards25519.ScMulAdd(&t, x, &minusS, &r) + + proof = make([]byte, ProofSize) + copy(proof[:32], s[:]) + copy(proof[32:64], t[:]) + copy(proof[64:96], hxB[:]) + + hash.Write(hxB[:]) + hash.Write(m) + vrf = make([]byte, Size) + hash.Read(vrf[:]) + return vrf, proof +} + +// Verify returns true iff vrf=Compute(m) for the sk that +// corresponds to pk. 
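For reference, Prove above packs the proof as the challenge, the response t = r - c*x, and the encoded point H(m)^x, 32 bytes each (ProofSize = 96), and Verify below reads the same offsets back. A small sketch of that layout, illustrative only and written as if in this package:

// splitProof is an illustrative sketch of the proof layout produced by Prove:
// bytes [0:32) hold the challenge, [32:64) the response t, and [64:96) the
// encoded point ii = H(m)^x that Verify hashes together with m to recheck the
// VRF value.
func splitProof(proof []byte) (challenge, response, point []byte, ok bool) {
    if len(proof) != ProofSize {
        return nil, nil, nil, false
    }
    return proof[:32], proof[32:64], proof[64:96], true
}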
+func (pkBytes PublicKey) Verify(m, vrfBytes, proof []byte) bool { + if len(proof) != ProofSize || len(vrfBytes) != Size || len(pkBytes) != PublicKeySize { + return false + } + var pk, s, sRef, t, vrf, hxB, hB, gB, ABytes, BBytes [32]byte + copy(vrf[:], vrfBytes) + copy(pk[:], pkBytes[:]) + copy(s[:32], proof[:32]) + copy(t[:32], proof[32:64]) + copy(hxB[:], proof[64:96]) + + hash := sha3.NewShake256() + hash.Write(hxB[:]) // const length + hash.Write(m) + var hCheck [Size]byte + hash.Read(hCheck[:]) + if !bytes.Equal(hCheck[:], vrf[:]) { + return false + } + hash.Reset() + + var P, B, ii, iic edwards25519.ExtendedGroupElement + var A, hmtP, iicP edwards25519.ProjectiveGroupElement + if !P.FromBytesBaseGroup(&pk) { + return false + } + if !ii.FromBytesBaseGroup(&hxB) { + return false + } + edwards25519.GeDoubleScalarMultVartime(&A, &s, &P, &t) + A.ToBytes(&ABytes) + gB = edwards25519.BaseBytes + + h := hashToCurve(m) // h = H1(m) + h.ToBytes(&hB) + edwards25519.GeDoubleScalarMultVartime(&hmtP, &t, h, &[32]byte{}) + edwards25519.GeDoubleScalarMultVartime(&iicP, &s, &ii, &[32]byte{}) + iicP.ToExtended(&iic) + hmtP.ToExtended(&B) + edwards25519.GeAdd(&B, &B, &iic) + B.ToBytes(&BBytes) + + var sH [64]byte + // sRef = H2(g, h, g^x, v, g^t·G^s,H1(m)^t·v^s, m), with v=H1(m)^x=h^x + hash.Write(gB[:]) + hash.Write(hB[:]) + hash.Write(pkBytes) + hash.Write(hxB[:]) + hash.Write(ABytes[:]) // const length (g^t*G^s) + hash.Write(BBytes[:]) // const length (H1(m)^t*v^s) + hash.Write(m) + hash.Read(sH[:]) + + edwards25519.ScReduce(&sRef, &sH) + return sRef == s +} diff --git a/vrf/vrf_test.go b/vrf/vrf_test.go new file mode 100644 index 0000000..5752a66 --- /dev/null +++ b/vrf/vrf_test.go @@ -0,0 +1,112 @@ +package vrf + +import ( + "bytes" + "testing" + // "fmt" +) + +func TestHonestComplete(t *testing.T) { + sk, err := GenerateKey(nil) + if err != nil { + t.Fatal(err) + } + pk, _ := sk.Public() + alice := []byte("alice") + aliceVRF := sk.Compute(alice) + aliceVRFFromProof, aliceProof := sk.Prove(alice) + + // fmt.Printf("pk: %X\n", pk) + // fmt.Printf("sk: %X\n", *sk) + // fmt.Printf("alice(bytes): %X\n", alice) + // fmt.Printf("aliceVRF: %X\n", aliceVRF) + // fmt.Printf("aliceProof: %X\n", aliceProof) + + if !pk.Verify(alice, aliceVRF, aliceProof) { + t.Error("Gen -> Compute -> Prove -> Verify -> FALSE") + } + if !bytes.Equal(aliceVRF, aliceVRFFromProof) { + t.Error("Compute != Prove") + } +} + +func TestConvertPrivateKeyToPublicKey(t *testing.T) { + sk, err := GenerateKey(nil) + if err != nil { + t.Fatal(err) + } + + pk, ok := sk.Public() + if !ok { + t.Fatal("Couldn't obtain public key.") + } + if !bytes.Equal(sk[32:], pk) { + t.Fatal("Raw byte respresentation doesn't match public key.") + } +} + +func TestFlipBitForgery(t *testing.T) { + sk, err := GenerateKey(nil) + if err != nil { + t.Fatal(err) + } + pk, _ := sk.Public() + alice := []byte("alice") + for i := 0; i < 32; i++ { + for j := uint(0); j < 8; j++ { + aliceVRF := sk.Compute(alice) + aliceVRF[i] ^= 1 << j + _, aliceProof := sk.Prove(alice) + if pk.Verify(alice, aliceVRF, aliceProof) { + t.Fatalf("forged by using aliceVRF[%d]^=%d:\n (sk=%x)", i, j, sk) + } + } + } +} + +func BenchmarkHashToGE(b *testing.B) { + alice := []byte("alice") + b.ResetTimer() + for n := 0; n < b.N; n++ { + hashToCurve(alice) + } +} + +func BenchmarkCompute(b *testing.B) { + sk, err := GenerateKey(nil) + if err != nil { + b.Fatal(err) + } + alice := []byte("alice") + b.ResetTimer() + for n := 0; n < b.N; n++ { + sk.Compute(alice) + } +} + +func BenchmarkProve(b 
*testing.B) { + sk, err := GenerateKey(nil) + if err != nil { + b.Fatal(err) + } + alice := []byte("alice") + b.ResetTimer() + for n := 0; n < b.N; n++ { + sk.Prove(alice) + } +} + +func BenchmarkVerify(b *testing.B) { + sk, err := GenerateKey(nil) + if err != nil { + b.Fatal(err) + } + alice := []byte("alice") + aliceVRF := sk.Compute(alice) + _, aliceProof := sk.Prove(alice) + pk, _ := sk.Public() + b.ResetTimer() + for n := 0; n < b.N; n++ { + pk.Verify(alice, aliceVRF, aliceProof) + } +} diff --git a/wasm/signer.go b/wasm/signer.go new file mode 100644 index 0000000..e27e113 --- /dev/null +++ b/wasm/signer.go @@ -0,0 +1,336 @@ +// Package wasm provides cryptographic signing and verification for WebAssembly modules +package wasm + +import ( + "crypto/ed25519" + "crypto/rand" + "encoding/base64" + "encoding/hex" + "encoding/json" + "fmt" + "sync" + "time" +) + +// Signer provides Ed25519 digital signature operations for WASM modules +type Signer struct { + privateKey ed25519.PrivateKey + publicKey ed25519.PublicKey +} + +// NewSigner creates a new signer with a generated Ed25519 key pair +func NewSigner() (*Signer, error) { + pub, priv, err := ed25519.GenerateKey(rand.Reader) + if err != nil { + return nil, fmt.Errorf("failed to generate Ed25519 key pair: %w", err) + } + + return &Signer{ + privateKey: priv, + publicKey: pub, + }, nil +} + +// NewSignerFromPrivateKey creates a signer from an existing private key +func NewSignerFromPrivateKey(privateKey ed25519.PrivateKey) (*Signer, error) { + if len(privateKey) != ed25519.PrivateKeySize { + return nil, fmt.Errorf("invalid private key size: expected %d, got %d", + ed25519.PrivateKeySize, len(privateKey)) + } + + publicKey := privateKey.Public().(ed25519.PublicKey) + + return &Signer{ + privateKey: privateKey, + publicKey: publicKey, + }, nil +} + +// Sign creates an Ed25519 signature for the given WASM bytecode +func (s *Signer) Sign(wasmBytes []byte) ([]byte, error) { + if s.privateKey == nil { + return nil, fmt.Errorf("private key not initialized") + } + + signature := ed25519.Sign(s.privateKey, wasmBytes) + return signature, nil +} + +// GetPublicKey returns the public key bytes +func (s *Signer) GetPublicKey() []byte { + return s.publicKey +} + +// GetPublicKeyHex returns the public key as hex string +func (s *Signer) GetPublicKeyHex() string { + return hex.EncodeToString(s.publicKey) +} + +// ExportPrivateKey exports the private key (handle with care) +func (s *Signer) ExportPrivateKey() []byte { + return s.privateKey +} + +// SignatureVerifier verifies Ed25519 signatures on WASM modules +type SignatureVerifier struct { + trustedKeys map[string]ed25519.PublicKey + mu sync.RWMutex +} + +// NewSignatureVerifier creates a new signature verifier +func NewSignatureVerifier() *SignatureVerifier { + return &SignatureVerifier{ + trustedKeys: make(map[string]ed25519.PublicKey), + } +} + +// AddTrustedKey adds a trusted public key for signature verification +func (v *SignatureVerifier) AddTrustedKey(keyID string, publicKey ed25519.PublicKey) error { + if len(publicKey) != ed25519.PublicKeySize { + return fmt.Errorf("invalid public key size: expected %d, got %d", + ed25519.PublicKeySize, len(publicKey)) + } + + v.mu.Lock() + defer v.mu.Unlock() + v.trustedKeys[keyID] = publicKey + return nil +} + +// AddTrustedKeyFromHex adds a trusted public key from hex string +func (v *SignatureVerifier) AddTrustedKeyFromHex(keyID string, publicKeyHex string) error { + publicKey, err := hex.DecodeString(publicKeyHex) + if err != nil { + return 
fmt.Errorf("failed to decode public key hex: %w", err) + } + + return v.AddTrustedKey(keyID, ed25519.PublicKey(publicKey)) +} + +// Verify verifies a signature against trusted public keys +func (v *SignatureVerifier) Verify(wasmBytes []byte, signature []byte) error { + v.mu.RLock() + defer v.mu.RUnlock() + + if len(v.trustedKeys) == 0 { + return fmt.Errorf("no trusted keys configured") + } + + // Try to verify with any trusted key + for keyID, publicKey := range v.trustedKeys { + if ed25519.Verify(publicKey, wasmBytes, signature) { + // Signature valid with this key + return nil + } + _ = keyID // Key ID available for logging if needed + } + + return fmt.Errorf("signature verification failed: no matching trusted key") +} + +// VerifyWithKey verifies a signature with a specific key +func (v *SignatureVerifier) VerifyWithKey(keyID string, wasmBytes []byte, signature []byte) error { + v.mu.RLock() + publicKey, exists := v.trustedKeys[keyID] + v.mu.RUnlock() + + if !exists { + return fmt.Errorf("trusted key not found: %s", keyID) + } + + if !ed25519.Verify(publicKey, wasmBytes, signature) { + return fmt.Errorf("signature verification failed for key: %s", keyID) + } + + return nil +} + +// RemoveTrustedKey removes a trusted key +func (v *SignatureVerifier) RemoveTrustedKey(keyID string) { + v.mu.Lock() + defer v.mu.Unlock() + delete(v.trustedKeys, keyID) +} + +// GetTrustedKeyIDs returns all trusted key IDs +func (v *SignatureVerifier) GetTrustedKeyIDs() []string { + v.mu.RLock() + defer v.mu.RUnlock() + + ids := make([]string, 0, len(v.trustedKeys)) + for id := range v.trustedKeys { + ids = append(ids, id) + } + return ids +} + +// SignedModule represents a WASM module with its signature +type SignedModule struct { + Module []byte `json:"-"` // WASM bytecode (excluded from JSON) + Hash string `json:"hash"` // SHA256 hash of module + Signature []byte `json:"signature"` // Ed25519 signature + SignerID string `json:"signer_id"` // ID of signing key + Timestamp time.Time `json:"timestamp"` // Signing timestamp + Version string `json:"version"` // Module version +} + +// SignModule creates a signed module package +func SignModule(signer *Signer, module []byte, signerID string, version string) (*SignedModule, error) { + // Compute hash + hashVerifier := NewHashVerifier() + hash := hashVerifier.ComputeHash(module) + + // Sign the module + signature, err := signer.Sign(module) + if err != nil { + return nil, fmt.Errorf("failed to sign module: %w", err) + } + + return &SignedModule{ + Module: module, + Hash: hash, + Signature: signature, + SignerID: signerID, + Timestamp: time.Now(), + Version: version, + }, nil +} + +// VerifySignedModule verifies a signed module +func VerifySignedModule(verifier *SignatureVerifier, module *SignedModule) error { + // Verify hash matches + hashVerifier := NewHashVerifier() + computedHash := hashVerifier.ComputeHash(module.Module) + if computedHash != module.Hash { + return fmt.Errorf("hash mismatch: expected %s, got %s", module.Hash, computedHash) + } + + // Verify signature + if module.SignerID != "" { + return verifier.VerifyWithKey(module.SignerID, module.Module, module.Signature) + } + + return verifier.Verify(module.Module, module.Signature) +} + +// SignatureManifest contains signature metadata for a WASM module +type SignatureManifest struct { + ModuleHash string `json:"module_hash"` + Signatures []SignatureEntry `json:"signatures"` + TrustedKeys []TrustedKeyEntry `json:"trusted_keys"` + CreatedAt time.Time `json:"created_at"` + ExpiresAt *time.Time 
`json:"expires_at,omitempty"` +} + +// SignatureEntry represents a single signature in the manifest +type SignatureEntry struct { + Signature string `json:"signature"` // Base64 encoded + SignerID string `json:"signer_id"` + Timestamp time.Time `json:"timestamp"` + Algorithm string `json:"algorithm"` // Always "Ed25519" +} + +// TrustedKeyEntry represents a trusted public key +type TrustedKeyEntry struct { + KeyID string `json:"key_id"` + PublicKey string `json:"public_key"` // Base64 encoded + AddedAt time.Time `json:"added_at"` + ExpiresAt *time.Time `json:"expires_at,omitempty"` + Purpose string `json:"purpose"` // e.g., "code-signing" +} + +// CreateSignatureManifest creates a manifest for module signatures +func CreateSignatureManifest(module []byte, signer *Signer, signerID string) (*SignatureManifest, error) { + hashVerifier := NewHashVerifier() + moduleHash := hashVerifier.ComputeHash(module) + + signature, err := signer.Sign(module) + if err != nil { + return nil, fmt.Errorf("failed to sign module: %w", err) + } + + manifest := &SignatureManifest{ + ModuleHash: moduleHash, + Signatures: []SignatureEntry{ + { + Signature: base64.StdEncoding.EncodeToString(signature), + SignerID: signerID, + Timestamp: time.Now(), + Algorithm: "Ed25519", + }, + }, + TrustedKeys: []TrustedKeyEntry{ + { + KeyID: signerID, + PublicKey: base64.StdEncoding.EncodeToString(signer.GetPublicKey()), + AddedAt: time.Now(), + Purpose: "code-signing", + }, + }, + CreatedAt: time.Now(), + } + + return manifest, nil +} + +// VerifyWithManifest verifies a module using a signature manifest +func VerifyWithManifest(module []byte, manifest *SignatureManifest) error { + // Verify hash + hashVerifier := NewHashVerifier() + computedHash := hashVerifier.ComputeHash(module) + if computedHash != manifest.ModuleHash { + return fmt.Errorf("module hash mismatch") + } + + // Check expiration + if manifest.ExpiresAt != nil && time.Now().After(*manifest.ExpiresAt) { + return fmt.Errorf("signature manifest has expired") + } + + // Create verifier with trusted keys from manifest + verifier := NewSignatureVerifier() + for _, key := range manifest.TrustedKeys { + // Check key expiration + if key.ExpiresAt != nil && time.Now().After(*key.ExpiresAt) { + continue // Skip expired keys + } + + publicKey, err := base64.StdEncoding.DecodeString(key.PublicKey) + if err != nil { + return fmt.Errorf("failed to decode public key: %w", err) + } + + if err := verifier.AddTrustedKey(key.KeyID, ed25519.PublicKey(publicKey)); err != nil { + return fmt.Errorf("failed to add trusted key: %w", err) + } + } + + // Verify at least one signature + for _, sig := range manifest.Signatures { + signature, err := base64.StdEncoding.DecodeString(sig.Signature) + if err != nil { + continue // Skip invalid signatures + } + + if err := verifier.VerifyWithKey(sig.SignerID, module, signature); err == nil { + // At least one valid signature found + return nil + } + } + + return fmt.Errorf("no valid signatures found in manifest") +} + +// ExportManifest exports a signature manifest as JSON +func ExportManifest(manifest *SignatureManifest) ([]byte, error) { + return json.MarshalIndent(manifest, "", " ") +} + +// ImportManifest imports a signature manifest from JSON +func ImportManifest(data []byte) (*SignatureManifest, error) { + var manifest SignatureManifest + if err := json.Unmarshal(data, &manifest); err != nil { + return nil, fmt.Errorf("failed to parse manifest: %w", err) + } + return &manifest, nil +} diff --git a/wasm/signer_test.go b/wasm/signer_test.go new 
file mode 100644 index 0000000..e8e16fe --- /dev/null +++ b/wasm/signer_test.go @@ -0,0 +1,361 @@ +package wasm + +import ( + "crypto/ed25519" + "crypto/rand" + "encoding/base64" + "encoding/hex" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestSigner_NewSigner(t *testing.T) { + signer, err := NewSigner() + require.NoError(t, err) + require.NotNil(t, signer) + + assert.NotNil(t, signer.privateKey) + assert.NotNil(t, signer.publicKey) + assert.Equal(t, ed25519.PrivateKeySize, len(signer.privateKey)) + assert.Equal(t, ed25519.PublicKeySize, len(signer.publicKey)) +} + +func TestSigner_NewSignerFromPrivateKey(t *testing.T) { + // Generate a key pair + pub, priv, err := ed25519.GenerateKey(rand.Reader) + require.NoError(t, err) + + // Create signer from private key + signer, err := NewSignerFromPrivateKey(priv) + require.NoError(t, err) + + assert.Equal(t, priv, signer.privateKey) + assert.Equal(t, pub, signer.publicKey) + + // Test invalid key size + invalidKey := []byte("too short") + _, err = NewSignerFromPrivateKey(ed25519.PrivateKey(invalidKey)) + assert.Error(t, err) + assert.Contains(t, err.Error(), "invalid private key size") +} + +func TestSigner_Sign(t *testing.T) { + signer, err := NewSigner() + require.NoError(t, err) + + // Test data + wasmBytes := []byte("test wasm module content") + + // Sign the data + signature, err := signer.Sign(wasmBytes) + require.NoError(t, err) + assert.Equal(t, ed25519.SignatureSize, len(signature)) + + // Verify the signature + valid := ed25519.Verify(signer.publicKey, wasmBytes, signature) + assert.True(t, valid) + + // Test signing different data produces different signature + differentData := []byte("different content") + signature2, err := signer.Sign(differentData) + require.NoError(t, err) + assert.NotEqual(t, signature, signature2) +} + +func TestSignatureVerifier_AddTrustedKey(t *testing.T) { + verifier := NewSignatureVerifier() + + // Generate a key pair + pub, _, err := ed25519.GenerateKey(rand.Reader) + require.NoError(t, err) + + // Add trusted key + err = verifier.AddTrustedKey("test-key", pub) + assert.NoError(t, err) + + // Test invalid key size + invalidKey := []byte("invalid") + err = verifier.AddTrustedKey("invalid-key", ed25519.PublicKey(invalidKey)) + assert.Error(t, err) + assert.Contains(t, err.Error(), "invalid public key size") +} + +func TestSignatureVerifier_AddTrustedKeyFromHex(t *testing.T) { + verifier := NewSignatureVerifier() + + // Generate a key pair + pub, _, err := ed25519.GenerateKey(rand.Reader) + require.NoError(t, err) + + // Add key from hex + hexKey := hex.EncodeToString(pub) + err = verifier.AddTrustedKeyFromHex("hex-key", hexKey) + assert.NoError(t, err) + + // Test invalid hex + err = verifier.AddTrustedKeyFromHex("bad-hex", "not-hex") + assert.Error(t, err) +} + +func TestSignatureVerifier_Verify(t *testing.T) { + // Create signer and verifier + signer, err := NewSigner() + require.NoError(t, err) + + verifier := NewSignatureVerifier() + + // Test data + wasmBytes := []byte("test wasm module") + + // Sign the data + signature, err := signer.Sign(wasmBytes) + require.NoError(t, err) + + // Test verification without trusted keys + err = verifier.Verify(wasmBytes, signature) + assert.Error(t, err) + assert.Contains(t, err.Error(), "no trusted keys") + + // Add trusted key + err = verifier.AddTrustedKey("signer1", signer.publicKey) + require.NoError(t, err) + + // Test successful verification + err = verifier.Verify(wasmBytes, signature) + 
assert.NoError(t, err) + + // Test verification with wrong data + wrongData := []byte("wrong data") + err = verifier.Verify(wrongData, signature) + assert.Error(t, err) + assert.Contains(t, err.Error(), "signature verification failed") + + // Test verification with wrong signature + wrongSignature := make([]byte, ed25519.SignatureSize) + err = verifier.Verify(wasmBytes, wrongSignature) + assert.Error(t, err) +} + +func TestSignatureVerifier_VerifyWithKey(t *testing.T) { + signer1, err := NewSigner() + require.NoError(t, err) + + signer2, err := NewSigner() + require.NoError(t, err) + + verifier := NewSignatureVerifier() + verifier.AddTrustedKey("key1", signer1.publicKey) + verifier.AddTrustedKey("key2", signer2.publicKey) + + wasmBytes := []byte("test module") + signature1, _ := signer1.Sign(wasmBytes) + signature2, _ := signer2.Sign(wasmBytes) + + // Verify with correct key + err = verifier.VerifyWithKey("key1", wasmBytes, signature1) + assert.NoError(t, err) + + err = verifier.VerifyWithKey("key2", wasmBytes, signature2) + assert.NoError(t, err) + + // Verify with wrong key + err = verifier.VerifyWithKey("key1", wasmBytes, signature2) + assert.Error(t, err) + + // Verify with non-existent key + err = verifier.VerifyWithKey("key3", wasmBytes, signature1) + assert.Error(t, err) + assert.Contains(t, err.Error(), "trusted key not found") +} + +func TestSignatureVerifier_Management(t *testing.T) { + verifier := NewSignatureVerifier() + + // Generate keys + pub1, _, _ := ed25519.GenerateKey(rand.Reader) + pub2, _, _ := ed25519.GenerateKey(rand.Reader) + + // Add keys + verifier.AddTrustedKey("key1", pub1) + verifier.AddTrustedKey("key2", pub2) + + // Get key IDs + ids := verifier.GetTrustedKeyIDs() + assert.Len(t, ids, 2) + assert.Contains(t, ids, "key1") + assert.Contains(t, ids, "key2") + + // Remove key + verifier.RemoveTrustedKey("key1") + ids = verifier.GetTrustedKeyIDs() + assert.Len(t, ids, 1) + assert.NotContains(t, ids, "key1") + assert.Contains(t, ids, "key2") +} + +func TestSignedModule(t *testing.T) { + signer, err := NewSigner() + require.NoError(t, err) + + module := []byte("test wasm module") + signerID := "test-signer" + version := "v1.0.0" + + // Create signed module + signed, err := SignModule(signer, module, signerID, version) + require.NoError(t, err) + + assert.Equal(t, module, signed.Module) + assert.NotEmpty(t, signed.Hash) + assert.NotEmpty(t, signed.Signature) + assert.Equal(t, signerID, signed.SignerID) + assert.Equal(t, version, signed.Version) + assert.False(t, signed.Timestamp.IsZero()) + + // Verify signed module + verifier := NewSignatureVerifier() + verifier.AddTrustedKey(signerID, signer.publicKey) + + err = VerifySignedModule(verifier, signed) + assert.NoError(t, err) + + // Test with tampered module + signed.Module = []byte("tampered") + err = VerifySignedModule(verifier, signed) + assert.Error(t, err) + assert.Contains(t, err.Error(), "hash mismatch") +} + +func TestSignatureManifest(t *testing.T) { + signer, err := NewSigner() + require.NoError(t, err) + + module := []byte("test wasm module") + signerID := "manifest-signer" + + // Create manifest + manifest, err := CreateSignatureManifest(module, signer, signerID) + require.NoError(t, err) + + assert.NotEmpty(t, manifest.ModuleHash) + assert.Len(t, manifest.Signatures, 1) + assert.Len(t, manifest.TrustedKeys, 1) + assert.Equal(t, signerID, manifest.Signatures[0].SignerID) + assert.Equal(t, "Ed25519", manifest.Signatures[0].Algorithm) + assert.Equal(t, signerID, manifest.TrustedKeys[0].KeyID) + 
assert.Equal(t, "code-signing", manifest.TrustedKeys[0].Purpose) + + // Verify with manifest + err = VerifyWithManifest(module, manifest) + assert.NoError(t, err) + + // Test with wrong module + wrongModule := []byte("wrong module") + err = VerifyWithManifest(wrongModule, manifest) + assert.Error(t, err) + assert.Contains(t, err.Error(), "hash mismatch") + + // Test with expired manifest + expired := time.Now().Add(-1 * time.Hour) + manifest.ExpiresAt = &expired + err = VerifyWithManifest(module, manifest) + assert.Error(t, err) + assert.Contains(t, err.Error(), "expired") +} + +func TestManifestSerialization(t *testing.T) { + signer, err := NewSigner() + require.NoError(t, err) + + module := []byte("test module") + manifest, err := CreateSignatureManifest(module, signer, "test-key") + require.NoError(t, err) + + // Export manifest + data, err := ExportManifest(manifest) + require.NoError(t, err) + assert.NotEmpty(t, data) + + // Import manifest + imported, err := ImportManifest(data) + require.NoError(t, err) + + assert.Equal(t, manifest.ModuleHash, imported.ModuleHash) + assert.Len(t, imported.Signatures, 1) + assert.Len(t, imported.TrustedKeys, 1) + + // Verify imported manifest + err = VerifyWithManifest(module, imported) + assert.NoError(t, err) + + // Test invalid JSON + _, err = ImportManifest([]byte("invalid json")) + assert.Error(t, err) +} + +func TestMultipleSignatures(t *testing.T) { + // Create multiple signers + signer1, _ := NewSigner() + signer2, _ := NewSigner() + + module := []byte("multi-signed module") + + // Create manifest with first signature + manifest, err := CreateSignatureManifest(module, signer1, "signer1") + require.NoError(t, err) + + // Add second signature + signature2, _ := signer2.Sign(module) + manifest.Signatures = append(manifest.Signatures, SignatureEntry{ + Signature: base64.StdEncoding.EncodeToString(signature2), + SignerID: "signer2", + Timestamp: time.Now(), + Algorithm: "Ed25519", + }) + + manifest.TrustedKeys = append(manifest.TrustedKeys, TrustedKeyEntry{ + KeyID: "signer2", + PublicKey: base64.StdEncoding.EncodeToString(signer2.GetPublicKey()), + AddedAt: time.Now(), + Purpose: "code-signing", + }) + + // Verify with either signature + err = VerifyWithManifest(module, manifest) + assert.NoError(t, err) + + // Remove first signature and key + manifest.Signatures = manifest.Signatures[1:] + manifest.TrustedKeys = manifest.TrustedKeys[1:] + + // Should still verify with second signature + err = VerifyWithManifest(module, manifest) + assert.NoError(t, err) +} + +func BenchmarkSign(b *testing.B) { + signer, _ := NewSigner() + data := make([]byte, 1024*1024) // 1MB + + b.ResetTimer() + for i := 0; i < b.N; i++ { + _, _ = signer.Sign(data) + } +} + +func BenchmarkVerify(b *testing.B) { + signer, _ := NewSigner() + verifier := NewSignatureVerifier() + verifier.AddTrustedKey("bench", signer.publicKey) + + data := make([]byte, 1024*1024) // 1MB + signature, _ := signer.Sign(data) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + _ = verifier.Verify(data, signature) + } +} diff --git a/wasm/verifier.go b/wasm/verifier.go new file mode 100644 index 0000000..b5ff26a --- /dev/null +++ b/wasm/verifier.go @@ -0,0 +1,224 @@ +// Package wasm provides cryptographic verification for WebAssembly modules +package wasm + +import ( + "crypto/sha256" + "encoding/hex" + "fmt" + "sync" +) + +// HashVerifier provides SHA256 hash verification for WASM modules +type HashVerifier struct { + // trustedHashes stores SHA256 hashes of trusted WASM modules + trustedHashes 
map[string]string + mu sync.RWMutex +} + +// NewHashVerifier creates a new WASM hash verifier +func NewHashVerifier() *HashVerifier { + return &HashVerifier{ + trustedHashes: make(map[string]string), + } +} + +// ComputeHash calculates SHA256 hash of WASM bytecode +func (v *HashVerifier) ComputeHash(wasmBytes []byte) string { + hash := sha256.Sum256(wasmBytes) + return hex.EncodeToString(hash[:]) +} + +// AddTrustedHash adds a trusted hash for a named WASM module +func (v *HashVerifier) AddTrustedHash(name, hash string) { + v.mu.Lock() + defer v.mu.Unlock() + v.trustedHashes[name] = hash +} + +// VerifyHash verifies WASM bytecode against trusted hash +func (v *HashVerifier) VerifyHash(name string, wasmBytes []byte) error { + v.mu.RLock() + trustedHash, exists := v.trustedHashes[name] + v.mu.RUnlock() + + if !exists { + return fmt.Errorf("no trusted hash found for WASM module: %s", name) + } + + computedHash := v.ComputeHash(wasmBytes) + if computedHash != trustedHash { + return fmt.Errorf( + "WASM hash verification failed for %s: expected %s, got %s", + name, trustedHash, computedHash, + ) + } + + return nil +} + +// VerifyHashWithFallback verifies against primary hash or fallback list +func (v *HashVerifier) VerifyHashWithFallback(name string, wasmBytes []byte, fallbackHashes []string) error { + // Try primary verification first + if err := v.VerifyHash(name, wasmBytes); err == nil { + return nil + } + + // Check against fallback hashes + computedHash := v.ComputeHash(wasmBytes) + for _, fallbackHash := range fallbackHashes { + if computedHash == fallbackHash { + // Update trusted hash for future use + v.AddTrustedHash(name, computedHash) + return nil + } + } + + return fmt.Errorf( + "WASM hash verification failed: computed hash %s not in trusted set", + computedHash, + ) +} + +// GetTrustedHash retrieves the trusted hash for a module +func (v *HashVerifier) GetTrustedHash(name string) (string, bool) { + v.mu.RLock() + defer v.mu.RUnlock() + hash, exists := v.trustedHashes[name] + return hash, exists +} + +// ClearTrustedHashes removes all trusted hashes +func (v *HashVerifier) ClearTrustedHashes() { + v.mu.Lock() + defer v.mu.Unlock() + v.trustedHashes = make(map[string]string) +} + +// HashChain provides hash chain verification for plugin updates +type HashChain struct { + chain []HashEntry + mu sync.RWMutex +} + +// HashEntry represents a single entry in the hash chain +type HashEntry struct { + Version string `json:"version"` + Hash string `json:"hash"` + PreviousHash string `json:"previous_hash"` + Timestamp int64 `json:"timestamp"` +} + +// NewHashChain creates a new hash chain +func NewHashChain() *HashChain { + return &HashChain{ + chain: make([]HashEntry, 0), + } +} + +// AddEntry adds a new entry to the hash chain +func (hc *HashChain) AddEntry(version, hash string, timestamp int64) error { + hc.mu.Lock() + defer hc.mu.Unlock() + + previousHash := "" + if len(hc.chain) > 0 { + previousHash = hc.chain[len(hc.chain)-1].Hash + } + + entry := HashEntry{ + Version: version, + Hash: hash, + PreviousHash: previousHash, + Timestamp: timestamp, + } + + hc.chain = append(hc.chain, entry) + return nil +} + +// VerifyChain verifies the integrity of the hash chain +func (hc *HashChain) VerifyChain() error { + hc.mu.RLock() + defer hc.mu.RUnlock() + + if len(hc.chain) == 0 { + return nil + } + + // First entry should have empty previous hash + if hc.chain[0].PreviousHash != "" { + return fmt.Errorf("invalid hash chain: first entry has non-empty previous hash") + } + + // Verify chain 
continuity + for i := 1; i < len(hc.chain); i++ { + if hc.chain[i].PreviousHash != hc.chain[i-1].Hash { + return fmt.Errorf( + "hash chain broken at version %s: expected previous hash %s, got %s", + hc.chain[i].Version, + hc.chain[i-1].Hash, + hc.chain[i].PreviousHash, + ) + } + } + + return nil +} + +// GetLatestEntry returns the most recent hash chain entry +func (hc *HashChain) GetLatestEntry() (*HashEntry, error) { + hc.mu.RLock() + defer hc.mu.RUnlock() + + if len(hc.chain) == 0 { + return nil, fmt.Errorf("hash chain is empty") + } + + latest := hc.chain[len(hc.chain)-1] + return &latest, nil +} + +// VerificationError represents a WASM verification failure +type VerificationError struct { + Module string + ExpectedHash string + ActualHash string + Reason string +} + +// Error implements the error interface +func (e *VerificationError) Error() string { + return fmt.Sprintf( + "WASM verification failed for %s: %s (expected: %s, actual: %s)", + e.Module, e.Reason, e.ExpectedHash, e.ActualHash, + ) +} + +// SecurityPolicy defines verification requirements +type SecurityPolicy struct { + RequireHashVerification bool + RequireSignature bool + AllowedHashes []string + MaxModuleSize int64 +} + +// DefaultSecurityPolicy returns a secure default policy +func DefaultSecurityPolicy() *SecurityPolicy { + return &SecurityPolicy{ + RequireHashVerification: true, + RequireSignature: false, // Will be enabled in next phase + AllowedHashes: []string{}, + MaxModuleSize: 10 * 1024 * 1024, // 10MB max + } +} + +// Validate checks if WASM module meets security policy +func (p *SecurityPolicy) Validate(wasmBytes []byte) error { + if p.MaxModuleSize > 0 && int64(len(wasmBytes)) > p.MaxModuleSize { + return fmt.Errorf( + "WASM module size %d exceeds maximum allowed size %d", + len(wasmBytes), p.MaxModuleSize, + ) + } + return nil +} diff --git a/wasm/verifier_test.go b/wasm/verifier_test.go new file mode 100644 index 0000000..2734685 --- /dev/null +++ b/wasm/verifier_test.go @@ -0,0 +1,226 @@ +package wasm + +import ( + "crypto/sha256" + "encoding/hex" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestHashVerifier_ComputeHash(t *testing.T) { + verifier := NewHashVerifier() + + testCases := []struct { + name string + input []byte + expected string + }{ + { + name: "empty input", + input: []byte{}, + expected: "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + }, + { + name: "simple wasm header", + input: []byte{0x00, 0x61, 0x73, 0x6d}, // \0asm + expected: "cd5d4935a48c0672cb06407bb443bc0087aff947c6b864bac886982c73b3027f", + }, + { + name: "test module", + input: []byte("test wasm module content"), + expected: "945acabcfc93e347e8c08ea44afd3122670f04a89f9a0a0a5ce16ab849bbac06", + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + hash := verifier.ComputeHash(tc.input) + assert.Equal(t, tc.expected, hash) + }) + } +} + +func TestHashVerifier_TrustedHashes(t *testing.T) { + verifier := NewHashVerifier() + + // Add trusted hash + moduleName := "test-module" + trustedHash := "abc123def456" + verifier.AddTrustedHash(moduleName, trustedHash) + + // Retrieve trusted hash + hash, exists := verifier.GetTrustedHash(moduleName) + assert.True(t, exists) + assert.Equal(t, trustedHash, hash) + + // Non-existent module + _, exists = verifier.GetTrustedHash("non-existent") + assert.False(t, exists) + + // Clear hashes + verifier.ClearTrustedHashes() + _, exists = verifier.GetTrustedHash(moduleName) + 
assert.False(t, exists) +} + +func TestHashVerifier_VerifyHash(t *testing.T) { + verifier := NewHashVerifier() + + // Test data + wasmBytes := []byte("test wasm module") + expectedHash := verifier.ComputeHash(wasmBytes) + moduleName := "test-module" + + // Test missing trusted hash + err := verifier.VerifyHash(moduleName, wasmBytes) + assert.Error(t, err) + assert.Contains(t, err.Error(), "no trusted hash found") + + // Add trusted hash + verifier.AddTrustedHash(moduleName, expectedHash) + + // Test successful verification + err = verifier.VerifyHash(moduleName, wasmBytes) + assert.NoError(t, err) + + // Test failed verification + verifier.AddTrustedHash(moduleName, "wrong-hash") + err = verifier.VerifyHash(moduleName, wasmBytes) + assert.Error(t, err) + assert.Contains(t, err.Error(), "hash verification failed") +} + +func TestHashVerifier_VerifyHashWithFallback(t *testing.T) { + verifier := NewHashVerifier() + + wasmBytes := []byte("test wasm module") + actualHash := verifier.ComputeHash(wasmBytes) + moduleName := "test-module" + + // Test with fallback hashes + fallbackHashes := []string{ + "wrong-hash-1", + actualHash, + "wrong-hash-2", + } + + err := verifier.VerifyHashWithFallback(moduleName, wasmBytes, fallbackHashes) + assert.NoError(t, err) + + // Verify that the hash was added as trusted + trustedHash, exists := verifier.GetTrustedHash(moduleName) + assert.True(t, exists) + assert.Equal(t, actualHash, trustedHash) + + // Test with no matching fallback + fallbackHashes = []string{"wrong-1", "wrong-2"} + verifier.ClearTrustedHashes() + + err = verifier.VerifyHashWithFallback(moduleName, wasmBytes, fallbackHashes) + assert.Error(t, err) + assert.Contains(t, err.Error(), "not in trusted set") +} + +func TestHashChain(t *testing.T) { + chain := NewHashChain() + + // Add entries + timestamp1 := time.Now().Unix() + err := chain.AddEntry("v1.0.0", "hash1", timestamp1) + require.NoError(t, err) + + timestamp2 := time.Now().Unix() + err = chain.AddEntry("v1.0.1", "hash2", timestamp2) + require.NoError(t, err) + + timestamp3 := time.Now().Unix() + err = chain.AddEntry("v1.0.2", "hash3", timestamp3) + require.NoError(t, err) + + // Verify chain integrity + err = chain.VerifyChain() + assert.NoError(t, err) + + // Get latest entry + latest, err := chain.GetLatestEntry() + require.NoError(t, err) + assert.Equal(t, "v1.0.2", latest.Version) + assert.Equal(t, "hash3", latest.Hash) + assert.Equal(t, "hash2", latest.PreviousHash) +} + +func TestHashChain_BrokenChain(t *testing.T) { + chain := NewHashChain() + + // Manually create a broken chain + chain.chain = []HashEntry{ + {Version: "v1", Hash: "hash1", PreviousHash: ""}, + {Version: "v2", Hash: "hash2", PreviousHash: "hash1"}, + {Version: "v3", Hash: "hash3", PreviousHash: "wrong-hash"}, // Broken link + } + + err := chain.VerifyChain() + assert.Error(t, err) + assert.Contains(t, err.Error(), "hash chain broken") +} + +func TestSecurityPolicy(t *testing.T) { + policy := DefaultSecurityPolicy() + + // Test size validation + smallModule := make([]byte, 1024) + err := policy.Validate(smallModule) + assert.NoError(t, err) + + // Test oversized module + largeModule := make([]byte, 11*1024*1024) + err = policy.Validate(largeModule) + assert.Error(t, err) + assert.Contains(t, err.Error(), "exceeds maximum allowed size") +} + +func TestVerificationError(t *testing.T) { + err := &VerificationError{ + Module: "test.wasm", + ExpectedHash: "expected123", + ActualHash: "actual456", + Reason: "hash mismatch", + } + + errStr := err.Error() + 
	assert.Contains(t, errStr, "test.wasm")
+	assert.Contains(t, errStr, "hash mismatch")
+	assert.Contains(t, errStr, "expected123")
+	assert.Contains(t, errStr, "actual456")
+}
+
+func BenchmarkComputeHash(b *testing.B) {
+	verifier := NewHashVerifier()
+	data := make([]byte, 1024*1024) // 1MB
+
+	b.ResetTimer()
+	for i := 0; i < b.N; i++ {
+		_ = verifier.ComputeHash(data)
+	}
+}
+
+func BenchmarkVerifyHash(b *testing.B) {
+	verifier := NewHashVerifier()
+	data := make([]byte, 1024*1024) // 1MB
+	hash := verifier.ComputeHash(data)
+	verifier.AddTrustedHash("bench-module", hash)
+
+	b.ResetTimer()
+	for i := 0; i < b.N; i++ {
+		_ = verifier.VerifyHash("bench-module", data)
+	}
+}
+
+// Helper function to compute SHA256 hash for testing
+func computeSHA256(data []byte) string {
+	hash := sha256.Sum256(data)
+	return hex.EncodeToString(hash[:])
+}
diff --git a/zkp/schnorr/schnorr.go b/zkp/schnorr/schnorr.go
new file mode 100644
index 0000000..5d2f92e
--- /dev/null
+++ b/zkp/schnorr/schnorr.go
@@ -0,0 +1,157 @@
+//
+// Copyright Coinbase, Inc. All Rights Reserved.
+//
+// SPDX-License-Identifier: Apache-2.0
+//
+
+// Package schnorr implements a Schnorr proof, as described and used in Doerner, et al., https://eprint.iacr.org/2018/499.pdf;
+// see Functionality 6. It also implements a "committed" version, as described in Functionality 7.
+package schnorr
+
+import (
+	"crypto/rand"
+	"crypto/subtle"
+	"fmt"
+
+	"github.com/pkg/errors"
+	"golang.org/x/crypto/sha3"
+
+	"github.com/sonr-io/sonr/crypto/core/curves"
+)
+
+type Commitment = []byte
+
+type Prover struct {
+	curve           *curves.Curve
+	basePoint       curves.Point
+	uniqueSessionId []byte
+}
+
+// Proof contains the (c, s) Schnorr proof. `Statement` is the curve point you're proving knowledge of the discrete log of,
+// with respect to the base point.
+type Proof struct {
+	C         curves.Scalar
+	S         curves.Scalar
+	Statement curves.Point
+}
+
+// NewProver generates a `Prover` object, ready to generate Schnorr proofs on any given point.
+// We allow the option `basePoint == nil`, in which case `basePoint` is auto-assigned to be the "default" generator for the group.
+func NewProver(curve *curves.Curve, basepoint curves.Point, uniqueSessionId []byte) *Prover {
+	if basepoint == nil {
+		basepoint = curve.NewGeneratorPoint()
+	}
+	return &Prover{
+		curve:           curve,
+		basePoint:       basepoint,
+		uniqueSessionId: uniqueSessionId,
+	}
+}
+
+// Prove generates and returns a Schnorr proof, given the scalar witness `x`.
+// In the process, it also constructs the statement (just one curve multiplication in this case).
+func (p *Prover) Prove(x curves.Scalar) (*Proof, error) {
+	// Assumes the prover's parameters are already populated; this populates the proof fields C and S.
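+	// The proof below is computed as follows, writing G for p.basePoint and X = x·G for the statement:
+	//   K = k·G                               with k a fresh random nonce
+	//   C = H(uniqueSessionId || G || X || K) the Fiat-Shamir challenge, where H is SHA3-256
+	//                                         (the digest is zero-padded and reduced to a scalar via SetBytesWide)
+	//   S = C·x + k                           the response
+	// Verify later recomputes K as S·G - C·X and checks that it hashes back to the same challenge C.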
+	var err error
+	result := &Proof{}
+	result.Statement = p.basePoint.Mul(x)
+	k := p.curve.Scalar.Random(rand.Reader)
+	random := p.basePoint.Mul(k)
+	hash := sha3.New256()
+	if _, err = hash.Write(p.uniqueSessionId); err != nil {
+		return nil, errors.Wrap(err, "writing salt to hash in schnorr prove")
+	}
+	if _, err = hash.Write(p.basePoint.ToAffineCompressed()); err != nil {
+		return nil, errors.Wrap(err, "writing basePoint to hash in schnorr prove")
+	}
+	if _, err = hash.Write(result.Statement.ToAffineCompressed()); err != nil {
+		return nil, errors.Wrap(err, "writing statement to hash in schnorr prove")
+	}
+	if _, err = hash.Write(random.ToAffineCompressed()); err != nil {
+		return nil, errors.Wrap(err, "writing point K to hash in schnorr prove")
+	}
+	hashBytes := hash.Sum(nil)
+	result.C, err = p.curve.Scalar.SetBytesWide(append(hashBytes, make([]byte, 32)...))
+	if err != nil {
+		return nil, errors.Wrap(err, "creating challenge scalar in schnorr prove")
+	}
+	result.S = result.C.Mul(x).Add(k)
+	return result, nil
+}
+
+// Verify verifies the `proof`, given the public parameters `basepoint` and `curve`.
+// As for the prover, we allow `basepoint == nil`, in which case it's auto-assigned to be the group's default generator.
+func Verify(
+	proof *Proof,
+	curve *curves.Curve,
+	basepoint curves.Point,
+	uniqueSessionId []byte,
+) error {
+	if basepoint == nil {
+		basepoint = curve.NewGeneratorPoint()
+	}
+	gs := basepoint.Mul(proof.S)
+	xc := proof.Statement.Mul(proof.C.Neg())
+	random := gs.Add(xc)
+	hash := sha3.New256()
+	if _, err := hash.Write(uniqueSessionId); err != nil {
+		return errors.Wrap(err, "writing salt to hash in schnorr verify")
+	}
+	if _, err := hash.Write(basepoint.ToAffineCompressed()); err != nil {
+		return errors.Wrap(err, "writing basePoint to hash in schnorr verify")
+	}
+	if _, err := hash.Write(proof.Statement.ToAffineCompressed()); err != nil {
+		return errors.Wrap(err, "writing statement to hash in schnorr verify")
+	}
+	if _, err := hash.Write(random.ToAffineCompressed()); err != nil {
+		return errors.Wrap(err, "writing point K to hash in schnorr verify")
+	}
+	hashBytes := hash.Sum(nil)
+	expectedC, err := curve.Scalar.SetBytesWide(append(hashBytes, make([]byte, 32)...))
+	if err != nil {
+		return errors.Wrap(err, "creating expected challenge scalar in schnorr verify")
+	}
+	if subtle.ConstantTimeCompare(proof.C.Bytes(), expectedC.Bytes()) != 1 {
+		return fmt.Errorf("schnorr verification failed")
+	}
+	return nil
+}
+
+// ProveCommit generates _and_ commits to a Schnorr proof which is later revealed; see Functionality 7.
+// Returns the Proof and Commitment.
+func (p *Prover) ProveCommit(x curves.Scalar) (*Proof, Commitment, error) {
+	proof, err := p.Prove(x)
+	if err != nil {
+		return nil, nil, err
+	}
+	hash := sha3.New256()
+	if _, err = hash.Write(proof.C.Bytes()); err != nil {
+		return nil, nil, err
+	}
+	if _, err = hash.Write(proof.S.Bytes()); err != nil {
+		return nil, nil, err
+	}
+	return proof, hash.Sum(nil), nil
+}
+
+// DecommitVerify receives a `Proof` and a `Commitment`; it first checks that the proof actually opens the commitment,
+// then it verifies the proof. Returns an error if either of these fails.
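+// The commitment is expected to be the SHA3-256 hash of C || S, exactly as produced by ProveCommit;
+// the decommitment check uses a constant-time comparison before the underlying Schnorr proof is verified.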
+func DecommitVerify( + proof *Proof, + commitment Commitment, + curve *curves.Curve, + basepoint curves.Point, + uniqueSessionId []byte, +) error { + hash := sha3.New256() + if _, err := hash.Write(proof.C.Bytes()); err != nil { + return err + } + if _, err := hash.Write(proof.S.Bytes()); err != nil { + return err + } + if subtle.ConstantTimeCompare(hash.Sum(nil), commitment) != 1 { + return fmt.Errorf("initial hash decommitment failed") + } + return Verify(proof, curve, basepoint, uniqueSessionId) +} diff --git a/zkp/schnorr/schnorr_test.go b/zkp/schnorr/schnorr_test.go new file mode 100644 index 0000000..0474ba3 --- /dev/null +++ b/zkp/schnorr/schnorr_test.go @@ -0,0 +1,36 @@ +package schnorr + +import ( + "crypto/rand" + "fmt" + "testing" + + "github.com/stretchr/testify/require" + "golang.org/x/crypto/sha3" + + "github.com/sonr-io/sonr/crypto/core/curves" +) + +func TestZKPOverMultipleCurves(t *testing.T) { + curveInstances := []*curves.Curve{ + curves.K256(), + curves.P256(), + curves.PALLAS(), + curves.BLS12377G1(), + curves.BLS12377G2(), + curves.BLS12381G1(), + curves.BLS12381G2(), + curves.ED25519(), + } + for i, curve := range curveInstances { + uniqueSessionId := sha3.New256().Sum([]byte("random seed")) + prover := NewProver(curve, nil, uniqueSessionId) + + secret := curve.Scalar.Random(rand.Reader) + proof, err := prover.Prove(secret) + require.NoError(t, err, fmt.Sprintf("failed in curve %d", i)) + + err = Verify(proof, curve, nil, uniqueSessionId) + require.NoError(t, err, fmt.Sprintf("failed in curve %d", i)) + } +}