add: ipa+pedersen, vector commitments for multipoint (#276)
* add: direct type and function lookup for ECP_TwistedEdwards
* feat: barycentric form using precompute optimisation
* feat: common reference string generator for multiproof
* add: transcript generation in fiat shamir style for multiproof
* add: random element generator for pedersen
* fix: random elem generator
* cleanup
* fix: minor fixes in barycentric
* add: added inner product and folding functions for ipa
* add: initial type def of ipa
* fix: shifted common utils to generic
* fix(common_utils): slight cleanup
* fix: StridedView for split_scalars()
* fix: refactored barycentric to Nim conventions
* fix(barycentric_form): refactored rest of funcs to Nim convention
* fix: function naming conventions
* fix: common util, naming conventions
* fix: transcript func naming
* add: init createIPAProof()
* add: MSM helper func for ECP_TwEdwards_Prj
* fix: remove case where domain is rootsOfUnity
* fix: removed bool return from computeInnerProducts()
* feat: createIPAProof()
* fix: refactoring func name
* add: fixed some comments
* add: generate challenge scalars for ipa, checkIPAProof()
* add: Multiproof creation func, some TODOs and minor enhancements
* add: init multiproof verifier
* fix: switch to generics
* add: multiproof verifier complete
* identation fix
* cleanup
* refactored to batchInvert(), resolved unused imports
* add: init ipa tests, minor refactor and cleanup
* cleanup
* add: helper types for code clarity
* refactor helper_types a lil bit
* add: scalar ser/de for banderwagon
* add: transcript test
* refactor: matchingOrderBigInt for scalar ser/de in transcript
* other refactorings and cleanups
* minor cleanup
* add: more helper functions to test_helper
* add: added functions for generating test polynomials
* add: added helper func for poly long div
* add: basic interpolation test
* refactor
* debug, add: helper functions for test and minor cleanups
* add: IPA test, init IPAProofConfig
* init: ipa proof check and verify tests
* fix: msm
* fix: ipa proof check and create, some helper funcs
* add: test ipa proof create and verify
* add: IPA proof equality test passing
* add: init multiproof tests
* few fixes in multiproof
* slight fix
* better readability for multiproof
* fix: refactored to using generalised MSM
* fix: createMutliproof() to new changes
* cleanups and fixes
* more cleanup
* mutliproof create and verify test passing, cleaned up unused imports
* typo
* more cleanup of unused stuff
* beautify and cleanup
* fix: file/folder restructuring
* fix: moved tests, restructure mostly
* fix: most
* fix: create IpaTranscript[H: CryptoHash, N: static int], smoothening left
* rm space
* rm more unwanted space
* add: notes, minor refactors, upstreamed
* smol fix
* cleanup
* docs moved, changed to chunk() for Views
* fixed style
* minor style fixes
Showing 11 changed files with 1,899 additions and 1 deletion.
@@ -0,0 +1,195 @@
# Constantine
# Copyright (c) 2018-2019 Status Research & Development GmbH
# Copyright (c) 2020-Present Mamy André-Ratsimbazafy
# Licensed and distributed under either of
#   * MIT license (license terms in the root directory or at http://opensource.org/licenses/MIT).
#   * Apache v2 license (license terms in the root directory or at http://www.apache.org/licenses/LICENSE-2.0).
# at your option. This file may not be copied, modified, or distributed except according to those terms.

import
  ./eth_verkle_constants,
  ../math/config/[type_ff, curves],
  ../math/elliptic/[ec_twistededwards_projective, ec_twistededwards_batch_ops],
  ../math/arithmetic/[finite_fields],
  ../math/arithmetic

# ############################################################
#
#      Barycentric Form using Precompute Optimisation
#
# ############################################################

# Please refer to https://hackmd.io/mJeCRcawTRqr9BooVpHv5g
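# Notation for the formulas implemented below: the evaluation domain is
# x_i = 0, 1, ..., VerkleDomain - 1, with
#
#   A(x)    = prod_{i} (x - x_i)
#   A'(x_j) = prod_{i != j} (x_j - x_i)
#
# For a point z outside the domain, the i-th barycentric (Lagrange) coefficient is
#
#   L_i(z) = A(z) / (A'(x_i) * (z - x_i))
#
# so that p(z) = sum_i L_i(z) * p(x_i) for a polynomial p given in Lagrange
# (evaluation) form over the domain.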

func newPrecomputedWeights*[PrecomputedWeights](res: var PrecomputedWeights) =
  ## newPrecomputedWeights generates the precomputed weights for the barycentric formula.
  ## We store A'(x_i) and 1/A'(x_i) back to back in one array of twice the domain size;
  ## the midpoint is the offset at which the inverses 1/A'(x_i) start.

  var midpoint: uint64 = 256
  for i in uint64(0) ..< midpoint:
    var weights {.noInit.}: Fr[Banderwagon]
    weights.computeBarycentricWeights(i)

    ## Here we store the VerkleDomain number of weights, and additionally their
    ## inverses, hence the array for barycentric weights as well as the inverted
    ## domain is roughly twice the size of the VerkleDomain.
    var inverseWeights {.noInit.}: Fr[Banderwagon]
    inverseWeights.inv(weights)

    res.barycentricWeights[i] = weights
    res.barycentricWeights[i + midpoint] = inverseWeights

  ## Computing 1/k and -1/k for k in [0, 255].
  ## We have one element less because we cannot divide by 0.
  midpoint = uint64(VerkleDomain) - 1

  for i in 1 ..< VerkleDomain:
    var k {.noInit.}: Fr[Banderwagon]
    var i_bg {.noInit.}: matchingOrderBigInt(Banderwagon)
    i_bg.setUint(uint64(i))
    k.fromBig(i_bg)

    k.inv(k)

    var neg_k: Fr[Banderwagon]
    var zero: Fr[Banderwagon]
    zero.setZero()
    neg_k.diff(zero, k)
    res.invertedDomain[i-1] = k
    res.invertedDomain[(i-1) + int(midpoint)] = neg_k

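# Resulting layout of the precomputed tables:
#   barycentricWeights = [A'(x_0), ..., A'(x_255), 1/A'(x_0), ..., 1/A'(x_255)]
#   invertedDomain     = [1/1, ..., 1/255, -1/1, ..., -1/255]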
func computeBarycentricWeights*(res: var Fr[Banderwagon], element: uint64) =
  ## Computes A'(x_j), where x_j must be an element of the domain,
  ## as the product of (x_j - x_i) over all domain elements x_i with x_i != x_j.
  if element <= uint64(VerkleDomain):

    var domain_element_Fr: Fr[Banderwagon]
    var bigndom: matchingOrderBigInt(Banderwagon)
    bigndom.setUint(uint64(element))
    domain_element_Fr.fromBig(bigndom)

    res.setOne()

    for i in uint64(0) ..< uint64(VerkleDomain):
      if i == element:
        continue

      var i_Fr: Fr[Banderwagon]

      var bigi: matchingOrderBigInt(Banderwagon)
      bigi.setUint(uint64(i))
      i_Fr.fromBig(bigi)

      var temp: Fr[Banderwagon]
      temp.diff(domain_element_Fr, i_Fr)
      res.prod(res, temp)

func computeBarycentricCoefficients*(res_inv: var openArray[Fr[Banderwagon]], precomp: PrecomputedWeights, point: Fr[Banderwagon]) =
  ## computeBarycentricCoefficients computes the coefficients for a point `z` such that
  ## when we have a polynomial `p` in Lagrange basis, the inner product of `p` and the barycentric
  ## coefficients is equal to p(z). Here `z` is a point outside of the domain.
  ## We can also term these the Lagrange coefficients L_i.
  var res {.noInit.}: array[VerkleDomain, Fr[Banderwagon]]
  for i in 0 ..< VerkleDomain:
    var weight: Fr[Banderwagon]
    weight = precomp.barycentricWeights[i]
    var i_bg: matchingOrderBigInt(Banderwagon)
    i_bg.setUint(uint64(i))
    var i_fr: Fr[Banderwagon]
    i_fr.fromBig(i_bg)

    res[i].diff(point, i_fr)
    res[i].prod(res[i], weight)

  var totalProd: Fr[Banderwagon]
  totalProd.setOne()

  for i in 0 ..< VerkleDomain:
    var i_bg: matchingOrderBigInt(Banderwagon)
    i_bg.setUint(uint64(i))
    var i_fr: Fr[Banderwagon]
    i_fr.fromBig(i_bg)

    var tmp: Fr[Banderwagon]
    tmp.diff(point, i_fr)

    totalProd.prod(totalProd, tmp)

  res_inv.batchInvert(res)

  for i in 0 ..< VerkleDomain:
    res_inv[i].prod(res_inv[i], totalProd)

func getInvertedElement*(res: var Fr[Banderwagon], precomp: PrecomputedWeights, element: int, is_negative: bool) =
  var index: int
  index = element - 1

  if is_negative:
    var midpoint = int(len(precomp.invertedDomain) / 2) - 1
    index = index + midpoint

  res = precomp.invertedDomain[index]

func getWeightRatios*(result: var Fr[Banderwagon], precomp: PrecomputedWeights, numerator: var int, denominator: var int) =

  var a = precomp.barycentricWeights[numerator]
  var midpoint = int(len(precomp.barycentricWeights) / 2) - 1

  var b = precomp.barycentricWeights[denominator + midpoint]

  result.prod(a, b)

func getBarycentricInverseWeight*(res: var Fr[Banderwagon], precomp: PrecomputedWeights, i: int) =
  var midpoint = int(len(precomp.barycentricWeights) / 2) - 1
  res = precomp.barycentricWeights[i + midpoint]

func absIntChecker*[int](res: var int, x: int) =
  # Default to false so the flag is well-defined when x >= 0.
  var is_negative = false
  if x < 0:
    is_negative = true

  if is_negative:
    res = -x
  else:
    res = x

func divisionOnDomain*(res: var array[VerkleDomain, Fr[Banderwagon]], precomp: PrecomputedWeights, index: var int, f: openArray[Fr[Banderwagon]]) =
  ## Computes (f(x) - f(x_i)) / (x - x_i) using the barycentric weights, where x_i is an element of the domain.
  var is_negative: bool = true
  var y = f[index]

  for i in 0 ..< VerkleDomain:
    if i != index:
      var denominator = i - int(index)
      var absDenominator {.noInit.}: int
      absDenominator.absIntChecker(denominator)

      # The sign of the denominator selects 1/k vs -1/k from the inverted domain.
      is_negative = denominator < 0

      var denominatorInv {.noInit.}: Fr[Banderwagon]
      denominatorInv.getInvertedElement(precomp, absDenominator, is_negative)

      res[i].diff(f[i], y)
      res[i].prod(res[i], denominatorInv)

      var weight_ratios {.noInit.}: Fr[Banderwagon]
      var dummy {.noInit.}: int
      dummy = i
      weight_ratios.getWeightRatios(precomp, index, dummy)

      # var weight_ratios = precomp.getWeightRatios(int(index), i)

      var tmp {.noInit.}: Fr[Banderwagon]
      tmp.prod(weight_ratios, res[i])

      res[index].diff(res[index], tmp)
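For orientation, here is a minimal usage sketch of the precompute-and-evaluate flow above. The import paths (`constantine/eth_verkle_ipa/...`) and the module name `barycentric_form` are assumptions based on this PR's file layout; everything else (`PrecomputedWeights`, `VerkleDomain`, `newPrecomputedWeights`, `computeBarycentricCoefficients`) is taken from the code above.

```nim
import
  constantine/eth_verkle_ipa/[barycentric_form, eth_verkle_constants], # assumed paths
  constantine/math/config/[type_ff, curves],
  constantine/math/arithmetic

# Build the tables of A'(x_i), 1/A'(x_i), 1/k and -1/k once.
var precomp: PrecomputedWeights
precomp.newPrecomputedWeights()

# Pick an evaluation point z outside the domain [0, VerkleDomain).
var z_big: matchingOrderBigInt(Banderwagon)
z_big.setUint(0xdeadbeef'u64)
var z: Fr[Banderwagon]
z.fromBig(z_big)

# coeffs[i] now holds L_i(z): the inner product <coeffs, p> equals p(z)
# for any polynomial p given in Lagrange (evaluation) form over the domain.
var coeffs: array[VerkleDomain, Fr[Banderwagon]]
coeffs.computeBarycentricCoefficients(precomp, z)
```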
@@ -0,0 +1,142 @@
# Constantine
# Copyright (c) 2018-2019 Status Research & Development GmbH
# Copyright (c) 2020-Present Mamy André-Ratsimbazafy
# Licensed and distributed under either of
#   * MIT license (license terms in the root directory or at http://opensource.org/licenses/MIT).
#   * Apache v2 license (license terms in the root directory or at http://www.apache.org/licenses/LICENSE-2.0).
# at your option. This file may not be copied, modified, or distributed except according to those terms.

## IPAConfiguration contains all of the necessary information to create Pedersen + IPA proofs,
## such as the SRS.
import
  ./[eth_verkle_constants],
  ../platforms/primitives,
  ../math/config/[type_ff, curves],
  ../math/elliptic/ec_twistededwards_projective,
  ../hashes,
  ../math/arithmetic,
  ../math/elliptic/ec_scalar_mul,
  ../math/elliptic/[ec_multi_scalar_mul, ec_multi_scalar_mul_scheduler],
  ../platforms/[bithacks, views],
  ../curves_primitives,
  ../serialization/[codecs_banderwagon, codecs_status_codes, endians]

# ############################################################
#
#           Random Element Generator
#
# ############################################################

func generate_random_points*[EC_P](points: var openArray[EC_P], ipaTranscript: var IpaTranscript, num_points: uint64) =
  ## generate_random_points generates random points on the curve, derived from the hardcoded VerkleSeed.
  var incrementer: uint64 = 0
  var idx: int = 0
  while uint64(idx) != num_points:

    var digest: IpaTranscript.H
    digest.init()
    digest.update(VerkleSeed)

    digest.update(incrementer.toBytes(bigEndian))
    var hash {.noInit.}: array[IpaTranscript.H.digestSize(), byte]
    digest.finish(hash)

    var x {.noInit.}: EC_P

    let stat1 = x.deserialize(hash)
    doAssert stat1 == cttCodecEcc_Success, "Deserialization Failure!"
    incrementer = incrementer + 1

    var x_as_Bytes {.noInit.}: array[IpaTranscript.H.digestSize(), byte]
    let stat2 = x_as_Bytes.serialize(x)
    doAssert stat2 == cttCodecEcc_Success, "Serialization Failure!"

    var point_found {.noInit.}: EC_P
    let stat3 = point_found.deserialize(x_as_Bytes)

    doAssert stat3 == cttCodecEcc_Success, "Deserialization Failure!"
    points[idx] = point_found
    idx = idx + 1

# ############################################################
#
#                    Inner Products
#
# ############################################################

func computeInnerProducts*[Fr](res: var Fr, a, b: openArray[Fr]) =
  debug: doAssert a.len == b.len, "Scalar lengths don't match!"
  res.setZero()
  for i in 0 ..< b.len:
    var tmp: Fr
    tmp.prod(a[i], b[i])
    res.sum(res, tmp)

func computeInnerProducts*[Fr](res: var Fr, a, b: View[Fr]) =
  debug: doAssert a.len == b.len, "Scalar lengths don't match!"
  res.setZero()
  for i in 0 ..< b.len:
    var tmp: Fr
    tmp.prod(a[i], b[i])
    res.sum(res, tmp)

# ############################################################
#
#                    Folding functions
#
# ############################################################

func foldScalars*[Fr](res: var openArray[Fr], a, b: openArray[Fr], x: Fr) =
  ## Computes res[i] = a[i] + b[i] * x
  debug: doAssert a.len == b.len, "Lengths should be equal!"

  for i in 0 ..< a.len:
    var bx {.noInit.}: Fr
    bx.prod(x, b[i])
    res[i].sum(bx, a[i])

func foldPoints*[EC_P](res: var openArray[EC_P], a, b: var openArray[EC_P], x: Fr) =
  ## Computes res[i] = a[i] + b[i] * x
  ## Note: b[i] is scaled by x in place before the sum.
  debug: doAssert a.len == b.len, "Should have equal lengths!"

  for i in 0 ..< a.len:
    var bx {.noInit.}: EC_P

    b[i].scalarMul(x.toBig())
    bx = b[i]
    res[i].sum(bx, a[i])

func computeNumRounds*(res: var uint32, vectorSize: SomeUnsignedInt) =
  ## Computes log2(vectorSize). A separate check prevents zero-sized vectors,
  ## and an additional check only allows vectors whose size is a power of 2.
  debug: doAssert vectorSize != uint64(0), "Zero is not a valid input!"

  var isP2: bool = isPowerOf2_vartime(vectorSize)

  debug: doAssert isP2, "not a power of 2, hence not a valid input"

  res = uint32(log2_vartime(vectorSize))
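# Example: for the 256-wide Verkle evaluation domain, vectorSize = 256 gives
# res = log2(256) = 8 rounds.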

# ############################################################
#
#                  Pedersen Commitment
#
# ############################################################

func pedersen_commit_varbasis*[EC_P](res: var EC_P, groupPoints: openArray[EC_P], g: int, polynomial: openArray[Fr], n: int) =
  # This Pedersen commitment function is used specifically with the split scalars
  # and split points from the IPA polynomial.

  # For further reference see https://dankradfeist.de/ethereum/2021/07/27/inner-product-arguments.html
  debug: doAssert groupPoints.len == polynomial.len, "Group elements and polynomial should have the same length!"
  var poly_big = newSeq[matchingOrderBigInt(Banderwagon)](n)
  for i in 0 ..< n:
    poly_big[i] = polynomial[i].toBig()

  var groupPoints_aff = newSeq[EC_P_Aff](g)
  for i in 0 ..< g:
    groupPoints_aff[i].affine(groupPoints[i])

  res.multiScalarMul_reference_vartime(poly_big, groupPoints)
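In other words, the commitment above is the multi-scalar multiplication C = sum_i polynomial[i] * groupPoints[i] over the supplied basis. Below is a minimal, hedged usage sketch of the scalar-side helpers from this file; the module path `constantine/eth_verkle_ipa/common_utils` is an assumption based on this PR's layout, while `computeInnerProducts` and `foldScalars` are the functions defined above.

```nim
import
  constantine/eth_verkle_ipa/common_utils, # assumed path for the file above
  constantine/math/config/[type_ff, curves],
  constantine/math/arithmetic

var a, b: array[4, Fr[Banderwagon]]
for i in 0 ..< 4:
  var bi: matchingOrderBigInt(Banderwagon)
  bi.setUint(uint64(i + 1))
  a[i].fromBig(bi) # a = [1, 2, 3, 4]
  b[i].fromBig(bi) # b = [1, 2, 3, 4]

# Inner product: <a, b> = 1 + 4 + 9 + 16 = 30
var ip: Fr[Banderwagon]
ip.computeInnerProducts(a, b)

# Folding: folded[i] = a[i] + x * b[i]; with x = 1 this is just a[i] + b[i].
var x: Fr[Banderwagon]
x.setOne()
var folded: array[4, Fr[Banderwagon]]
folded.foldScalars(a, b, x)
```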