-
Notifications
You must be signed in to change notification settings - Fork 144
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Adds test vectors to hashToCurve and expanders.
- Loading branch information
Showing
18 changed files
with
1,373 additions
and
135 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,147 @@ | ||
package group | ||
|
||
import ( | ||
"crypto" | ||
"encoding/binary" | ||
"errors" | ||
"io" | ||
|
||
"github.com/cloudflare/circl/xof" | ||
) | ||
|
||
// Expander produces arbitrary-length pseudo-random byte strings from an
// input string, as used by hash-to-curve constructions (the long-DST prefix
// below, "H2C-OVERSIZE-DST-", matches the expand_message functions of the
// hash-to-curve specification).
type Expander interface {
	// Expand generates a pseudo-random byte string of a determined length by
	// expanding an input string.
	Expand(in []byte, length uint) (pseudo []byte)
}
|
||
// expanderMD implements Expander on top of a Merkle-Damgård hash function
// (the XMD-style construction: blocks chained via XOR with b0).
type expanderMD struct {
	h   crypto.Hash // underlying hash function, e.g. crypto.SHA256
	dst []byte      // domain separation tag
}
|
||
// NewExpanderMD returns an Expander based on a Merkle-Damgård hash function.
// The dst parameter is a domain separation tag.
func NewExpanderMD(h crypto.Hash, dst []byte) *expanderMD {
	return &expanderMD{h, dst}
}
|
||
func (e *expanderMD) calcDSTPrime() []byte { | ||
var dstPrime []byte | ||
if l := len(e.dst); l > maxDSTLength { | ||
H := e.h.New() | ||
mustWrite(H, longDSTPrefix[:]) | ||
mustWrite(H, e.dst) | ||
dstPrime = H.Sum(nil) | ||
} else { | ||
dstPrime = make([]byte, l, l+1) | ||
copy(dstPrime, e.dst) | ||
} | ||
return append(dstPrime, byte(len(dstPrime))) | ||
} | ||
|
||
func (e *expanderMD) Expand(in []byte, n uint) []byte { | ||
H := e.h.New() | ||
bLen := uint(H.Size()) | ||
ell := (n + (bLen - 1)) / bLen | ||
if ell > 255 { | ||
panic(errorLongOutput) | ||
} | ||
|
||
zPad := make([]byte, H.BlockSize()) | ||
libStr := []byte{0, 0} | ||
libStr[0] = byte((n >> 8) & 0xFF) | ||
libStr[1] = byte(n & 0xFF) | ||
dstPrime := e.calcDSTPrime() | ||
|
||
H.Reset() | ||
mustWrite(H, zPad) | ||
mustWrite(H, in) | ||
mustWrite(H, libStr) | ||
mustWrite(H, []byte{0}) | ||
mustWrite(H, dstPrime) | ||
b0 := H.Sum(nil) | ||
|
||
H.Reset() | ||
mustWrite(H, b0) | ||
mustWrite(H, []byte{1}) | ||
mustWrite(H, dstPrime) | ||
bi := H.Sum(nil) | ||
pseudo := append([]byte{}, bi...) | ||
for i := uint(2); i <= ell; i++ { | ||
H.Reset() | ||
for i := range b0 { | ||
bi[i] ^= b0[i] | ||
} | ||
mustWrite(H, bi) | ||
mustWrite(H, []byte{byte(i)}) | ||
mustWrite(H, dstPrime) | ||
bi = H.Sum(nil) | ||
pseudo = append(pseudo, bi...) | ||
} | ||
return pseudo[0:n] | ||
} | ||
|
||
// expanderXOF is based on an extendable output function.
type expanderXOF struct {
	id        xof.ID // identifier of the XOF, e.g. SHAKE128
	kSecLevel uint   // target security level in bits (used to size a reduced DST)
	dst       []byte // domain separation tag
}
|
||
// NewExpanderXOF returns an Expander based on an extendable output function.
// The kSecLevel parameter is the target security level in bits, and dst is
// a domain separation string.
func NewExpanderXOF(id xof.ID, kSecLevel uint, dst []byte) *expanderXOF {
	return &expanderXOF{id, kSecLevel, dst}
}
|
||
// Expand panics if output's length is longer than 2^16 bytes. | ||
func (e *expanderXOF) Expand(in []byte, n uint) []byte { | ||
bLen := []byte{0, 0} | ||
binary.BigEndian.PutUint16(bLen, uint16(n)) | ||
pseudo := make([]byte, n) | ||
dstPrime := e.calcDSTPrime() | ||
|
||
H := e.id.New() | ||
mustWrite(H, in) | ||
mustWrite(H, bLen) | ||
mustWrite(H, dstPrime) | ||
mustReadFull(H, pseudo) | ||
return pseudo | ||
} | ||
|
||
func (e *expanderXOF) calcDSTPrime() []byte { | ||
var dstPrime []byte | ||
if l := len(e.dst); l > maxDSTLength { | ||
H := e.id.New() | ||
mustWrite(H, longDSTPrefix[:]) | ||
mustWrite(H, e.dst) | ||
max := ((2 * e.kSecLevel) + 7) / 8 | ||
dstPrime = make([]byte, max, max+1) | ||
mustReadFull(H, dstPrime) | ||
} else { | ||
dstPrime = make([]byte, l, l+1) | ||
copy(dstPrime, e.dst) | ||
} | ||
return append(dstPrime, byte(len(dstPrime))) | ||
} | ||
|
||
func mustWrite(w io.Writer, b []byte) { | ||
if n, err := w.Write(b); err != nil || n != len(b) { | ||
panic(err) | ||
} | ||
} | ||
|
||
func mustReadFull(r io.Reader, b []byte) { | ||
if n, err := io.ReadFull(r, b); err != nil || n != len(b) { | ||
panic(err) | ||
} | ||
} | ||
|
||
// maxDSTLength is the longest domain separation tag (in bytes) used as-is;
// longer tags are reduced by hashing with longDSTPrefix.
const maxDSTLength = 255
|
||
var (
	// longDSTPrefix ("H2C-OVERSIZE-DST-") is prepended to a domain
	// separation tag before hashing when the tag exceeds maxDSTLength.
	longDSTPrefix = [17]byte{'H', '2', 'C', '-', 'O', 'V', 'E', 'R', 'S', 'I', 'Z', 'E', '-', 'D', 'S', 'T', '-'}

	// errorLongOutput is the panic value used when the requested output
	// exceeds what the expander can produce.
	errorLongOutput = errors.New("requested too many bytes")
)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,112 @@ | ||
package group_test | ||
|
||
import ( | ||
"bytes" | ||
"crypto" | ||
"encoding/hex" | ||
"encoding/json" | ||
"fmt" | ||
"os" | ||
"path/filepath" | ||
"strconv" | ||
"testing" | ||
|
||
"github.com/cloudflare/circl/group" | ||
"github.com/cloudflare/circl/internal/test" | ||
"github.com/cloudflare/circl/xof" | ||
) | ||
|
||
func TestExpander(t *testing.T) { | ||
fileNames, err := filepath.Glob("./testdata/expand*.json") | ||
if err != nil { | ||
t.Fatal(err) | ||
} | ||
|
||
for _, fileName := range fileNames { | ||
f, err := os.Open(fileName) | ||
if err != nil { | ||
t.Fatal(err) | ||
} | ||
dec := json.NewDecoder(f) | ||
var v vectorExpanderSuite | ||
err = dec.Decode(&v) | ||
if err != nil { | ||
t.Fatal(err) | ||
} | ||
f.Close() | ||
|
||
t.Run(v.Name+"/"+v.Hash, func(t *testing.T) { testExpander(t, &v) }) | ||
} | ||
} | ||
|
||
func testExpander(t *testing.T, vs *vectorExpanderSuite) { | ||
var exp group.Expander | ||
switch vs.Hash { | ||
case "SHA256": | ||
exp = group.NewExpanderMD(crypto.SHA256, []byte(vs.DST)) | ||
case "SHA512": | ||
exp = group.NewExpanderMD(crypto.SHA512, []byte(vs.DST)) | ||
case "SHAKE128": | ||
exp = group.NewExpanderXOF(xof.SHAKE128, 0, []byte(vs.DST)) | ||
case "SHAKE256": | ||
exp = group.NewExpanderXOF(xof.SHAKE256, 0, []byte(vs.DST)) | ||
default: | ||
t.Skip("hash not supported: " + vs.Hash) | ||
} | ||
|
||
for i, v := range vs.Tests { | ||
lenBytes, err := strconv.ParseUint(v.Len, 0, 64) | ||
if err != nil { | ||
t.Fatal(err) | ||
} | ||
|
||
got := exp.Expand([]byte(v.Msg), uint(lenBytes)) | ||
want, err := hex.DecodeString(v.UniformBytes) | ||
if err != nil { | ||
t.Fatal(err) | ||
} | ||
|
||
if !bytes.Equal(got, want) { | ||
test.ReportError(t, got, want, i) | ||
} | ||
} | ||
} | ||
|
||
// vectorExpanderSuite mirrors the JSON layout of the expand_message test
// vector files in testdata (one suite per file, many cases per suite).
type vectorExpanderSuite struct {
	DST  string `json:"DST"`  // domain separation tag shared by all cases
	Hash string `json:"hash"` // hash/XOF name selecting the expander
	Name string `json:"name"` // suite name, used for the subtest label
	Tests []struct {
		DstPrime     string `json:"DST_prime"`     // expected DST_prime (unused by testExpander)
		Len          string `json:"len_in_bytes"`  // requested output length, as a string
		Msg          string `json:"msg"`           // input message
		MsgPrime     string `json:"msg_prime"`     // expected msg_prime (unused by testExpander)
		UniformBytes string `json:"uniform_bytes"` // expected output, hex-encoded
	} `json:"tests"`
}
|
||
func BenchmarkExpander(b *testing.B) { | ||
in := []byte("input") | ||
dst := []byte("dst") | ||
|
||
for _, v := range []struct { | ||
Name string | ||
Exp group.Expander | ||
}{ | ||
{"XMD", group.NewExpanderMD(crypto.SHA256, dst)}, | ||
{"XOF", group.NewExpanderXOF(xof.SHAKE128, 0, dst)}, | ||
} { | ||
exp := v.Exp | ||
for l := 8; l <= 10; l++ { | ||
max := int64(1) << uint(l) | ||
|
||
b.Run(fmt.Sprintf("%v/%v", v.Name, max), func(b *testing.B) { | ||
b.SetBytes(max) | ||
b.ResetTimer() | ||
for i := 0; i < b.N; i++ { | ||
exp.Expand(in, uint(max)) | ||
} | ||
}) | ||
} | ||
} | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.