Used base16 format for JSON test vectors.
BjoernMHaase committed Sep 24, 2024
1 parent 5ab7e34 commit 04c63f0
Showing 8 changed files with 860 additions and 8,685 deletions.
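
In essence, this commit replaces the earlier `list(...)` representation of byte strings (JSON arrays of integers) in the test-vector dictionaries with base16 (hex) strings produced by a new `byte_string_to_json` helper. A minimal sketch of the idea, using only Python's standard `base64` module (the helper mirrors the definition added in `poc/CPace_string_utils.sage`; the sample value is illustrative only):

~~~ python
import base64

def byte_string_to_json(bytestring):
    # Encode a byte string as an uppercase base16 (hex) ASCII string,
    # as now used for all byte-valued entries in the JSON test vectors.
    return base64.b16encode(bytestring).decode("ASCII")

prs = b"Password"
old_entry = list(prs)                  # old format: [80, 97, 115, 115, 119, 111, 114, 100]
new_entry = byte_string_to_json(prs)   # new format: "50617373776F7264"
~~~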
1,754 changes: 595 additions & 1,159 deletions draft-irtf-cfrg-cpace.md

Large diffs are not rendered by default.

30 changes: 13 additions & 17 deletions poc/CPace_coffee.sage
@@ -83,19 +83,17 @@ class G_CoffeeEcosystem():
result_dict = {}
result_dict["H"] = H.name
result_dict["H.s_in_bytes"] = int(H.s_in_bytes)
result_dict["PRS"] = list (PRS)
result_dict["PRS"] = byte_string_to_json (PRS)
result_dict["ZPAD length"] = int(len_zpad)
result_dict["DSI"] = list(self.DSI)
result_dict["CI"] = list(CI)
result_dict["sid"] = list(sid)
result_dict["generator_string(G.DSI,PRS,CI,sid,H.s_in_bytes)"] = list(gen_string)
result_dict["hash result"] = list(string_hash)
result_dict["encoded generator g"] = list(result.encode())
result_dict["DSI"] = byte_string_to_json(self.DSI)
result_dict["CI"] = byte_string_to_json(CI)
result_dict["sid"] = byte_string_to_json(sid)
result_dict["generator_string(G.DSI,PRS,CI,sid,H.s_in_bytes)"] = byte_string_to_json(gen_string)
result_dict["hash result"] = byte_string_to_json(string_hash)
result_dict["encoded generator g"] = byte_string_to_json(result.encode())

print ("\n#### Testvectors as JSON file encoded as BASE64\n", file=file)
print ("~~~", file=file)
tv_output_python_dictionary_as_json_base64(result_dict,line_prefix = " ",file=file)
print ("~~~\n", file=file)



@@ -120,10 +118,10 @@ def output_coffee_invalid_point_test_cases(G, file = sys.stdout):
print ("~~~\n", file = file)

dict_valid = {}
dict_valid["s"] = list(y)
dict_valid["X"] = list(X)
dict_valid["G.scalar_mult(s,decode(X))"] = list(Z)
dict_valid["G.scalar_mult_vfy(s,X)"] = list(K)
dict_valid["s"] = byte_string_to_json(y)
dict_valid["X"] = byte_string_to_json(X)
dict_valid["G.scalar_mult(s,decode(X))"] = byte_string_to_json(Z)
dict_valid["G.scalar_mult_vfy(s,X)"] = byte_string_to_json(K)

result_dict["Valid"] = dict_valid

@@ -147,13 +145,11 @@ def output_coffee_invalid_point_test_cases(G, file = sys.stdout):
print (" G.scalar_mult_vfy(s,Y_i1) = G.scalar_mult_vfy(s,Y_i2) = G.I", file = file)
print ("~~~\n", file = file)

result_dict["Invalid Y1"] = list(Y_inv1)
result_dict["Invalid Y2"] = list(G.I)
result_dict["Invalid Y1"] = byte_string_to_json(Y_inv1)
result_dict["Invalid Y2"] = byte_string_to_json(G.I)

print ("\n#### Testvectors as JSON file encoded as BASE64\n", file=file)
print ("~~~", file=file)
tv_output_python_dictionary_as_json_base64(result_dict,line_prefix = " ",file=file)
print ("~~~\n", file=file)


return result_dict
24 changes: 11 additions & 13 deletions poc/CPace_montgomery.sage
@@ -34,7 +34,7 @@ class G_Montgomery:
## Decoding and Encoding functions according to RFC7748
~~~
~~~ python
def decodeLittleEndian(b, bits):
return sum([b[i] << 8*i for i in range((bits+7)/8)])
@@ -60,7 +60,7 @@ class G_Montgomery:
The Elligator 2 map requires a non-square field element Z which shall be calculated
as follows.
~~~
~~~ python
def find_z_ell2(F):
# Find nonsquare for Elligator2
# Argument: F, a field object, e.g., F = GF(2^255 - 19)
@@ -80,7 +80,7 @@ results in a value of Z = 2 for Curve25519 and Z=-1 for Ed448.
The following code maps a field element r to an encoded field element which
is a valid u-coordinate of a Montgomery curve with curve parameter A.
~~~
~~~ python
def elligator2(r, q, A, field_size_bits):
# Inputs: field element r, field order q,
# curve parameter A and field size in bits
@@ -157,20 +157,18 @@ is a valid u-coordinate of a Montgomery curve with curve parameter A.
result_dict = {}
result_dict["H"] = H.name
result_dict["H.s_in_bytes"] = int(H.s_in_bytes)
result_dict["PRS"] = list (PRS)
result_dict["PRS"] = byte_string_to_json (PRS)
result_dict["ZPAD length"] = int(len_zpad)
result_dict["DSI"] = list(self.DSI)
result_dict["CI"] = list(CI)
result_dict["sid"] = list(sid)
result_dict["generator_string(G.DSI,PRS,CI,sid,H.s_in_bytes)"] = list(gen_string)
result_dict["hash generator string"] = list(string_hash)
result_dict["decoded field element of %i bits" % self.field_size_bits] = list(IntegerToByteArray(u,self.field_size_bytes))
result_dict["generator g"] = list(result)
result_dict["DSI"] = byte_string_to_json(self.DSI)
result_dict["CI"] = byte_string_to_json(CI)
result_dict["sid"] = byte_string_to_json(sid)
result_dict["generator_string(G.DSI,PRS,CI,sid,H.s_in_bytes)"] = byte_string_to_json(gen_string)
result_dict["hash generator string"] = byte_string_to_json(string_hash)
result_dict["decoded field element of %i bits" % self.field_size_bits] = byte_string_to_json(IntegerToByteArray(u,self.field_size_bytes))
result_dict["generator g"] = byte_string_to_json(result)

print ("\n#### Testvectors as JSON file encoded as BASE64\n", file=file)
print ("~~~", file=file)
tv_output_python_dictionary_as_json_base64(result_dict,line_prefix = " ",file=file)
print ("~~~\n", file=file)

return result

98 changes: 45 additions & 53 deletions poc/CPace_string_utils.sage
@@ -68,20 +68,22 @@ def tv_output_byte_array(data, test_vector_name = "", line_prefix = " ", max_le
print("\n",end="",file=file)
return

def tv_output_python_dictionary_as_json_base64(dictionary, line_prefix = " ", max_len = 60, file = sys.stdout):
def byte_string_to_json(bytestring):
return base64.b16encode(bytestring).decode("ASCII")

def tv_output_python_dictionary_as_json_base64(dictionary, line_prefix = " ", max_len = 63, file = sys.stdout):
json_text = json.dumps(dictionary).encode("ASCII")
json_string = base64.standard_b64encode(json_text).decode("ASCII")

result = "\n"
base64_header = line_prefix + "###"
base64_trailer = "\n"
result = "\n~~~ test-vectors"
base64_header = "\n" + line_prefix + "##"

offset = 0;
while offset < len(json_string):
next_chunk_len = min(len(json_string), max_len)
result += base64_header + json_string[offset:(offset+next_chunk_len)] + base64_trailer
result += base64_header + json_string[offset:(offset+next_chunk_len)]
offset += next_chunk_len
result += "\n"
result += "\n~~~\n\n"

print(result,end="",file=file)

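
With this change, `tv_output_python_dictionary_as_json_base64` emits its own `~~~ test-vectors` fence whose body is the BASE64-encoded JSON dictionary, wrapped into `##`-prefixed lines of at most 63 characters. A reader can recover the dictionary by stripping the prefixes, BASE64-decoding, and parsing the JSON; byte-valued fields are then base16 strings. A minimal decoding sketch, not part of the commit (the field names and sample value below are illustrative, and non-byte fields such as `H.s_in_bytes` are simply left as plain JSON values):

~~~ python
import base64, json

def decode_test_vector_block(chunk_lines):
    # chunk_lines: the "  ##..."-prefixed lines found between the
    # "~~~ test-vectors" fences emitted by
    # tv_output_python_dictionary_as_json_base64.
    blob = "".join(line.strip().lstrip("#") for line in chunk_lines)
    return json.loads(base64.standard_b64decode(blob))

# Round trip against the new encoding (illustrative data only):
vectors = {"PRS": base64.b16encode(b"Password").decode("ASCII"),
           "ZPAD length": 23}
blob = base64.standard_b64encode(json.dumps(vectors).encode("ASCII")).decode("ASCII")
lines = ["  ##" + blob[i:i + 63] for i in range(0, len(blob), 63)]
decoded = decode_test_vector_block(lines)
assert base64.b16decode(decoded["PRS"]) == b"Password"
~~~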
Expand Down Expand Up @@ -150,7 +152,7 @@ def generate_testvectors_string_functions(file = sys.stdout):

print (
"""
~~~
~~~ python
def prepend_len(data):
"prepend LEB128 encoding of length"
length = len(data)
Expand Down Expand Up @@ -187,23 +189,21 @@ def prepend_len(data):
print ("~~~", file = file)

result_dict = {}
result_dict["prepend_len(b"")"] = list(prepend_len(b""))
result_dict['b"1234"'] = list(b"1234")
result_dict['prepend_len(b"1234")'] = list(prepend_len(b"1234"))
result_dict["prepend_len(bytes(range(127)))"] = list(prepend_len(bytes(range(127))))
result_dict["prepend_len(bytes(range(128)))"] = list(prepend_len(bytes(range(128))))
result_dict["prepend_len(b"")"] = byte_string_to_json(prepend_len(b""))
result_dict['b"1234"'] = byte_string_to_json(b"1234")
result_dict['prepend_len(b"1234")'] = byte_string_to_json(prepend_len(b"1234"))
result_dict["prepend_len(bytes(range(127)))"] = byte_string_to_json(prepend_len(bytes(range(127))))
result_dict["prepend_len(bytes(range(128)))"] = byte_string_to_json(prepend_len(bytes(range(128))))

print ("\n#### Testvectors as JSON file encoded as BASE64\n", file=file)
print ("~~~", file=file)
tv_output_python_dictionary_as_json_base64(result_dict,line_prefix = " ",file=file)
print ("~~~\n", file=file)


print ("\n\n### lv\\_cat function\n", file = file)

print (
"""
~~~
~~~ python
def lv_cat(*args):
result = b""
for arg in args:
@@ -222,20 +222,18 @@ def prepend_len(data):
print ("~~~", file = file)

result_dict = {}
result_dict["ba1"] = list(b"1234")
result_dict["ba2"] = list(b"5")
result_dict["ba3"] = list(b"6789")
result_dict["lv_cat(ba1,ba2,ba3)"] = list(lv_cat(b"1234",b"5",b"",b"6789"))
result_dict["ba1"] = byte_string_to_json(b"1234")
result_dict["ba2"] = byte_string_to_json(b"5")
result_dict["ba3"] = byte_string_to_json(b"6789")
result_dict["lv_cat(ba1,ba2,ba3)"] = byte_string_to_json(lv_cat(b"1234",b"5",b"",b"6789"))

print ("\n#### Testvectors as JSON file encoded as BASE64\n", file=file)
print ("~~~", file=file)
tv_output_python_dictionary_as_json_base64(result_dict,line_prefix = " ",file=file)
print ("~~~\n", file=file)


print ("\n## Definition of generator\\_string function.\n\n" +
"""
~~~
~~~ python
def generator_string(DSI,PRS,CI,sid,s_in_bytes):
# Concat all input fields with prepended length information.
# Add zero padding in the first hash block after DSI and PRS.
@@ -252,7 +250,7 @@ def generator_string(DSI,PRS,CI,sid,s_in_bytes):

print ("\nFor ordered concatenation lexiographical ordering of byte sequences is used:\n\n" +
"""
~~~
~~~ python
def lexiographically_larger(bytes1,bytes2):
"Returns True if bytes1 > bytes2 using lexiographical ordering."
min_len = min (len(bytes1), len(bytes2))
@@ -271,7 +269,7 @@ With the above definition of lexiographical ordering ordered concatenation is sp
""" + "\n\n", file = file)


print ("~~~", file = file)
print ("~~~ python", file = file)
print (" def o_cat(bytes1,bytes2):", file = file);
print (" if lexiographically_larger(bytes1,bytes2):", file = file);
print (' return b"oc" + bytes1 + bytes2', file = file);
@@ -300,23 +298,21 @@ With the above definition of lexiographical ordering ordered concatenation is sp
print ("~~~", file = file)

result_dict = {}
result_dict['b"ABCD"'] = list(b"ABCD")
result_dict['b"BCD"'] = list(b"BCD")
result_dict['b"ABCDE"'] = list(b"ABCDE")
result_dict['o_cat(b"ABCD",b"BCD")'] = list(o_cat(b"ABCD",b"BCD"))
result_dict['o_cat(b"BCD",b"ABCDE")'] = list(o_cat(b"BCD",b"ABCDE"))
result_dict['b"ABCD"'] = byte_string_to_json(b"ABCD")
result_dict['b"BCD"'] = byte_string_to_json(b"BCD")
result_dict['b"ABCDE"'] = byte_string_to_json(b"ABCDE")
result_dict['o_cat(b"ABCD",b"BCD")'] = byte_string_to_json(o_cat(b"ABCD",b"BCD"))
result_dict['o_cat(b"BCD",b"ABCDE")'] = byte_string_to_json(o_cat(b"BCD",b"ABCDE"))

print ("\n#### Testvectors as JSON file encoded as BASE64\n", file=file)
print ("~~~", file=file)
tv_output_python_dictionary_as_json_base64(result_dict,line_prefix = " ",file=file)
print ("~~~\n", file=file)


print ("""
### Definitions for transcript\\_ir function
~~~
~~~ python
def transcript_ir(Ya,ADa,Yb,ADb):
result = lv_cat(Ya,ADa) + lv_cat(Yb,ADb)
return result
@@ -336,26 +332,24 @@ def transcript_ir(Ya,ADa,Yb,ADb):
print ("~~~", file = file)

result_dict = {}
result_dict['b"123"'] = list(b"123")
result_dict['b"234"'] = list(b"234")
result_dict['b"PartyA"'] = list(b"PartyA")
result_dict['b"PartyB"'] = list(b"PartyB")
result_dict['b"3456"'] = list(b"3456")
result_dict['b"2345"'] = list(b"2345")
result_dict['transcript_ir(b"123",b"PartyA",b"234",b"PartyB")'] = list(transcript_ir(b"123",b"PartyA",b"234",b"PartyB"))
result_dict['transcript_ir(b"3456",b"PartyA",b"2345",b"PartyB")'] = list(transcript_ir(b"3456",b"PartyA",b"2345",b"PartyB"))
result_dict['b"123"'] = byte_string_to_json(b"123")
result_dict['b"234"'] = byte_string_to_json(b"234")
result_dict['b"PartyA"'] = byte_string_to_json(b"PartyA")
result_dict['b"PartyB"'] = byte_string_to_json(b"PartyB")
result_dict['b"3456"'] = byte_string_to_json(b"3456")
result_dict['b"2345"'] = byte_string_to_json(b"2345")
result_dict['transcript_ir(b"123",b"PartyA",b"234",b"PartyB")'] = byte_string_to_json(transcript_ir(b"123",b"PartyA",b"234",b"PartyB"))
result_dict['transcript_ir(b"3456",b"PartyA",b"2345",b"PartyB")'] = byte_string_to_json(transcript_ir(b"3456",b"PartyA",b"2345",b"PartyB"))

print ("\n#### Testvectors as JSON file encoded as BASE64\n", file=file)
print ("~~~", file=file)
tv_output_python_dictionary_as_json_base64(result_dict,line_prefix = " ",file=file)
print ("~~~\n", file=file)


print ("""
### Definitions for transcript\\_oc function
~~~
~~~ python
def transcript_oc(Ya,ADa,Yb,ADb):
result = o_cat(lv_cat(Ya,ADa),lv_cat(Yb,ADb))
return result
@@ -374,19 +368,17 @@ def transcript_oc(Ya,ADa,Yb,ADb):
print ("~~~", file = file)

result_dict = {}
result_dict['b"123"'] = list(b"123")
result_dict['b"234"'] = list(b"234")
result_dict['b"PartyA"'] = list(b"PartyA")
result_dict['b"PartyB"'] = list(b"PartyB")
result_dict['b"3456"'] = list(b"3456")
result_dict['b"2345"'] = list(b"2345")
result_dict['transcript_oc(b"123",b"PartyA",b"234",b"PartyB")'] = list(transcript_oc(b"123",b"PartyA",b"234",b"PartyB"))
result_dict['transcript_oc(b"3456",b"PartyA",b"2345",b"PartyB")'] = list(transcript_oc(b"3456",b"PartyA",b"2345",b"PartyB"))
result_dict['b"123"'] = byte_string_to_json(b"123")
result_dict['b"234"'] = byte_string_to_json(b"234")
result_dict['b"PartyA"'] = byte_string_to_json(b"PartyA")
result_dict['b"PartyB"'] = byte_string_to_json(b"PartyB")
result_dict['b"3456"'] = byte_string_to_json(b"3456")
result_dict['b"2345"'] = byte_string_to_json(b"2345")
result_dict['transcript_oc(b"123",b"PartyA",b"234",b"PartyB")'] = byte_string_to_json(transcript_oc(b"123",b"PartyA",b"234",b"PartyB"))
result_dict['transcript_oc(b"3456",b"PartyA",b"2345",b"PartyB")'] = byte_string_to_json(transcript_oc(b"3456",b"PartyA",b"2345",b"PartyB"))

print ("\n#### Testvectors as JSON file encoded as BASE64\n", file=file)
print ("~~~", file=file)
tv_output_python_dictionary_as_json_base64(result_dict,line_prefix = " ",file=file)
print ("~~~\n", file=file)



34 changes: 16 additions & 18 deletions poc/CPace_testvectors.sage
@@ -132,7 +132,7 @@ def generate_test_vector(H,G, with_ANSI_C_initializers = True,file=sys.stdout, p

if with_ANSI_C_initializers:
print ("\n### Corresponding C programming language initializers\n", file=file)
print ("~~~", file=file)
print ("~~~ c", file=file)
print (ByteArrayToCInitializer(PRS, "tc_PRS"), file=file)
print (ByteArrayToCInitializer(CI, "tc_CI"), file=file)
print (ByteArrayToCInitializer(sid, "tc_sid"), file=file)
@@ -153,27 +153,25 @@


dictionary = {}
dictionary["PRS"] = list(PRS)
dictionary["CI"] = list(CI)
dictionary["sid"] = list(sid)
dictionary["g"] = list(g)
dictionary["ya"] = list(ya)
dictionary["ADa"] = list(ADa)
dictionary["Ya"] = list(Ya)
dictionary["yb"] = list(yb)
dictionary["ADb"] = list(ADb)
dictionary["Yb"] = list(Yb)
dictionary["K"] = list(K)
dictionary["ISK_IR"] = list(ISK_IR)
dictionary["ISK_SY"] = list(ISK_SY)
dictionary["sid_output_ir"] = list(sid_output_ir)
dictionary["sid_output_oc"] = list(sid_output_oc)
dictionary["PRS"] = byte_string_to_json(PRS)
dictionary["CI"] = byte_string_to_json(CI)
dictionary["sid"] = byte_string_to_json(sid)
dictionary["g"] = byte_string_to_json(g)
dictionary["ya"] = byte_string_to_json(ya)
dictionary["ADa"] = byte_string_to_json(ADa)
dictionary["Ya"] = byte_string_to_json(Ya)
dictionary["yb"] = byte_string_to_json(yb)
dictionary["ADb"] = byte_string_to_json(ADb)
dictionary["Yb"] = byte_string_to_json(Yb)
dictionary["K"] = byte_string_to_json(K)
dictionary["ISK_IR"] = byte_string_to_json(ISK_IR)
dictionary["ISK_SY"] = byte_string_to_json(ISK_SY)
dictionary["sid_output_ir"] = byte_string_to_json(sid_output_ir)
dictionary["sid_output_oc"] = byte_string_to_json(sid_output_oc)


print ("\n### Testvectors as JSON file encoded as BASE64\n", file=file)
print ("~~~", file=file)
tv_output_python_dictionary_as_json_base64(dictionary,file=file)
print ("~~~\n", file=file)

return dictionary
