Skip to content

Commit

Permalink
Added JSON encoded test vectors
Browse files Browse the repository at this point in the history
  • Loading branch information
BjoernMHaase committed Sep 20, 2024
1 parent e7aaab6 commit 4899030
Show file tree
Hide file tree
Showing 7 changed files with 7,209 additions and 172 deletions.
785 changes: 781 additions & 4 deletions draft-irtf-cfrg-cpace.md

Large diffs are not rendered by default.

18 changes: 12 additions & 6 deletions poc/CPace_coffee.sage
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
import sys
import base64

sys.path.append("sagelib")

Expand Down Expand Up @@ -101,10 +100,10 @@ def output_coffee_invalid_point_test_cases(G, file = sys.stdout):
print ("~~~\n", file = file)

dict_valid = {}
dict_valid["s"] = base64.b64encode(y).decode('ascii')
dict_valid["X"] = base64.b64encode(X).decode('ascii')
dict_valid["G.scalar_mult(s,decode(X))"] = base64.b64encode(Z).decode('ascii')
dict_valid["G.scalar_mult_vfy(s,X)"] = base64.b64encode(K).decode('ascii')
dict_valid["s"] = list(y)
dict_valid["X"] = list(X)
dict_valid["G.scalar_mult(s,decode(X))"] = list(Z)
dict_valid["G.scalar_mult_vfy(s,X)"] = list(K)

result_dict["Valid"] = dict_valid

Expand All @@ -128,7 +127,14 @@ def output_coffee_invalid_point_test_cases(G, file = sys.stdout):
print (" G.scalar_mult_vfy(s,Y_i1) = G.scalar_mult_vfy(s,Y_i2) = G.I", file = file)
print ("~~~\n", file = file)

result_dict["Invalid Y"] = base64.b64encode(Y_inv1).decode('ascii')
result_dict["Invalid Y1"] = list(Y_inv1)
result_dict["Invalid Y2"] = list(G.I)

print ("\n#### Testvectors as JSON file encoded as BASE64\n", file=file)
print ("~~~", file=file)
tv_output_python_dictionary_as_json_base64(result_dict,line_prefix = " ",file=file)
print ("~~~\n", file=file)


return result_dict

Expand Down
34 changes: 28 additions & 6 deletions poc/CPace_string_utils.sage
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
import base64
import json
import sys

def ByteArrayToInteger(k,numBytes=32):
Expand Down Expand Up @@ -65,6 +67,23 @@ def tv_output_byte_array(data, test_vector_name = "", line_prefix = " ", max_le
if len(string) == 0:
print("\n",end="",file=file)
return

def tv_output_python_dictionary_as_json_base64(dictionary, line_prefix = " ", max_len = 60, file = sys.stdout):
    """Print *dictionary* as a Base64-encoded JSON blob, wrapped to fixed width.

    The dictionary is serialized to JSON, the JSON text is Base64-encoded
    (standard alphabet), and the encoded string is emitted in chunks of at
    most ``max_len`` characters. Each chunk line is prefixed with
    ``line_prefix + "###"``; one blank line is printed before the block and
    one after it, matching the original hand-rolled concatenation loop.

    Parameters:
        dictionary  -- JSON-serializable mapping holding the test vectors.
        line_prefix -- indentation placed before the "###" marker on each line.
        max_len     -- maximum number of Base64 characters per output line.
        file        -- output stream (default: sys.stdout).
    """
    json_text = json.dumps(dictionary).encode("ASCII")
    json_string = base64.standard_b64encode(json_text).decode("ASCII")

    marker = line_prefix + "###"
    # Leading "" and trailing "" produce the surrounding blank lines once joined.
    out_lines = [""]
    for offset in range(0, len(json_string), max_len):
        out_lines.append(marker + json_string[offset:offset + max_len])
    out_lines.append("")
    print("\n".join(out_lines), file=file)

def prepend_len(data):
"prepend LEB128 encoding of length"
Expand Down Expand Up @@ -257,17 +276,17 @@ With the above definition of lexicographical ordering ordered concatenation is sp
print ("~~~", file = file)


print ("""
print ("""
### Definitions for transcript\_ir function
### Definitions for transcript\\_ir function
~~~
def transcript_ir(Ya,ADa,Yb,ADb):
result = lv_cat(Ya,ADa) + lv_cat(Yb,ADb)
return result
~~~""", file = file)

print ("\n### Test vectors transcript\_ir function\n", file = file)
print ("\n### Test vectors transcript\\_ir function\n", file = file)

print ("~~~", file = file)
tv_output_byte_array(transcript_ir(b"123",b"PartyA",b"234",b"PartyB"),
Expand All @@ -280,17 +299,17 @@ def transcript_ir(Ya,ADa,Yb,ADb):

print ("~~~", file = file)

print ("""
print ("""
### Definitions for transcript\_oc function
### Definitions for transcript\\_oc function
~~~
def transcript_oc(Ya,ADa,Yb,ADb):
result = o_cat(lv_cat(Ya,ADa),lv_cat(Yb,ADb))
return result
~~~""", file = file)

print ("\n### Test vectors for transcript\_oc function\n", file = file)
print ("\n### Test vectors for transcript\\_oc function\n", file = file)

print ("~~~", file = file)
tv_output_byte_array(transcript_oc(b"123",b"PartyA",b"234",b"PartyB"),
Expand All @@ -303,6 +322,7 @@ def transcript_oc(Ya,ADa,Yb,ADb):
print ("~~~", file = file)



def zero_bytes(length):
result = b"\0" * length
return result
Expand All @@ -316,3 +336,5 @@ def random_bytes(length):

if __name__ == "__main__":
generate_testvectors_string_functions()

tv_output_python_dictionary_as_json_base64(test_dictionary)
38 changes: 22 additions & 16 deletions poc/CPace_testvectors.sage
Original file line number Diff line number Diff line change
Expand Up @@ -153,22 +153,28 @@ def generate_test_vector(H,G, with_ANSI_C_initializers = True,file=sys.stdout, p


dictionary = {}
dictionary["PRS"] = base64.b64encode(PRS).decode('ascii');
dictionary["CI"] = base64.b64encode(CI).decode('ascii');
dictionary["sid"] = base64.b64encode(sid).decode('ascii');
dictionary["g"] = base64.b64encode(g).decode('ascii');
dictionary["ya"] = base64.b64encode(ya).decode('ascii');
dictionary["ADa"] = base64.b64encode(ADa).decode('ascii');
dictionary["Ya"] = base64.b64encode(Ya).decode('ascii');
dictionary["yb"] = base64.b64encode(yb).decode('ascii');
dictionary["ADb"] = base64.b64encode(ADb).decode('ascii');
dictionary["Yb"] = base64.b64encode(Yb).decode('ascii');
dictionary["K"] = base64.b64encode(K).decode('ascii');
dictionary["ISK_IR"] = base64.b64encode(ISK_IR).decode('ascii');
dictionary["ISK_SY"] = base64.b64encode(ISK_SY).decode('ascii');
dictionary["sid_output_ir"] = base64.b64encode(sid_output_ir).decode('ascii');
dictionary["sid_output_oc"] = base64.b64encode(sid_output_oc).decode('ascii');

dictionary["PRS"] = list(PRS)
dictionary["CI"] = list(CI)
dictionary["sid"] = list(sid)
dictionary["g"] = list(g)
dictionary["ya"] = list(ya)
dictionary["ADa"] = list(ADa)
dictionary["Ya"] = list(Ya)
dictionary["yb"] = list(yb)
dictionary["ADb"] = list(ADb)
dictionary["Yb"] = list(Yb)
dictionary["K"] = list(K)
dictionary["ISK_IR"] = list(ISK_IR)
dictionary["ISK_SY"] = list(ISK_SY)
dictionary["sid_output_ir"] = list(sid_output_ir)
dictionary["sid_output_oc"] = list(sid_output_oc)


print ("\n### Testvectors as JSON file encoded as BASE64\n", file=file)
print ("~~~", file=file)
tv_output_python_dictionary_as_json_base64(dictionary,file=file)
print ("~~~\n", file=file)

return dictionary


Expand Down
18 changes: 11 additions & 7 deletions poc/CPace_weierstrass.sage
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
import base64
import sys

from sagelib.CPace_string_utils import *
Expand Down Expand Up @@ -147,10 +146,10 @@ def output_weierstrass_invalid_point_test_cases(G, file = sys.stdout):
line_prefix = " ", max_len = 60, file = file)
print ("~~~\n", file = file)
dict_valid = {}
dict_valid["s"] = base64.b64encode(y).decode('ascii')
dict_valid["X"] = base64.b64encode(X).decode('ascii')
dict_valid["G.scalar_mult(s,X) (full coordinates)"] = base64.b64encode(Z).decode('ascii')
dict_valid["G.scalar_mult_vfy(s,X) (only X-coordinate)"] = base64.b64encode(K).decode('ascii')
dict_valid["s"] = list(y)
dict_valid["X"] = list(X)
dict_valid["G.scalar_mult(s,X) (full coordinates)"] = list(Z)
dict_valid["G.scalar_mult_vfy(s,X) (only X-coordinate)"] = list(K)
result_dict["Valid"] = dict_valid

Y_inv1 = bytearray(X)
Expand All @@ -172,8 +171,13 @@ def output_weierstrass_invalid_point_test_cases(G, file = sys.stdout):
print (" G.scalar_mult_vfy(s,Y_i1) = G.scalar_mult_vfy(s,Y_i2) = G.I", file = file)
print ("~~~\n", file = file)

result_dict["Invalid Y1"] = base64.b64encode(Y_inv1).decode('ascii')
result_dict["Invalid Y2"] = base64.b64encode(Y_inv2).decode('ascii')
result_dict["Invalid Y1"] = list(Y_inv1)
result_dict["Invalid Y2"] = list(Y_inv2)

print ("\n#### Testvectors as JSON file encoded as BASE64\n", file=file)
print ("~~~", file=file)
tv_output_python_dictionary_as_json_base64(result_dict,line_prefix = " ",file=file)
print ("~~~\n", file=file)

return result_dict

Expand Down
6 changes: 6 additions & 0 deletions poc/test_vectors_X448_X25519.sage
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,12 @@ def output_test_vectors_for_weak_points_255(file = sys.stdout):
ctr += 1;

print ("~~~\n", file = file)

print ("\n#### Testvectors as JSON file encoded as BASE64\n", file=file)
print ("~~~", file=file)
tv_output_python_dictionary_as_json_base64(result_dict,line_prefix = " ",file=file)
print ("~~~\n", file=file)

return result_dict


Expand Down
Loading

0 comments on commit 4899030

Please sign in to comment.