
Commit

Modified string encodings in JSON tags.
BjoernMHaase committed Oct 8, 2024
1 parent 678025b commit dbc648b
Showing 4 changed files with 69 additions and 62 deletions.
40 changes: 20 additions & 20 deletions draft-irtf-cfrg-cpace.md
@@ -1063,10 +1063,10 @@ With the above definition of lexiographical ordering ordered concatenation is sp


~~~ test-vectors
#eyJiXCJBQkNEXCIiOiAiNDE0MjQzNDQiLCAiYlwiQkNEXCIiOiAiNDI0MzQ0IiwgIm
#JcIkFCQ0RFXCIiOiAiNDE0MjQzNDQ0NSIsICJvX2NhdChiXCJBQkNEXCIsYlwiQkNE
#XCIpIjogIjZGNjM0MjQzNDQ0MTQyNDM0NCIsICJvX2NhdChiXCJCQ0RcIixiXCJBQk
#NERVwiKSI6ICI2RjYzNDI0MzQ0NDE0MjQzNDQ0NSJ9
#eyJiJ0FCQ0QnIjogIjQxNDI0MzQ0IiwgImInQkNEJyI6ICI0MjQzNDQiLCAiYidBQk
#NERSciOiAiNDE0MjQzNDQ0NSIsICJvX2NhdChiJ0FCQ0QnLGInQkNEJykiOiAiNkY2
#MzQyNDM0NDQxNDI0MzQ0IiwgIm9fY2F0KGInQkNEJyxiJ0FCQ0RFJykiOiAiNkY2Mz
#QyNDM0NDQxNDI0MzQ0NDUifQ==
~~~
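
For reference, the new test-vector block above can be checked with a few lines of Python. This is a minimal sketch, not part of the draft or of this commit; it only assumes that the leading `#` characters and the line wrapping are presentation, and that the payload is a BASE64-encoded JSON dictionary of hex strings.

~~~ python
import base64, json

# The test-vector block exactly as printed above, including the "#" markers.
block = """
#eyJiJ0FCQ0QnIjogIjQxNDI0MzQ0IiwgImInQkNEJyI6ICI0MjQzNDQiLCAiYidBQk
#NERSciOiAiNDE0MjQzNDQ0NSIsICJvX2NhdChiJ0FCQ0QnLGInQkNEJykiOiAiNkY2
#MzQyNDM0NDQxNDI0MzQ0IiwgIm9fY2F0KGInQkNEJyxiJ0FCQ0RFJykiOiAiNkY2Mz
#QyNDM0NDQxNDI0MzQ0NDUifQ==
"""

# Strip the "#" prefixes, join the wrapped lines, BASE64-decode and parse.
b64 = "".join(line.lstrip("#") for line in block.split())
vectors = json.loads(base64.b64decode(b64))

assert vectors["b'ABCD'"] == "41424344"                                   # hex of ASCII "ABCD"
assert vectors["o_cat(b'ABCD',b'BCD')"] == "6F63" + "424344" + "41424344" # "oc" || "BCD" || "ABCD"
~~~

Note that the keys now use the b'...' spelling introduced by this commit, so no backslash escaping is needed inside the JSON strings.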


@@ -1094,14 +1094,14 @@ def transcript_ir(Ya,ADa,Yb,ADb):


~~~ test-vectors
#eyJiXCIxMjNcIiI6ICIzMTMyMzMiLCAiYlwiMjM0XCIiOiAiMzIzMzM0IiwgImJcIl
#BhcnR5QVwiIjogIjUwNjE3Mjc0Nzk0MSIsICJiXCJQYXJ0eUJcIiI6ICI1MDYxNzI3
#NDc5NDIiLCAiYlwiMzQ1NlwiIjogIjMzMzQzNTM2IiwgImJcIjIzNDVcIiI6ICIzMj
#MzMzQzNSIsICJ0cmFuc2NyaXB0X2lyKGJcIjEyM1wiLGJcIlBhcnR5QVwiLGJcIjIz
#NFwiLGJcIlBhcnR5QlwiKSI6ICIwMzMxMzIzMzA2NTA2MTcyNzQ3OTQxMDMzMjMzMz
#QwNjUwNjE3Mjc0Nzk0MiIsICJ0cmFuc2NyaXB0X2lyKGJcIjM0NTZcIixiXCJQYXJ0
#eUFcIixiXCIyMzQ1XCIsYlwiUGFydHlCXCIpIjogIjA0MzMzNDM1MzYwNjUwNjE3Mj
#c0Nzk0MTA0MzIzMzM0MzUwNjUwNjE3Mjc0Nzk0MiJ9
#eyJiJzEyMyciOiAiMzEzMjMzIiwgImInMjM0JyI6ICIzMjMzMzQiLCAiYidQYXJ0eU
#EnIjogIjUwNjE3Mjc0Nzk0MSIsICJiJ1BhcnR5QiciOiAiNTA2MTcyNzQ3OTQyIiwg
#ImInMzQ1NiciOiAiMzMzNDM1MzYiLCAiYicyMzQ1JyI6ICIzMjMzMzQzNSIsICJ0cm
#Fuc2NyaXB0X2lyKGInMTIzJyxiJ1BhcnR5QScsYicyMzQnLGInUGFydHlCJykiOiAi
#MDMzMTMyMzMwNjUwNjE3Mjc0Nzk0MTAzMzIzMzM0MDY1MDYxNzI3NDc5NDIiLCAidH
#JhbnNjcmlwdF9pcihiJzM0NTYnLGInUGFydHlBJyxiJzIzNDUnLGInUGFydHlCJyki
#OiAiMDQzMzM0MzUzNjA2NTA2MTcyNzQ3OTQxMDQzMjMzMzQzNTA2NTA2MTcyNzQ3OT
#QyIn0=
~~~
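
The transcript_ir entries above can be reproduced with the following sketch. The single-byte length prefix is an assumption that holds for the short inputs in these vectors; it is not the draft's complete prepend_len definition.

~~~ python
def prepend_len(data: bytes) -> bytes:
    # Single length byte; enough for the short test inputs used here.
    assert len(data) < 128
    return bytes([len(data)]) + data

def transcript_ir(Ya: bytes, ADa: bytes, Yb: bytes, ADb: bytes) -> bytes:
    # Initiator/responder transcript: the four fields, each length-prefixed, in order.
    return prepend_len(Ya) + prepend_len(ADa) + prepend_len(Yb) + prepend_len(ADb)

# Matches the "transcript_ir(b'123',b'PartyA',b'234',b'PartyB')" entry above.
assert transcript_ir(b"123", b"PartyA", b"234", b"PartyB").hex().upper() == (
    "03313233" "06506172747941" "03323334" "06506172747942")
~~~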


@@ -1129,14 +1129,14 @@ def transcript_oc(Ya,ADa,Yb,ADb):


~~~ test-vectors
#eyJiXCIxMjNcIiI6ICIzMTMyMzMiLCAiYlwiMjM0XCIiOiAiMzIzMzM0IiwgImJcIl
#BhcnR5QVwiIjogIjUwNjE3Mjc0Nzk0MSIsICJiXCJQYXJ0eUJcIiI6ICI1MDYxNzI3
#NDc5NDIiLCAiYlwiMzQ1NlwiIjogIjMzMzQzNTM2IiwgImJcIjIzNDVcIiI6ICIzMj
#MzMzQzNSIsICJ0cmFuc2NyaXB0X29jKGJcIjEyM1wiLGJcIlBhcnR5QVwiLGJcIjIz
#NFwiLGJcIlBhcnR5QlwiKSI6ICI2RjYzMDMzMjMzMzQwNjUwNjE3Mjc0Nzk0MjAzMz
#EzMjMzMDY1MDYxNzI3NDc5NDEiLCAidHJhbnNjcmlwdF9vYyhiXCIzNDU2XCIsYlwi
#UGFydHlBXCIsYlwiMjM0NVwiLGJcIlBhcnR5QlwiKSI6ICI2RjYzMDQzMzM0MzUzNj
#A2NTA2MTcyNzQ3OTQxMDQzMjMzMzQzNTA2NTA2MTcyNzQ3OTQyIn0=
#eyJiJzEyMyciOiAiMzEzMjMzIiwgImInMjM0JyI6ICIzMjMzMzQiLCAiYidQYXJ0eU
#EnIjogIjUwNjE3Mjc0Nzk0MSIsICJiJ1BhcnR5QiciOiAiNTA2MTcyNzQ3OTQyIiwg
#ImInMzQ1NiciOiAiMzMzNDM1MzYiLCAiYicyMzQ1JyI6ICIzMjMzMzQzNSIsICJ0cm
#Fuc2NyaXB0X29jKGInMTIzJyxiJ1BhcnR5QScsYicyMzQnLGInUGFydHlCJykiOiAi
#NkY2MzAzMzIzMzM0MDY1MDYxNzI3NDc5NDIwMzMxMzIzMzA2NTA2MTcyNzQ3OTQxIi
#wgInRyYW5zY3JpcHRfb2MoYiczNDU2JyxiJ1BhcnR5QScsYicyMzQ1JyxiJ1BhcnR5
#QicpIjogIjZGNjMwNDMzMzQzNTM2MDY1MDYxNzI3NDc5NDEwNDMyMzMzNDM1MDY1MD
#YxNzI3NDc5NDIifQ==
~~~
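
Similarly, a sketch for the transcript_oc entries, assuming single-byte length prefixes and assuming that Python's built-in bytes comparison coincides with the draft's lexicographical ordering for these inputs:

~~~ python
def prepend_len(data: bytes) -> bytes:
    assert len(data) < 128                     # single length byte is enough here
    return bytes([len(data)]) + data

def lv_cat(*parts: bytes) -> bytes:
    return b"".join(prepend_len(p) for p in parts)

def o_cat(a: bytes, b: bytes) -> bytes:
    # Ordered concatenation: "oc" prefix, then the larger operand first.
    return b"oc" + (a + b if a > b else b + a)

def transcript_oc(Ya: bytes, ADa: bytes, Yb: bytes, ADb: bytes) -> bytes:
    return o_cat(lv_cat(Ya, ADa), lv_cat(Yb, ADb))

# Matches the "transcript_oc(b'123',b'PartyA',b'234',b'PartyB')" entry above:
# lv_cat(b"234", b"PartyB") is the larger operand, so it comes first after "oc".
assert transcript_oc(b"123", b"PartyA", b"234", b"PartyB").hex().upper() == (
    "6F63" "03323334" "06506172747942" "03313233" "06506172747941")
~~~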


44 changes: 22 additions & 22 deletions poc/CPace_string_utils.sage
@@ -298,11 +298,11 @@ With the above definition of lexiographical ordering ordered concatenation is sp
print ("~~~", file = file)

result_dict = {}
result_dict['b"ABCD"'] = byte_string_to_json(b"ABCD")
result_dict['b"BCD"'] = byte_string_to_json(b"BCD")
result_dict['b"ABCDE"'] = byte_string_to_json(b"ABCDE")
result_dict['o_cat(b"ABCD",b"BCD")'] = byte_string_to_json(o_cat(b"ABCD",b"BCD"))
result_dict['o_cat(b"BCD",b"ABCDE")'] = byte_string_to_json(o_cat(b"BCD",b"ABCDE"))
result_dict["b'ABCD'"] = byte_string_to_json(b"ABCD")
result_dict["b'BCD'"] = byte_string_to_json(b"BCD")
result_dict["b'ABCDE'"] = byte_string_to_json(b"ABCDE")
result_dict["o_cat(b'ABCD',b'BCD')"] = byte_string_to_json(o_cat(b"ABCD",b"BCD"))
result_dict["o_cat(b'BCD',b'ABCDE')"] = byte_string_to_json(o_cat(b"BCD",b"ABCDE"))

print ("\n#### Testvectors as JSON file encoded as BASE64\n", file=file)
tv_output_python_dictionary_as_json_base64(result_dict,file=file)
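
The two helpers used above, byte_string_to_json and tv_output_python_dictionary_as_json_base64, are not part of this diff. The following hypothetical stand-ins, inferred only from the shape of the test-vector blocks, illustrate the intended output format; the hex casing and the wrap width are assumptions.

~~~ python
import base64, json, textwrap

def byte_string_to_json(data: bytes) -> str:
    # Hypothetical stand-in: hex encoding, e.g. b"ABCD" -> "41424344".
    return data.hex().upper()

def tv_output_python_dictionary_as_json_base64(d: dict, file=None):
    # Hypothetical stand-in: dump the dict as JSON, BASE64-encode it, and print it
    # as "#"-prefixed lines wrapped to fit the draft's line width.
    b64 = base64.b64encode(json.dumps(d).encode("ascii")).decode("ascii")
    print("~~~ test-vectors", file=file)
    for line in textwrap.wrap(b64, width=66):
        print("#" + line, file=file)
    print("~~~", file=file)
~~~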
@@ -332,14 +332,14 @@ def transcript_ir(Ya,ADa,Yb,ADb):
print ("~~~", file = file)

result_dict = {}
result_dict['b"123"'] = byte_string_to_json(b"123")
result_dict['b"234"'] = byte_string_to_json(b"234")
result_dict['b"PartyA"'] = byte_string_to_json(b"PartyA")
result_dict['b"PartyB"'] = byte_string_to_json(b"PartyB")
result_dict['b"3456"'] = byte_string_to_json(b"3456")
result_dict['b"2345"'] = byte_string_to_json(b"2345")
result_dict['transcript_ir(b"123",b"PartyA",b"234",b"PartyB")'] = byte_string_to_json(transcript_ir(b"123",b"PartyA",b"234",b"PartyB"))
result_dict['transcript_ir(b"3456",b"PartyA",b"2345",b"PartyB")'] = byte_string_to_json(transcript_ir(b"3456",b"PartyA",b"2345",b"PartyB"))
result_dict["b'123'"] = byte_string_to_json(b"123")
result_dict["b'234'"] = byte_string_to_json(b"234")
result_dict["b'PartyA'"] = byte_string_to_json(b"PartyA")
result_dict["b'PartyB'"] = byte_string_to_json(b"PartyB")
result_dict["b'3456'"] = byte_string_to_json(b"3456")
result_dict["b'2345'"] = byte_string_to_json(b"2345")
result_dict["transcript_ir(b'123',b'PartyA',b'234',b'PartyB')"] = byte_string_to_json(transcript_ir(b"123",b"PartyA",b"234",b"PartyB"))
result_dict["transcript_ir(b'3456',b'PartyA',b'2345',b'PartyB')"] = byte_string_to_json(transcript_ir(b"3456",b"PartyA",b"2345",b"PartyB"))

print ("\n#### Testvectors as JSON file encoded as BASE64\n", file=file)
tv_output_python_dictionary_as_json_base64(result_dict,file=file)
@@ -368,14 +368,14 @@ def transcript_oc(Ya,ADa,Yb,ADb):
print ("~~~", file = file)

result_dict = {}
result_dict['b"123"'] = byte_string_to_json(b"123")
result_dict['b"234"'] = byte_string_to_json(b"234")
result_dict['b"PartyA"'] = byte_string_to_json(b"PartyA")
result_dict['b"PartyB"'] = byte_string_to_json(b"PartyB")
result_dict['b"3456"'] = byte_string_to_json(b"3456")
result_dict['b"2345"'] = byte_string_to_json(b"2345")
result_dict['transcript_oc(b"123",b"PartyA",b"234",b"PartyB")'] = byte_string_to_json(transcript_oc(b"123",b"PartyA",b"234",b"PartyB"))
result_dict['transcript_oc(b"3456",b"PartyA",b"2345",b"PartyB")'] = byte_string_to_json(transcript_oc(b"3456",b"PartyA",b"2345",b"PartyB"))
result_dict["b'123'"] = byte_string_to_json(b"123")
result_dict["b'234'"] = byte_string_to_json(b"234")
result_dict["b'PartyA'"] = byte_string_to_json(b"PartyA")
result_dict["b'PartyB'"] = byte_string_to_json(b"PartyB")
result_dict["b'3456'"] = byte_string_to_json(b"3456")
result_dict["b'2345'"] = byte_string_to_json(b"2345")
result_dict["transcript_oc(b'123',b'PartyA',b'234',b'PartyB')"] = byte_string_to_json(transcript_oc(b"123",b"PartyA",b"234",b"PartyB"))
result_dict["transcript_oc(b'3456',b'PartyA',b'2345',b'PartyB')"] = byte_string_to_json(transcript_oc(b"3456",b"PartyA",b"2345",b"PartyB"))

print ("\n#### Testvectors as JSON file encoded as BASE64\n", file=file)
tv_output_python_dictionary_as_json_base64(result_dict,file=file)
@@ -390,7 +390,7 @@ def random_bytes(length):
values = [randint(0, 255) for i in range(length)]
result = b""
for v in values:
result += v.to_bytes(1, 'little')
result += v.to_bytes(1, "little")
return result

if __name__ == "__main__":
7 changes: 7 additions & 0 deletions poc/README
@@ -0,0 +1,7 @@
To generate the test vectors with sage, run

make clean
make
make testvectors

A JSON file and a markdown file will be generated and stored at ../
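
One possible way to consume the generated output from Python is sketched below; the README does not name the JSON file, so it is discovered with a glob rather than by a hard-coded path.

~~~ python
import glob, json

# Pick up whatever JSON file the sage run left in the parent directory.
for path in glob.glob("../*.json"):
    with open(path) as f:
        vectors = json.load(f)
    print(path, "holds", len(vectors), "top-level entries")
~~~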
40 changes: 20 additions & 20 deletions testvectors.md
@@ -168,10 +168,10 @@ With the above definition of lexiographical ordering ordered concatenation is sp


~~~ test-vectors
#eyJiXCJBQkNEXCIiOiAiNDE0MjQzNDQiLCAiYlwiQkNEXCIiOiAiNDI0MzQ0IiwgIm
#JcIkFCQ0RFXCIiOiAiNDE0MjQzNDQ0NSIsICJvX2NhdChiXCJBQkNEXCIsYlwiQkNE
#XCIpIjogIjZGNjM0MjQzNDQ0MTQyNDM0NCIsICJvX2NhdChiXCJCQ0RcIixiXCJBQk
#NERVwiKSI6ICI2RjYzNDI0MzQ0NDE0MjQzNDQ0NSJ9
#eyJiJ0FCQ0QnIjogIjQxNDI0MzQ0IiwgImInQkNEJyI6ICI0MjQzNDQiLCAiYidBQk
#NERSciOiAiNDE0MjQzNDQ0NSIsICJvX2NhdChiJ0FCQ0QnLGInQkNEJykiOiAiNkY2
#MzQyNDM0NDQxNDI0MzQ0IiwgIm9fY2F0KGInQkNEJyxiJ0FCQ0RFJykiOiAiNkY2Mz
#QyNDM0NDQxNDI0MzQ0NDUifQ==
~~~


@@ -199,14 +199,14 @@ def transcript_ir(Ya,ADa,Yb,ADb):


~~~ test-vectors
#eyJiXCIxMjNcIiI6ICIzMTMyMzMiLCAiYlwiMjM0XCIiOiAiMzIzMzM0IiwgImJcIl
#BhcnR5QVwiIjogIjUwNjE3Mjc0Nzk0MSIsICJiXCJQYXJ0eUJcIiI6ICI1MDYxNzI3
#NDc5NDIiLCAiYlwiMzQ1NlwiIjogIjMzMzQzNTM2IiwgImJcIjIzNDVcIiI6ICIzMj
#MzMzQzNSIsICJ0cmFuc2NyaXB0X2lyKGJcIjEyM1wiLGJcIlBhcnR5QVwiLGJcIjIz
#NFwiLGJcIlBhcnR5QlwiKSI6ICIwMzMxMzIzMzA2NTA2MTcyNzQ3OTQxMDMzMjMzMz
#QwNjUwNjE3Mjc0Nzk0MiIsICJ0cmFuc2NyaXB0X2lyKGJcIjM0NTZcIixiXCJQYXJ0
#eUFcIixiXCIyMzQ1XCIsYlwiUGFydHlCXCIpIjogIjA0MzMzNDM1MzYwNjUwNjE3Mj
#c0Nzk0MTA0MzIzMzM0MzUwNjUwNjE3Mjc0Nzk0MiJ9
#eyJiJzEyMyciOiAiMzEzMjMzIiwgImInMjM0JyI6ICIzMjMzMzQiLCAiYidQYXJ0eU
#EnIjogIjUwNjE3Mjc0Nzk0MSIsICJiJ1BhcnR5QiciOiAiNTA2MTcyNzQ3OTQyIiwg
#ImInMzQ1NiciOiAiMzMzNDM1MzYiLCAiYicyMzQ1JyI6ICIzMjMzMzQzNSIsICJ0cm
#Fuc2NyaXB0X2lyKGInMTIzJyxiJ1BhcnR5QScsYicyMzQnLGInUGFydHlCJykiOiAi
#MDMzMTMyMzMwNjUwNjE3Mjc0Nzk0MTAzMzIzMzM0MDY1MDYxNzI3NDc5NDIiLCAidH
#JhbnNjcmlwdF9pcihiJzM0NTYnLGInUGFydHlBJyxiJzIzNDUnLGInUGFydHlCJyki
#OiAiMDQzMzM0MzUzNjA2NTA2MTcyNzQ3OTQxMDQzMjMzMzQzNTA2NTA2MTcyNzQ3OT
#QyIn0=
~~~


@@ -234,14 +234,14 @@ def transcript_oc(Ya,ADa,Yb,ADb):


~~~ test-vectors
#eyJiXCIxMjNcIiI6ICIzMTMyMzMiLCAiYlwiMjM0XCIiOiAiMzIzMzM0IiwgImJcIl
#BhcnR5QVwiIjogIjUwNjE3Mjc0Nzk0MSIsICJiXCJQYXJ0eUJcIiI6ICI1MDYxNzI3
#NDc5NDIiLCAiYlwiMzQ1NlwiIjogIjMzMzQzNTM2IiwgImJcIjIzNDVcIiI6ICIzMj
#MzMzQzNSIsICJ0cmFuc2NyaXB0X29jKGJcIjEyM1wiLGJcIlBhcnR5QVwiLGJcIjIz
#NFwiLGJcIlBhcnR5QlwiKSI6ICI2RjYzMDMzMjMzMzQwNjUwNjE3Mjc0Nzk0MjAzMz
#EzMjMzMDY1MDYxNzI3NDc5NDEiLCAidHJhbnNjcmlwdF9vYyhiXCIzNDU2XCIsYlwi
#UGFydHlBXCIsYlwiMjM0NVwiLGJcIlBhcnR5QlwiKSI6ICI2RjYzMDQzMzM0MzUzNj
#A2NTA2MTcyNzQ3OTQxMDQzMjMzMzQzNTA2NTA2MTcyNzQ3OTQyIn0=
#eyJiJzEyMyciOiAiMzEzMjMzIiwgImInMjM0JyI6ICIzMjMzMzQiLCAiYidQYXJ0eU
#EnIjogIjUwNjE3Mjc0Nzk0MSIsICJiJ1BhcnR5QiciOiAiNTA2MTcyNzQ3OTQyIiwg
#ImInMzQ1NiciOiAiMzMzNDM1MzYiLCAiYicyMzQ1JyI6ICIzMjMzMzQzNSIsICJ0cm
#Fuc2NyaXB0X29jKGInMTIzJyxiJ1BhcnR5QScsYicyMzQnLGInUGFydHlCJykiOiAi
#NkY2MzAzMzIzMzM0MDY1MDYxNzI3NDc5NDIwMzMxMzIzMzA2NTA2MTcyNzQ3OTQxIi
#wgInRyYW5zY3JpcHRfb2MoYiczNDU2JyxiJ1BhcnR5QScsYicyMzQ1JyxiJ1BhcnR5
#QicpIjogIjZGNjMwNDMzMzQzNTM2MDY1MDYxNzI3NDc5NDEwNDMyMzMzNDM1MDY1MD
#YxNzI3NDc5NDIifQ==
~~~


