// The js interface to rify accepts RDF nodes as strings.
// This module deals with the conversions between the js representation
// of RDF nodes ({ Iri: 'https://example.com' }) and the rify-js representation,
// AKA strings.

import { assert } from '@polkadot/util';
import { assertType, assertValidNode } from './common';

// Convert a Node into a canonicalized string representation.
//
// ∀ A, B ∈ Node: canon(A) = canon(B) <-> A = B
export function canon(node) {
  assertValidNode(node);
  return JSON.stringify(orderKeys(node));
}
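
// Usage sketch (the example node value is illustrative, not taken from this
// module): canon serializes a node to a stable JSON string.
// expect(canon({ Iri: 'https://example.com' }))
//   .toEqual('{"Iri":"https://example.com"}');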

/// Canonicalize all the nodes in a ruleset.
export function canonRules(rules) {
  return rules.map(({ if_all: ifAll, then }) => ({
    if_all: ifAll.map((claim) => claim.map(canonAtom)),
    then: then.map((claim) => claim.map(canonAtom)),
  }));
}
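
// Behaviour sketch (the rule below is illustrative): Bound atoms have their
// node canonicalized, Unbound atoms pass through unchanged.
// expect(canonRules([{
//   if_all: [[{ Unbound: 's' }, { Bound: { Iri: 'https://example.com' } }, { Unbound: 'o' }]],
//   then: [],
// }])).toEqual([{
//   if_all: [[{ Unbound: 's' }, { Bound: '{"Iri":"https://example.com"}' }, { Unbound: 'o' }]],
//   then: [],
// }]);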

/// Canonicalize a rule atom.
/// An atom may be either { Bound: Node } or { Unbound: 'string' }
// expect(canonAtom({ Bound: { Iri: 'https://example.com' } }))
//   .toEqual({ Bound: "{\"Iri\":\"https://example.com\"}" });
// expect(canonAtom({ Unbound: "heyo" }))
//   .toEqual({ Unbound: "heyo" });
function canonAtom(atom) {
  assert(Object.keys(atom).length === 1, 'enum must have exactly one tag');
  switch (Object.keys(atom)[0]) {
    case 'Bound':
      assertType(atom.Bound, 'object');
      return { Bound: canon(atom.Bound) };
    case 'Unbound':
      assertType(atom.Unbound, 'string');
      return { Unbound: atom.Unbound };
    default:
      throw new TypeError(`expected a bound or unbound rule atom, got ${JSON.stringify(atom)}`);
  }
}

/// Canonicalize all the nodes in a proof.
export function canonProof(proof) {
  return proof.map(({ rule_index: ruleIndex, instantiations }) => ({
    rule_index: ruleIndex,
    instantiations: instantiations.map(canon),
  }));
}
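
// Shape sketch (proof contents are illustrative): each proof step keeps its
// rule_index and gets its instantiations canonicalized.
// expect(canonProof([{ rule_index: 0, instantiations: [{ Iri: 'https://example.com' }] }]))
//   .toEqual([{ rule_index: 0, instantiations: ['{"Iri":"https://example.com"}'] }]);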

/// Parse all the nodes in a canonicalized proof.
export function decanonProof(proof) {
  return proof.map(({ rule_index: ruleIndex, instantiations }) => ({
    rule_index: ruleIndex,
    instantiations: instantiations.map(JSON.parse),
  }));
}
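
// decanonProof inverts canonProof: for any valid proof p,
// decanonProof(canonProof(p)) is deeply equal to p (object key order within
// nodes is normalized by canon).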

/// Canonicalize all the nodes in a claimgraph.
export function canonClaimGraph(cg) {
  return cg.map((claim) => claim.map(canon));
}
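
// Sketch (the claim shown is an illustrative subject/predicate/object triple;
// canonClaimGraph simply canonicalizes every node of every claim):
// expect(canonClaimGraph([[
//   { Iri: 'https://example.com/s' },
//   { Iri: 'https://example.com/p' },
//   { Iri: 'https://example.com/o' },
// ]])).toEqual([[
//   '{"Iri":"https://example.com/s"}',
//   '{"Iri":"https://example.com/p"}',
//   '{"Iri":"https://example.com/o"}',
// ]]);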

/// Parse all the nodes in a canonicalized claimgraph.
export function decanonClaimGraph(cg) {
  return cg.map((claim) => claim.map(JSON.parse));
}
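
// decanonClaimGraph inverts canonClaimGraph: for any claimgraph cg of valid
// nodes, decanonClaimGraph(canonClaimGraph(cg)) is deeply equal to cg.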

// recursively lexically sort the keys in an object
// expect(JSON.stringify(orderKeys(
//   { b: '', a: '' }
// ))).toEqual(JSON.stringify(
//   { a: '', b: '' }
// ));
// expect(JSON.stringify(orderKeys(
//   { b: '', a: { c: '', b: '', a: '' } }
// ))).toEqual(JSON.stringify(
//   { a: { a: '', b: '', c: '' }, b: '' }
// ));
function orderKeys(a) {
  let keys;
  let ret;
  switch (typeof a) {
    case 'string':
      return a;
    case 'object':
      keys = Object.keys(a);
      keys.sort();
      ret = {};
      for (const k of keys) {
        ret[k] = orderKeys(a[k]);
      }
      return ret;
    default:
      throw new TypeError(`type error: orderKeys() does not accept type ${typeof a}`);
  }
}