From 7f0b36262da0612082abae53fd400dae5f347e1a Mon Sep 17 00:00:00 2001
From: Oak
Date: Thu, 29 Aug 2024 05:56:55 +0100
Subject: [PATCH] refactor: HG and AddWinsSet (put HG out of the CROs) (#131)

Co-authored-by: Sacha Froment
---
 .github/workflows/test.yml | 3 +-
 buf.gen.yaml | 6 +-
 package.json | 1 +
 packages/crdt/src/cros/AddWinsSet/index.ts | 81 ++-
 packages/crdt/tests/AddWinsSet.test.ts | 361 +-----
 packages/network/package.json | 2 +-
 packages/network/src/proto/messages_pb.ts | 340 ++++++------
 packages/node/src/handlers.ts | 24 +-
 packages/node/src/index.ts | 36 +-
 packages/node/src/operations.ts | 21 +-
 packages/node/src/store/index.ts | 17 +-
 packages/object/package.json | 3 +-
 packages/object/src/hashgraph.ts | 172 +++---
 packages/object/src/index.ts | 97 +++-
 packages/object/src/proto/object.proto | 18 +-
 packages/object/src/proto/object_pb.ts | 605 +++++++++++----------
 packages/object/tests/hashgraph.test.ts | 302 ++++
 packages/object/tests/index.test.ts | 3 -
 pnpm-lock.yaml | 15 +-
 vite.config.mts | 2 +-
 20 files changed, 1105 insertions(+), 1004 deletions(-)
 create mode 100644 packages/object/tests/hashgraph.test.ts
 delete mode 100644 packages/object/tests/index.test.ts

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 58f3b92c..4843b023 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -15,4 +15,5 @@ jobs:
       - shell: bash
         run: |
           pnpm install
-          pnpm test
+          cd packages/object && pnpm build
+          cd ../.. && pnpm test
diff --git a/buf.gen.yaml b/buf.gen.yaml
index 2cc56d55..16dcc12d 100644
--- a/buf.gen.yaml
+++ b/buf.gen.yaml
@@ -2,9 +2,11 @@ version: v2
 plugins:
   - local: ./node_modules/ts-proto/protoc-gen-ts_proto
     strategy: directory
-    out: ./packages
+    # out: ./packages/network/src
+    out: ./packages/object/src
     opt:
       - esModuleInterop=true
      - fileSuffix=_pb
 inputs:
-  - directory: ./packages
+  #- directory: ./packages/network/src
+  - directory: ./packages/object/src
diff --git a/package.json b/package.json
index d01fbbc3..a153b6de 100644
--- a/package.json
+++ b/package.json
@@ -24,6 +24,7 @@
 		"@types/node": "^22.5.0",
 		"assemblyscript": "^0.27.29",
 		"release-it": "^17.6.0",
+		"ts-proto": "^2.0.3",
 		"typedoc": "^0.26.6",
 		"typescript": "^5.5.4",
 		"vite": "^5.4.1",
diff --git a/packages/crdt/src/cros/AddWinsSet/index.ts b/packages/crdt/src/cros/AddWinsSet/index.ts
index 8e74db2b..54e826af 100644
--- a/packages/crdt/src/cros/AddWinsSet/index.ts
+++ b/packages/crdt/src/cros/AddWinsSet/index.ts
@@ -1,72 +1,71 @@
 import {
 	ActionType,
-	HashGraph,
-	Operation,
-	OperationType,
+	type CRO,
+	type Operation,
+	type Vertex,
 } from "@topology-foundation/object";
 
-/// AddWinsSet with support for state and op changes
-export class AddWinsSet<T> {
-	state: Map<T, number>;
-	hashGraph: HashGraph<T>;
+export class AddWinsSet<T> implements CRO<T> {
+	operations: string[] = ["add", "remove"];
+	state: Map<T, boolean>;
 
-	constructor(nodeId: string) {
-		this.state = new Map<T, number>();
-		this.hashGraph = new HashGraph<T>(this.resolveConflicts, nodeId);
+	constructor() {
+		this.state = new Map<T, boolean>();
 	}
 
-	resolveConflicts(op1: Operation<T>, op2: Operation<T>): ActionType {
-		if (op1.type !== op2.type && op1.value === op2.value) {
-			return op1.type === OperationType.Add
-				? ActionType.DropRight
-				: ActionType.DropLeft;
-		}
-		return ActionType.Nop;
+	private _add(value: T): void {
+		if (!this.state.get(value)) this.state.set(value, true);
 	}
 
 	add(value: T): void {
-		const op = new Operation(OperationType.Add, value);
-		this.state.set(value, (this.state.get(value) || 0) + 1);
+		this._add(value);
 	}
 
-	remove(value: T): void {
-		const op = new Operation(OperationType.Remove, value);
-		this.add(value);
+	private _remove(value: T): void {
+		if (this.state.get(value)) this.state.set(value, false);
 	}
 
-	getValue(value: T): number {
-		return this.state.get(value) || 0;
+	remove(value: T): void {
+		this._remove(value);
 	}
 
-	isInSet(value: T): boolean {
-		const count = this.getValue(value);
-		return count > 0 && count % 2 === 1;
+	contains(value: T): boolean {
+		return this.state.get(value) === true;
 	}
 
 	values(): T[] {
 		return Array.from(this.state.entries())
-			.filter(([_, count]) => count % 2 === 1)
+			.filter(([_, exists]) => exists)
 			.map(([value, _]) => value);
 	}
 
-	merge(other: AddWinsSet<T>): void {
-		for (const [value, count] of other.state) {
-			this.state.set(value, Math.max(this.getValue(value), count));
+	// in this case is an array of length 2 and there are only two possible operations
+	resolveConflicts(vertices: Vertex<T>[]): ActionType {
+		if (
+			vertices[0].operation.type !== vertices[1].operation.type &&
+			vertices[0].operation.value === vertices[1].operation.value
+		) {
+			return vertices[0].operation.type === "add"
+				? ActionType.DropRight
+				: ActionType.DropLeft;
 		}
+		return ActionType.Nop;
 	}
 
-	read(): T[] {
-		const operations = this.hashGraph.linearizeOps();
-		const tempCounter = new AddWinsSet<T>("");
-
+	// merged at HG level and called as a callback
+	mergeCallback(operations: Operation<T>[]): void {
+		this.state = new Map<T, boolean>();
 		for (const op of operations) {
-			if (op.type === OperationType.Add) {
-				tempCounter.add(op.value);
-			} else {
-				tempCounter.remove(op.value);
+			switch (op.type) {
+				case "add":
+					if (op.value !== null) this._add(op.value);
+					break;
+				case "remove":
+					if (op.value !== null) this._remove(op.value);
+					break;
+				default:
+					break;
 			}
 		}
-
-		return tempCounter.values();
 	}
 }
diff --git a/packages/crdt/tests/AddWinsSet.test.ts b/packages/crdt/tests/AddWinsSet.test.ts
index 203fb34b..77340d68 100644
--- a/packages/crdt/tests/AddWinsSet.test.ts
+++ b/packages/crdt/tests/AddWinsSet.test.ts
@@ -1,358 +1,35 @@
-import { Operation, OperationType } from "@topology-foundation/object";
 import { beforeEach, describe, expect, test } from "vitest";
 import { AddWinsSet } from "../src/cros/AddWinsSet/index.js";
 
 describe("HashGraph for AddWinSet tests", () => {
 	let cro: AddWinsSet<number>;
-	let op0: Operation<number>;
-	let vertexHash0: string;
-	const peerId = "peerId0";
 
 	beforeEach(() => {
-		cro = new AddWinsSet<number>("peer0");
-		op0 = new Operation(OperationType.Nop, 0);
-		vertexHash0 = cro.hashGraph.rootHash;
+		cro = new AddWinsSet<number>();
 	});
 
-	test("Test: Add Two Vertices", () => {
-		/*
-		      V1:NOP <- V2:ADD(1) <- V2:REMOVE(1)
-		*/
-		const op1: Operation<number> = new Operation(OperationType.Add, 1);
-		const deps1: string[] = [vertexHash0];
-		const vertexHash1 = cro.hashGraph.addVertex(op1, deps1, peerId);
-		let linearOps = cro.hashGraph.linearizeOps();
-		expect(linearOps).toEqual([op0, op1]);
+	test("Test: Add", () => {
+		cro.add(1);
+		let set = cro.values();
+		expect(set).toEqual([1]);
 
-		// Add second vertex
-		const op2: Operation<number> = new Operation(OperationType.Remove, 1);
-		const deps2: string[] = [vertexHash1];
-		const vertexHash2 = cro.hashGraph.addVertex(op2, deps2, peerId);
-		linearOps = 
cro.hashGraph.linearizeOps(); - const orderArray = cro.hashGraph.topologicalSort(); - expect(linearOps).toEqual([op0, op1, op2]); + cro.add(2); + set = cro.values(); + expect(set).toEqual([1, 2]); }); - test("Test: Add Two Concurrent Vertices With Same Value", () => { - /* - _ V2:REMOVE(1) - V1:ADD(1) / - \ _ V3:ADD(1) - */ + test("Test: Add and Remove", () => { + cro.add(1); + let set = cro.values(); + expect(set).toEqual([1]); - const op1: Operation = new Operation(OperationType.Add, 1); - const deps1: string[] = [vertexHash0]; - const vertexHash1 = cro.hashGraph.addVertex(op1, deps1, peerId); + cro.add(2); + set = cro.values(); + expect(set).toEqual([1, 2]); - let linearOps = cro.hashGraph.linearizeOps(); - expect(linearOps).toEqual([op0, op1]); - - // Add second vertex - const op2: Operation = new Operation(OperationType.Remove, 1); - const deps2: string[] = [vertexHash1]; - const vertexHash2 = cro.hashGraph.addVertex(op2, deps2, peerId); - - linearOps = cro.hashGraph.linearizeOps(); - expect(linearOps).toEqual([op0, op1, op2]); - - // Add the third vertex V3 concurrent with V2 - const op3: Operation = new Operation(OperationType.Add, 1); - const deps3: string[] = [vertexHash1]; - const vertexHash3 = cro.hashGraph.addVertex(op3, deps3, peerId); - - linearOps = cro.hashGraph.linearizeOps(); - expect(linearOps).toEqual([op0, op1, op3]); - }); - - test("Test: Add Two Concurrent Vertices With Different Values", () => { - /* - _ V2:REMOVE(1) - V1:ADD(1) / - \ _ V3:ADD(2) - */ - - const op1: Operation = new Operation(OperationType.Add, 1); - const deps1: string[] = [vertexHash0]; - const vertexHash1 = cro.hashGraph.addVertex(op1, deps1, peerId); - let linearOps = cro.hashGraph.linearizeOps(); - expect(linearOps).toEqual([op0, op1]); - - // Add second vertex - const op2: Operation = new Operation(OperationType.Remove, 1); - const deps2: string[] = [vertexHash1]; - const vertexHash2 = cro.hashGraph.addVertex(op2, deps2, peerId); - linearOps = cro.hashGraph.linearizeOps(); - expect(linearOps).toEqual([op0, op1, op2]); - - // Add the third vertex V3 concurrent with V2 - const op3: Operation = new Operation(OperationType.Add, 3); - const deps3: string[] = [vertexHash1]; - const vertexHash3 = cro.hashGraph.addVertex(op3, deps3, peerId); - linearOps = cro.hashGraph.linearizeOps(); - expect([ - [op0, op1, op2, op3], - [op0, op1, op3, op2], - ]).toContainEqual(linearOps); - }); - - test("Test: Tricky Case", () => { - /* - ___ V2:REMOVE(1) <- V4:ADD(10) - V1:ADD(1) / - \ ___ V3:ADD(1) <- V5:REMOVE(5) - */ - - const op1: Operation = new Operation(OperationType.Add, 1); - const deps1: string[] = [vertexHash0]; - const vertexHash1 = cro.hashGraph.addVertex(op1, deps1, peerId); - - // Add second vertex - const op2: Operation = new Operation(OperationType.Remove, 1); - const deps2: string[] = [vertexHash1]; - const vertexHash2 = cro.hashGraph.addVertex(op2, deps2, peerId); - - // Add the third vertex V3 concurrent with V2 - const op3: Operation = new Operation(OperationType.Add, 1); - const deps3: string[] = [vertexHash1]; - const vertexHash3 = cro.hashGraph.addVertex(op3, deps3, peerId); - - // Add the vertex V4 with dependency on V2 - const op4: Operation = new Operation(OperationType.Add, 10); - const deps4: string[] = [vertexHash2]; - const vertexHash4 = cro.hashGraph.addVertex(op4, deps4, peerId); - - // Add the vertex V5 with dependency on V3 - const op5: Operation = new Operation(OperationType.Remove, 5); - const deps5: string[] = [vertexHash3]; - const vertexHash5 = cro.hashGraph.addVertex(op5, 
deps5, peerId); - const linearOps = cro.hashGraph.linearizeOps(); - expect([ - [op0, op1, op4, op3, op5], - [op0, op1, op3, op5, op4], - ]).toContainEqual(linearOps); - }); - - test("Test: Yuta Papa's Case", () => { - /* - ___ V2:REMOVE(1) <- V4:ADD(2) - V1:ADD(1) / - \ ___ V3:REMOVE(2) <- V5:ADD(1) - */ - - const op1: Operation = new Operation(OperationType.Add, 1); - const deps1: string[] = [vertexHash0]; - const vertexHash1 = cro.hashGraph.addVertex(op1, deps1, peerId); - - // Add second vertex - const op2: Operation = new Operation(OperationType.Remove, 1); - const deps2: string[] = [vertexHash1]; - const vertexHash2 = cro.hashGraph.addVertex(op2, deps2, peerId); - - // Add the third vertex V3 concurrent with V2 - const op3: Operation = new Operation(OperationType.Remove, 2); - const deps3: string[] = [vertexHash1]; - const vertexHash3 = cro.hashGraph.addVertex(op3, deps3, peerId); - - // Add the vertex V4 with dependency on V2 - const op4: Operation = new Operation(OperationType.Add, 2); - const deps4: string[] = [vertexHash2]; - const vertexHash4 = cro.hashGraph.addVertex(op4, deps4, peerId); - - // Add the vertex V5 with dependency on V3 - const op5: Operation = new Operation(OperationType.Add, 1); - const deps5: string[] = [vertexHash3]; - const vertexHash5 = cro.hashGraph.addVertex(op5, deps5, peerId); - const linearOps = cro.hashGraph.linearizeOps(); - expect([ - [op0, op1, op4, op5], - [op0, op1, op5, op4], - ]).toContainEqual(linearOps); - }); - - test("Test: Mega Complex Case", () => { - /* - __ V6:ADD(3) - / - ___ V2:ADD(1) <-- V3:RM(2) <-- V7:RM(1) <-- V8:RM(3) - / ______________/ - V1:ADD(1)/ / - \ / - \ ___ V4:RM(2) <-- V5:ADD(2) <-- V9:RM(1) - - Topological Sorted Array: - [V1, V4, V5, V9, V2, V3, V7, V8, V6] - OR - [V1, V2, V3, V6, V7, V4, V5, V8, V9] - OR - [V1, V2, V3, V6, V7, V4, V5, V9, V8] - */ - - const op1: Operation = new Operation(OperationType.Add, 1); - const deps1: string[] = [vertexHash0]; - const vertexHash1 = cro.hashGraph.addVertex(op1, deps1, peerId); - console.log("vertex1: ", vertexHash1); - // Add second vertex - const op2: Operation = new Operation(OperationType.Add, 1); - const deps2: string[] = [vertexHash1]; - const vertexHash2 = cro.hashGraph.addVertex(op2, deps2, peerId); - console.log("vertex2: ", vertexHash2); - // Add the third vertex V3 with dependency on V2 - const op3: Operation = new Operation(OperationType.Remove, 2); - const deps3: string[] = [vertexHash2]; - const vertexHash3 = cro.hashGraph.addVertex(op3, deps3, peerId); - console.log("vertex3: ", vertexHash3); - // Add the vertex V4 -> [V1] - const op4: Operation = new Operation(OperationType.Remove, 2); - const deps4: string[] = [vertexHash1]; - const vertexHash4 = cro.hashGraph.addVertex(op4, deps4, peerId); - console.log("vertex4: ", vertexHash4); - // Add the vertex V5 -> [V4] - const op5: Operation = new Operation(OperationType.Add, 2); - const deps5: string[] = [vertexHash4]; - const vertexHash5 = cro.hashGraph.addVertex(op5, deps5, peerId); - console.log("vertex5: ", vertexHash5); - // Add the vertex V6 ->[V3] - const op6: Operation = new Operation(OperationType.Add, 3); - const deps6: string[] = [vertexHash3]; - const vertexHash6 = cro.hashGraph.addVertex(op6, deps6, peerId); - console.log("vertex6: ", vertexHash6); - // Add the vertex V7 -> [V3] - const op7: Operation = new Operation(OperationType.Remove, 1); - const deps7: string[] = [vertexHash3]; - const vertexHash7 = cro.hashGraph.addVertex(op7, deps7, peerId); - console.log("vertex7: ", vertexHash7); - // Add the vertex 
V8 -> [V7, V5] - const op8: Operation = new Operation(OperationType.Remove, 3); - const deps8: string[] = [vertexHash7, vertexHash5]; - const vertexHash8 = cro.hashGraph.addVertex(op8, deps8, peerId); - console.log("vertex8: ", vertexHash8); - // Add the vertex V9 -> [V5] - const op9: Operation = new Operation(OperationType.Remove, 1); - const deps9: string[] = [vertexHash5]; - const vertexHash9 = cro.hashGraph.addVertex(op9, deps9, peerId); - console.log("vertex9: ", vertexHash9); - - const sortedOrder = cro.hashGraph.topologicalSort(); - expect([ - [ - vertexHash0, - vertexHash1, - vertexHash4, - vertexHash5, - vertexHash9, - vertexHash2, - vertexHash3, - vertexHash7, - vertexHash8, - vertexHash6, - ], - ]).toContainEqual(sortedOrder); - console.log(sortedOrder); - const linearOps = cro.hashGraph.linearizeOps(); - // expect([[op0, op1, op2, op6, op7, op4, op5], [op0, op1, op4, op5, op9, op2, op3, op]]).toContainEqual(linearOps); - }); - - test("Test: Mega Complex Case 1", () => { - /* - __ V6:ADD(3) - / - ___ V2:ADD(1) <-- V3:RM(2) <-- V7:RM(1) <-- V8:RM(3) - / ^ - V1:ADD(1)/ \ - \ \ - \ ___ V4:RM(2) <-------------------- V5:ADD(2) <-- V9:RM(1) - 6, 7, 8, 3, 2, 9, 5, 4, 1 - Topological Sorted Array: - [V1, V2, V3, V6, V4, V5, V9, V7, V8] - OR - [V1, V4, V2, V3, V7, V8, V6, V5, V9] - OR - [1, 4, 2, 3, 7, 5, 9, 8, 6] - */ - - const op1: Operation = new Operation(OperationType.Add, 1); - const deps1: string[] = [vertexHash0]; - const vertexHash1 = cro.hashGraph.addVertex(op1, deps1, peerId); - console.log("vertex1: ", vertexHash1); - // Add second vertex - const op2: Operation = new Operation(OperationType.Add, 1); - const deps2: string[] = [vertexHash1]; - const vertexHash2 = cro.hashGraph.addVertex(op2, deps2, peerId); - console.log("vertex2: ", vertexHash2); - // Add the third vertex V3 with dependency on V2 - const op3: Operation = new Operation(OperationType.Remove, 2); - const deps3: string[] = [vertexHash2]; - const vertexHash3 = cro.hashGraph.addVertex(op3, deps3, peerId); - console.log("vertex3: ", vertexHash3); - // Add the vertex V4 -> [V1] - const op4: Operation = new Operation(OperationType.Remove, 2); - const deps4: string[] = [vertexHash1]; - const vertexHash4 = cro.hashGraph.addVertex(op4, deps4, peerId); - console.log("vertex4: ", vertexHash4); - // Add the vertex V6 ->[V3] - const op6: Operation = new Operation(OperationType.Add, 3); - const deps6: string[] = [vertexHash3]; - const vertexHash6 = cro.hashGraph.addVertex(op6, deps6, peerId); - console.log("vertex6: ", vertexHash6); - // Add the vertex V7 -> [V3] - const op7: Operation = new Operation(OperationType.Remove, 1); - const deps7: string[] = [vertexHash3]; - const vertexHash7 = cro.hashGraph.addVertex(op7, deps7, peerId); - console.log("vertex7: ", vertexHash7); - // Add the vertex V5 -> [V4, V7] - const op5: Operation = new Operation(OperationType.Add, 2); - const deps5: string[] = [vertexHash4, vertexHash7]; - const vertexHash5 = cro.hashGraph.addVertex(op5, deps5, peerId); - console.log("vertex5: ", vertexHash5); - // Add the vertex V8 -> [V7] - const op8: Operation = new Operation(OperationType.Remove, 3); - const deps8: string[] = [vertexHash7, vertexHash5]; - const vertexHash8 = cro.hashGraph.addVertex(op8, deps8, peerId); - console.log("vertex8: ", vertexHash8); - // Add the vertex V9 -> [V5] - const op9: Operation = new Operation(OperationType.Remove, 1); - const deps9: string[] = [vertexHash5]; - const vertexHash9 = cro.hashGraph.addVertex(op9, deps9, peerId); - console.log("vertex9: ", vertexHash9); - - 
const sortedOrder = cro.hashGraph.topologicalSort(); - console.log(sortedOrder); - // expect([[op0, op1, op2, op3, op6, op4, op5, op9, op7, op8]]).toContainEqual(sortedOrder); - }); - - test("Test: Joao's latest brain teaser", () => { - /* - - __ V2:Add(2) <------------\ - V1:Add(1) / \ - V5:RM(2) - \__ V3:RM(2) <- V4:RM(2) <--/ - - */ - const op1: Operation = new Operation(OperationType.Add, 1); - const deps1: string[] = [vertexHash0]; - const vertexHash1 = cro.hashGraph.addVertex(op1, deps1, peerId); - - // Add the second vertex V2 <- [V1] - const op2: Operation = new Operation(OperationType.Add, 2); - const deps2: string[] = [vertexHash1]; - const vertexHash2 = cro.hashGraph.addVertex(op2, deps2, peerId); - - // Add the third vertex V3 <- [V1] - const op3: Operation = new Operation(OperationType.Remove, 2); - const deps3: string[] = [vertexHash1]; - const vertexHash3 = cro.hashGraph.addVertex(op3, deps3, peerId); - - // Add the fourth vertex V4 <- [V3] - const op4: Operation = new Operation(OperationType.Remove, 2); - const deps4: string[] = [vertexHash3]; - const vertexHash4 = cro.hashGraph.addVertex(op4, deps4, peerId); - - // Add the fifth vertex V5 <- [V2, V4] - const op5: Operation = new Operation(OperationType.Remove, 2); - const deps5: string[] = [vertexHash2, vertexHash4]; - const vertexHash5 = cro.hashGraph.addVertex(op5, deps5, peerId); - - const linearOps = cro.hashGraph.linearizeOps(); - expect(linearOps).toEqual([op0, op1, op2, op5]); + cro.remove(1); + set = cro.values(); + expect(cro.contains(1)).toBe(false); + expect(set).toEqual([2]); }); }); diff --git a/packages/network/package.json b/packages/network/package.json index 211ff00f..40873daf 100644 --- a/packages/network/package.json +++ b/packages/network/package.json @@ -26,6 +26,7 @@ "react-native-webrtc": "^124.0.3" }, "dependencies": { + "@bufbuild/protobuf": "^2.0.0", "@chainsafe/libp2p-gossipsub": "^13.1.0", "@chainsafe/libp2p-noise": "^15.1.1", "@chainsafe/libp2p-yamux": "^6.0.2", @@ -48,7 +49,6 @@ "it-map": "^3.1.1", "it-pipe": "^3.0.1", "libp2p": "^1.8.3", - "protobufjs": "^7.4.0", "ts-proto": "^2.0.3", "uint8arrays": "^5.1.0" } diff --git a/packages/network/src/proto/messages_pb.ts b/packages/network/src/proto/messages_pb.ts index 98859648..bde7f067 100644 --- a/packages/network/src/proto/messages_pb.ts +++ b/packages/network/src/proto/messages_pb.ts @@ -1,217 +1,197 @@ // Code generated by protoc-gen-ts_proto. DO NOT EDIT. 
// versions: -// protoc-gen-ts_proto v1.181.1 +// protoc-gen-ts_proto v2.0.3 // protoc unknown -// source: network/src/proto/messages.proto +// source: proto/messages.proto /* eslint-disable */ -import _m0 from "protobufjs/minimal.js"; +import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire"; export const protobufPackage = "topology.network"; export interface Message { - sender: string; - type: Message_MessageType; - data: Uint8Array; + sender: string; + type: Message_MessageType; + data: Uint8Array; } export enum Message_MessageType { - UPDATE = 0, - SYNC = 1, - SYNC_ACCEPT = 2, - SYNC_REJECT = 3, - CUSTOM = 4, - UNRECOGNIZED = -1, + UPDATE = 0, + SYNC = 1, + SYNC_ACCEPT = 2, + SYNC_REJECT = 3, + CUSTOM = 4, + UNRECOGNIZED = -1, } export function message_MessageTypeFromJSON(object: any): Message_MessageType { - switch (object) { - case 0: - case "UPDATE": - return Message_MessageType.UPDATE; - case 1: - case "SYNC": - return Message_MessageType.SYNC; - case 2: - case "SYNC_ACCEPT": - return Message_MessageType.SYNC_ACCEPT; - case 3: - case "SYNC_REJECT": - return Message_MessageType.SYNC_REJECT; - case 4: - case "CUSTOM": - return Message_MessageType.CUSTOM; - case -1: - case "UNRECOGNIZED": - default: - return Message_MessageType.UNRECOGNIZED; - } + switch (object) { + case 0: + case "UPDATE": + return Message_MessageType.UPDATE; + case 1: + case "SYNC": + return Message_MessageType.SYNC; + case 2: + case "SYNC_ACCEPT": + return Message_MessageType.SYNC_ACCEPT; + case 3: + case "SYNC_REJECT": + return Message_MessageType.SYNC_REJECT; + case 4: + case "CUSTOM": + return Message_MessageType.CUSTOM; + case -1: + case "UNRECOGNIZED": + default: + return Message_MessageType.UNRECOGNIZED; + } } export function message_MessageTypeToJSON(object: Message_MessageType): string { - switch (object) { - case Message_MessageType.UPDATE: - return "UPDATE"; - case Message_MessageType.SYNC: - return "SYNC"; - case Message_MessageType.SYNC_ACCEPT: - return "SYNC_ACCEPT"; - case Message_MessageType.SYNC_REJECT: - return "SYNC_REJECT"; - case Message_MessageType.CUSTOM: - return "CUSTOM"; - case Message_MessageType.UNRECOGNIZED: - default: - return "UNRECOGNIZED"; - } + switch (object) { + case Message_MessageType.UPDATE: + return "UPDATE"; + case Message_MessageType.SYNC: + return "SYNC"; + case Message_MessageType.SYNC_ACCEPT: + return "SYNC_ACCEPT"; + case Message_MessageType.SYNC_REJECT: + return "SYNC_REJECT"; + case Message_MessageType.CUSTOM: + return "CUSTOM"; + case Message_MessageType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } } function createBaseMessage(): Message { - return { sender: "", type: 0, data: new Uint8Array(0) }; + return { sender: "", type: 0, data: new Uint8Array(0) }; } export const Message = { - encode( - message: Message, - writer: _m0.Writer = _m0.Writer.create(), - ): _m0.Writer { - if (message.sender !== "") { - writer.uint32(10).string(message.sender); - } - if (message.type !== 0) { - writer.uint32(16).int32(message.type); - } - if (message.data.length !== 0) { - writer.uint32(26).bytes(message.data); - } - return writer; - }, - - decode(input: _m0.Reader | Uint8Array, length?: number): Message { - const reader = - input instanceof _m0.Reader ? input : _m0.Reader.create(input); - const end = length === undefined ? 
reader.len : reader.pos + length; - const message = createBaseMessage(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - if (tag !== 10) { - break; - } - - message.sender = reader.string(); - continue; - case 2: - if (tag !== 16) { - break; - } - - message.type = reader.int32() as any; - continue; - case 3: - if (tag !== 26) { - break; - } - - message.data = reader.bytes(); - continue; - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skipType(tag & 7); - } - return message; - }, - - fromJSON(object: any): Message { - return { - sender: isSet(object.sender) ? globalThis.String(object.sender) : "", - type: isSet(object.type) ? message_MessageTypeFromJSON(object.type) : 0, - data: isSet(object.data) - ? bytesFromBase64(object.data) - : new Uint8Array(0), - }; - }, - - toJSON(message: Message): unknown { - const obj: any = {}; - if (message.sender !== "") { - obj.sender = message.sender; - } - if (message.type !== 0) { - obj.type = message_MessageTypeToJSON(message.type); - } - if (message.data.length !== 0) { - obj.data = base64FromBytes(message.data); - } - return obj; - }, - - create, I>>(base?: I): Message { - return Message.fromPartial(base ?? ({} as any)); - }, - fromPartial, I>>(object: I): Message { - const message = createBaseMessage(); - message.sender = object.sender ?? ""; - message.type = object.type ?? 0; - message.data = object.data ?? new Uint8Array(0); - return message; - }, + encode(message: Message, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.sender !== "") { + writer.uint32(10).string(message.sender); + } + if (message.type !== 0) { + writer.uint32(16).int32(message.type); + } + if (message.data.length !== 0) { + writer.uint32(26).bytes(message.data); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): Message { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMessage(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.sender = reader.string(); + continue; + case 2: + if (tag !== 16) { + break; + } + + message.type = reader.int32() as any; + continue; + case 3: + if (tag !== 26) { + break; + } + + message.data = reader.bytes(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): Message { + return { + sender: isSet(object.sender) ? globalThis.String(object.sender) : "", + type: isSet(object.type) ? message_MessageTypeFromJSON(object.type) : 0, + data: isSet(object.data) ? bytesFromBase64(object.data) : new Uint8Array(0), + }; + }, + + toJSON(message: Message): unknown { + const obj: any = {}; + if (message.sender !== "") { + obj.sender = message.sender; + } + if (message.type !== 0) { + obj.type = message_MessageTypeToJSON(message.type); + } + if (message.data.length !== 0) { + obj.data = base64FromBytes(message.data); + } + return obj; + }, + + create, I>>(base?: I): Message { + return Message.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): Message { + const message = createBaseMessage(); + message.sender = object.sender ?? ""; + message.type = object.type ?? 0; + message.data = object.data ?? 
new Uint8Array(0); + return message; + }, }; function bytesFromBase64(b64: string): Uint8Array { - if ((globalThis as any).Buffer) { - return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); - } else { - const bin = globalThis.atob(b64); - const arr = new Uint8Array(bin.length); - for (let i = 0; i < bin.length; ++i) { - arr[i] = bin.charCodeAt(i); - } - return arr; - } + if ((globalThis as any).Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } } function base64FromBytes(arr: Uint8Array): string { - if ((globalThis as any).Buffer) { - return globalThis.Buffer.from(arr).toString("base64"); - } else { - const bin: string[] = []; - arr.forEach((byte) => { - bin.push(globalThis.String.fromCharCode(byte)); - }); - return globalThis.btoa(bin.join("")); - } + if ((globalThis as any).Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(globalThis.String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } } -type Builtin = - | Date - | Function - | Uint8Array - | string - | number - | boolean - | undefined; - -export type DeepPartial = T extends Builtin - ? T - : T extends globalThis.Array - ? globalThis.Array> - : T extends ReadonlyArray - ? ReadonlyArray> - : T extends {} - ? { [K in keyof T]?: DeepPartial } - : Partial; +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends globalThis.Array ? globalThis.Array> + : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; type KeysOfUnion = T extends T ? keyof T : never; -export type Exact = P extends Builtin - ? P - : P & { [K in keyof P]: Exact } & { - [K in Exclude>]: never; - }; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; function isSet(value: any): boolean { - return value !== null && value !== undefined; + return value !== null && value !== undefined; } diff --git a/packages/node/src/handlers.ts b/packages/node/src/handlers.ts index 62d6643f..3f2de986 100644 --- a/packages/node/src/handlers.ts +++ b/packages/node/src/handlers.ts @@ -1,6 +1,6 @@ import type { Stream } from "@libp2p/interface"; import { Message, Message_MessageType } from "@topology-foundation/network"; -import { TopologyObject } from "@topology-foundation/object"; +import { TopologyObjectBase } from "@topology-foundation/object"; import * as lp from "it-length-prefixed"; import type { TopologyNode } from "./index.js"; @@ -60,15 +60,14 @@ export async function topologyMessagesHandler( operations array doesn't contain the full remote operations array */ function updateHandler(node: TopologyNode, data: Uint8Array) { - const object_operations = TopologyObject.decode(data); + const object_operations = TopologyObjectBase.decode(data); let object = node.objectStore.get(object_operations.id); if (!object) { - object = TopologyObject.create({ + object = TopologyObjectBase.create({ id: object_operations.id, - operations: [], }); } - object.operations.push(...object_operations.operations); + object.vertices.push(...object_operations.vertices); node.objectStore.put(object.id, object); } @@ -103,22 +102,23 @@ function syncHandler( function syncAcceptHandler(node: TopologyNode, data: Uint8Array) { // don't blindly accept, validate the operations // might have have appeared in the meantime - const object_operations = TopologyObject.decode(data); - let object = node.objectStore.get(object_operations.id); + const object_operations = TopologyObjectBase.decode(data); + let object: TopologyObjectBase | undefined = node.objectStore.get( + object_operations.id, + ); if (!object) { - object = TopologyObject.create({ + object = TopologyObjectBase.create({ id: object_operations.id, - operations: [], }); } - object_operations.operations.filter((op) => { - if (object?.operations.find((op2) => op.nonce === op2.nonce)) { + object_operations.vertices.filter((v1) => { + if (object?.vertices.find((v2) => v1.hash === v2.hash)) { return false; } return true; }); - object.operations.push(...object_operations.operations); + object.vertices.push(...object_operations.vertices); node.objectStore.put(object.id, object); // TODO missing sending back the diff diff --git a/packages/node/src/index.ts b/packages/node/src/index.ts index f3a16b55..94425455 100644 --- a/packages/node/src/index.ts +++ b/packages/node/src/index.ts @@ -6,7 +6,11 @@ import { TopologyNetworkNode, type TopologyNetworkNodeConfig, } from "@topology-foundation/network"; -import { TopologyObject, newTopologyObject } from "@topology-foundation/object"; +import { + type CRO, + TopologyObjectBase, + newTopologyObject, +} from "@topology-foundation/object"; import { topologyMessagesHandler } from "./handlers.js"; import { OPERATIONS, executeObjectOperation } from "./operations.js"; import { TopologyObjectStore } from "./store/index.js"; @@ -71,9 +75,10 @@ export class TopologyNode { this.networkNode.sendMessage(peerId, [protocol], message); } - async createObject(id?: string, path?: string, abi?: string) { + async createObject(cro: CRO, id?: string, path?: string, abi?: string) { const object = await newTopologyObject( this.networkNode.peerId, + cro, path, id, abi, @@ -81,7 +86,7 @@ export class TopologyNode { executeObjectOperation( this, 
OPERATIONS.CREATE, - TopologyObject.encode(object).finish(), + TopologyObjectBase.encode(object).finish(), ); this.networkNode.addGroupMessageHandler(object.id, async (e) => topologyMessagesHandler(this, undefined, e.detail.msg.data), @@ -90,31 +95,25 @@ export class TopologyNode { } updateObject(id: string, operations: { fn: string; args: string[] }[]) { - const object = TopologyObject.create({ + // TODO: needs refactor for working with hash graph + const object = TopologyObjectBase.create({ id, - operations: operations.map((op) => { - return { - nonce: generateNonce(op.fn, op.args), - fn: op.fn, - args: op.args, - }; - }), }); executeObjectOperation( this, OPERATIONS.UPDATE, - TopologyObject.encode(object).finish(), + TopologyObjectBase.encode(object).finish(), ); } async subscribeObject(id: string, fetch?: boolean, peerId?: string) { - const object = TopologyObject.create({ + const object = TopologyObjectBase.create({ id, }); executeObjectOperation( this, OPERATIONS.SUBSCRIBE, - TopologyObject.encode(object).finish(), + TopologyObjectBase.encode(object).finish(), fetch, peerId, ); @@ -125,13 +124,13 @@ export class TopologyNode { } unsubscribeObject(id: string, purge?: boolean) { - const object = TopologyObject.create({ + const object = TopologyObjectBase.create({ id, }); executeObjectOperation( this, OPERATIONS.UNSUBSCRIBE, - TopologyObject.encode(object).finish(), + TopologyObjectBase.encode(object).finish(), purge, ); } @@ -141,14 +140,13 @@ export class TopologyNode { operations: { nonce: string; fn: string; args: string[] }[], peerId?: string, ) { - const object = TopologyObject.create({ + const object = TopologyObjectBase.create({ id, - operations, }); executeObjectOperation( this, OPERATIONS.SYNC, - TopologyObject.encode(object).finish(), + TopologyObjectBase.encode(object).finish(), peerId, ); } diff --git a/packages/node/src/operations.ts b/packages/node/src/operations.ts index 9c614ce4..74a54a89 100644 --- a/packages/node/src/operations.ts +++ b/packages/node/src/operations.ts @@ -1,5 +1,5 @@ import { Message, Message_MessageType } from "@topology-foundation/network"; -import { TopologyObject } from "@topology-foundation/object"; +import { TopologyObjectBase } from "@topology-foundation/object"; import type { TopologyNode } from "./index.js"; /* Object operations */ @@ -52,7 +52,7 @@ export async function executeObjectOperation( /* data: { id: string, abi: string, bytecode: Uint8Array } */ function createObject(node: TopologyNode, data: Uint8Array) { - const object = TopologyObject.decode(data); + const object = TopologyObjectBase.decode(data); node.networkNode.subscribe(object.id); node.objectStore.put(object.id, object); } @@ -62,18 +62,17 @@ function createObject(node: TopologyNode, data: Uint8Array) { operations array doesn't contain the full remote operations array */ function updateObject(node: TopologyNode, data: Uint8Array) { - const object_operations = TopologyObject.decode(data); + const object_operations = TopologyObjectBase.decode(data); let object = node.objectStore.get(object_operations.id); if (!object) { - object = TopologyObject.create({ + object = TopologyObjectBase.create({ id: object_operations.id, - operations: [], }); } - for (const op of object_operations.operations) { - if (object.operations.some((o) => o.nonce === op.nonce)) continue; - object.operations.push(op); + for (const v1 of object_operations.vertices) { + if (object.vertices.some((v2) => v1.hash === v2.hash)) continue; + object.vertices.push(v1); } node.objectStore.put(object.id, object); @@ 
-91,7 +90,7 @@ async function subscribeObject( fetch?: boolean, peerId?: string, ) { - const object = TopologyObject.decode(data); + const object = TopologyObjectBase.decode(data); node.networkNode.subscribe(object.id); if (!fetch) return; @@ -123,7 +122,7 @@ function unsubscribeObject( data: Uint8Array, purge?: boolean, ) { - const object = TopologyObject.decode(data); + const object = TopologyObjectBase.decode(data); node.networkNode.unsubscribe(object.id); if (!purge) return; node.objectStore.remove(object.id); @@ -138,7 +137,7 @@ async function syncObject( data: Uint8Array, peerId?: string, ) { - const object = TopologyObject.decode(data); + const object = TopologyObjectBase.decode(data); const message = Message.create({ type: Message_MessageType.SYNC, data: data, diff --git a/packages/node/src/store/index.ts b/packages/node/src/store/index.ts index ca097c85..17b70408 100644 --- a/packages/node/src/store/index.ts +++ b/packages/node/src/store/index.ts @@ -1,25 +1,25 @@ -import type { TopologyObject } from "@topology-foundation/object"; +import type { TopologyObjectBase } from "@topology-foundation/object"; export type TopologyObjectStoreCallback = ( objectId: string, - object: TopologyObject, + object: TopologyObjectBase, ) => void; export class TopologyObjectStore { // TODO: should be abstracted in handling multiple types of storage - private _store: Map; + private _store: Map; private _subscriptions: Map; constructor() { - this._store = new Map(); + this._store = new Map(); this._subscriptions = new Map(); } - get(objectId: string): TopologyObject | undefined { + get(objectId: string): TopologyObjectBase | undefined { return this._store.get(objectId); } - put(objectId: string, object: TopologyObject) { + put(objectId: string, object: TopologyObjectBase) { this._store.set(objectId, object); this._notifySubscribers(objectId, object); } @@ -31,7 +31,10 @@ export class TopologyObjectStore { this._subscriptions.get(objectId)?.push(callback); } - private _notifySubscribers(objectId: string, object: TopologyObject): void { + private _notifySubscribers( + objectId: string, + object: TopologyObjectBase, + ): void { const callbacks = this._subscriptions.get(objectId); if (callbacks) { for (const callback of callbacks) { diff --git a/packages/object/package.json b/packages/object/package.json index e998b626..11e6523a 100644 --- a/packages/object/package.json +++ b/packages/object/package.json @@ -9,6 +9,7 @@ "type": "module", "types": "./dist/src/index.d.ts", "files": ["src", "dist", "!dist/test", "!**/*.tsbuildinfo"], + "main": "./dist/src/index.js", "exports": { ".": { "types": "./dist/src/index.d.ts", @@ -26,7 +27,7 @@ "assemblyscript": "^0.27.29" }, "dependencies": { - "protobufjs": "^7.4.0", + "@bufbuild/protobuf": "^2.0.0", "ts-proto": "^2.0.3" } } diff --git a/packages/object/src/hashgraph.ts b/packages/object/src/hashgraph.ts index 48c96249..0e179947 100644 --- a/packages/object/src/hashgraph.ts +++ b/packages/object/src/hashgraph.ts @@ -1,13 +1,10 @@ import * as crypto from "node:crypto"; type Hash = string; +export type Operation = { type: string; value: T | null }; -class Vertex { - constructor( - readonly hash: Hash, - readonly operation: Operation, - readonly dependencies: Set, - ) {} +enum OperationType { + NOP = "-1", } export enum ActionType { @@ -17,107 +14,112 @@ export enum ActionType { Swap = 3, } -export enum OperationType { - Add = 0, - Remove = 1, - Nop = 2, -} - -export class Operation { - constructor( - readonly type: OperationType, - readonly value: T, - ) {} +export 
interface Vertex { + hash: Hash; + nodeId: string; + // internal Operation type enum converted to number + // -1 for NOP + operation: Operation; + dependencies: Hash[]; } -export interface IHashGraph { - addVertex(op: T, deps: Hash[], nodeId: string): Hash; - addToFrontier(op: T): Hash; - topologicalSort(): Hash[]; - areCausallyRelated(vertexHash1: Hash, vertexHash2: Hash): boolean; - getFrontier(): Hash[]; - getDependencies(vertexHash: Hash): Hash[] | undefined; - getVertex(vertexHash: Hash): Vertex | undefined; - getAllVertices(): Vertex[]; -} +export class HashGraph { + nodeId: string; + resolveConflicts: (vertices: Vertex[]) => ActionType; -export class HashGraph { - private vertices: Map> = new Map(); - private frontier: Set = new Set(); - private forwardEdges: Map> = new Map(); - rootHash: Hash = ""; + vertices: Map> = new Map(); + frontier: Hash[] = []; + forwardEdges: Map = new Map(); + static readonly rootHash: Hash = computeHash( + "", + { type: OperationType.NOP, value: null }, + [], + ); constructor( - private resolveConflicts: ( - op1: Operation, - op2: Operation, - ) => ActionType, - private nodeId: string, + nodeId: string, + resolveConflicts: (vertices: Vertex[]) => ActionType, ) { - // Create and add the NOP root vertex - const nopOperation = new Operation(OperationType.Nop, 0 as T); - this.rootHash = this.computeHash(nopOperation, [], ""); - const rootVertex = new Vertex(this.rootHash, nopOperation, new Set()); - this.vertices.set(this.rootHash, rootVertex); - this.frontier.add(this.rootHash); - this.forwardEdges.set(this.rootHash, new Set()); - } + this.nodeId = nodeId; + this.resolveConflicts = resolveConflicts; - // Time complexity: O(1), Space complexity: O(1) - private computeHash(op: Operation, deps: Hash[], nodeId: string): Hash { - const serialized = JSON.stringify({ op, deps, nodeId }); - const hash = crypto.createHash("sha256").update(serialized).digest("hex"); - - return hash; + // Create and add the NOP root vertex + const rootVertex: Vertex = { + hash: HashGraph.rootHash, + nodeId: "", + operation: { + type: OperationType.NOP, + value: null, + }, + dependencies: [], + }; + this.vertices.set(HashGraph.rootHash, rootVertex); + this.frontier.push(HashGraph.rootHash); + this.forwardEdges.set(HashGraph.rootHash, []); } addToFrontier(operation: Operation): Hash { const deps = this.getFrontier(); - const hash = this.computeHash(operation, deps, this.nodeId); - const vertex = new Vertex(hash, operation, new Set(deps)); + const hash = computeHash(this.nodeId, operation, deps); + const vertex: Vertex = { + hash, + nodeId: this.nodeId, + operation, + dependencies: deps, + }; this.vertices.set(hash, vertex); - this.frontier.add(hash); + this.frontier.push(hash); // Update forward edges for (const dep of deps) { if (!this.forwardEdges.has(dep)) { - this.forwardEdges.set(dep, new Set()); + this.forwardEdges.set(dep, []); } - this.forwardEdges.get(dep)?.add(hash); - this.frontier.delete(dep); + this.forwardEdges.get(dep)?.push(hash); } + + const depsSet = new Set(deps); + this.frontier = this.frontier.filter((hash) => !depsSet.has(hash)); return hash; } + // Time complexity: O(d), where d is the number of dependencies // Space complexity: O(d) - addVertex(op: Operation, deps: Hash[], nodeId: string): Hash { + addVertex(operation: Operation, deps: Hash[], nodeId: string): Hash { + const hash = computeHash(nodeId, operation, deps); + if (this.vertices.has(hash)) { + return hash; // Vertex already exists + } + // Temporary fix: don't add the vertex if the dependencies are not 
present in the local HG. if ( !deps.every((dep) => this.forwardEdges.has(dep) || this.vertices.has(dep)) ) { - console.log("Invalid dependency detected."); + console.error("Invalid dependency detected."); return ""; } - const hash = this.computeHash(op, deps, nodeId); - if (this.vertices.has(hash)) { - return hash; // Vertex already exists - } - - const vertex = new Vertex(hash, op, new Set(deps)); + const vertex: Vertex = { + hash, + nodeId, + operation, + dependencies: deps, + }; this.vertices.set(hash, vertex); - this.frontier.add(hash); + this.frontier.push(hash); // Update forward edges for (const dep of deps) { if (!this.forwardEdges.has(dep)) { - this.forwardEdges.set(dep, new Set()); + this.forwardEdges.set(dep, []); } - this.forwardEdges.get(dep)?.add(hash); - this.frontier.delete(dep); + this.forwardEdges.get(dep)?.push(hash); } + const depsSet = new Set(deps); + this.frontier = this.frontier.filter((hash) => !depsSet.has(hash)); + return hash; } @@ -131,19 +133,19 @@ export class HashGraph { visited.add(hash); - const children = this.forwardEdges.get(hash) || new Set(); + const children = this.forwardEdges.get(hash) || []; for (const child of children) { visit(child); } result.push(hash); }; // Start with the root vertex - visit(this.rootHash); + visit(HashGraph.rootHash); return result.reverse(); } - linearizeOps(): Operation[] { + linearizeOperations(): Operation[] { const order = this.topologicalSort(); const result: Operation[] = []; let i = 0; @@ -157,13 +159,13 @@ export class HashGraph { const moving = order[j]; if (!this.areCausallyRelated(anchor, moving)) { - const op1 = this.vertices.get(anchor)?.operation; - const op2 = this.vertices.get(moving)?.operation; + const v1 = this.vertices.get(anchor); + const v2 = this.vertices.get(moving); let action: ActionType; - if (!op1 || !op2) { + if (!v1 || !v2) { action = ActionType.Nop; } else { - action = this.resolveConflicts(op1, op2); + action = this.resolveConflicts([v1, v2]); } switch (action) { @@ -190,7 +192,7 @@ export class HashGraph { if (shouldIncrementI) { const op = this.vertices.get(order[i])?.operation; - if (op) result.push(); + if (op && op.value !== null) result.push(op); i++; } } @@ -244,11 +246,6 @@ export class HashGraph { return Array.from(this.frontier); } - // Time complexity: O(1), Space complexity: O(1) - getRoot(): Hash { - return this.rootHash; - } - // Time complexity: O(1), Space complexity: O(1) getDependencies(vertexHash: Hash): Hash[] { return Array.from(this.vertices.get(vertexHash)?.dependencies || []); @@ -264,3 +261,14 @@ export class HashGraph { return Array.from(this.vertices.values()); } } + +// Time complexity: O(1), Space complexity: O(1) +function computeHash( + nodeId: string, + operation: Operation, + deps: Hash[], +): Hash { + const serialized = JSON.stringify({ operation, deps, nodeId }); + const hash = crypto.createHash("sha256").update(serialized).digest("hex"); + return hash; +} diff --git a/packages/object/src/index.ts b/packages/object/src/index.ts index 34c05aa8..e10b0824 100644 --- a/packages/object/src/index.ts +++ b/packages/object/src/index.ts @@ -1,43 +1,108 @@ import * as crypto from "node:crypto"; -import type { TopologyObject } from "./proto/object_pb.js"; +import { + type ActionType, + HashGraph, + type Operation, + type Vertex, +} from "./hashgraph.js"; +import type { TopologyObjectBase } from "./proto/object_pb.js"; import { compileWasm } from "./wasm/compiler.js"; export * from "./proto/object_pb.js"; export * from "./hashgraph.js"; +export interface CRO { + 
resolveConflicts: (vertices: Vertex[]) => ActionType; + mergeCallback: (operations: Operation[]) => void; +} + +export interface TopologyObject extends TopologyObjectBase { + cro: ProxyHandler> | null; + hashGraph: HashGraph; +} + /* Creates a new TopologyObject */ -export async function newTopologyObject( - peerId: string, +export async function newTopologyObject( + nodeId: string, + cro: CRO, path?: string, id?: string, abi?: string, -): Promise { +): Promise> { // const bytecode = await compileWasm(path); const bytecode = new Uint8Array(); - return { + const obj: TopologyObject = { id: id ?? crypto .createHash("sha256") .update(abi ?? "") - .update(peerId) + .update(nodeId) .update(Math.floor(Math.random() * Number.MAX_VALUE).toString()) .digest("hex"), abi: abi ?? "", bytecode: bytecode ?? new Uint8Array(), - operations: [], + vertices: [], + cro: null, + hashGraph: new HashGraph(nodeId, cro.resolveConflicts), + }; + obj.cro = new Proxy(cro, proxyCROHandler(obj)); + return obj; +} + +// This function is black magic, it allows us to intercept calls to the CRO object +function proxyCROHandler(obj: TopologyObject): ProxyHandler { + return { + get(target, propKey, receiver) { + if (typeof target[propKey as keyof object] === "function") { + return new Proxy(target[propKey as keyof object], { + apply(applyTarget, thisArg, args) { + if ((thisArg.operations as string[]).includes(propKey as string)) + callFn( + obj, + propKey as string, + (args.length === 1 ? args[0] : args) as T, + ); + return Reflect.apply(applyTarget, thisArg, args); + }, + }); + } + return Reflect.get(target, propKey, receiver); + }, }; } -export async function callFn( - obj: TopologyObject, +export async function callFn( + obj: TopologyObject, fn: string, - args: string[], -): Promise { - obj.operations.push({ - nonce: Math.floor(Math.random() * Number.MAX_VALUE).toString(), - fn: fn, - args: args, - }); + args: T, +): Promise> { + obj.hashGraph.addToFrontier({ type: fn, value: args }); + return obj; } + +export async function merge(obj: TopologyObject, vertices: Vertex[]) { + for (const vertex of vertices) { + obj.hashGraph.addVertex( + vertex.operation, + vertex.dependencies, + vertex.nodeId, + ); + } + + const operations = obj.hashGraph.linearizeOperations(); + // TODO remove this in favor of RIBLT + obj.vertices = obj.hashGraph.getAllVertices().map((vertex) => { + return { + hash: vertex.hash, + nodeId: vertex.nodeId, + operation: { + type: vertex.operation.type, + value: vertex.operation.value as string, + }, + dependencies: vertex.dependencies, + }; + }); + (obj.cro as CRO).mergeCallback(operations); +} diff --git a/packages/object/src/proto/object.proto b/packages/object/src/proto/object.proto index 08beca0d..f234574d 100644 --- a/packages/object/src/proto/object.proto +++ b/packages/object/src/proto/object.proto @@ -1,15 +1,21 @@ syntax = "proto3"; package topology.object; -message TopologyObject { - message Operation { - string nonce = 1; - optional string fn = 2; - repeated string args = 3; +message TopologyObjectBase { + // Supposed to be the RIBLT stuff + message Vertex { + message Operation { + string type = 1; + string value = 2; + } + string hash = 1; + string nodeId = 2; + Operation operation = 3; + repeated string dependencies = 4; }; string id = 1; optional string abi = 2; optional bytes bytecode = 3; - repeated Operation operations = 4; + repeated Vertex vertices = 4; } diff --git a/packages/object/src/proto/object_pb.ts b/packages/object/src/proto/object_pb.ts index d4475700..31165a5d 100644 --- 
a/packages/object/src/proto/object_pb.ts +++ b/packages/object/src/proto/object_pb.ts @@ -1,304 +1,363 @@ // Code generated by protoc-gen-ts_proto. DO NOT EDIT. // versions: -// protoc-gen-ts_proto v1.181.1 +// protoc-gen-ts_proto v2.0.3 // protoc unknown -// source: object/src/proto/object.proto +// source: proto/object.proto /* eslint-disable */ -import _m0 from "protobufjs/minimal.js"; +import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire"; export const protobufPackage = "topology.object"; -export interface TopologyObject { - id: string; - abi?: string | undefined; - bytecode?: Uint8Array | undefined; - operations: TopologyObject_Operation[]; +export interface TopologyObjectBase { + id: string; + abi?: string | undefined; + bytecode?: Uint8Array | undefined; + vertices: TopologyObjectBase_Vertex[]; } -export interface TopologyObject_Operation { - nonce: string; - fn?: string | undefined; - args: string[]; +/** Supposed to be the RIBLT stuff */ +export interface TopologyObjectBase_Vertex { + hash: string; + nodeId: string; + operation: TopologyObjectBase_Vertex_Operation | undefined; + dependencies: string[]; } -function createBaseTopologyObject(): TopologyObject { - return { id: "", abi: undefined, bytecode: undefined, operations: [] }; +export interface TopologyObjectBase_Vertex_Operation { + type: string; + value: string; } -export const TopologyObject = { - encode( - message: TopologyObject, - writer: _m0.Writer = _m0.Writer.create(), - ): _m0.Writer { - if (message.id !== "") { - writer.uint32(10).string(message.id); - } - if (message.abi !== undefined) { - writer.uint32(18).string(message.abi); - } - if (message.bytecode !== undefined) { - writer.uint32(26).bytes(message.bytecode); - } - for (const v of message.operations) { - TopologyObject_Operation.encode(v!, writer.uint32(34).fork()).ldelim(); - } - return writer; - }, - - decode(input: _m0.Reader | Uint8Array, length?: number): TopologyObject { - const reader = - input instanceof _m0.Reader ? input : _m0.Reader.create(input); - const end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseTopologyObject(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - if (tag !== 10) { - break; - } - - message.id = reader.string(); - continue; - case 2: - if (tag !== 18) { - break; - } - - message.abi = reader.string(); - continue; - case 3: - if (tag !== 26) { - break; - } - - message.bytecode = reader.bytes(); - continue; - case 4: - if (tag !== 34) { - break; - } - - message.operations.push( - TopologyObject_Operation.decode(reader, reader.uint32()), - ); - continue; - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skipType(tag & 7); - } - return message; - }, - - fromJSON(object: any): TopologyObject { - return { - id: isSet(object.id) ? globalThis.String(object.id) : "", - abi: isSet(object.abi) ? globalThis.String(object.abi) : undefined, - bytecode: isSet(object.bytecode) - ? bytesFromBase64(object.bytecode) - : undefined, - operations: globalThis.Array.isArray(object?.operations) - ? 
object.operations.map((e: any) => - TopologyObject_Operation.fromJSON(e), - ) - : [], - }; - }, - - toJSON(message: TopologyObject): unknown { - const obj: any = {}; - if (message.id !== "") { - obj.id = message.id; - } - if (message.abi !== undefined) { - obj.abi = message.abi; - } - if (message.bytecode !== undefined) { - obj.bytecode = base64FromBytes(message.bytecode); - } - if (message.operations?.length) { - obj.operations = message.operations.map((e) => - TopologyObject_Operation.toJSON(e), - ); - } - return obj; - }, - - create, I>>( - base?: I, - ): TopologyObject { - return TopologyObject.fromPartial(base ?? ({} as any)); - }, - fromPartial, I>>( - object: I, - ): TopologyObject { - const message = createBaseTopologyObject(); - message.id = object.id ?? ""; - message.abi = object.abi ?? undefined; - message.bytecode = object.bytecode ?? undefined; - message.operations = - object.operations?.map((e) => TopologyObject_Operation.fromPartial(e)) || - []; - return message; - }, +function createBaseTopologyObjectBase(): TopologyObjectBase { + return { id: "", abi: undefined, bytecode: undefined, vertices: [] }; +} + +export const TopologyObjectBase = { + encode(message: TopologyObjectBase, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.id !== "") { + writer.uint32(10).string(message.id); + } + if (message.abi !== undefined) { + writer.uint32(18).string(message.abi); + } + if (message.bytecode !== undefined) { + writer.uint32(26).bytes(message.bytecode); + } + for (const v of message.vertices) { + TopologyObjectBase_Vertex.encode(v!, writer.uint32(34).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): TopologyObjectBase { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseTopologyObjectBase(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.id = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.abi = reader.string(); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.bytecode = reader.bytes(); + continue; + case 4: + if (tag !== 34) { + break; + } + + message.vertices.push(TopologyObjectBase_Vertex.decode(reader, reader.uint32())); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): TopologyObjectBase { + return { + id: isSet(object.id) ? globalThis.String(object.id) : "", + abi: isSet(object.abi) ? globalThis.String(object.abi) : undefined, + bytecode: isSet(object.bytecode) ? bytesFromBase64(object.bytecode) : undefined, + vertices: globalThis.Array.isArray(object?.vertices) + ? object.vertices.map((e: any) => TopologyObjectBase_Vertex.fromJSON(e)) + : [], + }; + }, + + toJSON(message: TopologyObjectBase): unknown { + const obj: any = {}; + if (message.id !== "") { + obj.id = message.id; + } + if (message.abi !== undefined) { + obj.abi = message.abi; + } + if (message.bytecode !== undefined) { + obj.bytecode = base64FromBytes(message.bytecode); + } + if (message.vertices?.length) { + obj.vertices = message.vertices.map((e) => TopologyObjectBase_Vertex.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): TopologyObjectBase { + return TopologyObjectBase.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>(object: I): TopologyObjectBase { + const message = createBaseTopologyObjectBase(); + message.id = object.id ?? ""; + message.abi = object.abi ?? undefined; + message.bytecode = object.bytecode ?? undefined; + message.vertices = object.vertices?.map((e) => TopologyObjectBase_Vertex.fromPartial(e)) || []; + return message; + }, }; -function createBaseTopologyObject_Operation(): TopologyObject_Operation { - return { nonce: "", fn: undefined, args: [] }; +function createBaseTopologyObjectBase_Vertex(): TopologyObjectBase_Vertex { + return { hash: "", nodeId: "", operation: undefined, dependencies: [] }; } -export const TopologyObject_Operation = { - encode( - message: TopologyObject_Operation, - writer: _m0.Writer = _m0.Writer.create(), - ): _m0.Writer { - if (message.nonce !== "") { - writer.uint32(10).string(message.nonce); - } - if (message.fn !== undefined) { - writer.uint32(18).string(message.fn); - } - for (const v of message.args) { - writer.uint32(26).string(v!); - } - return writer; - }, - - decode( - input: _m0.Reader | Uint8Array, - length?: number, - ): TopologyObject_Operation { - const reader = - input instanceof _m0.Reader ? input : _m0.Reader.create(input); - const end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseTopologyObject_Operation(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - if (tag !== 10) { - break; - } - - message.nonce = reader.string(); - continue; - case 2: - if (tag !== 18) { - break; - } - - message.fn = reader.string(); - continue; - case 3: - if (tag !== 26) { - break; - } - - message.args.push(reader.string()); - continue; - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skipType(tag & 7); - } - return message; - }, - - fromJSON(object: any): TopologyObject_Operation { - return { - nonce: isSet(object.nonce) ? globalThis.String(object.nonce) : "", - fn: isSet(object.fn) ? globalThis.String(object.fn) : undefined, - args: globalThis.Array.isArray(object?.args) - ? object.args.map((e: any) => globalThis.String(e)) - : [], - }; - }, - - toJSON(message: TopologyObject_Operation): unknown { - const obj: any = {}; - if (message.nonce !== "") { - obj.nonce = message.nonce; - } - if (message.fn !== undefined) { - obj.fn = message.fn; - } - if (message.args?.length) { - obj.args = message.args; - } - return obj; - }, - - create, I>>( - base?: I, - ): TopologyObject_Operation { - return TopologyObject_Operation.fromPartial(base ?? ({} as any)); - }, - fromPartial, I>>( - object: I, - ): TopologyObject_Operation { - const message = createBaseTopologyObject_Operation(); - message.nonce = object.nonce ?? ""; - message.fn = object.fn ?? undefined; - message.args = object.args?.map((e) => e) || []; - return message; - }, +export const TopologyObjectBase_Vertex = { + encode(message: TopologyObjectBase_Vertex, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.hash !== "") { + writer.uint32(10).string(message.hash); + } + if (message.nodeId !== "") { + writer.uint32(18).string(message.nodeId); + } + if (message.operation !== undefined) { + TopologyObjectBase_Vertex_Operation.encode(message.operation, writer.uint32(26).fork()).join(); + } + for (const v of message.dependencies) { + writer.uint32(34).string(v!); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): TopologyObjectBase_Vertex { + const reader = input instanceof BinaryReader ? 
input : new BinaryReader(input);
+    let end = length === undefined ? reader.len : reader.pos + length;
+    const message = createBaseTopologyObjectBase_Vertex();
+    while (reader.pos < end) {
+      const tag = reader.uint32();
+      switch (tag >>> 3) {
+        case 1:
+          if (tag !== 10) {
+            break;
+          }
+
+          message.hash = reader.string();
+          continue;
+        case 2:
+          if (tag !== 18) {
+            break;
+          }
+
+          message.nodeId = reader.string();
+          continue;
+        case 3:
+          if (tag !== 26) {
+            break;
+          }
+
+          message.operation = TopologyObjectBase_Vertex_Operation.decode(reader, reader.uint32());
+          continue;
+        case 4:
+          if (tag !== 34) {
+            break;
+          }
+
+          message.dependencies.push(reader.string());
+          continue;
+      }
+      if ((tag & 7) === 4 || tag === 0) {
+        break;
+      }
+      reader.skip(tag & 7);
+    }
+    return message;
+  },
+
+  fromJSON(object: any): TopologyObjectBase_Vertex {
+    return {
+      hash: isSet(object.hash) ? globalThis.String(object.hash) : "",
+      nodeId: isSet(object.nodeId) ? globalThis.String(object.nodeId) : "",
+      operation: isSet(object.operation) ? TopologyObjectBase_Vertex_Operation.fromJSON(object.operation) : undefined,
+      dependencies: globalThis.Array.isArray(object?.dependencies)
+        ? object.dependencies.map((e: any) => globalThis.String(e))
+        : [],
+    };
+  },
+
+  toJSON(message: TopologyObjectBase_Vertex): unknown {
+    const obj: any = {};
+    if (message.hash !== "") {
+      obj.hash = message.hash;
+    }
+    if (message.nodeId !== "") {
+      obj.nodeId = message.nodeId;
+    }
+    if (message.operation !== undefined) {
+      obj.operation = TopologyObjectBase_Vertex_Operation.toJSON(message.operation);
+    }
+    if (message.dependencies?.length) {
+      obj.dependencies = message.dependencies;
+    }
+    return obj;
+  },
+
+  create<I extends Exact<DeepPartial<TopologyObjectBase_Vertex>, I>>(base?: I): TopologyObjectBase_Vertex {
+    return TopologyObjectBase_Vertex.fromPartial(base ?? ({} as any));
+  },
+  fromPartial<I extends Exact<DeepPartial<TopologyObjectBase_Vertex>, I>>(object: I): TopologyObjectBase_Vertex {
+    const message = createBaseTopologyObjectBase_Vertex();
+    message.hash = object.hash ?? "";
+    message.nodeId = object.nodeId ?? "";
+    message.operation = (object.operation !== undefined && object.operation !== null)
+      ? TopologyObjectBase_Vertex_Operation.fromPartial(object.operation)
+      : undefined;
+    message.dependencies = object.dependencies?.map((e) => e) || [];
+    return message;
+  },
+};
+
+function createBaseTopologyObjectBase_Vertex_Operation(): TopologyObjectBase_Vertex_Operation {
+  return { type: "", value: "" };
+}
+
+export const TopologyObjectBase_Vertex_Operation = {
+  encode(message: TopologyObjectBase_Vertex_Operation, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+    if (message.type !== "") {
+      writer.uint32(10).string(message.type);
+    }
+    if (message.value !== "") {
+      writer.uint32(18).string(message.value);
+    }
+    return writer;
+  },
+
+  decode(input: BinaryReader | Uint8Array, length?: number): TopologyObjectBase_Vertex_Operation {
+    const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+    let end = length === undefined ? reader.len : reader.pos + length;
+    const message = createBaseTopologyObjectBase_Vertex_Operation();
+    while (reader.pos < end) {
+      const tag = reader.uint32();
+      switch (tag >>> 3) {
+        case 1:
+          if (tag !== 10) {
+            break;
+          }
+
+          message.type = reader.string();
+          continue;
+        case 2:
+          if (tag !== 18) {
+            break;
+          }
+
+          message.value = reader.string();
+          continue;
+      }
+      if ((tag & 7) === 4 || tag === 0) {
+        break;
+      }
+      reader.skip(tag & 7);
+    }
+    return message;
+  },
+
+  fromJSON(object: any): TopologyObjectBase_Vertex_Operation {
+    return {
+      type: isSet(object.type) ? 
globalThis.String(object.type) : "",
+      value: isSet(object.value) ? globalThis.String(object.value) : "",
+    };
+  },
+
+  toJSON(message: TopologyObjectBase_Vertex_Operation): unknown {
+    const obj: any = {};
+    if (message.type !== "") {
+      obj.type = message.type;
+    }
+    if (message.value !== "") {
+      obj.value = message.value;
+    }
+    return obj;
+  },
+
+  create<I extends Exact<DeepPartial<TopologyObjectBase_Vertex_Operation>, I>>(
+    base?: I,
+  ): TopologyObjectBase_Vertex_Operation {
+    return TopologyObjectBase_Vertex_Operation.fromPartial(base ?? ({} as any));
+  },
+  fromPartial<I extends Exact<DeepPartial<TopologyObjectBase_Vertex_Operation>, I>>(
+    object: I,
+  ): TopologyObjectBase_Vertex_Operation {
+    const message = createBaseTopologyObjectBase_Vertex_Operation();
+    message.type = object.type ?? "";
+    message.value = object.value ?? "";
+    return message;
+  },
+};
 
 function bytesFromBase64(b64: string): Uint8Array {
-	if ((globalThis as any).Buffer) {
-		return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
-	} else {
-		const bin = globalThis.atob(b64);
-		const arr = new Uint8Array(bin.length);
-		for (let i = 0; i < bin.length; ++i) {
-			arr[i] = bin.charCodeAt(i);
-		}
-		return arr;
-	}
+  if ((globalThis as any).Buffer) {
+    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+  } else {
+    const bin = globalThis.atob(b64);
+    const arr = new Uint8Array(bin.length);
+    for (let i = 0; i < bin.length; ++i) {
+      arr[i] = bin.charCodeAt(i);
+    }
+    return arr;
+  }
 }
 
 function base64FromBytes(arr: Uint8Array): string {
-	if ((globalThis as any).Buffer) {
-		return globalThis.Buffer.from(arr).toString("base64");
-	} else {
-		const bin: string[] = [];
-		arr.forEach((byte) => {
-			bin.push(globalThis.String.fromCharCode(byte));
-		});
-		return globalThis.btoa(bin.join(""));
-	}
+  if ((globalThis as any).Buffer) {
+    return globalThis.Buffer.from(arr).toString("base64");
+  } else {
+    const bin: string[] = [];
+    arr.forEach((byte) => {
+      bin.push(globalThis.String.fromCharCode(byte));
+    });
+    return globalThis.btoa(bin.join(""));
+  }
 }
 
-type Builtin =
-	| Date
-	| Function
-	| Uint8Array
-	| string
-	| number
-	| boolean
-	| undefined;
-
-export type DeepPartial<T> = T extends Builtin
-	? T
-	: T extends globalThis.Array<infer U>
-		? globalThis.Array<DeepPartial<U>>
-		: T extends ReadonlyArray<infer U>
-			? ReadonlyArray<DeepPartial<U>>
-			: T extends {}
-				? { [K in keyof T]?: DeepPartial<T[K]> }
-				: Partial<T>;
+type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined;
+
+export type DeepPartial<T> = T extends Builtin ? T
+  : T extends globalThis.Array<infer U> ? globalThis.Array<DeepPartial<U>>
+  : T extends ReadonlyArray<infer U> ? ReadonlyArray<DeepPartial<U>>
+  : T extends {} ? { [K in keyof T]?: DeepPartial<T[K]> }
+  : Partial<T>;
 
 type KeysOfUnion<T> = T extends T ? keyof T : never;
-export type Exact<P, I extends P> = P extends Builtin
-	? P
-	: P & { [K in keyof P]: Exact<P[K], I[K]> } & {
-			[K in Exclude<keyof I, KeysOfUnion<P>>]: never;
-		};
+export type Exact<P, I extends P> = P extends Builtin ? 
P
+  : P & { [K in keyof P]: Exact<P[K], I[K]> } & { [K in Exclude<keyof I, KeysOfUnion<P>>]: never };
 
 function isSet(value: any): boolean {
-	return value !== null && value !== undefined;
+  return value !== null && value !== undefined;
 }
diff --git a/packages/object/tests/hashgraph.test.ts b/packages/object/tests/hashgraph.test.ts
new file mode 100644
index 00000000..e82bb2d7
--- /dev/null
+++ b/packages/object/tests/hashgraph.test.ts
@@ -0,0 +1,302 @@
+import { beforeEach, describe, expect, test } from "vitest";
+import { AddWinsSet } from "../../crdt/src/cros/AddWinsSet/index.js";
+import { type TopologyObject, merge, newTopologyObject } from "../src/index.js";
+
+describe("HashGraph for AddWinSet tests", () => {
+	let obj1: TopologyObject;
+	let obj2: TopologyObject;
+	let obj3: TopologyObject;
+
+	beforeEach(async () => {
+		obj1 = await newTopologyObject("peer1", new AddWinsSet<number>());
+		obj2 = await newTopologyObject("peer2", new AddWinsSet<number>());
+		obj3 = await newTopologyObject("peer3", new AddWinsSet<number>());
+	});
+
+	test("Test: Add Two Vertices", () => {
+		/*
+		      V1:NOP <- V2:ADD(1) <- V2:REMOVE(1)
+		*/
+
+		const cro1 = obj1.cro as AddWinsSet<number>;
+		cro1.add(1);
+		cro1.remove(1);
+		expect(cro1.contains(1)).toBe(false);
+
+		const linearOps = obj1.hashGraph.linearizeOperations();
+		expect(linearOps).toEqual([
+			{ type: "add", value: 1 },
+			{ type: "remove", value: 1 },
+		]);
+	});
+
+	test("Test: Add Two Concurrent Vertices With Same Value", () => {
+		/*
+		                      _ V2:REMOVE(1)
+		      V1:ADD(1) /
+		                      \ _ V3:ADD(1)
+		*/
+
+		const cro1 = obj1.cro as AddWinsSet<number>;
+		const cro2 = obj2.cro as AddWinsSet<number>;
+
+		cro1.add(1);
+		merge(obj2, obj1.hashGraph.getAllVertices());
+
+		cro1.remove(1);
+		cro2.add(1);
+		merge(obj1, obj2.hashGraph.getAllVertices());
+		merge(obj2, obj1.hashGraph.getAllVertices());
+
+		expect(cro1.contains(1)).toBe(true);
+		expect(obj1.hashGraph.vertices).toEqual(obj2.hashGraph.vertices);
+
+		const linearOps = obj1.hashGraph.linearizeOperations();
+		expect(linearOps).toEqual([
+			{ type: "add", value: 1 },
+			{ type: "add", value: 1 },
+		]);
+	});
+
+	test("Test: Add Two Concurrent Vertices With Different Values", () => {
+		/*
+		                      _ V2:REMOVE(1)
+		      V1:ADD(1) /
+		                      \ _ V3:ADD(2)
+		*/
+
+		const cro1 = obj1.cro as AddWinsSet<number>;
+		const cro2 = obj2.cro as AddWinsSet<number>;
+
+		cro1.add(1);
+		merge(obj2, obj1.hashGraph.getAllVertices());
+
+		cro1.remove(1);
+		cro2.add(2);
+		merge(obj1, obj2.hashGraph.getAllVertices());
+		merge(obj2, obj1.hashGraph.getAllVertices());
+
+		expect(cro1.contains(1)).toBe(false);
+		expect(cro1.contains(2)).toBe(true);
+		expect(obj1.hashGraph.vertices).toEqual(obj2.hashGraph.vertices);
+
+		const linearOps = obj1.hashGraph.linearizeOperations();
+		expect(linearOps).toEqual([
+			{ type: "add", value: 1 },
+			{ type: "add", value: 2 },
+			{ type: "remove", value: 1 },
+		]);
+	});
+
+	test("Test: Tricky Case", () => {
+		/*
+		                      ___ V2:REMOVE(1) <- V4:ADD(10)
+		      V1:ADD(1) /
+		                      \ ___ V3:ADD(1) <- V5:REMOVE(5)
+		*/
+
+		const cro1 = obj1.cro as AddWinsSet<number>;
+		const cro2 = obj2.cro as AddWinsSet<number>;
+
+		cro1.add(1);
+		merge(obj2, obj1.hashGraph.getAllVertices());
+
+		cro1.remove(1);
+		cro2.add(1);
+		cro1.add(10);
+		cro2.remove(5);
+		merge(obj1, obj2.hashGraph.getAllVertices());
+		merge(obj2, obj1.hashGraph.getAllVertices());
+
+		expect(cro1.contains(1)).toBe(true);
+		expect(cro1.contains(10)).toBe(true);
+		expect(cro1.contains(5)).toBe(false);
+		expect(obj1.hashGraph.vertices).toEqual(obj2.hashGraph.vertices);
+
+		const linearOps = obj1.hashGraph.linearizeOperations();
+		expect(linearOps).toEqual([
+			{ type: "add", value: 1 },
+			{ type: "add", value: 1 
},
+			{ type: "remove", value: 5 },
+			{ type: "add", value: 10 },
+		]);
+	});
+
+	test("Test: Yuta Papa's Case", () => {
+		/*
+		                      ___ V2:REMOVE(1) <- V4:ADD(2)
+		      V1:ADD(1) /
+		                      \ ___ V3:REMOVE(2) <- V5:ADD(1)
+		*/
+
+		const cro1 = obj1.cro as AddWinsSet<number>;
+		const cro2 = obj2.cro as AddWinsSet<number>;
+
+		cro1.add(1);
+		merge(obj2, obj1.hashGraph.getAllVertices());
+
+		cro1.remove(1);
+		cro2.remove(2);
+		cro1.add(2);
+		cro2.add(1);
+		merge(obj1, obj2.hashGraph.getAllVertices());
+		merge(obj2, obj1.hashGraph.getAllVertices());
+
+		expect(cro1.contains(1)).toBe(true);
+		expect(cro1.contains(2)).toBe(true);
+		expect(obj1.hashGraph.vertices).toEqual(obj2.hashGraph.vertices);
+
+		const linearOps = obj1.hashGraph.linearizeOperations();
+		expect(linearOps).toEqual([
+			{ type: "add", value: 1 },
+			{ type: "add", value: 1 },
+			{ type: "add", value: 2 },
+		]);
+	});
+
+	test("Test: Mega Complex Case", () => {
+		/*
+		                                                __ V6:ADD(3)
+		                                               /
+		              ___ V2:ADD(1) <-- V3:RM(2) <-- V7:RM(1) <-- V8:RM(3)
+		            /                 ______________/
+		  V1:ADD(1)/                 /
+		            \               /
+		              ___ V4:RM(2) <-- V5:ADD(2) <-- V9:RM(1)
+		*/
+
+		const cro1 = obj1.cro as AddWinsSet<number>;
+		const cro2 = obj2.cro as AddWinsSet<number>;
+		const cro3 = obj3.cro as AddWinsSet<number>;
+
+		cro1.add(1);
+		merge(obj2, obj1.hashGraph.getAllVertices());
+
+		cro1.add(1);
+		cro1.remove(2);
+		cro2.remove(2);
+		cro2.add(2);
+
+		merge(obj3, obj1.hashGraph.getAllVertices());
+		cro3.add(3);
+		cro1.remove(1);
+
+		merge(obj1, obj2.hashGraph.getAllVertices());
+		cro1.remove(3);
+		cro2.remove(1);
+
+		merge(obj1, obj2.hashGraph.getAllVertices());
+		merge(obj1, obj3.hashGraph.getAllVertices());
+		merge(obj2, obj1.hashGraph.getAllVertices());
+		merge(obj2, obj3.hashGraph.getAllVertices());
+		merge(obj3, obj1.hashGraph.getAllVertices());
+		merge(obj3, obj2.hashGraph.getAllVertices());
+
+		expect(cro1.contains(1)).toBe(false);
+		expect(cro1.contains(2)).toBe(true);
+		expect(cro1.contains(3)).toBe(true);
+		expect(obj1.hashGraph.vertices).toEqual(obj2.hashGraph.vertices);
+		expect(obj1.hashGraph.vertices).toEqual(obj3.hashGraph.vertices);
+
+		const linearOps = obj1.hashGraph.linearizeOperations();
+		expect(linearOps).toEqual([
+			{ type: "add", value: 1 },
+			{ type: "remove", value: 2 },
+			{ type: "add", value: 2 },
+			{ type: "add", value: 1 },
+			{ type: "add", value: 3 },
+			{ type: "remove", value: 1 },
+		]);
+	});
+
+	test("Test: Mega Complex Case 1", () => {
+		/*
+		                                                __ V5:ADD(3)
+		                                               /
+		              ___ V2:ADD(1) <-- V3:RM(2) <-- V6:RM(1) <-- V8:RM(3)
+		            /                                    ^
+		  V1:ADD(1)/                                      \
+		            \                                      \
+		              ___ V4:RM(2) <-------------------- V7:ADD(2) <-- V9:RM(1)
+		*/
+
+		const cro1 = obj1.cro as AddWinsSet<number>;
+		const cro2 = obj2.cro as AddWinsSet<number>;
+		const cro3 = obj3.cro as AddWinsSet<number>;
+
+		cro1.add(1);
+		merge(obj2, obj1.hashGraph.getAllVertices());
+
+		cro1.add(1);
+		cro1.remove(2);
+		cro2.remove(2);
+
+		merge(obj3, obj1.hashGraph.getAllVertices());
+		cro3.add(3);
+		cro1.remove(1);
+
+		merge(obj2, obj1.hashGraph.getAllVertices());
+		cro2.add(2);
+		cro1.remove(3);
+		cro2.remove(1);
+
+		merge(obj1, obj2.hashGraph.getAllVertices());
+		merge(obj1, obj3.hashGraph.getAllVertices());
+		merge(obj2, obj1.hashGraph.getAllVertices());
+		merge(obj2, obj3.hashGraph.getAllVertices());
+		merge(obj3, obj1.hashGraph.getAllVertices());
+		merge(obj3, obj2.hashGraph.getAllVertices());
+
+		expect(cro1.contains(1)).toBe(false);
+		expect(cro1.contains(2)).toBe(true);
+		expect(cro1.contains(3)).toBe(true);
+		expect(obj1.hashGraph.vertices).toEqual(obj2.hashGraph.vertices);
+		expect(obj1.hashGraph.vertices).toEqual(obj3.hashGraph.vertices);
+
+		const linearOps = 
obj1.hashGraph.linearizeOperations();
+		expect(linearOps).toEqual([
+			{ type: "add", value: 1 },
+			{ type: "remove", value: 2 },
+			{ type: "add", value: 1 },
+			{ type: "remove", value: 2 },
+			{ type: "add", value: 3 },
+			{ type: "remove", value: 1 },
+			{ type: "add", value: 2 },
+			{ type: "remove", value: 1 },
+		]);
+	});
+
+	test("Test: Joao's latest brain teaser", () => {
+		/*
+		                      __ V2:Add(2) <------------\
+		      V1:Add(1) /                                 \ - V5:RM(2)
+		                      \__ V3:RM(2) <- V4:RM(2) <--/
+		*/
+
+		const cro1 = obj1.cro as AddWinsSet<number>;
+		const cro2 = obj2.cro as AddWinsSet<number>;
+
+		cro1.add(1);
+		merge(obj2, obj1.hashGraph.getAllVertices());
+
+		cro1.add(2);
+		cro2.remove(2);
+		cro2.remove(2);
+		merge(obj1, obj2.hashGraph.getAllVertices());
+		merge(obj2, obj1.hashGraph.getAllVertices());
+
+		cro1.remove(2);
+		merge(obj2, obj1.hashGraph.getAllVertices());
+
+		expect(cro1.contains(1)).toBe(true);
+		expect(cro1.contains(2)).toBe(false);
+		expect(obj1.hashGraph.vertices).toEqual(obj2.hashGraph.vertices);
+
+		const linearOps = obj1.hashGraph.linearizeOperations();
+		expect(linearOps).toEqual([
+			{ type: "add", value: 1 },
+			{ type: "add", value: 2 },
+			{ type: "remove", value: 2 },
+		]);
+	});
+});
diff --git a/packages/object/tests/index.test.ts b/packages/object/tests/index.test.ts
deleted file mode 100644
index 59f99d4c..00000000
--- a/packages/object/tests/index.test.ts
+++ /dev/null
@@ -1,3 +0,0 @@
-import { test } from "vitest";
-
-test("mock", () => {});
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 62720fb1..292d2366 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -23,6 +23,9 @@ importers:
       release-it:
         specifier: ^17.6.0
         version: 17.6.0(typescript@5.5.4)
+      ts-proto:
+        specifier: ^2.0.3
+        version: 2.0.3
       typedoc:
         specifier: ^0.26.6
         version: 0.26.6(typescript@5.5.4)
@@ -151,6 +154,9 @@ importers:
 
   packages/network:
     dependencies:
+      '@bufbuild/protobuf':
+        specifier: ^2.0.0
+        version: 2.0.0
       '@chainsafe/libp2p-gossipsub':
         specifier: ^13.1.0
         version: 13.2.0
@@ -217,9 +223,6 @@ importers:
       libp2p:
         specifier: ^1.8.3
         version: 1.9.2
-      protobufjs:
-        specifier: ^7.4.0
-        version: 7.4.0
       ts-proto:
         specifier: ^2.0.3
         version: 2.0.3
@@ -273,9 +276,9 @@ importers:
 
   packages/object:
     dependencies:
-      protobufjs:
-        specifier: ^7.4.0
-        version: 7.4.0
+      '@bufbuild/protobuf':
+        specifier: ^2.0.0
+        version: 2.0.0
       ts-proto:
         specifier: ^2.0.3
         version: 2.0.3
diff --git a/vite.config.mts b/vite.config.mts
index 1d6ee24c..50bab196 100644
--- a/vite.config.mts
+++ b/vite.config.mts
@@ -4,6 +4,6 @@ import tsconfigPaths from "vite-tsconfig-paths";
 export default defineConfig({
 	plugins: [tsconfigPaths()],
 	test: {
-		exclude: ["**/node_modules", "**/AddWinsSet.test.ts"],
+		exclude: ["**/node_modules"],
 	},
 });