diff --git a/.github/actions/env/action.yml b/.github/actions/env/action.yml index 6cb2e5837..e32a8d642 100644 --- a/.github/actions/env/action.yml +++ b/.github/actions/env/action.yml @@ -7,11 +7,6 @@ inputs: runs: using: composite steps: - - uses: earthly/actions-setup@v1 - with: - github-token: ${{ inputs.token }} - version: "latest" - use-cache: true - name: Set up QEMU uses: docker/setup-qemu-action@v3 - name: Set up Docker Buildx diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml new file mode 100644 index 000000000..2b9a98ab3 --- /dev/null +++ b/.github/workflows/benchmark.yml @@ -0,0 +1,44 @@ +name: Benchmark +on: + workflow_dispatch: + pull_request: + types: [ assigned, opened, synchronize, reopened, labeled ] + +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +jobs: + Benchmark: + runs-on: "github-001" + if: contains(github.event.pull_request.labels.*.name, 'benchmarks') || github.ref == 'refs/heads/main' + steps: + - uses: 'actions/checkout@v4' + with: + fetch-depth: 0 + - run: go build -o /tmp/ledger ./ + - run: echo "running actions as ${USER}" + - run: > + /tmp/ledger serve + --postgres-uri=postgres://formance:formance@127.0.0.1/ledger + --postgres-conn-max-idle-time=120s + --postgres-max-open-conns=500 + --postgres-max-idle-conns=100 + --experimental-features + --otel-metrics-keep-in-memory & + - run: > + earthly + --allow-privileged + ${{ contains(github.event.pull_request.labels.*.name, 'no-cache') && '--no-cache' || '' }} + ./test/performance+run --args="-benchtime 10s --ledger.url=http://localhost:3068 --parallelism=5" + - run: > + earthly + --allow-privileged + ${{ contains(github.event.pull_request.labels.*.name, 'no-cache') && '--no-cache' || '' }} + ./test/performance+generate-graphs + - run: kill -9 $(ps aux | grep "ledger serve"| grep -v "grep" | awk '{print $2}') + if: always() + - uses: actions/upload-artifact@v4 + with: + name: graphs + path: test/performance/report \ No newline at end of file diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 94af18a82..367248788 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -25,7 +25,7 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} Dirty: - runs-on: "ubuntu-latest" + runs-on: "formance-runner" steps: - uses: 'actions/checkout@v4' with: @@ -50,13 +50,12 @@ jobs: if (( $(echo ${#hasChanged}) != 0 )); then git status echo "There are changes in the repository" + git diff exit 1 fi Tests: - runs-on: "ubuntu-latest" - needs: - - Dirty + runs-on: "formance-runner" steps: - uses: 'actions/checkout@v4' with: @@ -67,16 +66,19 @@ jobs: token: ${{ secrets.NUMARY_GITHUB_TOKEN }} - run: > earthly - --no-output --allow-privileged --secret SPEAKEASY_API_KEY=$SPEAKEASY_API_KEY ${{ contains(github.event.pull_request.labels.*.name, 'no-cache') && '--no-cache' || '' }} - +tests + +tests --coverage=true env: SPEAKEASY_API_KEY: ${{ secrets.SPEAKEASY_API_KEY }} + - name: Upload coverage reports to Codecov with GitHub Action + uses: codecov/codecov-action@v4.2.0 + env: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} GoReleaser: - runs-on: "ubuntu-latest" + runs-on: "formance-runner" if: contains(github.event.pull_request.labels.*.name, 'build-images') || github.ref == 'refs/heads/main' || github.event_name == 'merge_group' needs: - Dirty @@ -115,7 +117,7 @@ jobs: GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }} Deploy: - runs-on: "ubuntu-latest" + runs-on: "formance-runner" if: github.ref == 
'refs/heads/main' environment: staging needs: diff --git a/.github/workflows/releases.yml b/.github/workflows/releases.yml index b6dc7fbe8..1bd3d9646 100644 --- a/.github/workflows/releases.yml +++ b/.github/workflows/releases.yml @@ -13,6 +13,11 @@ jobs: - uses: 'actions/checkout@v4' with: fetch-depth: 0 + - uses: earthly/actions-setup@v1 + with: + github-token: ${{ inputs.token }} + version: "latest" + use-cache: true - name: Setup Env uses: ./.github/actions/env with: diff --git a/.gitignore b/.gitignore index ae122c5da..d5f8533f6 100644 --- a/.gitignore +++ b/.gitignore @@ -1,18 +1,7 @@ -coverage* -/dist/ -cmd/control/* -!cmd/control/gitkeep -.DS_Store -.idea +cover.out +go.work* +*.jar +node_modules +dist vendor -sdk/swagger.yaml -sdk/swagger.yaml-e -sdk/sdks -.vscode -.env -sqlstorage.test -ledger.test -antlr-*-complete.jar -go.work -go.work.sum -benchs +worktrees \ No newline at end of file diff --git a/.goreleaser.yml b/.goreleaser.yml index 5d4b398ed..4677dffaf 100644 --- a/.goreleaser.yml +++ b/.goreleaser.yml @@ -18,9 +18,27 @@ builds: - CGO_ENABLED=0 goos: - linux + - darwin goarch: - amd64 - arm64 + - binary: benchmarks + id: benchmarks + ldflags: + - -extldflags "-static" + env: + - CGO_ENABLED=0 + goos: + - linux + - darwin + goarch: + - amd64 + - arm64 + command: test + dir: test/performance + no_main_check: true + tags: + - it release: prerelease: auto @@ -33,6 +51,7 @@ archives: - id: "{{.ProjectName}}" builds: - ledger + - benchmarks format: tar.gz name_template: "{{.ProjectName}}_{{.Os}}-{{.Arch}}" @@ -47,11 +66,22 @@ nfpms: formats: - deb - rpm + - id: benchmarks + package_name: ledger-benchmarks + file_name_template: "{{ .ProjectName }}_benchmarks_{{ .Version }}_{{ .Os }}_{{ .Arch }}" + builds: + - benchmarks + homepage: https://formance.com + maintainer: Maxence Maireaux + formats: + - deb + - rpm publishers: - name: fury.io ids: - ledger + - benchmarks dir: "{{ dir .ArtifactPath }}" cmd: curl --http1.1 -F package=@{{ .ArtifactName }} https://{{ .Env.FURY_TOKEN }}@push.fury.io/ledger/ @@ -75,3 +105,20 @@ brews: system "#{bin}/ledger version" install: | bin.install "ledger" + - name: ledger-benchmarks + alternative_names: + - ledger-benchmarks@{{ .Major }} + repository: + owner: formancehq + name: homebrew-tap + pull_request: + enabled: true + base: + owner: formancehq + name: homebrew-tap + branch: main + directory: Formula + homepage: https://formance.com + skip_upload: 'false' + install: | + bin.install "ledger-benchmarks" diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 000000000..b73b54152 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,454 @@ +# Contributing + +The project use [Go standard layout](https://github.com/golang-standards/project-layout), please respect it. +Also, the project use [conventional commit](https://www.conventionalcommits.org/en/v1.0.0/) style for commits. + +Development commands involve the usage of [Earthly](https://earthly.dev/) as well as docker for end-to-end testing. 
+ +## Getting started + +You can test the ledger using the provided docker-compose: +```shell +$ docker compose up -d +``` + +Then create a first transaction (output truncated for brievity): +```shell +$ curl -X POST http://localhost:3068/quickstart/transactions -d '{"postings": [{"source": "world", "destination": "bank", "amount": 100, "asset": "USD"}]}' | jq +{ + "data": [ + { + "postings": [ + { + "source": "world", + "destination": "bank", + "amount": 100, + "asset": "USD" + } + ], + "metadata": {}, + "timestamp": "2024-10-03T08:10:37.109371Z", + "insertedAt": "2024-10-03T08:10:37.109371Z", + "id": 1, + ...wiudd + } + ] +} +``` + +List transactions: +``` +$ curl -X GET http://localhost:3068/quickstart/transactions +``` + +With those commands, we have created a logical ledger named `quickstart` and a transaction on it. +As a program, the ledger can handle any number of logical ledger. + +> [!NOTE] +> In the documentation, you will read the term `ledger` everywhere. When we speak of a `ledger`, we are speaking about a ***logical ledger*** + +The ledger has been automatically created because the commands use the v1 api (it's legacy). + +> [!WARNING] +> Be careful while using the v1 api, if you mess in the ledger name in the url, a ledger will be automatically created + +Actually, the ledger has a v2 api, used by prefixing urls with `/v2`. +And, on this version, ledgers has to be created manually. + +You can create a new ledger this way : +```shell +$ curl -X POST http://localhost:3068/v2/testing +``` + +Check the ledger list : +```shell +$ curl http://localhost:3068/v2 | jq +{ + "cursor": { + "pageSize": 15, + "hasMore": false, + "data": [ + { + "bucket": "quickstart", + "metadata": {}, + "features": { + "ACCOUNT_METADATA_HISTORY": "SYNC", + "HASH_LOGS": "SYNC", + "INDEX_ADDRESS_SEGMENTS": "ON", + "INDEX_TRANSACTION_ACCOUNTS": "ON", + "MOVES_HISTORY": "ON", + "MOVES_HISTORY_POST_COMMIT_EFFECTIVE_VOLUMES": "SYNC", + "TRANSACTION_METADATA_HISTORY": "SYNC" + }, + "id": 1, + "name": "quickstart", + "addedAt": "2024-10-02T12:27:56.750952Z" + }, + { + "bucket": "_default", + "metadata": {}, + "features": { + "ACCOUNT_METADATA_HISTORY": "SYNC", + "HASH_LOGS": "SYNC", + "INDEX_ADDRESS_SEGMENTS": "ON", + "INDEX_TRANSACTION_ACCOUNTS": "ON", + "MOVES_HISTORY": "ON", + "MOVES_HISTORY_POST_COMMIT_EFFECTIVE_VOLUMES": "SYNC", + "TRANSACTION_METADATA_HISTORY": "SYNC" + }, + "id": 2, + "name": "testing", + "addedAt": "2024-10-03T08:03:27.360424Z" + } + ] + } +} +``` + +Then we see our ledgers : +* `quickstart`: created automatically on the v1 api +* `testing`: created manually + +We also see some additional properties: +* `bucket`: [bucket](#buckets) where the ledger is installed +* `features`: [features](#features) configuration of the ledger +* `metadata`: metadata of the ledger + +Additionally, each ledger is created on a bucket, `quickstart` is installed on bucket `quickstart` while `testing` is installed on bucket `_default`. + +> [!IMPORTANT] +> Any new ledger created on the /v2 api will use, by default, the bucket `_default`. +> +> But, automatically created ledgers by v1 api will create a new bucket with the same name as the ledger. That's for compatibility reasons regarding ledger v1 behavior. 
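The same calls can also be scripted. Below is a minimal Go sketch mirroring the curl commands above, using only the standard library (the port comes from the docker-compose setup and the ledger name `testing3` is purely illustrative; for real integrations, prefer the generated SDK in [pkg/client](pkg/client)):

```go
package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	base := "http://localhost:3068"

	// Create a ledger on the v2 API; an empty body means default bucket and features.
	rsp, err := http.Post(base+"/v2/testing3", "application/json", nil)
	if err != nil {
		panic(err)
	}
	rsp.Body.Close()
	fmt.Println("create ledger:", rsp.Status)

	// List the ledgers known by the service.
	rsp, err = http.Get(base + "/v2")
	if err != nil {
		panic(err)
	}
	defer rsp.Body.Close()

	body, _ := io.ReadAll(rsp.Body)
	fmt.Println("ledgers:", string(body))
}
```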
+ +### Buckets + +To create a ledger on a specific bucket, use the command: + +```shell +$ curl -X POST http://localhost:3068/v2/testing -d '{"bucket": "bucket0"}' +$ curl http://localhost:3068/v2/testing | jq +{ + "data": { + "bucket": "bucket0", + "metadata": {}, + "features": { + "ACCOUNT_METADATA_HISTORY": "SYNC", + "HASH_LOGS": "SYNC", + "INDEX_ADDRESS_SEGMENTS": "ON", + "INDEX_TRANSACTION_ACCOUNTS": "ON", + "MOVES_HISTORY": "ON", + "MOVES_HISTORY_POST_COMMIT_EFFECTIVE_VOLUMES": "SYNC", + "TRANSACTION_METADATA_HISTORY": "SYNC" + }, + "id": 2, + "name": "testing", + "addedAt": "2024-10-03T08:27:11.540373Z" + } +} +``` + +Under the hood, a bucket is a Postgres schema. You can use the bucket feature to implement some kind of horizontal scaling. + +### Features + +Each usage of the ledger service, is different. +Some usage involve a high write throughput, some other involve high read throughput, custom aggregation etc... + +So, each ledger can be configured with a set of features. By default, when creating a ledger, all features are enabled. +See [variables and constants](./internal/README.md#constants) for possible configurations and meaning of each feature. + +To create a ledger with specific features, use the command: +```shell +$ curl -X POST http://localhost:3068/v2/testing2 -d '{"features": {"HASH_LOGS": "DISABLED"}}' +$ curl http://localhost:3068/v2/testing2 | jq +{ + "data": { + "bucket": "_default", + "metadata": {}, + "features": { + "ACCOUNT_METADATA_HISTORY": "SYNC", + "HASH_LOGS": "DISABLED", + "INDEX_ADDRESS_SEGMENTS": "ON", + "INDEX_TRANSACTION_ACCOUNTS": "ON", + "MOVES_HISTORY": "ON", + "MOVES_HISTORY_POST_COMMIT_EFFECTIVE_VOLUMES": "SYNC", + "TRANSACTION_METADATA_HISTORY": "SYNC" + }, + "id": 3, + "name": "testing2", + "addedAt": "2024-10-03T08:40:40.545229Z" + } +} +``` + +When overriding features, all not specified features will receive the default configuration. + +> [!WARNING] +> Current set of feature is not stable, some can be added, or removed. + +Current set of features: + +| Name | Default value | Possible configuration | Description | +|------------------------------|---------------|------------------------|------------------------------------------------------------------| +| ACCOUNT_METADATA_HISTORY | SYNC | SYNC \| DISABLED | Historize metadata changes on accounts | +| TRANSACTION_METADATA_HISTORY | SYNC | SYNC \| DISABLED | Historize metadata changes on transactions | +| HASH_LOGS | SYNC | SYNC \| DISABLED | [Hash logs](#hashed-log) | +| INDEX_ADDRESS_SEGMENTS | ON | ON \| OFF | Index accounts addresses segments | +| INDEX_TRANSACTION_ACCOUNTS | ON | ON \| OFF | Index transactions accounts set | +| MOVES_HISTORY | ON | ON \| OFF | [Historize funds movements by account](#funds-movements-history) | +| MOVES_HISTORY_POST_COMMIT_EFFECTIVE_VOLUMES | SYNC | SYNC \| DISABLED | Compute and maintains post commit effective volumes | + + +## Funds movements history + +When feature `MOVES_HISTORY` is enabled (= `ON`), the ledger will register any individual funds movements for each account/asset pair. +There is the table schema : + +![table schema](./docs/database/_default/diagrams/tables/moves.1degree.png "Moves table") + +Column `post_commit_effective_volumes` will be set only if feature `MOVES_HISTORY_POST_COMMIT_EFFECTIVE_VOLUMES` is enabled. +See [post commit effective volumes upgrade](#effective-volumes-updates) for details explanation of the process. + +## Numscript + +The ledger service is able to use the Numscript interpreter to create transactions. 
+ +See [Numscript](https://github.com/formancehq/numscript) + +## Database + +Database schemas are extracted from a live ledger. +You can find them in: +* [System schema](./docs/database/_system/diagrams) +* [Bucket schema](./docs/database/_default/diagrams) + +## Deadlocks + +TODO + +## Data consistency + +### Balances locking + +The following sequence diagram describe the process of creating a transaction. +It supposes the minimal set of features. + +```mermaid +sequenceDiagram + actor Client + participant Ledger + participant Database + Client->>Ledger: Create transaction + Ledger->>Database: Start SQL transaction (Read committed isolation level) + Database-->>Ledger: SQL transaction started + Ledger->>Database: Get and lock balances of bounded source accounts + note left of Database: There, we will retrieve balances from database and lock associated balances
Until the end of the SQL transaction, no concurrent transaction will be able to use locked balances.
See notes below for details. + Database-->>Ledger: Balances of bounded accounts + Ledger->>Ledger: Compute transaction + note right of Ledger: Calling Numscript interpreter + Ledger-->>Database: Store transaction and update balances + note left of Database: This diagram simplifies the process of writing the transaction. More entities will be saved,
and some computation can happen, depending on the enabled features. More on this later. + Database-->>Ledger: Transaction written + Ledger->>Database: Commit SQL transaction + note left of Database: Now, updated account balances are written to the database and any
concurrent transaction waiting for those accounts will be unblocked. + Database-->>Ledger: Transaction committed + Ledger-->>Client: Created transaction +``` + +***Get and lock balances of bounded source accounts*** + +Locking balances of bounded source accounts can be achieved using the following query : +```sql +SELECT input - output AS balance +FROM accounts_volumes +WHERE address IN () +FOR UPDATE +``` + +The ```FOR UPDATE``` add a [RowExclusiveLock](https://www.postgresql.org/docs/current/explicit-locking.html#LOCKING-ROWS) on the balance account until the end of the SQL transaction. + +It will work... most of the time. There is still a edge case. +What if the ```accounts_volumes``` table has no rows for the account? Like a never used account. +In this case, the database will not take any lock and the core will work with data with no control. +For example, suppose the following case : +* send 100 USD from ```user:1``` to ```order:1``` +* ```user:1``` balance is allowed to go down to -200 USD + +Now, two transaction starting at the same time spending 200 USD. +Both of them will try to get the balance of the ```user:1``` account, and will get an empty balance. +Since the account is allowed to go down to -200, both transaction will pass. +Finally, both transactions will be committed, resulting in a balance of -400 USD, which would violates business rules. + +So, a complete solution is more : +```sql +WITH ( + INSERT INTO accounts_volume (accounts_address, input, output) + VALUES ... -- Insert 0 as input and output for each account address + ON CONFLICT DO NOTHING -- If we have conflict, this indicates than the account has already a registered balance, so ignore the conflict +) AS ins +SELECT input - output AS balance -- Finally, we still need to select the balance and lock the row +FROM accounts_volumes +WHERE address IN () +FOR UPDATE +``` + +### Transaction write + +TODO + +### Hashed log + +Ledgers can be configured with feature `HASH_LOGS` to `SYNC`. +By using this feature, each log will be hashed with the previous hash. +The generated signature is included in the log model. + +This mechanism allow to audit the full database. + +```mermaid +sequenceDiagram + actor Ledger + actor Store + actor Database + Ledger->>Store: Insert log + Store->>Database: SELECT pg_advisory_xact_lock() + Database-->>Store: OK + note right of Database: The ledger is locked at this point until the end of the current transaction + Store->>Database: Get last log + Database-->>Store: Last log + Store->>Store: Compute hash of new log + Store->>Database: Write log + Database-->>Store: Log written + Store-->>Ledger: Updated log +``` + +As you may have noticed, logs hashing involve a lock on the ledger. +It can quickly become a bottleneck of high write throughput. + +### Effective volumes updates + +[Effective volumes](#post-commit-effective-volumes) are enabled if the following features are enabled : +* `MOVES_HISTORY`: `ON` +* `MOVES_HISTORY_POST_COMMIT_EFFECTIVE_VOLUMES`: `SYNC` + +When inserting a fund movement in the database with the `MOVES_HISTORY_POST_COMMIT_EFFECTIVE_VOLUMES` enabled on the ledger, the ledger has to do : +* Compute actual post commit effective volumes for the moves by searching for the previous moves for account/asset pair, using the date of the move +* Inserting the new move +* Update any futures moves post_commit_effective_volumes + +### Import + +Ledgers can be imported and exported. 
+ +```mermaid +sequenceDiagram + actor Client + actor Ledger + actor Database + Client->>Ledger: Create transaction + Ledger->>Database: Start SQL transaction (Serialization Isolation level) + Database-->>Ledger: SQL transaction started + Ledger->>Database: Count logs + Database-->>Ledger: Count of logs + alt id count > 0 + note left of Ledger: Import is only allowed on empty ledgers + Ledger-->>Client: Invalid state error + else + Ledger->>Ledger: Read log stream + loop over log stream + Ledger->>Database: Apply appropriate actions (create transactions, update metadata...) + Database-->>Ledger: OK + end + end + Ledger->>Database: Commit SQL transaction + alt another action has been made on the ledger in the meantime + Database-->>Ledger: Serialization error + else + Database-->>Ledger: SQL Transaction committed + end + Ledger->>Client: OK +``` + +To import a ledger, we use the [Serializable Isolation Level](https://www.postgresql.org/docs/7.2/xact-serializable.html). +This isolation level is the strictest Postgres can offer. + +That's way, if another concurrent request write something on the ledger (a new transaction for example), the import request will fail with a serialization error. +It is the case, because, whatever the ledger is configured, finally, when writing, we will ***always*** write a log describing the action, causing conflict with Serializable Isolation Level. + +As said, this isolation level is the strictest Postgres can offer, we could ask why we don't use it all the time. +That's because, if we would do that, we would have frequent serialization errors, and we would need to retry very often, and probably creating a big bottleneck. + +## Testing strategy + +Tests are split in different scopes : +* Unit tests: as any go app, you will find unit test along the source code in _test.go files over the app. +* [e2e](./test/e2e) : End to end test. Those tests are mainly api tests, and app lifecycle tests. It checks than the ledger endpoint works as expected. +* [migrations](./test/migrations) : Migrations tests. Tests inside this package allow to import an existing database to apply current code migrations on it. +* [performance](./test/performance) : Performance tests. Tests inside this package test performance of the ledger. +* [stress](./test/stress) : Stress tests. Tests inside this package ensure than ledger state stay consistent under high concurrency. + +## API changes + +Openapi specification at [root](./openapi.yaml) must not be edited directly as it is generated. +Update [v2 spec](./openapi/v2.yaml) and regenerate the client using `earthly +pre-commit` or `earthly +generate-client`. + +## Dev commands + +### Before commit + +```shell +$ earthly +pre-commit +``` + +This command will : +* lint the code +* generate the client sdk (in [pkg/client](pkg/client)) +* fix dependencies +* generate [openapi](openapi.yaml) specification by combining [api versions](./openapi) +* ... + +### Run tests + +```shell +$ earthly -P +tests +``` + +Additionally, the flag ```--coverage=true``` can be passed to generate coverage : +```shell +$ earthly -P +tests --coverage=true # Generated under cover.out +``` + +## API reference + +See [API reference](./docs/api/README.md) + +## Terminology + +### Bounded source account + +A bounded source account, is an account used in a Numscript script, as a source account, and with a bottom limit. Example: + + send [USD/2 100] { + source = @bank + destination = @user:1 + } + + In this example, ```bank``` is considered as an unbounded source account. 
+ + An account used with an unbounded overdraft will not be considered as a bounded source account. + For example: + + send [USD/2 100] { + source = @bank allowing unbounded overdraft + destination = @user:1 + } + + With this script, ```bank``` will not be considered as an unbounded source account. +> [!NOTE] +> It is also the case of the ```world``` account, which is always an unbounded overdraft account. + +### Post commit volumes +* post commit volumes (pcv): see [description](./internal/README.md#type-transaction) + +### Post commit effective volumes +* post commit effective volumes (pcev): see [description](./internal/README.md#type-transaction) \ No newline at end of file diff --git a/Earthfile b/Earthfile index 5f0db1a10..148d45d65 100644 --- a/Earthfile +++ b/Earthfile @@ -1,35 +1,46 @@ VERSION 0.8 PROJECT FormanceHQ/ledger -IMPORT github.com/formancehq/earthly:tags/v0.16.2 AS core -IMPORT github.com/formancehq/stack/releases:main AS releases +IMPORT github.com/formancehq/earthly:tags/v0.17.1 AS core FROM core+base-image +CACHE --persist --sharing=shared /go +CACHE --persist --sharing=shared /root/.cache/golangci-lint + sources: + FROM core+builder-image + WORKDIR /src/pkg/client + COPY pkg/client/go.mod pkg/client/go.sum ./ + RUN go mod download WORKDIR /src - COPY go.mod go.sum . - COPY --dir internal pkg cmd . + COPY go.mod go.sum ./ + RUN go mod download + COPY --dir internal pkg cmd tools . COPY main.go . SAVE ARTIFACT /src generate: FROM core+builder-image RUN apk update && apk add openjdk11 - DO --pass-args core+GO_INSTALL --package=go.uber.org/mock/mockgen@latest + RUN go install go.uber.org/mock/mockgen@v0.4.0 + RUN go install github.com/princjef/gomarkdoc/cmd/gomarkdoc@latest COPY (+sources/*) /src WORKDIR /src - DO --pass-args core+GO_GENERATE + RUN go generate ./... SAVE ARTIFACT internal AS LOCAL internal SAVE ARTIFACT pkg AS LOCAL pkg SAVE ARTIFACT cmd AS LOCAL cmd compile: - FROM core+builder-image - COPY (+sources/*) /src + FROM +sources WORKDIR /src ARG VERSION=latest - DO --pass-args core+GO_COMPILE --VERSION=$VERSION + RUN go build + RUN go build -o main -ldflags="-X ${GIT_PATH}/cmd.Version=${VERSION} \ + -X ${GIT_PATH}/cmd.BuildDate=$(date +%s) \ + -X ${GIT_PATH}/cmd.Commit=${EARTHLY_BUILD_SHA}" + SAVE ARTIFACT main build-image: FROM core+final-image @@ -41,15 +52,12 @@ build-image: DO --pass-args core+SAVE_IMAGE --COMPONENT=ledger --REPOSITORY=${REPOSITORY} --TAG=$tag tests: - FROM core+builder-image + FROM +tidy RUN go install github.com/onsi/ginkgo/v2/ginkgo@latest - - COPY (+sources/*) /src - WORKDIR /src COPY --dir --pass-args (+generate/*) . - COPY --dir test . ARG includeIntegrationTests="true" + ARG includeEnd2EndTests="true" ARG coverage="" ARG debug=false @@ -60,30 +68,28 @@ tests: LET goFlags="-race" IF [ "$coverage" = "true" ] SET goFlags="$goFlags -covermode=atomic" - SET goFlags="$goFlags -coverpkg=github.com/formancehq/stack/components/ledger/internal/..." - SET goFlags="$goFlags,github.com/formancehq/stack/components/ledger/cmd/..." - SET goFlags="$goFlags -coverprofile cover.out" + SET goFlags="$goFlags -coverpkg=github.com/formancehq/ledger/internal/..." + SET goFlags="$goFlags,github.com/formancehq/ledger/pkg/events/..." + SET goFlags="$goFlags,github.com/formancehq/ledger/pkg/accounts/..." + SET goFlags="$goFlags,github.com/formancehq/ledger/pkg/assets/..." + SET goFlags="$goFlags,github.com/formancehq/ledger/cmd/..." 
+ SET goFlags="$goFlags -coverprofile coverage.txt" END + IF [ "$includeIntegrationTests" = "true" ] SET goFlags="$goFlags -tags it" - WITH DOCKER \ - --pull=postgres:15-alpine \ - --pull=clickhouse/clickhouse-server:head \ - --pull=elasticsearch:8.14.3 - RUN --mount type=cache,id=gopkgcache,target=${GOPATH}/pkg/mod \ - --mount type=cache,id=gobuildcache,target=/root/.cache/go-build \ - ginkgo -r -p $goFlags + WITH DOCKER --pull=postgres:15-alpine + RUN ginkgo -r -p $goFlags END ELSE - RUN --mount type=cache,id=gopkgcache,target=${GOPATH}/pkg/mod \ - --mount type=cache,id=gobuildcache,target=/root/.cache/go-build \ - ginkgo -r -p $goFlags + RUN ginkgo -r -p $goFlags END IF [ "$coverage" = "true" ] - # exclude files suffixed with _generated.go, these are mocks used by tests - RUN cat cover.out | grep -v "_generated.go" > cover2.out - RUN mv cover2.out cover.out - SAVE ARTIFACT cover.out AS LOCAL cover.out + # as special case, exclude files suffixed by debug.go + # toremovelater: exclude machine code as it will be updated soon + RUN cat coverage.txt | grep -v debug.go | grep -v "/machine/" > coverage2.txt + RUN mv coverage2.txt coverage.txt + SAVE ARTIFACT coverage.txt AS LOCAL coverage.txt END deploy: @@ -99,34 +105,30 @@ deploy-staging: BUILD --pass-args core+deploy-staging lint: - FROM core+builder-image - COPY (+sources/*) /src - WORKDIR /src - COPY --pass-args +tidy/go.* . - COPY --dir test . - DO --pass-args core+GO_LINT --ADDITIONAL_ARGUMENTS="--build-tags it" + #todo: get config from core + FROM +tidy + RUN golangci-lint run --fix --build-tags it --timeout 5m SAVE ARTIFACT cmd AS LOCAL cmd SAVE ARTIFACT internal AS LOCAL internal SAVE ARTIFACT pkg AS LOCAL pkg SAVE ARTIFACT test AS LOCAL test + SAVE ARTIFACT tools AS LOCAL tools SAVE ARTIFACT main.go AS LOCAL main.go pre-commit: WAIT - BUILD --pass-args +tidy - END - BUILD --pass-args +lint - WAIT + BUILD +tidy + BUILD +lint BUILD +openapi + BUILD +openapi-markdown END + BUILD +generate BUILD +generate-client + BUILD +export-docs-events bench: - FROM core+builder-image - DO --pass-args core+GO_INSTALL --package=golang.org/x/perf/cmd/benchstat@latest - COPY (+sources/*) /src - WORKDIR /src - COPY --dir test . 
+ FROM +tidy + RUN go install golang.org/x/perf/cmd/benchstat@latest WORKDIR /src/test/performance ARG benchTime=1s ARG count=1 @@ -141,9 +143,7 @@ bench: SET additionalArgs=-v END WITH DOCKER --pull postgres:15-alpine - RUN --mount type=cache,id=gopkgcache,target=${GOPATH}/pkg/mod \ - --mount type=cache,id=gobuild,target=/root/.cache/go-build \ - go test -timeout $testTimeout -bench=$bench -run ^$ -tags it $additionalArgs \ + RUN go test -timeout $testTimeout -bench=$bench -run ^$ -tags it $additionalArgs \ -benchtime=$benchTime | tee -a /results.txt END RUN benchstat /results.txt @@ -151,7 +151,7 @@ bench: benchstat: FROM core+builder-image - DO --pass-args core+GO_INSTALL --package=golang.org/x/perf/cmd/benchstat@latest + RUN go install golang.org/x/perf/cmd/benchstat@latest ARG compareAgainstRevision=main COPY --pass-args github.com/formancehq/stack/components/ledger:$compareAgainstRevision+bench/results.txt /tmp/main.txt COPY --pass-args +bench/results.txt /tmp/branch.txt @@ -167,12 +167,18 @@ openapi: RUN yq -oy ./openapi.json > openapi.yaml SAVE ARTIFACT ./openapi.yaml AS LOCAL ./openapi.yaml +openapi-markdown: + FROM node:20-alpine + RUN npm install -g widdershins + COPY openapi/v2.yaml openapi.yaml + RUN widdershins openapi.yaml -o README.md --search false --language_tabs 'http:HTTP' --summary --omitHeader + SAVE ARTIFACT README.md AS LOCAL docs/api/README.md + tidy: - FROM core+builder-image - COPY --pass-args (+sources/src) /src + FROM +sources WORKDIR /src COPY --dir test . - DO --pass-args core+GO_TIDY + RUN go mod tidy release: FROM core+builder-image @@ -187,8 +193,23 @@ generate-client: COPY (core+sources-speakeasy/speakeasy) /bin/speakeasy COPY (+openapi/openapi.yaml) openapi.yaml RUN cat ./openapi.yaml | yq e -o json > openapi.json - COPY (releases+sources/src/openapi-overlay.json) openapi-overlay.json + COPY (core+sources/out --LOCATION=openapi-overlay.json) openapi-overlay.json RUN jq -s '.[0] * .[1]' openapi.json openapi-overlay.json > final.json COPY --dir pkg/client client RUN --secret SPEAKEASY_API_KEY speakeasy generate sdk -s ./final.json -o ./client -l go SAVE ARTIFACT client AS LOCAL ./pkg/client + +export-database-schema: + FROM +sources + RUN go install github.com/roerohan/wait-for-it@latest + COPY --dir scripts scripts + WITH DOCKER --pull postgres:15-alpine --pull schemaspy/schemaspy:6.2.4 + RUN ./scripts/export-database-schema.sh + END + SAVE ARTIFACT docs/database/_system/diagrams AS LOCAL docs/database/_system/diagrams + SAVE ARTIFACT docs/database/_default/diagrams AS LOCAL docs/database/_default/diagrams + +export-docs-events: + FROM +tidy + RUN go run tools/docs/events/main.go --write-dir docs/events + SAVE ARTIFACT docs/events AS LOCAL docs/events \ No newline at end of file diff --git a/README.md b/README.md index a51d3305d..e34ba264c 100644 --- a/README.md +++ b/README.md @@ -2,54 +2,22 @@ Formance Ledger (fka numary) is a programmable financial ledger that provides a foundation for money-moving applications. The ledger provides atomic multi-postings transactions and is programmable in [Numscript](doc:machine-instructions), a built-in language dedicated to money movements. It can be used either as a standalone micro-service or as part of the greater Formance Stack, and will shine for apps that require a lot of custom, money-moving code, e.g: -* E-commerce with complex payments flows, payments splitting, such as marketplaces -* Company-issued currencies systems, e.g. Twitch Bits -* In-game currencies, inventories and trading systems, e.g. 
Fortnite V-Bucks -* Payment gateways using non-standard assets, e.g. learning credits -* Local currencies and complementary finance - -# Getting started +- E-commerce with complex payments flows, payments splitting, such as marketplaces +- Company-issued currencies systems, e.g. Twitch Bits +- In-game currencies, inventories and trading systems, e.g. Fortnite V-Bucks +- Payment gateways using non-standard assets, e.g. learning credits +- Local currencies and complementary finance Formance Ledger works as a standalone binary, the latest of which can be downloaded from the [releases page](https://github.com/formancehq/ledger/releases). You can move the binary to any executable path, such as to `/usr/local/bin`. Installations using brew, apt, yum or docker are also [available](https://docs.formance.com/docs/installation-1). -```SHELL - -ledger server start - -# Submit a first transaction -echo " -send [USD/2 599] ( - source = @world - destination = @payments:001 -) - -send [USD/2 599] ( - source = @payments:001 - destination = @rides:0234 -) - -send [USD/2 599] ( - source = @rides:0234 - destination = { - 85/100 to @drivers:042 - 15/100 to @platform:fees - } -) -" > example.num - -ledger exec quickstart example.num - -# Get the balances of drivers:042 -curl -X GET http://localhost:3068/quickstart/accounts/drivers:042 - -# List transactions -curl -X GET http://localhost:3068/quickstart/transactions -``` - -# Documentation +## Documentation You can find the complete Numary documentation at [docs.formance.com](https://docs.formance.com) -# Community +## Community If you need help, want to show us what you built or just hang out and chat about ledgers you are more than welcome on our [Slack](https://bit.ly/formance-slack) - looking forward to see you there! 
+ +## Contributing + +See [CONTRIBUTING.md](./CONTRIBUTING.md) \ No newline at end of file diff --git a/cmd/buckets.go b/cmd/buckets.go index d9ec9107a..c28252451 100644 --- a/cmd/buckets.go +++ b/cmd/buckets.go @@ -1,70 +1,15 @@ package cmd import ( - "github.com/formancehq/go-libs/bun/bunconnect" - "github.com/formancehq/go-libs/logging" - "github.com/formancehq/go-libs/service" - "github.com/formancehq/ledger/internal/storage/driver" "github.com/spf13/cobra" ) func NewBucket() *cobra.Command { - return &cobra.Command{ + ret := &cobra.Command{ Use: "buckets", Aliases: []string{"storage"}, } -} - -func NewBucketUpgrade() *cobra.Command { - cmd := &cobra.Command{ - Use: "upgrade", - Args: cobra.ExactArgs(1), - SilenceUsage: true, - RunE: func(cmd *cobra.Command, args []string) error { - connectionOptions, err := bunconnect.ConnectionOptionsFromFlags(cmd) - if err != nil { - return err - } - - driver := driver.New(*connectionOptions) - if err := driver.Initialize(cmd.Context()); err != nil { - return err - } - defer func() { - _ = driver.Close() - }() - - name := args[0] - - bucket, err := driver.OpenBucket(cmd.Context(), name) - if err != nil { - return err - } - - logger := logging.NewDefaultLogger(cmd.OutOrStdout(), service.IsDebug(cmd), false) - - return bucket.Migrate(logging.ContextWithLogger(cmd.Context(), logger)) - }, - } - return cmd -} - -func upgradeAll(cmd *cobra.Command, _ []string) error { - logger := logging.NewDefaultLogger(cmd.OutOrStdout(), service.IsDebug(cmd), false) - ctx := logging.ContextWithLogger(cmd.Context(), logger) - - connectionOptions, err := bunconnect.ConnectionOptionsFromFlags(cmd) - if err != nil { - return err - } - - driver := driver.New(*connectionOptions) - if err := driver.Initialize(ctx); err != nil { - return err - } - defer func() { - _ = driver.Close() - }() - return driver.UpgradeAllBuckets(ctx) + ret.AddCommand(NewBucketUpgrade()) + return ret } diff --git a/cmd/buckets_upgrade.go b/cmd/buckets_upgrade.go new file mode 100644 index 000000000..99d844287 --- /dev/null +++ b/cmd/buckets_upgrade.go @@ -0,0 +1,74 @@ +package cmd + +import ( + "github.com/formancehq/go-libs/v2/bun/bunconnect" + "github.com/formancehq/go-libs/v2/logging" + "github.com/formancehq/go-libs/v2/service" + "github.com/formancehq/ledger/internal/storage/driver" + "github.com/spf13/cobra" +) + +func NewBucketUpgrade() *cobra.Command { + cmd := &cobra.Command{ + Use: "upgrade", + Args: cobra.ExactArgs(1), + SilenceUsage: true, + RunE: func(cmd *cobra.Command, args []string) error { + connectionOptions, err := bunconnect.ConnectionOptionsFromFlags(cmd) + if err != nil { + return err + } + + db, err := bunconnect.OpenSQLDB(cmd.Context(), *connectionOptions) + if err != nil { + return err + } + defer func() { + _ = db.Close() + }() + + driver := driver.New(db) + if err := driver.Initialize(cmd.Context()); err != nil { + return err + } + + if args[0] == "*" { + return upgradeAll(cmd) + } + + logger := logging.NewDefaultLogger(cmd.OutOrStdout(), service.IsDebug(cmd), false) + + return driver.UpgradeBucket(logging.ContextWithLogger(cmd.Context(), logger), args[0]) + }, + } + + service.AddFlags(cmd.Flags()) + bunconnect.AddFlags(cmd.Flags()) + + return cmd +} + +func upgradeAll(cmd *cobra.Command) error { + logger := logging.NewDefaultLogger(cmd.OutOrStdout(), service.IsDebug(cmd), false) + ctx := logging.ContextWithLogger(cmd.Context(), logger) + + connectionOptions, err := bunconnect.ConnectionOptionsFromFlags(cmd) + if err != nil { + return err + } + + db, err := 
bunconnect.OpenSQLDB(cmd.Context(), *connectionOptions) + if err != nil { + return err + } + defer func() { + _ = db.Close() + }() + + driver := driver.New(db) + if err := driver.Initialize(ctx); err != nil { + return err + } + + return driver.UpgradeAllBuckets(ctx) +} diff --git a/cmd/buckets_upgrade_test.go b/cmd/buckets_upgrade_test.go new file mode 100644 index 000000000..ab990c54c --- /dev/null +++ b/cmd/buckets_upgrade_test.go @@ -0,0 +1,54 @@ +//go:build it + +package cmd + +import ( + "io" + "testing" + + "github.com/formancehq/go-libs/v2/bun/bunconnect" + "github.com/formancehq/go-libs/v2/logging" + "github.com/formancehq/go-libs/v2/testing/docker" + "github.com/formancehq/go-libs/v2/testing/platform/pgtesting" + "github.com/stretchr/testify/require" +) + +func TestBucketsUpgrade(t *testing.T) { + t.Parallel() + + dockerPool := docker.NewPool(t, logging.Testing()) + srv := pgtesting.CreatePostgresServer(t, dockerPool) + ctx := logging.TestingContext() + + type testCase struct { + name string + args []string + } + + for _, tc := range []testCase{ + { + name: "nominal", + args: []string{"test"}, + }, + { + name: "upgrade all", + args: []string{"*"}, + }, + } { + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + db := srv.NewDatabase(t) + + args := []string{ + "--" + bunconnect.PostgresURIFlag, db.ConnString(), + } + args = append(args, tc.args...) + + cmd := NewBucketUpgrade() + cmd.SetOut(io.Discard) + cmd.SetArgs(args) + require.NoError(t, cmd.ExecuteContext(ctx)) + }) + } +} diff --git a/cmd/container.go b/cmd/container.go deleted file mode 100644 index 7482b704e..000000000 --- a/cmd/container.go +++ /dev/null @@ -1,39 +0,0 @@ -package cmd - -import ( - "github.com/formancehq/go-libs/auth" - "github.com/formancehq/go-libs/otlp/otlpmetrics" - "github.com/formancehq/go-libs/otlp/otlptraces" - "github.com/formancehq/go-libs/publish" - "github.com/formancehq/go-libs/service" - "github.com/formancehq/ledger/internal/engine" - driver "github.com/formancehq/ledger/internal/storage/driver" - "github.com/spf13/cobra" - "go.uber.org/fx" -) - -const ServiceName = "ledger" - -func resolveOptions(cmd *cobra.Command, userOptions ...fx.Option) []fx.Option { - options := make([]fx.Option, 0) - options = append(options, fx.NopLogger) - - numscriptCacheMaxCountFlag, _ := cmd.Flags().GetInt(NumscriptCacheMaxCountFlag) - ledgerBatchSizeFlag, _ := cmd.Flags().GetInt(ledgerBatchSizeFlag) - - options = append(options, - publish.FXModuleFromFlags(cmd, service.IsDebug(cmd)), - otlptraces.FXModuleFromFlags(cmd), - otlpmetrics.FXModuleFromFlags(cmd), - auth.FXModuleFromFlags(cmd), - driver.FXModuleFromFlags(cmd), - engine.Module(engine.Configuration{ - NumscriptCache: engine.NumscriptCacheConfiguration{ - MaxCount: numscriptCacheMaxCountFlag, - }, - LedgerBatchSize: ledgerBatchSizeFlag, - }), - ) - - return append(options, userOptions...) 
-} diff --git a/cmd/doc.go b/cmd/doc.go deleted file mode 100644 index 957955298..000000000 --- a/cmd/doc.go +++ /dev/null @@ -1,62 +0,0 @@ -package cmd - -import ( - "fmt" - "os" - "sort" - "strings" - "text/tabwriter" - - "github.com/spf13/cobra" - "github.com/spf13/pflag" -) - -func NewDocCommand() *cobra.Command { - cmd := &cobra.Command{ - Use: "doc", - } - cmd.AddCommand(NewDocFlagCommand()) - return cmd -} - -func NewDocFlagCommand() *cobra.Command { - cmd := &cobra.Command{ - Use: "flags", - Run: func(cmd *cobra.Command, args []string) { - - w := tabwriter.NewWriter(os.Stdout, 0, 0, 1, ' ', tabwriter.Debug) - defer func(w *tabwriter.Writer) { - if err := w.Flush(); err != nil { - panic(err) - } - }(w) - - allKeys := make([]string, 0) - cmd.Flags().VisitAll(func(f *pflag.Flag) { - allKeys = append(allKeys, f.Name) - }) - sort.Strings(allKeys) - - if _, err := fmt.Fprintf(w, - "\tFlag\tEnv var\tDefault value\tDescription\t\r\n"); err != nil { - panic(err) - } - if _, err := fmt.Fprintf(w, - "\t-\t-\t-\t-\t\r\n"); err != nil { - panic(err) - } - for _, key := range allKeys { - asEnvVar := strings.ToUpper(strings.Replace(key, "-", "_", -1)) - flag := cmd.Parent().Parent().PersistentFlags().Lookup(key) - if flag == nil { - continue - } - if _, err := fmt.Fprintf(w, - "\t--%s\t%s\t%s\t%s\t\r\n", key, asEnvVar, flag.DefValue, flag.Usage); err != nil { - panic(err) - } - } - }, - } - return cmd -} diff --git a/cmd/root.go b/cmd/root.go index 43103a632..1a49d271a 100644 --- a/cmd/root.go +++ b/cmd/root.go @@ -1,23 +1,15 @@ package cmd import ( - "github.com/formancehq/go-libs/bun/bunmigrate" - "github.com/formancehq/go-libs/service" + "github.com/formancehq/go-libs/v2/bun/bunmigrate" + "github.com/formancehq/go-libs/v2/service" "github.com/uptrace/bun" - "github.com/formancehq/go-libs/aws/iam" - "github.com/formancehq/go-libs/bun/bunconnect" - - "github.com/formancehq/go-libs/auth" - "github.com/formancehq/go-libs/otlp/otlpmetrics" - "github.com/formancehq/go-libs/otlp/otlptraces" - "github.com/formancehq/go-libs/publish" - "github.com/formancehq/ledger/internal/storage/systemstore" "github.com/spf13/cobra" ) const ( - BindFlag = "bind" + ServiceName = "ledger" ) var ( @@ -34,33 +26,19 @@ func NewRootCommand() *cobra.Command { Version: Version, } - serve := NewServe() + serve := NewServeCommand() version := NewVersion() buckets := NewBucket() - buckets.AddCommand(NewBucketUpgrade()) root.AddCommand(serve) root.AddCommand(buckets) root.AddCommand(version) - root.AddCommand(bunmigrate.NewDefaultCommand(func(cmd *cobra.Command, args []string, db *bun.DB) error { - return upgradeAll(cmd, args) + root.AddCommand(bunmigrate.NewDefaultCommand(func(cmd *cobra.Command, _ []string, _ *bun.DB) error { + // todo: use provided db ... 
+ return upgradeAll(cmd) })) - root.AddCommand(NewDocCommand()) - - root.PersistentFlags().String(BindFlag, "0.0.0.0:3068", "API bind address") - - service.AddFlags(root.PersistentFlags()) - otlpmetrics.AddFlags(root.PersistentFlags()) - otlptraces.AddFlags(root.PersistentFlags()) - auth.AddFlags(root.PersistentFlags()) - publish.AddFlags(ServiceName, root.PersistentFlags(), func(cd *publish.ConfigDefault) { - cd.PublisherCircuitBreakerSchema = systemstore.Schema - }) - bunconnect.AddFlags(root.PersistentFlags()) - iam.AddFlags(root.PersistentFlags()) - return root } diff --git a/cmd/serve.go b/cmd/serve.go index 39f0210b9..5c6f30ff7 100644 --- a/cmd/serve.go +++ b/cmd/serve.go @@ -1,94 +1,190 @@ package cmd import ( - "net/http" - - "github.com/go-chi/chi/v5" - - "github.com/formancehq/go-libs/time" - + apilib "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/health" + "github.com/formancehq/go-libs/v2/httpserver" + "github.com/formancehq/go-libs/v2/otlp" "github.com/formancehq/ledger/internal/storage/driver" + "github.com/go-chi/chi/v5" + "go.opentelemetry.io/otel/sdk/metric" + "net/http" + "net/http/pprof" + "time" + "github.com/formancehq/go-libs/v2/auth" + "github.com/formancehq/go-libs/v2/aws/iam" + "github.com/formancehq/go-libs/v2/bun/bunconnect" + "github.com/formancehq/go-libs/v2/otlp/otlpmetrics" + "github.com/formancehq/go-libs/v2/otlp/otlptraces" + "github.com/formancehq/go-libs/v2/publish" "github.com/formancehq/ledger/internal/api" + "github.com/formancehq/ledger/internal/bus" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + systemcontroller "github.com/formancehq/ledger/internal/controller/system" + "github.com/formancehq/ledger/internal/storage" - "github.com/formancehq/go-libs/ballast" - "github.com/formancehq/go-libs/httpserver" - "github.com/formancehq/go-libs/logging" - "github.com/formancehq/go-libs/service" + "github.com/formancehq/go-libs/v2/ballast" + "github.com/formancehq/go-libs/v2/service" "github.com/spf13/cobra" "go.uber.org/fx" ) const ( + BindFlag = "bind" BallastSizeInBytesFlag = "ballast-size" NumscriptCacheMaxCountFlag = "numscript-cache-max-count" - ledgerBatchSizeFlag = "ledger-batch-size" - ReadOnlyFlag = "read-only" AutoUpgradeFlag = "auto-upgrade" + ExperimentalFeaturesFlag = "experimental-features" + BulkMaxSizeFlag = "bulk-max-size" ) -func NewServe() *cobra.Command { +func NewServeCommand() *cobra.Command { cmd := &cobra.Command{ - Use: "serve", - RunE: func(cmd *cobra.Command, args []string) error { - readOnly, _ := cmd.Flags().GetBool(ReadOnlyFlag) - autoUpgrade, _ := cmd.Flags().GetBool(AutoUpgradeFlag) - ballastSize, _ := cmd.Flags().GetUint(BallastSizeInBytesFlag) - bind, _ := cmd.Flags().GetString(BindFlag) - - return service.New(cmd.OutOrStdout(), resolveOptions( - cmd, - ballast.Module(ballastSize), + Use: "serve", + SilenceUsage: true, + RunE: func(cmd *cobra.Command, _ []string) error { + serveConfiguration := discoverServeConfiguration(cmd) + + connectionOptions, err := bunconnect.ConnectionOptionsFromFlags(cmd) + if err != nil { + return err + } + + experimentalFeatures, err := cmd.Flags().GetBool(ExperimentalFeaturesFlag) + if err != nil { + return err + } + + bulkMaxSize, err := cmd.Flags().GetInt(BulkMaxSizeFlag) + if err != nil { + return err + } + + options := []fx.Option{ + fx.NopLogger, + otlp.FXModuleFromFlags(cmd), + otlptraces.FXModuleFromFlags(cmd), + otlpmetrics.FXModuleFromFlags(cmd), + publish.FXModuleFromFlags(cmd, service.IsDebug(cmd)), + 
auth.FXModuleFromFlags(cmd), + bunconnect.Module(*connectionOptions, service.IsDebug(cmd)), + storage.NewFXModule(serveConfiguration.autoUpgrade), + systemcontroller.NewFXModule(systemcontroller.ModuleConfiguration{ + NSCacheConfiguration: ledgercontroller.CacheConfiguration{ + MaxCount: serveConfiguration.numscriptCacheMaxCount, + }, + DatabaseRetryConfiguration: systemcontroller.DatabaseRetryConfiguration{ + MaxRetry: 10, + Delay: time.Millisecond * 100, + }, + EnableFeatures: experimentalFeatures, + }), + bus.NewFxModule(), + ballast.Module(serveConfiguration.ballastSize), api.Module(api.Config{ - Version: Version, - ReadOnly: readOnly, - Debug: service.IsDebug(cmd), + Version: Version, + Debug: service.IsDebug(cmd), + BulkMaxSize: bulkMaxSize, }), - fx.Invoke(func(lc fx.Lifecycle, driver *driver.Driver) { - if autoUpgrade { - lc.Append(fx.Hook{ - OnStart: driver.UpgradeAllBuckets, - }) - } + fx.Decorate(func( + params struct { + fx.In + + Handler chi.Router + HealthController *health.HealthController + + MeterProvider *metric.MeterProvider `optional:"true"` + Exporter *otlpmetrics.InMemoryExporter `optional:"true"` + }, + ) chi.Router { + return assembleFinalRouter( + service.IsDebug(cmd), + params.MeterProvider, + params.Exporter, + params.HealthController, + params.Handler, + ) }), - fx.Invoke(func(lc fx.Lifecycle, h chi.Router, logger logging.Logger) { - - wrappedRouter := chi.NewRouter() - wrappedRouter.Use(func(handler http.Handler) http.Handler { - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - r = r.WithContext(logging.ContextWithLogger(r.Context(), logger)) - handler.ServeHTTP(w, r) - }) - }) - wrappedRouter.Use(Log()) - wrappedRouter.Mount("/", h) - - lc.Append(httpserver.NewHook(wrappedRouter, httpserver.WithAddress(bind))) + fx.Invoke(func(lc fx.Lifecycle, h chi.Router) { + lc.Append(httpserver.NewHook(h, httpserver.WithAddress(serveConfiguration.bind))) }), - )...).Run(cmd) + } + + return service.New(cmd.OutOrStdout(), options...).Run(cmd) }, } cmd.Flags().Uint(BallastSizeInBytesFlag, 0, "Ballast size in bytes, default to 0") - cmd.Flags().Int(NumscriptCacheMaxCountFlag, 1024, "Numscript cache max count") - cmd.Flags().Int(ledgerBatchSizeFlag, 50, "ledger batch size") - cmd.Flags().Bool(ReadOnlyFlag, false, "Read only mode") + cmd.Flags().Uint(NumscriptCacheMaxCountFlag, 1024, "Numscript cache max count") cmd.Flags().Bool(AutoUpgradeFlag, false, "Automatically upgrade all schemas") + cmd.Flags().String(BindFlag, "0.0.0.0:3068", "API bind address") + cmd.Flags().Bool(ExperimentalFeaturesFlag, false, "Enable features configurability") + cmd.Flags().Int(BulkMaxSizeFlag, api.DefaultBulkMaxSize, "Bulk max size (default 100)") + + service.AddFlags(cmd.Flags()) + bunconnect.AddFlags(cmd.Flags()) + otlpmetrics.AddFlags(cmd.Flags()) + otlptraces.AddFlags(cmd.Flags()) + auth.AddFlags(cmd.Flags()) + publish.AddFlags(ServiceName, cmd.Flags(), func(cd *publish.ConfigDefault) { + cd.PublisherCircuitBreakerSchema = driver.SchemaSystem + }) + iam.AddFlags(cmd.Flags()) + return cmd } -func Log() func(h http.Handler) http.Handler { - return func(h http.Handler) http.Handler { - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - start := time.Now() - h.ServeHTTP(w, r) - latency := time.Since(start) - logging.FromContext(r.Context()).WithFields(map[string]interface{}{ - "method": r.Method, - "path": r.URL.Path, - "latency": latency, - "user_agent": r.UserAgent(), - "params": r.URL.Query().Encode(), - }).Debug("Request") +type serveConfiguration 
struct { + ballastSize uint + numscriptCacheMaxCount uint + autoUpgrade bool + bind string +} + +func discoverServeConfiguration(cmd *cobra.Command) serveConfiguration { + ret := serveConfiguration{} + ret.ballastSize, _ = cmd.Flags().GetUint(BallastSizeInBytesFlag) + ret.numscriptCacheMaxCount, _ = cmd.Flags().GetUint(NumscriptCacheMaxCountFlag) + ret.autoUpgrade, _ = cmd.Flags().GetBool(AutoUpgradeFlag) + ret.bind, _ = cmd.Flags().GetString(BindFlag) + + return ret +} + +func assembleFinalRouter( + exportPProf bool, + meterProvider *metric.MeterProvider, + exporter *otlpmetrics.InMemoryExporter, + healthController *health.HealthController, + handler http.Handler, +) *chi.Mux { + wrappedRouter := chi.NewRouter() + wrappedRouter.Route("/_/", func(r chi.Router) { + if exporter != nil { + r.Handle("/metrics", otlpmetrics.NewInMemoryExporterHandler( + meterProvider, + exporter, + )) + } + if exportPProf { + r.Handle("/debug/pprof/*", http.StripPrefix( + "/_", + http.HandlerFunc(pprof.Index), + )) + } + r.Handle("/healthcheck", http.HandlerFunc(healthController.Check)) + r.Get("/info", func(w http.ResponseWriter, r *http.Request) { + apilib.RawOk(w, struct { + Server string `json:"server"` + Version string `json:"version"` + }{ + Server: "ledger", + Version: Version, + }) }) - } + }) + wrappedRouter.Get("/_healthcheck", healthController.Check) + wrappedRouter.Mount("/", handler) + + return wrappedRouter } diff --git a/cmd/version.go b/cmd/version.go index 5b9e9ff24..2d12e4e52 100644 --- a/cmd/version.go +++ b/cmd/version.go @@ -6,7 +6,7 @@ import ( "github.com/spf13/cobra" ) -func PrintVersion(cmd *cobra.Command, args []string) { +func PrintVersion(_ *cobra.Command, _ []string) { fmt.Printf("Version: %s \n", Version) fmt.Printf("Date: %s \n", BuildDate) fmt.Printf("Commit: %s \n", Commit) diff --git a/examples/otlp-exporter/otel-collector-config.yaml b/deployments/otel-collector-config.yaml similarity index 85% rename from examples/otlp-exporter/otel-collector-config.yaml rename to deployments/otel-collector-config.yaml index 052d0b27a..09760168e 100644 --- a/examples/otlp-exporter/otel-collector-config.yaml +++ b/deployments/otel-collector-config.yaml @@ -18,7 +18,11 @@ exporters: # metric_expiration: 180m resource_to_telemetry_conversion: enabled: true - logging: +# logging: + otlp/2: + endpoint: jaeger:4317 + tls: + insecure: true processors: batch: @@ -41,16 +45,14 @@ connectors: - name: http.method service: - telemetry: - logs: - level: "debug" +# telemetry: +# logs: +# level: "debug" extensions: [pprof, zpages, health_check] pipelines: traces: receivers: [otlp] -# processors: [batch] - exporters: [spanmetrics] + exporters: [spanmetrics, otlp/2] metrics: receivers: [otlp, spanmetrics] -# processors: [batch] exporters: [prometheus] diff --git a/examples/otlp-exporter/prometheus.yaml b/deployments/prometheus.yaml similarity index 100% rename from examples/otlp-exporter/prometheus.yaml rename to deployments/prometheus.yaml diff --git a/docker-compose.yml b/docker-compose.yml index 0b1ffdbf0..0dc85c461 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,7 +1,7 @@ version: '3.8' -volumes: - postgres: + services: + postgres: image: "postgres:16-alpine" healthcheck: @@ -16,12 +16,28 @@ services: POSTGRES_USER: "ledger" POSTGRES_PASSWORD: "ledger" POSTGRES_DB: "ledger" - PGDATA: /data/postgres + + prometheus: + image: prom/prometheus:latest + restart: always volumes: - - postgres:/data/postgres + - ./deployments/prometheus.yaml:/etc/prometheus/prometheus.yml + ports: + - "9090:9090" + 
+ otel: + image: "otel/opentelemetry-collector-contrib:0.81.0" + command: [ "--config=/etc/otel-collector-config.yaml" ] + volumes: + - ./deployments/otel-collector-config.yaml:/etc/otel-collector-config.yaml + + jaeger: + image: jaegertracing/opentelemetry-all-in-one + ports: + - "16686:16686/tcp" ledger: - image: golang:1.19-alpine + image: golang:1.22-alpine entrypoint: go run main.go serve volumes: - .:/src @@ -31,7 +47,18 @@ services: depends_on: postgres: condition: service_healthy + jaeger: + condition: service_started environment: - STORAGE_DRIVER: "postgres" - STORAGE_POSTGRES_CONN_STRING: "postgresql://ledger:ledger@postgres/ledger?sslmode=disable" - DEBUG: "true" + GOWORK: off + DEBUG: "${DEBUG:-false}" + OTEL_SERVICE_NAME: ledger + OTEL_METRICS_EXPORTER: "otlp" + OTEL_METRICS_EXPORTER_OTLP_ENDPOINT: otel:4317 + OTEL_METRICS_EXPORTER_OTLP_INSECURE: "true" + OTEL_METRICS_KEEP_IN_MEMORY: "true" + OTEL_TRACES_EXPORTER: otlp + OTEL_TRACES_EXPORTER_OTLP_ENDPOINT: otel:4317 + OTEL_TRACES_EXPORTER_OTLP_INSECURE: "true" + OTEL_TRACES_BATCH: "true" + POSTGRES_URI: "postgresql://ledger:ledger@postgres/ledger?sslmode=disable" diff --git a/docs/api/README.md b/docs/api/README.md new file mode 100644 index 000000000..7cd5c29c8 --- /dev/null +++ b/docs/api/README.md @@ -0,0 +1,4330 @@ + + +

# Ledger API v2

+ +> Scroll down for code samples, example requests and responses. Select a language for code samples from the tabs above or the mobile navigation menu. + +Base URLs: + +* http://localhost:8080/ + +# Authentication + +- oAuth2 authentication. + + - Flow: clientCredentials + + - Token URL = [/api/auth/oauth/token](/api/auth/oauth/token) + +|Scope|Scope Description| +|---|---| + +
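As a rough illustration of the flow above, a client can obtain a token with the standard client-credentials flow and reuse it for every call. This is a sketch only: the client id and secret are placeholders, the token URL is the one listed above, and whether authentication is enforced at all depends on how the server is deployed.

```go
package main

import (
	"context"
	"fmt"
	"io"

	"golang.org/x/oauth2/clientcredentials"
)

func main() {
	// Client-credentials flow against the token URL advertised above.
	conf := clientcredentials.Config{
		ClientID:     "YOUR_CLIENT_ID",     // placeholder
		ClientSecret: "YOUR_CLIENT_SECRET", // placeholder
		TokenURL:     "http://localhost:8080/api/auth/oauth/token",
	}

	// The returned *http.Client injects the bearer token on every request.
	client := conf.Client(context.Background())

	rsp, err := client.Get("http://localhost:8080/_/info")
	if err != nil {
		panic(err)
	}
	defer rsp.Body.Close()

	body, _ := io.ReadAll(rsp.Body)
	fmt.Println(string(body))
}
```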

# ledger

+ +## Show server information + + + +> Code samples + +```http +GET http://localhost:8080/_/info HTTP/1.1 +Host: localhost:8080 +Accept: application/json + +``` + +`GET /_/info` + +> Example responses + +> 200 Response + +```json +{ + "server": "string", + "version": "string" +} +``` + +

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|OK|[V2ConfigInfo](#schemav2configinfo)| +|default|Default|Error|[V2ErrorResponse](#schemav2errorresponse)| +|5XX|Unknown|Error|[V2ErrorResponse](#schemav2errorresponse)| + + + +

# ledger.v2

+ +## List ledgers + + + +> Code samples + +```http +GET http://localhost:8080/v2 HTTP/1.1 +Host: localhost:8080 +Accept: application/json + +``` + +`GET /v2` + +

### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|pageSize|query|integer(int64)|false|The maximum number of results to return per page.| +|cursor|query|string|false|Parameter used in pagination requests. Maximum page size is set to 15.| + +#### Detailed descriptions + +**pageSize**: The maximum number of results to return per page. + +**cursor**: Parameter used in pagination requests. Maximum page size is set to 15. +Set to the value of next for the next page of results. +Set to the value of previous for the previous page of results. +No other parameters can be set when this parameter is set. + +> Example responses + +> 200 Response + +```json +{ + "cursor": { + "pageSize": 15, + "hasMore": false, + "previous": "YXVsdCBhbmQgYSBtYXhpbXVtIG1heF9yZXN1bHRzLol=", + "next": "", + "data": [ + { + "name": "string", + "addedAt": "2019-08-24T14:15:22Z", + "bucket": "string", + "metadata": { + "admin": "true" + } + } + ] + } +} +``` + +

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|OK|[V2LedgerListResponse](#schemav2ledgerlistresponse)| +|default|Default|Error|[V2ErrorResponse](#schemav2errorresponse)| + + + +## Get a ledger + + + +> Code samples + +```http +GET http://localhost:8080/v2/{ledger} HTTP/1.1 +Host: localhost:8080 +Accept: application/json + +``` + +`GET /v2/{ledger}` + +

### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|ledger|path|string|true|Name of the ledger.| + +> Example responses + +> 200 Response + +```json +{ + "data": { + "name": "string", + "addedAt": "2019-08-24T14:15:22Z", + "bucket": "string", + "metadata": { + "admin": "true" + } + } +} +``` + +

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|OK|[V2GetLedgerResponse](#schemav2getledgerresponse)| +|default|Default|Error|[V2ErrorResponse](#schemav2errorresponse)| + + + +## Create a ledger + + + +> Code samples + +```http +POST http://localhost:8080/v2/{ledger} HTTP/1.1 +Host: localhost:8080 +Content-Type: application/json +Accept: application/json + +``` + +`POST /v2/{ledger}` + +> Body parameter + +```json +{ + "bucket": "string", + "metadata": { + "admin": "true" + }, + "features": { + "property1": "string", + "property2": "string" + } +} +``` + +

### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|body|body|[V2CreateLedgerRequest](#schemav2createledgerrequest)|false|none| +|ledger|path|string|true|Name of the ledger.| + +> Example responses + +> default Response + +```json +{ + "errorCode": "VALIDATION", + "errorMessage": "[VALIDATION] invalid 'cursor' query param", + "details": "https://play.numscript.org/?payload=eyJlcnJvciI6ImFjY291bnQgaGFkIGluc3VmZmljaWVudCBmdW5kcyJ9" +} +``` + +
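A minimal Go sketch of the call above, assuming a locally running ledger at `http://localhost:8080` without authentication; the ledger name `my-ledger`, the bucket `default`, and the metadata values are illustrative.

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"log"
	"net/http"
)

func main() {
	// Request body mirrors V2CreateLedgerRequest; every field is optional here.
	body, _ := json.Marshal(map[string]any{
		"bucket":   "default",                               // illustrative bucket name
		"metadata": map[string]string{"owner": "treasury"},  // illustrative metadata
	})
	resp, err := http.Post("http://localhost:8080/v2/my-ledger", "application/json", bytes.NewReader(body))
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	// A successful creation returns 204 No Content; anything else carries a V2ErrorResponse.
	if resp.StatusCode != http.StatusNoContent {
		var apiErr struct {
			ErrorCode    string `json:"errorCode"`
			ErrorMessage string `json:"errorMessage"`
		}
		json.NewDecoder(resp.Body).Decode(&apiErr)
		log.Fatalf("create failed: %s (%s)", apiErr.ErrorMessage, apiErr.ErrorCode)
	}
	fmt.Println("ledger created")
}
```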

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|204|[No Content](https://tools.ietf.org/html/rfc7231#section-6.3.5)|OK|None| +|default|Default|Error|[V2ErrorResponse](#schemav2errorresponse)| + + + +## Update ledger metadata + + + +> Code samples + +```http +PUT http://localhost:8080/v2/{ledger}/metadata HTTP/1.1 +Host: localhost:8080 +Content-Type: application/json +Accept: application/json + +``` + +`PUT /v2/{ledger}/metadata` + +> Body parameter + +```json +{ + "admin": "true" +} +``` + +

### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|body|body|[V2Metadata](#schemav2metadata)|false|none| +|ledger|path|string|true|Name of the ledger.| + +> Example responses + +> default Response + +```json +{ + "errorCode": "VALIDATION", + "errorMessage": "[VALIDATION] invalid 'cursor' query param", + "details": "https://play.numscript.org/?payload=eyJlcnJvciI6ImFjY291bnQgaGFkIGluc3VmZmljaWVudCBmdW5kcyJ9" +} +``` + +

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|204|[No Content](https://tools.ietf.org/html/rfc7231#section-6.3.5)|OK|None| +|default|Default|Error|[V2ErrorResponse](#schemav2errorresponse)| +|5XX|Unknown|Error|[V2ErrorResponse](#schemav2errorresponse)| + + + +## Delete ledger metadata by key + + + +> Code samples + +```http +DELETE http://localhost:8080/v2/{ledger}/metadata/{key} HTTP/1.1 +Host: localhost:8080 +Accept: application/json + +``` + +`DELETE /v2/{ledger}/metadata/{key}` + +

### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|ledger|path|string|true|Name of the ledger.| +|key|path|string|true|Key to remove.| + +> Example responses + +> default Response + +```json +{ + "errorCode": "VALIDATION", + "errorMessage": "[VALIDATION] invalid 'cursor' query param", + "details": "https://play.numscript.org/?payload=eyJlcnJvciI6ImFjY291bnQgaGFkIGluc3VmZmljaWVudCBmdW5kcyJ9" +} +``` + +

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|204|[No Content](https://tools.ietf.org/html/rfc7231#section-6.3.5)|OK|None| +|default|Default|Error|[V2ErrorResponse](#schemav2errorresponse)| + + + +## Get information about a ledger + + + +> Code samples + +```http +GET http://localhost:8080/v2/{ledger}/_info HTTP/1.1 +Host: localhost:8080 +Accept: application/json + +``` + +`GET /v2/{ledger}/_info` + +

### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|ledger|path|string|true|Name of the ledger.| + +> Example responses + +> 200 Response + +```json +{ + "data": { + "name": "ledger001", + "storage": { + "migrations": [ + { + "version": 11, + "name": "migrations:001", + "date": "2019-08-24T14:15:22Z", + "state": "TO DO" + } + ] + } + } +} +``` + +

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|OK|[V2LedgerInfoResponse](#schemav2ledgerinforesponse)| +|default|Default|Error|[V2ErrorResponse](#schemav2errorresponse)| + + + +## Bulk request + + + +> Code samples + +```http +POST http://localhost:8080/v2/{ledger}/_bulk HTTP/1.1 +Host: localhost:8080 +Content-Type: application/json +Accept: application/json + +``` + +`POST /v2/{ledger}/_bulk` + +> Body parameter + +```json +[ + { + "action": "string", + "ik": "string", + "data": { + "timestamp": "2019-08-24T14:15:22Z", + "postings": [ + { + "amount": 100, + "asset": "COIN", + "destination": "users:002", + "source": "users:001" + } + ], + "script": { + "plain": "vars {\naccount $user\n}\nsend [COIN 10] (\n\tsource = @world\n\tdestination = $user\n)\n", + "vars": { + "user": "users:042" + } + }, + "reference": "ref:001", + "metadata": { + "admin": "true" + } + } + } +] +``` + +

### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|ledger|path|string|true|Name of the ledger.| +|body|body|[V2Bulk](#schemav2bulk)|false|none| + +> Example responses + +> 200 Response + +```json +{ + "data": [ + { + "responseType": "string", + "data": { + "insertedAt": "2019-08-24T14:15:22Z", + "timestamp": "2019-08-24T14:15:22Z", + "postings": [ + { + "amount": 100, + "asset": "COIN", + "destination": "users:002", + "source": "users:001" + } + ], + "reference": "ref:001", + "metadata": { + "admin": "true" + }, + "id": 0, + "reverted": true, + "revertedAt": "2019-08-24T14:15:22Z", + "preCommitVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + }, + "postCommitVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + }, + "preCommitEffectiveVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + }, + "postCommitEffectiveVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + } + } + } + ] +} +``` + +
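A minimal Go sketch posting a single bulk element, assuming a local unauthenticated ledger named `my-ledger`. The concrete `action` values are not enumerated in this reference; `CREATE_TRANSACTION` is an assumption matching the `V2BulkElementCreateTransaction` variant.

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"log"
	"net/http"
)

func main() {
	// One bulk element creating a transaction. The action name is an
	// assumption (see lead-in); "ik" is this element's idempotency key.
	bulk := []map[string]any{{
		"action": "CREATE_TRANSACTION",
		"ik":     "bulk-001",
		"data": map[string]any{
			"postings": []map[string]any{{
				"amount":      100,
				"asset":       "COIN",
				"source":      "users:001",
				"destination": "users:002",
			}},
			"metadata": map[string]string{"admin": "true"},
		},
	}}
	body, _ := json.Marshal(bulk)
	resp, err := http.Post("http://localhost:8080/v2/my-ledger/_bulk", "application/json", bytes.NewReader(body))
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	// 200 means every element was processed; 400 still returns a V2BulkResponse
	// whose per-element results must be inspected individually.
	fmt.Println("bulk status:", resp.Status)
}
```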

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|OK|[V2BulkResponse](#schemav2bulkresponse)| +|400|[Bad Request](https://tools.ietf.org/html/rfc7231#section-6.5.1)|OK|[V2BulkResponse](#schemav2bulkresponse)| +|default|Default|Error|[V2ErrorResponse](#schemav2errorresponse)| + + + +## Count the accounts from a ledger + + + +> Code samples + +```http +HEAD http://localhost:8080/v2/{ledger}/accounts HTTP/1.1 +Host: localhost:8080 +Content-Type: application/json +Accept: application/json + +``` + +`HEAD /v2/{ledger}/accounts` + +> Body parameter + +```json +{} +``` + +

### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|ledger|path|string|true|Name of the ledger.| +|pit|query|string(date-time)|false|none| +|body|body|object|false|none| + +> Example responses + +> default Response + +```json +{ + "errorCode": "VALIDATION", + "errorMessage": "[VALIDATION] invalid 'cursor' query param", + "details": "https://play.numscript.org/?payload=eyJlcnJvciI6ImFjY291bnQgaGFkIGluc3VmZmljaWVudCBmdW5kcyJ9" +} +``` + +
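A minimal Go sketch, assuming a local unauthenticated ledger named `my-ledger`, that reads the total from the `Count` response header documented under "Response Headers" below.

```go
package main

import (
	"fmt"
	"log"
	"net/http"
)

func main() {
	// HEAD returns no body; the total is carried in the Count response header.
	req, err := http.NewRequest(http.MethodHead, "http://localhost:8080/v2/my-ledger/accounts", nil)
	if err != nil {
		log.Fatal(err)
	}
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusNoContent {
		log.Fatalf("unexpected status: %s", resp.Status)
	}
	fmt.Println("accounts:", resp.Header.Get("Count"))
}
```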

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|204|[No Content](https://tools.ietf.org/html/rfc7231#section-6.3.5)|OK|None| +|default|Default|Error|[V2ErrorResponse](#schemav2errorresponse)| + +### Response Headers + +|Status|Header|Type|Format|Description| +|---|---|---|---|---| +|204|Count|integer|bigint|none| + + + +## List accounts from a ledger + + + +> Code samples + +```http +GET http://localhost:8080/v2/{ledger}/accounts HTTP/1.1 +Host: localhost:8080 +Content-Type: application/json +Accept: application/json + +``` + +`GET /v2/{ledger}/accounts` + +List accounts from a ledger, sorted by address in descending order. + +> Body parameter + +```json +{} +``` + +

### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|ledger|path|string|true|Name of the ledger.| +|pageSize|query|integer(int64)|false|The maximum number of results to return per page.| +|cursor|query|string|false|Parameter used in pagination requests. Maximum page size is set to 15.| +|expand|query|string|false|none| +|pit|query|string(date-time)|false|none| +|body|body|object|false|none| + +#### Detailed descriptions + +**pageSize**: The maximum number of results to return per page. + +**cursor**: Parameter used in pagination requests. Maximum page size is set to 15. +Set to the value of next for the next page of results. +Set to the value of previous for the previous page of results. +No other parameters can be set when this parameter is set. + +> Example responses + +> 200 Response + +```json +{ + "cursor": { + "pageSize": 15, + "hasMore": false, + "previous": "YXVsdCBhbmQgYSBtYXhpbXVtIG1heF9yZXN1bHRzLol=", + "next": "", + "data": [ + { + "address": "users:001", + "metadata": { + "admin": "true" + }, + "volumes": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + }, + "EUR": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "effectiveVolumes": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + }, + "EUR": { + "input": 100, + "output": 10, + "balance": 90 + } + } + } + ] + } +} +``` + +
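A minimal Go sketch of the listing above, assuming a local unauthenticated ledger named `my-ledger`; the `expand=volumes` value is an assumption, since accepted `expand` values are not enumerated in this reference.

```go
package main

import (
	"encoding/json"
	"fmt"
	"log"
	"net/http"
	"net/url"
	"time"
)

func main() {
	// Query the account list as of a past point in time via the `pit`
	// date-time parameter; `expand` asks for per-asset volumes (assumed value).
	q := url.Values{
		"pageSize": {"15"},
		"pit":      {time.Now().Add(-24 * time.Hour).UTC().Format(time.RFC3339)},
		"expand":   {"volumes"},
	}
	resp, err := http.Get("http://localhost:8080/v2/my-ledger/accounts?" + q.Encode())
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	var out struct {
		Cursor struct {
			Data []struct {
				Address string                      `json:"address"`
				Volumes map[string]map[string]int64 `json:"volumes"`
			} `json:"data"`
		} `json:"cursor"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		log.Fatal(err)
	}
	for _, a := range out.Cursor.Data {
		fmt.Println(a.Address, a.Volumes)
	}
}
```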

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|OK|[V2AccountsCursorResponse](#schemav2accountscursorresponse)| +|default|Default|Error|[V2ErrorResponse](#schemav2errorresponse)| + + + +## Get account by its address + + + +> Code samples + +```http +GET http://localhost:8080/v2/{ledger}/accounts/{address} HTTP/1.1 +Host: localhost:8080 +Accept: application/json + +``` + +`GET /v2/{ledger}/accounts/{address}` + +

### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|ledger|path|string|true|Name of the ledger.| +|address|path|string|true|Exact address of the account. It must match the following regular expressions pattern:| +|expand|query|string|false|none| +|pit|query|string(date-time)|false|none| + +#### Detailed descriptions + +**address**: Exact address of the account. It must match the following regular expressions pattern: +``` +^\w+(:\w+)*$ +``` + +> Example responses + +> 200 Response + +```json +{ + "data": { + "address": "users:001", + "metadata": { + "admin": "true" + }, + "volumes": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + }, + "EUR": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "effectiveVolumes": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + }, + "EUR": { + "input": 100, + "output": 10, + "balance": 90 + } + } + } +} +``` + +

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|OK|[V2AccountResponse](#schemav2accountresponse)| +|default|Default|Error|[V2ErrorResponse](#schemav2errorresponse)| + + + +## Add metadata to an account + + + +> Code samples + +```http +POST http://localhost:8080/v2/{ledger}/accounts/{address}/metadata HTTP/1.1 +Host: localhost:8080 +Content-Type: application/json +Accept: application/json +Idempotency-Key: string + +``` + +`POST /v2/{ledger}/accounts/{address}/metadata` + +> Body parameter + +```json +{ + "admin": "true" +} +``` + +

### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|ledger|path|string|true|Name of the ledger.| +|address|path|string|true|Exact address of the account. It must match the following regular expressions pattern:| +|dryRun|query|boolean|false|Set the dry run mode. Dry run mode doesn't add the logs to the database or publish a message to the message broker.| +|Idempotency-Key|header|string|false|Use an idempotency key| +|body|body|[V2Metadata](#schemav2metadata)|true|metadata| + +#### Detailed descriptions + +**address**: Exact address of the account. It must match the following regular expressions pattern: +``` +^\w+(:\w+)*$ +``` + +> Example responses + +> default Response + +```json +{ + "errorCode": "VALIDATION", + "errorMessage": "[VALIDATION] invalid 'cursor' query param", + "details": "https://play.numscript.org/?payload=eyJlcnJvciI6ImFjY291bnQgaGFkIGluc3VmZmljaWVudCBmdW5kcyJ9" +} +``` + +
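A minimal Go sketch combining `dryRun` and `Idempotency-Key` as described above, assuming a local unauthenticated ledger named `my-ledger`; the idempotency key value is illustrative.

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"log"
	"net/http"
)

func main() {
	body, _ := json.Marshal(map[string]string{"admin": "true"})
	// dryRun=true validates and computes the result without adding logs
	// to the database or publishing to the message broker.
	url := "http://localhost:8080/v2/my-ledger/accounts/users:001/metadata?dryRun=true"
	req, err := http.NewRequest(http.MethodPost, url, bytes.NewReader(body))
	if err != nil {
		log.Fatal(err)
	}
	req.Header.Set("Content-Type", "application/json")
	// Replaying the same request with the same Idempotency-Key is safe.
	req.Header.Set("Idempotency-Key", "set-admin-users-001")
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status) // 204 No Content on success
}
```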

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|204|[No Content](https://tools.ietf.org/html/rfc7231#section-6.3.5)|No Content|None| +|default|Default|Error|[V2ErrorResponse](#schemav2errorresponse)| + +

### Response Schema

+ + + +## Delete metadata by key + + + +> Code samples + +```http +DELETE http://localhost:8080/v2/{ledger}/transactions/{id}/metadata/{key} HTTP/1.1 +Host: localhost:8080 +Accept: application/json + +``` + +`DELETE /v2/{ledger}/transactions/{id}/metadata/{key}` + +Delete metadata by key + +

### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|ledger|path|string|true|Name of the ledger.| +|id|path|integer(bigint)|true|Transaction ID.| +|key|path|string|true|The key to remove.| + +> Example responses + +> default Response + +```json +{ + "errorCode": "VALIDATION", + "errorMessage": "[VALIDATION] invalid 'cursor' query param", + "details": "https://play.numscript.org/?payload=eyJlcnJvciI6ImFjY291bnQgaGFkIGluc3VmZmljaWVudCBmdW5kcyJ9" +} +``` + +

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|2XX|Unknown|Key deleted|None| +|default|Default|Error|[V2ErrorResponse](#schemav2errorresponse)| + +

### Response Schema

+ + + +## Get statistics from a ledger + + + +> Code samples + +```http +GET http://localhost:8080/v2/{ledger}/stats HTTP/1.1 +Host: localhost:8080 +Accept: application/json + +``` + +`GET /v2/{ledger}/stats` + +Get statistics from a ledger. (aggregate metrics on accounts and transactions) + +

### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|ledger|path|string|true|name of the ledger| + +> Example responses + +> 200 Response + +```json +{ + "data": { + "accounts": 0, + "transactions": 0 + } +} +``` + +

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|OK|[V2StatsResponse](#schemav2statsresponse)| +|default|Default|Error|[V2ErrorResponse](#schemav2errorresponse)| + + + +## Count the transactions from a ledger + + + +> Code samples + +```http +HEAD http://localhost:8080/v2/{ledger}/transactions HTTP/1.1 +Host: localhost:8080 +Content-Type: application/json +Accept: application/json + +``` + +`HEAD /v2/{ledger}/transactions` + +> Body parameter + +```json +{} +``` + +

### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|ledger|path|string|true|Name of the ledger.| +|pit|query|string(date-time)|false|none| +|body|body|object|false|none| + +> Example responses + +> default Response + +```json +{ + "errorCode": "VALIDATION", + "errorMessage": "[VALIDATION] invalid 'cursor' query param", + "details": "https://play.numscript.org/?payload=eyJlcnJvciI6ImFjY291bnQgaGFkIGluc3VmZmljaWVudCBmdW5kcyJ9" +} +``` + +

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|204|[No Content](https://tools.ietf.org/html/rfc7231#section-6.3.5)|OK|None| +|default|Default|Error|[V2ErrorResponse](#schemav2errorresponse)| + +### Response Headers + +|Status|Header|Type|Format|Description| +|---|---|---|---|---| +|204|Count|integer|int64|none| + + + +## List transactions from a ledger + + + +> Code samples + +```http +GET http://localhost:8080/v2/{ledger}/transactions HTTP/1.1 +Host: localhost:8080 +Content-Type: application/json +Accept: application/json + +``` + +`GET /v2/{ledger}/transactions` + +List transactions from a ledger, sorted by id in descending order. + +> Body parameter + +```json +{} +``` + +

### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|ledger|path|string|true|Name of the ledger.| +|pageSize|query|integer(int64)|false|The maximum number of results to return per page.| +|cursor|query|string|false|Parameter used in pagination requests. Maximum page size is set to 15.| +|expand|query|string|false|none| +|pit|query|string(date-time)|false|none| +|order|query|string|false|none| +|reverse|query|boolean|false|none| +|body|body|object|false|none| + +#### Detailed descriptions + +**pageSize**: The maximum number of results to return per page. + +**cursor**: Parameter used in pagination requests. Maximum page size is set to 15. +Set to the value of next for the next page of results. +Set to the value of previous for the previous page of results. +No other parameters can be set when this parameter is set. + +#### Enumerated Values + +|Parameter|Value| +|---|---| +|order|effective| + +> Example responses + +> 200 Response + +```json +{ + "cursor": { + "pageSize": 15, + "hasMore": false, + "previous": "YXVsdCBhbmQgYSBtYXhpbXVtIG1heF9yZXN1bHRzLol=", + "next": "", + "data": [ + { + "insertedAt": "2019-08-24T14:15:22Z", + "timestamp": "2019-08-24T14:15:22Z", + "postings": [ + { + "amount": 100, + "asset": "COIN", + "destination": "users:002", + "source": "users:001" + } + ], + "reference": "ref:001", + "metadata": { + "admin": "true" + }, + "id": 0, + "reverted": true, + "revertedAt": "2019-08-24T14:15:22Z", + "preCommitVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + }, + "postCommitVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + }, + "preCommitEffectiveVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + }, + "postCommitEffectiveVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + } + } + ] + } +} +``` + +

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|OK|[V2TransactionsCursorResponse](#schemav2transactionscursorresponse)| +|default|Default|Error|[V2ErrorResponse](#schemav2errorresponse)| + + + +## Create a new transaction to a ledger + + + +> Code samples + +```http +POST http://localhost:8080/v2/{ledger}/transactions HTTP/1.1 +Host: localhost:8080 +Content-Type: application/json +Accept: application/json +Idempotency-Key: string + +``` + +`POST /v2/{ledger}/transactions` + +> Body parameter + +```json +{ + "timestamp": "2019-08-24T14:15:22Z", + "postings": [ + { + "amount": 100, + "asset": "COIN", + "destination": "users:002", + "source": "users:001" + } + ], + "script": { + "plain": "vars {\naccount $user\n}\nsend [COIN 10] (\n\tsource = @world\n\tdestination = $user\n)\n", + "vars": { + "user": "users:042" + } + }, + "reference": "ref:001", + "metadata": { + "admin": "true" + } +} +``` + +

### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|ledger|path|string|true|Name of the ledger.| +|dryRun|query|boolean|false|Set the dryRun mode. dry run mode doesn't add the logs to the database or publish a message to the message broker.| +|Idempotency-Key|header|string|false|Use an idempotency key| +|force|query|boolean|false|Disable balance checks when passing postings| +|body|body|[V2PostTransaction](#schemav2posttransaction)|true|The request body must contain at least one of the following objects:| + +#### Detailed descriptions + +**body**: The request body must contain at least one of the following objects: + - `postings`: suitable for simple transactions + - `script`: enabling more complex transactions with Numscript + +> Example responses + +> 200 Response + +```json +{ + "data": { + "insertedAt": "2019-08-24T14:15:22Z", + "timestamp": "2019-08-24T14:15:22Z", + "postings": [ + { + "amount": 100, + "asset": "COIN", + "destination": "users:002", + "source": "users:001" + } + ], + "reference": "ref:001", + "metadata": { + "admin": "true" + }, + "id": 0, + "reverted": true, + "revertedAt": "2019-08-24T14:15:22Z", + "preCommitVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + }, + "postCommitVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + }, + "preCommitEffectiveVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + }, + "postCommitEffectiveVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + } + } +} +``` + +
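A minimal Go sketch using the `script` variant described above, with the same Numscript as the body example; it assumes a local unauthenticated ledger named `my-ledger`, and the idempotency key is illustrative. A plain `postings` array would work just as well for the simple case.

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"log"
	"net/http"
)

func main() {
	const script = `vars {
	account $user
}
send [COIN 10] (
	source = @world
	destination = $user
)`
	// A `script` body lets Numscript drive the postings.
	payload := map[string]any{
		"script": map[string]any{
			"plain": script,
			"vars":  map[string]string{"user": "users:042"},
		},
		"metadata": map[string]string{"admin": "true"},
	}
	body, _ := json.Marshal(payload)
	req, err := http.NewRequest(http.MethodPost,
		"http://localhost:8080/v2/my-ledger/transactions", bytes.NewReader(body))
	if err != nil {
		log.Fatal(err)
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Idempotency-Key", "tx-ref-001")
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	var out struct {
		Data struct {
			ID       int64 `json:"id"` // bigint in the schema; int64 is enough for this sketch
			Reverted bool  `json:"reverted"`
		} `json:"data"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		log.Fatal(err)
	}
	fmt.Println("created transaction id:", out.Data.ID)
}
```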

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|OK|[V2CreateTransactionResponse](#schemav2createtransactionresponse)| +|default|Default|Error|[V2ErrorResponse](#schemav2errorresponse)| + + + +## Get transaction from a ledger by its ID + + + +> Code samples + +```http +GET http://localhost:8080/v2/{ledger}/transactions/{id} HTTP/1.1 +Host: localhost:8080 +Accept: application/json + +``` + +`GET /v2/{ledger}/transactions/{id}` + +

### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|ledger|path|string|true|Name of the ledger.| +|id|path|integer(bigint)|true|Transaction ID.| +|expand|query|string|false|none| +|pit|query|string(date-time)|false|none| + +> Example responses + +> 200 Response + +```json +{ + "data": { + "insertedAt": "2019-08-24T14:15:22Z", + "timestamp": "2019-08-24T14:15:22Z", + "postings": [ + { + "amount": 100, + "asset": "COIN", + "destination": "users:002", + "source": "users:001" + } + ], + "reference": "ref:001", + "metadata": { + "admin": "true" + }, + "id": 0, + "reverted": true, + "revertedAt": "2019-08-24T14:15:22Z", + "preCommitVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + }, + "postCommitVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + }, + "preCommitEffectiveVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + }, + "postCommitEffectiveVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + } + } +} +``` + +

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|OK|[V2GetTransactionResponse](#schemav2gettransactionresponse)| +|default|Default|Error|[V2ErrorResponse](#schemav2errorresponse)| + + + +## Set the metadata of a transaction by its ID + + + +> Code samples + +```http +POST http://localhost:8080/v2/{ledger}/transactions/{id}/metadata HTTP/1.1 +Host: localhost:8080 +Content-Type: application/json +Accept: application/json +Idempotency-Key: string + +``` + +`POST /v2/{ledger}/transactions/{id}/metadata` + +> Body parameter + +```json +{ + "admin": "true" +} +``` + +

### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|ledger|path|string|true|Name of the ledger.| +|id|path|integer(bigint)|true|Transaction ID.| +|dryRun|query|boolean|false|Set the dryRun mode. Dry run mode doesn't add the logs to the database or publish a message to the message broker.| +|Idempotency-Key|header|string|false|Use an idempotency key| +|body|body|[V2Metadata](#schemav2metadata)|false|metadata| + +> Example responses + +> default Response + +```json +{ + "errorCode": "VALIDATION", + "errorMessage": "[VALIDATION] invalid 'cursor' query param", + "details": "https://play.numscript.org/?payload=eyJlcnJvciI6ImFjY291bnQgaGFkIGluc3VmZmljaWVudCBmdW5kcyJ9" +} +``` + +

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|204|[No Content](https://tools.ietf.org/html/rfc7231#section-6.3.5)|No Content|None| +|default|Default|Error|[V2ErrorResponse](#schemav2errorresponse)| + +

### Response Schema

+ + + +## Revert a ledger transaction by its ID + + + +> Code samples + +```http +POST http://localhost:8080/v2/{ledger}/transactions/{id}/revert HTTP/1.1 +Host: localhost:8080 +Accept: application/json + +``` + +`POST /v2/{ledger}/transactions/{id}/revert` + +

### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|ledger|path|string|true|Name of the ledger.| +|id|path|integer(bigint)|true|Transaction ID.| +|force|query|boolean|false|Force revert| +|atEffectiveDate|query|boolean|false|Revert transaction at effective date of the original tx| +|dryRun|query|boolean|false|Set the dryRun mode. dry run mode doesn't add the logs to the database or publish a message to the message broker.| + +> Example responses + +> 201 Response + +```json +{ + "data": { + "insertedAt": "2019-08-24T14:15:22Z", + "timestamp": "2019-08-24T14:15:22Z", + "postings": [ + { + "amount": 100, + "asset": "COIN", + "destination": "users:002", + "source": "users:001" + } + ], + "reference": "ref:001", + "metadata": { + "admin": "true" + }, + "id": 0, + "reverted": true, + "revertedAt": "2019-08-24T14:15:22Z", + "preCommitVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + }, + "postCommitVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + }, + "preCommitEffectiveVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + }, + "postCommitEffectiveVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + } + } +} +``` + +
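A minimal Go sketch of a revert, assuming a local unauthenticated ledger named `my-ledger`; transaction ID `1234` is illustrative.

```go
package main

import (
	"fmt"
	"log"
	"net/http"
)

func main() {
	// force disables balance checks for the reverting postings, and
	// atEffectiveDate dates the revert at the original transaction's
	// effective date instead of the current time.
	u := "http://localhost:8080/v2/my-ledger/transactions/1234/revert?force=false&atEffectiveDate=true"
	req, err := http.NewRequest(http.MethodPost, u, nil)
	if err != nil {
		log.Fatal(err)
	}
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	// 201 Created returns the reverting transaction (V2CreateTransactionResponse shape).
	fmt.Println("status:", resp.Status)
}
```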

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|201|[Created](https://tools.ietf.org/html/rfc7231#section-6.3.2)|OK|[V2CreateTransactionResponse](#schemav2createtransactionresponse)| +|default|Default|Error|[V2ErrorResponse](#schemav2errorresponse)| + + + +## Get the aggregated balances from selected accounts + + + +> Code samples + +```http +GET http://localhost:8080/v2/{ledger}/aggregate/balances HTTP/1.1 +Host: localhost:8080 +Content-Type: application/json +Accept: application/json + +``` + +`GET /v2/{ledger}/aggregate/balances` + +> Body parameter + +```json +{} +``` + +

### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|ledger|path|string|true|Name of the ledger.| +|pit|query|string(date-time)|false|none| +|useInsertionDate|query|boolean|false|Use insertion date instead of effective date| +|body|body|object|false|none| + +> Example responses + +> 200 Response + +```json +{ + "data": { + "USD": 100, + "EUR": 12 + } +} +``` + +
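A minimal Go sketch, assuming a local unauthenticated ledger named `my-ledger`, that decodes the per-asset map returned by this endpoint.

```go
package main

import (
	"encoding/json"
	"fmt"
	"log"
	"net/http"
)

func main() {
	// With useInsertionDate=true the aggregation is based on insertion dates
	// rather than effective dates.
	resp, err := http.Get("http://localhost:8080/v2/my-ledger/aggregate/balances?useInsertionDate=true")
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	var out struct {
		// V2AssetsBalances: asset -> aggregated balance
		Data map[string]int64 `json:"data"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		log.Fatal(err)
	}
	fmt.Println(out.Data) // e.g. map[EUR:12 USD:100]
}
```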

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|OK|[V2AggregateBalancesResponse](#schemav2aggregatebalancesresponse)| +|default|Default|Error|[V2ErrorResponse](#schemav2errorresponse)| + + + +## Get list of volumes with balances for (account/asset) + + + +> Code samples + +```http +GET http://localhost:8080/v2/{ledger}/volumes HTTP/1.1 +Host: localhost:8080 +Content-Type: application/json +Accept: application/json + +``` + +`GET /v2/{ledger}/volumes` + +> Body parameter + +```json +{} +``` + +

### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|pageSize|query|integer(int64)|false|The maximum number of results to return per page.| +|cursor|query|string|false|Parameter used in pagination requests. Maximum page size is set to 15.| +|ledger|path|string|true|Name of the ledger.| +|endTime|query|string(date-time)|false|none| +|startTime|query|string(date-time)|false|none| +|insertionDate|query|boolean|false|Use insertion date instead of effective date| +|groupBy|query|integer(int64)|false|Group volumes and balance by the level of the segment of the address| +|body|body|object|false|none| + +#### Detailed descriptions + +**pageSize**: The maximum number of results to return per page. + +**cursor**: Parameter used in pagination requests. Maximum page size is set to 15. +Set to the value of next for the next page of results. +Set to the value of previous for the previous page of results. +No other parameters can be set when this parameter is set. + +> Example responses + +> 200 Response + +```json +{ + "cursor": { + "pageSize": 15, + "hasMore": false, + "previous": "YXVsdCBhbmQgYSBtYXhpbXVtIG1heF9yZXN1bHRzLol=", + "next": "", + "data": [ + { + "account": "string", + "asset": "string", + "input": 0, + "output": 0, + "balance": 0 + } + ] + } +} +``` + +
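A minimal Go sketch, assuming a local unauthenticated ledger named `my-ledger`, and assuming that `groupBy=1` groups volumes by the first segment of the account address (the reference only says it groups by segment level).

```go
package main

import (
	"encoding/json"
	"fmt"
	"log"
	"net/http"
	"net/url"
)

func main() {
	// Assumed: groupBy=1 folds volumes to the first address segment,
	// e.g. all "orders:*" accounts reported under "orders".
	q := url.Values{
		"groupBy":       {"1"},
		"insertionDate": {"true"},
		"pageSize":      {"15"},
	}
	resp, err := http.Get("http://localhost:8080/v2/my-ledger/volumes?" + q.Encode())
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	var out struct {
		Cursor struct {
			Data []struct {
				Account string `json:"account"`
				Asset   string `json:"asset"`
				Input   int64  `json:"input"`
				Output  int64  `json:"output"`
				Balance int64  `json:"balance"`
			} `json:"data"`
		} `json:"cursor"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		log.Fatal(err)
	}
	for _, v := range out.Cursor.Data {
		fmt.Printf("%s %s balance=%d\n", v.Account, v.Asset, v.Balance)
	}
}
```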

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|OK|[V2VolumesWithBalanceCursorResponse](#schemav2volumeswithbalancecursorresponse)| +|default|Default|Error|[V2ErrorResponse](#schemav2errorresponse)| + + + +## List the logs from a ledger + + + +> Code samples + +```http +GET http://localhost:8080/v2/{ledger}/logs HTTP/1.1 +Host: localhost:8080 +Content-Type: application/json +Accept: application/json + +``` + +`GET /v2/{ledger}/logs` + +List the logs from a ledger, sorted by ID in descending order. + +> Body parameter + +```json +{} +``` + +

### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|ledger|path|string|true|Name of the ledger.| +|pageSize|query|integer(int64)|false|The maximum number of results to return per page.| +|cursor|query|string|false|Parameter used in pagination requests. Maximum page size is set to 15.| +|pit|query|string(date-time)|false|none| +|body|body|object|false|none| + +#### Detailed descriptions + +**pageSize**: The maximum number of results to return per page. + +**cursor**: Parameter used in pagination requests. Maximum page size is set to 15. +Set to the value of next for the next page of results. +Set to the value of previous for the previous page of results. +No other parameters can be set when this parameter is set. + +> Example responses + +> 200 Response + +```json +{ + "cursor": { + "pageSize": 15, + "hasMore": false, + "previous": "YXVsdCBhbmQgYSBtYXhpbXVtIG1heF9yZXN1bHRzLol=", + "next": "", + "data": [ + { + "id": 1234, + "type": "NEW_TRANSACTION", + "data": {}, + "hash": "9ee060170400f556b7e1575cb13f9db004f150a08355c7431c62bc639166431e", + "date": "2019-08-24T14:15:22Z" + } + ] + } +} +``` + +

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|OK|[V2LogsCursorResponse](#schemav2logscursorresponse)| +|default|Default|Error|[V2ErrorResponse](#schemav2errorresponse)| + + + +## v2ImportLogs + + + +> Code samples + +```http +POST http://localhost:8080/v2/{ledger}/logs/import HTTP/1.1 +Host: localhost:8080 +Content-Type: application/octet-stream +Accept: application/json + +``` + +`POST /v2/{ledger}/logs/import` + +> Body parameter + +```yaml +string + +``` + +

### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|ledger|path|string|true|Name of the ledger.| +|body|body|string|false|none| + +> Example responses + +> default Response + +```json +{ + "errorCode": "VALIDATION", + "errorMessage": "[VALIDATION] invalid 'cursor' query param", + "details": "https://play.numscript.org/?payload=eyJlcnJvciI6ImFjY291bnQgaGFkIGluc3VmZmljaWVudCBmdW5kcyJ9" +} +``` + +
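A minimal Go sketch streaming an export file back into a ledger, assuming a local unauthenticated ledger named `my-ledger`; the file name `ledger-logs.export` is hypothetical (for example, a file produced by the export endpoint below).

```go
package main

import (
	"fmt"
	"log"
	"net/http"
	"os"
)

func main() {
	// Stream a previously exported log file back into a ledger as an
	// application/octet-stream body.
	f, err := os.Open("ledger-logs.export") // hypothetical export file
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()
	req, err := http.NewRequest(http.MethodPost,
		"http://localhost:8080/v2/my-ledger/logs/import", f)
	if err != nil {
		log.Fatal(err)
	}
	req.Header.Set("Content-Type", "application/octet-stream")
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusNoContent {
		log.Fatalf("import failed: %s", resp.Status)
	}
	fmt.Println("import OK")
}
```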

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|204|[No Content](https://tools.ietf.org/html/rfc7231#section-6.3.5)|Import OK|None| +|default|Default|Error|[V2ErrorResponse](#schemav2errorresponse)| + + + +## Export logs + + + +> Code samples + +```http +POST http://localhost:8080/v2/{ledger}/logs/export HTTP/1.1 +Host: localhost:8080 +Accept: application/octet-stream + +``` + +`POST /v2/{ledger}/logs/export` + +

### Parameters

+ +|Name|In|Type|Required|Description| +|---|---|---|---|---| +|ledger|path|string|true|Name of the ledger.| + +> Example responses + +> default Response + +

### Responses

+ +|Status|Meaning|Description|Schema| +|---|---|---|---| +|200|[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1)|Import OK|None| +|default|Default|Error|string| + + + +# Schemas + +

## V2AccountsCursorResponse

+ + + + + + +```json +{ + "cursor": { + "pageSize": 15, + "hasMore": false, + "previous": "YXVsdCBhbmQgYSBtYXhpbXVtIG1heF9yZXN1bHRzLol=", + "next": "", + "data": [ + { + "address": "users:001", + "metadata": { + "admin": "true" + }, + "volumes": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + }, + "EUR": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "effectiveVolumes": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + }, + "EUR": { + "input": 100, + "output": 10, + "balance": 90 + } + } + } + ] + } +} + +``` + +### Properties + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|cursor|object|true|none|none| +|» pageSize|integer(int64)|true|none|none| +|» hasMore|boolean|true|none|none| +|» previous|string|false|none|none| +|» next|string|false|none|none| +|» data|[[V2Account](#schemav2account)]|true|none|none| + +

## V2TransactionsCursorResponse

+ + + + + + +```json +{ + "cursor": { + "pageSize": 15, + "hasMore": false, + "previous": "YXVsdCBhbmQgYSBtYXhpbXVtIG1heF9yZXN1bHRzLol=", + "next": "", + "data": [ + { + "insertedAt": "2019-08-24T14:15:22Z", + "timestamp": "2019-08-24T14:15:22Z", + "postings": [ + { + "amount": 100, + "asset": "COIN", + "destination": "users:002", + "source": "users:001" + } + ], + "reference": "ref:001", + "metadata": { + "admin": "true" + }, + "id": 0, + "reverted": true, + "revertedAt": "2019-08-24T14:15:22Z", + "preCommitVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + }, + "postCommitVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + }, + "preCommitEffectiveVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + }, + "postCommitEffectiveVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + } + } + ] + } +} + +``` + +### Properties + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|cursor|object|true|none|none| +|» pageSize|integer(int64)|true|none|none| +|» hasMore|boolean|true|none|none| +|» previous|string|false|none|none| +|» next|string|false|none|none| +|» data|[[V2Transaction](#schemav2transaction)]|true|none|none| + +

## V2LogsCursorResponse

+ + + + + + +```json +{ + "cursor": { + "pageSize": 15, + "hasMore": false, + "previous": "YXVsdCBhbmQgYSBtYXhpbXVtIG1heF9yZXN1bHRzLol=", + "next": "", + "data": [ + { + "id": 1234, + "type": "NEW_TRANSACTION", + "data": {}, + "hash": "9ee060170400f556b7e1575cb13f9db004f150a08355c7431c62bc639166431e", + "date": "2019-08-24T14:15:22Z" + } + ] + } +} + +``` + +### Properties + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|cursor|object|true|none|none| +|» pageSize|integer(int64)|true|none|none| +|» hasMore|boolean|true|none|none| +|» previous|string|false|none|none| +|» next|string|false|none|none| +|» data|[[V2Log](#schemav2log)]|true|none|none| + +

## V2AccountResponse

+ + + + + + +```json +{ + "data": { + "address": "users:001", + "metadata": { + "admin": "true" + }, + "volumes": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + }, + "EUR": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "effectiveVolumes": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + }, + "EUR": { + "input": 100, + "output": 10, + "balance": 90 + } + } + } +} + +``` + +### Properties + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|data|[V2Account](#schemav2account)|true|none|none| + +

## V2AggregateBalancesResponse

+ + + + + + +```json +{ + "data": { + "USD": 100, + "EUR": 12 + } +} + +``` + +### Properties + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|data|[V2AssetsBalances](#schemav2assetsbalances)|true|none|none| + +

## V2VolumesWithBalanceCursorResponse

+ + + + + + +```json +{ + "cursor": { + "pageSize": 15, + "hasMore": false, + "previous": "YXVsdCBhbmQgYSBtYXhpbXVtIG1heF9yZXN1bHRzLol=", + "next": "", + "data": [ + { + "account": "string", + "asset": "string", + "input": 0, + "output": 0, + "balance": 0 + } + ] + } +} + +``` + +### Properties + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|cursor|object|true|none|none| +|» pageSize|integer(int64)|true|none|none| +|» hasMore|boolean|true|none|none| +|» previous|string|false|none|none| +|» next|string|false|none|none| +|» data|[[V2VolumesWithBalance](#schemav2volumeswithbalance)]|true|none|none| + +

## V2VolumesWithBalance

+ + + + + + +```json +{ + "account": "string", + "asset": "string", + "input": 0, + "output": 0, + "balance": 0 +} + +``` + +### Properties + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|account|string|true|none|none| +|asset|string|true|none|none| +|input|integer(bigint)|true|none|none| +|output|integer(bigint)|true|none|none| +|balance|integer(bigint)|true|none|none| + +

## V2Metadata

+ + + + + + +```json +{ + "admin": "true" +} + +``` + +### Properties + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|**additionalProperties**|string|false|none|none| + +

## V2ConfigInfo

+ + + + + + +```json +{ + "server": "string", + "version": "string" +} + +``` + +### Properties + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|server|string|true|none|none| +|version|string|true|none|none| + +

## V2Account

+ + + + + + +```json +{ + "address": "users:001", + "metadata": { + "admin": "true" + }, + "volumes": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + }, + "EUR": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "effectiveVolumes": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + }, + "EUR": { + "input": 100, + "output": 10, + "balance": 90 + } + } +} + +``` + +### Properties + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|address|string|true|none|none| +|metadata|object|true|none|none| +|» **additionalProperties**|string|false|none|none| +|volumes|[V2Volumes](#schemav2volumes)|false|none|none| +|effectiveVolumes|[V2Volumes](#schemav2volumes)|false|none|none| + +

## V2AssetsBalances

+ + + + + + +```json +{ + "USD": 100, + "EUR": 12 +} + +``` + +### Properties + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|**additionalProperties**|integer(bigint)|false|none|none| + +

## V2Posting

+ + + + + + +```json +{ + "amount": 100, + "asset": "COIN", + "destination": "users:002", + "source": "users:001" +} + +``` + +### Properties + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|amount|integer(bigint)|true|none|none| +|asset|string|true|none|none| +|destination|string|true|none|none| +|source|string|true|none|none| + +

## V2Transaction

+ + + + + + +```json +{ + "insertedAt": "2019-08-24T14:15:22Z", + "timestamp": "2019-08-24T14:15:22Z", + "postings": [ + { + "amount": 100, + "asset": "COIN", + "destination": "users:002", + "source": "users:001" + } + ], + "reference": "ref:001", + "metadata": { + "admin": "true" + }, + "id": 0, + "reverted": true, + "revertedAt": "2019-08-24T14:15:22Z", + "preCommitVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + }, + "postCommitVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + }, + "preCommitEffectiveVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + }, + "postCommitEffectiveVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + } +} + +``` + +### Properties + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|insertedAt|string(date-time)|true|none|none| +|timestamp|string(date-time)|true|none|none| +|postings|[[V2Posting](#schemav2posting)]|true|none|none| +|reference|string|false|none|none| +|metadata|[V2Metadata](#schemav2metadata)|true|none|none| +|id|integer(bigint)|true|none|none| +|reverted|boolean|true|none|none| +|revertedAt|string(date-time)|false|none|none| +|preCommitVolumes|[V2AggregatedVolumes](#schemav2aggregatedvolumes)|false|none|none| +|postCommitVolumes|[V2AggregatedVolumes](#schemav2aggregatedvolumes)|false|none|none| +|preCommitEffectiveVolumes|[V2AggregatedVolumes](#schemav2aggregatedvolumes)|false|none|none| +|postCommitEffectiveVolumes|[V2AggregatedVolumes](#schemav2aggregatedvolumes)|false|none|none| + +

## V2PostTransaction

+ + + + + + +```json +{ + "timestamp": "2019-08-24T14:15:22Z", + "postings": [ + { + "amount": 100, + "asset": "COIN", + "destination": "users:002", + "source": "users:001" + } + ], + "script": { + "plain": "vars {\naccount $user\n}\nsend [COIN 10] (\n\tsource = @world\n\tdestination = $user\n)\n", + "vars": { + "user": "users:042" + } + }, + "reference": "ref:001", + "metadata": { + "admin": "true" + } +} + +``` + +### Properties + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|timestamp|string(date-time)|false|none|none| +|postings|[[V2Posting](#schemav2posting)]|false|none|none| +|script|object|false|none|none| +|» plain|string|true|none|none| +|» vars|object|false|none|none| +|reference|string|false|none|none| +|metadata|[V2Metadata](#schemav2metadata)|true|none|none| + +

## V2Stats

+ + + + + + +```json +{ + "accounts": 0, + "transactions": 0 +} + +``` + +### Properties + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|accounts|integer(int64)|true|none|none| +|transactions|integer(bigint)|true|none|none| + +

## V2Log

+ + + + + + +```json +{ + "id": 1234, + "type": "NEW_TRANSACTION", + "data": {}, + "hash": "9ee060170400f556b7e1575cb13f9db004f150a08355c7431c62bc639166431e", + "date": "2019-08-24T14:15:22Z" +} + +``` + +### Properties + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|id|integer(bigint)|true|none|none| +|type|string|true|none|none| +|data|object|true|none|none| +|hash|string|true|none|none| +|date|string(date-time)|true|none|none| + +#### Enumerated Values + +|Property|Value| +|---|---| +|type|NEW_TRANSACTION| +|type|SET_METADATA| +|type|REVERTED_TRANSACTION| + +

## V2CreateTransactionResponse

+ + + + + + +```json +{ + "data": { + "insertedAt": "2019-08-24T14:15:22Z", + "timestamp": "2019-08-24T14:15:22Z", + "postings": [ + { + "amount": 100, + "asset": "COIN", + "destination": "users:002", + "source": "users:001" + } + ], + "reference": "ref:001", + "metadata": { + "admin": "true" + }, + "id": 0, + "reverted": true, + "revertedAt": "2019-08-24T14:15:22Z", + "preCommitVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + }, + "postCommitVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + }, + "preCommitEffectiveVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + }, + "postCommitEffectiveVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + } + } +} + +``` + +### Properties + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|data|[V2Transaction](#schemav2transaction)|true|none|none| + +

## V2RevertTransactionResponse

+ + + + + + +```json +{ + "data": { + "insertedAt": "2019-08-24T14:15:22Z", + "timestamp": "2019-08-24T14:15:22Z", + "postings": [ + { + "amount": 100, + "asset": "COIN", + "destination": "users:002", + "source": "users:001" + } + ], + "reference": "ref:001", + "metadata": { + "admin": "true" + }, + "id": 0, + "reverted": true, + "revertedAt": "2019-08-24T14:15:22Z", + "preCommitVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + }, + "postCommitVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + }, + "preCommitEffectiveVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + }, + "postCommitEffectiveVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + } + } +} + +``` + +### Properties + +*None* + +

## V2GetTransactionResponse

+ + + + + + +```json +{ + "data": { + "insertedAt": "2019-08-24T14:15:22Z", + "timestamp": "2019-08-24T14:15:22Z", + "postings": [ + { + "amount": 100, + "asset": "COIN", + "destination": "users:002", + "source": "users:001" + } + ], + "reference": "ref:001", + "metadata": { + "admin": "true" + }, + "id": 0, + "reverted": true, + "revertedAt": "2019-08-24T14:15:22Z", + "preCommitVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + }, + "postCommitVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + }, + "preCommitEffectiveVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + }, + "postCommitEffectiveVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + } + } +} + +``` + +### Properties + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|data|[V2Transaction](#schemav2transaction)|true|none|none| + +

## V2StatsResponse

+ + + + + + +```json +{ + "data": { + "accounts": 0, + "transactions": 0 + } +} + +``` + +### Properties + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|data|[V2Stats](#schemav2stats)|true|none|none| + +

## V2ConfigInfoResponse

+ + + + + + +```json +{ + "server": "string", + "version": "string" +} + +``` + +### Properties + +*None* + +

## V2Volume

+ + + + + + +```json +{ + "input": 100, + "output": 20, + "balance": 80 +} + +``` + +### Properties + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|input|integer(bigint)|true|none|none| +|output|integer(bigint)|true|none|none| +|balance|integer(bigint)|false|none|none| + +

## V2Volumes

+ + + + + + +```json +{ + "USD": { + "input": 100, + "output": 10, + "balance": 90 + }, + "EUR": { + "input": 100, + "output": 10, + "balance": 90 + } +} + +``` + +### Properties + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|**additionalProperties**|[V2Volume](#schemav2volume)|false|none|none| + +

## V2AggregatedVolumes

+ + + + + + +```json +{ + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } +} + +``` + +### Properties + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|**additionalProperties**|[V2Volumes](#schemav2volumes)|false|none|none| + +

## V2ErrorResponse

+ + + + + + +```json +{ + "errorCode": "VALIDATION", + "errorMessage": "[VALIDATION] invalid 'cursor' query param", + "details": "https://play.numscript.org/?payload=eyJlcnJvciI6ImFjY291bnQgaGFkIGluc3VmZmljaWVudCBmdW5kcyJ9" +} + +``` + +### Properties + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|errorCode|[V2ErrorsEnum](#schemav2errorsenum)|true|none|none| +|errorMessage|string|true|none|none| +|details|string|false|none|none| + +

## V2ErrorsEnum

+ + + + + + +```json +"VALIDATION" + +``` + +### Properties + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|*anonymous*|string|false|none|none| + +#### Enumerated Values + +|Property|Value| +|---|---| +|*anonymous*|INTERNAL| +|*anonymous*|INSUFFICIENT_FUND| +|*anonymous*|VALIDATION| +|*anonymous*|CONFLICT| +|*anonymous*|COMPILATION_FAILED| +|*anonymous*|METADATA_OVERRIDE| +|*anonymous*|NOT_FOUND| +|*anonymous*|REVERT_OCCURRING| +|*anonymous*|ALREADY_REVERT| +|*anonymous*|NO_POSTINGS| +|*anonymous*|LEDGER_NOT_FOUND| +|*anonymous*|IMPORT| +|*anonymous*|TIMEOUT| +|*anonymous*|BULK_SIZE_EXCEEDED| + +

## V2LedgerInfoResponse

+ + + + + + +```json +{ + "data": { + "name": "ledger001", + "storage": { + "migrations": [ + { + "version": 11, + "name": "migrations:001", + "date": "2019-08-24T14:15:22Z", + "state": "TO DO" + } + ] + } + } +} + +``` + +### Properties + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|data|[V2LedgerInfo](#schemav2ledgerinfo)|false|none|none| + +

## V2LedgerInfo

+ + + + + + +```json +{ + "name": "ledger001", + "storage": { + "migrations": [ + { + "version": 11, + "name": "migrations:001", + "date": "2019-08-24T14:15:22Z", + "state": "TO DO" + } + ] + } +} + +``` + +### Properties + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|name|string|false|none|none| +|storage|object|false|none|none| +|» migrations|[[V2MigrationInfo](#schemav2migrationinfo)]|false|none|none| + +

## V2MigrationInfo

+ + + + + + +```json +{ + "version": 11, + "name": "migrations:001", + "date": "2019-08-24T14:15:22Z", + "state": "TO DO" +} + +``` + +### Properties + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|version|integer(int64)|false|none|none| +|name|string|false|none|none| +|date|string(date-time)|false|none|none| +|state|string|false|none|none| + +#### Enumerated Values + +|Property|Value| +|---|---| +|state|TO DO| +|state|DONE| + +

## V2Bulk

+ + + + + + +```json +[ + { + "action": "string", + "ik": "string", + "data": { + "timestamp": "2019-08-24T14:15:22Z", + "postings": [ + { + "amount": 100, + "asset": "COIN", + "destination": "users:002", + "source": "users:001" + } + ], + "script": { + "plain": "vars {\naccount $user\n}\nsend [COIN 10] (\n\tsource = @world\n\tdestination = $user\n)\n", + "vars": { + "user": "users:042" + } + }, + "reference": "ref:001", + "metadata": { + "admin": "true" + } + } + } +] + +``` + +### Properties + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|*anonymous*|[[V2BulkElement](#schemav2bulkelement)]|false|none|none| + +

## V2BaseBulkElement

+ + + + + + +```json +{ + "action": "string", + "ik": "string" +} + +``` + +### Properties + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|action|string|true|none|none| +|ik|string|false|none|none| + +

## V2BulkElement

+ + + + + + +```json +{ + "action": "string", + "ik": "string", + "data": { + "timestamp": "2019-08-24T14:15:22Z", + "postings": [ + { + "amount": 100, + "asset": "COIN", + "destination": "users:002", + "source": "users:001" + } + ], + "script": { + "plain": "vars {\naccount $user\n}\nsend [COIN 10] (\n\tsource = @world\n\tdestination = $user\n)\n", + "vars": { + "user": "users:042" + } + }, + "reference": "ref:001", + "metadata": { + "admin": "true" + } + } +} + +``` + +### Properties + +oneOf + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|*anonymous*|[V2BulkElementCreateTransaction](#schemav2bulkelementcreatetransaction)|false|none|none| + +xor + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|*anonymous*|[V2BulkElementAddMetadata](#schemav2bulkelementaddmetadata)|false|none|none| + +xor + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|*anonymous*|[V2BulkElementRevertTransaction](#schemav2bulkelementreverttransaction)|false|none|none| + +xor + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|*anonymous*|[V2BulkElementDeleteMetadata](#schemav2bulkelementdeletemetadata)|false|none|none| + +

## V2BulkElementCreateTransaction

+ + + + + + +```json +{ + "action": "string", + "ik": "string", + "data": { + "timestamp": "2019-08-24T14:15:22Z", + "postings": [ + { + "amount": 100, + "asset": "COIN", + "destination": "users:002", + "source": "users:001" + } + ], + "script": { + "plain": "vars {\naccount $user\n}\nsend [COIN 10] (\n\tsource = @world\n\tdestination = $user\n)\n", + "vars": { + "user": "users:042" + } + }, + "reference": "ref:001", + "metadata": { + "admin": "true" + } + } +} + +``` + +### Properties + +allOf + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|*anonymous*|[V2BaseBulkElement](#schemav2basebulkelement)|false|none|none| + +and + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|*anonymous*|object|false|none|none| +|» data|[V2PostTransaction](#schemav2posttransaction)|false|none|none| + +

## V2TargetId

+ + + + + + +```json +"string" + +``` + +### Properties + +oneOf + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|*anonymous*|string|false|none|none| + +xor + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|*anonymous*|integer(bigint)|false|none|none| + +

## V2TargetType

+ + + + + + +```json +"TRANSACTION" + +``` + +### Properties + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|*anonymous*|string|false|none|none| + +#### Enumerated Values + +|Property|Value| +|---|---| +|*anonymous*|TRANSACTION| +|*anonymous*|ACCOUNT| + +

## V2BulkElementAddMetadata

+ + + + + + +```json +{ + "action": "string", + "ik": "string", + "data": { + "targetId": "string", + "targetType": "TRANSACTION", + "metadata": { + "property1": "string", + "property2": "string" + } + } +} + +``` + +### Properties + +allOf + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|*anonymous*|[V2BaseBulkElement](#schemav2basebulkelement)|false|none|none| + +and + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|*anonymous*|object|false|none|none| +|» data|object|false|none|none| +|»» targetId|[V2TargetId](#schemav2targetid)|true|none|none| +|»» targetType|[V2TargetType](#schemav2targettype)|true|none|none| +|»» metadata|object|true|none|none| +|»»» **additionalProperties**|string|false|none|none| + +

## V2BulkElementRevertTransaction

+ + + + + + +```json +{ + "action": "string", + "ik": "string", + "data": { + "id": 0, + "force": true, + "atEffectiveDate": true + } +} + +``` + +### Properties + +allOf + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|*anonymous*|[V2BaseBulkElement](#schemav2basebulkelement)|false|none|none| + +and + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|*anonymous*|object|false|none|none| +|» data|object|false|none|none| +|»» id|integer(bigint)|true|none|none| +|»» force|boolean|false|none|none| +|»» atEffectiveDate|boolean|false|none|none| + +

## V2BulkElementDeleteMetadata

+ + + + + + +```json +{ + "action": "string", + "ik": "string", + "data": { + "targetId": "string", + "targetType": "TRANSACTION", + "key": "string" + } +} + +``` + +### Properties + +allOf + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|*anonymous*|[V2BaseBulkElement](#schemav2basebulkelement)|false|none|none| + +and + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|*anonymous*|object|false|none|none| +|» data|object|false|none|none| +|»» targetId|[V2TargetId](#schemav2targetid)|true|none|none| +|»» targetType|[V2TargetType](#schemav2targettype)|true|none|none| +|»» key|string|true|none|none| + +

V2BulkResponse

+ + + + + + +```json +{ + "data": [ + { + "responseType": "string", + "data": { + "insertedAt": "2019-08-24T14:15:22Z", + "timestamp": "2019-08-24T14:15:22Z", + "postings": [ + { + "amount": 100, + "asset": "COIN", + "destination": "users:002", + "source": "users:001" + } + ], + "reference": "ref:001", + "metadata": { + "admin": "true" + }, + "id": 0, + "reverted": true, + "revertedAt": "2019-08-24T14:15:22Z", + "preCommitVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + }, + "postCommitVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + }, + "preCommitEffectiveVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + }, + "postCommitEffectiveVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + } + } + } + ] +} + +``` + +### Properties + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|data|[[V2BulkElementResult](#schemav2bulkelementresult)]|true|none|none| + +
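A bulk call sends an array of the elements defined above and receives a V2BulkResponse back. The sketch below shows one way a client might do that with Go's standard library; the `/v2/{ledger}/_bulk` path, the array-shaped request body, and the `ADD_METADATA` action value are assumptions to be checked against the bulk endpoint documented earlier in this reference.

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

// bulkResponse mirrors V2BulkResponse: a "data" array of per-element results,
// kept as raw JSON so each entry can later be dispatched on its responseType.
type bulkResponse struct {
	Data []json.RawMessage `json:"data"`
}

func main() {
	// Assumed: the bulk endpoint accepts a JSON array of bulk elements.
	elements := []map[string]any{
		{
			"action": "ADD_METADATA", // assumed action value
			"data": map[string]any{
				"targetId":   "users:001",
				"targetType": "ACCOUNT",
				"metadata":   map[string]string{"admin": "true"},
			},
		},
	}

	payload, err := json.Marshal(elements)
	if err != nil {
		panic(err)
	}

	// Assumed path; "example-ledger" is a placeholder ledger name.
	resp, err := http.Post("http://localhost:3068/v2/example-ledger/_bulk", "application/json", bytes.NewReader(payload))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var out bulkResponse
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		panic(err)
	}
	fmt.Printf("received %d bulk results (HTTP %d)\n", len(out.Data), resp.StatusCode)
}
```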

V2BulkElementResult

+ + + + + + +```json +{ + "responseType": "string", + "data": { + "insertedAt": "2019-08-24T14:15:22Z", + "timestamp": "2019-08-24T14:15:22Z", + "postings": [ + { + "amount": 100, + "asset": "COIN", + "destination": "users:002", + "source": "users:001" + } + ], + "reference": "ref:001", + "metadata": { + "admin": "true" + }, + "id": 0, + "reverted": true, + "revertedAt": "2019-08-24T14:15:22Z", + "preCommitVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + }, + "postCommitVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + }, + "preCommitEffectiveVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + }, + "postCommitEffectiveVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + } + } +} + +``` + +### Properties + +oneOf + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|*anonymous*|[V2BulkElementResultCreateTransaction](#schemav2bulkelementresultcreatetransaction)|false|none|none| + +xor + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|*anonymous*|[V2BulkElementResultAddMetadata](#schemav2bulkelementresultaddmetadata)|false|none|none| + +xor + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|*anonymous*|[V2BulkElementResultRevertTransaction](#schemav2bulkelementresultreverttransaction)|false|none|none| + +xor + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|*anonymous*|[V2BulkElementResultDeleteMetadata](#schemav2bulkelementresultdeletemetadata)|false|none|none| + +xor + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|*anonymous*|[V2BulkElementResultError](#schemav2bulkelementresulterror)|false|none|none| + +
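Because V2BulkElementResult is a oneOf discriminated by `responseType`, a client can decode the shared base first and only then pick the concrete variant. The Go sketch below illustrates that two-step decode; the literal `responseType` values in the switch are assumptions inferred from the variant names rather than values stated in this schema.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// baseResult mirrors V2BaseBulkElementResult: the discriminator shared by all variants.
type baseResult struct {
	ResponseType string `json:"responseType"`
}

// errorResult mirrors V2BulkElementResultError.
type errorResult struct {
	baseResult
	ErrorCode        string `json:"errorCode"`
	ErrorDescription string `json:"errorDescription"`
	ErrorDetails     string `json:"errorDetails,omitempty"`
}

// handleResult decodes just the discriminator, then re-decodes the raw element
// into the matching variant. The responseType strings below are assumptions.
func handleResult(raw json.RawMessage) {
	var base baseResult
	if err := json.Unmarshal(raw, &base); err != nil {
		fmt.Println("malformed result:", err)
		return
	}
	switch base.ResponseType {
	case "ERROR":
		var e errorResult
		if err := json.Unmarshal(raw, &e); err == nil {
			fmt.Printf("element failed: %s (%s)\n", e.ErrorCode, e.ErrorDescription)
		}
	case "CREATE_TRANSACTION", "REVERT_TRANSACTION":
		// Both variants carry a V2Transaction under "data"; decode it here as needed.
		fmt.Println("transaction result:", base.ResponseType)
	default:
		fmt.Println("metadata result:", base.ResponseType)
	}
}

func main() {
	samples := []string{
		`{"responseType":"ERROR","errorCode":"VALIDATION","errorDescription":"bad posting"}`,
		`{"responseType":"ADD_METADATA"}`,
	}
	for _, s := range samples {
		handleResult(json.RawMessage(s))
	}
}
```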

V2BaseBulkElementResult

+ + + + + + +```json +{ + "responseType": "string" +} + +``` + +### Properties + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|responseType|string|true|none|none| + +

V2BulkElementResultCreateTransaction

+ + + + + + +```json +{ + "responseType": "string", + "data": { + "insertedAt": "2019-08-24T14:15:22Z", + "timestamp": "2019-08-24T14:15:22Z", + "postings": [ + { + "amount": 100, + "asset": "COIN", + "destination": "users:002", + "source": "users:001" + } + ], + "reference": "ref:001", + "metadata": { + "admin": "true" + }, + "id": 0, + "reverted": true, + "revertedAt": "2019-08-24T14:15:22Z", + "preCommitVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + }, + "postCommitVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + }, + "preCommitEffectiveVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + }, + "postCommitEffectiveVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + } + } +} + +``` + +### Properties + +allOf + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|*anonymous*|[V2BaseBulkElementResult](#schemav2basebulkelementresult)|false|none|none| + +and + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|*anonymous*|object|false|none|none| +|» data|[V2Transaction](#schemav2transaction)|true|none|none| + +

V2BulkElementResultAddMetadata

+ + + + + + +```json +{ + "responseType": "string" +} + +``` + +### Properties + +*None* + +

V2BulkElementResultRevertTransaction

+ + + + + + +```json +{ + "responseType": "string", + "data": { + "insertedAt": "2019-08-24T14:15:22Z", + "timestamp": "2019-08-24T14:15:22Z", + "postings": [ + { + "amount": 100, + "asset": "COIN", + "destination": "users:002", + "source": "users:001" + } + ], + "reference": "ref:001", + "metadata": { + "admin": "true" + }, + "id": 0, + "reverted": true, + "revertedAt": "2019-08-24T14:15:22Z", + "preCommitVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + }, + "postCommitVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + }, + "preCommitEffectiveVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + }, + "postCommitEffectiveVolumes": { + "orders:1": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + }, + "orders:2": { + "USD": { + "input": 100, + "output": 10, + "balance": 90 + } + } + } + } +} + +``` + +### Properties + +allOf + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|*anonymous*|[V2BaseBulkElementResult](#schemav2basebulkelementresult)|false|none|none| + +and + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|*anonymous*|object|false|none|none| +|» data|[V2Transaction](#schemav2transaction)|true|none|none| + +

V2BulkElementResultDeleteMetadata

+ + + + + + +```json +{ + "responseType": "string" +} + +``` + +### Properties + +*None* + +

V2BulkElementResultError

+ + + + + + +```json +{ + "responseType": "string", + "errorCode": "string", + "errorDescription": "string", + "errorDetails": "string" +} + +``` + +### Properties + +allOf + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|*anonymous*|[V2BaseBulkElementResult](#schemav2basebulkelementresult)|false|none|none| + +and + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|*anonymous*|object|false|none|none| +|» errorCode|string|true|none|none| +|» errorDescription|string|true|none|none| +|» errorDetails|string|false|none|none| + +

V2CreateLedgerRequest

+ + + + + + +```json +{ + "bucket": "string", + "metadata": { + "admin": "true" + }, + "features": { + "property1": "string", + "property2": "string" + } +} + +``` + +### Properties + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|bucket|string|false|none|none| +|metadata|[V2Metadata](#schemav2metadata)|false|none|none| +|features|object|false|none|none| +|» **additionalProperties**|string|false|none|none| + +
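All fields of V2CreateLedgerRequest are optional, so a minimal creation call only needs the ones you care about. The sketch below posts such a body with Go's standard library; the `POST /v2/{ledger}` path and the bucket value are assumptions, so verify them against the create-ledger endpoint documented above.

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

// createLedgerRequest mirrors V2CreateLedgerRequest: every field is optional.
type createLedgerRequest struct {
	Bucket   string            `json:"bucket,omitempty"`
	Metadata map[string]string `json:"metadata,omitempty"`
	Features map[string]string `json:"features,omitempty"`
}

func main() {
	req := createLedgerRequest{
		Bucket:   "reports", // placeholder bucket name
		Metadata: map[string]string{"admin": "true"},
	}
	payload, err := json.Marshal(req)
	if err != nil {
		panic(err)
	}

	// Assumed endpoint: POST /v2/{ledger} creates the named ledger.
	resp, err := http.Post("http://localhost:3068/v2/example-ledger", "application/json", bytes.NewReader(payload))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println("create ledger returned HTTP", resp.StatusCode)
}
```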

V2Ledger

+ + + + + + +```json +{ + "name": "string", + "addedAt": "2019-08-24T14:15:22Z", + "bucket": "string", + "metadata": { + "admin": "true" + } +} + +``` + +### Properties + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|name|string|true|none|none| +|addedAt|string(date-time)|true|none|none| +|bucket|string|true|none|none| +|metadata|[V2Metadata](#schemav2metadata)|false|none|none| + +

V2LedgerListResponse

+ + + + + + +```json +{ + "cursor": { + "pageSize": 15, + "hasMore": false, + "previous": "YXVsdCBhbmQgYSBtYXhpbXVtIG1heF9yZXN1bHRzLol=", + "next": "", + "data": [ + { + "name": "string", + "addedAt": "2019-08-24T14:15:22Z", + "bucket": "string", + "metadata": { + "admin": "true" + } + } + ] + } +} + +``` + +### Properties + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|cursor|object|true|none|none| +|» pageSize|integer(int64)|true|none|none| +|» hasMore|boolean|true|none|none| +|» previous|string|false|none|none| +|» next|string|false|none|none| +|» data|[[V2Ledger](#schemav2ledger)]|true|none|none| + +
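V2LedgerListResponse wraps its results in a cursor envelope (`pageSize`, `hasMore`, `next`), which is what a client loops on to fetch every page. Below is a rough Go sketch of that loop under the assumption that ledgers are listed with `GET /v2`, that `pageSize` limits a page, and that the opaque `next` token is passed back as a `cursor` query parameter; all three names should be checked against the list endpoint documented earlier.

```go
package main

import (
	"encoding/json"
	"fmt"
	"net/http"
	"net/url"
)

// ledgerListResponse mirrors V2LedgerListResponse's cursor envelope.
type ledgerListResponse struct {
	Cursor struct {
		PageSize int    `json:"pageSize"`
		HasMore  bool   `json:"hasMore"`
		Next     string `json:"next"`
		Data     []struct {
			Name    string `json:"name"`
			Bucket  string `json:"bucket"`
			AddedAt string `json:"addedAt"`
		} `json:"data"`
	} `json:"cursor"`
}

func main() {
	base := "http://localhost:3068/v2" // assumed list-ledgers endpoint
	cursor := ""
	for {
		q := url.Values{}
		if cursor == "" {
			q.Set("pageSize", "15")
		} else {
			// Assumed: subsequent pages are requested with only the cursor token.
			q.Set("cursor", cursor)
		}
		resp, err := http.Get(base + "?" + q.Encode())
		if err != nil {
			panic(err)
		}
		var page ledgerListResponse
		err = json.NewDecoder(resp.Body).Decode(&page)
		resp.Body.Close()
		if err != nil {
			panic(err)
		}
		for _, l := range page.Cursor.Data {
			fmt.Println(l.Name, "in bucket", l.Bucket)
		}
		if !page.Cursor.HasMore {
			break
		}
		cursor = page.Cursor.Next
	}
}
```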

V2UpdateLedgerMetadataRequest

+ + + + + + +```json +{ + "admin": "true" +} + +``` + +### Properties + +*None* + +

V2GetLedgerResponse

+ + + + + + +```json +{ + "data": { + "name": "string", + "addedAt": "2019-08-24T14:15:22Z", + "bucket": "string", + "metadata": { + "admin": "true" + } + } +} + +``` + +### Properties + +|Name|Type|Required|Restrictions|Description| +|---|---|---|---|---| +|data|[V2Ledger](#schemav2ledger)|true|none|none| + diff --git a/docs/database/_default/diagrams/orphans/orphans.dot b/docs/database/_default/diagrams/orphans/orphans.dot new file mode 100644 index 000000000..77e45a25e --- /dev/null +++ b/docs/database/_default/diagrams/orphans/orphans.dot @@ -0,0 +1,52 @@ +digraph "orphans" { + graph [ rankdir="RL" bgcolor="#ffffff" nodesep="0.18" ranksep="0.46" fontname="Helvetica" fontsize="11" ration="compress" ]; node [ fontname="Helvetica" fontsize="11" shape="plaintext" ]; edge [ arrowsize="0.8" ]; + "accounts_volumes" [ + label=< + + + + + + + + +
accounts_volumes[table]
ledger
varchar[2147483647]
accounts_address
varchar[2147483647]
asset
varchar[2147483647]
input
numeric[0]
output
numeric[0]
< 0 0 >
> + URL="tables/accounts_volumes.html" + target="_top" + tooltip="accounts_volumes" + ]; + "goose_db_version" [ + label=< + + + + + + + +
goose_db_version[table]
id
serial[10]
version_id
int8[19]
is_applied
bool[1]
tstamp
timestamp[29,6]
< 0 0 >
> + URL="tables/goose_db_version.html" + target="_top" + tooltip="goose_db_version" + ]; + "logs" [ + label=< + + + + + + + + + + + + + +
logs[table]
seq
bigserial[19]
ledger
varchar[2147483647]
id
numeric[0]
type
"_default"."log_type"[2147483647]
hash
bytea[2147483647]
date
timestamp[29,6]
data
jsonb[2147483647]
idempotency_key
varchar[255]
memento
bytea[2147483647]
idempotency_hash
bytea[2147483647]
< 0 0 >
> + URL="tables/logs.html" + target="_top" + tooltip="logs" + ]; +} diff --git a/docs/database/_default/diagrams/orphans/orphans.png b/docs/database/_default/diagrams/orphans/orphans.png new file mode 100644 index 000000000..e03134589 Binary files /dev/null and b/docs/database/_default/diagrams/orphans/orphans.png differ diff --git a/docs/database/_default/diagrams/summary/relationships.real.compact.dot b/docs/database/_default/diagrams/summary/relationships.real.compact.dot new file mode 100644 index 000000000..acf21c52d --- /dev/null +++ b/docs/database/_default/diagrams/summary/relationships.real.compact.dot @@ -0,0 +1,97 @@ +digraph "compactRelationshipsDiagram" { + graph [ rankdir="RL" bgcolor="#ffffff" label="\nGenerated by SchemaSpy" labeljust="l" nodesep="0.18" ranksep="0.46" fontname="Helvetica" fontsize="11" ration="compress" ]; node [ fontname="Helvetica" fontsize="11" shape="plaintext" ]; edge [ arrowsize="0.8" ]; + "moves" [ + label=< + + + + + + + + + + + + +
moves[table]
seq
ledger
transactions_seq
accounts_seq
accounts_address
accounts_address_array
asset
effective_date
...
< 2
> + URL="tables/moves.html" + target="_top" + tooltip="moves" + ]; + "accounts" [ + label=< + + + + + + + + + +
accounts[table]
seq
ledger
address
address_array
first_usage
...
2 >
> + URL="tables/accounts.html" + target="_top" + tooltip="accounts" + ]; + "transactions_metadata" [ + label=< + + + + + + + + + + +
transactions_metadata[table]
seq
ledger
transactions_seq
revision
date
metadata
...
< 1
> + URL="tables/transactions_metadata.html" + target="_top" + tooltip="transactions_metadata" + ]; + "accounts_metadata" [ + label=< + + + + + + + + + + +
accounts_metadata[table]
seq
ledger
accounts_seq
metadata
revision
date
...
< 1
> + URL="tables/accounts_metadata.html" + target="_top" + tooltip="accounts_metadata" + ]; + "transactions" [ + label=< + + + + + + + + + + + + + + +
transactions[table]
seq
ledger
id
timestamp
reference
sources
destinations
sources_arrays
destinations_arrays
metadata
...
2 >
> + URL="tables/transactions.html" + target="_top" + tooltip="transactions" + ]; + "accounts_metadata":"accounts_seq":w -> "accounts":"seq":e [arrowhead=none dir=back arrowtail=crowodot]; + "moves":"accounts_seq":w -> "accounts":"seq":e [arrowhead=none dir=back arrowtail=crowodot]; + "moves":"transactions_seq":w -> "transactions":"seq":e [arrowhead=none dir=back arrowtail=crowodot]; + "transactions_metadata":"transactions_seq":w -> "transactions":"seq":e [arrowhead=none dir=back arrowtail=crowodot]; +} diff --git a/docs/database/_default/diagrams/summary/relationships.real.compact.png b/docs/database/_default/diagrams/summary/relationships.real.compact.png new file mode 100644 index 000000000..cedb77457 Binary files /dev/null and b/docs/database/_default/diagrams/summary/relationships.real.compact.png differ diff --git a/docs/database/_default/diagrams/summary/relationships.real.large.dot b/docs/database/_default/diagrams/summary/relationships.real.large.dot new file mode 100644 index 000000000..8a11f9ae6 --- /dev/null +++ b/docs/database/_default/diagrams/summary/relationships.real.large.dot @@ -0,0 +1,108 @@ +digraph "largeRelationshipsDiagram" { + graph [ rankdir="RL" bgcolor="#ffffff" label="\nGenerated by SchemaSpy" labeljust="l" nodesep="0.18" ranksep="0.46" fontname="Helvetica" fontsize="11" ration="compress" ]; node [ fontname="Helvetica" fontsize="11" shape="plaintext" ]; edge [ arrowsize="0.8" ]; + "moves" [ + label=< + + + + + + + + + + + + + + + + + +
moves[table]
seq
ledger
transactions_seq
accounts_seq
accounts_address
accounts_address_array
asset
amount
insertion_date
effective_date
post_commit_volumes
post_commit_effective_volumes
is_source
transactions_id
< 2
> + URL="tables/moves.html" + target="_top" + tooltip="moves" + ]; + "accounts" [ + label=< + + + + + + + + + + + +
accounts[table]
seq
ledger
address
address_array
insertion_date
updated_at
metadata
first_usage
2 >
> + URL="tables/accounts.html" + target="_top" + tooltip="accounts" + ]; + "transactions_metadata" [ + label=< + + + + + + + + + + +
transactions_metadata[table]
seq
ledger
transactions_seq
revision
date
metadata
transactions_id
< 1
> + URL="tables/transactions_metadata.html" + target="_top" + tooltip="transactions_metadata" + ]; + "accounts_metadata" [ + label=< + + + + + + + + + + +
accounts_metadata[table]
seq
ledger
accounts_seq
metadata
revision
date
accounts_address
< 1
> + URL="tables/accounts_metadata.html" + target="_top" + tooltip="accounts_metadata" + ]; + "transactions" [ + label=< + + + + + + + + + + + + + + + + + + +
transactions[table]
seq
ledger
id
timestamp
reference
reverted_at
updated_at
postings
sources
destinations
sources_arrays
destinations_arrays
metadata
post_commit_volumes
inserted_at
2 >
> + URL="tables/transactions.html" + target="_top" + tooltip="transactions" + ]; + "accounts_metadata":"accounts_seq":w -> "accounts":"seq":e [arrowhead=none dir=back arrowtail=crowodot]; + "moves":"accounts_seq":w -> "accounts":"seq":e [arrowhead=none dir=back arrowtail=crowodot]; + "moves":"transactions_seq":w -> "transactions":"seq":e [arrowhead=none dir=back arrowtail=crowodot]; + "transactions_metadata":"transactions_seq":w -> "transactions":"seq":e [arrowhead=none dir=back arrowtail=crowodot]; +} diff --git a/docs/database/_default/diagrams/summary/relationships.real.large.png b/docs/database/_default/diagrams/summary/relationships.real.large.png new file mode 100644 index 000000000..5d72e87da Binary files /dev/null and b/docs/database/_default/diagrams/summary/relationships.real.large.png differ diff --git a/docs/database/_default/diagrams/tables/accounts.1degree.dot b/docs/database/_default/diagrams/tables/accounts.1degree.dot new file mode 100644 index 000000000..3130804ef --- /dev/null +++ b/docs/database/_default/diagrams/tables/accounts.1degree.dot @@ -0,0 +1,59 @@ +digraph "oneDegreeRelationshipsDiagram" { + graph [ rankdir="RL" bgcolor="#ffffff" label="\nGenerated by SchemaSpy" labeljust="l" nodesep="0.18" ranksep="0.46" fontname="Helvetica" fontsize="11" ration="compress" ]; node [ fontname="Helvetica" fontsize="11" shape="plaintext" ]; edge [ arrowsize="0.8" ]; + "accounts_metadata":"accounts_seq":w -> "accounts":"seq.type":e [arrowhead=none dir=back arrowtail=crowodot]; + "moves":"accounts_seq":w -> "accounts":"seq.type":e [arrowhead=none dir=back arrowtail=crowodot]; + "accounts" [ + label=< + + + + + + + + + + + +
accounts[table]
seq
bigserial[19]
ledger
varchar[2147483647]
address
varchar[2147483647]
address_array
jsonb[2147483647]
insertion_date
timestamp[29,6]
updated_at
timestamp[29,6]
metadata
jsonb[2147483647]
first_usage
timestamp[29,6]
< 0 2 >
> + URL="accounts.html" + target="_top" + tooltip="accounts" + ]; + "accounts_metadata" [ + label=< + + + + + + + + + + +
accounts_metadata[table]
seq
ledger
accounts_seq
metadata
revision
date
...
< 1
> + URL="accounts_metadata.html" + target="_top" + tooltip="accounts_metadata" + ]; + "moves" [ + label=< + + + + + + + + + + + + +
moves[table]
seq
ledger
transactions_seq
accounts_seq
accounts_address
accounts_address_array
asset
effective_date
...
< 2
> + URL="moves.html" + target="_top" + tooltip="moves" + ]; +} diff --git a/docs/database/_default/diagrams/tables/accounts.1degree.png b/docs/database/_default/diagrams/tables/accounts.1degree.png new file mode 100644 index 000000000..15a6958d4 Binary files /dev/null and b/docs/database/_default/diagrams/tables/accounts.1degree.png differ diff --git a/docs/database/_default/diagrams/tables/accounts.2degrees.dot b/docs/database/_default/diagrams/tables/accounts.2degrees.dot new file mode 100644 index 000000000..874a104ba --- /dev/null +++ b/docs/database/_default/diagrams/tables/accounts.2degrees.dot @@ -0,0 +1,71 @@ +digraph "twoDegreesRelationshipsDiagram" { + graph [ rankdir="RL" bgcolor="#ffffff" label="\nGenerated by SchemaSpy" labeljust="l" nodesep="0.18" ranksep="0.46" fontname="Helvetica" fontsize="11" ration="compress" ]; node [ fontname="Helvetica" fontsize="11" shape="plaintext" ]; edge [ arrowsize="0.8" ]; + "accounts_metadata":"accounts_seq":w -> "accounts":"seq.type":e [arrowhead=none dir=back arrowtail=crowodot]; + "moves":"accounts_seq":w -> "accounts":"seq.type":e [arrowhead=none dir=back arrowtail=crowodot]; + "moves":"transactions_seq":w -> "transactions":"elipses":e [arrowhead=none dir=back arrowtail=crowodot]; + "accounts" [ + label=< + + + + + + + + + + + +
accounts[table]
seq
bigserial[19]
ledger
varchar[2147483647]
address
varchar[2147483647]
address_array
jsonb[2147483647]
insertion_date
timestamp[29,6]
updated_at
timestamp[29,6]
metadata
jsonb[2147483647]
first_usage
timestamp[29,6]
< 0 2 >
> + URL="accounts.html" + target="_top" + tooltip="accounts" + ]; + "accounts_metadata" [ + label=< + + + + + + + + + + +
accounts_metadata[table]
seq
ledger
accounts_seq
metadata
revision
date
...
< 1
> + URL="accounts_metadata.html" + target="_top" + tooltip="accounts_metadata" + ]; + "moves" [ + label=< + + + + + + + + + + + + +
moves[table]
seq
ledger
transactions_seq
accounts_seq
accounts_address
accounts_address_array
asset
effective_date
...
< 2
> + URL="moves.html" + target="_top" + tooltip="moves" + ]; + "transactions" [ + label=< + + + + +
transactions[table]
...
2 >
> + URL="transactions.html" + target="_top" + tooltip="transactions" + ]; +} diff --git a/docs/database/_default/diagrams/tables/accounts.2degrees.png b/docs/database/_default/diagrams/tables/accounts.2degrees.png new file mode 100644 index 000000000..886c587f4 Binary files /dev/null and b/docs/database/_default/diagrams/tables/accounts.2degrees.png differ diff --git a/docs/database/_default/diagrams/tables/accounts_metadata.1degree.dot b/docs/database/_default/diagrams/tables/accounts_metadata.1degree.dot new file mode 100644 index 000000000..a9c2b7471 --- /dev/null +++ b/docs/database/_default/diagrams/tables/accounts_metadata.1degree.dot @@ -0,0 +1,37 @@ +digraph "oneDegreeRelationshipsDiagram" { + graph [ rankdir="RL" bgcolor="#ffffff" label="\nGenerated by SchemaSpy" labeljust="l" nodesep="0.18" ranksep="0.46" fontname="Helvetica" fontsize="11" ration="compress" ]; node [ fontname="Helvetica" fontsize="11" shape="plaintext" ]; edge [ arrowsize="0.8" ]; + "accounts_metadata":"accounts_seq":w -> "accounts":"seq":e [arrowhead=none dir=back arrowtail=crowodot]; + "accounts" [ + label=< + + + + + + + + + +
accounts[table]
seq
ledger
address
address_array
first_usage
...
2 >
> + URL="accounts.html" + target="_top" + tooltip="accounts" + ]; + "accounts_metadata" [ + label=< + + + + + + + + + + +
accounts_metadata[table]
seq
bigserial[19]
ledger
varchar[2147483647]
accounts_seq
int8[19]
metadata
jsonb[2147483647]
revision
numeric[0]
date
timestamp[29,6]
accounts_address
varchar[2147483647]
< 1 0 >
> + URL="accounts_metadata.html" + target="_top" + tooltip="accounts_metadata" + ]; +} diff --git a/docs/database/_default/diagrams/tables/accounts_metadata.1degree.png b/docs/database/_default/diagrams/tables/accounts_metadata.1degree.png new file mode 100644 index 000000000..96a351a13 Binary files /dev/null and b/docs/database/_default/diagrams/tables/accounts_metadata.1degree.png differ diff --git a/docs/database/_default/diagrams/tables/accounts_metadata.2degrees.dot b/docs/database/_default/diagrams/tables/accounts_metadata.2degrees.dot new file mode 100644 index 000000000..9e53fd613 --- /dev/null +++ b/docs/database/_default/diagrams/tables/accounts_metadata.2degrees.dot @@ -0,0 +1,49 @@ +digraph "twoDegreesRelationshipsDiagram" { + graph [ rankdir="RL" bgcolor="#ffffff" label="\nGenerated by SchemaSpy" labeljust="l" nodesep="0.18" ranksep="0.46" fontname="Helvetica" fontsize="11" ration="compress" ]; node [ fontname="Helvetica" fontsize="11" shape="plaintext" ]; edge [ arrowsize="0.8" ]; + "accounts_metadata":"accounts_seq":w -> "accounts":"seq":e [arrowhead=none dir=back arrowtail=crowodot]; + "moves":"elipses":w -> "accounts":"seq":e [arrowhead=none dir=back arrowtail=crowodot]; + "accounts" [ + label=< + + + + + + + + + +
accounts[table]
seq
ledger
address
address_array
first_usage
...
2 >
> + URL="accounts.html" + target="_top" + tooltip="accounts" + ]; + "accounts_metadata" [ + label=< + + + + + + + + + + +
accounts_metadata[table]
seq
bigserial[19]
ledger
varchar[2147483647]
accounts_seq
int8[19]
metadata
jsonb[2147483647]
revision
numeric[0]
date
timestamp[29,6]
accounts_address
varchar[2147483647]
< 1 0 >
> + URL="accounts_metadata.html" + target="_top" + tooltip="accounts_metadata" + ]; + "moves" [ + label=< + + + + +
moves[table]
...
< 2
> + URL="moves.html" + target="_top" + tooltip="moves" + ]; +} diff --git a/docs/database/_default/diagrams/tables/accounts_metadata.2degrees.png b/docs/database/_default/diagrams/tables/accounts_metadata.2degrees.png new file mode 100644 index 000000000..038ee4a68 Binary files /dev/null and b/docs/database/_default/diagrams/tables/accounts_metadata.2degrees.png differ diff --git a/docs/database/_default/diagrams/tables/accounts_volumes.1degree.dot b/docs/database/_default/diagrams/tables/accounts_volumes.1degree.dot new file mode 100644 index 000000000..f8c5da12c --- /dev/null +++ b/docs/database/_default/diagrams/tables/accounts_volumes.1degree.dot @@ -0,0 +1,18 @@ +digraph "oneDegreeRelationshipsDiagram" { + graph [ rankdir="RL" bgcolor="#ffffff" label="\nGenerated by SchemaSpy" labeljust="l" nodesep="0.18" ranksep="0.46" fontname="Helvetica" fontsize="11" ration="compress" ]; node [ fontname="Helvetica" fontsize="11" shape="plaintext" ]; edge [ arrowsize="0.8" ]; + "accounts_volumes" [ + label=< + + + + + + + + +
accounts_volumes[table]
ledger
varchar[2147483647]
accounts_address
varchar[2147483647]
asset
varchar[2147483647]
input
numeric[0]
output
numeric[0]
< 0 0 >
> + URL="accounts_volumes.html" + target="_top" + tooltip="accounts_volumes" + ]; +} diff --git a/docs/database/_default/diagrams/tables/accounts_volumes.1degree.png b/docs/database/_default/diagrams/tables/accounts_volumes.1degree.png new file mode 100644 index 000000000..39cc417ab Binary files /dev/null and b/docs/database/_default/diagrams/tables/accounts_volumes.1degree.png differ diff --git a/docs/database/_default/diagrams/tables/goose_db_version.1degree.dot b/docs/database/_default/diagrams/tables/goose_db_version.1degree.dot new file mode 100644 index 000000000..400a0af51 --- /dev/null +++ b/docs/database/_default/diagrams/tables/goose_db_version.1degree.dot @@ -0,0 +1,17 @@ +digraph "oneDegreeRelationshipsDiagram" { + graph [ rankdir="RL" bgcolor="#ffffff" label="\nGenerated by SchemaSpy" labeljust="l" nodesep="0.18" ranksep="0.46" fontname="Helvetica" fontsize="11" ration="compress" ]; node [ fontname="Helvetica" fontsize="11" shape="plaintext" ]; edge [ arrowsize="0.8" ]; + "goose_db_version" [ + label=< + + + + + + + +
goose_db_version[table]
id
serial[10]
version_id
int8[19]
is_applied
bool[1]
tstamp
timestamp[29,6]
< 0 0 >
> + URL="goose_db_version.html" + target="_top" + tooltip="goose_db_version" + ]; +} diff --git a/docs/database/_default/diagrams/tables/goose_db_version.1degree.png b/docs/database/_default/diagrams/tables/goose_db_version.1degree.png new file mode 100644 index 000000000..a0b0c9ea0 Binary files /dev/null and b/docs/database/_default/diagrams/tables/goose_db_version.1degree.png differ diff --git a/docs/database/_default/diagrams/tables/logs.1degree.dot b/docs/database/_default/diagrams/tables/logs.1degree.dot new file mode 100644 index 000000000..c964f74cd --- /dev/null +++ b/docs/database/_default/diagrams/tables/logs.1degree.dot @@ -0,0 +1,23 @@ +digraph "oneDegreeRelationshipsDiagram" { + graph [ rankdir="RL" bgcolor="#ffffff" label="\nGenerated by SchemaSpy" labeljust="l" nodesep="0.18" ranksep="0.46" fontname="Helvetica" fontsize="11" ration="compress" ]; node [ fontname="Helvetica" fontsize="11" shape="plaintext" ]; edge [ arrowsize="0.8" ]; + "logs" [ + label=< + + + + + + + + + + + + + +
logs[table]
seq
bigserial[19]
ledger
varchar[2147483647]
id
numeric[0]
type
"_default"."log_type"[2147483647]
hash
bytea[2147483647]
date
timestamp[29,6]
data
jsonb[2147483647]
idempotency_key
varchar[255]
memento
bytea[2147483647]
idempotency_hash
bytea[2147483647]
< 0 0 >
> + URL="logs.html" + target="_top" + tooltip="logs" + ]; +} diff --git a/docs/database/_default/diagrams/tables/logs.1degree.png b/docs/database/_default/diagrams/tables/logs.1degree.png new file mode 100644 index 000000000..881e63515 Binary files /dev/null and b/docs/database/_default/diagrams/tables/logs.1degree.png differ diff --git a/docs/database/_default/diagrams/tables/moves.1degree.dot b/docs/database/_default/diagrams/tables/moves.1degree.dot new file mode 100644 index 000000000..4eecec0aa --- /dev/null +++ b/docs/database/_default/diagrams/tables/moves.1degree.dot @@ -0,0 +1,66 @@ +digraph "oneDegreeRelationshipsDiagram" { + graph [ rankdir="RL" bgcolor="#ffffff" label="\nGenerated by SchemaSpy" labeljust="l" nodesep="0.18" ranksep="0.46" fontname="Helvetica" fontsize="11" ration="compress" ]; node [ fontname="Helvetica" fontsize="11" shape="plaintext" ]; edge [ arrowsize="0.8" ]; + "moves":"accounts_seq":w -> "accounts":"seq":e [arrowhead=none dir=back arrowtail=crowodot]; + "moves":"transactions_seq":w -> "transactions":"seq":e [arrowhead=none dir=back arrowtail=crowodot]; + "accounts" [ + label=< + + + + + + + + + +
accounts[table]
seq
ledger
address
address_array
first_usage
...
2 >
> + URL="accounts.html" + target="_top" + tooltip="accounts" + ]; + "moves" [ + label=< + + + + + + + + + + + + + + + + + +
moves[table]
seq
bigserial[19]
ledger
varchar[2147483647]
transactions_seq
int8[19]
accounts_seq
int8[19]
accounts_address
varchar[2147483647]
accounts_address_array
jsonb[2147483647]
asset
varchar[2147483647]
amount
numeric[0]
insertion_date
timestamp[29,6]
effective_date
timestamp[29,6]
post_commit_volumes
"_default"."volumes"[2147483647]
post_commit_effective_volumes
"_default"."volumes"[2147483647]
is_source
bool[1]
transactions_id
int8[19]
< 2 0 >
> + URL="moves.html" + target="_top" + tooltip="moves" + ]; + "transactions" [ + label=< + + + + + + + + + + + + + + +
transactions[table]
seq
ledger
id
timestamp
reference
sources
destinations
sources_arrays
destinations_arrays
metadata
...
2 >
> + URL="transactions.html" + target="_top" + tooltip="transactions" + ]; +} diff --git a/docs/database/_default/diagrams/tables/moves.1degree.png b/docs/database/_default/diagrams/tables/moves.1degree.png new file mode 100644 index 000000000..c4d89080b Binary files /dev/null and b/docs/database/_default/diagrams/tables/moves.1degree.png differ diff --git a/docs/database/_default/diagrams/tables/moves.2degrees.dot b/docs/database/_default/diagrams/tables/moves.2degrees.dot new file mode 100644 index 000000000..522901764 --- /dev/null +++ b/docs/database/_default/diagrams/tables/moves.2degrees.dot @@ -0,0 +1,90 @@ +digraph "twoDegreesRelationshipsDiagram" { + graph [ rankdir="RL" bgcolor="#ffffff" label="\nGenerated by SchemaSpy" labeljust="l" nodesep="0.18" ranksep="0.46" fontname="Helvetica" fontsize="11" ration="compress" ]; node [ fontname="Helvetica" fontsize="11" shape="plaintext" ]; edge [ arrowsize="0.8" ]; + "accounts_metadata":"elipses":w -> "accounts":"seq":e [arrowhead=none dir=back arrowtail=crowodot]; + "moves":"accounts_seq":w -> "accounts":"seq":e [arrowhead=none dir=back arrowtail=crowodot]; + "moves":"transactions_seq":w -> "transactions":"seq":e [arrowhead=none dir=back arrowtail=crowodot]; + "transactions_metadata":"elipses":w -> "transactions":"seq":e [arrowhead=none dir=back arrowtail=crowodot]; + "accounts" [ + label=< + + + + + + + + + +
accounts[table]
seq
ledger
address
address_array
first_usage
...
2 >
> + URL="accounts.html" + target="_top" + tooltip="accounts" + ]; + "accounts_metadata" [ + label=< + + + + +
accounts_metadata[table]
...
< 1
> + URL="accounts_metadata.html" + target="_top" + tooltip="accounts_metadata" + ]; + "moves" [ + label=< + + + + + + + + + + + + + + + + + +
moves[table]
seq
bigserial[19]
ledger
varchar[2147483647]
transactions_seq
int8[19]
accounts_seq
int8[19]
accounts_address
varchar[2147483647]
accounts_address_array
jsonb[2147483647]
asset
varchar[2147483647]
amount
numeric[0]
insertion_date
timestamp[29,6]
effective_date
timestamp[29,6]
post_commit_volumes
"_default"."volumes"[2147483647]
post_commit_effective_volumes
"_default"."volumes"[2147483647]
is_source
bool[1]
transactions_id
int8[19]
< 2 0 >
> + URL="moves.html" + target="_top" + tooltip="moves" + ]; + "transactions" [ + label=< + + + + + + + + + + + + + + +
transactions[table]
seq
ledger
id
timestamp
reference
sources
destinations
sources_arrays
destinations_arrays
metadata
...
2 >
> + URL="transactions.html" + target="_top" + tooltip="transactions" + ]; + "transactions_metadata" [ + label=< + + + + +
transactions_metadata[table]
...
< 1
> + URL="transactions_metadata.html" + target="_top" + tooltip="transactions_metadata" + ]; +} diff --git a/docs/database/_default/diagrams/tables/moves.2degrees.png b/docs/database/_default/diagrams/tables/moves.2degrees.png new file mode 100644 index 000000000..1202ae2cc Binary files /dev/null and b/docs/database/_default/diagrams/tables/moves.2degrees.png differ diff --git a/docs/database/_default/diagrams/tables/transactions.1degree.dot b/docs/database/_default/diagrams/tables/transactions.1degree.dot new file mode 100644 index 000000000..3b53f2715 --- /dev/null +++ b/docs/database/_default/diagrams/tables/transactions.1degree.dot @@ -0,0 +1,66 @@ +digraph "oneDegreeRelationshipsDiagram" { + graph [ rankdir="RL" bgcolor="#ffffff" label="\nGenerated by SchemaSpy" labeljust="l" nodesep="0.18" ranksep="0.46" fontname="Helvetica" fontsize="11" ration="compress" ]; node [ fontname="Helvetica" fontsize="11" shape="plaintext" ]; edge [ arrowsize="0.8" ]; + "moves":"transactions_seq":w -> "transactions":"seq.type":e [arrowhead=none dir=back arrowtail=crowodot]; + "transactions_metadata":"transactions_seq":w -> "transactions":"seq.type":e [arrowhead=none dir=back arrowtail=crowodot]; + "moves" [ + label=< + + + + + + + + + + + + +
moves[table]
seq
ledger
transactions_seq
accounts_seq
accounts_address
accounts_address_array
asset
effective_date
...
< 2
> + URL="moves.html" + target="_top" + tooltip="moves" + ]; + "transactions" [ + label=< + + + + + + + + + + + + + + + + + + +
transactions[table]
seq
bigserial[19]
ledger
varchar[2147483647]
id
int8[19]
timestamp
timestamp[29,6]
reference
varchar[2147483647]
reverted_at
timestamp[29,6]
updated_at
timestamp[29,6]
postings
varchar[2147483647]
sources
jsonb[2147483647]
destinations
jsonb[2147483647]
sources_arrays
jsonb[2147483647]
destinations_arrays
jsonb[2147483647]
metadata
jsonb[2147483647]
post_commit_volumes
jsonb[2147483647]
inserted_at
timestamp[29,6]
< 0 2 >
> + URL="transactions.html" + target="_top" + tooltip="transactions" + ]; + "transactions_metadata" [ + label=< + + + + + + + + + + +
transactions_metadata[table]
seq
ledger
transactions_seq
revision
date
metadata
...
< 1
> + URL="transactions_metadata.html" + target="_top" + tooltip="transactions_metadata" + ]; +} diff --git a/docs/database/_default/diagrams/tables/transactions.1degree.png b/docs/database/_default/diagrams/tables/transactions.1degree.png new file mode 100644 index 000000000..b580165c1 Binary files /dev/null and b/docs/database/_default/diagrams/tables/transactions.1degree.png differ diff --git a/docs/database/_default/diagrams/tables/transactions.2degrees.dot b/docs/database/_default/diagrams/tables/transactions.2degrees.dot new file mode 100644 index 000000000..ed85af516 --- /dev/null +++ b/docs/database/_default/diagrams/tables/transactions.2degrees.dot @@ -0,0 +1,78 @@ +digraph "twoDegreesRelationshipsDiagram" { + graph [ rankdir="RL" bgcolor="#ffffff" label="\nGenerated by SchemaSpy" labeljust="l" nodesep="0.18" ranksep="0.46" fontname="Helvetica" fontsize="11" ration="compress" ]; node [ fontname="Helvetica" fontsize="11" shape="plaintext" ]; edge [ arrowsize="0.8" ]; + "moves":"accounts_seq":w -> "accounts":"elipses":e [arrowhead=none dir=back arrowtail=crowodot]; + "moves":"transactions_seq":w -> "transactions":"seq.type":e [arrowhead=none dir=back arrowtail=crowodot]; + "transactions_metadata":"transactions_seq":w -> "transactions":"seq.type":e [arrowhead=none dir=back arrowtail=crowodot]; + "accounts" [ + label=< + + + + +
accounts[table]
...
2 >
> + URL="accounts.html" + target="_top" + tooltip="accounts" + ]; + "moves" [ + label=< + + + + + + + + + + + + +
moves[table]
seq
ledger
transactions_seq
accounts_seq
accounts_address
accounts_address_array
asset
effective_date
...
< 2
> + URL="moves.html" + target="_top" + tooltip="moves" + ]; + "transactions" [ + label=< + + + + + + + + + + + + + + + + + + +
transactions[table]
seq
bigserial[19]
ledger
varchar[2147483647]
id
int8[19]
timestamp
timestamp[29,6]
reference
varchar[2147483647]
reverted_at
timestamp[29,6]
updated_at
timestamp[29,6]
postings
varchar[2147483647]
sources
jsonb[2147483647]
destinations
jsonb[2147483647]
sources_arrays
jsonb[2147483647]
destinations_arrays
jsonb[2147483647]
metadata
jsonb[2147483647]
post_commit_volumes
jsonb[2147483647]
inserted_at
timestamp[29,6]
< 0 2 >
> + URL="transactions.html" + target="_top" + tooltip="transactions" + ]; + "transactions_metadata" [ + label=< + + + + + + + + + + +
transactions_metadata[table]
seq
ledger
transactions_seq
revision
date
metadata
...
< 1
> + URL="transactions_metadata.html" + target="_top" + tooltip="transactions_metadata" + ]; +} diff --git a/docs/database/_default/diagrams/tables/transactions.2degrees.png b/docs/database/_default/diagrams/tables/transactions.2degrees.png new file mode 100644 index 000000000..1ca4b07ce Binary files /dev/null and b/docs/database/_default/diagrams/tables/transactions.2degrees.png differ diff --git a/docs/database/_default/diagrams/tables/transactions_metadata.1degree.dot b/docs/database/_default/diagrams/tables/transactions_metadata.1degree.dot new file mode 100644 index 000000000..6029a152a --- /dev/null +++ b/docs/database/_default/diagrams/tables/transactions_metadata.1degree.dot @@ -0,0 +1,42 @@ +digraph "oneDegreeRelationshipsDiagram" { + graph [ rankdir="RL" bgcolor="#ffffff" label="\nGenerated by SchemaSpy" labeljust="l" nodesep="0.18" ranksep="0.46" fontname="Helvetica" fontsize="11" ration="compress" ]; node [ fontname="Helvetica" fontsize="11" shape="plaintext" ]; edge [ arrowsize="0.8" ]; + "transactions_metadata":"transactions_seq":w -> "transactions":"seq":e [arrowhead=none dir=back arrowtail=crowodot]; + "transactions" [ + label=< + + + + + + + + + + + + + + +
transactions[table]
seq
ledger
id
timestamp
reference
sources
destinations
sources_arrays
destinations_arrays
metadata
...
2 >
> + URL="transactions.html" + target="_top" + tooltip="transactions" + ]; + "transactions_metadata" [ + label=< + + + + + + + + + + +
transactions_metadata[table]
seq
bigserial[19]
ledger
varchar[2147483647]
transactions_seq
int8[19]
revision
numeric[0]
date
timestamp[29,6]
metadata
jsonb[2147483647]
transactions_id
int8[19]
< 1 0 >
> + URL="transactions_metadata.html" + target="_top" + tooltip="transactions_metadata" + ]; +} diff --git a/docs/database/_default/diagrams/tables/transactions_metadata.1degree.png b/docs/database/_default/diagrams/tables/transactions_metadata.1degree.png new file mode 100644 index 000000000..679b6f90a Binary files /dev/null and b/docs/database/_default/diagrams/tables/transactions_metadata.1degree.png differ diff --git a/docs/database/_default/diagrams/tables/transactions_metadata.2degrees.dot b/docs/database/_default/diagrams/tables/transactions_metadata.2degrees.dot new file mode 100644 index 000000000..bf7cfbeda --- /dev/null +++ b/docs/database/_default/diagrams/tables/transactions_metadata.2degrees.dot @@ -0,0 +1,54 @@ +digraph "twoDegreesRelationshipsDiagram" { + graph [ rankdir="RL" bgcolor="#ffffff" label="\nGenerated by SchemaSpy" labeljust="l" nodesep="0.18" ranksep="0.46" fontname="Helvetica" fontsize="11" ration="compress" ]; node [ fontname="Helvetica" fontsize="11" shape="plaintext" ]; edge [ arrowsize="0.8" ]; + "moves":"elipses":w -> "transactions":"seq":e [arrowhead=none dir=back arrowtail=crowodot]; + "transactions_metadata":"transactions_seq":w -> "transactions":"seq":e [arrowhead=none dir=back arrowtail=crowodot]; + "moves" [ + label=< + + + + +
moves[table]
...
< 2
> + URL="moves.html" + target="_top" + tooltip="moves" + ]; + "transactions" [ + label=< + + + + + + + + + + + + + + +
transactions[table]
seq
ledger
id
timestamp
reference
sources
destinations
sources_arrays
destinations_arrays
metadata
...
2 >
> + URL="transactions.html" + target="_top" + tooltip="transactions" + ]; + "transactions_metadata" [ + label=< + + + + + + + + + + +
transactions_metadata[table]
seq
bigserial[19]
ledger
varchar[2147483647]
transactions_seq
int8[19]
revision
numeric[0]
date
timestamp[29,6]
metadata
jsonb[2147483647]
transactions_id
int8[19]
< 1 0 >
> + URL="transactions_metadata.html" + target="_top" + tooltip="transactions_metadata" + ]; +} diff --git a/docs/database/_default/diagrams/tables/transactions_metadata.2degrees.png b/docs/database/_default/diagrams/tables/transactions_metadata.2degrees.png new file mode 100644 index 000000000..b5538dbed Binary files /dev/null and b/docs/database/_default/diagrams/tables/transactions_metadata.2degrees.png differ diff --git a/docs/database/_system/diagrams/orphans/orphans.dot b/docs/database/_system/diagrams/orphans/orphans.dot new file mode 100644 index 000000000..e41600f8d --- /dev/null +++ b/docs/database/_system/diagrams/orphans/orphans.dot @@ -0,0 +1,33 @@ +digraph "orphans" { + graph [ rankdir="RL" bgcolor="#ffffff" nodesep="0.18" ranksep="0.46" fontname="Helvetica" fontsize="11" ration="compress" ]; node [ fontname="Helvetica" fontsize="11" shape="plaintext" ]; edge [ arrowsize="0.8" ]; + "goose_db_version" [ + label=< + + + + + + + +
goose_db_version[table]
id
serial[10]
version_id
int8[19]
is_applied
bool[1]
tstamp
timestamp[29,6]
< 0 0 >
> + URL="tables/goose_db_version.html" + target="_top" + tooltip="goose_db_version" + ]; + "ledgers" [ + label=< + + + + + + + + + +
ledgers[table]
name
varchar[63]
added_at
timestamp[29,6]
bucket
varchar[63]
metadata
jsonb[2147483647]
features
jsonb[2147483647]
id
bigserial[19]
< 0 0 >
> + URL="tables/ledgers.html" + target="_top" + tooltip="ledgers" + ]; +} diff --git a/docs/database/_system/diagrams/orphans/orphans.png b/docs/database/_system/diagrams/orphans/orphans.png new file mode 100644 index 000000000..7df36caf3 Binary files /dev/null and b/docs/database/_system/diagrams/orphans/orphans.png differ diff --git a/docs/database/_system/diagrams/tables/goose_db_version.1degree.dot b/docs/database/_system/diagrams/tables/goose_db_version.1degree.dot new file mode 100644 index 000000000..400a0af51 --- /dev/null +++ b/docs/database/_system/diagrams/tables/goose_db_version.1degree.dot @@ -0,0 +1,17 @@ +digraph "oneDegreeRelationshipsDiagram" { + graph [ rankdir="RL" bgcolor="#ffffff" label="\nGenerated by SchemaSpy" labeljust="l" nodesep="0.18" ranksep="0.46" fontname="Helvetica" fontsize="11" ration="compress" ]; node [ fontname="Helvetica" fontsize="11" shape="plaintext" ]; edge [ arrowsize="0.8" ]; + "goose_db_version" [ + label=< + + + + + + + +
goose_db_version[table]
id
serial[10]
version_id
int8[19]
is_applied
bool[1]
tstamp
timestamp[29,6]
< 0 0 >
> + URL="goose_db_version.html" + target="_top" + tooltip="goose_db_version" + ]; +} diff --git a/docs/database/_system/diagrams/tables/goose_db_version.1degree.png b/docs/database/_system/diagrams/tables/goose_db_version.1degree.png new file mode 100644 index 000000000..a0b0c9ea0 Binary files /dev/null and b/docs/database/_system/diagrams/tables/goose_db_version.1degree.png differ diff --git a/docs/database/_system/diagrams/tables/ledgers.1degree.dot b/docs/database/_system/diagrams/tables/ledgers.1degree.dot new file mode 100644 index 000000000..4e4f82ff1 --- /dev/null +++ b/docs/database/_system/diagrams/tables/ledgers.1degree.dot @@ -0,0 +1,19 @@ +digraph "oneDegreeRelationshipsDiagram" { + graph [ rankdir="RL" bgcolor="#ffffff" label="\nGenerated by SchemaSpy" labeljust="l" nodesep="0.18" ranksep="0.46" fontname="Helvetica" fontsize="11" ration="compress" ]; node [ fontname="Helvetica" fontsize="11" shape="plaintext" ]; edge [ arrowsize="0.8" ]; + "ledgers" [ + label=< + + + + + + + + + +
ledgers[table]
name
varchar[63]
added_at
timestamp[29,6]
bucket
varchar[63]
metadata
jsonb[2147483647]
features
jsonb[2147483647]
id
bigserial[19]
< 0 0 >
> + URL="ledgers.html" + target="_top" + tooltip="ledgers" + ]; +} diff --git a/docs/database/_system/diagrams/tables/ledgers.1degree.png b/docs/database/_system/diagrams/tables/ledgers.1degree.png new file mode 100644 index 000000000..ef2116785 Binary files /dev/null and b/docs/database/_system/diagrams/tables/ledgers.1degree.png differ diff --git a/docs/events/CommittedTransactions.json b/docs/events/CommittedTransactions.json new file mode 100644 index 000000000..0be89a80a --- /dev/null +++ b/docs/events/CommittedTransactions.json @@ -0,0 +1,158 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$id": "https://github.com/formancehq/ledger/internal/bus/committed-transactions", + "$ref": "#/$defs/CommittedTransactions", + "$defs": { + "CommittedTransactions": { + "properties": { + "ledger": { + "type": "string" + }, + "transactions": { + "items": { + "$ref": "#/$defs/Transaction" + }, + "type": "array" + }, + "accountMetadata": { + "additionalProperties": { + "$ref": "#/$defs/Metadata" + }, + "type": "object" + } + }, + "additionalProperties": false, + "type": "object", + "required": [ + "ledger", + "transactions", + "accountMetadata" + ] + }, + "Int": { + "properties": {}, + "additionalProperties": false, + "type": "object" + }, + "Metadata": { + "additionalProperties": { + "type": "string" + }, + "type": "object" + }, + "PostCommitVolumes": { + "additionalProperties": { + "$ref": "#/$defs/VolumesByAssets" + }, + "type": "object" + }, + "Posting": { + "properties": { + "source": { + "type": "string" + }, + "destination": { + "type": "string" + }, + "amount": { + "$ref": "#/$defs/Int" + }, + "asset": { + "type": "string" + } + }, + "additionalProperties": false, + "type": "object", + "required": [ + "source", + "destination", + "amount", + "asset" + ] + }, + "Postings": { + "items": { + "$ref": "#/$defs/Posting" + }, + "type": "array" + }, + "Time": { + "type": "string", + "format": "date-time", + "title": "Normalized date" + }, + "Transaction": { + "properties": { + "postings": { + "$ref": "#/$defs/Postings" + }, + "metadata": { + "$ref": "#/$defs/Metadata" + }, + "timestamp": { + "$ref": "#/$defs/Time" + }, + "reference": { + "type": "string" + }, + "insertedAt": { + "$ref": "#/$defs/Time" + }, + "id": { + "type": "integer" + }, + "revertedAt": { + "$ref": "#/$defs/Time" + }, + "postCommitVolumes": { + "$ref": "#/$defs/PostCommitVolumes" + }, + "postCommitEffectiveVolumes": { + "$ref": "#/$defs/PostCommitVolumes" + }, + "reverted": { + "type": "boolean" + }, + "preCommitVolumes": { + "$ref": "#/$defs/PostCommitVolumes" + }, + "preCommitEffectiveVolumes": { + "$ref": "#/$defs/PostCommitVolumes" + } + }, + "additionalProperties": false, + "type": "object", + "required": [ + "postings", + "metadata", + "timestamp", + "id" + ] + }, + "Volumes": { + "properties": { + "input": { + "$ref": "#/$defs/Int" + }, + "output": { + "$ref": "#/$defs/Int" + }, + "balance": { + "$ref": "#/$defs/Int" + } + }, + "additionalProperties": false, + "type": "object", + "required": [ + "input", + "output" + ] + }, + "VolumesByAssets": { + "additionalProperties": { + "$ref": "#/$defs/Volumes" + }, + "type": "object" + } + } +} \ No newline at end of file diff --git a/docs/events/DeletedMetadata.json b/docs/events/DeletedMetadata.json new file mode 100644 index 000000000..043315291 --- /dev/null +++ b/docs/events/DeletedMetadata.json @@ -0,0 +1,29 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$id": "https://github.com/formancehq/ledger/internal/bus/deleted-metadata", + 
"$ref": "#/$defs/DeletedMetadata", + "$defs": { + "DeletedMetadata": { + "properties": { + "ledger": { + "type": "string" + }, + "targetType": { + "type": "string" + }, + "targetId": true, + "key": { + "type": "string" + } + }, + "additionalProperties": false, + "type": "object", + "required": [ + "ledger", + "targetType", + "targetId", + "key" + ] + } + } +} \ No newline at end of file diff --git a/docs/events/RevertedTransaction.json b/docs/events/RevertedTransaction.json new file mode 100644 index 000000000..28f65f5b7 --- /dev/null +++ b/docs/events/RevertedTransaction.json @@ -0,0 +1,152 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$id": "https://github.com/formancehq/ledger/internal/bus/reverted-transaction", + "$ref": "#/$defs/RevertedTransaction", + "$defs": { + "Int": { + "properties": {}, + "additionalProperties": false, + "type": "object" + }, + "Metadata": { + "additionalProperties": { + "type": "string" + }, + "type": "object" + }, + "PostCommitVolumes": { + "additionalProperties": { + "$ref": "#/$defs/VolumesByAssets" + }, + "type": "object" + }, + "Posting": { + "properties": { + "source": { + "type": "string" + }, + "destination": { + "type": "string" + }, + "amount": { + "$ref": "#/$defs/Int" + }, + "asset": { + "type": "string" + } + }, + "additionalProperties": false, + "type": "object", + "required": [ + "source", + "destination", + "amount", + "asset" + ] + }, + "Postings": { + "items": { + "$ref": "#/$defs/Posting" + }, + "type": "array" + }, + "RevertedTransaction": { + "properties": { + "ledger": { + "type": "string" + }, + "revertedTransaction": { + "$ref": "#/$defs/Transaction" + }, + "revertTransaction": { + "$ref": "#/$defs/Transaction" + } + }, + "additionalProperties": false, + "type": "object", + "required": [ + "ledger", + "revertedTransaction", + "revertTransaction" + ] + }, + "Time": { + "type": "string", + "format": "date-time", + "title": "Normalized date" + }, + "Transaction": { + "properties": { + "postings": { + "$ref": "#/$defs/Postings" + }, + "metadata": { + "$ref": "#/$defs/Metadata" + }, + "timestamp": { + "$ref": "#/$defs/Time" + }, + "reference": { + "type": "string" + }, + "insertedAt": { + "$ref": "#/$defs/Time" + }, + "id": { + "type": "integer" + }, + "revertedAt": { + "$ref": "#/$defs/Time" + }, + "postCommitVolumes": { + "$ref": "#/$defs/PostCommitVolumes" + }, + "postCommitEffectiveVolumes": { + "$ref": "#/$defs/PostCommitVolumes" + }, + "reverted": { + "type": "boolean" + }, + "preCommitVolumes": { + "$ref": "#/$defs/PostCommitVolumes" + }, + "preCommitEffectiveVolumes": { + "$ref": "#/$defs/PostCommitVolumes" + } + }, + "additionalProperties": false, + "type": "object", + "required": [ + "postings", + "metadata", + "timestamp", + "id" + ] + }, + "Volumes": { + "properties": { + "input": { + "$ref": "#/$defs/Int" + }, + "output": { + "$ref": "#/$defs/Int" + }, + "balance": { + "$ref": "#/$defs/Int" + } + }, + "additionalProperties": false, + "type": "object", + "required": [ + "input", + "output" + ] + }, + "VolumesByAssets": { + "additionalProperties": { + "$ref": "#/$defs/Volumes" + }, + "type": "object" + } + } +} \ No newline at end of file diff --git a/docs/events/SavedMetadata.json b/docs/events/SavedMetadata.json new file mode 100644 index 000000000..c0b6f0c31 --- /dev/null +++ b/docs/events/SavedMetadata.json @@ -0,0 +1,37 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$id": "https://github.com/formancehq/ledger/internal/bus/saved-metadata", + "$ref": "#/$defs/SavedMetadata", + 
"$defs": { + "Metadata": { + "additionalProperties": { + "type": "string" + }, + "type": "object" + }, + "SavedMetadata": { + "properties": { + "ledger": { + "type": "string" + }, + "targetType": { + "type": "string" + }, + "targetId": { + "type": "string" + }, + "metadata": { + "$ref": "#/$defs/Metadata" + } + }, + "additionalProperties": false, + "type": "object", + "required": [ + "ledger", + "targetType", + "targetId", + "metadata" + ] + } + } +} \ No newline at end of file diff --git a/examples/basic-auth/docker-compose.yml b/examples/basic-auth/docker-compose.yml deleted file mode 100644 index 9767eea25..000000000 --- a/examples/basic-auth/docker-compose.yml +++ /dev/null @@ -1,26 +0,0 @@ ---- -volumes: - postgres: -services: - postgres: - extends: - file: ../../docker-compose.yml - service: postgres - ledger: - extends: - file: ../../docker-compose.yml - service: ledger - depends_on: - - postgres - image: golang:1.19-alpine - entrypoint: go run main.go serve - volumes: - - ../..:/src - ports: - - 3068:3068 - working_dir: /src - environment: - CGO_ENABLED: 0 - DEBUG: "true" - AUTH_BASIC_ENABLED: "true" - AUTH_BASIC_CREDENTIALS: "user:password" diff --git a/examples/jaeger-exporter/docker-compose.yml b/examples/jaeger-exporter/docker-compose.yml deleted file mode 100644 index 746f1db65..000000000 --- a/examples/jaeger-exporter/docker-compose.yml +++ /dev/null @@ -1,33 +0,0 @@ ---- -volumes: - postgres: -services: - postgres: - extends: - file: ../../docker-compose.yml - service: postgres - jaeger: - image: jaegertracing/opentelemetry-all-in-one - ports: - - "16686:16686/tcp" - ledger: - extends: - file: ../../docker-compose.yml - service: ledger - depends_on: - - postgres - - jaeger - image: golang:1.19-alpine - entrypoint: go run main.go serve - volumes: - - ../..:/src - working_dir: /src - ports: - - "3068:3068/tcp" - environment: - CGO_ENABLED: 0 - DEBUG: "true" - OTEL_TRACES: "true" - OTEL_TRACES_EXPORTER: jaeger - OTEL_TRACES_EXPORTER_JAEGER_ENDPOINT: http://jaeger:14268/api/traces - OTEL_SERVICE_NAME: ledger diff --git a/examples/otlp-exporter/docker-compose.yml b/examples/otlp-exporter/docker-compose.yml deleted file mode 100644 index 3a6ea3b5b..000000000 --- a/examples/otlp-exporter/docker-compose.yml +++ /dev/null @@ -1,46 +0,0 @@ ---- -volumes: - postgres: -services: - postgres: - extends: - file: ../../docker-compose.yml - service: postgres - prometheus: - image: prom/prometheus:latest - restart: always - volumes: - - ./prometheus.yaml:/etc/prometheus/prometheus.yml - ports: - - "9090:9090" - otel: - image: "otel/opentelemetry-collector-contrib:0.81.0" - command: [ "--config=/etc/otel-collector-config.yaml" ] - volumes: - - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml - ledger: - extends: - file: ../../docker-compose.yml - service: ledger - depends_on: - - postgres - - otel - image: golang:1.19-alpine - entrypoint: go run main.go serve - volumes: - - ../..:/src - working_dir: /src - environment: - CGO_ENABLED: 0 - DEBUG: "true" - OTEL_TRACES: "true" - OTEL_TRACES_EXPORTER: otlp - OTEL_TRACES_EXPORTER_OTLP_ENDPOINT: otel:4317 - OTEL_TRACES_EXPORTER_OTLP_INSECURE: "true" - OTEL_METRICS: "true" - OTEL_METRICS_EXPORTER: otlp - OTEL_METRICS_EXPORTER_OTLP_ENDPOINT: otel:4317 - OTEL_METRICS_EXPORTER_OTLP_INSECURE: "true" - OTEL_SERVICE_NAME: ledger - OTEL_RESOURCE_ATTRIBUTES: version=develop - OTEL_METRICS_RUNTIME: "true" diff --git a/go.mod b/go.mod index ed1622582..fdcc389b5 100644 --- a/go.mod +++ b/go.mod @@ -6,35 +6,43 @@ toolchain go1.22.7 replace 
github.com/formancehq/stack/ledger/client => ./pkg/client +replace google.golang.org/genproto v0.0.0-20200423170343-7949de9c1215 => google.golang.org/genproto v0.0.0-20240903143218-8af14fe29dc1 + require ( github.com/ThreeDotsLabs/watermill v1.3.7 github.com/alitto/pond v1.9.2 github.com/antlr/antlr4/runtime/Go/antlr v1.4.10 github.com/bluele/gcache v0.0.2 - github.com/formancehq/go-libs v1.7.2 + github.com/formancehq/go-libs/v2 v2.0.1-0.20241022185745-110c95803b63 github.com/formancehq/stack/ledger/client v0.0.0-00010101000000-000000000000 github.com/go-chi/chi/v5 v5.1.0 github.com/go-chi/cors v1.2.1 github.com/google/go-cmp v0.6.0 github.com/google/uuid v1.6.0 - github.com/jackc/pgx/v5 v5.7.1 - github.com/lib/pq v1.10.9 + github.com/invopop/jsonschema v0.12.0 + github.com/jamiealquiza/tachymeter v2.0.0+incompatible github.com/logrusorgru/aurora v2.0.3+incompatible + github.com/nats-io/nats.go v1.37.0 github.com/onsi/ginkgo/v2 v2.20.2 github.com/onsi/gomega v1.34.2 + github.com/ory/dockertest/v3 v3.11.0 github.com/pborman/uuid v1.2.1 - github.com/pkg/errors v0.9.1 - github.com/sirupsen/logrus v1.9.3 + github.com/shomali11/xsql v0.0.0-20190608141458-bf76292144df github.com/spf13/cobra v1.8.1 github.com/spf13/pflag v1.0.5 github.com/stretchr/testify v1.9.0 github.com/uptrace/bun v1.2.3 github.com/uptrace/bun/dialect/pgdialect v1.2.3 + github.com/xeipuuv/gojsonschema v1.2.0 + github.com/xo/dburl v0.23.2 go.opentelemetry.io/otel v1.31.0 go.opentelemetry.io/otel/metric v1.31.0 + go.opentelemetry.io/otel/sdk/metric v1.31.0 go.opentelemetry.io/otel/trace v1.31.0 - go.uber.org/fx v1.23.0 + go.uber.org/fx v1.22.2 go.uber.org/mock v0.4.0 + golang.org/x/oauth2 v0.23.0 + golang.org/x/sync v0.8.0 ) require ( @@ -77,7 +85,7 @@ require ( github.com/eapache/go-xerial-snappy v0.0.0-20230731223053-c322873962e3 // indirect github.com/eapache/queue v1.1.0 // indirect github.com/ebitengine/purego v0.8.0 // indirect - github.com/ericlagergren/decimal v0.0.0-20221120152707-495c53812d05 // indirect + github.com/ericlagergren/decimal v0.0.0-20240411145413-00de7ca16731 // indirect github.com/felixge/httpsnoop v1.0.4 // indirect github.com/go-chi/chi v4.1.2+incompatible // indirect github.com/go-chi/render v1.0.3 // indirect @@ -101,9 +109,9 @@ require ( github.com/hashicorp/go-retryablehttp v0.7.7 // indirect github.com/hashicorp/go-uuid v1.0.3 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect - github.com/invopop/jsonschema v0.12.0 // indirect github.com/jackc/pgpassfile v1.0.0 // indirect github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect + github.com/jackc/pgx/v5 v5.7.1 // indirect github.com/jackc/puddle/v2 v2.2.2 // indirect github.com/jcmturner/aescts/v2 v2.0.0 // indirect github.com/jcmturner/dnsutils/v2 v2.0.0 // indirect @@ -115,19 +123,21 @@ require ( github.com/lithammer/shortuuid/v3 v3.0.7 // indirect github.com/lufia/plan9stats v0.0.0-20240909124753-873cd0166683 // indirect github.com/mailru/easyjson v0.7.7 // indirect + github.com/minio/highwayhash v1.0.3 // indirect github.com/moby/docker-image-spec v1.3.1 // indirect github.com/moby/term v0.5.0 // indirect github.com/muhlemmer/gu v0.3.1 // indirect github.com/muhlemmer/httpforwarded v0.1.0 // indirect - github.com/nats-io/nats.go v1.37.0 // indirect + github.com/nats-io/jwt/v2 v2.7.0 // indirect + github.com/nats-io/nats-server/v2 v2.10.21 // indirect github.com/nats-io/nkeys v0.4.7 // indirect github.com/nats-io/nuid v1.0.1 // indirect github.com/oklog/ulid v1.3.1 // indirect 
github.com/opencontainers/go-digest v1.0.0 // indirect github.com/opencontainers/image-spec v1.1.0 // indirect github.com/opencontainers/runc v1.1.14 // indirect - github.com/ory/dockertest/v3 v3.11.0 // indirect github.com/pierrec/lz4/v4 v4.1.21 // indirect + github.com/pkg/errors v0.9.1 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 // indirect github.com/puzpuzpuz/xsync/v3 v3.4.0 // indirect @@ -135,6 +145,8 @@ require ( github.com/riandyrn/otelchi v0.10.0 // indirect github.com/rs/cors v1.11.1 // indirect github.com/shirou/gopsutil/v4 v4.24.9 // indirect + github.com/shomali11/util v0.0.0-20180607005212-e0f70fd665ff // indirect + github.com/sirupsen/logrus v1.9.3 // indirect github.com/tklauser/go-sysconf v0.3.14 // indirect github.com/tklauser/numcpus v0.9.0 // indirect github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect @@ -150,8 +162,6 @@ require ( github.com/xdg-go/stringprep v1.0.4 // indirect github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect - github.com/xeipuuv/gojsonschema v1.2.0 // indirect - github.com/xo/dburl v0.23.2 // indirect github.com/yusufpapurcu/wmi v1.2.4 // indirect github.com/zitadel/oidc/v2 v2.12.2 // indirect go.opentelemetry.io/contrib/instrumentation/host v0.56.0 // indirect @@ -167,17 +177,15 @@ require ( go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.31.0 // indirect go.opentelemetry.io/otel/log v0.6.0 // indirect go.opentelemetry.io/otel/sdk v1.31.0 // indirect - go.opentelemetry.io/otel/sdk/metric v1.31.0 // indirect go.opentelemetry.io/proto/otlp v1.3.1 // indirect go.uber.org/dig v1.18.0 // indirect go.uber.org/multierr v1.11.0 // indirect go.uber.org/zap v1.27.0 // indirect golang.org/x/crypto v0.28.0 // indirect golang.org/x/net v0.30.0 // indirect - golang.org/x/oauth2 v0.23.0 // indirect - golang.org/x/sync v0.8.0 // indirect golang.org/x/sys v0.26.0 // indirect golang.org/x/text v0.19.0 // indirect + golang.org/x/time v0.6.0 // indirect golang.org/x/tools v0.25.0 // indirect google.golang.org/genproto/googleapis/api v0.0.0-20241007155032-5fefd90f89a9 // indirect google.golang.org/genproto/googleapis/rpc v0.0.0-20241007155032-5fefd90f89a9 // indirect diff --git a/go.sum b/go.sum index a7507e86e..b0f8ee3d5 100644 --- a/go.sum +++ b/go.sum @@ -89,14 +89,18 @@ github.com/eapache/queue v1.1.0 h1:YOEu7KNc61ntiQlcEeUIoDTJ2o8mQznoNvUhiigpIqc= github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I= github.com/ebitengine/purego v0.8.0 h1:JbqvnEzRvPpxhCJzJJ2y0RbiZ8nyjccVUrSM3q+GvvE= github.com/ebitengine/purego v0.8.0/go.mod h1:iIjxzd6CiRiOG0UyXP+V1+jWqUXVjPKLAI0mRfJZTmQ= -github.com/ericlagergren/decimal v0.0.0-20221120152707-495c53812d05 h1:S92OBrGuLLZsyM5ybUzgc/mPjIYk2AZqufieooe98uw= -github.com/ericlagergren/decimal v0.0.0-20221120152707-495c53812d05/go.mod h1:M9R1FoZ3y//hwwnJtO51ypFGwm8ZfpxPT/ZLtO1mcgQ= +github.com/ericlagergren/decimal v0.0.0-20240411145413-00de7ca16731 h1:R/ZjJpjQKsZ6L/+Gf9WHbt31GG8NMVcpRqUE+1mMIyo= +github.com/ericlagergren/decimal v0.0.0-20240411145413-00de7ca16731/go.mod h1:M9R1FoZ3y//hwwnJtO51ypFGwm8ZfpxPT/ZLtO1mcgQ= github.com/fatih/color v1.17.0 h1:GlRw1BRJxkpqUCBKzKOw098ed57fEsKeNjpTe3cSjK4= github.com/fatih/color v1.17.0/go.mod h1:YZ7TlrGPkiz6ku9fK3TLD/pl3CpsiFyu8N92HLgmosI= github.com/felixge/httpsnoop v1.0.4 
h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= -github.com/formancehq/go-libs v1.7.2 h1:zPLkMVigMxcdPQiA8Q0HLPgA/al/hKmLxLw9muDPM1U= -github.com/formancehq/go-libs v1.7.2/go.mod h1:3+crp7AA/Rllpo9M/ZQslaHkYt9EtXtbE4qYasV201Q= +github.com/formancehq/go-libs/v2 v2.0.1-0.20241017152835-2c30f563ab46 h1:8wZtnWSIYNV7DwD0Jr4HsbcRgezOrgDJ2Q0w9ABieKc= +github.com/formancehq/go-libs/v2 v2.0.1-0.20241017152835-2c30f563ab46/go.mod h1:LgxayMN6wgAQbkB3ioBDTHOVMKp1rC6Q55M1CvG44xY= +github.com/formancehq/go-libs/v2 v2.0.1-0.20241017153232-1a62cecf1a61 h1:GSIhsdo/YXuZXI4q8xA8IrdOkkjfFp6O+DiNywk8s8U= +github.com/formancehq/go-libs/v2 v2.0.1-0.20241017153232-1a62cecf1a61/go.mod h1:LgxayMN6wgAQbkB3ioBDTHOVMKp1rC6Q55M1CvG44xY= +github.com/formancehq/go-libs/v2 v2.0.1-0.20241022185745-110c95803b63 h1:DN6gDFwh3zO9VwV6Nt2tj4/BEecyfWfOdHp1YYJ5sBA= +github.com/formancehq/go-libs/v2 v2.0.1-0.20241022185745-110c95803b63/go.mod h1:LgxayMN6wgAQbkB3ioBDTHOVMKp1rC6Q55M1CvG44xY= github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw= github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g= github.com/go-chi/chi v4.1.2+incompatible h1:fGFk2Gmi/YKXk0OmGfBh0WgmN3XB8lVnEyNz34tQRec= @@ -175,6 +179,8 @@ github.com/jackc/pgx/v5 v5.7.1 h1:x7SYsPBYDkHDksogeSmZZ5xzThcTgRz++I5E+ePFUcs= github.com/jackc/pgx/v5 v5.7.1/go.mod h1:e7O26IywZZ+naJtWWos6i6fvWK+29etgITqrqHLfoZA= github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo= github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4= +github.com/jamiealquiza/tachymeter v2.0.0+incompatible h1:mGiF1DGo8l6vnGT8FXNNcIXht/YmjzfraiUprXYwJ6g= +github.com/jamiealquiza/tachymeter v2.0.0+incompatible/go.mod h1:Ayf6zPZKEnLsc3winWEXJRkTBhdHo58HODAu1oFJkYU= github.com/jcmturner/aescts/v2 v2.0.0 h1:9YKLH6ey7H4eDBXW8khjYslgyqG2xZikXP0EQFKrle8= github.com/jcmturner/aescts/v2 v2.0.0/go.mod h1:AiaICIRyfYg35RUkr8yESTqvSy7csK90qZ5xfvvsoNs= github.com/jcmturner/dnsutils/v2 v2.0.0 h1:lltnkeZGL0wILNvrNiVCR6Ro5PGU/SeBvVO/8c/iPbo= @@ -272,6 +278,10 @@ github.com/rs/cors v1.11.1/go.mod h1:XyqrcTp5zjWr1wsJ8PIRZssZ8b/WMcMf71DJnit4EMU github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/shirou/gopsutil/v4 v4.24.9 h1:KIV+/HaHD5ka5f570RZq+2SaeFsb/pq+fp2DGNWYoOI= github.com/shirou/gopsutil/v4 v4.24.9/go.mod h1:3fkaHNeYsUFCGZ8+9vZVWtbyM1k2eRnlL+bWO8Bxa/Q= +github.com/shomali11/util v0.0.0-20180607005212-e0f70fd665ff h1:A47HTOEURe8GFXu/9ztnUzVgBBo0NlWoKmVPmfJ4LR8= +github.com/shomali11/util v0.0.0-20180607005212-e0f70fd665ff/go.mod h1:WWE2GJM9B5UpdOiwH2val10w/pvJ2cUUQOOA/4LgOng= +github.com/shomali11/xsql v0.0.0-20190608141458-bf76292144df h1:SVCDTuzM3KEk8WBwSSw7RTPLw9ajzBaXDg39Bo6xIeU= +github.com/shomali11/xsql v0.0.0-20190608141458-bf76292144df/go.mod h1:K8jR5lDI2MGs9Ky+X2jIF4MwIslI0L8o8ijIlEq7/Vw= github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= github.com/spf13/cobra v1.8.1 h1:e5/vxKd/rZsfSJMUX1agtjeTDf+qv1/JdBF8gg5k9ZM= @@ -375,8 +385,8 @@ go.opentelemetry.io/proto/otlp v1.3.1 h1:TrMUixzpM0yuc/znrFTP9MMRh8trP93mkCiDVeX go.opentelemetry.io/proto/otlp v1.3.1/go.mod h1:0X1WI4de4ZsLrrJNLAQbFeLCm3T7yBkR0XqQ7niQU+8= go.uber.org/dig v1.18.0 h1:imUL1UiY0Mg4bqbFfsRQO5G4CGRBec/ZujWTvSVp3pw= go.uber.org/dig v1.18.0/go.mod 
h1:Us0rSJiThwCv2GteUN0Q7OKvU7n5J4dxZ9JKUXozFdE= -go.uber.org/fx v1.23.0 h1:lIr/gYWQGfTwGcSXWXu4vP5Ws6iqnNEIY+F/aFzCKTg= -go.uber.org/fx v1.23.0/go.mod h1:o/D9n+2mLP6v1EG+qsdT1O8wKopYAsqZasju97SDFCU= +go.uber.org/fx v1.22.2 h1:iPW+OPxv0G8w75OemJ1RAnTUrF55zOJlXlo1TbJ0Buw= +go.uber.org/fx v1.22.2/go.mod h1:o/D9n+2mLP6v1EG+qsdT1O8wKopYAsqZasju97SDFCU= go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= go.uber.org/mock v0.4.0 h1:VcM4ZOtdbR4f6VXfiOpwpVJDL6lCReaZ6mw31wqh7KU= @@ -427,6 +437,7 @@ golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.21.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= diff --git a/internal/README.md b/internal/README.md new file mode 100644 index 000000000..3976f0484 --- /dev/null +++ b/internal/README.md @@ -0,0 +1,1225 @@ + + +# ledger + +```go +import "github.com/formancehq/ledger/internal" +``` + +## Index + +- [Constants](<#constants>) +- [Variables](<#variables>) +- [func ComputeIdempotencyHash\(inputs any\) string](<#ComputeIdempotencyHash>) +- [type Account](<#Account>) +- [type AccountMetadata](<#AccountMetadata>) +- [type AccountsVolumes](<#AccountsVolumes>) +- [type BalancesByAssets](<#BalancesByAssets>) +- [type BalancesByAssetsByAccounts](<#BalancesByAssetsByAccounts>) +- [type Configuration](<#Configuration>) + - [func NewDefaultConfiguration\(\) Configuration](<#NewDefaultConfiguration>) + - [func \(c \*Configuration\) SetDefaults\(\)](<#Configuration.SetDefaults>) + - [func \(c \*Configuration\) Validate\(\) error](<#Configuration.Validate>) +- [type CreatedTransaction](<#CreatedTransaction>) + - [func \(p CreatedTransaction\) GetMemento\(\) any](<#CreatedTransaction.GetMemento>) + - [func \(p CreatedTransaction\) Type\(\) LogType](<#CreatedTransaction.Type>) +- [type DeletedMetadata](<#DeletedMetadata>) + - [func \(s DeletedMetadata\) Type\(\) LogType](<#DeletedMetadata.Type>) + - [func \(s \*DeletedMetadata\) UnmarshalJSON\(data \[\]byte\) error](<#DeletedMetadata.UnmarshalJSON>) +- [type ErrInvalidBucketName](<#ErrInvalidBucketName>) + - [func \(e ErrInvalidBucketName\) Error\(\) string](<#ErrInvalidBucketName.Error>) + - [func \(e ErrInvalidBucketName\) Is\(err error\) bool](<#ErrInvalidBucketName.Is>) +- [type ErrInvalidLedgerName](<#ErrInvalidLedgerName>) + - [func \(e ErrInvalidLedgerName\) Error\(\) string](<#ErrInvalidLedgerName.Error>) + - [func \(e ErrInvalidLedgerName\) Is\(err error\) bool](<#ErrInvalidLedgerName.Is>) +- [type FeatureSet](<#FeatureSet>) + - [func \(f FeatureSet\) String\(\) string](<#FeatureSet.String>) + - [func \(f FeatureSet\) With\(feature, value string\) FeatureSet](<#FeatureSet.With>) +- [type Ledger](<#Ledger>) + - [func MustNewWithDefault\(name string\) Ledger](<#MustNewWithDefault>) + - [func New\(name string, configuration Configuration\) \(\*Ledger, error\)](<#New>) + - [func NewWithDefaults\(name string\) \(\*Ledger, 
error\)](<#NewWithDefaults>) + - [func \(l Ledger\) HasFeature\(feature, value string\) bool](<#Ledger.HasFeature>) + - [func \(l Ledger\) WithMetadata\(m metadata.Metadata\) Ledger](<#Ledger.WithMetadata>) +- [type Log](<#Log>) + - [func NewLog\(payload LogPayload\) Log](<#NewLog>) + - [func \(l Log\) ChainLog\(previous \*Log\) Log](<#Log.ChainLog>) + - [func \(l \*Log\) ComputeHash\(previous \*Log\)](<#Log.ComputeHash>) + - [func \(l \*Log\) UnmarshalJSON\(data \[\]byte\) error](<#Log.UnmarshalJSON>) + - [func \(l Log\) WithIdempotencyKey\(key string\) Log](<#Log.WithIdempotencyKey>) +- [type LogPayload](<#LogPayload>) + - [func HydrateLog\(\_type LogType, data \[\]byte\) \(LogPayload, error\)](<#HydrateLog>) +- [type LogType](<#LogType>) + - [func LogTypeFromString\(logType string\) LogType](<#LogTypeFromString>) + - [func \(lt LogType\) MarshalJSON\(\) \(\[\]byte, error\)](<#LogType.MarshalJSON>) + - [func \(lt \*LogType\) Scan\(src interface\{\}\) error](<#LogType.Scan>) + - [func \(lt LogType\) String\(\) string](<#LogType.String>) + - [func \(lt \*LogType\) UnmarshalJSON\(data \[\]byte\) error](<#LogType.UnmarshalJSON>) + - [func \(lt LogType\) Value\(\) \(driver.Value, error\)](<#LogType.Value>) +- [type Memento](<#Memento>) +- [type Move](<#Move>) +- [type Moves](<#Moves>) + - [func \(m Moves\) ComputePostCommitEffectiveVolumes\(\) PostCommitVolumes](<#Moves.ComputePostCommitEffectiveVolumes>) +- [type PostCommitVolumes](<#PostCommitVolumes>) + - [func \(a PostCommitVolumes\) AddInput\(account, asset string, input \*big.Int\)](<#PostCommitVolumes.AddInput>) + - [func \(a PostCommitVolumes\) AddOutput\(account, asset string, output \*big.Int\)](<#PostCommitVolumes.AddOutput>) + - [func \(a PostCommitVolumes\) Copy\(\) PostCommitVolumes](<#PostCommitVolumes.Copy>) + - [func \(a PostCommitVolumes\) Merge\(volumes PostCommitVolumes\) PostCommitVolumes](<#PostCommitVolumes.Merge>) +- [type Posting](<#Posting>) + - [func NewPosting\(source string, destination string, asset string, amount \*big.Int\) Posting](<#NewPosting>) +- [type Postings](<#Postings>) + - [func \(p Postings\) Reverse\(\) Postings](<#Postings.Reverse>) + - [func \(p Postings\) Validate\(\) \(int, error\)](<#Postings.Validate>) +- [type RevertedTransaction](<#RevertedTransaction>) + - [func \(r RevertedTransaction\) GetMemento\(\) any](<#RevertedTransaction.GetMemento>) + - [func \(r RevertedTransaction\) Type\(\) LogType](<#RevertedTransaction.Type>) +- [type SavedMetadata](<#SavedMetadata>) + - [func \(s SavedMetadata\) Type\(\) LogType](<#SavedMetadata.Type>) + - [func \(s \*SavedMetadata\) UnmarshalJSON\(data \[\]byte\) error](<#SavedMetadata.UnmarshalJSON>) +- [type Transaction](<#Transaction>) + - [func NewTransaction\(\) Transaction](<#NewTransaction>) + - [func \(tx Transaction\) InvolvedAccountAndAssets\(\) map\[string\]\[\]string](<#Transaction.InvolvedAccountAndAssets>) + - [func \(tx Transaction\) InvolvedAccounts\(\) \[\]string](<#Transaction.InvolvedAccounts>) + - [func \(tx Transaction\) IsReverted\(\) bool](<#Transaction.IsReverted>) + - [func \(Transaction\) JSONSchemaExtend\(schema \*jsonschema.Schema\)](<#Transaction.JSONSchemaExtend>) + - [func \(tx Transaction\) MarshalJSON\(\) \(\[\]byte, error\)](<#Transaction.MarshalJSON>) + - [func \(tx Transaction\) Reverse\(\) Transaction](<#Transaction.Reverse>) + - [func \(tx Transaction\) VolumeUpdates\(\) \[\]AccountsVolumes](<#Transaction.VolumeUpdates>) + - [func \(tx Transaction\) WithInsertedAt\(date time.Time\) 
Transaction](<#Transaction.WithInsertedAt>) + - [func \(tx Transaction\) WithMetadata\(m metadata.Metadata\) Transaction](<#Transaction.WithMetadata>) + - [func \(tx Transaction\) WithPostCommitEffectiveVolumes\(volumes PostCommitVolumes\) Transaction](<#Transaction.WithPostCommitEffectiveVolumes>) + - [func \(tx Transaction\) WithPostings\(postings ...Posting\) Transaction](<#Transaction.WithPostings>) + - [func \(tx Transaction\) WithReference\(ref string\) Transaction](<#Transaction.WithReference>) + - [func \(tx Transaction\) WithRevertedAt\(timestamp time.Time\) Transaction](<#Transaction.WithRevertedAt>) + - [func \(tx Transaction\) WithTimestamp\(ts time.Time\) Transaction](<#Transaction.WithTimestamp>) +- [type TransactionData](<#TransactionData>) + - [func NewTransactionData\(\) TransactionData](<#NewTransactionData>) + - [func \(data TransactionData\) WithPostings\(postings ...Posting\) TransactionData](<#TransactionData.WithPostings>) +- [type Transactions](<#Transactions>) +- [type Volumes](<#Volumes>) + - [func NewEmptyVolumes\(\) Volumes](<#NewEmptyVolumes>) + - [func NewVolumesInt64\(input, output int64\) Volumes](<#NewVolumesInt64>) + - [func \(v Volumes\) Balance\(\) \*big.Int](<#Volumes.Balance>) + - [func \(v Volumes\) Copy\(\) Volumes](<#Volumes.Copy>) + - [func \(Volumes\) JSONSchemaExtend\(schema \*jsonschema.Schema\)](<#Volumes.JSONSchemaExtend>) + - [func \(v Volumes\) MarshalJSON\(\) \(\[\]byte, error\)](<#Volumes.MarshalJSON>) + - [func \(v \*Volumes\) Scan\(src interface\{\}\) error](<#Volumes.Scan>) + - [func \(v Volumes\) Value\(\) \(driver.Value, error\)](<#Volumes.Value>) +- [type VolumesByAssets](<#VolumesByAssets>) + - [func \(v VolumesByAssets\) Balances\(\) BalancesByAssets](<#VolumesByAssets.Balances>) +- [type VolumesWithBalance](<#VolumesWithBalance>) +- [type VolumesWithBalanceByAssetByAccount](<#VolumesWithBalanceByAssetByAccount>) +- [type VolumesWithBalanceByAssets](<#VolumesWithBalanceByAssets>) + + +## Constants + + + +```go +const ( + // FeatureMovesHistory is used to define if the ledger has to save funds movements history. + // Value is either ON or OFF. + FeatureMovesHistory = "MOVES_HISTORY" + // FeatureMovesHistoryPostCommitEffectiveVolumes is used to define if the pvce (post-commit effective volumes) property of funds movements history + // has to be updated when a back-dated transaction is inserted. + // Value is either SYNC or DISABLED. + // todo: depends on FeatureMovesHistory (dependency should be checked) + FeatureMovesHistoryPostCommitEffectiveVolumes = "MOVES_HISTORY_POST_COMMIT_EFFECTIVE_VOLUMES" + // FeatureHashLogs is used to define if the logs have to be hashed. + FeatureHashLogs = "HASH_LOGS" + // FeatureAccountMetadataHistory is used to define if the account metadata must be historized. + FeatureAccountMetadataHistory = "ACCOUNT_METADATA_HISTORY" + // FeatureTransactionMetadataHistory is used to define if the transaction metadata must be historized. + FeatureTransactionMetadataHistory = "TRANSACTION_METADATA_HISTORY" + // FeatureIndexAddressSegments is used to define if we want to index segments of account addresses. + // Without this feature, the ledger will not allow filtering on partial account address. + FeatureIndexAddressSegments = "INDEX_ADDRESS_SEGMENTS" + // FeatureIndexTransactionAccounts is used to define if we want to index accounts used in a transaction. 
+ FeatureIndexTransactionAccounts = "INDEX_TRANSACTION_ACCOUNTS" + + DefaultBucket = "_default" +) +``` + + + +```go +const ( + MetaTargetTypeAccount = "ACCOUNT" + MetaTargetTypeTransaction = "TRANSACTION" +) +``` + + + +```go +const ( + WORLD = "world" +) +``` + +## Variables + + + +```go +var ( + DefaultFeatures = FeatureSet{ + FeatureMovesHistory: "ON", + FeatureMovesHistoryPostCommitEffectiveVolumes: "SYNC", + FeatureHashLogs: "SYNC", + FeatureAccountMetadataHistory: "SYNC", + FeatureTransactionMetadataHistory: "SYNC", + FeatureIndexAddressSegments: "ON", + FeatureIndexTransactionAccounts: "ON", + } + MinimalFeatureSet = FeatureSet{ + FeatureMovesHistory: "OFF", + FeatureMovesHistoryPostCommitEffectiveVolumes: "DISABLED", + FeatureHashLogs: "DISABLED", + FeatureAccountMetadataHistory: "DISABLED", + FeatureTransactionMetadataHistory: "DISABLED", + FeatureIndexAddressSegments: "OFF", + FeatureIndexTransactionAccounts: "OFF", + } + FeatureConfigurations = map[string][]string{ + FeatureMovesHistory: {"ON", "OFF"}, + FeatureMovesHistoryPostCommitEffectiveVolumes: {"SYNC", "DISABLED"}, + FeatureHashLogs: {"SYNC", "DISABLED"}, + FeatureAccountMetadataHistory: {"SYNC", "DISABLED"}, + FeatureTransactionMetadataHistory: {"SYNC", "DISABLED"}, + FeatureIndexAddressSegments: {"ON", "OFF"}, + FeatureIndexTransactionAccounts: {"ON", "OFF"}, + } +) +``` + + + +```go +var Zero = big.NewInt(0) +``` + + +## func ComputeIdempotencyHash + +```go +func ComputeIdempotencyHash(inputs any) string +``` + + + + +## type Account + + + +```go +type Account struct { + bun.BaseModel `bun:"table:accounts"` + + Address string `json:"address" bun:"address"` + Metadata metadata.Metadata `json:"metadata" bun:"metadata,type:jsonb"` + FirstUsage time.Time `json:"-" bun:"first_usage,nullzero"` + InsertionDate time.Time `json:"_" bun:"insertion_date,nullzero"` + UpdatedAt time.Time `json:"-" bun:"updated_at,nullzero"` + Volumes VolumesByAssets `json:"volumes,omitempty" bun:"volumes,scanonly"` + EffectiveVolumes VolumesByAssets `json:"effectiveVolumes,omitempty" bun:"effective_volumes,scanonly"` +} +``` + + +## type AccountMetadata + + + +```go +type AccountMetadata map[string]metadata.Metadata +``` + + +## type AccountsVolumes + + + +```go +type AccountsVolumes struct { + bun.BaseModel `bun:"accounts_volumes"` + + Account string `bun:"accounts_address,type:varchar"` + Asset string `bun:"asset,type:varchar"` + Input *big.Int `bun:"input,type:numeric"` + Output *big.Int `bun:"output,type:numeric"` +} +``` + + +## type BalancesByAssets + + + +```go +type BalancesByAssets map[string]*big.Int +``` + + +## type BalancesByAssetsByAccounts + + + +```go +type BalancesByAssetsByAccounts map[string]BalancesByAssets +``` + + +## type Configuration + + + +```go +type Configuration struct { + Bucket string `json:"bucket" bun:"bucket,type:varchar(255)"` + Metadata metadata.Metadata `json:"metadata" bun:"metadata,type:jsonb"` + Features FeatureSet `json:"features" bun:"features,type:jsonb"` +} +``` + + +### func NewDefaultConfiguration + +```go +func NewDefaultConfiguration() Configuration +``` + + + + +### func \(\*Configuration\) SetDefaults + +```go +func (c *Configuration) SetDefaults() +``` + + + + +### func \(\*Configuration\) Validate + +```go +func (c *Configuration) Validate() error +``` + + + + +## type CreatedTransaction + + + +```go +type CreatedTransaction struct { + Transaction Transaction `json:"transaction"` + AccountMetadata AccountMetadata `json:"accountMetadata"` +} +``` + + +### func \(CreatedTransaction\) GetMemento + 
+```go +func (p CreatedTransaction) GetMemento() any +``` + + + + +### func \(CreatedTransaction\) Type + +```go +func (p CreatedTransaction) Type() LogType +``` + + + + +## type DeletedMetadata + + + +```go +type DeletedMetadata struct { + TargetType string `json:"targetType"` + TargetID any `json:"targetId"` + Key string `json:"key"` +} +``` + + +### func \(DeletedMetadata\) Type + +```go +func (s DeletedMetadata) Type() LogType +``` + + + + +### func \(\*DeletedMetadata\) UnmarshalJSON + +```go +func (s *DeletedMetadata) UnmarshalJSON(data []byte) error +``` + + + + +## type ErrInvalidBucketName + + + +```go +type ErrInvalidBucketName struct { + // contains filtered or unexported fields +} +``` + + +### func \(ErrInvalidBucketName\) Error + +```go +func (e ErrInvalidBucketName) Error() string +``` + + + + +### func \(ErrInvalidBucketName\) Is + +```go +func (e ErrInvalidBucketName) Is(err error) bool +``` + + + + +## type ErrInvalidLedgerName + + + +```go +type ErrInvalidLedgerName struct { + // contains filtered or unexported fields +} +``` + + +### func \(ErrInvalidLedgerName\) Error + +```go +func (e ErrInvalidLedgerName) Error() string +``` + + + + +### func \(ErrInvalidLedgerName\) Is + +```go +func (e ErrInvalidLedgerName) Is(err error) bool +``` + + + + +## type FeatureSet + + + +```go +type FeatureSet map[string]string +``` + + +### func \(FeatureSet\) String + +```go +func (f FeatureSet) String() string +``` + + + + +### func \(FeatureSet\) With + +```go +func (f FeatureSet) With(feature, value string) FeatureSet +``` + + + + +## type Ledger + + + +```go +type Ledger struct { + bun.BaseModel `bun:"_system.ledgers,alias:ledgers"` + + Configuration + ID int `json:"id" bun:"id,type:int,scanonly"` + Name string `json:"name" bun:"name,type:varchar(255),pk"` + AddedAt time.Time `json:"addedAt" bun:"added_at,type:timestamp,nullzero"` +} +``` + + +### func MustNewWithDefault + +```go +func MustNewWithDefault(name string) Ledger +``` + + + + +### func New + +```go +func New(name string, configuration Configuration) (*Ledger, error) +``` + + + + +### func NewWithDefaults + +```go +func NewWithDefaults(name string) (*Ledger, error) +``` + + + + +### func \(Ledger\) HasFeature + +```go +func (l Ledger) HasFeature(feature, value string) bool +``` + + + + +### func \(Ledger\) WithMetadata + +```go +func (l Ledger) WithMetadata(m metadata.Metadata) Ledger +``` + + + + +## type Log + +Log represents atomic actions made on the ledger. + +```go +type Log struct { + bun.BaseModel `bun:"table:logs,alias:logs"` + + Type LogType `json:"type" bun:"type,type:log_type"` + Data LogPayload `json:"data" bun:"data,type:jsonb"` + Date time.Time `json:"date" bun:"date,type:timestamptz,nullzero"` + IdempotencyKey string `json:"idempotencyKey" bun:"idempotency_key,type:varchar(256),unique,nullzero"` + // IdempotencyHash is a signature used when using IdempotencyKey. + // It allows to check if the usage of IdempotencyKey match inputs given on the first idempotency key usage. 
+ IdempotencyHash string `json:"idempotencyHash" bun:"idempotency_hash,unique,nullzero"` + ID int `json:"id" bun:"id,unique,type:numeric"` + Hash []byte `json:"hash" bun:"hash,type:bytea,scanonly"` +} +``` + + +### func NewLog + +```go +func NewLog(payload LogPayload) Log +``` + + + + +### func \(Log\) ChainLog + +```go +func (l Log) ChainLog(previous *Log) Log +``` + + + + +### func \(\*Log\) ComputeHash + +```go +func (l *Log) ComputeHash(previous *Log) +``` + + + + +### func \(\*Log\) UnmarshalJSON + +```go +func (l *Log) UnmarshalJSON(data []byte) error +``` + + + + +### func \(Log\) WithIdempotencyKey + +```go +func (l Log) WithIdempotencyKey(key string) Log +``` + + + + +## type LogPayload + + + +```go +type LogPayload interface { + Type() LogType +} +``` + + +### func HydrateLog + +```go +func HydrateLog(_type LogType, data []byte) (LogPayload, error) +``` + + + + +## type LogType + + + +```go +type LogType int16 +``` + + + +```go +const ( + SetMetadataLogType LogType = iota // "SET_METADATA" + NewLogType // "NEW_TRANSACTION" + RevertedTransactionLogType // "REVERTED_TRANSACTION" + DeleteMetadataLogType +) +``` + + +### func LogTypeFromString + +```go +func LogTypeFromString(logType string) LogType +``` + + + + +### func \(LogType\) MarshalJSON + +```go +func (lt LogType) MarshalJSON() ([]byte, error) +``` + + + + +### func \(\*LogType\) Scan + +```go +func (lt *LogType) Scan(src interface{}) error +``` + + + + +### func \(LogType\) String + +```go +func (lt LogType) String() string +``` + + + + +### func \(\*LogType\) UnmarshalJSON + +```go +func (lt *LogType) UnmarshalJSON(data []byte) error +``` + + + + +### func \(LogType\) Value + +```go +func (lt LogType) Value() (driver.Value, error) +``` + + + + +## type Memento + + + +```go +type Memento interface { + GetMemento() any +} +``` + + +## type Move + + + +```go +type Move struct { + bun.BaseModel `bun:"table:moves"` + + TransactionID int `bun:"transactions_id,type:bigint"` + IsSource bool `bun:"is_source,type:bool"` + Account string `bun:"accounts_address,type:varchar"` + Amount *bunpaginate.BigInt `bun:"amount,type:numeric"` + Asset string `bun:"asset,type:varchar"` + InsertionDate time.Time `bun:"insertion_date,type:timestamp,nullzero"` + EffectiveDate time.Time `bun:"effective_date,type:timestamp,nullzero"` + PostCommitVolumes *Volumes `bun:"post_commit_volumes,type:jsonb"` + PostCommitEffectiveVolumes *Volumes `bun:"post_commit_effective_volumes,type:jsonb,scanonly"` +} +``` + + +## type Moves + + + +```go +type Moves []*Move +``` + + +### func \(Moves\) ComputePostCommitEffectiveVolumes + +```go +func (m Moves) ComputePostCommitEffectiveVolumes() PostCommitVolumes +``` + + + + +## type PostCommitVolumes + + + +```go +type PostCommitVolumes map[string]VolumesByAssets +``` + + +### func \(PostCommitVolumes\) AddInput + +```go +func (a PostCommitVolumes) AddInput(account, asset string, input *big.Int) +``` + + + + +### func \(PostCommitVolumes\) AddOutput + +```go +func (a PostCommitVolumes) AddOutput(account, asset string, output *big.Int) +``` + + + + +### func \(PostCommitVolumes\) Copy + +```go +func (a PostCommitVolumes) Copy() PostCommitVolumes +``` + + + + +### func \(PostCommitVolumes\) Merge + +```go +func (a PostCommitVolumes) Merge(volumes PostCommitVolumes) PostCommitVolumes +``` + + + + +## type Posting + + + +```go +type Posting struct { + Source string `json:"source"` + Destination string `json:"destination"` + Amount *big.Int `json:"amount"` + Asset string `json:"asset"` +} +``` + + +### func NewPosting + 
+```go +func NewPosting(source string, destination string, asset string, amount *big.Int) Posting +``` + + + + +## type Postings + + + +```go +type Postings []Posting +``` + + +### func \(Postings\) Reverse + +```go +func (p Postings) Reverse() Postings +``` + + + + +### func \(Postings\) Validate + +```go +func (p Postings) Validate() (int, error) +``` + + + + +## type RevertedTransaction + + + +```go +type RevertedTransaction struct { + RevertedTransaction Transaction `json:"revertedTransaction"` + RevertTransaction Transaction `json:"transaction"` +} +``` + + +### func \(RevertedTransaction\) GetMemento + +```go +func (r RevertedTransaction) GetMemento() any +``` + + + + +### func \(RevertedTransaction\) Type + +```go +func (r RevertedTransaction) Type() LogType +``` + + + + +## type SavedMetadata + + + +```go +type SavedMetadata struct { + TargetType string `json:"targetType"` + TargetID any `json:"targetId"` + Metadata metadata.Metadata `json:"metadata"` +} +``` + + +### func \(SavedMetadata\) Type + +```go +func (s SavedMetadata) Type() LogType +``` + + + + +### func \(\*SavedMetadata\) UnmarshalJSON + +```go +func (s *SavedMetadata) UnmarshalJSON(data []byte) error +``` + + + + +## type Transaction + + + +```go +type Transaction struct { + bun.BaseModel `bun:"table:transactions,alias:transactions"` + + TransactionData + ID int `json:"id" bun:"id,type:numeric"` + RevertedAt *time.Time `json:"revertedAt,omitempty" bun:"reverted_at,type:timestamp without time zone"` + // PostCommitVolumes are the volumes of each account/asset after a transaction has been committed. + // Those volumes will never change as those are computed in flight. + PostCommitVolumes PostCommitVolumes `json:"postCommitVolumes,omitempty" bun:"post_commit_volumes,type:jsonb"` + // PostCommitEffectiveVolumes are the volumes of each account/asset after the transaction TransactionData.Timestamp. + // Those volumes are also computed in flight, but can be updated if a transaction is inserted in the past. 
+ PostCommitEffectiveVolumes PostCommitVolumes `json:"postCommitEffectiveVolumes,omitempty" bun:"post_commit_effective_volumes,type:jsonb,scanonly"` +} +``` + + +### func NewTransaction + +```go +func NewTransaction() Transaction +``` + + + + +### func \(Transaction\) InvolvedAccountAndAssets + +```go +func (tx Transaction) InvolvedAccountAndAssets() map[string][]string +``` + + + + +### func \(Transaction\) InvolvedAccounts + +```go +func (tx Transaction) InvolvedAccounts() []string +``` + + + + +### func \(Transaction\) IsReverted + +```go +func (tx Transaction) IsReverted() bool +``` + + + + +### func \(Transaction\) JSONSchemaExtend + +```go +func (Transaction) JSONSchemaExtend(schema *jsonschema.Schema) +``` + + + + +### func \(Transaction\) MarshalJSON + +```go +func (tx Transaction) MarshalJSON() ([]byte, error) +``` + + + + +### func \(Transaction\) Reverse + +```go +func (tx Transaction) Reverse() Transaction +``` + + + + +### func \(Transaction\) VolumeUpdates + +```go +func (tx Transaction) VolumeUpdates() []AccountsVolumes +``` + + + + +### func \(Transaction\) WithInsertedAt + +```go +func (tx Transaction) WithInsertedAt(date time.Time) Transaction +``` + + + + +### func \(Transaction\) WithMetadata + +```go +func (tx Transaction) WithMetadata(m metadata.Metadata) Transaction +``` + + + + +### func \(Transaction\) WithPostCommitEffectiveVolumes + +```go +func (tx Transaction) WithPostCommitEffectiveVolumes(volumes PostCommitVolumes) Transaction +``` + + + + +### func \(Transaction\) WithPostings + +```go +func (tx Transaction) WithPostings(postings ...Posting) Transaction +``` + + + + +### func \(Transaction\) WithReference + +```go +func (tx Transaction) WithReference(ref string) Transaction +``` + + + + +### func \(Transaction\) WithRevertedAt + +```go +func (tx Transaction) WithRevertedAt(timestamp time.Time) Transaction +``` + + + + +### func \(Transaction\) WithTimestamp + +```go +func (tx Transaction) WithTimestamp(ts time.Time) Transaction +``` + + + + +## type TransactionData + + + +```go +type TransactionData struct { + Postings Postings `json:"postings" bun:"postings,type:jsonb"` + Metadata metadata.Metadata `json:"metadata" bun:"metadata,type:jsonb,default:'{}'"` + Timestamp time.Time `json:"timestamp" bun:"timestamp,type:timestamp without time zone,nullzero"` + Reference string `json:"reference,omitempty" bun:"reference,type:varchar,unique,nullzero"` + InsertedAt time.Time `json:"insertedAt,omitempty" bun:"inserted_at,type:timestamp without time zone,nullzero"` +} +``` + + +### func NewTransactionData + +```go +func NewTransactionData() TransactionData +``` + + + + +### func \(TransactionData\) WithPostings + +```go +func (data TransactionData) WithPostings(postings ...Posting) TransactionData +``` + + + + +## type Transactions + + + +```go +type Transactions struct { + Transactions []TransactionData `json:"transactions"` +} +``` + + +## type Volumes + + + +```go +type Volumes struct { + Input *big.Int `json:"input"` + Output *big.Int `json:"output"` +} +``` + + +### func NewEmptyVolumes + +```go +func NewEmptyVolumes() Volumes +``` + + + + +### func NewVolumesInt64 + +```go +func NewVolumesInt64(input, output int64) Volumes +``` + + + + +### func \(Volumes\) Balance + +```go +func (v Volumes) Balance() *big.Int +``` + + + + +### func \(Volumes\) Copy + +```go +func (v Volumes) Copy() Volumes +``` + + + + +### func \(Volumes\) JSONSchemaExtend + +```go +func (Volumes) JSONSchemaExtend(schema *jsonschema.Schema) +``` + + + + +### func \(Volumes\) MarshalJSON + 
+```go +func (v Volumes) MarshalJSON() ([]byte, error) +``` + + + + +### func \(\*Volumes\) Scan + +```go +func (v *Volumes) Scan(src interface{}) error +``` + + + + +### func \(Volumes\) Value + +```go +func (v Volumes) Value() (driver.Value, error) +``` + + + + +## type VolumesByAssets + + + +```go +type VolumesByAssets map[string]Volumes +``` + + +### func \(VolumesByAssets\) Balances + +```go +func (v VolumesByAssets) Balances() BalancesByAssets +``` + + + + +## type VolumesWithBalance + + + +```go +type VolumesWithBalance struct { + Input *big.Int `json:"input" bun:"input"` + Output *big.Int `json:"output" bun:"output"` + Balance *big.Int `json:"balance" bun:"balance"` +} +``` + + +## type VolumesWithBalanceByAssetByAccount + + + +```go +type VolumesWithBalanceByAssetByAccount struct { + Account string `json:"account" bun:"account"` + Asset string `json:"asset" bun:"asset"` + VolumesWithBalance +} +``` + + +## type VolumesWithBalanceByAssets + + + +```go +type VolumesWithBalanceByAssets map[string]*VolumesWithBalance +``` + +Generated by [gomarkdoc]() diff --git a/internal/account.go b/internal/account.go index 59ff9844d..8bd517d4b 100644 --- a/internal/account.go +++ b/internal/account.go @@ -1,9 +1,10 @@ package ledger import ( - "github.com/formancehq/go-libs/metadata" - "github.com/formancehq/go-libs/time" + "github.com/formancehq/go-libs/v2/metadata" + "github.com/formancehq/go-libs/v2/time" "github.com/uptrace/bun" + "math/big" ) const ( @@ -11,43 +12,22 @@ const ( ) type Account struct { - bun.BaseModel `bun:"table:accounts,alias:accounts"` - - Address string `json:"address"` - Metadata metadata.Metadata `json:"metadata"` - FirstUsage time.Time `json:"-" bun:"first_usage,type:timestamp without timezone"` -} - -func (a Account) copy() Account { - a.Metadata = a.Metadata.Copy() - return a -} - -func NewAccount(address string) Account { - return Account{ - Address: address, - Metadata: metadata.Metadata{}, - } -} - -type ExpandedAccount struct { - Account `bun:",extend"` - Volumes VolumesByAssets `json:"volumes,omitempty" bun:"volumes,type:jsonb"` - EffectiveVolumes VolumesByAssets `json:"effectiveVolumes,omitempty" bun:"effective_volumes,type:jsonb"` + bun.BaseModel `bun:"table:accounts"` + + Address string `json:"address" bun:"address"` + Metadata metadata.Metadata `json:"metadata" bun:"metadata,type:jsonb"` + FirstUsage time.Time `json:"-" bun:"first_usage,nullzero"` + InsertionDate time.Time `json:"_" bun:"insertion_date,nullzero"` + UpdatedAt time.Time `json:"-" bun:"updated_at,nullzero"` + Volumes VolumesByAssets `json:"volumes,omitempty" bun:"volumes,scanonly"` + EffectiveVolumes VolumesByAssets `json:"effectiveVolumes,omitempty" bun:"effective_volumes,scanonly"` } -func NewExpandedAccount(address string) ExpandedAccount { - return ExpandedAccount{ - Account: Account{ - Address: address, - Metadata: metadata.Metadata{}, - }, - Volumes: map[string]*Volumes{}, - } -} +type AccountsVolumes struct { + bun.BaseModel `bun:"accounts_volumes"` -func (v ExpandedAccount) Copy() ExpandedAccount { - v.Account = v.Account.copy() - v.Volumes = v.Volumes.copy() - return v + Account string `bun:"accounts_address,type:varchar"` + Asset string `bun:"asset,type:varchar"` + Input *big.Int `bun:"input,type:numeric"` + Output *big.Int `bun:"output,type:numeric"` } diff --git a/internal/api/backend/backend.go b/internal/api/backend/backend.go deleted file mode 100644 index ba8b2b0ee..000000000 --- a/internal/api/backend/backend.go +++ /dev/null @@ -1,99 +0,0 @@ -package backend - -import ( - 
"context" - "math/big" - - "github.com/formancehq/go-libs/bun/bunpaginate" - - "github.com/formancehq/go-libs/metadata" - "github.com/formancehq/go-libs/migrations" - ledger "github.com/formancehq/ledger/internal" - "github.com/formancehq/ledger/internal/engine" - "github.com/formancehq/ledger/internal/engine/command" - "github.com/formancehq/ledger/internal/storage/driver" - "github.com/formancehq/ledger/internal/storage/ledgerstore" - "github.com/formancehq/ledger/internal/storage/systemstore" -) - -//go:generate mockgen -source backend.go -destination backend_generated.go -package backend . Ledger - -type Ledger interface { - GetAccountWithVolumes(ctx context.Context, query ledgerstore.GetAccountQuery) (*ledger.ExpandedAccount, error) - GetAccountsWithVolumes(ctx context.Context, query ledgerstore.GetAccountsQuery) (*bunpaginate.Cursor[ledger.ExpandedAccount], error) - CountAccounts(ctx context.Context, query ledgerstore.GetAccountsQuery) (int, error) - GetAggregatedBalances(ctx context.Context, q ledgerstore.GetAggregatedBalanceQuery) (ledger.BalancesByAssets, error) - GetMigrationsInfo(ctx context.Context) ([]migrations.Info, error) - Stats(ctx context.Context) (engine.Stats, error) - GetLogs(ctx context.Context, query ledgerstore.GetLogsQuery) (*bunpaginate.Cursor[ledger.ChainedLog], error) - CountTransactions(ctx context.Context, query ledgerstore.GetTransactionsQuery) (int, error) - GetTransactions(ctx context.Context, query ledgerstore.GetTransactionsQuery) (*bunpaginate.Cursor[ledger.ExpandedTransaction], error) - GetTransactionWithVolumes(ctx context.Context, query ledgerstore.GetTransactionQuery) (*ledger.ExpandedTransaction, error) - - CreateTransaction(ctx context.Context, parameters command.Parameters, data ledger.RunScript) (*ledger.Transaction, error) - RevertTransaction(ctx context.Context, parameters command.Parameters, id *big.Int, force, atEffectiveDate bool) (*ledger.Transaction, error) - SaveMeta(ctx context.Context, parameters command.Parameters, targetType string, targetID any, m metadata.Metadata) error - DeleteMetadata(ctx context.Context, parameters command.Parameters, targetType string, targetID any, key string) error - Import(ctx context.Context, stream chan *ledger.ChainedLog) error - Export(ctx context.Context, w engine.ExportWriter) error - - IsDatabaseUpToDate(ctx context.Context) (bool, error) - - GetVolumesWithBalances(ctx context.Context, q ledgerstore.GetVolumesWithBalancesQuery) (*bunpaginate.Cursor[ledger.VolumesWithBalanceByAssetByAccount], error) -} - -type Backend interface { - GetLedgerEngine(ctx context.Context, name string) (Ledger, error) - GetLedger(ctx context.Context, name string) (*systemstore.Ledger, error) - ListLedgers(ctx context.Context, query systemstore.ListLedgersQuery) (*bunpaginate.Cursor[systemstore.Ledger], error) - CreateLedger(ctx context.Context, name string, configuration driver.LedgerConfiguration) error - UpdateLedgerMetadata(ctx context.Context, name string, m map[string]string) error - GetVersion() string - DeleteLedgerMetadata(ctx context.Context, param string, key string) error -} - -type DefaultBackend struct { - storageDriver *driver.Driver - resolver *engine.Resolver - version string -} - -func (d DefaultBackend) DeleteLedgerMetadata(ctx context.Context, name string, key string) error { - return d.storageDriver.GetSystemStore().DeleteLedgerMetadata(ctx, name, key) -} - -func (d DefaultBackend) UpdateLedgerMetadata(ctx context.Context, name string, m map[string]string) error { - return 
d.storageDriver.GetSystemStore().UpdateLedgerMetadata(ctx, name, m) -} - -func (d DefaultBackend) GetLedger(ctx context.Context, name string) (*systemstore.Ledger, error) { - return d.storageDriver.GetSystemStore().GetLedger(ctx, name) -} - -func (d DefaultBackend) CreateLedger(ctx context.Context, name string, configuration driver.LedgerConfiguration) error { - _, err := d.resolver.CreateLedger(ctx, name, configuration) - - return err -} - -func (d DefaultBackend) GetLedgerEngine(ctx context.Context, name string) (Ledger, error) { - return d.resolver.GetLedger(ctx, name) -} - -func (d DefaultBackend) ListLedgers(ctx context.Context, query systemstore.ListLedgersQuery) (*bunpaginate.Cursor[systemstore.Ledger], error) { - return d.storageDriver.GetSystemStore().ListLedgers(ctx, query) -} - -func (d DefaultBackend) GetVersion() string { - return d.version -} - -var _ Backend = (*DefaultBackend)(nil) - -func NewDefaultBackend(driver *driver.Driver, version string, resolver *engine.Resolver) *DefaultBackend { - return &DefaultBackend{ - storageDriver: driver, - resolver: resolver, - version: version, - } -} diff --git a/internal/api/backend/backend_generated.go b/internal/api/backend/backend_generated.go deleted file mode 100644 index b0135f68b..000000000 --- a/internal/api/backend/backend_generated.go +++ /dev/null @@ -1,440 +0,0 @@ -// Code generated by MockGen. DO NOT EDIT. -// Source: backend.go -// -// Generated by this command: -// -// mockgen -source backend.go -destination backend_generated.go -package backend . Ledger -// - -// Package backend is a generated GoMock package. -package backend - -import ( - context "context" - big "math/big" - reflect "reflect" - - ledger "github.com/formancehq/ledger/internal" - engine "github.com/formancehq/ledger/internal/engine" - command "github.com/formancehq/ledger/internal/engine/command" - driver "github.com/formancehq/ledger/internal/storage/driver" - ledgerstore "github.com/formancehq/ledger/internal/storage/ledgerstore" - systemstore "github.com/formancehq/ledger/internal/storage/systemstore" - bunpaginate "github.com/formancehq/go-libs/bun/bunpaginate" - metadata "github.com/formancehq/go-libs/metadata" - migrations "github.com/formancehq/go-libs/migrations" - gomock "go.uber.org/mock/gomock" -) - -// MockLedger is a mock of Ledger interface. -type MockLedger struct { - ctrl *gomock.Controller - recorder *MockLedgerMockRecorder -} - -// MockLedgerMockRecorder is the mock recorder for MockLedger. -type MockLedgerMockRecorder struct { - mock *MockLedger -} - -// NewMockLedger creates a new mock instance. -func NewMockLedger(ctrl *gomock.Controller) *MockLedger { - mock := &MockLedger{ctrl: ctrl} - mock.recorder = &MockLedgerMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockLedger) EXPECT() *MockLedgerMockRecorder { - return m.recorder -} - -// CountAccounts mocks base method. -func (m *MockLedger) CountAccounts(ctx context.Context, query ledgerstore.GetAccountsQuery) (int, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "CountAccounts", ctx, query) - ret0, _ := ret[0].(int) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// CountAccounts indicates an expected call of CountAccounts. 
-func (mr *MockLedgerMockRecorder) CountAccounts(ctx, query any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CountAccounts", reflect.TypeOf((*MockLedger)(nil).CountAccounts), ctx, query) -} - -// CountTransactions mocks base method. -func (m *MockLedger) CountTransactions(ctx context.Context, query ledgerstore.GetTransactionsQuery) (int, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "CountTransactions", ctx, query) - ret0, _ := ret[0].(int) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// CountTransactions indicates an expected call of CountTransactions. -func (mr *MockLedgerMockRecorder) CountTransactions(ctx, query any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CountTransactions", reflect.TypeOf((*MockLedger)(nil).CountTransactions), ctx, query) -} - -// CreateTransaction mocks base method. -func (m *MockLedger) CreateTransaction(ctx context.Context, parameters command.Parameters, data ledger.RunScript) (*ledger.Transaction, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "CreateTransaction", ctx, parameters, data) - ret0, _ := ret[0].(*ledger.Transaction) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// CreateTransaction indicates an expected call of CreateTransaction. -func (mr *MockLedgerMockRecorder) CreateTransaction(ctx, parameters, data any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateTransaction", reflect.TypeOf((*MockLedger)(nil).CreateTransaction), ctx, parameters, data) -} - -// DeleteMetadata mocks base method. -func (m *MockLedger) DeleteMetadata(ctx context.Context, parameters command.Parameters, targetType string, targetID any, key string) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "DeleteMetadata", ctx, parameters, targetType, targetID, key) - ret0, _ := ret[0].(error) - return ret0 -} - -// DeleteMetadata indicates an expected call of DeleteMetadata. -func (mr *MockLedgerMockRecorder) DeleteMetadata(ctx, parameters, targetType, targetID, key any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteMetadata", reflect.TypeOf((*MockLedger)(nil).DeleteMetadata), ctx, parameters, targetType, targetID, key) -} - -// Export mocks base method. -func (m *MockLedger) Export(ctx context.Context, w engine.ExportWriter) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Export", ctx, w) - ret0, _ := ret[0].(error) - return ret0 -} - -// Export indicates an expected call of Export. -func (mr *MockLedgerMockRecorder) Export(ctx, w any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Export", reflect.TypeOf((*MockLedger)(nil).Export), ctx, w) -} - -// GetAccountWithVolumes mocks base method. -func (m *MockLedger) GetAccountWithVolumes(ctx context.Context, query ledgerstore.GetAccountQuery) (*ledger.ExpandedAccount, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetAccountWithVolumes", ctx, query) - ret0, _ := ret[0].(*ledger.ExpandedAccount) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetAccountWithVolumes indicates an expected call of GetAccountWithVolumes. 
-func (mr *MockLedgerMockRecorder) GetAccountWithVolumes(ctx, query any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAccountWithVolumes", reflect.TypeOf((*MockLedger)(nil).GetAccountWithVolumes), ctx, query) -} - -// GetAccountsWithVolumes mocks base method. -func (m *MockLedger) GetAccountsWithVolumes(ctx context.Context, query ledgerstore.GetAccountsQuery) (*bunpaginate.Cursor[ledger.ExpandedAccount], error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetAccountsWithVolumes", ctx, query) - ret0, _ := ret[0].(*bunpaginate.Cursor[ledger.ExpandedAccount]) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetAccountsWithVolumes indicates an expected call of GetAccountsWithVolumes. -func (mr *MockLedgerMockRecorder) GetAccountsWithVolumes(ctx, query any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAccountsWithVolumes", reflect.TypeOf((*MockLedger)(nil).GetAccountsWithVolumes), ctx, query) -} - -// GetAggregatedBalances mocks base method. -func (m *MockLedger) GetAggregatedBalances(ctx context.Context, q ledgerstore.GetAggregatedBalanceQuery) (ledger.BalancesByAssets, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetAggregatedBalances", ctx, q) - ret0, _ := ret[0].(ledger.BalancesByAssets) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetAggregatedBalances indicates an expected call of GetAggregatedBalances. -func (mr *MockLedgerMockRecorder) GetAggregatedBalances(ctx, q any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAggregatedBalances", reflect.TypeOf((*MockLedger)(nil).GetAggregatedBalances), ctx, q) -} - -// GetLogs mocks base method. -func (m *MockLedger) GetLogs(ctx context.Context, query ledgerstore.GetLogsQuery) (*bunpaginate.Cursor[ledger.ChainedLog], error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetLogs", ctx, query) - ret0, _ := ret[0].(*bunpaginate.Cursor[ledger.ChainedLog]) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetLogs indicates an expected call of GetLogs. -func (mr *MockLedgerMockRecorder) GetLogs(ctx, query any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetLogs", reflect.TypeOf((*MockLedger)(nil).GetLogs), ctx, query) -} - -// GetMigrationsInfo mocks base method. -func (m *MockLedger) GetMigrationsInfo(ctx context.Context) ([]migrations.Info, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetMigrationsInfo", ctx) - ret0, _ := ret[0].([]migrations.Info) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetMigrationsInfo indicates an expected call of GetMigrationsInfo. -func (mr *MockLedgerMockRecorder) GetMigrationsInfo(ctx any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetMigrationsInfo", reflect.TypeOf((*MockLedger)(nil).GetMigrationsInfo), ctx) -} - -// GetTransactionWithVolumes mocks base method. -func (m *MockLedger) GetTransactionWithVolumes(ctx context.Context, query ledgerstore.GetTransactionQuery) (*ledger.ExpandedTransaction, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetTransactionWithVolumes", ctx, query) - ret0, _ := ret[0].(*ledger.ExpandedTransaction) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetTransactionWithVolumes indicates an expected call of GetTransactionWithVolumes. 
-func (mr *MockLedgerMockRecorder) GetTransactionWithVolumes(ctx, query any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTransactionWithVolumes", reflect.TypeOf((*MockLedger)(nil).GetTransactionWithVolumes), ctx, query) -} - -// GetTransactions mocks base method. -func (m *MockLedger) GetTransactions(ctx context.Context, query ledgerstore.GetTransactionsQuery) (*bunpaginate.Cursor[ledger.ExpandedTransaction], error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetTransactions", ctx, query) - ret0, _ := ret[0].(*bunpaginate.Cursor[ledger.ExpandedTransaction]) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetTransactions indicates an expected call of GetTransactions. -func (mr *MockLedgerMockRecorder) GetTransactions(ctx, query any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTransactions", reflect.TypeOf((*MockLedger)(nil).GetTransactions), ctx, query) -} - -// GetVolumesWithBalances mocks base method. -func (m *MockLedger) GetVolumesWithBalances(ctx context.Context, q ledgerstore.GetVolumesWithBalancesQuery) (*bunpaginate.Cursor[ledger.VolumesWithBalanceByAssetByAccount], error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetVolumesWithBalances", ctx, q) - ret0, _ := ret[0].(*bunpaginate.Cursor[ledger.VolumesWithBalanceByAssetByAccount]) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetVolumesWithBalances indicates an expected call of GetVolumesWithBalances. -func (mr *MockLedgerMockRecorder) GetVolumesWithBalances(ctx, q any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetVolumesWithBalances", reflect.TypeOf((*MockLedger)(nil).GetVolumesWithBalances), ctx, q) -} - -// Import mocks base method. -func (m *MockLedger) Import(ctx context.Context, stream chan *ledger.ChainedLog) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Import", ctx, stream) - ret0, _ := ret[0].(error) - return ret0 -} - -// Import indicates an expected call of Import. -func (mr *MockLedgerMockRecorder) Import(ctx, stream any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Import", reflect.TypeOf((*MockLedger)(nil).Import), ctx, stream) -} - -// IsDatabaseUpToDate mocks base method. -func (m *MockLedger) IsDatabaseUpToDate(ctx context.Context) (bool, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "IsDatabaseUpToDate", ctx) - ret0, _ := ret[0].(bool) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// IsDatabaseUpToDate indicates an expected call of IsDatabaseUpToDate. -func (mr *MockLedgerMockRecorder) IsDatabaseUpToDate(ctx any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "IsDatabaseUpToDate", reflect.TypeOf((*MockLedger)(nil).IsDatabaseUpToDate), ctx) -} - -// RevertTransaction mocks base method. -func (m *MockLedger) RevertTransaction(ctx context.Context, parameters command.Parameters, id *big.Int, force, atEffectiveDate bool) (*ledger.Transaction, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "RevertTransaction", ctx, parameters, id, force, atEffectiveDate) - ret0, _ := ret[0].(*ledger.Transaction) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// RevertTransaction indicates an expected call of RevertTransaction. 
-func (mr *MockLedgerMockRecorder) RevertTransaction(ctx, parameters, id, force, atEffectiveDate any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RevertTransaction", reflect.TypeOf((*MockLedger)(nil).RevertTransaction), ctx, parameters, id, force, atEffectiveDate) -} - -// SaveMeta mocks base method. -func (m_2 *MockLedger) SaveMeta(ctx context.Context, parameters command.Parameters, targetType string, targetID any, m metadata.Metadata) error { - m_2.ctrl.T.Helper() - ret := m_2.ctrl.Call(m_2, "SaveMeta", ctx, parameters, targetType, targetID, m) - ret0, _ := ret[0].(error) - return ret0 -} - -// SaveMeta indicates an expected call of SaveMeta. -func (mr *MockLedgerMockRecorder) SaveMeta(ctx, parameters, targetType, targetID, m any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SaveMeta", reflect.TypeOf((*MockLedger)(nil).SaveMeta), ctx, parameters, targetType, targetID, m) -} - -// Stats mocks base method. -func (m *MockLedger) Stats(ctx context.Context) (engine.Stats, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Stats", ctx) - ret0, _ := ret[0].(engine.Stats) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Stats indicates an expected call of Stats. -func (mr *MockLedgerMockRecorder) Stats(ctx any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Stats", reflect.TypeOf((*MockLedger)(nil).Stats), ctx) -} - -// MockBackend is a mock of Backend interface. -type MockBackend struct { - ctrl *gomock.Controller - recorder *MockBackendMockRecorder -} - -// MockBackendMockRecorder is the mock recorder for MockBackend. -type MockBackendMockRecorder struct { - mock *MockBackend -} - -// NewMockBackend creates a new mock instance. -func NewMockBackend(ctrl *gomock.Controller) *MockBackend { - mock := &MockBackend{ctrl: ctrl} - mock.recorder = &MockBackendMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockBackend) EXPECT() *MockBackendMockRecorder { - return m.recorder -} - -// CreateLedger mocks base method. -func (m *MockBackend) CreateLedger(ctx context.Context, name string, configuration driver.LedgerConfiguration) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "CreateLedger", ctx, name, configuration) - ret0, _ := ret[0].(error) - return ret0 -} - -// CreateLedger indicates an expected call of CreateLedger. -func (mr *MockBackendMockRecorder) CreateLedger(ctx, name, configuration any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateLedger", reflect.TypeOf((*MockBackend)(nil).CreateLedger), ctx, name, configuration) -} - -// DeleteLedgerMetadata mocks base method. -func (m *MockBackend) DeleteLedgerMetadata(ctx context.Context, param, key string) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "DeleteLedgerMetadata", ctx, param, key) - ret0, _ := ret[0].(error) - return ret0 -} - -// DeleteLedgerMetadata indicates an expected call of DeleteLedgerMetadata. -func (mr *MockBackendMockRecorder) DeleteLedgerMetadata(ctx, param, key any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteLedgerMetadata", reflect.TypeOf((*MockBackend)(nil).DeleteLedgerMetadata), ctx, param, key) -} - -// GetLedger mocks base method. 
-func (m *MockBackend) GetLedger(ctx context.Context, name string) (*systemstore.Ledger, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetLedger", ctx, name) - ret0, _ := ret[0].(*systemstore.Ledger) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetLedger indicates an expected call of GetLedger. -func (mr *MockBackendMockRecorder) GetLedger(ctx, name any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetLedger", reflect.TypeOf((*MockBackend)(nil).GetLedger), ctx, name) -} - -// GetLedgerEngine mocks base method. -func (m *MockBackend) GetLedgerEngine(ctx context.Context, name string) (Ledger, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetLedgerEngine", ctx, name) - ret0, _ := ret[0].(Ledger) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetLedgerEngine indicates an expected call of GetLedgerEngine. -func (mr *MockBackendMockRecorder) GetLedgerEngine(ctx, name any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetLedgerEngine", reflect.TypeOf((*MockBackend)(nil).GetLedgerEngine), ctx, name) -} - -// GetVersion mocks base method. -func (m *MockBackend) GetVersion() string { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetVersion") - ret0, _ := ret[0].(string) - return ret0 -} - -// GetVersion indicates an expected call of GetVersion. -func (mr *MockBackendMockRecorder) GetVersion() *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetVersion", reflect.TypeOf((*MockBackend)(nil).GetVersion)) -} - -// ListLedgers mocks base method. -func (m *MockBackend) ListLedgers(ctx context.Context, query systemstore.ListLedgersQuery) (*bunpaginate.Cursor[systemstore.Ledger], error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ListLedgers", ctx, query) - ret0, _ := ret[0].(*bunpaginate.Cursor[systemstore.Ledger]) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// ListLedgers indicates an expected call of ListLedgers. -func (mr *MockBackendMockRecorder) ListLedgers(ctx, query any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListLedgers", reflect.TypeOf((*MockBackend)(nil).ListLedgers), ctx, query) -} - -// UpdateLedgerMetadata mocks base method. -func (m_2 *MockBackend) UpdateLedgerMetadata(ctx context.Context, name string, m map[string]string) error { - m_2.ctrl.T.Helper() - ret := m_2.ctrl.Call(m_2, "UpdateLedgerMetadata", ctx, name, m) - ret0, _ := ret[0].(error) - return ret0 -} - -// UpdateLedgerMetadata indicates an expected call of UpdateLedgerMetadata. 
-func (mr *MockBackendMockRecorder) UpdateLedgerMetadata(ctx, name, m any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateLedgerMetadata", reflect.TypeOf((*MockBackend)(nil).UpdateLedgerMetadata), ctx, name, m) -} diff --git a/internal/api/backend/context.go b/internal/api/backend/context.go deleted file mode 100644 index cc11ed024..000000000 --- a/internal/api/backend/context.go +++ /dev/null @@ -1,17 +0,0 @@ -package backend - -import ( - "context" -) - -type ledgerKey struct{} - -var _ledgerKey = ledgerKey{} - -func ContextWithLedger(ctx context.Context, ledger Ledger) context.Context { - return context.WithValue(ctx, _ledgerKey, ledger) -} - -func LedgerFromContext(ctx context.Context) Ledger { - return ctx.Value(_ledgerKey).(Ledger) -} diff --git a/internal/api/backend/playnumscript.go b/internal/api/backend/playnumscript.go deleted file mode 100644 index 716200408..000000000 --- a/internal/api/backend/playnumscript.go +++ /dev/null @@ -1,24 +0,0 @@ -package backend - -import ( - "encoding/base64" - "encoding/json" - "fmt" - "strings" -) - -func EncodeLink(errStr string) string { - if errStr == "" { - return "" - } - - errStr = strings.ReplaceAll(errStr, "\n", "\r\n") - payload, err := json.Marshal(map[string]any{ - "error": errStr, - }) - if err != nil { - panic(err) - } - payloadB64 := base64.StdEncoding.EncodeToString(payload) - return fmt.Sprintf("https://play.numscript.org/?payload=%v", payloadB64) -} diff --git a/internal/api/backend/resolver.go b/internal/api/backend/resolver.go deleted file mode 100644 index 132bbb74f..000000000 --- a/internal/api/backend/resolver.go +++ /dev/null @@ -1,118 +0,0 @@ -package backend - -import ( - "math/rand" - "net/http" - "strings" - "sync" - "time" - - "github.com/go-chi/chi/v5" - - "github.com/formancehq/ledger/internal/storage/sqlutils" - - sharedapi "github.com/formancehq/go-libs/api" - - "github.com/pkg/errors" - - "github.com/formancehq/go-libs/logging" - "github.com/formancehq/ledger/internal/opentelemetry/tracer" -) - -var ( - r *rand.Rand - mu sync.Mutex -) - -const ( - ErrOutdatedSchema = "OUTDATED_SCHEMA" -) - -func init() { - r = rand.New(rand.NewSource(time.Now().UnixNano())) -} - -var letterRunes = []rune("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789") - -func randomTraceID(n int) string { - mu.Lock() - defer mu.Unlock() - - b := make([]rune, n) - for i := range b { - b[i] = letterRunes[r.Intn(len(letterRunes))] - } - return string(b) -} - -func LedgerMiddleware( - resolver Backend, - excludePathFromSchemaCheck []string, -) func(handler http.Handler) http.Handler { - return func(handler http.Handler) http.Handler { - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - name := chi.URLParam(r, "ledger") - if name == "" { - w.WriteHeader(http.StatusNotFound) - return - } - - ctx, span := tracer.Start(r.Context(), name) - defer span.End() - - r = r.WithContext(ctx) - - loggerFields := map[string]any{ - "ledger": name, - } - if span.SpanContext().TraceID().IsValid() { - loggerFields["trace-id"] = span.SpanContext().TraceID().String() - } else { - loggerFields["trace-id"] = randomTraceID(10) - } - - r = r.WithContext(logging.ContextWithFields(r.Context(), loggerFields)) - - l, err := resolver.GetLedgerEngine(r.Context(), name) - if err != nil { - switch { - case sqlutils.IsNotFoundError(err): - sharedapi.WriteErrorResponse(w, http.StatusNotFound, "LEDGER_NOT_FOUND", err) - default: - sharedapi.InternalServerError(w, r, err) - } - return - } 
- - pathWithoutLedger := r.URL.Path[1:] - nextSlash := strings.Index(pathWithoutLedger, "/") - if nextSlash >= 0 { - pathWithoutLedger = pathWithoutLedger[nextSlash:] - } else { - pathWithoutLedger = "" - } - - excluded := false - for _, path := range excludePathFromSchemaCheck { - if pathWithoutLedger == path { - excluded = true - break - } - } - - if !excluded { - isUpToDate, err := l.IsDatabaseUpToDate(ctx) - if err != nil { - sharedapi.BadRequest(w, sharedapi.ErrorInternal, err) - return - } - if !isUpToDate { - sharedapi.BadRequest(w, ErrOutdatedSchema, errors.New("You need to upgrade your ledger schema to the last version")) - return - } - } - - handler.ServeHTTP(w, r.WithContext(ContextWithLedger(r.Context(), l))) - }) - } -} diff --git a/internal/api/common/context.go b/internal/api/common/context.go new file mode 100644 index 000000000..39340e1b2 --- /dev/null +++ b/internal/api/common/context.go @@ -0,0 +1,19 @@ +package common + +import ( + "context" + + "github.com/formancehq/ledger/internal/controller/ledger" +) + +type ledgerKey struct{} + +var _ledgerKey = ledgerKey{} + +func ContextWithLedger(ctx context.Context, ledger ledger.Controller) context.Context { + return context.WithValue(ctx, _ledgerKey, ledger) +} + +func LedgerFromContext(ctx context.Context) ledger.Controller { + return ctx.Value(_ledgerKey).(ledger.Controller) +} diff --git a/internal/api/common/errors.go b/internal/api/common/errors.go new file mode 100644 index 000000000..f518b9411 --- /dev/null +++ b/internal/api/common/errors.go @@ -0,0 +1,17 @@ +package common + +import ( + "errors" + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/platform/postgres" + "net/http" +) + +func HandleCommonErrors(w http.ResponseWriter, r *http.Request, err error) { + switch { + case errors.Is(err, postgres.ErrTooManyClient{}): + api.WriteErrorResponse(w, http.StatusServiceUnavailable, api.ErrorInternal, err) + default: + api.InternalServerError(w, r, err) + } +} diff --git a/internal/api/common/middleware_logid.go b/internal/api/common/middleware_logid.go new file mode 100644 index 000000000..812cde51c --- /dev/null +++ b/internal/api/common/middleware_logid.go @@ -0,0 +1,51 @@ +package common + +import ( + "math/rand" + "net/http" + "sync" + "time" + + "go.opentelemetry.io/otel/trace" + + "github.com/formancehq/go-libs/v2/logging" +) + +var ( + r *rand.Rand + mu sync.Mutex +) + +func init() { + r = rand.New(rand.NewSource(time.Now().UnixNano())) +} + +var letterRunes = []rune("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789") + +func randomTraceID(n int) string { + mu.Lock() + defer mu.Unlock() + + b := make([]rune, n) + for i := range b { + b[i] = letterRunes[r.Intn(len(letterRunes))] + } + return string(b) +} + +func LogID() func(handler http.Handler) http.Handler { + return func(handler http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + loggerFields := map[string]any{} + if span := trace.SpanFromContext(r.Context()); span.SpanContext().TraceID().IsValid() { + loggerFields["trace-id"] = span.SpanContext().TraceID().String() + } else { + loggerFields["trace-id"] = randomTraceID(10) + } + + r = r.WithContext(logging.ContextWithFields(r.Context(), loggerFields)) + + handler.ServeHTTP(w, r) + }) + } +} diff --git a/internal/api/common/middleware_resolver.go b/internal/api/common/middleware_resolver.go new file mode 100644 index 000000000..03c1ab474 --- /dev/null +++ b/internal/api/common/middleware_resolver.go @@ -0,0 +1,80 @@ 
+package common
+
+import (
+	"go.opentelemetry.io/otel/trace"
+	"net/http"
+	"strings"
+
+	"github.com/formancehq/go-libs/v2/api"
+	"github.com/formancehq/go-libs/v2/platform/postgres"
+	"github.com/formancehq/ledger/internal/controller/system"
+
+	"errors"
+)
+
+const (
+	ErrOutdatedSchema = "OUTDATED_SCHEMA"
+)
+
+func LedgerMiddleware(
+	backend system.Controller,
+	resolver func(*http.Request) string,
+	tracer trace.Tracer,
+	excludePathFromSchemaCheck ...string,
+) func(handler http.Handler) http.Handler {
+	return func(handler http.Handler) http.Handler {
+		return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+			name := resolver(r)
+			if name == "" {
+				api.NotFound(w, errors.New("empty name"))
+				return
+			}
+
+			ctx, span := tracer.Start(r.Context(), "OpenLedger")
+			defer span.End()
+
+			var err error
+			l, err := backend.GetLedgerController(ctx, name)
+			if err != nil {
+				switch {
+				case postgres.IsNotFoundError(err):
+					api.WriteErrorResponse(w, http.StatusNotFound, "LEDGER_NOT_FOUND", err)
+				default:
+					api.InternalServerError(w, r, err)
+				}
+				return
+			}
+			ctx = ContextWithLedger(ctx, l)
+
+			pathWithoutLedger := r.URL.Path[1:]
+			nextSlash := strings.Index(pathWithoutLedger, "/")
+			if nextSlash >= 0 {
+				pathWithoutLedger = pathWithoutLedger[nextSlash:]
+			} else {
+				pathWithoutLedger = ""
+			}
+
+			excluded := false
+			for _, path := range excludePathFromSchemaCheck {
+				if pathWithoutLedger == path {
+					excluded = true
+					break
+				}
+			}
+
+			if !excluded {
+				isUpToDate, err := l.IsDatabaseUpToDate(ctx)
+				if err != nil {
+					api.InternalServerError(w, r, err)
+					return
+				}
+				if !isUpToDate {
+					api.BadRequest(w, ErrOutdatedSchema, errors.New("You need to upgrade your ledger schema to the latest version"))
+					return
+				}
+			}
+
+			handler.ServeHTTP(w, r.WithContext(ctx))
+		})
+	}
+}
diff --git a/internal/api/common/middleware_resolver_test.go b/internal/api/common/middleware_resolver_test.go
new file mode 100644
index 000000000..42f3bb437
--- /dev/null
+++ b/internal/api/common/middleware_resolver_test.go
@@ -0,0 +1,114 @@
+package common
+
+import (
+	"encoding/json"
+	nooptracer "go.opentelemetry.io/otel/trace/noop"
+	"net/http"
+	"net/http/httptest"
+	"testing"
+
+	"errors"
+	"github.com/formancehq/go-libs/v2/api"
+	"github.com/formancehq/go-libs/v2/logging"
+	ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger"
+	"github.com/stretchr/testify/require"
+	"go.uber.org/mock/gomock"
+)
+
+func TestResolverMiddleware(t *testing.T) {
+	t.Parallel()
+
+	type testCase struct {
+		name                   string
+		getLedgerControllerErr error
+		isDatabaseUpToDateErr  error
+		isDatabaseUpToDate     bool
+		expectStatusCode       int
+		expectErrorCode        string
+		ledger                 string
+	}
+
+	for _, tc := range []testCase{
+		{
+			name:               "nominal",
+			isDatabaseUpToDate: true,
+			ledger:             "foo",
+		},
+		{
+			name:             "empty name",
+			ledger:           "",
+			expectStatusCode: http.StatusNotFound,
+			expectErrorCode:  api.ErrorCodeNotFound,
+		},
+		{
+			name:                   "not found",
+			ledger:                 "foo",
+			getLedgerControllerErr: ledgercontroller.ErrNotFound,
+			expectStatusCode:       http.StatusNotFound,
+			expectErrorCode:        "LEDGER_NOT_FOUND",
+		},
+		{
+			name:                   "error on retrieving ledger controller",
+			ledger:                 "foo",
+			getLedgerControllerErr: errors.New("unexpected error"),
+			expectStatusCode:       http.StatusInternalServerError,
+			expectErrorCode:        api.ErrorInternal,
+		},
+		{
+			name:                  "error on checking database schema status",
+			ledger:                "foo",
+			isDatabaseUpToDateErr: errors.New("unexpected error"),
+			expectStatusCode:      http.StatusInternalServerError,
+			expectErrorCode:
api.ErrorInternal, + }, + { + name: "database not up to date", + ledger: "foo", + expectStatusCode: http.StatusBadRequest, + expectErrorCode: ErrOutdatedSchema, + }, + } { + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + ctrl := gomock.NewController(t) + ctx := logging.TestingContext() + systemController := NewSystemController(ctrl) + ledgerController := NewLedgerController(ctrl) + + ledger := tc.ledger + + systemController.EXPECT(). + GetLedgerController(gomock.Any(), ledger). + AnyTimes(). + Return(ledgerController, tc.getLedgerControllerErr) + + ledgerController.EXPECT(). + IsDatabaseUpToDate(gomock.Any()). + AnyTimes(). + Return(tc.isDatabaseUpToDate, tc.isDatabaseUpToDateErr) + + m := LedgerMiddleware(systemController, func(*http.Request) string { + return ledger + }, nooptracer.Tracer{}) + h := m(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { + w.WriteHeader(http.StatusNoContent) + })) + + req := httptest.NewRequest(http.MethodGet, "/"+ledger+"/_info", nil) + req = req.WithContext(ctx) + rec := httptest.NewRecorder() + + h.ServeHTTP(rec, req) + + if tc.expectStatusCode == 0 { + require.Equal(t, http.StatusNoContent, rec.Code) + } else { + require.Equal(t, tc.expectStatusCode, rec.Code) + errorResponse := api.ErrorResponse{} + require.NoError(t, json.Unmarshal(rec.Body.Bytes(), &errorResponse)) + require.Equal(t, tc.expectErrorCode, errorResponse.ErrorCode) + } + }) + } +} diff --git a/internal/api/common/mocks.go b/internal/api/common/mocks.go new file mode 100644 index 000000000..ffc0edce4 --- /dev/null +++ b/internal/api/common/mocks.go @@ -0,0 +1,3 @@ +//go:generate mockgen -write_source_comment=false -write_package_comment=false -source ../../controller/system/controller.go -destination mocks_system_controller_test.go -package common --mock_names Controller=SystemController . Controller +//go:generate mockgen -write_source_comment=false -write_package_comment=false -source ../../controller/ledger/controller.go -destination mocks_ledger_controller_test.go -package common --mock_names Controller=LedgerController . Controller +package common \ No newline at end of file diff --git a/internal/api/common/mocks_ledger_controller_test.go b/internal/api/common/mocks_ledger_controller_test.go new file mode 100644 index 000000000..3b9b07eaf --- /dev/null +++ b/internal/api/common/mocks_ledger_controller_test.go @@ -0,0 +1,334 @@ +// Code generated by MockGen. DO NOT EDIT. +// +// Generated by this command: +// +// mockgen -write_source_comment=false -write_package_comment=false -source ../../controller/ledger/controller.go -destination mocks_ledger_controller_test.go -package common --mock_names Controller=LedgerController . Controller +package common + +import ( + context "context" + reflect "reflect" + + bunpaginate "github.com/formancehq/go-libs/v2/bun/bunpaginate" + migrations "github.com/formancehq/go-libs/v2/migrations" + ledger "github.com/formancehq/ledger/internal" + ledger0 "github.com/formancehq/ledger/internal/controller/ledger" + gomock "go.uber.org/mock/gomock" +) + +// LedgerController is a mock of Controller interface. +type LedgerController struct { + ctrl *gomock.Controller + recorder *LedgerControllerMockRecorder +} + +// LedgerControllerMockRecorder is the mock recorder for LedgerController. +type LedgerControllerMockRecorder struct { + mock *LedgerController +} + +// NewLedgerController creates a new mock instance. 
+func NewLedgerController(ctrl *gomock.Controller) *LedgerController { + mock := &LedgerController{ctrl: ctrl} + mock.recorder = &LedgerControllerMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *LedgerController) EXPECT() *LedgerControllerMockRecorder { + return m.recorder +} + +// CountAccounts mocks base method. +func (m *LedgerController) CountAccounts(ctx context.Context, query ledger0.ListAccountsQuery) (int, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CountAccounts", ctx, query) + ret0, _ := ret[0].(int) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// CountAccounts indicates an expected call of CountAccounts. +func (mr *LedgerControllerMockRecorder) CountAccounts(ctx, query any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CountAccounts", reflect.TypeOf((*LedgerController)(nil).CountAccounts), ctx, query) +} + +// CountTransactions mocks base method. +func (m *LedgerController) CountTransactions(ctx context.Context, query ledger0.ListTransactionsQuery) (int, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CountTransactions", ctx, query) + ret0, _ := ret[0].(int) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// CountTransactions indicates an expected call of CountTransactions. +func (mr *LedgerControllerMockRecorder) CountTransactions(ctx, query any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CountTransactions", reflect.TypeOf((*LedgerController)(nil).CountTransactions), ctx, query) +} + +// CreateTransaction mocks base method. +func (m *LedgerController) CreateTransaction(ctx context.Context, parameters ledger0.Parameters[ledger0.RunScript]) (*ledger.CreatedTransaction, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CreateTransaction", ctx, parameters) + ret0, _ := ret[0].(*ledger.CreatedTransaction) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// CreateTransaction indicates an expected call of CreateTransaction. +func (mr *LedgerControllerMockRecorder) CreateTransaction(ctx, parameters any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateTransaction", reflect.TypeOf((*LedgerController)(nil).CreateTransaction), ctx, parameters) +} + +// DeleteAccountMetadata mocks base method. +func (m *LedgerController) DeleteAccountMetadata(ctx context.Context, parameters ledger0.Parameters[ledger0.DeleteAccountMetadata]) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "DeleteAccountMetadata", ctx, parameters) + ret0, _ := ret[0].(error) + return ret0 +} + +// DeleteAccountMetadata indicates an expected call of DeleteAccountMetadata. +func (mr *LedgerControllerMockRecorder) DeleteAccountMetadata(ctx, parameters any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteAccountMetadata", reflect.TypeOf((*LedgerController)(nil).DeleteAccountMetadata), ctx, parameters) +} + +// DeleteTransactionMetadata mocks base method. +func (m *LedgerController) DeleteTransactionMetadata(ctx context.Context, parameters ledger0.Parameters[ledger0.DeleteTransactionMetadata]) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "DeleteTransactionMetadata", ctx, parameters) + ret0, _ := ret[0].(error) + return ret0 +} + +// DeleteTransactionMetadata indicates an expected call of DeleteTransactionMetadata. 
+func (mr *LedgerControllerMockRecorder) DeleteTransactionMetadata(ctx, parameters any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteTransactionMetadata", reflect.TypeOf((*LedgerController)(nil).DeleteTransactionMetadata), ctx, parameters) +} + +// Export mocks base method. +func (m *LedgerController) Export(ctx context.Context, w ledger0.ExportWriter) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Export", ctx, w) + ret0, _ := ret[0].(error) + return ret0 +} + +// Export indicates an expected call of Export. +func (mr *LedgerControllerMockRecorder) Export(ctx, w any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Export", reflect.TypeOf((*LedgerController)(nil).Export), ctx, w) +} + +// GetAccount mocks base method. +func (m *LedgerController) GetAccount(ctx context.Context, query ledger0.GetAccountQuery) (*ledger.Account, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetAccount", ctx, query) + ret0, _ := ret[0].(*ledger.Account) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetAccount indicates an expected call of GetAccount. +func (mr *LedgerControllerMockRecorder) GetAccount(ctx, query any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAccount", reflect.TypeOf((*LedgerController)(nil).GetAccount), ctx, query) +} + +// GetAggregatedBalances mocks base method. +func (m *LedgerController) GetAggregatedBalances(ctx context.Context, q ledger0.GetAggregatedBalanceQuery) (ledger.BalancesByAssets, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetAggregatedBalances", ctx, q) + ret0, _ := ret[0].(ledger.BalancesByAssets) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetAggregatedBalances indicates an expected call of GetAggregatedBalances. +func (mr *LedgerControllerMockRecorder) GetAggregatedBalances(ctx, q any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAggregatedBalances", reflect.TypeOf((*LedgerController)(nil).GetAggregatedBalances), ctx, q) +} + +// GetMigrationsInfo mocks base method. +func (m *LedgerController) GetMigrationsInfo(ctx context.Context) ([]migrations.Info, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetMigrationsInfo", ctx) + ret0, _ := ret[0].([]migrations.Info) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetMigrationsInfo indicates an expected call of GetMigrationsInfo. +func (mr *LedgerControllerMockRecorder) GetMigrationsInfo(ctx any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetMigrationsInfo", reflect.TypeOf((*LedgerController)(nil).GetMigrationsInfo), ctx) +} + +// GetStats mocks base method. +func (m *LedgerController) GetStats(ctx context.Context) (ledger0.Stats, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetStats", ctx) + ret0, _ := ret[0].(ledger0.Stats) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetStats indicates an expected call of GetStats. +func (mr *LedgerControllerMockRecorder) GetStats(ctx any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetStats", reflect.TypeOf((*LedgerController)(nil).GetStats), ctx) +} + +// GetTransaction mocks base method. 
+func (m *LedgerController) GetTransaction(ctx context.Context, query ledger0.GetTransactionQuery) (*ledger.Transaction, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetTransaction", ctx, query) + ret0, _ := ret[0].(*ledger.Transaction) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetTransaction indicates an expected call of GetTransaction. +func (mr *LedgerControllerMockRecorder) GetTransaction(ctx, query any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTransaction", reflect.TypeOf((*LedgerController)(nil).GetTransaction), ctx, query) +} + +// GetVolumesWithBalances mocks base method. +func (m *LedgerController) GetVolumesWithBalances(ctx context.Context, q ledger0.GetVolumesWithBalancesQuery) (*bunpaginate.Cursor[ledger.VolumesWithBalanceByAssetByAccount], error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetVolumesWithBalances", ctx, q) + ret0, _ := ret[0].(*bunpaginate.Cursor[ledger.VolumesWithBalanceByAssetByAccount]) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetVolumesWithBalances indicates an expected call of GetVolumesWithBalances. +func (mr *LedgerControllerMockRecorder) GetVolumesWithBalances(ctx, q any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetVolumesWithBalances", reflect.TypeOf((*LedgerController)(nil).GetVolumesWithBalances), ctx, q) +} + +// Import mocks base method. +func (m *LedgerController) Import(ctx context.Context, stream chan ledger.Log) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Import", ctx, stream) + ret0, _ := ret[0].(error) + return ret0 +} + +// Import indicates an expected call of Import. +func (mr *LedgerControllerMockRecorder) Import(ctx, stream any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Import", reflect.TypeOf((*LedgerController)(nil).Import), ctx, stream) +} + +// IsDatabaseUpToDate mocks base method. +func (m *LedgerController) IsDatabaseUpToDate(ctx context.Context) (bool, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "IsDatabaseUpToDate", ctx) + ret0, _ := ret[0].(bool) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// IsDatabaseUpToDate indicates an expected call of IsDatabaseUpToDate. +func (mr *LedgerControllerMockRecorder) IsDatabaseUpToDate(ctx any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "IsDatabaseUpToDate", reflect.TypeOf((*LedgerController)(nil).IsDatabaseUpToDate), ctx) +} + +// ListAccounts mocks base method. +func (m *LedgerController) ListAccounts(ctx context.Context, query ledger0.ListAccountsQuery) (*bunpaginate.Cursor[ledger.Account], error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ListAccounts", ctx, query) + ret0, _ := ret[0].(*bunpaginate.Cursor[ledger.Account]) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ListAccounts indicates an expected call of ListAccounts. +func (mr *LedgerControllerMockRecorder) ListAccounts(ctx, query any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListAccounts", reflect.TypeOf((*LedgerController)(nil).ListAccounts), ctx, query) +} + +// ListLogs mocks base method. 
+func (m *LedgerController) ListLogs(ctx context.Context, query ledger0.GetLogsQuery) (*bunpaginate.Cursor[ledger.Log], error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ListLogs", ctx, query) + ret0, _ := ret[0].(*bunpaginate.Cursor[ledger.Log]) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ListLogs indicates an expected call of ListLogs. +func (mr *LedgerControllerMockRecorder) ListLogs(ctx, query any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListLogs", reflect.TypeOf((*LedgerController)(nil).ListLogs), ctx, query) +} + +// ListTransactions mocks base method. +func (m *LedgerController) ListTransactions(ctx context.Context, query ledger0.ListTransactionsQuery) (*bunpaginate.Cursor[ledger.Transaction], error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ListTransactions", ctx, query) + ret0, _ := ret[0].(*bunpaginate.Cursor[ledger.Transaction]) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ListTransactions indicates an expected call of ListTransactions. +func (mr *LedgerControllerMockRecorder) ListTransactions(ctx, query any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListTransactions", reflect.TypeOf((*LedgerController)(nil).ListTransactions), ctx, query) +} + +// RevertTransaction mocks base method. +func (m *LedgerController) RevertTransaction(ctx context.Context, parameters ledger0.Parameters[ledger0.RevertTransaction]) (*ledger.RevertedTransaction, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "RevertTransaction", ctx, parameters) + ret0, _ := ret[0].(*ledger.RevertedTransaction) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// RevertTransaction indicates an expected call of RevertTransaction. +func (mr *LedgerControllerMockRecorder) RevertTransaction(ctx, parameters any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RevertTransaction", reflect.TypeOf((*LedgerController)(nil).RevertTransaction), ctx, parameters) +} + +// SaveAccountMetadata mocks base method. +func (m *LedgerController) SaveAccountMetadata(ctx context.Context, parameters ledger0.Parameters[ledger0.SaveAccountMetadata]) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "SaveAccountMetadata", ctx, parameters) + ret0, _ := ret[0].(error) + return ret0 +} + +// SaveAccountMetadata indicates an expected call of SaveAccountMetadata. +func (mr *LedgerControllerMockRecorder) SaveAccountMetadata(ctx, parameters any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SaveAccountMetadata", reflect.TypeOf((*LedgerController)(nil).SaveAccountMetadata), ctx, parameters) +} + +// SaveTransactionMetadata mocks base method. +func (m *LedgerController) SaveTransactionMetadata(ctx context.Context, parameters ledger0.Parameters[ledger0.SaveTransactionMetadata]) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "SaveTransactionMetadata", ctx, parameters) + ret0, _ := ret[0].(error) + return ret0 +} + +// SaveTransactionMetadata indicates an expected call of SaveTransactionMetadata. 
+func (mr *LedgerControllerMockRecorder) SaveTransactionMetadata(ctx, parameters any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SaveTransactionMetadata", reflect.TypeOf((*LedgerController)(nil).SaveTransactionMetadata), ctx, parameters) +} diff --git a/internal/api/common/mocks_system_controller_test.go b/internal/api/common/mocks_system_controller_test.go new file mode 100644 index 000000000..b0fbeaea8 --- /dev/null +++ b/internal/api/common/mocks_system_controller_test.go @@ -0,0 +1,126 @@ +// Code generated by MockGen. DO NOT EDIT. +// +// Generated by this command: +// +// mockgen -write_source_comment=false -write_package_comment=false -source ../../controller/system/controller.go -destination mocks_system_controller_test.go -package common --mock_names Controller=SystemController . Controller +package common + +import ( + context "context" + reflect "reflect" + + bunpaginate "github.com/formancehq/go-libs/v2/bun/bunpaginate" + ledger "github.com/formancehq/ledger/internal" + ledger0 "github.com/formancehq/ledger/internal/controller/ledger" + gomock "go.uber.org/mock/gomock" +) + +// SystemController is a mock of Controller interface. +type SystemController struct { + ctrl *gomock.Controller + recorder *SystemControllerMockRecorder +} + +// SystemControllerMockRecorder is the mock recorder for SystemController. +type SystemControllerMockRecorder struct { + mock *SystemController +} + +// NewSystemController creates a new mock instance. +func NewSystemController(ctrl *gomock.Controller) *SystemController { + mock := &SystemController{ctrl: ctrl} + mock.recorder = &SystemControllerMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *SystemController) EXPECT() *SystemControllerMockRecorder { + return m.recorder +} + +// CreateLedger mocks base method. +func (m *SystemController) CreateLedger(ctx context.Context, name string, configuration ledger.Configuration) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CreateLedger", ctx, name, configuration) + ret0, _ := ret[0].(error) + return ret0 +} + +// CreateLedger indicates an expected call of CreateLedger. +func (mr *SystemControllerMockRecorder) CreateLedger(ctx, name, configuration any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateLedger", reflect.TypeOf((*SystemController)(nil).CreateLedger), ctx, name, configuration) +} + +// DeleteLedgerMetadata mocks base method. +func (m *SystemController) DeleteLedgerMetadata(ctx context.Context, param, key string) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "DeleteLedgerMetadata", ctx, param, key) + ret0, _ := ret[0].(error) + return ret0 +} + +// DeleteLedgerMetadata indicates an expected call of DeleteLedgerMetadata. +func (mr *SystemControllerMockRecorder) DeleteLedgerMetadata(ctx, param, key any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteLedgerMetadata", reflect.TypeOf((*SystemController)(nil).DeleteLedgerMetadata), ctx, param, key) +} + +// GetLedger mocks base method. +func (m *SystemController) GetLedger(ctx context.Context, name string) (*ledger.Ledger, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetLedger", ctx, name) + ret0, _ := ret[0].(*ledger.Ledger) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetLedger indicates an expected call of GetLedger. 
+func (mr *SystemControllerMockRecorder) GetLedger(ctx, name any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetLedger", reflect.TypeOf((*SystemController)(nil).GetLedger), ctx, name) +} + +// GetLedgerController mocks base method. +func (m *SystemController) GetLedgerController(ctx context.Context, name string) (ledger0.Controller, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetLedgerController", ctx, name) + ret0, _ := ret[0].(ledger0.Controller) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetLedgerController indicates an expected call of GetLedgerController. +func (mr *SystemControllerMockRecorder) GetLedgerController(ctx, name any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetLedgerController", reflect.TypeOf((*SystemController)(nil).GetLedgerController), ctx, name) +} + +// ListLedgers mocks base method. +func (m *SystemController) ListLedgers(ctx context.Context, query ledger0.ListLedgersQuery) (*bunpaginate.Cursor[ledger.Ledger], error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ListLedgers", ctx, query) + ret0, _ := ret[0].(*bunpaginate.Cursor[ledger.Ledger]) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ListLedgers indicates an expected call of ListLedgers. +func (mr *SystemControllerMockRecorder) ListLedgers(ctx, query any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListLedgers", reflect.TypeOf((*SystemController)(nil).ListLedgers), ctx, query) +} + +// UpdateLedgerMetadata mocks base method. +func (m_2 *SystemController) UpdateLedgerMetadata(ctx context.Context, name string, m map[string]string) error { + m_2.ctrl.T.Helper() + ret := m_2.ctrl.Call(m_2, "UpdateLedgerMetadata", ctx, name, m) + ret0, _ := ret[0].(error) + return ret0 +} + +// UpdateLedgerMetadata indicates an expected call of UpdateLedgerMetadata. 
+func (mr *SystemControllerMockRecorder) UpdateLedgerMetadata(ctx, name, m any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateLedgerMetadata", reflect.TypeOf((*SystemController)(nil).UpdateLedgerMetadata), ctx, name, m) +} diff --git a/internal/numscript.go b/internal/api/common/numscript.go similarity index 83% rename from internal/numscript.go rename to internal/api/common/numscript.go index 9936f86bc..2eae29a37 100644 --- a/internal/numscript.go +++ b/internal/api/common/numscript.go @@ -1,11 +1,13 @@ -package ledger +package common import ( "fmt" + "github.com/formancehq/ledger/internal" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" "sort" "strings" - "github.com/formancehq/go-libs/metadata" + "github.com/formancehq/go-libs/v2/metadata" ) type variable struct { @@ -13,7 +15,7 @@ type variable struct { value string } -func TxToScriptData(txData TransactionData, allowUnboundedOverdrafts bool) RunScript { +func TxToScriptData(txData ledger.TransactionData, allowUnboundedOverdrafts bool) ledgercontroller.RunScript { sb := strings.Builder{} monetaryToVars := map[string]variable{} accountsToVars := map[string]variable{} @@ -21,7 +23,7 @@ func TxToScriptData(txData TransactionData, allowUnboundedOverdrafts bool) RunSc j := 0 for _, p := range txData.Postings { if _, ok := accountsToVars[p.Source]; !ok { - if p.Source != WORLD { + if p.Source != ledger.WORLD { accountsToVars[p.Source] = variable{ name: fmt.Sprintf("va%d", i), value: p.Source, @@ -30,7 +32,7 @@ func TxToScriptData(txData TransactionData, allowUnboundedOverdrafts bool) RunSc } } if _, ok := accountsToVars[p.Destination]; !ok { - if p.Destination != WORLD { + if p.Destination != ledger.WORLD { accountsToVars[p.Destination] = variable{ name: fmt.Sprintf("va%d", i), value: p.Destination, @@ -74,7 +76,7 @@ func TxToScriptData(txData TransactionData, allowUnboundedOverdrafts bool) RunSc panic(fmt.Sprintf("monetary %s not found", m)) } sb.WriteString(fmt.Sprintf("send $%s (\n", mon.name)) - if p.Source == WORLD { + if p.Source == ledger.WORLD { sb.WriteString("\tsource = @world\n") } else { src, ok := accountsToVars[p.Source] @@ -87,7 +89,7 @@ func TxToScriptData(txData TransactionData, allowUnboundedOverdrafts bool) RunSc } sb.WriteString("\n") } - if p.Destination == WORLD { + if p.Destination == ledger.WORLD { sb.WriteString("\tdestination = @world\n") } else { dest, ok := accountsToVars[p.Destination] @@ -111,8 +113,8 @@ func TxToScriptData(txData TransactionData, allowUnboundedOverdrafts bool) RunSc txData.Metadata = metadata.Metadata{} } - return RunScript{ - Script: Script{ + return ledgercontroller.RunScript{ + Script: ledgercontroller.Script{ Plain: sb.String(), Vars: vars, }, diff --git a/internal/api/module.go b/internal/api/module.go index f0226ac59..40a83fa41 100644 --- a/internal/api/module.go +++ b/internal/api/module.go @@ -2,43 +2,39 @@ package api import ( _ "embed" - + "github.com/formancehq/go-libs/v2/auth" + "github.com/formancehq/go-libs/v2/health" + "github.com/formancehq/go-libs/v2/logging" + "github.com/formancehq/ledger/internal/controller/system" "github.com/go-chi/chi/v5" - - "github.com/formancehq/go-libs/auth" - "github.com/formancehq/go-libs/health" - "github.com/formancehq/ledger/internal/api/backend" - "github.com/formancehq/ledger/internal/engine" - "github.com/formancehq/ledger/internal/opentelemetry/metrics" - "github.com/formancehq/ledger/internal/storage/driver" - "go.opentelemetry.io/otel/metric" - 
"go.opentelemetry.io/otel/metric/noop" + "go.opentelemetry.io/otel/trace" "go.uber.org/fx" ) type Config struct { - Version string - ReadOnly bool - Debug bool + Version string + Debug bool + BulkMaxSize int } func Module(cfg Config) fx.Option { return fx.Options( fx.Provide(func( - backend backend.Backend, - healthController *health.HealthController, - globalMetricsRegistry metrics.GlobalRegistry, - a auth.Authenticator, + backend system.Controller, + authenticator auth.Authenticator, + logger logging.Logger, + tracer trace.TracerProvider, ) chi.Router { - return NewRouter(backend, healthController, globalMetricsRegistry, a, cfg.ReadOnly, cfg.Debug) - }), - fx.Provide(func(storageDriver *driver.Driver, resolver *engine.Resolver) backend.Backend { - return backend.NewDefaultBackend(storageDriver, cfg.Version, resolver) + return NewRouter( + backend, + authenticator, + logger, + "develop", + cfg.Debug, + WithTracer(tracer.Tracer("api")), + WithBulkMaxSize(cfg.BulkMaxSize), + ) }), - fx.Provide(fx.Annotate(noop.NewMeterProvider, fx.As(new(metric.MeterProvider)))), - fx.Decorate(fx.Annotate(func(meterProvider metric.MeterProvider) (metrics.GlobalRegistry, error) { - return metrics.RegisterGlobalRegistry(meterProvider) - }, fx.As(new(metrics.GlobalRegistry)))), health.Module(), ) } diff --git a/internal/api/read_only.go b/internal/api/read_only.go deleted file mode 100644 index 0a788ef76..000000000 --- a/internal/api/read_only.go +++ /dev/null @@ -1,18 +0,0 @@ -package api - -import ( - "net/http" - - "github.com/formancehq/go-libs/api" - "github.com/pkg/errors" -) - -func ReadOnly(h http.Handler) http.Handler { - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - if r.Method != http.MethodGet && r.Method != http.MethodOptions && r.Method != http.MethodHead { - api.BadRequest(w, "READ_ONLY", errors.New("Read only mode")) - return - } - h.ServeHTTP(w, r) - }) -} diff --git a/internal/api/router.go b/internal/api/router.go index aad48ee77..5ee8d2362 100644 --- a/internal/api/router.go +++ b/internal/api/router.go @@ -1,42 +1,108 @@ package api import ( + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/ledger/internal/controller/system" + "go.opentelemetry.io/otel/trace" + nooptracer "go.opentelemetry.io/otel/trace/noop" "net/http" - "github.com/go-chi/chi/v5" + "github.com/formancehq/go-libs/v2/logging" + "github.com/go-chi/chi/v5/middleware" + "github.com/go-chi/cors" - "github.com/formancehq/go-libs/auth" - "github.com/formancehq/go-libs/health" - "github.com/formancehq/ledger/internal/api/backend" + "github.com/formancehq/go-libs/v2/auth" + "github.com/formancehq/ledger/internal/api/common" v1 "github.com/formancehq/ledger/internal/api/v1" v2 "github.com/formancehq/ledger/internal/api/v2" - "github.com/formancehq/ledger/internal/opentelemetry/metrics" + "github.com/go-chi/chi/v5" ) +// todo: refine textual errors func NewRouter( - backend backend.Backend, - healthController *health.HealthController, - globalMetricsRegistry metrics.GlobalRegistry, - a auth.Authenticator, - readOnly bool, + systemController system.Controller, + authenticator auth.Authenticator, + logger logging.Logger, + version string, debug bool, + opts ...RouterOption, ) chi.Router { + + routerOptions := routerOptions{} + for _, opt := range append(defaultRouterOptions, opts...) 
{ + opt(&routerOptions) + } + mux := chi.NewRouter() - mux.Use(func(handler http.Handler) http.Handler { - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.Header().Set("Content-Type", "application/json") - handler.ServeHTTP(w, r) - }) - }) - if readOnly { - mux.Use(ReadOnly) + mux.Use( + middleware.Recoverer, + func(handler http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + r = r.WithContext(logging.ContextWithLogger(r.Context(), logger)) + + handler.ServeHTTP(w, r) + }) + }, + cors.New(cors.Options{ + AllowOriginFunc: func(r *http.Request, origin string) bool { + return true + }, + AllowCredentials: true, + }).Handler, + common.LogID(), + ) + + commonMiddlewares := []func(http.Handler) http.Handler{ + middleware.RequestLogger(api.NewLogFormatter()), } - v2Router := v2.NewRouter(backend, healthController, globalMetricsRegistry, a, debug) + + v2Router := v2.NewRouter( + systemController, + authenticator, + debug, + v2.WithTracer(routerOptions.tracer), + v2.WithMiddlewares(commonMiddlewares...), + v2.WithBulkMaxSize(routerOptions.bulkMaxSize), + ) mux.Handle("/v2*", http.StripPrefix("/v2", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { chi.RouteContext(r.Context()).Reset() v2Router.ServeHTTP(w, r) }))) - mux.Handle("/*", v1.NewRouter(backend, healthController, globalMetricsRegistry, a, debug)) + mux.Handle("/*", v1.NewRouter( + systemController, + authenticator, + version, + debug, + v1.WithTracer(routerOptions.tracer), + v1.WithMiddlewares(commonMiddlewares...), + )) return mux } + +type routerOptions struct { + tracer trace.Tracer + bulkMaxSize int +} + +type RouterOption func(ro *routerOptions) + +func WithTracer(tracer trace.Tracer) RouterOption { + return func(ro *routerOptions) { + ro.tracer = tracer + } +} + +func WithBulkMaxSize(bulkMaxSize int) RouterOption { + return func(ro *routerOptions) { + ro.bulkMaxSize = bulkMaxSize + } +} + +var defaultRouterOptions = []RouterOption{ + WithTracer(nooptracer.Tracer{}), + WithBulkMaxSize(DefaultBulkMaxSize), +} + +const DefaultBulkMaxSize = 100 diff --git a/internal/api/v1/api_utils_test.go b/internal/api/v1/api_utils_test.go index f6358295e..454fcbf25 100644 --- a/internal/api/v1/api_utils_test.go +++ b/internal/api/v1/api_utils_test.go @@ -1,29 +1,30 @@ -package v1_test +package v1 import ( "testing" - "github.com/formancehq/ledger/internal/storage/systemstore" + ledger "github.com/formancehq/ledger/internal" - "github.com/formancehq/ledger/internal/api/backend" "go.uber.org/mock/gomock" ) -func newTestingBackend(t *testing.T, expectedSchemaCheck bool) (*backend.MockBackend, *backend.MockLedger) { +func newTestingSystemController(t *testing.T, expectedSchemaCheck bool) (*SystemController, *LedgerController) { + t.Helper() + ctrl := gomock.NewController(t) - mockLedger := backend.NewMockLedger(ctrl) - backend := backend.NewMockBackend(ctrl) + mockLedger := NewLedgerController(ctrl) + backend := NewSystemController(ctrl) backend. EXPECT(). GetLedger(gomock.Any(), gomock.Any()). MinTimes(0). - Return(&systemstore.Ledger{}, nil) + Return(&ledger.Ledger{}, nil) t.Cleanup(func() { ctrl.Finish() }) backend. EXPECT(). - GetLedgerEngine(gomock.Any(), gomock.Any()). + GetLedgerController(gomock.Any(), gomock.Any()). MinTimes(0). 
Return(mockLedger, nil) t.Cleanup(func() { diff --git a/internal/api/v1/controllers_accounts.go b/internal/api/v1/controllers_accounts.go index 1a0514a30..8d4e17935 100644 --- a/internal/api/v1/controllers_accounts.go +++ b/internal/api/v1/controllers_accounts.go @@ -2,42 +2,27 @@ package v1 import ( "encoding/json" - "fmt" "math/big" "net/http" - "net/url" "strconv" "strings" - "github.com/formancehq/ledger/pkg/core/accounts" + "errors" - "github.com/go-chi/chi/v5" - - storageerrors "github.com/formancehq/ledger/internal/storage/sqlutils" - - "github.com/formancehq/go-libs/pointer" - - "github.com/formancehq/go-libs/bun/bunpaginate" - "github.com/formancehq/ledger/internal/api/backend" - "github.com/pkg/errors" - - sharedapi "github.com/formancehq/go-libs/api" - "github.com/formancehq/go-libs/metadata" - "github.com/formancehq/go-libs/query" + "github.com/formancehq/go-libs/v2/query" ledger "github.com/formancehq/ledger/internal" - "github.com/formancehq/ledger/internal/storage/ledgerstore" ) -type accountWithVolumesAndBalances ledger.ExpandedAccount +type accountWithVolumesAndBalances ledger.Account func (a accountWithVolumesAndBalances) MarshalJSON() ([]byte, error) { type aux struct { - ledger.ExpandedAccount + ledger.Account Balances map[string]*big.Int `json:"balances"` } return json.Marshal(aux{ - ExpandedAccount: ledger.ExpandedAccount(a), - Balances: a.Volumes.Balances(), + Account: ledger.Account(a), + Balances: a.Volumes.Balances(), }) } @@ -45,28 +30,24 @@ func buildAccountsFilterQuery(r *http.Request) (query.Builder, error) { clauses := make([]query.Builder, 0) if balance := r.URL.Query().Get("balance"); balance != "" { - if _, err := strconv.ParseInt(balance, 10, 64); err != nil { - return nil, err - } - - balanceOperator, err := getBalanceOperator(r) + balanceValue, err := strconv.ParseInt(balance, 10, 64) if err != nil { return nil, err } - switch balanceOperator { + switch getBalanceOperator(r) { case "e": - clauses = append(clauses, query.Match("balance", balance)) + clauses = append(clauses, query.Match("balance", balanceValue)) case "ne": - clauses = append(clauses, query.Not(query.Match("balance", balance))) + clauses = append(clauses, query.Not(query.Match("balance", balanceValue))) case "lt": - clauses = append(clauses, query.Lt("balance", balance)) + clauses = append(clauses, query.Lt("balance", balanceValue)) case "lte": - clauses = append(clauses, query.Lte("balance", balance)) + clauses = append(clauses, query.Lte("balance", balanceValue)) case "gt": - clauses = append(clauses, query.Gt("balance", balance)) + clauses = append(clauses, query.Gt("balance", balanceValue)) case "gte": - clauses = append(clauses, query.Gte("balance", balance)) + clauses = append(clauses, query.Gte("balance", balanceValue)) default: return nil, errors.New("invalid balance operator") } @@ -85,133 +66,9 @@ func buildAccountsFilterQuery(r *http.Request) (query.Builder, error) { if len(clauses) == 0 { return nil, nil } - - return query.And(clauses...), nil -} - -func countAccounts(w http.ResponseWriter, r *http.Request) { - l := backend.LedgerFromContext(r.Context()) - - options, err := getPaginatedQueryOptionsOfPITFilterWithVolumes(r) - if err != nil { - sharedapi.BadRequest(w, ErrValidation, err) - return - } - - count, err := l.CountAccounts(r.Context(), ledgerstore.NewGetAccountsQuery(*options)) - if err != nil { - sharedapi.InternalServerError(w, r, err) - return - } - - w.Header().Set("Count", fmt.Sprint(count)) - sharedapi.NoContent(w) -} - -func getAccounts(w http.ResponseWriter, r 
*http.Request) { - l := backend.LedgerFromContext(r.Context()) - - query, err := bunpaginate.Extract[ledgerstore.GetAccountsQuery](r, func() (*ledgerstore.GetAccountsQuery, error) { - options, err := getPaginatedQueryOptionsOfPITFilterWithVolumes(r) - if err != nil { - return nil, err - } - options.QueryBuilder, err = buildAccountsFilterQuery(r) - return pointer.For(ledgerstore.NewGetAccountsQuery(*options)), nil - }) - if err != nil { - sharedapi.BadRequest(w, ErrValidation, err) - return - } - - cursor, err := l.GetAccountsWithVolumes(r.Context(), *query) - if err != nil { - sharedapi.InternalServerError(w, r, err) - return - } - - sharedapi.RenderCursor(w, *cursor) -} - -func getAccount(w http.ResponseWriter, r *http.Request) { - l := backend.LedgerFromContext(r.Context()) - - param, err := url.PathUnescape(chi.URLParam(r, "address")) - if err != nil { - sharedapi.BadRequestWithDetails(w, ErrValidation, err, err.Error()) - return - } - - query := ledgerstore.NewGetAccountQuery(param) - query = query.WithExpandVolumes() - - acc, err := l.GetAccountWithVolumes(r.Context(), query) - if err != nil { - switch { - case storageerrors.IsNotFoundError(err): - acc = &ledger.ExpandedAccount{ - Account: ledger.Account{ - Address: param, - Metadata: map[string]string{}, - }, - Volumes: map[string]*ledger.Volumes{}, - EffectiveVolumes: map[string]*ledger.Volumes{}, - } - default: - sharedapi.InternalServerError(w, r, err) - return - } - } - - sharedapi.Ok(w, accountWithVolumesAndBalances(*acc)) -} - -func postAccountMetadata(w http.ResponseWriter, r *http.Request) { - l := backend.LedgerFromContext(r.Context()) - param, err := url.PathUnescape(chi.URLParam(r, "address")) - if err != nil { - sharedapi.BadRequestWithDetails(w, ErrValidation, err, err.Error()) - return + if len(clauses) == 1 { + return clauses[0], nil } - if !accounts.ValidateAddress(param) { - sharedapi.BadRequest(w, ErrValidation, errors.New("invalid account address format")) - return - } - - var m metadata.Metadata - if err := json.NewDecoder(r.Body).Decode(&m); err != nil { - sharedapi.BadRequest(w, ErrValidation, errors.New("invalid metadata format")) - return - } - - err = l.SaveMeta(r.Context(), getCommandParameters(r), ledger.MetaTargetTypeAccount, param, m) - if err != nil { - sharedapi.InternalServerError(w, r, err) - return - } - - sharedapi.NoContent(w) -} - -func deleteAccountMetadata(w http.ResponseWriter, r *http.Request) { - param, err := url.PathUnescape(chi.URLParam(r, "address")) - if err != nil { - sharedapi.BadRequestWithDetails(w, ErrValidation, err, err.Error()) - return - } - - if err := backend.LedgerFromContext(r.Context()). 
- DeleteMetadata( - r.Context(), - getCommandParameters(r), - ledger.MetaTargetTypeAccount, - param, - chi.URLParam(r, "key"), - ); err != nil { - sharedapi.InternalServerError(w, r, err) - return - } - - sharedapi.NoContent(w) + return query.And(clauses...), nil } diff --git a/internal/api/v1/controllers_accounts_add_metadata.go b/internal/api/v1/controllers_accounts_add_metadata.go new file mode 100644 index 000000000..eabbaebec --- /dev/null +++ b/internal/api/v1/controllers_accounts_add_metadata.go @@ -0,0 +1,46 @@ +package v1 + +import ( + "encoding/json" + "github.com/formancehq/ledger/internal/controller/ledger" + "github.com/formancehq/ledger/pkg/accounts" + "net/http" + "net/url" + + "errors" + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/metadata" + "github.com/formancehq/ledger/internal/api/common" + "github.com/go-chi/chi/v5" +) + +func addAccountMetadata(w http.ResponseWriter, r *http.Request) { + l := common.LedgerFromContext(r.Context()) + address, err := url.PathUnescape(chi.URLParam(r, "address")) + if err != nil { + api.BadRequestWithDetails(w, ErrValidation, err, err.Error()) + return + } + + if !accounts.ValidateAddress(address) { + api.BadRequest(w, ErrValidation, errors.New("invalid account address format")) + return + } + + var m metadata.Metadata + if err := json.NewDecoder(r.Body).Decode(&m); err != nil { + api.BadRequest(w, ErrValidation, errors.New("invalid metadata format")) + return + } + + err = l.SaveAccountMetadata(r.Context(), getCommandParameters(r, ledger.SaveAccountMetadata{ + Address: address, + Metadata: m, + })) + if err != nil { + common.HandleCommonErrors(w, r, err) + return + } + + api.NoContent(w) +} diff --git a/internal/api/v1/controllers_accounts_add_metadata_test.go b/internal/api/v1/controllers_accounts_add_metadata_test.go new file mode 100644 index 000000000..08f78771a --- /dev/null +++ b/internal/api/v1/controllers_accounts_add_metadata_test.go @@ -0,0 +1,117 @@ +package v1 + +import ( + "net/http" + "net/http/httptest" + "net/url" + "os" + "testing" + + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/auth" + "github.com/formancehq/go-libs/v2/metadata" + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" +) + +func TestAccountsAddMetadata(t *testing.T) { + t.Parallel() + + type testCase struct { + name string + queryParams url.Values + expectStatusCode int + expectedErrorCode string + account string + body any + } + + testCases := []testCase{ + { + name: "nominal", + account: "world", + body: metadata.Metadata{ + "foo": "bar", + }, + }, + { + name: "nominal dash 1", + account: "-test", + body: metadata.Metadata{ + "foo": "bar", + }, + }, + { + name: "nominal dash 2", + account: "-", + body: metadata.Metadata{ + "foo": "bar", + }, + }, + { + name: "nominal dash 2", + account: "-tes--t--t--t-----", + body: metadata.Metadata{ + "foo": "bar", + }, + }, + { + name: "invalid body", + account: "world", + body: "invalid - not an object", + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + }, + { + name: "malformed account address", + account: "%8X%2F", + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + }, + { + name: "invalid account address", + account: "1\abc", + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + }, + } + for _, testCase := range testCases { + testCase := testCase + t.Run(testCase.name, func(t 
*testing.T) { + + if testCase.expectStatusCode == 0 { + testCase.expectStatusCode = http.StatusNoContent + } + + systemController, ledgerController := newTestingSystemController(t, true) + if testCase.expectStatusCode == http.StatusNoContent { + ledgerController.EXPECT(). + SaveAccountMetadata(gomock.Any(), ledgercontroller.Parameters[ledgercontroller.SaveAccountMetadata]{ + Input: ledgercontroller.SaveAccountMetadata{ + Address: testCase.account, + Metadata: testCase.body.(metadata.Metadata), + }, + }). + Return(nil) + } + + router := NewRouter(systemController, auth.NewNoAuth(), "develop", os.Getenv("DEBUG") == "true") + + req := httptest.NewRequest(http.MethodPost, "/", api.Buffer(t, testCase.body)) + req.URL.Path = "/xxx/accounts/" + testCase.account + "/metadata" + rec := httptest.NewRecorder() + req.URL.RawQuery = testCase.queryParams.Encode() + + router.ServeHTTP(rec, req) + + require.Equal(t, testCase.expectStatusCode, rec.Code) + if testCase.expectStatusCode >= 300 || testCase.expectStatusCode < 200 { + err := api.ErrorResponse{} + api.Decode(t, rec.Body, &err) + require.EqualValues(t, testCase.expectedErrorCode, err.ErrorCode) + } + }) + } +} diff --git a/internal/api/v1/controllers_accounts_count.go b/internal/api/v1/controllers_accounts_count.go new file mode 100644 index 000000000..6501b65e7 --- /dev/null +++ b/internal/api/v1/controllers_accounts_count.go @@ -0,0 +1,47 @@ +package v1 + +import ( + "fmt" + "net/http" + + "errors" + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/bun/bunpaginate" + "github.com/formancehq/go-libs/v2/pointer" + "github.com/formancehq/ledger/internal/api/common" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" +) + +func countAccounts(w http.ResponseWriter, r *http.Request) { + l := common.LedgerFromContext(r.Context()) + + query, err := bunpaginate.Extract[ledgercontroller.ListAccountsQuery](r, func() (*ledgercontroller.ListAccountsQuery, error) { + options, err := getPaginatedQueryOptionsOfPITFilterWithVolumes(r) + if err != nil { + return nil, err + } + options.QueryBuilder, err = buildAccountsFilterQuery(r) + if err != nil { + return nil, err + } + return pointer.For(ledgercontroller.NewListAccountsQuery(*options)), nil + }) + if err != nil { + api.BadRequest(w, ErrValidation, err) + return + } + + count, err := l.CountAccounts(r.Context(), *query) + if err != nil { + switch { + case errors.Is(err, ledgercontroller.ErrInvalidQuery{}) || errors.Is(err, ledgercontroller.ErrMissingFeature{}): + api.BadRequest(w, ErrValidation, err) + default: + common.HandleCommonErrors(w, r, err) + } + return + } + + w.Header().Set("Count", fmt.Sprint(count)) + api.NoContent(w) +} diff --git a/internal/api/v1/controllers_accounts_count_test.go b/internal/api/v1/controllers_accounts_count_test.go new file mode 100644 index 000000000..4da6a9603 --- /dev/null +++ b/internal/api/v1/controllers_accounts_count_test.go @@ -0,0 +1,185 @@ +package v1 + +import ( + "net/http" + "net/http/httptest" + "net/url" + os "os" + "testing" + + "errors" + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/auth" + "github.com/formancehq/go-libs/v2/query" + "github.com/formancehq/go-libs/v2/time" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" +) + +func TestAccountsCount(t *testing.T) { + t.Parallel() + + type testCase struct { + name string + queryParams url.Values + expectQuery 
ledgercontroller.PaginatedQueryOptions[ledgercontroller.PITFilterWithVolumes] + expectStatusCode int + expectedErrorCode string + returnErr error + expectBackendCall bool + } + before := time.Now() + + testCases := []testCase{ + { + name: "nominal", + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &before, + }, + }). + WithPageSize(DefaultPageSize), + expectBackendCall: true, + }, + { + name: "using metadata", + queryParams: url.Values{ + "metadata[roles]": []string{"admin"}, + }, + expectBackendCall: true, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &before, + }, + }). + WithQueryBuilder(query.Match("metadata[roles]", "admin")). + WithPageSize(DefaultPageSize), + }, + { + name: "using address", + queryParams: url.Values{"address": []string{"foo"}}, + expectBackendCall: true, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &before, + }, + }). + WithQueryBuilder(query.Match("address", "foo")). + WithPageSize(DefaultPageSize), + }, + { + name: "invalid page size", + queryParams: url.Values{ + "pageSize": []string{"nan"}, + }, + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + }, + { + name: "page size over maximum", + expectBackendCall: true, + queryParams: url.Values{ + "pageSize": []string{"1000000"}, + }, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &before, + }, + }). + WithPageSize(MaxPageSize), + }, + { + name: "using balance filter", + queryParams: url.Values{ + "balanceOperator": []string{"lt"}, + "balance": []string{"100"}, + }, + expectBackendCall: true, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &before, + }, + }). + WithQueryBuilder(query.Lt("balance", int64(100))). + WithPageSize(DefaultPageSize), + }, + { + name: "with invalid query from core point of view", + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + expectBackendCall: true, + returnErr: ledgercontroller.ErrInvalidQuery{}, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &before, + }, + }). + WithPageSize(DefaultPageSize), + }, + { + name: "with missing feature", + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + expectBackendCall: true, + returnErr: ledgercontroller.ErrMissingFeature{}, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &before, + }, + }). + WithPageSize(DefaultPageSize), + }, + { + name: "with unexpected error", + expectStatusCode: http.StatusInternalServerError, + expectedErrorCode: api.ErrorInternal, + expectBackendCall: true, + returnErr: errors.New("undefined error"), + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &before, + }, + }). 
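// Illustrative note: as countAccounts above shows, the v1 count endpoint answers a HEAD
// request on the collection with 204 No Content and the total carried in the "Count"
// response header, which is exactly what the assertions in the test loop below check.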
+ WithPageSize(DefaultPageSize), + }, + } + for _, testCase := range testCases { + testCase := testCase + t.Run(testCase.name, func(t *testing.T) { + + if testCase.expectStatusCode == 0 { + testCase.expectStatusCode = http.StatusNoContent + } + + systemController, ledgerController := newTestingSystemController(t, true) + if testCase.expectBackendCall { + ledgerController.EXPECT(). + CountAccounts(gomock.Any(), ledgercontroller.NewListAccountsQuery(testCase.expectQuery)). + Return(10, testCase.returnErr) + } + + router := NewRouter(systemController, auth.NewNoAuth(), "develop", os.Getenv("DEBUG") == "true") + + req := httptest.NewRequest(http.MethodHead, "/xxx/accounts?pit="+before.Format(time.RFC3339Nano), nil) + rec := httptest.NewRecorder() + params := url.Values{} + if testCase.queryParams != nil { + params = testCase.queryParams + } + params.Set("pit", before.Format(time.RFC3339Nano)) + req.URL.RawQuery = params.Encode() + + router.ServeHTTP(rec, req) + + require.Equal(t, testCase.expectStatusCode, rec.Code) + if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { + require.Equal(t, "10", rec.Header().Get("Count")) + } else { + err := api.ErrorResponse{} + api.Decode(t, rec.Body, &err) + require.EqualValues(t, testCase.expectedErrorCode, err.ErrorCode) + } + }) + } +} diff --git a/internal/api/v1/controllers_accounts_delete_metadata.go b/internal/api/v1/controllers_accounts_delete_metadata.go new file mode 100644 index 000000000..db1d2b6c9 --- /dev/null +++ b/internal/api/v1/controllers_accounts_delete_metadata.go @@ -0,0 +1,33 @@ +package v1 + +import ( + "github.com/formancehq/ledger/internal/controller/ledger" + "net/http" + "net/url" + + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/ledger/internal/api/common" + "github.com/go-chi/chi/v5" +) + +func deleteAccountMetadata(w http.ResponseWriter, r *http.Request) { + address, err := url.PathUnescape(chi.URLParam(r, "address")) + if err != nil { + api.BadRequestWithDetails(w, ErrValidation, err, err.Error()) + return + } + + if err := common.LedgerFromContext(r.Context()). 
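// Illustrative note: the metadata key is taken verbatim from the URL path parameter, and
// getCommandParameters is assumed to wrap the typed input into the generic
// ledgercontroller.Parameters envelope (the tests below expect it with only Input populated).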
+ DeleteAccountMetadata( + r.Context(), + getCommandParameters(r, ledger.DeleteAccountMetadata{ + Address: address, + Key: chi.URLParam(r, "key"), + }), + ); err != nil { + common.HandleCommonErrors(w, r, err) + return + } + + api.NoContent(w) +} diff --git a/internal/api/v1/controllers_accounts_delete_metadata_test.go b/internal/api/v1/controllers_accounts_delete_metadata_test.go new file mode 100644 index 000000000..85d2d2eaf --- /dev/null +++ b/internal/api/v1/controllers_accounts_delete_metadata_test.go @@ -0,0 +1,97 @@ +package v1 + +import ( + "encoding/json" + "net/http" + "net/http/httptest" + "net/url" + "os" + "testing" + + "errors" + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/auth" + "github.com/formancehq/go-libs/v2/logging" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" +) + +func TestAccountsDeleteMetadata(t *testing.T) { + t.Parallel() + + ctx := logging.TestingContext() + + type testCase struct { + name string + queryParams url.Values + returnErr error + expectedStatusCode int + expectedErrorCode string + expectBackendCall bool + account string + } + + for _, tc := range []testCase{ + { + name: "nominal", + expectBackendCall: true, + account: "account0", + }, + { + name: "unexpected backend error", + expectBackendCall: true, + returnErr: errors.New("undefined error"), + expectedStatusCode: http.StatusInternalServerError, + expectedErrorCode: api.ErrorInternal, + account: "account0", + }, + { + name: "invalid account address", + account: "%8X%2F", + expectedStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + expectBackendCall: false, + }, + } { + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + systemController, ledgerController := newTestingSystemController(t, true) + + if tc.expectBackendCall { + ledgerController.EXPECT(). + DeleteAccountMetadata( + gomock.Any(), + ledgercontroller.Parameters[ledgercontroller.DeleteAccountMetadata]{ + Input: ledgercontroller.DeleteAccountMetadata{ + Address: tc.account, + Key: "foo", + }, + }, + ). 
+ Return(tc.returnErr) + } + + router := NewRouter(systemController, auth.NewNoAuth(), "develop", os.Getenv("DEBUG") == "true") + + req := httptest.NewRequest(http.MethodDelete, "/", nil) + req.URL.Path = "/ledger0/accounts/" + tc.account + "/metadata/foo" + req = req.WithContext(ctx) + req.URL.RawQuery = tc.queryParams.Encode() + + rec := httptest.NewRecorder() + + router.ServeHTTP(rec, req) + + if tc.expectedStatusCode == 0 || tc.expectedStatusCode == http.StatusOK { + require.Equal(t, http.StatusNoContent, rec.Code) + } else { + require.Equal(t, tc.expectedStatusCode, rec.Code) + errorResponse := api.ErrorResponse{} + require.NoError(t, json.Unmarshal(rec.Body.Bytes(), &errorResponse)) + require.Equal(t, tc.expectedErrorCode, errorResponse.ErrorCode) + } + }) + } +} diff --git a/internal/api/v1/controllers_accounts_list.go b/internal/api/v1/controllers_accounts_list.go new file mode 100644 index 000000000..ead1ed9b9 --- /dev/null +++ b/internal/api/v1/controllers_accounts_list.go @@ -0,0 +1,45 @@ +package v1 + +import ( + "net/http" + + "errors" + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/bun/bunpaginate" + "github.com/formancehq/go-libs/v2/pointer" + "github.com/formancehq/ledger/internal/api/common" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" +) + +func listAccounts(w http.ResponseWriter, r *http.Request) { + l := common.LedgerFromContext(r.Context()) + + query, err := bunpaginate.Extract[ledgercontroller.ListAccountsQuery](r, func() (*ledgercontroller.ListAccountsQuery, error) { + options, err := getPaginatedQueryOptionsOfPITFilterWithVolumes(r) + if err != nil { + return nil, err + } + options.QueryBuilder, err = buildAccountsFilterQuery(r) + if err != nil { + return nil, err + } + return pointer.For(ledgercontroller.NewListAccountsQuery(*options)), nil + }) + if err != nil { + api.BadRequest(w, ErrValidation, err) + return + } + + cursor, err := l.ListAccounts(r.Context(), *query) + if err != nil { + switch { + case errors.Is(err, ledgercontroller.ErrMissingFeature{}): + api.BadRequest(w, ErrValidation, err) + default: + common.HandleCommonErrors(w, r, err) + } + return + } + + api.RenderCursor(w, *cursor) +} diff --git a/internal/api/v1/controllers_accounts_list_test.go b/internal/api/v1/controllers_accounts_list_test.go new file mode 100644 index 000000000..1c8e08b70 --- /dev/null +++ b/internal/api/v1/controllers_accounts_list_test.go @@ -0,0 +1,158 @@ +package v1 + +import ( + "net/http" + "net/http/httptest" + "net/url" + "os" + "testing" + + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/auth" + "github.com/formancehq/go-libs/v2/bun/bunpaginate" + "github.com/formancehq/go-libs/v2/metadata" + "github.com/formancehq/go-libs/v2/query" + ledger "github.com/formancehq/ledger/internal" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" +) + +func TestAccountsList(t *testing.T) { + t.Parallel() + + type testCase struct { + name string + queryParams url.Values + expectQuery ledgercontroller.PaginatedQueryOptions[ledgercontroller.PITFilterWithVolumes] + expectStatusCode int + expectedErrorCode string + expectBackendCall bool + returnErr error + } + + testCases := []testCase{ + { + name: "nominal", + expectBackendCall: true, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). 
+ WithPageSize(DefaultPageSize), + }, + { + name: "using metadata", + queryParams: url.Values{ + "metadata[roles]": []string{"admin"}, + }, + expectBackendCall: true, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("metadata[roles]", "admin")). + WithPageSize(DefaultPageSize), + }, + { + name: "using address", + queryParams: url.Values{ + "address": []string{"foo"}, + }, + expectBackendCall: true, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("address", "foo")). + WithPageSize(DefaultPageSize), + }, + { + name: "using empty cursor", + queryParams: url.Values{ + "cursor": []string{bunpaginate.EncodeCursor(ledgercontroller.NewListAccountsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{})))}, + }, + expectBackendCall: true, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}), + }, + { + name: "using invalid cursor", + queryParams: url.Values{ + "cursor": []string{"XXX"}, + }, + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + }, + { + name: "invalid page size", + queryParams: url.Values{ + "pageSize": []string{"nan"}, + }, + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + }, + { + name: "page size over maximum", + queryParams: url.Values{ + "pageSize": []string{"1000000"}, + }, + expectBackendCall: true, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithPageSize(MaxPageSize), + }, + { + name: "using balance filter", + queryParams: url.Values{ + "balance": []string{"100"}, + "balanceOperator": []string{"e"}, + }, + expectBackendCall: true, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("balance", int64(100))). + WithPageSize(DefaultPageSize), + }, + { + name: "with missing feature", + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + returnErr: ledgercontroller.ErrMissingFeature{}, + expectBackendCall: true, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithPageSize(DefaultPageSize), + }, + } + for _, testCase := range testCases { + testCase := testCase + t.Run(testCase.name, func(t *testing.T) { + + if testCase.expectStatusCode == 0 { + testCase.expectStatusCode = http.StatusOK + } + + expectedCursor := bunpaginate.Cursor[ledger.Account]{ + Data: []ledger.Account{ + { + Address: "world", + Metadata: metadata.Metadata{}, + }, + }, + } + + systemController, ledgerController := newTestingSystemController(t, true) + if testCase.expectBackendCall { + ledgerController.EXPECT(). + ListAccounts(gomock.Any(), ledgercontroller.NewListAccountsQuery(testCase.expectQuery)). 
+ Return(&expectedCursor, testCase.returnErr) + } + + router := NewRouter(systemController, auth.NewNoAuth(), "develop", os.Getenv("DEBUG") == "true") + + req := httptest.NewRequest(http.MethodGet, "/xxx/accounts", nil) + rec := httptest.NewRecorder() + req.URL.RawQuery = testCase.queryParams.Encode() + + router.ServeHTTP(rec, req) + + require.Equal(t, testCase.expectStatusCode, rec.Code) + if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { + cursor := api.DecodeCursorResponse[ledger.Account](t, rec.Body) + require.Equal(t, expectedCursor, *cursor) + } else { + err := api.ErrorResponse{} + api.Decode(t, rec.Body, &err) + require.EqualValues(t, testCase.expectedErrorCode, err.ErrorCode) + } + }) + } +} diff --git a/internal/api/v1/controllers_accounts_read.go b/internal/api/v1/controllers_accounts_read.go new file mode 100644 index 000000000..17fa27dc0 --- /dev/null +++ b/internal/api/v1/controllers_accounts_read.go @@ -0,0 +1,45 @@ +package v1 + +import ( + "net/http" + "net/url" + + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/metadata" + "github.com/formancehq/go-libs/v2/platform/postgres" + ledger "github.com/formancehq/ledger/internal" + "github.com/formancehq/ledger/internal/api/common" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + "github.com/go-chi/chi/v5" +) + +func getAccount(w http.ResponseWriter, r *http.Request) { + l := common.LedgerFromContext(r.Context()) + + address, err := url.PathUnescape(chi.URLParam(r, "address")) + if err != nil { + api.BadRequestWithDetails(w, ErrValidation, err, err.Error()) + return + } + + query := ledgercontroller.NewGetAccountQuery(address) + query = query.WithExpandVolumes() + + acc, err := l.GetAccount(r.Context(), query) + if err != nil { + switch { + case postgres.IsNotFoundError(err): + acc = &ledger.Account{ + Address: address, + Metadata: metadata.Metadata{}, + Volumes: ledger.VolumesByAssets{}, + EffectiveVolumes: ledger.VolumesByAssets{}, + } + default: + common.HandleCommonErrors(w, r, err) + return + } + } + + api.Ok(w, accountWithVolumesAndBalances(*acc)) +} diff --git a/internal/api/v1/controllers_accounts_read_test.go b/internal/api/v1/controllers_accounts_read_test.go new file mode 100644 index 000000000..b7ad0f6e5 --- /dev/null +++ b/internal/api/v1/controllers_accounts_read_test.go @@ -0,0 +1,104 @@ +package v1 + +import ( + "bytes" + "net/http" + "net/http/httptest" + "net/url" + "os" + "testing" + + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/auth" + "github.com/formancehq/go-libs/v2/platform/postgres" + ledger "github.com/formancehq/ledger/internal" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" +) + +func TestAccountsRead(t *testing.T) { + t.Parallel() + + type testCase struct { + name string + queryParams url.Values + body string + expectQuery ledgercontroller.GetAccountQuery + expectStatusCode int + expectedErrorCode string + expectBackendCall bool + returnErr error + account string + } + + testCases := []testCase{ + { + name: "nominal", + account: "foo", + expectQuery: ledgercontroller.NewGetAccountQuery("foo").WithExpandVolumes(), + expectBackendCall: true, + }, + { + name: "with expand volumes", + account: "foo", + expectQuery: ledgercontroller.NewGetAccountQuery("foo").WithExpandVolumes(), + expectBackendCall: true, + queryParams: url.Values{ + "expand": {"volumes"}, + }, + }, + { + name: "invalid account 
address", + account: "%8X%2F", + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + }, + { + name: "with not existing account", + account: "foo", + expectQuery: ledgercontroller.NewGetAccountQuery("foo").WithExpandVolumes(), + expectBackendCall: true, + returnErr: postgres.ErrNotFound, + }, + } + for _, testCase := range testCases { + tc := testCase + t.Run(tc.name, func(t *testing.T) { + + if tc.expectStatusCode == 0 { + tc.expectStatusCode = http.StatusOK + } + + systemController, ledgerController := newTestingSystemController(t, true) + if tc.expectBackendCall { + ledgerController.EXPECT(). + GetAccount(gomock.Any(), tc.expectQuery). + Return(&ledger.Account{}, tc.returnErr) + } + + router := NewRouter(systemController, auth.NewNoAuth(), "develop", os.Getenv("DEBUG") == "true") + + req := httptest.NewRequest(http.MethodGet, "/", bytes.NewBufferString(tc.body)) + req.URL.Path = "/xxx/accounts/" + tc.account + rec := httptest.NewRecorder() + params := url.Values{} + if tc.queryParams != nil { + params = tc.queryParams + } + req.URL.RawQuery = params.Encode() + + router.ServeHTTP(rec, req) + + require.Equal(t, tc.expectStatusCode, rec.Code) + if tc.expectStatusCode < 300 && tc.expectStatusCode >= 200 { + _, ok := api.DecodeSingleResponse[ledger.Account](t, rec.Body) + require.True(t, ok) + } else { + err := api.ErrorResponse{} + api.Decode(t, rec.Body, &err) + require.EqualValues(t, tc.expectedErrorCode, err.ErrorCode) + } + }) + } +} diff --git a/internal/api/v1/controllers_accounts_test.go b/internal/api/v1/controllers_accounts_test.go deleted file mode 100644 index 1b391c390..000000000 --- a/internal/api/v1/controllers_accounts_test.go +++ /dev/null @@ -1,282 +0,0 @@ -package v1_test - -import ( - "net/http" - "net/http/httptest" - "net/url" - "testing" - - "github.com/formancehq/go-libs/auth" - "github.com/formancehq/go-libs/bun/bunpaginate" - - sharedapi "github.com/formancehq/go-libs/api" - "github.com/formancehq/go-libs/metadata" - "github.com/formancehq/go-libs/query" - ledger "github.com/formancehq/ledger/internal" - v1 "github.com/formancehq/ledger/internal/api/v1" - "github.com/formancehq/ledger/internal/engine/command" - "github.com/formancehq/ledger/internal/opentelemetry/metrics" - "github.com/formancehq/ledger/internal/storage/ledgerstore" - "github.com/stretchr/testify/require" - "go.uber.org/mock/gomock" -) - -func TestGetAccounts(t *testing.T) { - t.Parallel() - - type testCase struct { - name string - queryParams url.Values - expectQuery ledgerstore.PaginatedQueryOptions[ledgerstore.PITFilterWithVolumes] - expectStatusCode int - expectedErrorCode string - } - - testCases := []testCase{ - { - name: "nominal", - expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). - WithPageSize(v1.DefaultPageSize), - }, - { - name: "using metadata", - queryParams: url.Values{ - "metadata[roles]": []string{"admin"}, - }, - expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). - WithQueryBuilder(query.And(query.Match("metadata[roles]", "admin"))). - WithPageSize(v1.DefaultPageSize), - }, - { - name: "using address", - queryParams: url.Values{ - "address": []string{"foo"}, - }, - expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). - WithQueryBuilder(query.And(query.Match("address", "foo"))). 
- WithPageSize(v1.DefaultPageSize), - }, - { - name: "using empty cursor", - queryParams: url.Values{ - "cursor": []string{bunpaginate.EncodeCursor(ledgerstore.NewGetAccountsQuery(ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{})))}, - }, - expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}), - }, - { - name: "using invalid cursor", - queryParams: url.Values{ - "cursor": []string{"XXX"}, - }, - expectStatusCode: http.StatusBadRequest, - expectedErrorCode: v1.ErrValidation, - }, - { - name: "invalid page size", - queryParams: url.Values{ - "pageSize": []string{"nan"}, - }, - expectStatusCode: http.StatusBadRequest, - expectedErrorCode: v1.ErrValidation, - }, - { - name: "page size over maximum", - queryParams: url.Values{ - "pageSize": []string{"1000000"}, - }, - expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). - WithPageSize(v1.MaxPageSize), - }, - { - name: "using balance filter", - queryParams: url.Values{ - "balance": []string{"100"}, - "balanceOperator": []string{"e"}, - }, - expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). - WithQueryBuilder(query.And(query.Match("balance", "100"))). - WithPageSize(v1.DefaultPageSize), - }, - } - for _, testCase := range testCases { - testCase := testCase - t.Run(testCase.name, func(t *testing.T) { - - if testCase.expectStatusCode == 0 { - testCase.expectStatusCode = http.StatusOK - } - - expectedCursor := bunpaginate.Cursor[ledger.ExpandedAccount]{ - Data: []ledger.ExpandedAccount{ - { - Account: ledger.Account{ - Address: "world", - Metadata: metadata.Metadata{}, - }, - }, - }, - } - - backend, mockLedger := newTestingBackend(t, true) - if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { - mockLedger.EXPECT(). - GetAccountsWithVolumes(gomock.Any(), ledgerstore.NewGetAccountsQuery(testCase.expectQuery)). - Return(&expectedCursor, nil) - } - - router := v1.NewRouter(backend, nil, metrics.NewNoOpRegistry(), auth.NewNoAuth(), testing.Verbose()) - - req := httptest.NewRequest(http.MethodGet, "/xxx/accounts", nil) - rec := httptest.NewRecorder() - req.URL.RawQuery = testCase.queryParams.Encode() - - router.ServeHTTP(rec, req) - - require.Equal(t, testCase.expectStatusCode, rec.Code) - if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { - cursor := sharedapi.DecodeCursorResponse[ledger.ExpandedAccount](t, rec.Body) - require.Equal(t, expectedCursor, *cursor) - } else { - err := sharedapi.ErrorResponse{} - sharedapi.Decode(t, rec.Body, &err) - require.EqualValues(t, testCase.expectedErrorCode, err.ErrorCode) - } - }) - } -} - -func TestGetAccount(t *testing.T) { - t.Parallel() - - account := ledger.ExpandedAccount{ - Account: ledger.Account{ - Address: "foo", - Metadata: metadata.Metadata{}, - }, - } - - backend, mock := newTestingBackend(t, true) - mock.EXPECT(). - GetAccountWithVolumes(gomock.Any(), ledgerstore.NewGetAccountQuery("foo").WithExpandVolumes()). 
- Return(&account, nil) - - router := v1.NewRouter(backend, nil, metrics.NewNoOpRegistry(), auth.NewNoAuth(), testing.Verbose()) - - req := httptest.NewRequest(http.MethodGet, "/xxx/accounts/foo", nil) - rec := httptest.NewRecorder() - - router.ServeHTTP(rec, req) - - require.Equal(t, http.StatusOK, rec.Code) - response, _ := sharedapi.DecodeSingleResponse[ledger.ExpandedAccount](t, rec.Body) - require.Equal(t, account, response) -} - -func TestGetAccountWithEncoded(t *testing.T) { - t.Parallel() - - account := ledger.ExpandedAccount{ - Account: ledger.Account{ - Address: "foo:bar", - Metadata: metadata.Metadata{}, - }, - } - - backend, mock := newTestingBackend(t, true) - mock.EXPECT(). - GetAccountWithVolumes(gomock.Any(), ledgerstore.NewGetAccountQuery("foo:bar").WithExpandVolumes()). - Return(&account, nil) - - router := v1.NewRouter(backend, nil, metrics.NewNoOpRegistry(), auth.NewNoAuth(), testing.Verbose()) - - req := httptest.NewRequest(http.MethodGet, "/xxx/accounts/foo%3Abar", nil) - rec := httptest.NewRecorder() - - router.ServeHTTP(rec, req) - - require.Equal(t, http.StatusOK, rec.Code) - response, _ := sharedapi.DecodeSingleResponse[ledger.ExpandedAccount](t, rec.Body) - require.Equal(t, account, response) -} - -func TestPostAccountMetadata(t *testing.T) { - t.Parallel() - - type testCase struct { - name string - queryParams url.Values - expectStatusCode int - expectedErrorCode string - account string - body any - } - - testCases := []testCase{ - { - name: "nominal", - account: "world", - body: metadata.Metadata{ - "foo": "bar", - }, - }, - { - name: "nominal dash 1", - account: "-test", - body: metadata.Metadata{ - "foo": "bar", - }, - }, - { - name: "nominal dash 2", - account: "-", - body: metadata.Metadata{ - "foo": "bar", - }, - }, - { - name: "nominal dash 2", - account: "-tes--t--t--t-----", - body: metadata.Metadata{ - "foo": "bar", - }, - }, - { - name: "invalid body", - account: "world", - body: "invalid - not an object", - expectStatusCode: http.StatusBadRequest, - expectedErrorCode: v1.ErrValidation, - }, - } - for _, testCase := range testCases { - testCase := testCase - t.Run(testCase.name, func(t *testing.T) { - - if testCase.expectStatusCode == 0 { - testCase.expectStatusCode = http.StatusNoContent - } - - backend, mock := newTestingBackend(t, true) - if testCase.expectStatusCode == http.StatusNoContent { - mock.EXPECT(). - SaveMeta(gomock.Any(), command.Parameters{}, ledger.MetaTargetTypeAccount, testCase.account, testCase.body). 
- Return(nil) - } - - router := v1.NewRouter(backend, nil, metrics.NewNoOpRegistry(), auth.NewNoAuth(), testing.Verbose()) - - req := httptest.NewRequest(http.MethodPost, "/xxx/accounts/"+testCase.account+"/metadata", sharedapi.Buffer(t, testCase.body)) - rec := httptest.NewRecorder() - req.URL.RawQuery = testCase.queryParams.Encode() - - router.ServeHTTP(rec, req) - - require.Equal(t, testCase.expectStatusCode, rec.Code) - if testCase.expectStatusCode >= 300 || testCase.expectStatusCode < 200 { - err := sharedapi.ErrorResponse{} - sharedapi.Decode(t, rec.Body, &err) - require.EqualValues(t, testCase.expectedErrorCode, err.ErrorCode) - } - }) - } -} diff --git a/internal/api/v1/controllers_balances.go b/internal/api/v1/controllers_balances.go deleted file mode 100644 index 23025a7d1..000000000 --- a/internal/api/v1/controllers_balances.go +++ /dev/null @@ -1,92 +0,0 @@ -package v1 - -import ( - "math/big" - "net/http" - - "github.com/formancehq/go-libs/pointer" - - "github.com/formancehq/go-libs/bun/bunpaginate" - - sharedapi "github.com/formancehq/go-libs/api" - "github.com/formancehq/go-libs/query" - "github.com/formancehq/ledger/internal/api/backend" - "github.com/formancehq/ledger/internal/storage/ledgerstore" -) - -func buildAggregatedBalancesQuery(r *http.Request) (query.Builder, error) { - if address := r.URL.Query().Get("address"); address != "" { - return query.Match("address", address), nil - } - - return nil, nil -} - -func getBalancesAggregated(w http.ResponseWriter, r *http.Request) { - - pitFilter, err := getPITFilter(r) - if err != nil { - sharedapi.BadRequest(w, ErrValidation, err) - return - } - - queryBuilder, err := buildAggregatedBalancesQuery(r) - if err != nil { - sharedapi.BadRequest(w, ErrValidation, err) - return - } - - query := ledgerstore.NewGetAggregatedBalancesQuery(*pitFilter, queryBuilder, - // notes(gfyrag): if pit is not specified, always use insertion date to be backward compatible - r.URL.Query().Get("pit") == "" || sharedapi.QueryParamBool(r, "useInsertionDate") || sharedapi.QueryParamBool(r, "use_insertion_date")) - - balances, err := backend.LedgerFromContext(r.Context()).GetAggregatedBalances(r.Context(), query) - if err != nil { - sharedapi.InternalServerError(w, r, err) - return - } - - sharedapi.Ok(w, balances) -} - -func getBalances(w http.ResponseWriter, r *http.Request) { - l := backend.LedgerFromContext(r.Context()) - - q, err := bunpaginate.Extract[ledgerstore.GetAccountsQuery](r, func() (*ledgerstore.GetAccountsQuery, error) { - options, err := getPaginatedQueryOptionsOfPITFilterWithVolumes(r) - if err != nil { - return nil, err - } - options.QueryBuilder, err = buildAccountsFilterQuery(r) - return pointer.For(ledgerstore.NewGetAccountsQuery(*options)), nil - }) - if err != nil { - sharedapi.BadRequest(w, ErrValidation, err) - return - } - - cursor, err := l.GetAccountsWithVolumes(r.Context(), q.WithExpandVolumes()) - if err != nil { - sharedapi.InternalServerError(w, r, err) - return - } - - ret := make([]map[string]map[string]*big.Int, 0) - for _, item := range cursor.Data { - e := map[string]map[string]*big.Int{ - item.Address: {}, - } - for asset, volumes := range item.Volumes { - e[item.Address][asset] = volumes.Balance() - } - ret = append(ret, e) - } - - sharedapi.RenderCursor(w, bunpaginate.Cursor[map[string]map[string]*big.Int]{ - PageSize: cursor.PageSize, - HasMore: cursor.HasMore, - Previous: cursor.Previous, - Next: cursor.Next, - Data: ret, - }) -} diff --git a/internal/api/v1/controllers_balances_aggregates.go 
b/internal/api/v1/controllers_balances_aggregates.go new file mode 100644 index 000000000..d17187cde --- /dev/null +++ b/internal/api/v1/controllers_balances_aggregates.go @@ -0,0 +1,41 @@ +package v1 + +import ( + "net/http" + + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/query" + "github.com/formancehq/ledger/internal/api/common" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" +) + +func buildAggregatedBalancesQuery(r *http.Request) query.Builder { + if address := r.URL.Query().Get("address"); address != "" { + return query.Match("address", address) + } + + return nil +} + +func getBalancesAggregated(w http.ResponseWriter, r *http.Request) { + + pitFilter, err := getPITFilter(r) + if err != nil { + api.BadRequest(w, ErrValidation, err) + return + } + + queryBuilder := buildAggregatedBalancesQuery(r) + + query := ledgercontroller.NewGetAggregatedBalancesQuery(*pitFilter, queryBuilder, + // notes(gfyrag): if pit is not specified, always use insertion date to be backward compatible + r.URL.Query().Get("pit") == "" || api.QueryParamBool(r, "useInsertionDate") || api.QueryParamBool(r, "use_insertion_date")) + + balances, err := common.LedgerFromContext(r.Context()).GetAggregatedBalances(r.Context(), query) + if err != nil { + common.HandleCommonErrors(w, r, err) + return + } + + api.Ok(w, balances) +} diff --git a/internal/api/v1/controllers_balances_test.go b/internal/api/v1/controllers_balances_aggregates_test.go similarity index 61% rename from internal/api/v1/controllers_balances_test.go rename to internal/api/v1/controllers_balances_aggregates_test.go index 8482dc97c..18e0f535a 100644 --- a/internal/api/v1/controllers_balances_test.go +++ b/internal/api/v1/controllers_balances_aggregates_test.go @@ -1,32 +1,32 @@ -package v1_test +package v1 import ( "math/big" "net/http" "net/http/httptest" "net/url" + "os" "testing" - "github.com/formancehq/go-libs/time" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" - sharedapi "github.com/formancehq/go-libs/api" - "github.com/formancehq/go-libs/auth" - "github.com/formancehq/go-libs/query" + "github.com/formancehq/go-libs/v2/time" + + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/auth" + "github.com/formancehq/go-libs/v2/query" ledger "github.com/formancehq/ledger/internal" - v1 "github.com/formancehq/ledger/internal/api/v1" - "github.com/formancehq/ledger/internal/opentelemetry/metrics" - "github.com/formancehq/ledger/internal/storage/ledgerstore" "github.com/stretchr/testify/require" "go.uber.org/mock/gomock" ) -func TestGetBalancesAggregated(t *testing.T) { +func TestBalancesAggregates(t *testing.T) { t.Parallel() type testCase struct { name string queryParams url.Values - expectQuery ledgerstore.GetAggregatedBalanceQuery + expectQuery ledgercontroller.GetAggregatedBalanceQuery } now := time.Now() @@ -34,7 +34,7 @@ func TestGetBalancesAggregated(t *testing.T) { testCases := []testCase{ { name: "nominal", - expectQuery: ledgerstore.GetAggregatedBalanceQuery{ + expectQuery: ledgercontroller.GetAggregatedBalanceQuery{ UseInsertionDate: true, }, }, @@ -43,7 +43,7 @@ func TestGetBalancesAggregated(t *testing.T) { queryParams: url.Values{ "address": []string{"foo"}, }, - expectQuery: ledgerstore.GetAggregatedBalanceQuery{ + expectQuery: ledgercontroller.GetAggregatedBalanceQuery{ QueryBuilder: query.Match("address", "foo"), UseInsertionDate: true, }, @@ -53,8 +53,8 @@ func TestGetBalancesAggregated(t *testing.T) { queryParams: 
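// Illustrative note: per getBalancesAggregated above, UseInsertionDate is forced to true
// whenever "pit" is absent (for backward compatibility) or useInsertionDate/use_insertion_date
// is passed; sending "pit" alone, as in this case, leaves it false.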
url.Values{ "pit": []string{now.Format(time.RFC3339Nano)}, }, - expectQuery: ledgerstore.GetAggregatedBalanceQuery{ - PITFilter: ledgerstore.PITFilter{ + expectQuery: ledgercontroller.GetAggregatedBalanceQuery{ + PITFilter: ledgercontroller.PITFilter{ PIT: &now, }, }, @@ -65,8 +65,8 @@ func TestGetBalancesAggregated(t *testing.T) { "pit": []string{now.Format(time.RFC3339Nano)}, "useInsertionDate": []string{"true"}, }, - expectQuery: ledgerstore.GetAggregatedBalanceQuery{ - PITFilter: ledgerstore.PITFilter{ + expectQuery: ledgercontroller.GetAggregatedBalanceQuery{ + PITFilter: ledgercontroller.PITFilter{ PIT: &now, }, UseInsertionDate: true, @@ -80,12 +80,12 @@ func TestGetBalancesAggregated(t *testing.T) { expectedBalances := ledger.BalancesByAssets{ "world": big.NewInt(-100), } - backend, mock := newTestingBackend(t, true) - mock.EXPECT(). + systemController, ledgerController := newTestingSystemController(t, true) + ledgerController.EXPECT(). GetAggregatedBalances(gomock.Any(), testCase.expectQuery). Return(expectedBalances, nil) - router := v1.NewRouter(backend, nil, metrics.NewNoOpRegistry(), auth.NewNoAuth(), testing.Verbose()) + router := NewRouter(systemController, auth.NewNoAuth(), "develop", os.Getenv("DEBUG") == "true") req := httptest.NewRequest(http.MethodGet, "/xxx/aggregate/balances", nil) rec := httptest.NewRecorder() @@ -94,7 +94,7 @@ func TestGetBalancesAggregated(t *testing.T) { router.ServeHTTP(rec, req) require.Equal(t, http.StatusOK, rec.Code) - balances, ok := sharedapi.DecodeSingleResponse[ledger.BalancesByAssets](t, rec.Body) + balances, ok := api.DecodeSingleResponse[ledger.BalancesByAssets](t, rec.Body) require.True(t, ok) require.Equal(t, expectedBalances, balances) }) diff --git a/internal/api/v1/controllers_balances_list.go b/internal/api/v1/controllers_balances_list.go new file mode 100644 index 000000000..ee76acc98 --- /dev/null +++ b/internal/api/v1/controllers_balances_list.go @@ -0,0 +1,57 @@ +package v1 + +import ( + "math/big" + "net/http" + + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/bun/bunpaginate" + "github.com/formancehq/go-libs/v2/pointer" + "github.com/formancehq/ledger/internal/api/common" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" +) + +func getBalances(w http.ResponseWriter, r *http.Request) { + l := common.LedgerFromContext(r.Context()) + + q, err := bunpaginate.Extract[ledgercontroller.ListAccountsQuery](r, func() (*ledgercontroller.ListAccountsQuery, error) { + options, err := getPaginatedQueryOptionsOfPITFilterWithVolumes(r) + if err != nil { + return nil, err + } + options.QueryBuilder, err = buildAccountsFilterQuery(r) + if err != nil { + return nil, err + } + return pointer.For(ledgercontroller.NewListAccountsQuery(*options)), nil + }) + if err != nil { + api.BadRequest(w, ErrValidation, err) + return + } + + cursor, err := l.ListAccounts(r.Context(), q.WithExpandVolumes()) + if err != nil { + common.HandleCommonErrors(w, r, err) + return + } + + ret := make([]map[string]map[string]*big.Int, 0) + for _, item := range cursor.Data { + e := map[string]map[string]*big.Int{ + item.Address: {}, + } + for asset, volumes := range item.Volumes { + e[item.Address][asset] = volumes.Balance() + } + ret = append(ret, e) + } + + api.RenderCursor(w, bunpaginate.Cursor[map[string]map[string]*big.Int]{ + PageSize: cursor.PageSize, + HasMore: cursor.HasMore, + Previous: cursor.Previous, + Next: cursor.Next, + Data: ret, + }) +} diff --git a/internal/api/v1/controllers_config.go 
b/internal/api/v1/controllers_config.go index ede2286ae..b0ee1172d 100644 --- a/internal/api/v1/controllers_config.go +++ b/internal/api/v1/controllers_config.go @@ -3,15 +3,18 @@ package v1 import ( "context" _ "embed" + "github.com/formancehq/ledger/internal/api/common" "net/http" - "github.com/formancehq/go-libs/bun/bunpaginate" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + "github.com/formancehq/ledger/internal/controller/system" - "github.com/formancehq/go-libs/collectionutils" - "github.com/formancehq/ledger/internal/storage/systemstore" + "github.com/formancehq/go-libs/v2/bun/bunpaginate" + ledger "github.com/formancehq/ledger/internal" - sharedapi "github.com/formancehq/go-libs/api" - "github.com/formancehq/ledger/internal/api/backend" + "github.com/formancehq/go-libs/v2/collectionutils" + + "github.com/formancehq/go-libs/v2/api" ) type ConfigInfo struct { @@ -29,28 +32,28 @@ type LedgerStorage struct { Ledgers []string `json:"ledgers"` } -func getInfo(backend backend.Backend) func(w http.ResponseWriter, r *http.Request) { +func getInfo(systemController system.Controller, version string) func(w http.ResponseWriter, r *http.Request) { return func(w http.ResponseWriter, r *http.Request) { ledgerNames := make([]string, 0) - if err := bunpaginate.Iterate(r.Context(), systemstore.NewListLedgersQuery(100), - func(ctx context.Context, q systemstore.ListLedgersQuery) (*bunpaginate.Cursor[systemstore.Ledger], error) { - return backend.ListLedgers(ctx, q) + if err := bunpaginate.Iterate(r.Context(), ledgercontroller.NewListLedgersQuery(100), + func(ctx context.Context, q ledgercontroller.ListLedgersQuery) (*bunpaginate.Cursor[ledger.Ledger], error) { + return systemController.ListLedgers(ctx, q) }, - func(cursor *bunpaginate.Cursor[systemstore.Ledger]) error { - ledgerNames = append(ledgerNames, collectionutils.Map(cursor.Data, func(from systemstore.Ledger) string { + func(cursor *bunpaginate.Cursor[ledger.Ledger]) error { + ledgerNames = append(ledgerNames, collectionutils.Map(cursor.Data, func(from ledger.Ledger) string { return from.Name })...) 
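// Illustrative note: bunpaginate.Iterate is assumed here to keep requesting pages
// (100 ledgers at a time per NewListLedgersQuery(100)) and to hand each cursor to this
// callback until no further page remains, so ledgerNames ends up listing every ledger
// reported in the _info response.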
return nil }, ); err != nil { - sharedapi.InternalServerError(w, r, err) + common.HandleCommonErrors(w, r, err) return } - sharedapi.Ok(w, ConfigInfo{ + api.Ok(w, ConfigInfo{ Server: "ledger", - Version: backend.GetVersion(), + Version: version, Config: &LedgerConfig{ LedgerStorage: &LedgerStorage{ Driver: "postgres", diff --git a/internal/api/v1/controllers_config_test.go b/internal/api/v1/controllers_config_test.go index 4871a015a..3d162123c 100644 --- a/internal/api/v1/controllers_config_test.go +++ b/internal/api/v1/controllers_config_test.go @@ -1,19 +1,18 @@ -package v1_test +package v1 import ( "net/http" "net/http/httptest" + "os" "testing" - "github.com/formancehq/go-libs/bun/bunpaginate" + ledger "github.com/formancehq/ledger/internal" - sharedapi "github.com/formancehq/go-libs/api" - "github.com/formancehq/go-libs/auth" - v1 "github.com/formancehq/ledger/internal/api/v1" + "github.com/formancehq/go-libs/v2/bun/bunpaginate" - "github.com/formancehq/ledger/internal/storage/systemstore" + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/auth" - "github.com/formancehq/ledger/internal/opentelemetry/metrics" "github.com/stretchr/testify/require" "go.uber.org/mock/gomock" ) @@ -21,14 +20,14 @@ import ( func TestGetInfo(t *testing.T) { t.Parallel() - backend, _ := newTestingBackend(t, false) - router := v1.NewRouter(backend, nil, metrics.NewNoOpRegistry(), auth.NewNoAuth(), testing.Verbose()) + systemController, _ := newTestingSystemController(t, false) + router := NewRouter(systemController, auth.NewNoAuth(), "develop", os.Getenv("DEBUG") == "true") - backend. + systemController. EXPECT(). ListLedgers(gomock.Any(), gomock.Any()). - Return(&bunpaginate.Cursor[systemstore.Ledger]{ - Data: []systemstore.Ledger{ + Return(&bunpaginate.Cursor[ledger.Ledger]{ + Data: []ledger.Ledger{ { Name: "a", }, @@ -38,11 +37,6 @@ func TestGetInfo(t *testing.T) { }, }, nil) - backend. - EXPECT(). - GetVersion(). 
- Return("latest") - req := httptest.NewRequest(http.MethodGet, "/_info", nil) rec := httptest.NewRecorder() @@ -50,13 +44,13 @@ func TestGetInfo(t *testing.T) { require.Equal(t, http.StatusOK, rec.Code) - info, _ := sharedapi.DecodeSingleResponse[v1.ConfigInfo](t, rec.Body) + info, _ := api.DecodeSingleResponse[ConfigInfo](t, rec.Body) - require.EqualValues(t, v1.ConfigInfo{ + require.EqualValues(t, ConfigInfo{ Server: "ledger", - Version: "latest", - Config: &v1.LedgerConfig{ - LedgerStorage: &v1.LedgerStorage{ + Version: "develop", + Config: &LedgerConfig{ + LedgerStorage: &LedgerStorage{ Driver: "postgres", Ledgers: []string{"a", "b"}, }, diff --git a/internal/api/v1/controllers_info.go b/internal/api/v1/controllers_info.go index c8ec8538f..5fe9f4245 100644 --- a/internal/api/v1/controllers_info.go +++ b/internal/api/v1/controllers_info.go @@ -5,15 +5,9 @@ import ( "github.com/go-chi/chi/v5" - "github.com/formancehq/go-libs/bun/bunpaginate" - "github.com/formancehq/ledger/internal/api/backend" - "github.com/formancehq/ledger/internal/engine" - "github.com/pkg/errors" - - sharedapi "github.com/formancehq/go-libs/api" - "github.com/formancehq/go-libs/migrations" - "github.com/formancehq/go-libs/query" - "github.com/formancehq/ledger/internal/storage/ledgerstore" + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/migrations" + "github.com/formancehq/ledger/internal/api/common" ) type Info struct { @@ -26,7 +20,7 @@ type StorageInfo struct { } func getLedgerInfo(w http.ResponseWriter, r *http.Request) { - ledger := backend.LedgerFromContext(r.Context()) + ledger := common.LedgerFromContext(r.Context()) var err error res := Info{ @@ -35,92 +29,9 @@ func getLedgerInfo(w http.ResponseWriter, r *http.Request) { } res.Storage.Migrations, err = ledger.GetMigrationsInfo(r.Context()) if err != nil { - sharedapi.InternalServerError(w, r, err) - return - } - - sharedapi.Ok(w, res) -} - -func getStats(w http.ResponseWriter, r *http.Request) { - l := backend.LedgerFromContext(r.Context()) - - stats, err := l.Stats(r.Context()) - if err != nil { - sharedapi.InternalServerError(w, r, err) - return - } - - sharedapi.Ok(w, stats) -} - -func buildGetLogsQuery(r *http.Request) (query.Builder, error) { - clauses := make([]query.Builder, 0) - if after := r.URL.Query().Get("after"); after != "" { - clauses = append(clauses, query.Lt("id", after)) - } - - if startTime := r.URL.Query().Get("start_time"); startTime != "" { - clauses = append(clauses, query.Gte("date", startTime)) - } - if endTime := r.URL.Query().Get("end_time"); endTime != "" { - clauses = append(clauses, query.Lt("date", endTime)) - } - - if len(clauses) == 0 { - return nil, nil - } - if len(clauses) == 1 { - return clauses[0], nil - } - - return query.And(clauses...), nil -} - -func getLogs(w http.ResponseWriter, r *http.Request) { - l := backend.LedgerFromContext(r.Context()) - - query := ledgerstore.GetLogsQuery{} - - if r.URL.Query().Get(QueryKeyCursor) != "" { - err := bunpaginate.UnmarshalCursor(r.URL.Query().Get(QueryKeyCursor), &query) - if err != nil { - sharedapi.BadRequest(w, ErrValidation, errors.Errorf("invalid '%s' query param", QueryKeyCursor)) - return - } - } else { - var err error - - pageSize, err := bunpaginate.GetPageSize(r, - bunpaginate.WithDefaultPageSize(DefaultPageSize), - bunpaginate.WithMaxPageSize(MaxPageSize)) - if err != nil { - switch { - case engine.IsStorageError(err): - sharedapi.BadRequest(w, ErrValidation, err) - default: - sharedapi.InternalServerError(w, r, err) - } - return - } - 
- qb, err := buildGetLogsQuery(r) - if err != nil { - sharedapi.BadRequest(w, ErrValidation, err) - return - } - - query = ledgerstore.NewGetLogsQuery(ledgerstore.PaginatedQueryOptions[any]{ - QueryBuilder: qb, - PageSize: uint64(pageSize), - }) - } - - cursor, err := l.GetLogs(r.Context(), query) - if err != nil { - sharedapi.InternalServerError(w, r, err) + common.HandleCommonErrors(w, r, err) return } - sharedapi.RenderCursor(w, *cursor) + api.Ok(w, res) } diff --git a/internal/api/v1/controllers_info_test.go b/internal/api/v1/controllers_info_test.go index fa83e8c0a..95b618a43 100644 --- a/internal/api/v1/controllers_info_test.go +++ b/internal/api/v1/controllers_info_test.go @@ -1,26 +1,16 @@ -package v1_test +package v1 import ( - "encoding/json" "net/http" "net/http/httptest" - "net/url" + "os" "testing" - "github.com/formancehq/go-libs/time" + "github.com/formancehq/go-libs/v2/time" - "github.com/formancehq/go-libs/auth" - "github.com/formancehq/go-libs/bun/bunpaginate" - - sharedapi "github.com/formancehq/go-libs/api" - "github.com/formancehq/go-libs/metadata" - "github.com/formancehq/go-libs/migrations" - "github.com/formancehq/go-libs/query" - ledger "github.com/formancehq/ledger/internal" - v1 "github.com/formancehq/ledger/internal/api/v1" - "github.com/formancehq/ledger/internal/engine" - "github.com/formancehq/ledger/internal/opentelemetry/metrics" - "github.com/formancehq/ledger/internal/storage/ledgerstore" + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/auth" + "github.com/formancehq/go-libs/v2/migrations" "github.com/stretchr/testify/require" "go.uber.org/mock/gomock" ) @@ -28,8 +18,8 @@ import ( func TestGetLedgerInfo(t *testing.T) { t.Parallel() - backend, mock := newTestingBackend(t, false) - router := v1.NewRouter(backend, nil, metrics.NewNoOpRegistry(), auth.NewNoAuth(), testing.Verbose()) + systemController, mock := newTestingSystemController(t, false) + router := NewRouter(systemController, auth.NewNoAuth(), "develop", os.Getenv("DEBUG") == "true") migrationInfo := []migrations.Info{ { @@ -57,145 +47,13 @@ func TestGetLedgerInfo(t *testing.T) { require.Equal(t, http.StatusOK, rec.Code) - info, ok := sharedapi.DecodeSingleResponse[v1.Info](t, rec.Body) + info, ok := api.DecodeSingleResponse[Info](t, rec.Body) require.True(t, ok) - require.EqualValues(t, v1.Info{ + require.EqualValues(t, Info{ Name: "xxx", - Storage: v1.StorageInfo{ + Storage: StorageInfo{ Migrations: migrationInfo, }, }, info) } - -func TestGetStats(t *testing.T) { - t.Parallel() - - backend, mock := newTestingBackend(t, true) - router := v1.NewRouter(backend, nil, metrics.NewNoOpRegistry(), auth.NewNoAuth(), testing.Verbose()) - - expectedStats := engine.Stats{ - Transactions: 10, - Accounts: 5, - } - - mock.EXPECT(). - Stats(gomock.Any()). 
- Return(expectedStats, nil) - - req := httptest.NewRequest(http.MethodGet, "/xxx/stats", nil) - rec := httptest.NewRecorder() - - router.ServeHTTP(rec, req) - - require.Equal(t, http.StatusOK, rec.Code) - - stats, ok := sharedapi.DecodeSingleResponse[engine.Stats](t, rec.Body) - require.True(t, ok) - - require.EqualValues(t, expectedStats, stats) -} - -func TestGetLogs(t *testing.T) { - t.Parallel() - - type testCase struct { - name string - queryParams url.Values - expectQuery ledgerstore.PaginatedQueryOptions[any] - expectStatusCode int - expectedErrorCode string - } - - now := time.Now() - testCases := []testCase{ - { - name: "nominal", - expectQuery: ledgerstore.NewPaginatedQueryOptions[any](nil), - }, - { - name: "using start time", - queryParams: url.Values{ - "start_time": []string{now.Format(time.DateFormat)}, - }, - expectQuery: ledgerstore.NewPaginatedQueryOptions[any](nil).WithQueryBuilder(query.Gte("date", now.Format(time.DateFormat))), - }, - { - name: "using end time", - queryParams: url.Values{ - "end_time": []string{now.Format(time.DateFormat)}, - }, - expectQuery: ledgerstore.NewPaginatedQueryOptions[any](nil). - WithQueryBuilder(query.Lt("date", now.Format(time.DateFormat))), - }, - { - name: "using empty cursor", - queryParams: url.Values{ - "cursor": []string{bunpaginate.EncodeCursor(ledgerstore.NewGetLogsQuery(ledgerstore.NewPaginatedQueryOptions[any](nil)))}, - }, - expectQuery: ledgerstore.NewPaginatedQueryOptions[any](nil), - }, - { - name: "using invalid cursor", - queryParams: url.Values{ - "cursor": []string{"xxx"}, - }, - expectStatusCode: http.StatusBadRequest, - expectedErrorCode: v1.ErrValidation, - }, - } - for _, testCase := range testCases { - testCase := testCase - t.Run(testCase.name, func(t *testing.T) { - - if testCase.expectStatusCode == 0 { - testCase.expectStatusCode = http.StatusOK - } - - expectedCursor := bunpaginate.Cursor[ledger.ChainedLog]{ - Data: []ledger.ChainedLog{ - *ledger.NewTransactionLog(ledger.NewTransaction(), map[string]metadata.Metadata{}). - ChainLog(nil), - }, - } - - backend, mockLedger := newTestingBackend(t, true) - if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { - mockLedger.EXPECT(). - GetLogs(gomock.Any(), ledgerstore.NewGetLogsQuery(testCase.expectQuery)). 
- Return(&expectedCursor, nil) - } - - router := v1.NewRouter(backend, nil, metrics.NewNoOpRegistry(), auth.NewNoAuth(), testing.Verbose()) - - req := httptest.NewRequest(http.MethodGet, "/xxx/logs", nil) - rec := httptest.NewRecorder() - req.URL.RawQuery = testCase.queryParams.Encode() - - router.ServeHTTP(rec, req) - - require.Equal(t, testCase.expectStatusCode, rec.Code) - if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { - cursor := sharedapi.DecodeCursorResponse[ledger.ChainedLog](t, rec.Body) - - cursorData, err := json.Marshal(cursor) - require.NoError(t, err) - - cursorAsMap := make(map[string]any) - require.NoError(t, json.Unmarshal(cursorData, &cursorAsMap)) - - expectedCursorData, err := json.Marshal(expectedCursor) - require.NoError(t, err) - - expectedCursorAsMap := make(map[string]any) - require.NoError(t, json.Unmarshal(expectedCursorData, &expectedCursorAsMap)) - - require.Equal(t, expectedCursorAsMap, cursorAsMap) - } else { - err := sharedapi.ErrorResponse{} - sharedapi.Decode(t, rec.Body, &err) - require.EqualValues(t, testCase.expectedErrorCode, err.ErrorCode) - } - }) - } -} diff --git a/internal/api/v1/controllers_logs_list.go b/internal/api/v1/controllers_logs_list.go new file mode 100644 index 000000000..01aa23441 --- /dev/null +++ b/internal/api/v1/controllers_logs_list.go @@ -0,0 +1,72 @@ +package v1 + +import ( + "fmt" + "net/http" + + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/bun/bunpaginate" + "github.com/formancehq/go-libs/v2/query" + "github.com/formancehq/ledger/internal/api/common" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" +) + +func buildGetLogsQuery(r *http.Request) query.Builder { + clauses := make([]query.Builder, 0) + if after := r.URL.Query().Get("after"); after != "" { + clauses = append(clauses, query.Lt("id", after)) + } + + if startTime := r.URL.Query().Get("start_time"); startTime != "" { + clauses = append(clauses, query.Gte("date", startTime)) + } + if endTime := r.URL.Query().Get("end_time"); endTime != "" { + clauses = append(clauses, query.Lt("date", endTime)) + } + + if len(clauses) == 0 { + return nil + } + if len(clauses) == 1 { + return clauses[0] + } + + return query.And(clauses...) 
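// Illustrative sketch of the builders produced above, assuming the request shapes below:
//
//	GET /xxx/logs                                  -> nil (no filtering)
//	GET /xxx/logs?start_time=S                     -> query.Gte("date", S)
//	GET /xxx/logs?start_time=S&end_time=E&after=N  -> query.And(query.Lt("id", N),
//	                                                    query.Gte("date", S), query.Lt("date", E))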
+} + +func getLogs(w http.ResponseWriter, r *http.Request) { + l := common.LedgerFromContext(r.Context()) + + query := ledgercontroller.GetLogsQuery{} + + if r.URL.Query().Get(QueryKeyCursor) != "" { + err := bunpaginate.UnmarshalCursor(r.URL.Query().Get(QueryKeyCursor), &query) + if err != nil { + api.BadRequest(w, ErrValidation, fmt.Errorf("invalid '%s' query param: %w", QueryKeyCursor, err)) + return + } + } else { + var err error + + pageSize, err := bunpaginate.GetPageSize(r, + bunpaginate.WithDefaultPageSize(DefaultPageSize), + bunpaginate.WithMaxPageSize(MaxPageSize)) + if err != nil { + common.HandleCommonErrors(w, r, err) + return + } + + query = ledgercontroller.NewListLogsQuery(ledgercontroller.PaginatedQueryOptions[any]{ + QueryBuilder: buildGetLogsQuery(r), + PageSize: pageSize, + }) + } + + cursor, err := l.ListLogs(r.Context(), query) + if err != nil { + common.HandleCommonErrors(w, r, err) + return + } + + api.RenderCursor(w, *cursor) +} diff --git a/internal/api/v1/controllers_logs_list_test.go b/internal/api/v1/controllers_logs_list_test.go new file mode 100644 index 000000000..1fb433d84 --- /dev/null +++ b/internal/api/v1/controllers_logs_list_test.go @@ -0,0 +1,126 @@ +package v1 + +import ( + "encoding/json" + "net/http" + "net/http/httptest" + "net/url" + "os" + "testing" + + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/auth" + "github.com/formancehq/go-libs/v2/bun/bunpaginate" + "github.com/formancehq/go-libs/v2/query" + "github.com/formancehq/go-libs/v2/time" + ledger "github.com/formancehq/ledger/internal" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" +) + +func TestGetLogs(t *testing.T) { + t.Parallel() + + type testCase struct { + name string + queryParams url.Values + expectQuery ledgercontroller.PaginatedQueryOptions[any] + expectStatusCode int + expectedErrorCode string + } + + now := time.Now() + testCases := []testCase{ + { + name: "nominal", + expectQuery: ledgercontroller.NewPaginatedQueryOptions[any](nil), + }, + { + name: "using start time", + queryParams: url.Values{ + "start_time": []string{now.Format(time.DateFormat)}, + }, + expectQuery: ledgercontroller.NewPaginatedQueryOptions[any](nil).WithQueryBuilder(query.Gte("date", now.Format(time.DateFormat))), + }, + { + name: "using end time", + queryParams: url.Values{ + "end_time": []string{now.Format(time.DateFormat)}, + }, + expectQuery: ledgercontroller.NewPaginatedQueryOptions[any](nil). 
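// Illustrative note: getLogs above takes one of two paths. When a "cursor" query parameter is
// present it is decoded back into the previous GetLogsQuery and the other filters are ignored;
// otherwise a fresh ListLogsQuery is built from the page size and buildGetLogsQuery. The
// cursor-based cases further down exercise that first path.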
+ WithQueryBuilder(query.Lt("date", now.Format(time.DateFormat))), + }, + { + name: "using empty cursor", + queryParams: url.Values{ + "cursor": []string{bunpaginate.EncodeCursor(ledgercontroller.NewListLogsQuery(ledgercontroller.NewPaginatedQueryOptions[any](nil)))}, + }, + expectQuery: ledgercontroller.NewPaginatedQueryOptions[any](nil), + }, + { + name: "using invalid cursor", + queryParams: url.Values{ + "cursor": []string{"xxx"}, + }, + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + }, + } + for _, testCase := range testCases { + testCase := testCase + t.Run(testCase.name, func(t *testing.T) { + + if testCase.expectStatusCode == 0 { + testCase.expectStatusCode = http.StatusOK + } + + expectedCursor := bunpaginate.Cursor[ledger.Log]{ + Data: []ledger.Log{ + ledger.NewLog(ledger.CreatedTransaction{ + Transaction: ledger.NewTransaction(), + AccountMetadata: ledger.AccountMetadata{}, + }).ChainLog(nil), + }, + } + + systemController, ledgerController := newTestingSystemController(t, true) + if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { + ledgerController.EXPECT(). + ListLogs(gomock.Any(), ledgercontroller.NewListLogsQuery(testCase.expectQuery)). + Return(&expectedCursor, nil) + } + + router := NewRouter(systemController, auth.NewNoAuth(), "develop", os.Getenv("DEBUG") == "true") + + req := httptest.NewRequest(http.MethodGet, "/xxx/logs", nil) + rec := httptest.NewRecorder() + req.URL.RawQuery = testCase.queryParams.Encode() + + router.ServeHTTP(rec, req) + + require.Equal(t, testCase.expectStatusCode, rec.Code) + if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { + cursor := api.DecodeCursorResponse[ledger.Log](t, rec.Body) + + cursorData, err := json.Marshal(cursor) + require.NoError(t, err) + + cursorAsMap := make(map[string]any) + require.NoError(t, json.Unmarshal(cursorData, &cursorAsMap)) + + expectedCursorData, err := json.Marshal(expectedCursor) + require.NoError(t, err) + + expectedCursorAsMap := make(map[string]any) + require.NoError(t, json.Unmarshal(expectedCursorData, &expectedCursorAsMap)) + + require.Equal(t, expectedCursorAsMap, cursorAsMap) + } else { + err := api.ErrorResponse{} + api.Decode(t, rec.Body, &err) + require.EqualValues(t, testCase.expectedErrorCode, err.ErrorCode) + } + }) + } +} diff --git a/internal/api/v1/controllers_stats.go b/internal/api/v1/controllers_stats.go new file mode 100644 index 000000000..ddda7b7f3 --- /dev/null +++ b/internal/api/v1/controllers_stats.go @@ -0,0 +1,20 @@ +package v1 + +import ( + "net/http" + + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/ledger/internal/api/common" +) + +func getStats(w http.ResponseWriter, r *http.Request) { + l := common.LedgerFromContext(r.Context()) + + stats, err := l.GetStats(r.Context()) + if err != nil { + common.HandleCommonErrors(w, r, err) + return + } + + api.Ok(w, stats) +} diff --git a/internal/api/v1/controllers_stats_test.go b/internal/api/v1/controllers_stats_test.go new file mode 100644 index 000000000..91bb2958a --- /dev/null +++ b/internal/api/v1/controllers_stats_test.go @@ -0,0 +1,42 @@ +package v1 + +import ( + "net/http" + "net/http/httptest" + "os" + "testing" + + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/auth" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" +) + +func TestGetStats(t *testing.T) { + t.Parallel() + + systemController, ledgerController := 
newTestingSystemController(t, true) + router := NewRouter(systemController, auth.NewNoAuth(), "develop", os.Getenv("DEBUG") == "true") + + expectedStats := ledgercontroller.Stats{ + Transactions: 10, + Accounts: 5, + } + + ledgerController.EXPECT(). + GetStats(gomock.Any()). + Return(expectedStats, nil) + + req := httptest.NewRequest(http.MethodGet, "/xxx/stats", nil) + rec := httptest.NewRecorder() + + router.ServeHTTP(rec, req) + + require.Equal(t, http.StatusOK, rec.Code) + + stats, ok := api.DecodeSingleResponse[ledgercontroller.Stats](t, rec.Body) + require.True(t, ok) + + require.EqualValues(t, expectedStats, stats) +} diff --git a/internal/api/v1/controllers_transactions.go b/internal/api/v1/controllers_transactions.go index d92cd9688..6bf5baab8 100644 --- a/internal/api/v1/controllers_transactions.go +++ b/internal/api/v1/controllers_transactions.go @@ -1,58 +1,59 @@ package v1 import ( - "encoding/json" - "fmt" "math/big" "net/http" - "strconv" "strings" - "github.com/go-chi/chi/v5" - - "github.com/formancehq/go-libs/time" - - "github.com/formancehq/go-libs/pointer" - - "github.com/formancehq/go-libs/bun/bunpaginate" - "github.com/formancehq/ledger/internal/api/backend" - "github.com/formancehq/ledger/internal/engine" - "github.com/formancehq/ledger/internal/engine/command" - "github.com/formancehq/ledger/internal/machine" - storageerrors "github.com/formancehq/ledger/internal/storage/sqlutils" - "github.com/pkg/errors" - - sharedapi "github.com/formancehq/go-libs/api" - "github.com/formancehq/go-libs/collectionutils" - "github.com/formancehq/go-libs/metadata" - "github.com/formancehq/go-libs/query" + "github.com/formancehq/go-libs/v2/query" ledger "github.com/formancehq/ledger/internal" - "github.com/formancehq/ledger/internal/storage/ledgerstore" ) func mapTransactionToV1(tx ledger.Transaction) any { - return struct { - ledger.Transaction - TxID *big.Int `json:"txid"` - ID *big.Int `json:"-"` - }{ - Transaction: tx, - TxID: tx.ID, + type Aux ledger.Transaction + type Ret struct { + Aux + + Reverted bool `json:"reverted"` + PreCommitVolumes ledger.PostCommitVolumes `json:"preCommitVolumes,omitempty"` + PreCommitEffectiveVolumes ledger.PostCommitVolumes `json:"preCommitEffectiveVolumes,omitempty"` + TxID int `json:"txid"` + ID int `json:"-"` + } + + var ( + preCommitVolumes ledger.PostCommitVolumes + preCommitEffectiveVolumes ledger.PostCommitVolumes + ) + if len(tx.PostCommitVolumes) > 0 { + if tx.PostCommitVolumes != nil { + preCommitVolumes = tx.PostCommitVolumes.Copy() + for _, posting := range tx.Postings { + preCommitVolumes.AddOutput(posting.Source, posting.Asset, big.NewInt(0).Neg(posting.Amount)) + preCommitVolumes.AddInput(posting.Destination, posting.Asset, big.NewInt(0).Neg(posting.Amount)) + } + } + } + if len(tx.PostCommitEffectiveVolumes) > 0 { + if tx.PostCommitEffectiveVolumes != nil { + preCommitEffectiveVolumes = tx.PostCommitEffectiveVolumes.Copy() + for _, posting := range tx.Postings { + preCommitEffectiveVolumes.AddOutput(posting.Source, posting.Asset, big.NewInt(0).Neg(posting.Amount)) + preCommitEffectiveVolumes.AddInput(posting.Destination, posting.Asset, big.NewInt(0).Neg(posting.Amount)) + } + } } -} -func mapExpandedTransactionToV1(tx ledger.ExpandedTransaction) any { - return struct { - ledger.ExpandedTransaction - TxID *big.Int `json:"txid"` - ID *big.Int `json:"-"` - }{ - ExpandedTransaction: tx, - TxID: tx.ID, + return &Ret{ + Aux: Aux(tx), + Reverted: tx.RevertedAt != nil && !tx.RevertedAt.IsZero(), + PreCommitVolumes: preCommitVolumes, + 
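		// Worked example for the reconstruction above, assuming a single
		// posting world -> bank of [USD 100] with post-commit volumes
		// world: {input: 0, output: 100} and bank: {input: 100, output: 0}:
		// undoing the posting (AddOutput / AddInput with the negated amount)
		// gives pre-commit volumes of world: {input: 0, output: 0} and
		// bank: {input: 0, output: 0}, i.e. the volumes as they stood before
		// the transaction was applied. The same logic derives
		// preCommitEffectiveVolumes from PostCommitEffectiveVolumes.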
PreCommitEffectiveVolumes: preCommitEffectiveVolumes, + TxID: tx.ID, } } -func buildGetTransactionsQuery(r *http.Request) (query.Builder, error) { +func buildGetTransactionsQuery(r *http.Request) query.Builder { clauses := make([]query.Builder, 0) if after := r.URL.Query().Get("after"); after != "" { clauses = append(clauses, query.Lt("id", after)) @@ -84,321 +85,11 @@ func buildGetTransactionsQuery(r *http.Request) (query.Builder, error) { } if len(clauses) == 0 { - return nil, nil + return nil } if len(clauses) == 1 { - return clauses[0], nil - } - - return query.And(clauses...), nil -} - -func countTransactions(w http.ResponseWriter, r *http.Request) { - - options, err := getPaginatedQueryOptionsOfPITFilterWithVolumes(r) - if err != nil { - sharedapi.BadRequest(w, ErrValidation, err) - return - } - options.QueryBuilder, err = buildGetTransactionsQuery(r) - if err != nil { - sharedapi.BadRequest(w, ErrValidation, err) - return - } - - count, err := backend.LedgerFromContext(r.Context()). - CountTransactions(r.Context(), ledgerstore.NewGetTransactionsQuery(*options)) - if err != nil { - sharedapi.InternalServerError(w, r, err) - return - } - - w.Header().Set("Count", fmt.Sprint(count)) - sharedapi.NoContent(w) -} - -func getTransactions(w http.ResponseWriter, r *http.Request) { - l := backend.LedgerFromContext(r.Context()) - - query, err := bunpaginate.Extract[ledgerstore.GetTransactionsQuery](r, func() (*ledgerstore.GetTransactionsQuery, error) { - options, err := getPaginatedQueryOptionsOfPITFilterWithVolumes(r) - if err != nil { - return nil, err - } - options.QueryBuilder, err = buildGetTransactionsQuery(r) - if err != nil { - return nil, err - } - return pointer.For(ledgerstore.NewGetTransactionsQuery(*options)), nil - }) - if err != nil { - sharedapi.BadRequest(w, ErrValidation, err) - return - } - - cursor, err := l.GetTransactions(r.Context(), *query) - if err != nil { - sharedapi.InternalServerError(w, r, err) - return - } - - sharedapi.RenderCursor(w, *bunpaginate.MapCursor(cursor, mapExpandedTransactionToV1)) -} - -type Script struct { - ledger.Script - Vars map[string]json.RawMessage `json:"vars"` -} - -func (s Script) ToCore() (*ledger.Script, error) { - s.Script.Vars = map[string]string{} - for k, v := range s.Vars { - - m := make(map[string]json.RawMessage) - if err := json.Unmarshal(v, &m); err != nil { - var rawValue string - if err := json.Unmarshal(v, &rawValue); err != nil { - panic(err) - } - s.Script.Vars[k] = rawValue - continue - } - - // Is a monetary - var asset string - if err := json.Unmarshal(m["asset"], &asset); err != nil { - return nil, errors.Wrap(err, "unmarshalling asset") - } - amount := &big.Int{} - if err := json.Unmarshal(m["amount"], amount); err != nil { - return nil, errors.Wrap(err, "unmarshalling amount") - } - - s.Script.Vars[k] = fmt.Sprintf("%s %s", asset, amount) - } - return &s.Script, nil -} - -type PostTransactionRequest struct { - Postings ledger.Postings `json:"postings"` - Script Script `json:"script"` - Timestamp time.Time `json:"timestamp"` - Reference string `json:"reference"` - Metadata metadata.Metadata `json:"metadata" swaggertype:"object"` -} - -func postTransaction(w http.ResponseWriter, r *http.Request) { - l := backend.LedgerFromContext(r.Context()) - - payload := PostTransactionRequest{} - if err := json.NewDecoder(r.Body).Decode(&payload); err != nil { - sharedapi.BadRequest(w, ErrValidation, errors.New("invalid transaction format")) - return - } - - if len(payload.Postings) > 0 && payload.Script.Plain != "" || - 
len(payload.Postings) == 0 && payload.Script.Plain == "" { - sharedapi.BadRequest(w, ErrValidation, errors.New("invalid payload: should contain either postings or script")) - return - } else if len(payload.Postings) > 0 { - if _, err := payload.Postings.Validate(); err != nil { - sharedapi.BadRequest(w, ErrValidation, err) - return - } - txData := ledger.TransactionData{ - Postings: payload.Postings, - Timestamp: payload.Timestamp, - Reference: payload.Reference, - Metadata: payload.Metadata, - } - - res, err := l.CreateTransaction(r.Context(), getCommandParameters(r), ledger.TxToScriptData(txData, false)) - if err != nil { - switch { - case engine.IsCommandError(err): - switch { - case command.IsErrMachine(err): - switch { - case machine.IsInsufficientFundError(err): - sharedapi.BadRequest(w, ErrInsufficientFund, err) - return - case machine.IsMetadataOverride(err): - sharedapi.BadRequest(w, ErrScriptMetadataOverride, err) - return - } - case command.IsInvalidTransactionError(err, command.ErrInvalidTransactionCodeConflict): - sharedapi.BadRequest(w, ErrConflict, err) - return - case command.IsInvalidTransactionError(err, command.ErrInvalidTransactionCodeCompilationFailed): - sharedapi.BadRequestWithDetails(w, ErrScriptCompilationFailed, err, backend.EncodeLink(err.Error())) - return - } - sharedapi.BadRequest(w, ErrValidation, err) - return - } - sharedapi.InternalServerError(w, r, err) - return - } - - sharedapi.Ok(w, []any{mapTransactionToV1(*res)}) - return - } - - script, err := payload.Script.ToCore() - if err != nil { - sharedapi.BadRequest(w, ErrValidation, err) - return - } - - runScript := ledger.RunScript{ - Script: *script, - Timestamp: payload.Timestamp, - Reference: payload.Reference, - Metadata: payload.Metadata, - } - - res, err := l.CreateTransaction(r.Context(), getCommandParameters(r), runScript) - if err != nil { - switch { - case engine.IsCommandError(err): - switch { - case command.IsErrMachine(err): - switch { - case machine.IsInsufficientFundError(err): - sharedapi.BadRequest(w, ErrInsufficientFund, err) - return - } - case command.IsInvalidTransactionError(err, command.ErrInvalidTransactionCodeConflict): - sharedapi.BadRequest(w, ErrConflict, err) - return - case command.IsInvalidTransactionError(err, command.ErrInvalidTransactionCodeCompilationFailed): - sharedapi.BadRequestWithDetails(w, ErrScriptCompilationFailed, err, backend.EncodeLink(err.Error())) - return - } - sharedapi.BadRequest(w, ErrValidation, err) - return - } - sharedapi.InternalServerError(w, r, err) - return - } - - sharedapi.Ok(w, []any{mapTransactionToV1(*res)}) -} - -func getTransaction(w http.ResponseWriter, r *http.Request) { - l := backend.LedgerFromContext(r.Context()) - - txId, ok := big.NewInt(0).SetString(chi.URLParam(r, "id"), 10) - if !ok { - sharedapi.BadRequest(w, ErrValidation, errors.New("invalid transaction ID")) - return - } - - query := ledgerstore.NewGetTransactionQuery(txId) - if collectionutils.Contains(r.URL.Query()["expand"], "volumes") { - query = query.WithExpandVolumes() - } - if collectionutils.Contains(r.URL.Query()["expand"], "effectiveVolumes") { - query = query.WithExpandEffectiveVolumes() - } - - tx, err := l.GetTransactionWithVolumes(r.Context(), query) - if err != nil { - switch { - case storageerrors.IsNotFoundError(err): - sharedapi.NotFound(w, err) - default: - sharedapi.InternalServerError(w, r, err) - } - return - } - - sharedapi.Ok(w, mapExpandedTransactionToV1(*tx)) -} - -func revertTransaction(w http.ResponseWriter, r *http.Request) { - l := 
backend.LedgerFromContext(r.Context()) - - transactionID, ok := big.NewInt(0).SetString(chi.URLParam(r, "id"), 10) - if !ok { - sharedapi.NotFound(w, errors.New("invalid transaction ID")) - return - } - - tx, err := l.RevertTransaction(r.Context(), getCommandParameters(r), transactionID, - sharedapi.QueryParamBool(r, "disableChecks"), false) - if err != nil { - switch { - case engine.IsCommandError(err): - switch { - case command.IsErrMachine(err): - switch { - case machine.IsInsufficientFundError(err): - sharedapi.BadRequest(w, ErrInsufficientFund, err) - return - } - case command.IsRevertError(err, command.ErrRevertTransactionCodeNotFound): - sharedapi.NotFound(w, err) - return - } - sharedapi.BadRequest(w, ErrValidation, err) - return - } - sharedapi.InternalServerError(w, r, err) - return - } - - sharedapi.Created(w, mapTransactionToV1(*tx)) -} - -func postTransactionMetadata(w http.ResponseWriter, r *http.Request) { - l := backend.LedgerFromContext(r.Context()) - - var m metadata.Metadata - if err := json.NewDecoder(r.Body).Decode(&m); err != nil { - sharedapi.BadRequest(w, ErrValidation, errors.New("invalid metadata format")) - return - } - - txID, ok := big.NewInt(0).SetString(chi.URLParam(r, "id"), 10) - if !ok { - sharedapi.NotFound(w, errors.New("invalid transaction ID")) - return - } - - if err := l.SaveMeta(r.Context(), getCommandParameters(r), ledger.MetaTargetTypeTransaction, txID, m); err != nil { - switch { - case command.IsSaveMetaError(err, command.ErrSaveMetaCodeTransactionNotFound): - sharedapi.NotFound(w, err) - default: - sharedapi.InternalServerError(w, r, err) - } - return - } - - sharedapi.NoContent(w) -} - -func deleteTransactionMetadata(w http.ResponseWriter, r *http.Request) { - l := backend.LedgerFromContext(r.Context()) - - transactionID, err := strconv.ParseUint(chi.URLParam(r, "id"), 10, 64) - if err != nil { - sharedapi.BadRequest(w, ErrValidation, errors.New("invalid transaction ID")) - return - } - - metadataKey := chi.URLParam(r, "key") - - if err := l.DeleteMetadata(r.Context(), getCommandParameters(r), ledger.MetaTargetTypeTransaction, transactionID, metadataKey); err != nil { - switch { - case command.IsSaveMetaError(err, command.ErrSaveMetaCodeTransactionNotFound): - sharedapi.NotFound(w, err) - default: - sharedapi.InternalServerError(w, r, err) - } - return + return clauses[0] } - sharedapi.NoContent(w) + return query.And(clauses...) 
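	// Sketch of how the v1 filter parameters compose, assuming the same
	// go-libs/v2 query helpers used in this file: a request such as
	//
	//	GET /{ledger}/transactions?account=orders:1234&source=world
	//
	// would yield a builder roughly equivalent to
	//
	//	query.And(
	//		query.Match("account", "orders:1234"),
	//		query.Match("source", "world"),
	//	)
	//
	// which listTransactions and countTransactions then attach to the
	// paginated options via options.QueryBuilder = buildGetTransactionsQuery(r).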
} diff --git a/internal/api/v1/controllers_transactions_add_metadata.go b/internal/api/v1/controllers_transactions_add_metadata.go new file mode 100644 index 000000000..8982b4250 --- /dev/null +++ b/internal/api/v1/controllers_transactions_add_metadata.go @@ -0,0 +1,46 @@ +package v1 + +import ( + "encoding/json" + "net/http" + "strconv" + + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + + "errors" + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/metadata" + "github.com/formancehq/ledger/internal/api/common" + "github.com/go-chi/chi/v5" +) + +func addTransactionMetadata(w http.ResponseWriter, r *http.Request) { + l := common.LedgerFromContext(r.Context()) + + var m metadata.Metadata + if err := json.NewDecoder(r.Body).Decode(&m); err != nil { + api.BadRequest(w, ErrValidation, errors.New("invalid metadata format")) + return + } + + txID, err := strconv.ParseInt(chi.URLParam(r, "id"), 10, 64) + if err != nil { + api.NotFound(w, errors.New("invalid transaction ID")) + return + } + + if err := l.SaveTransactionMetadata(r.Context(), getCommandParameters(r, ledgercontroller.SaveTransactionMetadata{ + TransactionID: int(txID), + Metadata: m, + })); err != nil { + switch { + case errors.Is(err, ledgercontroller.ErrNotFound): + api.NotFound(w, err) + default: + common.HandleCommonErrors(w, r, err) + } + return + } + + api.NoContent(w) +} diff --git a/internal/api/v1/controllers_transactions_add_metadata_test.go b/internal/api/v1/controllers_transactions_add_metadata_test.go new file mode 100644 index 000000000..30e2df4b3 --- /dev/null +++ b/internal/api/v1/controllers_transactions_add_metadata_test.go @@ -0,0 +1,80 @@ +package v1 + +import ( + "net/http" + "net/http/httptest" + "net/url" + "os" + "testing" + + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/auth" + "github.com/formancehq/go-libs/v2/metadata" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" +) + +func TestTransactionsAddMetadata(t *testing.T) { + t.Parallel() + + type testCase struct { + name string + queryParams url.Values + expectStatusCode int + expectedErrorCode string + body any + } + + testCases := []testCase{ + { + name: "nominal", + body: metadata.Metadata{ + "foo": "bar", + }, + }, + { + name: "invalid body", + body: "invalid - not an object", + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + }, + } + for _, testCase := range testCases { + testCase := testCase + t.Run(testCase.name, func(t *testing.T) { + t.Parallel() + + if testCase.expectStatusCode == 0 { + testCase.expectStatusCode = http.StatusNoContent + } + + systemController, ledgerController := newTestingSystemController(t, true) + if testCase.expectStatusCode == http.StatusNoContent { + ledgerController.EXPECT(). + SaveTransactionMetadata(gomock.Any(), ledgercontroller.Parameters[ledgercontroller.SaveTransactionMetadata]{ + Input: ledgercontroller.SaveTransactionMetadata{ + TransactionID: 0, + Metadata: testCase.body.(metadata.Metadata), + }, + }). 
+ Return(nil) + } + + router := NewRouter(systemController, auth.NewNoAuth(), "develop", os.Getenv("DEBUG") == "true") + + req := httptest.NewRequest(http.MethodPost, "/xxx/transactions/0/metadata", api.Buffer(t, testCase.body)) + rec := httptest.NewRecorder() + req.URL.RawQuery = testCase.queryParams.Encode() + + router.ServeHTTP(rec, req) + + require.Equal(t, testCase.expectStatusCode, rec.Code) + if testCase.expectStatusCode >= 300 || testCase.expectStatusCode < 200 { + err := api.ErrorResponse{} + api.Decode(t, rec.Body, &err) + require.EqualValues(t, testCase.expectedErrorCode, err.ErrorCode) + } + }) + } +} diff --git a/internal/api/v1/controllers_transactions_count.go b/internal/api/v1/controllers_transactions_count.go new file mode 100644 index 000000000..49ba722c7 --- /dev/null +++ b/internal/api/v1/controllers_transactions_count.go @@ -0,0 +1,30 @@ +package v1 + +import ( + "fmt" + "net/http" + + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/ledger/internal/api/common" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" +) + +func countTransactions(w http.ResponseWriter, r *http.Request) { + + options, err := getPaginatedQueryOptionsOfPITFilterWithVolumes(r) + if err != nil { + api.BadRequest(w, ErrValidation, err) + return + } + options.QueryBuilder = buildGetTransactionsQuery(r) + + count, err := common.LedgerFromContext(r.Context()). + CountTransactions(r.Context(), ledgercontroller.NewListTransactionsQuery(*options)) + if err != nil { + common.HandleCommonErrors(w, r, err) + return + } + + w.Header().Set("Count", fmt.Sprint(count)) + api.NoContent(w) +} diff --git a/internal/api/v1/controllers_transactions_count_test.go b/internal/api/v1/controllers_transactions_count_test.go new file mode 100644 index 000000000..1985abe49 --- /dev/null +++ b/internal/api/v1/controllers_transactions_count_test.go @@ -0,0 +1,126 @@ +package v1 + +import ( + "net/http" + "net/http/httptest" + "net/url" + "os" + "testing" + + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/auth" + "github.com/formancehq/go-libs/v2/query" + "github.com/formancehq/go-libs/v2/time" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" +) + +func TestCountTransactions(t *testing.T) { + t.Parallel() + + type testCase struct { + name string + queryParams url.Values + expectQuery ledgercontroller.PaginatedQueryOptions[ledgercontroller.PITFilterWithVolumes] + expectStatusCode int + expectedErrorCode string + } + now := time.Now() + + testCases := []testCase{ + { + name: "nominal", + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}), + }, + { + name: "using metadata", + queryParams: url.Values{ + "metadata[roles]": []string{"admin"}, + }, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("metadata[roles]", "admin")), + }, + { + name: "using startTime", + queryParams: url.Values{ + "start_time": []string{now.Format(time.DateFormat)}, + }, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithQueryBuilder(query.Gte("date", now.Format(time.DateFormat))), + }, + { + name: "using endTime", + queryParams: url.Values{ + "end_time": []string{now.Format(time.DateFormat)}, + }, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). 
+ WithQueryBuilder(query.Lt("date", now.Format(time.DateFormat))), + }, + { + name: "using account", + queryParams: url.Values{ + "account": []string{"xxx"}, + }, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("account", "xxx")), + }, + { + name: "using reference", + queryParams: url.Values{ + "reference": []string{"xxx"}, + }, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("reference", "xxx")), + }, + { + name: "using destination", + queryParams: url.Values{ + "destination": []string{"xxx"}, + }, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("destination", "xxx")), + }, + { + name: "using source", + queryParams: url.Values{ + "source": []string{"xxx"}, + }, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("source", "xxx")), + }, + } + for _, testCase := range testCases { + testCase := testCase + t.Run(testCase.name, func(t *testing.T) { + + if testCase.expectStatusCode == 0 { + testCase.expectStatusCode = http.StatusNoContent + } + + systemController, ledgerController := newTestingSystemController(t, true) + if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { + ledgerController.EXPECT(). + CountTransactions(gomock.Any(), ledgercontroller.NewListTransactionsQuery(testCase.expectQuery)). + Return(10, nil) + } + + router := NewRouter(systemController, auth.NewNoAuth(), "develop", os.Getenv("DEBUG") == "true") + + req := httptest.NewRequest(http.MethodHead, "/xxx/transactions", nil) + rec := httptest.NewRecorder() + req.URL.RawQuery = testCase.queryParams.Encode() + + router.ServeHTTP(rec, req) + + require.Equal(t, testCase.expectStatusCode, rec.Code) + if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { + require.Equal(t, "10", rec.Header().Get("Count")) + } else { + err := api.ErrorResponse{} + api.Decode(t, rec.Body, &err) + require.EqualValues(t, testCase.expectedErrorCode, err.ErrorCode) + } + }) + } +} diff --git a/internal/api/v1/controllers_transactions_create.go b/internal/api/v1/controllers_transactions_create.go new file mode 100644 index 000000000..71385b9a1 --- /dev/null +++ b/internal/api/v1/controllers_transactions_create.go @@ -0,0 +1,142 @@ +package v1 + +import ( + "encoding/json" + "fmt" + "math/big" + "net/http" + + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + + "errors" + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/metadata" + "github.com/formancehq/go-libs/v2/time" + ledger "github.com/formancehq/ledger/internal" + "github.com/formancehq/ledger/internal/api/common" +) + +type Script struct { + ledgercontroller.Script + Vars map[string]json.RawMessage `json:"vars"` +} + +func (s Script) ToCore() (*ledgercontroller.Script, error) { + s.Script.Vars = map[string]string{} + for k, v := range s.Vars { + + m := make(map[string]json.RawMessage) + if err := json.Unmarshal(v, &m); err != nil { + var rawValue string + if err := json.Unmarshal(v, &rawValue); err != nil { + panic(err) + } + s.Script.Vars[k] = rawValue + continue + } + + // Is a monetary + var asset string + if err := json.Unmarshal(m["asset"], &asset); err != nil { + return nil, fmt.Errorf("unmarshalling asset: %w", err) + } + amount := &big.Int{} + if err := json.Unmarshal(m["amount"], 
amount); err != nil { + return nil, fmt.Errorf("unmarshalling amount: %w", err) + } + + s.Script.Vars[k] = fmt.Sprintf("%s %s", asset, amount) + } + return &s.Script, nil +} + +type CreateTransactionRequest struct { + Postings ledger.Postings `json:"postings"` + Script Script `json:"script"` + Timestamp time.Time `json:"timestamp"` + Reference string `json:"reference"` + Metadata metadata.Metadata `json:"metadata" swaggertype:"object"` +} + +func createTransaction(w http.ResponseWriter, r *http.Request) { + l := common.LedgerFromContext(r.Context()) + + payload := CreateTransactionRequest{} + if err := json.NewDecoder(r.Body).Decode(&payload); err != nil { + api.BadRequest(w, ErrValidation, errors.New("invalid transaction format")) + return + } + + if len(payload.Postings) > 0 && payload.Script.Plain != "" || + len(payload.Postings) == 0 && payload.Script.Plain == "" { + api.BadRequest(w, ErrValidation, errors.New("invalid payload: should contain either postings or script")) + return + } else if len(payload.Postings) > 0 { + if _, err := payload.Postings.Validate(); err != nil { + api.BadRequest(w, ErrValidation, err) + return + } + txData := ledger.TransactionData{ + Postings: payload.Postings, + Timestamp: payload.Timestamp, + Reference: payload.Reference, + Metadata: payload.Metadata, + } + + res, err := l.CreateTransaction(r.Context(), getCommandParameters(r, common.TxToScriptData(txData, false))) + if err != nil { + switch { + case errors.Is(err, &ledgercontroller.ErrInsufficientFunds{}): + api.BadRequest(w, ErrInsufficientFund, err) + case errors.Is(err, &ledgercontroller.ErrInvalidVars{}) || errors.Is(err, ledgercontroller.ErrCompilationFailed{}): + api.BadRequest(w, ErrScriptCompilationFailed, err) + case errors.Is(err, &ledgercontroller.ErrMetadataOverride{}): + api.BadRequest(w, ErrScriptMetadataOverride, err) + case errors.Is(err, ledgercontroller.ErrNoPostings) || + errors.Is(err, ledgercontroller.ErrInvalidIdempotencyInput{}): + api.BadRequest(w, ErrValidation, err) + case errors.Is(err, ledgercontroller.ErrTransactionReferenceConflict{}): + api.WriteErrorResponse(w, http.StatusConflict, ErrConflict, err) + default: + common.HandleCommonErrors(w, r, err) + } + return + } + api.Ok(w, []any{mapTransactionToV1(res.Transaction)}) + return + } + + script, err := payload.Script.ToCore() + if err != nil { + api.BadRequest(w, ErrValidation, err) + return + } + + runScript := ledgercontroller.RunScript{ + Script: *script, + Timestamp: payload.Timestamp, + Reference: payload.Reference, + Metadata: payload.Metadata, + } + + res, err := l.CreateTransaction(r.Context(), getCommandParameters(r, runScript)) + if err != nil { + switch { + case errors.Is(err, &ledgercontroller.ErrInsufficientFunds{}): + api.BadRequest(w, ErrInsufficientFund, err) + case errors.Is(err, &ledgercontroller.ErrInvalidVars{}) || + errors.Is(err, ledgercontroller.ErrCompilationFailed{}) || + errors.Is(err, &ledgercontroller.ErrMetadataOverride{}) || + errors.Is(err, ledgercontroller.ErrInvalidIdempotencyInput{}) || + errors.Is(err, ledgercontroller.ErrNoPostings): + api.BadRequest(w, ErrValidation, err) + case errors.Is(err, ledgercontroller.ErrTransactionReferenceConflict{}): + api.WriteErrorResponse(w, http.StatusConflict, ErrConflict, err) + default: + common.HandleCommonErrors(w, r, err) + } + return + } + + api.Ok(w, []any{mapTransactionToV1(res.Transaction)}) +} diff --git a/internal/api/v1/controllers_transactions_create_test.go b/internal/api/v1/controllers_transactions_create_test.go new file mode 100644 
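Script.ToCore above accepts each numscript variable either as a plain JSON string or in the legacy {"asset", "amount"} object form, and normalises both to the "ASSET AMOUNT" string the controller consumes. A minimal, illustrative sketch of an example test for that behaviour, assuming it lives in a _test.go file of this v1 package:

package v1

import (
	"encoding/json"
	"fmt"

	ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger"
)

// ExampleScript_ToCore shows that both accepted encodings of a monetary
// variable normalise to the same "ASSET AMOUNT" string.
func ExampleScript_ToCore() {
	legacy := Script{
		Script: ledgercontroller.Script{Plain: `vars { monetary $val }`},
		Vars: map[string]json.RawMessage{
			// legacy object form
			"val": json.RawMessage(`{"asset": "USD/2", "amount": 100}`),
		},
	}
	plain := Script{
		Script: ledgercontroller.Script{Plain: `vars { monetary $val }`},
		Vars: map[string]json.RawMessage{
			// plain string form
			"val": json.RawMessage(`"USD/2 100"`),
		},
	}

	l, _ := legacy.ToCore()
	p, _ := plain.ToCore()
	fmt.Println(l.Vars["val"])
	fmt.Println(p.Vars["val"])
	// Output:
	// USD/2 100
	// USD/2 100
}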
index 000000000..1ff7be7b7 --- /dev/null +++ b/internal/api/v1/controllers_transactions_create_test.go @@ -0,0 +1,258 @@ +package v1 + +import ( + "encoding/json" + "github.com/formancehq/ledger/internal/api/common" + "math/big" + "net/http" + "net/http/httptest" + "net/url" + "os" + "testing" + + "github.com/formancehq/go-libs/v2/time" + + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/auth" + ledger "github.com/formancehq/ledger/internal" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" +) + +func TestTransactionsCreate(t *testing.T) { + type testCase struct { + name string + expectedPreview bool + expectedRunScript ledgercontroller.RunScript + payload any + expectedStatusCode int + expectedErrorCode string + queryParams url.Values + } + + testCases := []testCase{ + { + name: "using plain numscript", + payload: CreateTransactionRequest{ + Script: Script{ + Script: ledgercontroller.Script{ + Plain: `XXX`, + }, + }, + }, + expectedRunScript: ledgercontroller.RunScript{ + Script: ledgercontroller.Script{ + Plain: `XXX`, + Vars: map[string]string{}, + }, + }, + }, + { + name: "using plain numscript with variables", + payload: CreateTransactionRequest{ + Script: Script{ + Script: ledgercontroller.Script{ + Plain: `vars { + monetary $val + } + + send $val ( + source = @world + destination = @bank + )`, + }, + Vars: map[string]json.RawMessage{ + "val": json.RawMessage(`"USD/2 100"`), + }, + }, + }, + expectedRunScript: ledgercontroller.RunScript{ + Script: ledgercontroller.Script{ + Plain: `vars { + monetary $val + } + + send $val ( + source = @world + destination = @bank + )`, + Vars: map[string]string{ + "val": "USD/2 100", + }, + }, + }, + }, + { + name: "using plain numscript with variables (legacy format)", + payload: CreateTransactionRequest{ + Script: Script{ + Script: ledgercontroller.Script{ + Plain: `vars { + monetary $val + } + + send $val ( + source = @world + destination = @bank + )`, + }, + Vars: map[string]json.RawMessage{ + "val": json.RawMessage(`{ + "asset": "USD/2", + "amount": 100 + }`), + }, + }, + }, + expectedRunScript: ledgercontroller.RunScript{ + Script: ledgercontroller.Script{ + Plain: `vars { + monetary $val + } + + send $val ( + source = @world + destination = @bank + )`, + Vars: map[string]string{ + "val": "USD/2 100", + }, + }, + }, + }, + { + name: "using plain numscript and dry run", + payload: CreateTransactionRequest{ + Script: Script{ + Script: ledgercontroller.Script{ + Plain: `send ( + source = @world + destination = @bank + )`, + }, + }, + }, + expectedRunScript: ledgercontroller.RunScript{ + Script: ledgercontroller.Script{ + Plain: `send ( + source = @world + destination = @bank + )`, + Vars: map[string]string{}, + }, + }, + expectedPreview: true, + queryParams: url.Values{ + "preview": []string{"true"}, + }, + }, + { + name: "using JSON postings", + payload: CreateTransactionRequest{ + Postings: []ledger.Posting{ + ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), + }, + }, + expectedRunScript: common.TxToScriptData(ledger.NewTransactionData().WithPostings( + ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), + ), false), + }, + { + name: "using JSON postings and dry run", + queryParams: url.Values{ + "preview": []string{"true"}, + }, + payload: CreateTransactionRequest{ + Postings: []ledger.Posting{ + ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), + }, + }, + expectedPreview: true, + 
expectedRunScript: common.TxToScriptData(ledger.NewTransactionData().WithPostings( + ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), + ), false), + }, + { + name: "no postings or script", + payload: CreateTransactionRequest{}, + expectedStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + }, + { + name: "postings and script", + payload: CreateTransactionRequest{ + Postings: ledger.Postings{ + { + Source: "world", + Destination: "alice", + Amount: big.NewInt(100), + Asset: "COIN", + }, + }, + Script: Script{ + Script: ledgercontroller.Script{ + Plain: ` + send [COIN 100] ( + source = @world + destination = @bob + )`, + }, + }, + }, + expectedStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + }, + { + name: "using invalid body", + payload: "not a valid payload", + expectedStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + }, + } + + for _, testCase := range testCases { + tc := testCase + t.Run(tc.name, func(t *testing.T) { + if testCase.expectedStatusCode == 0 { + testCase.expectedStatusCode = http.StatusOK + } + + expectedTx := ledger.NewTransaction().WithPostings( + ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), + ) + + systemController, ledgerController := newTestingSystemController(t, true) + if testCase.expectedStatusCode < 300 && testCase.expectedStatusCode >= 200 { + testCase.expectedRunScript.Timestamp = time.Time{} + ledgerController.EXPECT(). + CreateTransaction(gomock.Any(), ledgercontroller.Parameters[ledgercontroller.RunScript]{ + DryRun: tc.expectedPreview, + Input: testCase.expectedRunScript, + }). + Return(&ledger.CreatedTransaction{ + Transaction: expectedTx, + }, nil) + } + + router := NewRouter(systemController, auth.NewNoAuth(), "develop", os.Getenv("DEBUG") == "true") + + req := httptest.NewRequest(http.MethodPost, "/xxx/transactions", api.Buffer(t, testCase.payload)) + rec := httptest.NewRecorder() + req.URL.RawQuery = testCase.queryParams.Encode() + + router.ServeHTTP(rec, req) + + require.Equal(t, testCase.expectedStatusCode, rec.Code) + if testCase.expectedStatusCode < 300 && testCase.expectedStatusCode >= 200 { + tx, ok := api.DecodeSingleResponse[[]ledger.Transaction](t, rec.Body) + require.True(t, ok) + require.Equal(t, expectedTx, tx[0]) + } else { + err := api.ErrorResponse{} + api.Decode(t, rec.Body, &err) + require.EqualValues(t, testCase.expectedErrorCode, err.ErrorCode) + } + }) + } +} diff --git a/internal/api/v1/controllers_transactions_delete_metadata.go b/internal/api/v1/controllers_transactions_delete_metadata.go new file mode 100644 index 000000000..1f706fca4 --- /dev/null +++ b/internal/api/v1/controllers_transactions_delete_metadata.go @@ -0,0 +1,40 @@ +package v1 + +import ( + "net/http" + "strconv" + + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + + "errors" + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/ledger/internal/api/common" + "github.com/go-chi/chi/v5" +) + +func deleteTransactionMetadata(w http.ResponseWriter, r *http.Request) { + l := common.LedgerFromContext(r.Context()) + + transactionID, err := strconv.ParseUint(chi.URLParam(r, "id"), 10, 64) + if err != nil { + api.BadRequest(w, ErrValidation, errors.New("invalid transaction ID")) + return + } + + metadataKey := chi.URLParam(r, "key") + + if err := l.DeleteTransactionMetadata(r.Context(), getCommandParameters(r, ledgercontroller.DeleteTransactionMetadata{ + TransactionID: int(transactionID), + Key: metadataKey, + })); err != nil { + switch { + case 
errors.Is(err, ledgercontroller.ErrNotFound): + api.NotFound(w, err) + default: + common.HandleCommonErrors(w, r, err) + } + return + } + + api.NoContent(w) +} diff --git a/internal/api/v1/controllers_transactions_delete_metadata_test.go b/internal/api/v1/controllers_transactions_delete_metadata_test.go new file mode 100644 index 000000000..a9006dc92 --- /dev/null +++ b/internal/api/v1/controllers_transactions_delete_metadata_test.go @@ -0,0 +1,90 @@ +package v1 + +import ( + "encoding/json" + "net/http" + "net/http/httptest" + "net/url" + "os" + "testing" + + "errors" + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/auth" + "github.com/formancehq/go-libs/v2/logging" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" +) + +func TestTransactionsDeleteMetadata(t *testing.T) { + t.Parallel() + + ctx := logging.TestingContext() + + type testCase struct { + name string + queryParams url.Values + returnErr error + expectedStatusCode int + expectedErrorCode string + expectBackendCall bool + } + + for _, tc := range []testCase{ + { + name: "nominal", + expectBackendCall: true, + }, + { + name: "unexpected backend error", + expectBackendCall: true, + returnErr: errors.New("undefined error"), + expectedStatusCode: http.StatusInternalServerError, + expectedErrorCode: api.ErrorInternal, + }, + { + name: "not found", + expectBackendCall: true, + returnErr: ledgercontroller.ErrNotFound, + expectedStatusCode: http.StatusNotFound, + expectedErrorCode: api.ErrorCodeNotFound, + }, + } { + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + systemController, ledgerController := newTestingSystemController(t, true) + + if tc.expectBackendCall { + ledgerController.EXPECT(). + DeleteTransactionMetadata(gomock.Any(), ledgercontroller.Parameters[ledgercontroller.DeleteTransactionMetadata]{ + Input: ledgercontroller.DeleteTransactionMetadata{ + TransactionID: 1, + Key: "foo", + }, + }). 
+ Return(tc.returnErr) + } + + router := NewRouter(systemController, auth.NewNoAuth(), "develop", os.Getenv("DEBUG") == "true") + + req := httptest.NewRequest(http.MethodDelete, "/ledger0/transactions/1/metadata/foo", nil) + req = req.WithContext(ctx) + req.URL.RawQuery = tc.queryParams.Encode() + + rec := httptest.NewRecorder() + + router.ServeHTTP(rec, req) + + if tc.expectedStatusCode == 0 || tc.expectedStatusCode == http.StatusOK { + require.Equal(t, http.StatusNoContent, rec.Code) + } else { + require.Equal(t, tc.expectedStatusCode, rec.Code) + errorResponse := api.ErrorResponse{} + require.NoError(t, json.Unmarshal(rec.Body.Bytes(), &errorResponse)) + require.Equal(t, tc.expectedErrorCode, errorResponse.ErrorCode) + } + }) + } +} diff --git a/internal/api/v1/controllers_transactions_list.go b/internal/api/v1/controllers_transactions_list.go new file mode 100644 index 000000000..62b4f89c8 --- /dev/null +++ b/internal/api/v1/controllers_transactions_list.go @@ -0,0 +1,37 @@ +package v1 + +import ( + "net/http" + + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/bun/bunpaginate" + "github.com/formancehq/go-libs/v2/pointer" + "github.com/formancehq/ledger/internal/api/common" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" +) + +func listTransactions(w http.ResponseWriter, r *http.Request) { + l := common.LedgerFromContext(r.Context()) + + query, err := bunpaginate.Extract[ledgercontroller.ListTransactionsQuery](r, func() (*ledgercontroller.ListTransactionsQuery, error) { + options, err := getPaginatedQueryOptionsOfPITFilterWithVolumes(r) + if err != nil { + return nil, err + } + options.QueryBuilder = buildGetTransactionsQuery(r) + + return pointer.For(ledgercontroller.NewListTransactionsQuery(*options)), nil + }) + if err != nil { + api.BadRequest(w, ErrValidation, err) + return + } + + cursor, err := l.ListTransactions(r.Context(), *query) + if err != nil { + common.HandleCommonErrors(w, r, err) + return + } + + api.RenderCursor(w, *bunpaginate.MapCursor(cursor, mapTransactionToV1)) +} diff --git a/internal/api/v1/controllers_transactions_list_test.go b/internal/api/v1/controllers_transactions_list_test.go new file mode 100644 index 000000000..4fde95fad --- /dev/null +++ b/internal/api/v1/controllers_transactions_list_test.go @@ -0,0 +1,169 @@ +package v1 + +import ( + "math/big" + "net/http" + "net/http/httptest" + "net/url" + "os" + "testing" + + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/auth" + "github.com/formancehq/go-libs/v2/bun/bunpaginate" + "github.com/formancehq/go-libs/v2/query" + "github.com/formancehq/go-libs/v2/time" + ledger "github.com/formancehq/ledger/internal" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" +) + +func TestTransactionsList(t *testing.T) { + t.Parallel() + + type testCase struct { + name string + queryParams url.Values + expectQuery ledgercontroller.PaginatedQueryOptions[ledgercontroller.PITFilterWithVolumes] + expectStatusCode int + expectedErrorCode string + } + now := time.Now() + + testCases := []testCase{ + { + name: "nominal", + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}), + }, + { + name: "using metadata", + queryParams: url.Values{ + "metadata[roles]": []string{"admin"}, + }, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). 
+ WithQueryBuilder(query.Match("metadata[roles]", "admin")), + }, + { + name: "using startTime", + queryParams: url.Values{ + "start_time": []string{now.Format(time.DateFormat)}, + }, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithQueryBuilder(query.Gte("date", now.Format(time.DateFormat))), + }, + { + name: "using endTime", + queryParams: url.Values{ + "end_time": []string{now.Format(time.DateFormat)}, + }, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithQueryBuilder(query.Lt("date", now.Format(time.DateFormat))), + }, + { + name: "using account", + queryParams: url.Values{ + "account": []string{"xxx"}, + }, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("account", "xxx")), + }, + { + name: "using reference", + queryParams: url.Values{ + "reference": []string{"xxx"}, + }, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("reference", "xxx")), + }, + { + name: "using destination", + queryParams: url.Values{ + "destination": []string{"xxx"}, + }, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("destination", "xxx")), + }, + { + name: "using source", + queryParams: url.Values{ + "source": []string{"xxx"}, + }, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("source", "xxx")), + }, + { + name: "using empty cursor", + queryParams: url.Values{ + "cursor": []string{bunpaginate.EncodeCursor(ledgercontroller.NewListTransactionsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{})))}, + }, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}), + }, + { + name: "using invalid cursor", + queryParams: url.Values{ + "cursor": []string{"XXX"}, + }, + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + }, + { + name: "invalid page size", + queryParams: url.Values{ + "pageSize": []string{"nan"}, + }, + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + }, + { + name: "page size over maximum", + queryParams: url.Values{ + "pageSize": []string{"1000000"}, + }, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithPageSize(MaxPageSize), + }, + } + for _, testCase := range testCases { + testCase := testCase + t.Run(testCase.name, func(t *testing.T) { + + if testCase.expectStatusCode == 0 { + testCase.expectStatusCode = http.StatusOK + } + + expectedCursor := bunpaginate.Cursor[ledger.Transaction]{ + Data: []ledger.Transaction{ + ledger.NewTransaction().WithPostings( + ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), + ), + }, + } + + systemController, ledgerController := newTestingSystemController(t, true) + if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { + ledgerController.EXPECT(). + ListTransactions(gomock.Any(), ledgercontroller.NewListTransactionsQuery(testCase.expectQuery)). 
+ Return(&expectedCursor, nil) + } + + router := NewRouter(systemController, auth.NewNoAuth(), "develop", os.Getenv("DEBUG") == "true") + + req := httptest.NewRequest(http.MethodGet, "/xxx/transactions", nil) + rec := httptest.NewRecorder() + req.URL.RawQuery = testCase.queryParams.Encode() + + router.ServeHTTP(rec, req) + + require.Equal(t, testCase.expectStatusCode, rec.Code) + if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { + cursor := api.DecodeCursorResponse[ledger.Transaction](t, rec.Body) + require.Equal(t, expectedCursor, *cursor) + } else { + err := api.ErrorResponse{} + api.Decode(t, rec.Body, &err) + require.EqualValues(t, testCase.expectedErrorCode, err.ErrorCode) + } + }) + } +} diff --git a/internal/api/v1/controllers_transactions_read.go b/internal/api/v1/controllers_transactions_read.go new file mode 100644 index 000000000..d8aa8292a --- /dev/null +++ b/internal/api/v1/controllers_transactions_read.go @@ -0,0 +1,44 @@ +package v1 + +import ( + "net/http" + "strconv" + + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/collectionutils" + "github.com/formancehq/go-libs/v2/platform/postgres" + "github.com/formancehq/ledger/internal/api/common" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + "github.com/go-chi/chi/v5" +) + +func readTransaction(w http.ResponseWriter, r *http.Request) { + l := common.LedgerFromContext(r.Context()) + + txId, err := strconv.ParseInt(chi.URLParam(r, "id"), 10, 64) + if err != nil { + api.BadRequest(w, ErrValidation, err) + return + } + + query := ledgercontroller.NewGetTransactionQuery(int(txId)) + if collectionutils.Contains(r.URL.Query()["expand"], "volumes") { + query = query.WithExpandVolumes() + } + if collectionutils.Contains(r.URL.Query()["expand"], "effectiveVolumes") { + query = query.WithExpandEffectiveVolumes() + } + + tx, err := l.GetTransaction(r.Context(), query) + if err != nil { + switch { + case postgres.IsNotFoundError(err): + api.NotFound(w, err) + default: + common.HandleCommonErrors(w, r, err) + } + return + } + + api.Ok(w, mapTransactionToV1(*tx)) +} diff --git a/internal/api/v1/controllers_transactions_read_test.go b/internal/api/v1/controllers_transactions_read_test.go new file mode 100644 index 000000000..7369ff644 --- /dev/null +++ b/internal/api/v1/controllers_transactions_read_test.go @@ -0,0 +1,40 @@ +package v1 + +import ( + "math/big" + "net/http" + "net/http/httptest" + "os" + "testing" + + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/auth" + ledger "github.com/formancehq/ledger/internal" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" +) + +func TestTransactionsRead(t *testing.T) { + t.Parallel() + + tx := ledger.NewTransaction().WithPostings( + ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), + ) + + systemController, ledgerController := newTestingSystemController(t, true) + ledgerController.EXPECT(). + GetTransaction(gomock.Any(), ledgercontroller.NewGetTransactionQuery(0)). 
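	// Usage sketch for readTransaction above, assuming the
	// /{ledger}/transactions/{id} routes exercised by these tests:
	//
	//	GET /{ledger}/transactions/0                          -> bare transaction
	//	GET /{ledger}/transactions/0?expand=volumes           -> WithExpandVolumes()
	//	GET /{ledger}/transactions/0?expand=effectiveVolumes  -> WithExpandEffectiveVolumes()
	//
	// A non-numeric id is rejected with ErrValidation, and a postgres
	// not-found error is surfaced as a 404 via api.NotFound.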
+ Return(&tx, nil) + + router := NewRouter(systemController, auth.NewNoAuth(), "develop", os.Getenv("DEBUG") == "true") + + req := httptest.NewRequest(http.MethodGet, "/xxx/transactions/0", nil) + rec := httptest.NewRecorder() + + router.ServeHTTP(rec, req) + + require.Equal(t, http.StatusOK, rec.Code) + response, _ := api.DecodeSingleResponse[ledger.Transaction](t, rec.Body) + require.Equal(t, tx, response) +} diff --git a/internal/api/v1/controllers_transactions_revert.go b/internal/api/v1/controllers_transactions_revert.go new file mode 100644 index 000000000..611f96e71 --- /dev/null +++ b/internal/api/v1/controllers_transactions_revert.go @@ -0,0 +1,47 @@ +package v1 + +import ( + "net/http" + "strconv" + + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + + "errors" + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/ledger/internal/api/common" + "github.com/go-chi/chi/v5" +) + +func revertTransaction(w http.ResponseWriter, r *http.Request) { + l := common.LedgerFromContext(r.Context()) + + txID, err := strconv.ParseInt(chi.URLParam(r, "id"), 10, 64) + if err != nil { + api.BadRequest(w, ErrValidation, err) + return + } + + ret, err := l.RevertTransaction( + r.Context(), + getCommandParameters(r, ledgercontroller.RevertTransaction{ + Force: api.QueryParamBool(r, "disableChecks"), + AtEffectiveDate: false, + TransactionID: int(txID), + }), + ) + if err != nil { + switch { + case errors.Is(err, &ledgercontroller.ErrInsufficientFunds{}): + api.BadRequest(w, ErrInsufficientFund, err) + case errors.Is(err, ledgercontroller.ErrAlreadyReverted{}): + api.BadRequest(w, ErrAlreadyRevert, err) + case errors.Is(err, ledgercontroller.ErrNotFound): + api.NotFound(w, err) + default: + common.HandleCommonErrors(w, r, err) + } + return + } + + api.Created(w, mapTransactionToV1(ret.RevertTransaction)) +} diff --git a/internal/api/v1/controllers_transactions_revert_test.go b/internal/api/v1/controllers_transactions_revert_test.go new file mode 100644 index 000000000..301fff877 --- /dev/null +++ b/internal/api/v1/controllers_transactions_revert_test.go @@ -0,0 +1,106 @@ +package v1 + +import ( + "math/big" + "net/http" + "net/http/httptest" + "net/url" + "os" + "testing" + + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/auth" + "github.com/formancehq/go-libs/v2/pointer" + ledger "github.com/formancehq/ledger/internal" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" +) + +func TestTransactionsRevert(t *testing.T) { + t.Parallel() + type testCase struct { + name string + queryParams url.Values + returnTx ledger.Transaction + returnErr error + expectForce bool + expectStatusCode int + expectErrorCode string + } + + testCases := []testCase{ + { + name: "nominal", + returnTx: ledger.NewTransaction().WithPostings( + ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), + ), + }, + { + name: "force revert", + returnTx: ledger.NewTransaction().WithPostings( + ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), + ), + expectForce: true, + queryParams: map[string][]string{"disableChecks": {"true"}}, + }, + { + name: "with insufficient fund", + returnErr: &ledgercontroller.ErrInsufficientFunds{}, + expectStatusCode: http.StatusBadRequest, + expectErrorCode: ErrInsufficientFund, + }, + { + name: "with already revert", + returnErr: &ledgercontroller.ErrAlreadyReverted{}, + expectStatusCode: http.StatusBadRequest, + expectErrorCode: 
ErrAlreadyRevert, + }, + { + name: "with transaction not found", + returnErr: ledgercontroller.ErrNotFound, + expectStatusCode: http.StatusNotFound, + expectErrorCode: api.ErrorCodeNotFound, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + systemController, ledgerController := newTestingSystemController(t, true) + ledgerController. + EXPECT(). + RevertTransaction(gomock.Any(), ledgercontroller.Parameters[ledgercontroller.RevertTransaction]{ + Input: ledgercontroller.RevertTransaction{ + Force: tc.expectForce, + }, + }). + Return(pointer.For(ledger.RevertedTransaction{ + RevertTransaction: tc.returnTx, + }), tc.returnErr) + + router := NewRouter(systemController, auth.NewNoAuth(), "develop", os.Getenv("DEBUG") == "true") + + req := httptest.NewRequest(http.MethodPost, "/xxx/transactions/0/revert", nil) + if tc.queryParams != nil { + req.URL.RawQuery = tc.queryParams.Encode() + } + rec := httptest.NewRecorder() + + router.ServeHTTP(rec, req) + + if tc.expectStatusCode == 0 { + require.Equal(t, http.StatusCreated, rec.Code) + tx, ok := api.DecodeSingleResponse[ledger.Transaction](t, rec.Body) + require.True(t, ok) + require.Equal(t, tc.returnTx, tx) + } else { + require.Equal(t, tc.expectStatusCode, rec.Code) + err := api.ErrorResponse{} + api.Decode(t, rec.Body, &err) + require.EqualValues(t, tc.expectErrorCode, err.ErrorCode) + } + }) + } +} diff --git a/internal/api/v1/controllers_transactions_test.go b/internal/api/v1/controllers_transactions_test.go deleted file mode 100644 index 62a0b4b7a..000000000 --- a/internal/api/v1/controllers_transactions_test.go +++ /dev/null @@ -1,655 +0,0 @@ -package v1_test - -import ( - "encoding/json" - "math/big" - "net/http" - "net/http/httptest" - "net/url" - "testing" - - "github.com/formancehq/go-libs/time" - - "github.com/formancehq/go-libs/auth" - "github.com/formancehq/go-libs/bun/bunpaginate" - - sharedapi "github.com/formancehq/go-libs/api" - "github.com/formancehq/go-libs/metadata" - "github.com/formancehq/go-libs/query" - ledger "github.com/formancehq/ledger/internal" - v1 "github.com/formancehq/ledger/internal/api/v1" - "github.com/formancehq/ledger/internal/engine/command" - "github.com/formancehq/ledger/internal/opentelemetry/metrics" - "github.com/formancehq/ledger/internal/storage/ledgerstore" - "github.com/stretchr/testify/require" - "go.uber.org/mock/gomock" -) - -func TestPostTransactions(t *testing.T) { - type testCase struct { - name string - expectedPreview bool - expectedRunScript ledger.RunScript - payload any - expectedStatusCode int - expectedErrorCode string - queryParams url.Values - } - - testCases := []testCase{ - { - name: "using plain numscript", - payload: v1.PostTransactionRequest{ - Script: v1.Script{ - Script: ledger.Script{ - Plain: `XXX`, - }, - }, - }, - expectedRunScript: ledger.RunScript{ - Script: ledger.Script{ - Plain: `XXX`, - Vars: map[string]string{}, - }, - }, - }, - { - name: "using plain numscript with variables", - payload: v1.PostTransactionRequest{ - Script: v1.Script{ - Script: ledger.Script{ - Plain: `vars { - monetary $val - } - - send $val ( - source = @world - destination = @bank - )`, - }, - Vars: map[string]json.RawMessage{ - "val": json.RawMessage(`"USD/2 100"`), - }, - }, - }, - expectedRunScript: ledger.RunScript{ - Script: ledger.Script{ - Plain: `vars { - monetary $val - } - - send $val ( - source = @world - destination = @bank - )`, - Vars: map[string]string{ - "val": "USD/2 100", - }, - }, - }, - }, - { - name: "using plain numscript with 
variables (legacy format)", - payload: v1.PostTransactionRequest{ - Script: v1.Script{ - Script: ledger.Script{ - Plain: `vars { - monetary $val - } - - send $val ( - source = @world - destination = @bank - )`, - }, - Vars: map[string]json.RawMessage{ - "val": json.RawMessage(`{ - "asset": "USD/2", - "amount": 100 - }`), - }, - }, - }, - expectedRunScript: ledger.RunScript{ - Script: ledger.Script{ - Plain: `vars { - monetary $val - } - - send $val ( - source = @world - destination = @bank - )`, - Vars: map[string]string{ - "val": "USD/2 100", - }, - }, - }, - }, - { - name: "using plain numscript and dry run", - payload: v1.PostTransactionRequest{ - Script: v1.Script{ - Script: ledger.Script{ - Plain: `send ( - source = @world - destination = @bank - )`, - }, - }, - }, - expectedRunScript: ledger.RunScript{ - Script: ledger.Script{ - Plain: `send ( - source = @world - destination = @bank - )`, - Vars: map[string]string{}, - }, - }, - expectedPreview: true, - queryParams: url.Values{ - "preview": []string{"true"}, - }, - }, - { - name: "using JSON postings", - payload: v1.PostTransactionRequest{ - Postings: []ledger.Posting{ - ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), - }, - }, - expectedRunScript: ledger.TxToScriptData(ledger.NewTransactionData().WithPostings( - ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), - ), false), - }, - { - name: "using JSON postings and dry run", - queryParams: url.Values{ - "preview": []string{"true"}, - }, - payload: v1.PostTransactionRequest{ - Postings: []ledger.Posting{ - ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), - }, - }, - expectedPreview: true, - expectedRunScript: ledger.TxToScriptData(ledger.NewTransactionData().WithPostings( - ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), - ), false), - }, - { - name: "no postings or script", - payload: v1.PostTransactionRequest{}, - expectedStatusCode: http.StatusBadRequest, - expectedErrorCode: v1.ErrValidation, - }, - { - name: "postings and script", - payload: v1.PostTransactionRequest{ - Postings: ledger.Postings{ - { - Source: "world", - Destination: "alice", - Amount: big.NewInt(100), - Asset: "COIN", - }, - }, - Script: v1.Script{ - Script: ledger.Script{ - Plain: ` - send [COIN 100] ( - source = @world - destination = @bob - )`, - }, - }, - }, - expectedStatusCode: http.StatusBadRequest, - expectedErrorCode: v1.ErrValidation, - }, - { - name: "using invalid body", - payload: "not a valid payload", - expectedStatusCode: http.StatusBadRequest, - expectedErrorCode: v1.ErrValidation, - }, - } - - for _, testCase := range testCases { - tc := testCase - t.Run(tc.name, func(t *testing.T) { - if testCase.expectedStatusCode == 0 { - testCase.expectedStatusCode = http.StatusOK - } - - expectedTx := ledger.NewTransaction().WithPostings( - ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), - ) - - backend, mockLedger := newTestingBackend(t, true) - if testCase.expectedStatusCode < 300 && testCase.expectedStatusCode >= 200 { - mockLedger.EXPECT(). - CreateTransaction(gomock.Any(), command.Parameters{ - DryRun: tc.expectedPreview, - }, testCase.expectedRunScript). 
- Return(expectedTx, nil) - } - - router := v1.NewRouter(backend, nil, metrics.NewNoOpRegistry(), auth.NewNoAuth(), testing.Verbose()) - - req := httptest.NewRequest(http.MethodPost, "/xxx/transactions", sharedapi.Buffer(t, testCase.payload)) - rec := httptest.NewRecorder() - req.URL.RawQuery = testCase.queryParams.Encode() - - router.ServeHTTP(rec, req) - - require.Equal(t, testCase.expectedStatusCode, rec.Code) - if testCase.expectedStatusCode < 300 && testCase.expectedStatusCode >= 200 { - tx, ok := sharedapi.DecodeSingleResponse[[]ledger.Transaction](t, rec.Body) - require.True(t, ok) - require.Equal(t, *expectedTx, tx[0]) - } else { - err := sharedapi.ErrorResponse{} - sharedapi.Decode(t, rec.Body, &err) - require.EqualValues(t, testCase.expectedErrorCode, err.ErrorCode) - } - }) - } -} - -func TestPostTransactionMetadata(t *testing.T) { - t.Parallel() - - type testCase struct { - name string - queryParams url.Values - expectStatusCode int - expectedErrorCode string - body any - } - - testCases := []testCase{ - { - name: "nominal", - body: metadata.Metadata{ - "foo": "bar", - }, - }, - { - name: "invalid body", - body: "invalid - not an object", - expectStatusCode: http.StatusBadRequest, - expectedErrorCode: v1.ErrValidation, - }, - } - for _, testCase := range testCases { - testCase := testCase - t.Run(testCase.name, func(t *testing.T) { - - if testCase.expectStatusCode == 0 { - testCase.expectStatusCode = http.StatusNoContent - } - - backend, mock := newTestingBackend(t, true) - if testCase.expectStatusCode == http.StatusNoContent { - mock.EXPECT(). - SaveMeta(gomock.Any(), command.Parameters{}, ledger.MetaTargetTypeTransaction, big.NewInt(0), testCase.body). - Return(nil) - } - - router := v1.NewRouter(backend, nil, metrics.NewNoOpRegistry(), auth.NewNoAuth(), testing.Verbose()) - - req := httptest.NewRequest(http.MethodPost, "/xxx/transactions/0/metadata", sharedapi.Buffer(t, testCase.body)) - rec := httptest.NewRecorder() - req.URL.RawQuery = testCase.queryParams.Encode() - - router.ServeHTTP(rec, req) - - require.Equal(t, testCase.expectStatusCode, rec.Code) - if testCase.expectStatusCode >= 300 || testCase.expectStatusCode < 200 { - err := sharedapi.ErrorResponse{} - sharedapi.Decode(t, rec.Body, &err) - require.EqualValues(t, testCase.expectedErrorCode, err.ErrorCode) - } - }) - } -} - -func TestGetTransaction(t *testing.T) { - t.Parallel() - - tx := ledger.ExpandTransaction( - ledger.NewTransaction().WithPostings( - ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), - ), - nil, - ) - - backend, mock := newTestingBackend(t, true) - mock.EXPECT(). - GetTransactionWithVolumes(gomock.Any(), ledgerstore.NewGetTransactionQuery(big.NewInt(0))). 
- Return(&tx, nil) - - router := v1.NewRouter(backend, nil, metrics.NewNoOpRegistry(), auth.NewNoAuth(), testing.Verbose()) - - req := httptest.NewRequest(http.MethodGet, "/xxx/transactions/0", nil) - rec := httptest.NewRecorder() - - router.ServeHTTP(rec, req) - - require.Equal(t, http.StatusOK, rec.Code) - response, _ := sharedapi.DecodeSingleResponse[ledger.ExpandedTransaction](t, rec.Body) - require.Equal(t, tx, response) -} - -func TestGetTransactions(t *testing.T) { - t.Parallel() - - type testCase struct { - name string - queryParams url.Values - expectQuery ledgerstore.PaginatedQueryOptions[ledgerstore.PITFilterWithVolumes] - expectStatusCode int - expectedErrorCode string - } - now := time.Now() - - testCases := []testCase{ - { - name: "nominal", - expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}), - }, - { - name: "using metadata", - queryParams: url.Values{ - "metadata[roles]": []string{"admin"}, - }, - expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). - WithQueryBuilder(query.Match("metadata[roles]", "admin")), - }, - { - name: "using startTime", - queryParams: url.Values{ - "start_time": []string{now.Format(time.DateFormat)}, - }, - expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). - WithQueryBuilder(query.Gte("date", now.Format(time.DateFormat))), - }, - { - name: "using endTime", - queryParams: url.Values{ - "end_time": []string{now.Format(time.DateFormat)}, - }, - expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). - WithQueryBuilder(query.Lt("date", now.Format(time.DateFormat))), - }, - { - name: "using account", - queryParams: url.Values{ - "account": []string{"xxx"}, - }, - expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). - WithQueryBuilder(query.Match("account", "xxx")), - }, - { - name: "using reference", - queryParams: url.Values{ - "reference": []string{"xxx"}, - }, - expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). - WithQueryBuilder(query.Match("reference", "xxx")), - }, - { - name: "using destination", - queryParams: url.Values{ - "destination": []string{"xxx"}, - }, - expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). - WithQueryBuilder(query.Match("destination", "xxx")), - }, - { - name: "using source", - queryParams: url.Values{ - "source": []string{"xxx"}, - }, - expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). - WithQueryBuilder(query.Match("source", "xxx")), - }, - { - name: "using empty cursor", - queryParams: url.Values{ - "cursor": []string{bunpaginate.EncodeCursor(ledgerstore.NewGetTransactionsQuery(ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{})))}, - }, - expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}), - }, - { - name: "using invalid cursor", - queryParams: url.Values{ - "cursor": []string{"XXX"}, - }, - expectStatusCode: http.StatusBadRequest, - expectedErrorCode: v1.ErrValidation, - }, - { - name: "invalid page size", - queryParams: url.Values{ - "pageSize": []string{"nan"}, - }, - expectStatusCode: http.StatusBadRequest, - expectedErrorCode: v1.ErrValidation, - }, - { - name: "page size over maximum", - queryParams: url.Values{ - "pageSize": []string{"1000000"}, - }, - expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). 
- WithPageSize(v1.MaxPageSize), - }, - } - for _, testCase := range testCases { - testCase := testCase - t.Run(testCase.name, func(t *testing.T) { - - if testCase.expectStatusCode == 0 { - testCase.expectStatusCode = http.StatusOK - } - - expectedCursor := bunpaginate.Cursor[ledger.ExpandedTransaction]{ - Data: []ledger.ExpandedTransaction{ - ledger.ExpandTransaction( - ledger.NewTransaction().WithPostings( - ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), - ), - nil, - ), - }, - } - - backend, mockLedger := newTestingBackend(t, true) - if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { - mockLedger.EXPECT(). - GetTransactions(gomock.Any(), ledgerstore.NewGetTransactionsQuery(testCase.expectQuery)). - Return(&expectedCursor, nil) - } - - router := v1.NewRouter(backend, nil, metrics.NewNoOpRegistry(), auth.NewNoAuth(), testing.Verbose()) - - req := httptest.NewRequest(http.MethodGet, "/xxx/transactions", nil) - rec := httptest.NewRecorder() - req.URL.RawQuery = testCase.queryParams.Encode() - - router.ServeHTTP(rec, req) - - require.Equal(t, testCase.expectStatusCode, rec.Code) - if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { - cursor := sharedapi.DecodeCursorResponse[ledger.ExpandedTransaction](t, rec.Body) - require.Equal(t, expectedCursor, *cursor) - } else { - err := sharedapi.ErrorResponse{} - sharedapi.Decode(t, rec.Body, &err) - require.EqualValues(t, testCase.expectedErrorCode, err.ErrorCode) - } - }) - } -} - -func TestCountTransactions(t *testing.T) { - t.Parallel() - - type testCase struct { - name string - queryParams url.Values - expectQuery ledgerstore.PaginatedQueryOptions[ledgerstore.PITFilterWithVolumes] - expectStatusCode int - expectedErrorCode string - } - now := time.Now() - - testCases := []testCase{ - { - name: "nominal", - expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}), - }, - { - name: "using metadata", - queryParams: url.Values{ - "metadata[roles]": []string{"admin"}, - }, - expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). - WithQueryBuilder(query.Match("metadata[roles]", "admin")), - }, - { - name: "using startTime", - queryParams: url.Values{ - "start_time": []string{now.Format(time.DateFormat)}, - }, - expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). - WithQueryBuilder(query.Gte("date", now.Format(time.DateFormat))), - }, - { - name: "using endTime", - queryParams: url.Values{ - "end_time": []string{now.Format(time.DateFormat)}, - }, - expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). - WithQueryBuilder(query.Lt("date", now.Format(time.DateFormat))), - }, - { - name: "using account", - queryParams: url.Values{ - "account": []string{"xxx"}, - }, - expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). - WithQueryBuilder(query.Match("account", "xxx")), - }, - { - name: "using reference", - queryParams: url.Values{ - "reference": []string{"xxx"}, - }, - expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). - WithQueryBuilder(query.Match("reference", "xxx")), - }, - { - name: "using destination", - queryParams: url.Values{ - "destination": []string{"xxx"}, - }, - expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). 
- WithQueryBuilder(query.Match("destination", "xxx")), - }, - { - name: "using source", - queryParams: url.Values{ - "source": []string{"xxx"}, - }, - expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}). - WithQueryBuilder(query.Match("source", "xxx")), - }, - } - for _, testCase := range testCases { - testCase := testCase - t.Run(testCase.name, func(t *testing.T) { - - if testCase.expectStatusCode == 0 { - testCase.expectStatusCode = http.StatusNoContent - } - - backend, mockLedger := newTestingBackend(t, true) - if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { - mockLedger.EXPECT(). - CountTransactions(gomock.Any(), ledgerstore.NewGetTransactionsQuery(testCase.expectQuery)). - Return(10, nil) - } - - router := v1.NewRouter(backend, nil, metrics.NewNoOpRegistry(), auth.NewNoAuth(), testing.Verbose()) - - req := httptest.NewRequest(http.MethodHead, "/xxx/transactions", nil) - rec := httptest.NewRecorder() - req.URL.RawQuery = testCase.queryParams.Encode() - - router.ServeHTTP(rec, req) - - require.Equal(t, testCase.expectStatusCode, rec.Code) - if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { - require.Equal(t, "10", rec.Header().Get("Count")) - } else { - err := sharedapi.ErrorResponse{} - sharedapi.Decode(t, rec.Body, &err) - require.EqualValues(t, testCase.expectedErrorCode, err.ErrorCode) - } - }) - } -} - -func TestRevertTransaction(t *testing.T) { - - expectedTx := ledger.NewTransaction().WithPostings( - ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), - ) - - backend, mockLedger := newTestingBackend(t, true) - mockLedger. - EXPECT(). - RevertTransaction(gomock.Any(), command.Parameters{}, big.NewInt(0), false, false). - Return(expectedTx, nil) - - router := v1.NewRouter(backend, nil, metrics.NewNoOpRegistry(), auth.NewNoAuth(), testing.Verbose()) - - req := httptest.NewRequest(http.MethodPost, "/xxx/transactions/0/revert", nil) - rec := httptest.NewRecorder() - - router.ServeHTTP(rec, req) - - require.Equal(t, http.StatusCreated, rec.Code) - tx, ok := sharedapi.DecodeSingleResponse[ledger.Transaction](t, rec.Body) - require.True(t, ok) - require.Equal(t, *expectedTx, tx) -} - -func TestForceRevertTransaction(t *testing.T) { - - expectedTx := ledger.NewTransaction().WithPostings( - ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), - ) - - backend, mockLedger := newTestingBackend(t, true) - mockLedger. - EXPECT(). - RevertTransaction(gomock.Any(), command.Parameters{}, big.NewInt(0), true, false). 
- Return(expectedTx, nil) - - router := v1.NewRouter(backend, nil, metrics.NewNoOpRegistry(), auth.NewNoAuth(), testing.Verbose()) - - req := httptest.NewRequest(http.MethodPost, "/xxx/transactions/0/revert?disableChecks=true", nil) - rec := httptest.NewRecorder() - - router.ServeHTTP(rec, req) - - require.Equal(t, http.StatusCreated, rec.Code) - tx, ok := sharedapi.DecodeSingleResponse[ledger.Transaction](t, rec.Body) - require.True(t, ok) - require.Equal(t, *expectedTx, tx) -} diff --git a/internal/api/v1/errors.go b/internal/api/v1/errors.go index af23a06cb..80bb5828d 100644 --- a/internal/api/v1/errors.go +++ b/internal/api/v1/errors.go @@ -4,6 +4,7 @@ const ( ErrConflict = "CONFLICT" ErrInsufficientFund = "INSUFFICIENT_FUND" ErrValidation = "VALIDATION" + ErrAlreadyRevert = "ALREADY_REVERT" ErrScriptCompilationFailed = "COMPILATION_FAILED" ErrScriptMetadataOverride = "METADATA_OVERRIDE" diff --git a/internal/api/v1/middleware_auto_create_ledger.go b/internal/api/v1/middleware_auto_create_ledger.go index dbc812a71..42b29d843 100644 --- a/internal/api/v1/middleware_auto_create_ledger.go +++ b/internal/api/v1/middleware_auto_create_ledger.go @@ -1,34 +1,47 @@ package v1 import ( + "go.opentelemetry.io/otel/trace" "net/http" - "github.com/go-chi/chi/v5" + "errors" + + "github.com/formancehq/ledger/internal/controller/system" + + "github.com/formancehq/go-libs/v2/platform/postgres" + ledger "github.com/formancehq/ledger/internal" - sharedapi "github.com/formancehq/go-libs/api" - "github.com/formancehq/ledger/internal/api/backend" - "github.com/formancehq/ledger/internal/storage/driver" - "github.com/formancehq/ledger/internal/storage/sqlutils" + "github.com/formancehq/go-libs/v2/api" + "github.com/go-chi/chi/v5" ) -func autoCreateMiddleware(backend backend.Backend) func(handler http.Handler) http.Handler { +func autoCreateMiddleware(backend system.Controller, tracer trace.Tracer) func(handler http.Handler) http.Handler { return func(handler http.Handler) http.Handler { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + ctx, span := tracer.Start(r.Context(), "AutomaticLedgerCreate") + defer span.End() + ledgerName := chi.URLParam(r, "ledger") - if _, err := backend.GetLedger(r.Context(), ledgerName); err != nil { - if !sqlutils.IsNotFoundError(err) { - sharedapi.InternalServerError(w, r, err) + if _, err := backend.GetLedger(ctx, ledgerName); err != nil { + if !postgres.IsNotFoundError(err) { + api.InternalServerError(w, r, err) return } - if err := backend.CreateLedger(r.Context(), ledgerName, driver.LedgerConfiguration{ + if err := backend.CreateLedger(ctx, ledgerName, ledger.Configuration{ Bucket: ledgerName, }); err != nil { - sharedapi.InternalServerError(w, r, err) + switch { + case errors.Is(err, ledger.ErrInvalidLedgerName{}): + api.BadRequest(w, ErrValidation, err) + default: + api.InternalServerError(w, r, err) + } return } } + span.End() handler.ServeHTTP(w, r) }) diff --git a/internal/api/v1/middlewares_metrics.go b/internal/api/v1/middlewares_metrics.go deleted file mode 100644 index dac79cb4a..000000000 --- a/internal/api/v1/middlewares_metrics.go +++ /dev/null @@ -1,55 +0,0 @@ -package v1 - -import ( - "net/http" - - "github.com/go-chi/chi/v5" - - "github.com/formancehq/go-libs/time" - - "github.com/formancehq/ledger/internal/opentelemetry/metrics" - "go.opentelemetry.io/otel/attribute" - "go.opentelemetry.io/otel/metric" -) - -type statusRecorder struct { - http.ResponseWriter - Status int -} - -func newStatusRecorder(w http.ResponseWriter) 
*statusRecorder { - return &statusRecorder{ResponseWriter: w} -} - -func (r *statusRecorder) WriteHeader(status int) { - r.Status = status - r.ResponseWriter.WriteHeader(status) -} - -func MetricsMiddleware(globalMetricsRegistry metrics.GlobalRegistry) func(h http.Handler) http.Handler { - return func(h http.Handler) http.Handler { - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - attrs := []attribute.KeyValue{} - - ctx := r.Context() - name := chi.URLParam(r, "ledger") - if name != "" { - attrs = append(attrs, attribute.String("ledger", name)) - } - - recorder := newStatusRecorder(w) - - start := time.Now() - h.ServeHTTP(recorder, r) - latency := time.Since(start) - - attrs = append(attrs, - attribute.String("route", chi.RouteContext(r.Context()).RoutePattern())) - - globalMetricsRegistry.APILatencies().Record(ctx, latency.Milliseconds(), metric.WithAttributes(attrs...)) - - attrs = append(attrs, attribute.Int("status", recorder.Status)) - globalMetricsRegistry.StatusCodes().Add(ctx, 1, metric.WithAttributes(attrs...)) - }) - } -} diff --git a/internal/api/v1/mocks.go b/internal/api/v1/mocks.go new file mode 100644 index 000000000..f10db2ce4 --- /dev/null +++ b/internal/api/v1/mocks.go @@ -0,0 +1,3 @@ +//go:generate mockgen -write_source_comment=false -write_package_comment=false -source ../../controller/system/controller.go -destination mocks_system_controller_test.go -package v1 --mock_names Controller=SystemController . Controller +//go:generate mockgen -write_source_comment=false -write_package_comment=false -source ../../controller/ledger/controller.go -destination mocks_ledger_controller_test.go -package v1 --mock_names Controller=LedgerController . Controller +package v1 \ No newline at end of file diff --git a/internal/api/v1/mocks_ledger_controller_test.go b/internal/api/v1/mocks_ledger_controller_test.go new file mode 100644 index 000000000..4e7fadbf2 --- /dev/null +++ b/internal/api/v1/mocks_ledger_controller_test.go @@ -0,0 +1,334 @@ +// Code generated by MockGen. DO NOT EDIT. +// +// Generated by this command: +// +// mockgen -write_source_comment=false -write_package_comment=false -source ../../controller/ledger/controller.go -destination mocks_ledger_controller_test.go -package v1 --mock_names Controller=LedgerController . Controller +package v1 + +import ( + context "context" + reflect "reflect" + + bunpaginate "github.com/formancehq/go-libs/v2/bun/bunpaginate" + migrations "github.com/formancehq/go-libs/v2/migrations" + ledger "github.com/formancehq/ledger/internal" + ledger0 "github.com/formancehq/ledger/internal/controller/ledger" + gomock "go.uber.org/mock/gomock" +) + +// LedgerController is a mock of Controller interface. +type LedgerController struct { + ctrl *gomock.Controller + recorder *LedgerControllerMockRecorder +} + +// LedgerControllerMockRecorder is the mock recorder for LedgerController. +type LedgerControllerMockRecorder struct { + mock *LedgerController +} + +// NewLedgerController creates a new mock instance. +func NewLedgerController(ctrl *gomock.Controller) *LedgerController { + mock := &LedgerController{ctrl: ctrl} + mock.recorder = &LedgerControllerMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *LedgerController) EXPECT() *LedgerControllerMockRecorder { + return m.recorder +} + +// CountAccounts mocks base method. 
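[Editor's note, not part of the diff] The new internal/api/v1/mocks.go above makes the v1 package generate its own gomock doubles (LedgerController, SystemController) instead of importing them from internal/api/backend. A minimal sketch of how these mocks are regenerated and consumed follows; the regeneration commands are standard mockgen usage, and the zero-value ListAccountsQuery literal is an assumption, while all other identifiers mirror code visible in this diff.

// Illustrative sketch only -- not part of this commit.
// Regenerate the mocks with:
//   go install go.uber.org/mock/mockgen@latest
//   go generate ./internal/api/v1/...
package v1

import (
	"context"
	"testing"

	ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger"
	"github.com/stretchr/testify/require"
	"go.uber.org/mock/gomock"
)

func TestLedgerControllerMockSketch(t *testing.T) {
	ctrl := gomock.NewController(t)
	// LedgerController is the mock generated from controller/ledger/controller.go above.
	m := NewLedgerController(ctrl)
	m.EXPECT().
		CountAccounts(gomock.Any(), gomock.Any()).
		Return(10, nil)

	// Assumption: the zero value of ListAccountsQuery is an acceptable argument here.
	count, err := m.CountAccounts(context.Background(), ledgercontroller.ListAccountsQuery{})
	require.NoError(t, err)
	require.Equal(t, 10, count)
}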
+func (m *LedgerController) CountAccounts(ctx context.Context, query ledger0.ListAccountsQuery) (int, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CountAccounts", ctx, query) + ret0, _ := ret[0].(int) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// CountAccounts indicates an expected call of CountAccounts. +func (mr *LedgerControllerMockRecorder) CountAccounts(ctx, query any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CountAccounts", reflect.TypeOf((*LedgerController)(nil).CountAccounts), ctx, query) +} + +// CountTransactions mocks base method. +func (m *LedgerController) CountTransactions(ctx context.Context, query ledger0.ListTransactionsQuery) (int, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CountTransactions", ctx, query) + ret0, _ := ret[0].(int) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// CountTransactions indicates an expected call of CountTransactions. +func (mr *LedgerControllerMockRecorder) CountTransactions(ctx, query any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CountTransactions", reflect.TypeOf((*LedgerController)(nil).CountTransactions), ctx, query) +} + +// CreateTransaction mocks base method. +func (m *LedgerController) CreateTransaction(ctx context.Context, parameters ledger0.Parameters[ledger0.RunScript]) (*ledger.CreatedTransaction, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CreateTransaction", ctx, parameters) + ret0, _ := ret[0].(*ledger.CreatedTransaction) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// CreateTransaction indicates an expected call of CreateTransaction. +func (mr *LedgerControllerMockRecorder) CreateTransaction(ctx, parameters any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateTransaction", reflect.TypeOf((*LedgerController)(nil).CreateTransaction), ctx, parameters) +} + +// DeleteAccountMetadata mocks base method. +func (m *LedgerController) DeleteAccountMetadata(ctx context.Context, parameters ledger0.Parameters[ledger0.DeleteAccountMetadata]) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "DeleteAccountMetadata", ctx, parameters) + ret0, _ := ret[0].(error) + return ret0 +} + +// DeleteAccountMetadata indicates an expected call of DeleteAccountMetadata. +func (mr *LedgerControllerMockRecorder) DeleteAccountMetadata(ctx, parameters any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteAccountMetadata", reflect.TypeOf((*LedgerController)(nil).DeleteAccountMetadata), ctx, parameters) +} + +// DeleteTransactionMetadata mocks base method. +func (m *LedgerController) DeleteTransactionMetadata(ctx context.Context, parameters ledger0.Parameters[ledger0.DeleteTransactionMetadata]) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "DeleteTransactionMetadata", ctx, parameters) + ret0, _ := ret[0].(error) + return ret0 +} + +// DeleteTransactionMetadata indicates an expected call of DeleteTransactionMetadata. +func (mr *LedgerControllerMockRecorder) DeleteTransactionMetadata(ctx, parameters any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteTransactionMetadata", reflect.TypeOf((*LedgerController)(nil).DeleteTransactionMetadata), ctx, parameters) +} + +// Export mocks base method. 
+func (m *LedgerController) Export(ctx context.Context, w ledger0.ExportWriter) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Export", ctx, w) + ret0, _ := ret[0].(error) + return ret0 +} + +// Export indicates an expected call of Export. +func (mr *LedgerControllerMockRecorder) Export(ctx, w any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Export", reflect.TypeOf((*LedgerController)(nil).Export), ctx, w) +} + +// GetAccount mocks base method. +func (m *LedgerController) GetAccount(ctx context.Context, query ledger0.GetAccountQuery) (*ledger.Account, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetAccount", ctx, query) + ret0, _ := ret[0].(*ledger.Account) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetAccount indicates an expected call of GetAccount. +func (mr *LedgerControllerMockRecorder) GetAccount(ctx, query any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAccount", reflect.TypeOf((*LedgerController)(nil).GetAccount), ctx, query) +} + +// GetAggregatedBalances mocks base method. +func (m *LedgerController) GetAggregatedBalances(ctx context.Context, q ledger0.GetAggregatedBalanceQuery) (ledger.BalancesByAssets, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetAggregatedBalances", ctx, q) + ret0, _ := ret[0].(ledger.BalancesByAssets) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetAggregatedBalances indicates an expected call of GetAggregatedBalances. +func (mr *LedgerControllerMockRecorder) GetAggregatedBalances(ctx, q any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAggregatedBalances", reflect.TypeOf((*LedgerController)(nil).GetAggregatedBalances), ctx, q) +} + +// GetMigrationsInfo mocks base method. +func (m *LedgerController) GetMigrationsInfo(ctx context.Context) ([]migrations.Info, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetMigrationsInfo", ctx) + ret0, _ := ret[0].([]migrations.Info) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetMigrationsInfo indicates an expected call of GetMigrationsInfo. +func (mr *LedgerControllerMockRecorder) GetMigrationsInfo(ctx any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetMigrationsInfo", reflect.TypeOf((*LedgerController)(nil).GetMigrationsInfo), ctx) +} + +// GetStats mocks base method. +func (m *LedgerController) GetStats(ctx context.Context) (ledger0.Stats, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetStats", ctx) + ret0, _ := ret[0].(ledger0.Stats) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetStats indicates an expected call of GetStats. +func (mr *LedgerControllerMockRecorder) GetStats(ctx any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetStats", reflect.TypeOf((*LedgerController)(nil).GetStats), ctx) +} + +// GetTransaction mocks base method. +func (m *LedgerController) GetTransaction(ctx context.Context, query ledger0.GetTransactionQuery) (*ledger.Transaction, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetTransaction", ctx, query) + ret0, _ := ret[0].(*ledger.Transaction) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetTransaction indicates an expected call of GetTransaction. 
+func (mr *LedgerControllerMockRecorder) GetTransaction(ctx, query any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTransaction", reflect.TypeOf((*LedgerController)(nil).GetTransaction), ctx, query) +} + +// GetVolumesWithBalances mocks base method. +func (m *LedgerController) GetVolumesWithBalances(ctx context.Context, q ledger0.GetVolumesWithBalancesQuery) (*bunpaginate.Cursor[ledger.VolumesWithBalanceByAssetByAccount], error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetVolumesWithBalances", ctx, q) + ret0, _ := ret[0].(*bunpaginate.Cursor[ledger.VolumesWithBalanceByAssetByAccount]) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetVolumesWithBalances indicates an expected call of GetVolumesWithBalances. +func (mr *LedgerControllerMockRecorder) GetVolumesWithBalances(ctx, q any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetVolumesWithBalances", reflect.TypeOf((*LedgerController)(nil).GetVolumesWithBalances), ctx, q) +} + +// Import mocks base method. +func (m *LedgerController) Import(ctx context.Context, stream chan ledger.Log) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Import", ctx, stream) + ret0, _ := ret[0].(error) + return ret0 +} + +// Import indicates an expected call of Import. +func (mr *LedgerControllerMockRecorder) Import(ctx, stream any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Import", reflect.TypeOf((*LedgerController)(nil).Import), ctx, stream) +} + +// IsDatabaseUpToDate mocks base method. +func (m *LedgerController) IsDatabaseUpToDate(ctx context.Context) (bool, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "IsDatabaseUpToDate", ctx) + ret0, _ := ret[0].(bool) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// IsDatabaseUpToDate indicates an expected call of IsDatabaseUpToDate. +func (mr *LedgerControllerMockRecorder) IsDatabaseUpToDate(ctx any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "IsDatabaseUpToDate", reflect.TypeOf((*LedgerController)(nil).IsDatabaseUpToDate), ctx) +} + +// ListAccounts mocks base method. +func (m *LedgerController) ListAccounts(ctx context.Context, query ledger0.ListAccountsQuery) (*bunpaginate.Cursor[ledger.Account], error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ListAccounts", ctx, query) + ret0, _ := ret[0].(*bunpaginate.Cursor[ledger.Account]) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ListAccounts indicates an expected call of ListAccounts. +func (mr *LedgerControllerMockRecorder) ListAccounts(ctx, query any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListAccounts", reflect.TypeOf((*LedgerController)(nil).ListAccounts), ctx, query) +} + +// ListLogs mocks base method. +func (m *LedgerController) ListLogs(ctx context.Context, query ledger0.GetLogsQuery) (*bunpaginate.Cursor[ledger.Log], error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ListLogs", ctx, query) + ret0, _ := ret[0].(*bunpaginate.Cursor[ledger.Log]) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ListLogs indicates an expected call of ListLogs. +func (mr *LedgerControllerMockRecorder) ListLogs(ctx, query any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListLogs", reflect.TypeOf((*LedgerController)(nil).ListLogs), ctx, query) +} + +// ListTransactions mocks base method. 
+func (m *LedgerController) ListTransactions(ctx context.Context, query ledger0.ListTransactionsQuery) (*bunpaginate.Cursor[ledger.Transaction], error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ListTransactions", ctx, query) + ret0, _ := ret[0].(*bunpaginate.Cursor[ledger.Transaction]) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ListTransactions indicates an expected call of ListTransactions. +func (mr *LedgerControllerMockRecorder) ListTransactions(ctx, query any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListTransactions", reflect.TypeOf((*LedgerController)(nil).ListTransactions), ctx, query) +} + +// RevertTransaction mocks base method. +func (m *LedgerController) RevertTransaction(ctx context.Context, parameters ledger0.Parameters[ledger0.RevertTransaction]) (*ledger.RevertedTransaction, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "RevertTransaction", ctx, parameters) + ret0, _ := ret[0].(*ledger.RevertedTransaction) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// RevertTransaction indicates an expected call of RevertTransaction. +func (mr *LedgerControllerMockRecorder) RevertTransaction(ctx, parameters any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RevertTransaction", reflect.TypeOf((*LedgerController)(nil).RevertTransaction), ctx, parameters) +} + +// SaveAccountMetadata mocks base method. +func (m *LedgerController) SaveAccountMetadata(ctx context.Context, parameters ledger0.Parameters[ledger0.SaveAccountMetadata]) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "SaveAccountMetadata", ctx, parameters) + ret0, _ := ret[0].(error) + return ret0 +} + +// SaveAccountMetadata indicates an expected call of SaveAccountMetadata. +func (mr *LedgerControllerMockRecorder) SaveAccountMetadata(ctx, parameters any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SaveAccountMetadata", reflect.TypeOf((*LedgerController)(nil).SaveAccountMetadata), ctx, parameters) +} + +// SaveTransactionMetadata mocks base method. +func (m *LedgerController) SaveTransactionMetadata(ctx context.Context, parameters ledger0.Parameters[ledger0.SaveTransactionMetadata]) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "SaveTransactionMetadata", ctx, parameters) + ret0, _ := ret[0].(error) + return ret0 +} + +// SaveTransactionMetadata indicates an expected call of SaveTransactionMetadata. +func (mr *LedgerControllerMockRecorder) SaveTransactionMetadata(ctx, parameters any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SaveTransactionMetadata", reflect.TypeOf((*LedgerController)(nil).SaveTransactionMetadata), ctx, parameters) +} diff --git a/internal/api/v1/mocks_system_controller_test.go b/internal/api/v1/mocks_system_controller_test.go new file mode 100644 index 000000000..1ad57614e --- /dev/null +++ b/internal/api/v1/mocks_system_controller_test.go @@ -0,0 +1,126 @@ +// Code generated by MockGen. DO NOT EDIT. +// +// Generated by this command: +// +// mockgen -write_source_comment=false -write_package_comment=false -source ../../controller/system/controller.go -destination mocks_system_controller_test.go -package v1 --mock_names Controller=SystemController . 
Controller +package v1 + +import ( + context "context" + reflect "reflect" + + bunpaginate "github.com/formancehq/go-libs/v2/bun/bunpaginate" + ledger "github.com/formancehq/ledger/internal" + ledger0 "github.com/formancehq/ledger/internal/controller/ledger" + gomock "go.uber.org/mock/gomock" +) + +// SystemController is a mock of Controller interface. +type SystemController struct { + ctrl *gomock.Controller + recorder *SystemControllerMockRecorder +} + +// SystemControllerMockRecorder is the mock recorder for SystemController. +type SystemControllerMockRecorder struct { + mock *SystemController +} + +// NewSystemController creates a new mock instance. +func NewSystemController(ctrl *gomock.Controller) *SystemController { + mock := &SystemController{ctrl: ctrl} + mock.recorder = &SystemControllerMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *SystemController) EXPECT() *SystemControllerMockRecorder { + return m.recorder +} + +// CreateLedger mocks base method. +func (m *SystemController) CreateLedger(ctx context.Context, name string, configuration ledger.Configuration) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CreateLedger", ctx, name, configuration) + ret0, _ := ret[0].(error) + return ret0 +} + +// CreateLedger indicates an expected call of CreateLedger. +func (mr *SystemControllerMockRecorder) CreateLedger(ctx, name, configuration any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateLedger", reflect.TypeOf((*SystemController)(nil).CreateLedger), ctx, name, configuration) +} + +// DeleteLedgerMetadata mocks base method. +func (m *SystemController) DeleteLedgerMetadata(ctx context.Context, param, key string) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "DeleteLedgerMetadata", ctx, param, key) + ret0, _ := ret[0].(error) + return ret0 +} + +// DeleteLedgerMetadata indicates an expected call of DeleteLedgerMetadata. +func (mr *SystemControllerMockRecorder) DeleteLedgerMetadata(ctx, param, key any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteLedgerMetadata", reflect.TypeOf((*SystemController)(nil).DeleteLedgerMetadata), ctx, param, key) +} + +// GetLedger mocks base method. +func (m *SystemController) GetLedger(ctx context.Context, name string) (*ledger.Ledger, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetLedger", ctx, name) + ret0, _ := ret[0].(*ledger.Ledger) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetLedger indicates an expected call of GetLedger. +func (mr *SystemControllerMockRecorder) GetLedger(ctx, name any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetLedger", reflect.TypeOf((*SystemController)(nil).GetLedger), ctx, name) +} + +// GetLedgerController mocks base method. +func (m *SystemController) GetLedgerController(ctx context.Context, name string) (ledger0.Controller, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetLedgerController", ctx, name) + ret0, _ := ret[0].(ledger0.Controller) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetLedgerController indicates an expected call of GetLedgerController. 
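[Editor's note, not part of the diff] To make concrete how this generated SystemController mock composes with the reworked autoCreateMiddleware earlier in the diff, here is an illustrative test sketch. It only exercises the "ledger already exists" path, uses the noop tracer exactly as routes.go does, and assumes nothing beyond the identifiers shown in this diff.

// Illustrative sketch only -- not part of this commit.
package v1

import (
	"net/http"
	"net/http/httptest"
	"testing"

	ledger "github.com/formancehq/ledger/internal"
	"github.com/go-chi/chi/v5"
	"github.com/stretchr/testify/require"
	nooptracer "go.opentelemetry.io/otel/trace/noop"
	"go.uber.org/mock/gomock"
)

func TestAutoCreateMiddlewareSketch(t *testing.T) {
	ctrl := gomock.NewController(t)
	systemController := NewSystemController(ctrl)

	// The ledger already exists, so the middleware must not call CreateLedger.
	systemController.EXPECT().
		GetLedger(gomock.Any(), "quickstart").
		Return(&ledger.Ledger{}, nil)

	router := chi.NewRouter()
	router.With(autoCreateMiddleware(systemController, nooptracer.Tracer{})).
		Get("/{ledger}/_info", func(w http.ResponseWriter, r *http.Request) {
			w.WriteHeader(http.StatusOK)
		})

	rec := httptest.NewRecorder()
	router.ServeHTTP(rec, httptest.NewRequest(http.MethodGet, "/quickstart/_info", nil))
	require.Equal(t, http.StatusOK, rec.Code)
}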
+func (mr *SystemControllerMockRecorder) GetLedgerController(ctx, name any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetLedgerController", reflect.TypeOf((*SystemController)(nil).GetLedgerController), ctx, name) +} + +// ListLedgers mocks base method. +func (m *SystemController) ListLedgers(ctx context.Context, query ledger0.ListLedgersQuery) (*bunpaginate.Cursor[ledger.Ledger], error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ListLedgers", ctx, query) + ret0, _ := ret[0].(*bunpaginate.Cursor[ledger.Ledger]) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ListLedgers indicates an expected call of ListLedgers. +func (mr *SystemControllerMockRecorder) ListLedgers(ctx, query any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListLedgers", reflect.TypeOf((*SystemController)(nil).ListLedgers), ctx, query) +} + +// UpdateLedgerMetadata mocks base method. +func (m_2 *SystemController) UpdateLedgerMetadata(ctx context.Context, name string, m map[string]string) error { + m_2.ctrl.T.Helper() + ret := m_2.ctrl.Call(m_2, "UpdateLedgerMetadata", ctx, name, m) + ret0, _ := ret[0].(error) + return ret0 +} + +// UpdateLedgerMetadata indicates an expected call of UpdateLedgerMetadata. +func (mr *SystemControllerMockRecorder) UpdateLedgerMetadata(ctx, name, m any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateLedgerMetadata", reflect.TypeOf((*SystemController)(nil).UpdateLedgerMetadata), ctx, name, m) +} diff --git a/internal/api/v1/query.go b/internal/api/v1/query.go index d8d83736b..695f1e2e8 100644 --- a/internal/api/v1/query.go +++ b/internal/api/v1/query.go @@ -12,12 +12,12 @@ const ( QueryKeyBalanceOperator = "balanceOperator" ) -func getBalanceOperator(c *http.Request) (string, error) { +func getBalanceOperator(c *http.Request) string { balanceOperator := "eq" balanceOperatorStr := c.URL.Query().Get(QueryKeyBalanceOperator) if balanceOperatorStr != "" { - return balanceOperatorStr, nil + return balanceOperatorStr } - return balanceOperator, nil + return balanceOperator } diff --git a/internal/api/v1/routes.go b/internal/api/v1/routes.go index 0bfdf6aba..9ac88edac 100644 --- a/internal/api/v1/routes.go +++ b/internal/api/v1/routes.go @@ -1,44 +1,38 @@ package v1 import ( + "go.opentelemetry.io/otel/trace" + nooptracer "go.opentelemetry.io/otel/trace/noop" "net/http" - "github.com/go-chi/chi/v5" + "github.com/formancehq/ledger/internal/controller/system" - "github.com/formancehq/go-libs/service" + "github.com/formancehq/go-libs/v2/service" - "github.com/formancehq/go-libs/auth" - "github.com/formancehq/go-libs/health" - "github.com/formancehq/ledger/internal/api/backend" - "github.com/formancehq/ledger/internal/opentelemetry/metrics" - "github.com/go-chi/chi/v5/middleware" - "github.com/go-chi/cors" + "github.com/formancehq/go-libs/v2/auth" + "github.com/formancehq/ledger/internal/api/common" + "github.com/go-chi/chi/v5" ) func NewRouter( - b backend.Backend, - healthController *health.HealthController, - globalMetricsRegistry metrics.GlobalRegistry, + systemController system.Controller, authenticator auth.Authenticator, + version string, debug bool, + opts ...RouterOption, ) chi.Router { - router := chi.NewMux() - router.Use( - cors.New(cors.Options{ - AllowOriginFunc: func(r *http.Request, origin string) bool { - return true - }, - AllowCredentials: true, - }).Handler, - MetricsMiddleware(globalMetricsRegistry), - 
middleware.Recoverer, - ) + routerOptions := &routerOptions{} + for _, opt := range append(defaultRouterOptions, opts...) { + opt(routerOptions) + } - router.Get("/_healthcheck", healthController.Check) - router.Get("/_info", getInfo(b)) + router := chi.NewMux() + + router.Get("/_info", getInfo(systemController, version)) router.Group(func(router chi.Router) { + router.Use(routerOptions.middlewares...) router.Use(auth.Middleware(authenticator)) router.Use(service.OTLPMiddleware("ledger", debug)) @@ -48,8 +42,10 @@ func NewRouter( handler.ServeHTTP(w, r) }) }) - router.Use(autoCreateMiddleware(b)) - router.Use(backend.LedgerMiddleware(b, []string{"/_info"})) + router.Use(autoCreateMiddleware(systemController, routerOptions.tracer)) + router.Use(common.LedgerMiddleware(systemController, func(r *http.Request) string { + return chi.URLParam(r, "ledger") + }, routerOptions.tracer, "/_info")) // LedgerController router.Get("/_info", getLedgerInfo) @@ -57,24 +53,24 @@ func NewRouter( router.Get("/logs", getLogs) // AccountController - router.Get("/accounts", getAccounts) + router.Get("/accounts", listAccounts) router.Head("/accounts", countAccounts) router.Get("/accounts/{address}", getAccount) - router.Post("/accounts/{address}/metadata", postAccountMetadata) + router.Post("/accounts/{address}/metadata", addAccountMetadata) router.Delete("/accounts/{address}/metadata/{key}", deleteAccountMetadata) // TransactionController - router.Get("/transactions", getTransactions) + router.Get("/transactions", listTransactions) router.Head("/transactions", countTransactions) - router.Post("/transactions", postTransaction) - router.Post("/transactions/batch", func(w http.ResponseWriter, r *http.Request) { + router.Post("/transactions", createTransaction) + router.Post("/transactions/batch", func(w http.ResponseWriter, _ *http.Request) { http.Error(w, "not supported", http.StatusBadRequest) }) - router.Get("/transactions/{id}", getTransaction) + router.Get("/transactions/{id}", readTransaction) router.Post("/transactions/{id}/revert", revertTransaction) - router.Post("/transactions/{id}/metadata", postTransactionMetadata) + router.Post("/transactions/{id}/metadata", addTransactionMetadata) router.Delete("/transactions/{id}/metadata/{key}", deleteTransactionMetadata) router.Get("/balances", getBalances) @@ -84,3 +80,26 @@ func NewRouter( return router } + +type routerOptions struct { + tracer trace.Tracer + middlewares []func(handler http.Handler) http.Handler +} + +type RouterOption func(ro *routerOptions) + +func WithTracer(tracer trace.Tracer) RouterOption { + return func(ro *routerOptions) { + ro.tracer = tracer + } +} + +func WithMiddlewares(handlers ...func(http.Handler) http.Handler) RouterOption { + return func(ro *routerOptions) { + ro.middlewares = append(ro.middlewares, handlers...) 
+ } +} + +var defaultRouterOptions = []RouterOption{ + WithTracer(nooptracer.Tracer{}), +} diff --git a/internal/api/v1/utils.go b/internal/api/v1/utils.go index b14114246..f94d5cb07 100644 --- a/internal/api/v1/utils.go +++ b/internal/api/v1/utils.go @@ -4,37 +4,37 @@ import ( "net/http" "strings" - "github.com/formancehq/go-libs/time" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" - "github.com/formancehq/go-libs/bun/bunpaginate" + "github.com/formancehq/go-libs/v2/time" - "github.com/formancehq/go-libs/collectionutils" - "github.com/formancehq/go-libs/pointer" - "github.com/formancehq/go-libs/query" - "github.com/formancehq/ledger/internal/engine/command" - "github.com/formancehq/ledger/internal/storage/ledgerstore" + "github.com/formancehq/go-libs/v2/bun/bunpaginate" + + "github.com/formancehq/go-libs/v2/collectionutils" + "github.com/formancehq/go-libs/v2/pointer" + "github.com/formancehq/go-libs/v2/query" ) -func getPITFilter(r *http.Request) (*ledgerstore.PITFilter, error) { +func getPITFilter(r *http.Request) (*ledgercontroller.PITFilter, error) { pitString := r.URL.Query().Get("pit") if pitString == "" { - return &ledgerstore.PITFilter{}, nil + return &ledgercontroller.PITFilter{}, nil } pit, err := time.ParseTime(pitString) if err != nil { return nil, err } - return &ledgerstore.PITFilter{ + return &ledgercontroller.PITFilter{ PIT: &pit, }, nil } -func getPITFilterWithVolumes(r *http.Request) (*ledgerstore.PITFilterWithVolumes, error) { +func getPITFilterWithVolumes(r *http.Request) (*ledgercontroller.PITFilterWithVolumes, error) { pit, err := getPITFilter(r) if err != nil { return nil, err } - return &ledgerstore.PITFilterWithVolumes{ + return &ledgercontroller.PITFilterWithVolumes{ PITFilter: *pit, ExpandVolumes: collectionutils.Contains(r.URL.Query()["expand"], "volumes"), ExpandEffectiveVolumes: collectionutils.Contains(r.URL.Query()["expand"], "effectiveVolumes"), @@ -45,7 +45,7 @@ func getQueryBuilder(r *http.Request) (query.Builder, error) { return query.ParseJSON(r.URL.Query().Get("query")) } -func getPaginatedQueryOptionsOfPITFilterWithVolumes(r *http.Request) (*ledgerstore.PaginatedQueryOptions[ledgerstore.PITFilterWithVolumes], error) { +func getPaginatedQueryOptionsOfPITFilterWithVolumes(r *http.Request) (*ledgercontroller.PaginatedQueryOptions[ledgercontroller.PITFilterWithVolumes], error) { qb, err := getQueryBuilder(r) if err != nil { return nil, err @@ -61,19 +61,20 @@ func getPaginatedQueryOptionsOfPITFilterWithVolumes(r *http.Request) (*ledgersto return nil, err } - return pointer.For(ledgerstore.NewPaginatedQueryOptions(*pitFilter). + return pointer.For(ledgercontroller.NewPaginatedQueryOptions(*pitFilter). WithQueryBuilder(qb). 
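[Editor's note, not part of the diff] The routes.go hunk above replaces the fixed NewRouter parameter list with functional options (WithTracer, WithMiddlewares, defaultRouterOptions). As a hedged illustration of how a caller might now wire the v1 router: the package name, the "develop" version string, and the systemController wiring are placeholders; the option helpers, parameter order, and import paths follow the diff.

// Illustrative sketch only -- not part of this commit.
package wiring

import (
	"net/http"

	"github.com/formancehq/go-libs/v2/auth"
	v1 "github.com/formancehq/ledger/internal/api/v1"
	"github.com/formancehq/ledger/internal/controller/system"
	"github.com/go-chi/chi/v5/middleware"
	"go.opentelemetry.io/otel"
)

// buildV1Router shows the new options-based construction; systemController is
// assumed to come from the application container.
func buildV1Router(systemController system.Controller) http.Handler {
	return v1.NewRouter(
		systemController,
		auth.NewNoAuth(),
		"develop", // version string reported by /_info
		false,     // debug
		v1.WithTracer(otel.Tracer("ledger.api.v1")),
		v1.WithMiddlewares(middleware.Recoverer),
	)
}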
WithPageSize(pageSize)), nil } -func getCommandParameters(r *http.Request) command.Parameters { +func getCommandParameters[INPUT any](r *http.Request, input INPUT) ledgercontroller.Parameters[INPUT] { dryRunAsString := r.URL.Query().Get("preview") dryRun := strings.ToUpper(dryRunAsString) == "YES" || strings.ToUpper(dryRunAsString) == "TRUE" || dryRunAsString == "1" idempotencyKey := r.Header.Get("Idempotency-Key") - return command.Parameters{ + return ledgercontroller.Parameters[INPUT]{ DryRun: dryRun, IdempotencyKey: idempotencyKey, + Input: input, } } diff --git a/internal/api/v2/api_utils_test.go b/internal/api/v2/api_utils_test.go index ecc4aa0fc..a73a084bb 100644 --- a/internal/api/v2/api_utils_test.go +++ b/internal/api/v2/api_utils_test.go @@ -1,20 +1,18 @@ -package v2_test +package v2 import ( "testing" "go.uber.org/mock/gomock" - - "github.com/formancehq/ledger/internal/api/backend" ) -func newTestingBackend(t *testing.T, expectedSchemaCheck bool) (*backend.MockBackend, *backend.MockLedger) { +func newTestingSystemController(t *testing.T, expectedSchemaCheck bool) (*SystemController, *LedgerController) { ctrl := gomock.NewController(t) - mockLedger := backend.NewMockLedger(ctrl) - backend := backend.NewMockBackend(ctrl) + mockLedger := NewLedgerController(ctrl) + backend := NewSystemController(ctrl) backend. EXPECT(). - GetLedgerEngine(gomock.Any(), gomock.Any()). + GetLedgerController(gomock.Any(), gomock.Any()). MinTimes(0). Return(mockLedger, nil) t.Cleanup(func() { diff --git a/internal/api/v2/bulk.go b/internal/api/v2/bulk.go deleted file mode 100644 index c7c637060..000000000 --- a/internal/api/v2/bulk.go +++ /dev/null @@ -1,209 +0,0 @@ -package v2 - -import ( - "context" - "encoding/json" - "fmt" - "math/big" - - "github.com/formancehq/ledger/internal/opentelemetry/tracer" - - sharedapi "github.com/formancehq/go-libs/api" - - "github.com/formancehq/ledger/internal/engine" - "github.com/formancehq/ledger/internal/machine" - - "github.com/formancehq/go-libs/metadata" - ledger "github.com/formancehq/ledger/internal" - "github.com/formancehq/ledger/internal/api/backend" - "github.com/formancehq/ledger/internal/engine/command" -) - -const ( - ActionCreateTransaction = "CREATE_TRANSACTION" - ActionAddMetadata = "ADD_METADATA" - ActionRevertTransaction = "REVERT_TRANSACTION" - ActionDeleteMetadata = "DELETE_METADATA" -) - -type Bulk []Element - -type Element struct { - Action string `json:"action"` - IdempotencyKey string `json:"ik"` - Data json.RawMessage `json:"data"` -} - -type Result struct { - ErrorCode string `json:"errorCode,omitempty"` - ErrorDescription string `json:"errorDescription,omitempty"` - ErrorDetails string `json:"errorDetails,omitempty"` - Data any `json:"data,omitempty"` - ResponseType string `json:"responseType"` // Added for sdk generation (discriminator in oneOf) -} - -func ProcessBulk(ctx context.Context, l backend.Ledger, bulk Bulk, continueOnFailure bool) ([]Result, bool, error) { - - ctx, span := tracer.Start(ctx, "Bulk") - defer span.End() - - ret := make([]Result, 0, len(bulk)) - - errorsInBulk := false - var bulkError = func(action, code string, err error) { - ret = append(ret, Result{ - ErrorCode: code, - ErrorDescription: err.Error(), - ResponseType: "ERROR", - }) - errorsInBulk = true - } - - for i, element := range bulk { - parameters := command.Parameters{ - DryRun: false, - IdempotencyKey: element.IdempotencyKey, - } - - switch element.Action { - case ActionCreateTransaction: - req := &ledger.TransactionRequest{} - if err := 
json.Unmarshal(element.Data, req); err != nil { - return nil, errorsInBulk, fmt.Errorf("error parsing element %d: %s", i, err) - } - rs := req.ToRunScript() - - tx, err := l.CreateTransaction(ctx, parameters, *rs) - if err != nil { - var code string - switch { - case machine.IsInsufficientFundError(err): - code = ErrInsufficientFund - case engine.IsCommandError(err): - code = ErrValidation - default: - code = sharedapi.ErrorInternal - } - bulkError(element.Action, code, err) - if !continueOnFailure { - return ret, errorsInBulk, nil - } - } else { - ret = append(ret, Result{ - Data: tx, - ResponseType: element.Action, - }) - } - case ActionAddMetadata: - type addMetadataRequest struct { - TargetType string `json:"targetType"` - TargetID json.RawMessage `json:"targetId"` - Metadata metadata.Metadata `json:"metadata"` - } - req := &addMetadataRequest{} - if err := json.Unmarshal(element.Data, req); err != nil { - return nil, errorsInBulk, fmt.Errorf("error parsing element %d: %s", i, err) - } - - var targetID any - switch req.TargetType { - case ledger.MetaTargetTypeAccount: - targetID = "" - case ledger.MetaTargetTypeTransaction: - targetID = big.NewInt(0) - } - if err := json.Unmarshal(req.TargetID, &targetID); err != nil { - return nil, errorsInBulk, err - } - - if err := l.SaveMeta(ctx, parameters, req.TargetType, targetID, req.Metadata); err != nil { - var code string - switch { - case command.IsSaveMetaError(err, command.ErrSaveMetaCodeTransactionNotFound): - code = sharedapi.ErrorCodeNotFound - default: - code = sharedapi.ErrorInternal - } - bulkError(element.Action, code, err) - if !continueOnFailure { - return ret, errorsInBulk, nil - } - } else { - ret = append(ret, Result{ - ResponseType: element.Action, - }) - } - case ActionRevertTransaction: - type revertTransactionRequest struct { - ID *big.Int `json:"id"` - Force bool `json:"force"` - AtEffectiveDate bool `json:"atEffectiveDate"` - } - req := &revertTransactionRequest{} - if err := json.Unmarshal(element.Data, req); err != nil { - return nil, errorsInBulk, fmt.Errorf("error parsing element %d: %s", i, err) - } - - tx, err := l.RevertTransaction(ctx, parameters, req.ID, req.Force, req.AtEffectiveDate) - if err != nil { - var code string - switch { - case engine.IsCommandError(err): - code = ErrValidation - default: - code = sharedapi.ErrorInternal - } - bulkError(element.Action, code, err) - if !continueOnFailure { - return ret, errorsInBulk, nil - } - } else { - ret = append(ret, Result{ - Data: tx, - ResponseType: element.Action, - }) - } - case ActionDeleteMetadata: - type deleteMetadataRequest struct { - TargetType string `json:"targetType"` - TargetID json.RawMessage `json:"targetId"` - Key string `json:"key"` - } - req := &deleteMetadataRequest{} - if err := json.Unmarshal(element.Data, req); err != nil { - return nil, errorsInBulk, fmt.Errorf("error parsing element %d: %s", i, err) - } - - var targetID any - switch req.TargetType { - case ledger.MetaTargetTypeAccount: - targetID = "" - case ledger.MetaTargetTypeTransaction: - targetID = big.NewInt(0) - } - if err := json.Unmarshal(req.TargetID, &targetID); err != nil { - return nil, errorsInBulk, err - } - - err := l.DeleteMetadata(ctx, parameters, req.TargetType, targetID, req.Key) - if err != nil { - var code string - switch { - case command.IsDeleteMetaError(err, command.ErrSaveMetaCodeTransactionNotFound): - code = sharedapi.ErrorCodeNotFound - default: - code = sharedapi.ErrorInternal - } - bulkError(element.Action, code, err) - if !continueOnFailure { - return ret, 
errorsInBulk, nil - } - } else { - ret = append(ret, Result{ - ResponseType: element.Action, - }) - } - } - } - return ret, errorsInBulk, nil -} diff --git a/internal/api/v2/common.go b/internal/api/v2/common.go new file mode 100644 index 000000000..3351bc524 --- /dev/null +++ b/internal/api/v2/common.go @@ -0,0 +1,215 @@ +package v2 + +import ( + "github.com/formancehq/go-libs/v2/metadata" + "github.com/formancehq/ledger/internal" + "github.com/formancehq/ledger/internal/api/common" + "io" + "net/http" + "slices" + "strconv" + "strings" + + "github.com/formancehq/go-libs/v2/api" + + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + + "github.com/formancehq/go-libs/v2/bun/bunpaginate" + "github.com/formancehq/go-libs/v2/time" + + "github.com/formancehq/go-libs/v2/pointer" + "github.com/formancehq/go-libs/v2/query" +) + +func getPITOOTFilter(r *http.Request) (*ledgercontroller.PITFilter, error) { + pitString := r.URL.Query().Get("endTime") + ootString := r.URL.Query().Get("startTime") + + var ( + pit *time.Time + oot *time.Time + ) + + if pitString != "" { + var err error + _pit, err := time.ParseTime(pitString) + if err != nil { + return nil, err + } + + pit = &_pit + } + + if ootString != "" { + var err error + _oot, err := time.ParseTime(ootString) + if err != nil { + return nil, err + } + + oot = &_oot + } + + return &ledgercontroller.PITFilter{ + PIT: pit, + OOT: oot, + }, nil +} + +func getPITFilter(r *http.Request) (*ledgercontroller.PITFilter, error) { + pitString := r.URL.Query().Get("pit") + + var pit *time.Time + if pitString != "" { + var err error + _pit, err := time.ParseTime(pitString) + if err != nil { + return nil, err + } + + pit = &_pit + } + + return &ledgercontroller.PITFilter{ + PIT: pit, + }, nil +} + +func getPITFilterWithVolumes(r *http.Request) (*ledgercontroller.PITFilterWithVolumes, error) { + pit, err := getPITFilter(r) + if err != nil { + return nil, err + } + return &ledgercontroller.PITFilterWithVolumes{ + PITFilter: *pit, + ExpandVolumes: hasExpandVolumes(r), + ExpandEffectiveVolumes: hasExpandEffectiveVolumes(r), + }, nil +} + +func hasExpandVolumes(r *http.Request) bool { + parts := strings.Split(r.URL.Query().Get("expand"), ",") + return slices.Contains(parts, "volumes") +} + +func hasExpandEffectiveVolumes(r *http.Request) bool { + parts := strings.Split(r.URL.Query().Get("expand"), ",") + return slices.Contains(parts, "effectiveVolumes") +} + +func getFiltersForVolumes(r *http.Request) (*ledgercontroller.FiltersForVolumes, error) { + pit, err := getPITOOTFilter(r) + if err != nil { + return nil, err + } + + useInsertionDate := api.QueryParamBool(r, "insertionDate") + groupLvl := 0 + + groupLvlStr := r.URL.Query().Get("groupBy") + if groupLvlStr != "" { + groupLvlInt, err := strconv.Atoi(groupLvlStr) + if err != nil { + return nil, err + } + if groupLvlInt > 0 { + groupLvl = groupLvlInt + } + } + return &ledgercontroller.FiltersForVolumes{ + PITFilter: *pit, + UseInsertionDate: useInsertionDate, + GroupLvl: groupLvl, + }, nil +} + +func getQueryBuilder(r *http.Request) (query.Builder, error) { + q := r.URL.Query().Get("query") + if q == "" { + data, err := io.ReadAll(r.Body) + if err != nil { + return nil, err + } + q = string(data) + } + + if len(q) > 0 { + return query.ParseJSON(q) + } + return nil, nil +} + +func getPaginatedQueryOptionsOfPITFilterWithVolumes(r *http.Request) (*ledgercontroller.PaginatedQueryOptions[ledgercontroller.PITFilterWithVolumes], error) { + qb, err := getQueryBuilder(r) + if err != nil { + return 
nil, err + } + + pitFilter, err := getPITFilterWithVolumes(r) + if err != nil { + return nil, err + } + + pageSize, err := bunpaginate.GetPageSize(r) + if err != nil { + return nil, err + } + + return pointer.For(ledgercontroller.NewPaginatedQueryOptions(*pitFilter). + WithQueryBuilder(qb). + WithPageSize(pageSize)), nil +} + +func getPaginatedQueryOptionsOfFiltersForVolumes(r *http.Request) (*ledgercontroller.PaginatedQueryOptions[ledgercontroller.FiltersForVolumes], error) { + qb, err := getQueryBuilder(r) + if err != nil { + return nil, err + } + + filtersForVolumes, err := getFiltersForVolumes(r) + if err != nil { + return nil, err + } + + pageSize, err := bunpaginate.GetPageSize(r) + if err != nil { + return nil, err + } + + return pointer.For(ledgercontroller.NewPaginatedQueryOptions(*filtersForVolumes). + WithPageSize(pageSize). + WithQueryBuilder(qb)), nil +} + +type TransactionRequest struct { + Postings ledger.Postings `json:"postings"` + Script ledgercontroller.ScriptV1 `json:"script"` + Timestamp time.Time `json:"timestamp"` + Reference string `json:"reference"` + Metadata metadata.Metadata `json:"metadata" swaggertype:"object"` +} + +func (req *TransactionRequest) ToRunScript(allowUnboundedOverdrafts bool) (*ledgercontroller.RunScript, error) { + + if _, err := req.Postings.Validate(); err != nil { + return nil, err + } + + if len(req.Postings) > 0 { + txData := ledger.TransactionData{ + Postings: req.Postings, + Timestamp: req.Timestamp, + Reference: req.Reference, + Metadata: req.Metadata, + } + + return pointer.For(common.TxToScriptData(txData, allowUnboundedOverdrafts)), nil + } + + return &ledgercontroller.RunScript{ + Script: req.Script.ToCore(), + Timestamp: req.Timestamp, + Reference: req.Reference, + Metadata: req.Metadata, + }, nil +} diff --git a/internal/api/v2/controller_export_logs.go b/internal/api/v2/controller_export_logs.go deleted file mode 100644 index 3c00e62e0..000000000 --- a/internal/api/v2/controller_export_logs.go +++ /dev/null @@ -1,23 +0,0 @@ -package v2 - -import ( - "context" - "encoding/json" - "net/http" - - "github.com/formancehq/go-libs/api" - ledger "github.com/formancehq/ledger/internal" - "github.com/formancehq/ledger/internal/api/backend" - "github.com/formancehq/ledger/internal/engine" -) - -func exportLogs(w http.ResponseWriter, r *http.Request) { - enc := json.NewEncoder(w) - w.Header().Set("Content-Type", "application/octet-stream") - if err := backend.LedgerFromContext(r.Context()).Export(r.Context(), engine.ExportWriterFn(func(ctx context.Context, log *ledger.ChainedLog) error { - return enc.Encode(log) - })); err != nil { - api.InternalServerError(w, r, err) - return - } -} diff --git a/internal/api/v2/controllers_accounts.go b/internal/api/v2/controllers_accounts.go deleted file mode 100644 index 0a363d6b4..000000000 --- a/internal/api/v2/controllers_accounts.go +++ /dev/null @@ -1,166 +0,0 @@ -package v2 - -import ( - "encoding/json" - "fmt" - "net/http" - "net/url" - - "github.com/formancehq/ledger/pkg/core/accounts" - - "github.com/go-chi/chi/v5" - - storageerrors "github.com/formancehq/ledger/internal/storage/sqlutils" - - "github.com/formancehq/go-libs/pointer" - - sharedapi "github.com/formancehq/go-libs/api" - "github.com/formancehq/go-libs/bun/bunpaginate" - "github.com/formancehq/go-libs/collectionutils" - "github.com/formancehq/go-libs/metadata" - ledger "github.com/formancehq/ledger/internal" - "github.com/formancehq/ledger/internal/api/backend" - "github.com/formancehq/ledger/internal/storage/ledgerstore" - 
"github.com/pkg/errors" -) - -func countAccounts(w http.ResponseWriter, r *http.Request) { - l := backend.LedgerFromContext(r.Context()) - - options, err := getPaginatedQueryOptionsOfPITFilterWithVolumes(r) - if err != nil { - sharedapi.BadRequest(w, ErrValidation, err) - return - } - - count, err := l.CountAccounts(r.Context(), ledgerstore.NewGetAccountsQuery(*options)) - if err != nil { - switch { - case ledgerstore.IsErrInvalidQuery(err): - sharedapi.BadRequest(w, ErrValidation, err) - default: - sharedapi.InternalServerError(w, r, err) - } - return - } - - w.Header().Set("Count", fmt.Sprint(count)) - sharedapi.NoContent(w) -} - -func getAccounts(w http.ResponseWriter, r *http.Request) { - l := backend.LedgerFromContext(r.Context()) - - query, err := bunpaginate.Extract[ledgerstore.GetAccountsQuery](r, func() (*ledgerstore.GetAccountsQuery, error) { - options, err := getPaginatedQueryOptionsOfPITFilterWithVolumes(r) - if err != nil { - return nil, err - } - return pointer.For(ledgerstore.NewGetAccountsQuery(*options)), nil - }) - if err != nil { - sharedapi.BadRequest(w, ErrValidation, err) - return - } - - cursor, err := l.GetAccountsWithVolumes(r.Context(), *query) - if err != nil { - switch { - case ledgerstore.IsErrInvalidQuery(err): - sharedapi.BadRequest(w, ErrValidation, err) - default: - sharedapi.InternalServerError(w, r, err) - } - return - } - - sharedapi.RenderCursor(w, *cursor) -} - -func getAccount(w http.ResponseWriter, r *http.Request) { - l := backend.LedgerFromContext(r.Context()) - - param, err := url.PathUnescape(chi.URLParam(r, "address")) - if err != nil { - sharedapi.BadRequestWithDetails(w, ErrValidation, err, err.Error()) - return - } - - query := ledgerstore.NewGetAccountQuery(param) - if collectionutils.Contains(r.URL.Query()["expand"], "volumes") { - query = query.WithExpandVolumes() - } - if collectionutils.Contains(r.URL.Query()["expand"], "effectiveVolumes") { - query = query.WithExpandEffectiveVolumes() - } - pitFilter, err := getPITFilter(r) - if err != nil { - sharedapi.BadRequest(w, ErrValidation, err) - return - } - query.PITFilter = *pitFilter - - acc, err := l.GetAccountWithVolumes(r.Context(), query) - if err != nil { - switch { - case storageerrors.IsNotFoundError(err): - sharedapi.NotFound(w, err) - default: - sharedapi.InternalServerError(w, r, err) - } - return - } - - sharedapi.Ok(w, acc) -} - -func postAccountMetadata(w http.ResponseWriter, r *http.Request) { - l := backend.LedgerFromContext(r.Context()) - - param, err := url.PathUnescape(chi.URLParam(r, "address")) - if err != nil { - sharedapi.BadRequestWithDetails(w, ErrValidation, err, err.Error()) - return - } - - if !accounts.ValidateAddress(param) { - sharedapi.BadRequest(w, ErrValidation, errors.New("invalid account address format")) - return - } - - var m metadata.Metadata - if err := json.NewDecoder(r.Body).Decode(&m); err != nil { - sharedapi.BadRequest(w, ErrValidation, errors.New("invalid metadata format")) - return - } - - err = l.SaveMeta(r.Context(), getCommandParameters(r), ledger.MetaTargetTypeAccount, chi.URLParam(r, "address"), m) - if err != nil { - sharedapi.InternalServerError(w, r, err) - return - } - - sharedapi.NoContent(w) -} - -func deleteAccountMetadata(w http.ResponseWriter, r *http.Request) { - param, err := url.PathUnescape(chi.URLParam(r, "address")) - if err != nil { - sharedapi.BadRequestWithDetails(w, ErrValidation, err, err.Error()) - return - } - - if err := backend.LedgerFromContext(r.Context()). 
- DeleteMetadata( - r.Context(), - getCommandParameters(r), - ledger.MetaTargetTypeAccount, - param, - chi.URLParam(r, "key"), - ); err != nil { - sharedapi.InternalServerError(w, r, err) - return - } - - sharedapi.NoContent(w) -} diff --git a/internal/api/v2/controllers_accounts_add_metadata.go b/internal/api/v2/controllers_accounts_add_metadata.go new file mode 100644 index 000000000..b38dee690 --- /dev/null +++ b/internal/api/v2/controllers_accounts_add_metadata.go @@ -0,0 +1,41 @@ +package v2 + +import ( + "encoding/json" + "github.com/formancehq/ledger/internal/controller/ledger" + "net/http" + "net/url" + + "errors" + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/metadata" + "github.com/formancehq/ledger/internal/api/common" + "github.com/go-chi/chi/v5" +) + +func addAccountMetadata(w http.ResponseWriter, r *http.Request) { + l := common.LedgerFromContext(r.Context()) + + address, err := url.PathUnescape(chi.URLParam(r, "address")) + if err != nil { + api.BadRequestWithDetails(w, ErrValidation, err, err.Error()) + return + } + + var m metadata.Metadata + if err := json.NewDecoder(r.Body).Decode(&m); err != nil { + api.BadRequest(w, ErrValidation, errors.New("invalid metadata format")) + return + } + + err = l.SaveAccountMetadata(r.Context(), getCommandParameters(r, ledger.SaveAccountMetadata{ + Address: address, + Metadata: m, + })) + if err != nil { + common.HandleCommonErrors(w, r, err) + return + } + + api.NoContent(w) +} diff --git a/internal/api/v2/controllers_accounts_add_metadata_test.go b/internal/api/v2/controllers_accounts_add_metadata_test.go new file mode 100644 index 000000000..505170334 --- /dev/null +++ b/internal/api/v2/controllers_accounts_add_metadata_test.go @@ -0,0 +1,91 @@ +package v2 + +import ( + "net/http" + "net/http/httptest" + "net/url" + "os" + "testing" + + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/auth" + "github.com/formancehq/go-libs/v2/metadata" + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" +) + +func TestAccountsAddMetadata(t *testing.T) { + t.Parallel() + + type testCase struct { + name string + queryParams url.Values + expectStatusCode int + expectedErrorCode string + account string + body any + } + + testCases := []testCase{ + { + name: "nominal", + account: "world", + body: metadata.Metadata{ + "foo": "bar", + }, + }, + { + name: "invalid body", + account: "world", + body: "invalid - not an object", + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + }, + { + name: "invalid account address", + account: "%8X%2F", + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + }, + } + for _, testCase := range testCases { + testCase := testCase + t.Run(testCase.name, func(t *testing.T) { + + if testCase.expectStatusCode == 0 { + testCase.expectStatusCode = http.StatusNoContent + } + + systemController, ledgerController := newTestingSystemController(t, true) + if testCase.expectStatusCode == http.StatusNoContent { + ledgerController.EXPECT(). + SaveAccountMetadata(gomock.Any(), ledgercontroller.Parameters[ledgercontroller.SaveAccountMetadata]{ + Input: ledgercontroller.SaveAccountMetadata{ + Address: testCase.account, + Metadata: testCase.body.(metadata.Metadata), + }, + }). 
+					Return(nil)
+			}
+
+			router := NewRouter(systemController, auth.NewNoAuth(), os.Getenv("DEBUG") == "true")
+
+			req := httptest.NewRequest(http.MethodPost, "/", api.Buffer(t, testCase.body))
+			// httptest.NewRequest checks for invalid URLs, while we want to test invalid URLs
+			req.URL.Path = "/xxx/accounts/" + testCase.account + "/metadata"
+			rec := httptest.NewRecorder()
+			req.URL.RawQuery = testCase.queryParams.Encode()
+
+			router.ServeHTTP(rec, req)
+
+			require.Equal(t, testCase.expectStatusCode, rec.Code)
+			if testCase.expectStatusCode >= 300 || testCase.expectStatusCode < 200 {
+				err := api.ErrorResponse{}
+				api.Decode(t, rec.Body, &err)
+				require.EqualValues(t, testCase.expectedErrorCode, err.ErrorCode)
+			}
+		})
+	}
+}
diff --git a/internal/api/v2/controllers_accounts_count.go b/internal/api/v2/controllers_accounts_count.go
new file mode 100644
index 000000000..1aadff47d
--- /dev/null
+++ b/internal/api/v2/controllers_accounts_count.go
@@ -0,0 +1,35 @@
+package v2
+
+import (
+	"fmt"
+	"net/http"
+
+	"errors"
+	"github.com/formancehq/go-libs/v2/api"
+	"github.com/formancehq/ledger/internal/api/common"
+	ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger"
+)
+
+func countAccounts(w http.ResponseWriter, r *http.Request) {
+	l := common.LedgerFromContext(r.Context())
+
+	options, err := getPaginatedQueryOptionsOfPITFilterWithVolumes(r)
+	if err != nil {
+		api.BadRequest(w, ErrValidation, err)
+		return
+	}
+
+	count, err := l.CountAccounts(r.Context(), ledgercontroller.NewListAccountsQuery(*options))
+	if err != nil {
+		switch {
+		case errors.Is(err, ledgercontroller.ErrInvalidQuery{}) || errors.Is(err, ledgercontroller.ErrMissingFeature{}):
+			api.BadRequest(w, ErrValidation, err)
+		default:
+			common.HandleCommonErrors(w, r, err)
+		}
+		return
+	}
+
+	w.Header().Set("Count", fmt.Sprint(count))
+	api.NoContent(w)
+}
diff --git a/internal/api/v2/controllers_accounts_count_test.go b/internal/api/v2/controllers_accounts_count_test.go
new file mode 100644
index 000000000..91b3c7515
--- /dev/null
+++ b/internal/api/v2/controllers_accounts_count_test.go
@@ -0,0 +1,200 @@
+package v2
+
+import (
+	"bytes"
+	"net/http"
+	"net/http/httptest"
+	"net/url"
+	"os"
+	"testing"
+
+	"errors"
+	"github.com/formancehq/go-libs/v2/api"
+	"github.com/formancehq/go-libs/v2/auth"
+	"github.com/formancehq/go-libs/v2/query"
+	"github.com/formancehq/go-libs/v2/time"
+	ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger"
+	"github.com/stretchr/testify/require"
+	"go.uber.org/mock/gomock"
+)
+
+func TestAccountsCount(t *testing.T) {
+	t.Parallel()
+
+	type testCase struct {
+		name              string
+		queryParams       url.Values
+		body              string
+		expectQuery       ledgercontroller.PaginatedQueryOptions[ledgercontroller.PITFilterWithVolumes]
+		expectStatusCode  int
+		expectedErrorCode string
+		returnErr         error
+		expectBackendCall bool
+	}
+	before := time.Now()
+
+	testCases := []testCase{
+		{
+			name: "nominal",
+			expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{
+				PITFilter: ledgercontroller.PITFilter{
+					PIT: &before,
+				},
+			}).
+				WithPageSize(DefaultPageSize),
+			expectBackendCall: true,
+		},
+		{
+			name:              "using metadata",
+			body:              `{"$match": { "metadata[roles]": "admin" }}`,
+			expectBackendCall: true,
+			expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{
+				PITFilter: ledgercontroller.PITFilter{
+					PIT: &before,
+				},
+			}).
+				WithQueryBuilder(query.Match("metadata[roles]", "admin")).
+ WithPageSize(DefaultPageSize), + }, + { + name: "using address", + body: `{"$match": { "address": "foo" }}`, + expectBackendCall: true, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &before, + }, + }). + WithQueryBuilder(query.Match("address", "foo")). + WithPageSize(DefaultPageSize), + }, + { + name: "invalid page size", + queryParams: url.Values{ + "pageSize": []string{"nan"}, + }, + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + }, + { + name: "page size over maximum", + expectBackendCall: true, + queryParams: url.Values{ + "pageSize": []string{"1000000"}, + }, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &before, + }, + }). + WithPageSize(MaxPageSize), + }, + { + name: "using balance filter", + body: `{"$lt": { "balance[USD/2]": 100 }}`, + expectBackendCall: true, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &before, + }, + }). + WithQueryBuilder(query.Lt("balance[USD/2]", float64(100))). + WithPageSize(DefaultPageSize), + }, + { + name: "using exists filter", + body: `{"$exists": { "metadata": "foo" }}`, + expectBackendCall: true, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &before, + }, + }). + WithQueryBuilder(query.Exists("metadata", "foo")). + WithPageSize(DefaultPageSize), + }, + { + name: "using invalid query payload", + body: `[]`, + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + }, + { + name: "with invalid query from core point of view", + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + expectBackendCall: true, + returnErr: ledgercontroller.ErrInvalidQuery{}, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &before, + }, + }). + WithPageSize(DefaultPageSize), + }, + { + name: "with missing feature", + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + expectBackendCall: true, + returnErr: ledgercontroller.ErrMissingFeature{}, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &before, + }, + }). + WithPageSize(DefaultPageSize), + }, + { + name: "with unexpected error", + expectStatusCode: http.StatusInternalServerError, + expectedErrorCode: api.ErrorInternal, + expectBackendCall: true, + returnErr: errors.New("undefined error"), + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &before, + }, + }). + WithPageSize(DefaultPageSize), + }, + } + for _, testCase := range testCases { + testCase := testCase + t.Run(testCase.name, func(t *testing.T) { + + if testCase.expectStatusCode == 0 { + testCase.expectStatusCode = http.StatusNoContent + } + + systemController, ledgerController := newTestingSystemController(t, true) + if testCase.expectBackendCall { + ledgerController.EXPECT(). + CountAccounts(gomock.Any(), ledgercontroller.NewListAccountsQuery(testCase.expectQuery)). 
+ Return(10, testCase.returnErr) + } + + router := NewRouter(systemController, auth.NewNoAuth(), os.Getenv("DEBUG") == "true") + + req := httptest.NewRequest(http.MethodHead, "/xxx/accounts?pit="+before.Format(time.RFC3339Nano), bytes.NewBufferString(testCase.body)) + rec := httptest.NewRecorder() + params := url.Values{} + if testCase.queryParams != nil { + params = testCase.queryParams + } + params.Set("pit", before.Format(time.RFC3339Nano)) + req.URL.RawQuery = params.Encode() + + router.ServeHTTP(rec, req) + + require.Equal(t, testCase.expectStatusCode, rec.Code) + if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { + require.Equal(t, "10", rec.Header().Get("Count")) + } else { + err := api.ErrorResponse{} + api.Decode(t, rec.Body, &err) + require.EqualValues(t, testCase.expectedErrorCode, err.ErrorCode) + } + }) + } +} diff --git a/internal/api/v2/controllers_accounts_delete_metadata.go b/internal/api/v2/controllers_accounts_delete_metadata.go new file mode 100644 index 000000000..0e3115b91 --- /dev/null +++ b/internal/api/v2/controllers_accounts_delete_metadata.go @@ -0,0 +1,34 @@ +package v2 + +import ( + "github.com/formancehq/ledger/internal/controller/ledger" + "net/http" + "net/url" + + "github.com/go-chi/chi/v5" + + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/ledger/internal/api/common" +) + +func deleteAccountMetadata(w http.ResponseWriter, r *http.Request) { + address, err := url.PathUnescape(chi.URLParam(r, "address")) + if err != nil { + api.BadRequestWithDetails(w, ErrValidation, err, err.Error()) + return + } + + if err := common.LedgerFromContext(r.Context()). + DeleteAccountMetadata( + r.Context(), + getCommandParameters(r, ledger.DeleteAccountMetadata{ + Address: address, + Key: chi.URLParam(r, "key"), + }), + ); err != nil { + common.HandleCommonErrors(w, r, err) + return + } + + api.NoContent(w) +} diff --git a/internal/api/v2/controllers_accounts_delete_metadata_test.go b/internal/api/v2/controllers_accounts_delete_metadata_test.go new file mode 100644 index 000000000..3130d49f1 --- /dev/null +++ b/internal/api/v2/controllers_accounts_delete_metadata_test.go @@ -0,0 +1,94 @@ +package v2 + +import ( + "encoding/json" + "net/http" + "net/http/httptest" + "net/url" + "os" + "testing" + + "errors" + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/auth" + "github.com/formancehq/go-libs/v2/logging" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" +) + +func TestAccountsDeleteMetadata(t *testing.T) { + t.Parallel() + + ctx := logging.TestingContext() + + type testCase struct { + name string + queryParams url.Values + returnErr error + expectedStatusCode int + expectedErrorCode string + expectBackendCall bool + account string + } + + for _, tc := range []testCase{ + { + name: "nominal", + expectBackendCall: true, + account: "account0", + }, + { + name: "unexpected backend error", + expectBackendCall: true, + returnErr: errors.New("undefined error"), + expectedStatusCode: http.StatusInternalServerError, + expectedErrorCode: api.ErrorInternal, + account: "account0", + }, + { + name: "invalid account address", + account: "%8X%2F", + expectedStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + expectBackendCall: false, + }, + } { + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + systemController, ledgerController := newTestingSystemController(t, true) + + if tc.expectBackendCall { + 
ledgerController.EXPECT(). + DeleteAccountMetadata(gomock.Any(), ledgercontroller.Parameters[ledgercontroller.DeleteAccountMetadata]{ + Input: ledgercontroller.DeleteAccountMetadata{ + Address: tc.account, + Key: "foo", + }, + }). + Return(tc.returnErr) + } + + router := NewRouter(systemController, auth.NewNoAuth(), os.Getenv("DEBUG") == "true") + + req := httptest.NewRequest(http.MethodDelete, "/", nil) + req.URL.Path = "/ledger0/accounts/" + tc.account + "/metadata/foo" + req = req.WithContext(ctx) + req.URL.RawQuery = tc.queryParams.Encode() + + rec := httptest.NewRecorder() + + router.ServeHTTP(rec, req) + + if tc.expectedStatusCode == 0 || tc.expectedStatusCode == http.StatusOK { + require.Equal(t, http.StatusNoContent, rec.Code) + } else { + require.Equal(t, tc.expectedStatusCode, rec.Code) + errorResponse := api.ErrorResponse{} + require.NoError(t, json.Unmarshal(rec.Body.Bytes(), &errorResponse)) + require.Equal(t, tc.expectedErrorCode, errorResponse.ErrorCode) + } + }) + } +} diff --git a/internal/api/v2/controllers_accounts_list.go b/internal/api/v2/controllers_accounts_list.go new file mode 100644 index 000000000..c4edc6a4f --- /dev/null +++ b/internal/api/v2/controllers_accounts_list.go @@ -0,0 +1,41 @@ +package v2 + +import ( + "net/http" + + "errors" + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/bun/bunpaginate" + "github.com/formancehq/go-libs/v2/pointer" + "github.com/formancehq/ledger/internal/api/common" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" +) + +func listAccounts(w http.ResponseWriter, r *http.Request) { + l := common.LedgerFromContext(r.Context()) + + query, err := bunpaginate.Extract[ledgercontroller.ListAccountsQuery](r, func() (*ledgercontroller.ListAccountsQuery, error) { + options, err := getPaginatedQueryOptionsOfPITFilterWithVolumes(r) + if err != nil { + return nil, err + } + return pointer.For(ledgercontroller.NewListAccountsQuery(*options)), nil + }) + if err != nil { + api.BadRequest(w, ErrValidation, err) + return + } + + cursor, err := l.ListAccounts(r.Context(), *query) + if err != nil { + switch { + case errors.Is(err, ledgercontroller.ErrInvalidQuery{}) || errors.Is(err, ledgercontroller.ErrMissingFeature{}): + api.BadRequest(w, ErrValidation, err) + default: + common.HandleCommonErrors(w, r, err) + } + return + } + + api.RenderCursor(w, *cursor) +} diff --git a/internal/api/v2/controllers_accounts_list_test.go b/internal/api/v2/controllers_accounts_list_test.go new file mode 100644 index 000000000..ba6319773 --- /dev/null +++ b/internal/api/v2/controllers_accounts_list_test.go @@ -0,0 +1,229 @@ +package v2 + +import ( + "bytes" + "net/http" + "net/http/httptest" + "net/url" + "os" + "testing" + + "errors" + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/auth" + "github.com/formancehq/go-libs/v2/bun/bunpaginate" + "github.com/formancehq/go-libs/v2/metadata" + "github.com/formancehq/go-libs/v2/query" + "github.com/formancehq/go-libs/v2/time" + ledger "github.com/formancehq/ledger/internal" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" +) + +func TestAccountsList(t *testing.T) { + t.Parallel() + + type testCase struct { + name string + queryParams url.Values + body string + expectQuery ledgercontroller.PaginatedQueryOptions[ledgercontroller.PITFilterWithVolumes] + expectStatusCode int + expectedErrorCode string + expectBackendCall bool + returnErr error + } + 
before := time.Now() + + testCases := []testCase{ + { + name: "nominal", + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &before, + }, + }). + WithPageSize(DefaultPageSize), + expectBackendCall: true, + }, + { + name: "using metadata", + body: `{"$match": { "metadata[roles]": "admin" }}`, + expectBackendCall: true, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &before, + }, + }). + WithQueryBuilder(query.Match("metadata[roles]", "admin")). + WithPageSize(DefaultPageSize), + }, + { + name: "using address", + body: `{"$match": { "address": "foo" }}`, + expectBackendCall: true, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &before, + }, + }). + WithQueryBuilder(query.Match("address", "foo")). + WithPageSize(DefaultPageSize), + }, + { + name: "using empty cursor", + expectBackendCall: true, + queryParams: url.Values{ + "cursor": []string{bunpaginate.EncodeCursor(ledgercontroller.NewListAccountsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{})))}, + }, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}), + }, + { + name: "using invalid cursor", + queryParams: url.Values{ + "cursor": []string{"XXX"}, + }, + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + }, + { + name: "invalid page size", + queryParams: url.Values{ + "pageSize": []string{"nan"}, + }, + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + }, + { + name: "page size over maximum", + expectBackendCall: true, + queryParams: url.Values{ + "pageSize": []string{"1000000"}, + }, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &before, + }, + }). + WithPageSize(MaxPageSize), + }, + { + name: "using balance filter", + expectBackendCall: true, + body: `{"$lt": { "balance[USD/2]": 100 }}`, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &before, + }, + }). + WithQueryBuilder(query.Lt("balance[USD/2]", float64(100))). + WithPageSize(DefaultPageSize), + }, + { + name: "using exists filter", + expectBackendCall: true, + body: `{"$exists": { "metadata": "foo" }}`, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &before, + }, + }). + WithQueryBuilder(query.Exists("metadata", "foo")). + WithPageSize(DefaultPageSize), + }, + { + name: "using invalid query payload", + body: `[]`, + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + }, + { + name: "with invalid query from core point of view", + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + expectBackendCall: true, + returnErr: ledgercontroller.ErrInvalidQuery{}, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &before, + }, + }). 
+ WithPageSize(DefaultPageSize), + }, + { + name: "with missing feature", + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + expectBackendCall: true, + returnErr: ledgercontroller.ErrMissingFeature{}, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &before, + }, + }). + WithPageSize(DefaultPageSize), + }, + { + name: "with unexpected error", + expectStatusCode: http.StatusInternalServerError, + expectedErrorCode: api.ErrorInternal, + expectBackendCall: true, + returnErr: errors.New("undefined error"), + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &before, + }, + }). + WithPageSize(DefaultPageSize), + }, + } + for _, testCase := range testCases { + tc := testCase + t.Run(tc.name, func(t *testing.T) { + + if tc.expectStatusCode == 0 { + tc.expectStatusCode = http.StatusOK + } + + expectedCursor := bunpaginate.Cursor[ledger.Account]{ + Data: []ledger.Account{ + { + Address: "world", + Metadata: metadata.Metadata{}, + }, + }, + } + + systemController, ledgerController := newTestingSystemController(t, true) + if tc.expectBackendCall { + ledgerController.EXPECT(). + ListAccounts(gomock.Any(), ledgercontroller.NewListAccountsQuery(tc.expectQuery)). + Return(&expectedCursor, tc.returnErr) + } + + router := NewRouter(systemController, auth.NewNoAuth(), os.Getenv("DEBUG") == "true") + + req := httptest.NewRequest(http.MethodGet, "/xxx/accounts?pit="+before.Format(time.RFC3339Nano), bytes.NewBufferString(tc.body)) + rec := httptest.NewRecorder() + params := url.Values{} + if tc.queryParams != nil { + params = tc.queryParams + } + params.Set("pit", before.Format(time.RFC3339Nano)) + req.URL.RawQuery = params.Encode() + + router.ServeHTTP(rec, req) + + require.Equal(t, tc.expectStatusCode, rec.Code) + if tc.expectStatusCode < 300 && tc.expectStatusCode >= 200 { + cursor := api.DecodeCursorResponse[ledger.Account](t, rec.Body) + require.Equal(t, expectedCursor, *cursor) + } else { + err := api.ErrorResponse{} + api.Decode(t, rec.Body, &err) + require.EqualValues(t, tc.expectedErrorCode, err.ErrorCode) + } + }) + } +} diff --git a/internal/api/v2/controllers_accounts_read.go b/internal/api/v2/controllers_accounts_read.go new file mode 100644 index 000000000..d1eddd262 --- /dev/null +++ b/internal/api/v2/controllers_accounts_read.go @@ -0,0 +1,49 @@ +package v2 + +import ( + "net/http" + "net/url" + + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/platform/postgres" + "github.com/formancehq/ledger/internal/api/common" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + "github.com/go-chi/chi/v5" +) + +func readAccount(w http.ResponseWriter, r *http.Request) { + l := common.LedgerFromContext(r.Context()) + + param, err := url.PathUnescape(chi.URLParam(r, "address")) + if err != nil { + api.BadRequestWithDetails(w, ErrValidation, err, err.Error()) + return + } + + query := ledgercontroller.NewGetAccountQuery(param) + if hasExpandVolumes(r) { + query = query.WithExpandVolumes() + } + if hasExpandEffectiveVolumes(r) { + query = query.WithExpandEffectiveVolumes() + } + pitFilter, err := getPITFilter(r) + if err != nil { + api.BadRequest(w, ErrValidation, err) + return + } + query.PITFilter = *pitFilter + + acc, err := l.GetAccount(r.Context(), query) + if err != nil { + switch { + case postgres.IsNotFoundError(err): + api.NotFound(w, 
err) + default: + common.HandleCommonErrors(w, r, err) + } + return + } + + api.Ok(w, acc) +} diff --git a/internal/api/v2/controllers_accounts_read_test.go b/internal/api/v2/controllers_accounts_read_test.go new file mode 100644 index 000000000..93c742c88 --- /dev/null +++ b/internal/api/v2/controllers_accounts_read_test.go @@ -0,0 +1,108 @@ +package v2 + +import ( + "bytes" + "net/http" + "net/http/httptest" + "net/url" + "os" + "testing" + + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/auth" + "github.com/formancehq/go-libs/v2/time" + ledger "github.com/formancehq/ledger/internal" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" +) + +func TestAccountsRead(t *testing.T) { + t.Parallel() + + type testCase struct { + name string + queryParams url.Values + body string + expectQuery ledgercontroller.GetAccountQuery + expectStatusCode int + expectedErrorCode string + expectBackendCall bool + returnErr error + account string + } + before := time.Now() + + testCases := []testCase{ + { + name: "nominal", + account: "foo", + expectQuery: ledgercontroller.NewGetAccountQuery("foo").WithPIT(before), + expectBackendCall: true, + }, + { + name: "with expand volumes", + account: "foo", + expectQuery: ledgercontroller.NewGetAccountQuery("foo").WithPIT(before).WithExpandVolumes(), + expectBackendCall: true, + queryParams: url.Values{ + "expand": {"volumes"}, + }, + }, + { + name: "with expand effective volumes", + account: "foo", + expectQuery: ledgercontroller.NewGetAccountQuery("foo").WithPIT(before).WithExpandEffectiveVolumes(), + expectBackendCall: true, + queryParams: url.Values{ + "expand": {"effectiveVolumes"}, + }, + }, + { + name: "invalid account address", + account: "%8X%2F", + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + }, + } + for _, testCase := range testCases { + tc := testCase + t.Run(tc.name, func(t *testing.T) { + + if tc.expectStatusCode == 0 { + tc.expectStatusCode = http.StatusOK + } + + systemController, ledgerController := newTestingSystemController(t, true) + if tc.expectBackendCall { + ledgerController.EXPECT(). + GetAccount(gomock.Any(), tc.expectQuery). 
+ Return(&ledger.Account{}, tc.returnErr) + } + + router := NewRouter(systemController, auth.NewNoAuth(), os.Getenv("DEBUG") == "true") + + req := httptest.NewRequest(http.MethodGet, "/", bytes.NewBufferString(tc.body)) + req.URL.Path = "/xxx/accounts/" + tc.account + rec := httptest.NewRecorder() + params := url.Values{} + if tc.queryParams != nil { + params = tc.queryParams + } + params.Set("pit", before.Format(time.RFC3339Nano)) + req.URL.RawQuery = params.Encode() + + router.ServeHTTP(rec, req) + + require.Equal(t, tc.expectStatusCode, rec.Code) + if tc.expectStatusCode < 300 && tc.expectStatusCode >= 200 { + _, ok := api.DecodeSingleResponse[ledger.Account](t, rec.Body) + require.True(t, ok) + } else { + err := api.ErrorResponse{} + api.Decode(t, rec.Body, &err) + require.EqualValues(t, tc.expectedErrorCode, err.ErrorCode) + } + }) + } +} diff --git a/internal/api/v2/controllers_accounts_test.go b/internal/api/v2/controllers_accounts_test.go deleted file mode 100644 index 5b4053990..000000000 --- a/internal/api/v2/controllers_accounts_test.go +++ /dev/null @@ -1,323 +0,0 @@ -package v2_test - -import ( - "bytes" - "net/http" - "net/http/httptest" - "net/url" - "testing" - - "github.com/formancehq/go-libs/time" - - "github.com/formancehq/go-libs/auth" - "github.com/formancehq/go-libs/bun/bunpaginate" - - sharedapi "github.com/formancehq/go-libs/api" - "github.com/formancehq/go-libs/metadata" - "github.com/formancehq/go-libs/query" - ledger "github.com/formancehq/ledger/internal" - v2 "github.com/formancehq/ledger/internal/api/v2" - "github.com/formancehq/ledger/internal/engine/command" - "github.com/formancehq/ledger/internal/opentelemetry/metrics" - "github.com/formancehq/ledger/internal/storage/ledgerstore" - "github.com/stretchr/testify/require" - "go.uber.org/mock/gomock" -) - -func TestGetAccounts(t *testing.T) { - t.Parallel() - - type testCase struct { - name string - queryParams url.Values - body string - expectQuery ledgerstore.PaginatedQueryOptions[ledgerstore.PITFilterWithVolumes] - expectStatusCode int - expectedErrorCode string - } - before := time.Now() - - testCases := []testCase{ - { - name: "nominal", - expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{ - PITFilter: ledgerstore.PITFilter{ - PIT: &before, - }, - }). - WithPageSize(v2.DefaultPageSize), - }, - { - name: "using metadata", - body: `{"$match": { "metadata[roles]": "admin" }}`, - expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{ - PITFilter: ledgerstore.PITFilter{ - PIT: &before, - }, - }). - WithQueryBuilder(query.Match("metadata[roles]", "admin")). - WithPageSize(v2.DefaultPageSize), - }, - { - name: "using address", - body: `{"$match": { "address": "foo" }}`, - expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{ - PITFilter: ledgerstore.PITFilter{ - PIT: &before, - }, - }). - WithQueryBuilder(query.Match("address", "foo")). 
- WithPageSize(v2.DefaultPageSize), - }, - { - name: "using empty cursor", - queryParams: url.Values{ - "cursor": []string{bunpaginate.EncodeCursor(ledgerstore.NewGetAccountsQuery(ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{})))}, - }, - expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}), - }, - { - name: "using invalid cursor", - queryParams: url.Values{ - "cursor": []string{"XXX"}, - }, - expectStatusCode: http.StatusBadRequest, - expectedErrorCode: v2.ErrValidation, - }, - { - name: "invalid page size", - queryParams: url.Values{ - "pageSize": []string{"nan"}, - }, - expectStatusCode: http.StatusBadRequest, - expectedErrorCode: v2.ErrValidation, - }, - { - name: "page size over maximum", - queryParams: url.Values{ - "pageSize": []string{"1000000"}, - }, - expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{ - PITFilter: ledgerstore.PITFilter{ - PIT: &before, - }, - }). - WithPageSize(v2.MaxPageSize), - }, - { - name: "using balance filter", - body: `{"$lt": { "balance[USD/2]": 100 }}`, - expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{ - PITFilter: ledgerstore.PITFilter{ - PIT: &before, - }, - }). - WithQueryBuilder(query.Lt("balance[USD/2]", float64(100))). - WithPageSize(v2.DefaultPageSize), - }, - { - name: "using exists filter", - body: `{"$exists": { "metadata": "foo" }}`, - expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{ - PITFilter: ledgerstore.PITFilter{ - PIT: &before, - }, - }). - WithQueryBuilder(query.Exists("metadata", "foo")). - WithPageSize(v2.DefaultPageSize), - }, - { - name: "using invalid query payload", - body: `[]`, - expectStatusCode: http.StatusBadRequest, - expectedErrorCode: v2.ErrValidation, - }, - } - for _, testCase := range testCases { - testCase := testCase - t.Run(testCase.name, func(t *testing.T) { - - if testCase.expectStatusCode == 0 { - testCase.expectStatusCode = http.StatusOK - } - - expectedCursor := bunpaginate.Cursor[ledger.ExpandedAccount]{ - Data: []ledger.ExpandedAccount{ - { - Account: ledger.Account{ - Address: "world", - Metadata: metadata.Metadata{}, - }, - }, - }, - } - - backend, mockLedger := newTestingBackend(t, true) - if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { - mockLedger.EXPECT(). - GetAccountsWithVolumes(gomock.Any(), ledgerstore.NewGetAccountsQuery(testCase.expectQuery)). 
- Return(&expectedCursor, nil) - } - - router := v2.NewRouter(backend, nil, metrics.NewNoOpRegistry(), auth.NewNoAuth(), testing.Verbose()) - - req := httptest.NewRequest(http.MethodGet, "/xxx/accounts?pit="+before.Format(time.RFC3339Nano), bytes.NewBufferString(testCase.body)) - rec := httptest.NewRecorder() - params := url.Values{} - if testCase.queryParams != nil { - params = testCase.queryParams - } - params.Set("pit", before.Format(time.RFC3339Nano)) - req.URL.RawQuery = params.Encode() - - router.ServeHTTP(rec, req) - - require.Equal(t, testCase.expectStatusCode, rec.Code) - if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { - cursor := sharedapi.DecodeCursorResponse[ledger.ExpandedAccount](t, rec.Body) - require.Equal(t, expectedCursor, *cursor) - } else { - err := sharedapi.ErrorResponse{} - sharedapi.Decode(t, rec.Body, &err) - require.EqualValues(t, testCase.expectedErrorCode, err.ErrorCode) - } - }) - } -} - -func TestGetAccount(t *testing.T) { - t.Parallel() - - account := ledger.ExpandedAccount{ - Account: ledger.Account{ - Address: "foo", - Metadata: metadata.Metadata{}, - }, - } - - now := time.Now() - query := ledgerstore.NewGetAccountQuery("foo") - query.PIT = &now - - backend, mock := newTestingBackend(t, true) - mock.EXPECT(). - GetAccountWithVolumes(gomock.Any(), query). - Return(&account, nil) - - router := v2.NewRouter(backend, nil, metrics.NewNoOpRegistry(), auth.NewNoAuth(), testing.Verbose()) - - req := httptest.NewRequest(http.MethodGet, "/xxx/accounts/foo?pit="+now.Format(time.RFC3339Nano), nil) - rec := httptest.NewRecorder() - - router.ServeHTTP(rec, req) - - require.Equal(t, http.StatusOK, rec.Code) - response, _ := sharedapi.DecodeSingleResponse[ledger.ExpandedAccount](t, rec.Body) - require.Equal(t, account, response) -} - -func TestGetAccountWithEncoded(t *testing.T) { - t.Parallel() - - account := ledger.ExpandedAccount{ - Account: ledger.Account{ - Address: "foo:bar", - Metadata: metadata.Metadata{}, - }, - } - - now := time.Now() - query := ledgerstore.NewGetAccountQuery("foo:bar") - query.PIT = &now - - backend, mock := newTestingBackend(t, true) - mock.EXPECT(). - GetAccountWithVolumes(gomock.Any(), query). 
- Return(&account, nil) - - router := v2.NewRouter(backend, nil, metrics.NewNoOpRegistry(), auth.NewNoAuth(), testing.Verbose()) - - req := httptest.NewRequest(http.MethodGet, "/xxx/accounts/foo%3Abar?pit="+now.Format(time.RFC3339Nano), nil) - rec := httptest.NewRecorder() - - router.ServeHTTP(rec, req) - - require.Equal(t, http.StatusOK, rec.Code) - response, _ := sharedapi.DecodeSingleResponse[ledger.ExpandedAccount](t, rec.Body) - require.Equal(t, account, response) -} - -func TestPostAccountMetadata(t *testing.T) { - t.Parallel() - - type testCase struct { - name string - queryParams url.Values - expectStatusCode int - expectedErrorCode string - account string - body any - } - - testCases := []testCase{ - { - name: "nominal", - account: "world", - body: metadata.Metadata{ - "foo": "bar", - }, - }, - { - name: "nominal", - account: "test-", - body: metadata.Metadata{ - "foo": "bar", - }, - }, - { - name: "nominal", - account: "-t--e-st-", - body: metadata.Metadata{ - "foo": "bar", - }, - }, - { - name: "invalid body", - account: "world", - body: "invalid - not an object", - expectStatusCode: http.StatusBadRequest, - expectedErrorCode: v2.ErrValidation, - }, - } - for _, testCase := range testCases { - testCase := testCase - t.Run(testCase.name, func(t *testing.T) { - - if testCase.expectStatusCode == 0 { - testCase.expectStatusCode = http.StatusNoContent - } - - backend, mock := newTestingBackend(t, true) - if testCase.expectStatusCode == http.StatusNoContent { - mock.EXPECT(). - SaveMeta(gomock.Any(), command.Parameters{}, ledger.MetaTargetTypeAccount, testCase.account, testCase.body). - Return(nil) - } - - router := v2.NewRouter(backend, nil, metrics.NewNoOpRegistry(), auth.NewNoAuth(), testing.Verbose()) - - req := httptest.NewRequest(http.MethodPost, "/xxx/accounts/"+testCase.account+"/metadata", sharedapi.Buffer(t, testCase.body)) - rec := httptest.NewRecorder() - req.URL.RawQuery = testCase.queryParams.Encode() - - router.ServeHTTP(rec, req) - - require.Equal(t, testCase.expectStatusCode, rec.Code) - if testCase.expectStatusCode >= 300 || testCase.expectStatusCode < 200 { - err := sharedapi.ErrorResponse{} - sharedapi.Decode(t, rec.Body, &err) - require.EqualValues(t, testCase.expectedErrorCode, err.ErrorCode) - } - }) - } -} diff --git a/internal/api/v2/controllers_balances.go b/internal/api/v2/controllers_balances.go index 3b16c0d08..66d16fd80 100644 --- a/internal/api/v2/controllers_balances.go +++ b/internal/api/v2/controllers_balances.go @@ -3,37 +3,39 @@ package v2 import ( "net/http" - sharedapi "github.com/formancehq/go-libs/api" - "github.com/formancehq/ledger/internal/api/backend" - "github.com/formancehq/ledger/internal/storage/ledgerstore" + "errors" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/ledger/internal/api/common" ) -func getBalancesAggregated(w http.ResponseWriter, r *http.Request) { +func readBalancesAggregated(w http.ResponseWriter, r *http.Request) { pitFilter, err := getPITFilter(r) if err != nil { - sharedapi.BadRequest(w, ErrValidation, err) + api.BadRequest(w, ErrValidation, err) return } queryBuilder, err := getQueryBuilder(r) if err != nil { - sharedapi.BadRequest(w, ErrValidation, err) + api.BadRequest(w, ErrValidation, err) return } - balances, err := backend.LedgerFromContext(r.Context()). 
- GetAggregatedBalances(r.Context(), ledgerstore.NewGetAggregatedBalancesQuery( - *pitFilter, queryBuilder, sharedapi.QueryParamBool(r, "use_insertion_date") || sharedapi.QueryParamBool(r, "useInsertionDate"))) + balances, err := common.LedgerFromContext(r.Context()). + GetAggregatedBalances(r.Context(), ledgercontroller.NewGetAggregatedBalancesQuery( + *pitFilter, queryBuilder, api.QueryParamBool(r, "use_insertion_date") || api.QueryParamBool(r, "useInsertionDate"))) if err != nil { switch { - case ledgerstore.IsErrInvalidQuery(err): - sharedapi.BadRequest(w, ErrValidation, err) + case errors.Is(err, ledgercontroller.ErrInvalidQuery{}) || errors.Is(err, ledgercontroller.ErrMissingFeature{}): + api.BadRequest(w, ErrValidation, err) default: - sharedapi.InternalServerError(w, r, err) + common.HandleCommonErrors(w, r, err) } return } - sharedapi.Ok(w, balances) + api.Ok(w, balances) } diff --git a/internal/api/v2/controllers_balances_test.go b/internal/api/v2/controllers_balances_test.go index 1d018e702..5d1de3a8b 100644 --- a/internal/api/v2/controllers_balances_test.go +++ b/internal/api/v2/controllers_balances_test.go @@ -1,4 +1,4 @@ -package v2_test +package v2 import ( "bytes" @@ -6,29 +6,29 @@ import ( "net/http" "net/http/httptest" "net/url" + "os" "testing" - "github.com/formancehq/go-libs/time" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" - sharedapi "github.com/formancehq/go-libs/api" - "github.com/formancehq/go-libs/auth" - "github.com/formancehq/go-libs/query" + "github.com/formancehq/go-libs/v2/time" + + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/auth" + "github.com/formancehq/go-libs/v2/query" ledger "github.com/formancehq/ledger/internal" - v2 "github.com/formancehq/ledger/internal/api/v2" - "github.com/formancehq/ledger/internal/opentelemetry/metrics" - "github.com/formancehq/ledger/internal/storage/ledgerstore" "github.com/stretchr/testify/require" "go.uber.org/mock/gomock" ) -func TestGetBalancesAggregated(t *testing.T) { +func TestBalancesAggregates(t *testing.T) { t.Parallel() type testCase struct { name string queryParams url.Values body string - expectQuery ledgerstore.GetAggregatedBalanceQuery + expectQuery ledgercontroller.GetAggregatedBalanceQuery } now := time.Now() @@ -36,8 +36,8 @@ func TestGetBalancesAggregated(t *testing.T) { testCases := []testCase{ { name: "nominal", - expectQuery: ledgerstore.GetAggregatedBalanceQuery{ - PITFilter: ledgerstore.PITFilter{ + expectQuery: ledgercontroller.GetAggregatedBalanceQuery{ + PITFilter: ledgercontroller.PITFilter{ PIT: &now, }, }, @@ -45,8 +45,8 @@ func TestGetBalancesAggregated(t *testing.T) { { name: "using address", body: `{"$match": {"address": "foo"}}`, - expectQuery: ledgerstore.GetAggregatedBalanceQuery{ - PITFilter: ledgerstore.PITFilter{ + expectQuery: ledgercontroller.GetAggregatedBalanceQuery{ + PITFilter: ledgercontroller.PITFilter{ PIT: &now, }, QueryBuilder: query.Match("address", "foo"), @@ -55,8 +55,8 @@ func TestGetBalancesAggregated(t *testing.T) { { name: "using exists metadata filter", body: `{"$exists": {"metadata": "foo"}}`, - expectQuery: ledgerstore.GetAggregatedBalanceQuery{ - PITFilter: ledgerstore.PITFilter{ + expectQuery: ledgercontroller.GetAggregatedBalanceQuery{ + PITFilter: ledgercontroller.PITFilter{ PIT: &now, }, QueryBuilder: query.Exists("metadata", "foo"), @@ -67,8 +67,8 @@ func TestGetBalancesAggregated(t *testing.T) { queryParams: url.Values{ "pit": []string{now.Format(time.RFC3339Nano)}, }, - expectQuery: 
ledgerstore.GetAggregatedBalanceQuery{ - PITFilter: ledgerstore.PITFilter{ + expectQuery: ledgercontroller.GetAggregatedBalanceQuery{ + PITFilter: ledgercontroller.PITFilter{ PIT: &now, }, }, @@ -79,8 +79,8 @@ func TestGetBalancesAggregated(t *testing.T) { "pit": []string{now.Format(time.RFC3339Nano)}, "useInsertionDate": []string{"true"}, }, - expectQuery: ledgerstore.GetAggregatedBalanceQuery{ - PITFilter: ledgerstore.PITFilter{ + expectQuery: ledgercontroller.GetAggregatedBalanceQuery{ + PITFilter: ledgercontroller.PITFilter{ PIT: &now, }, UseInsertionDate: true, @@ -94,12 +94,12 @@ func TestGetBalancesAggregated(t *testing.T) { expectedBalances := ledger.BalancesByAssets{ "world": big.NewInt(-100), } - backend, mock := newTestingBackend(t, true) - mock.EXPECT(). + systemController, ledgerController := newTestingSystemController(t, true) + ledgerController.EXPECT(). GetAggregatedBalances(gomock.Any(), testCase.expectQuery). Return(expectedBalances, nil) - router := v2.NewRouter(backend, nil, metrics.NewNoOpRegistry(), auth.NewNoAuth(), testing.Verbose()) + router := NewRouter(systemController, auth.NewNoAuth(), os.Getenv("DEBUG") == "true") req := httptest.NewRequest(http.MethodGet, "/xxx/aggregate/balances?pit="+now.Format(time.RFC3339Nano), bytes.NewBufferString(testCase.body)) rec := httptest.NewRecorder() @@ -110,7 +110,7 @@ func TestGetBalancesAggregated(t *testing.T) { router.ServeHTTP(rec, req) require.Equal(t, http.StatusOK, rec.Code) - balances, ok := sharedapi.DecodeSingleResponse[ledger.BalancesByAssets](t, rec.Body) + balances, ok := api.DecodeSingleResponse[ledger.BalancesByAssets](t, rec.Body) require.True(t, ok) require.Equal(t, expectedBalances, balances) }) diff --git a/internal/api/v2/controllers_bulk.go b/internal/api/v2/controllers_bulk.go index f7afc3eec..b3b70e8dd 100644 --- a/internal/api/v2/controllers_bulk.go +++ b/internal/api/v2/controllers_bulk.go @@ -1,33 +1,287 @@ package v2 import ( + "context" "encoding/json" + "fmt" "net/http" - "github.com/formancehq/go-libs/contextutil" - - sharedapi "github.com/formancehq/go-libs/api" - "github.com/formancehq/ledger/internal/api/backend" + "errors" + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/metadata" + ledger "github.com/formancehq/ledger/internal" + "github.com/formancehq/ledger/internal/api/common" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" ) -func bulkHandler(w http.ResponseWriter, r *http.Request) { - b := Bulk{} - if err := json.NewDecoder(r.Body).Decode(&b); err != nil { - sharedapi.BadRequest(w, ErrValidation, err) - return +func bulkHandler(bulkMaxSize int) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + b := Bulk{} + if err := json.NewDecoder(r.Body).Decode(&b); err != nil { + api.BadRequest(w, ErrValidation, err) + return + } + + if bulkMaxSize != 0 && len(b) > bulkMaxSize { + api.WriteErrorResponse(w, http.StatusRequestEntityTooLarge, ErrBulkSizeExceeded, fmt.Errorf("bulk size exceeded, max size is %d", bulkMaxSize)) + return + } + + w.Header().Set("Content-Type", "application/json") + + ret, errorsInBulk, err := ProcessBulk(r.Context(), common.LedgerFromContext(r.Context()), b, api.QueryParamBool(r, "continueOnFailure")) + if err != nil || errorsInBulk { + w.WriteHeader(http.StatusBadRequest) + } + + if err := json.NewEncoder(w).Encode(api.BaseResponse[[]Result]{ + Data: &ret, + }); err != nil { + panic(err) + } } +} + +const ( + ActionCreateTransaction = "CREATE_TRANSACTION" + ActionAddMetadata = 
"ADD_METADATA" + ActionRevertTransaction = "REVERT_TRANSACTION" + ActionDeleteMetadata = "DELETE_METADATA" +) + +type Bulk []Element + +type Element struct { + Action string `json:"action"` + IdempotencyKey string `json:"ik"` + Data json.RawMessage `json:"data"` +} + +type Result struct { + ErrorCode string `json:"errorCode,omitempty"` + ErrorDescription string `json:"errorDescription,omitempty"` + ErrorDetails string `json:"errorDetails,omitempty"` + Data any `json:"data,omitempty"` + ResponseType string `json:"responseType"` // Added for sdk generation (discriminator in oneOf) +} - w.Header().Set("Content-Type", "application/json") +func ProcessBulk( + ctx context.Context, + l ledgercontroller.Controller, + bulk Bulk, + continueOnFailure bool, +) ([]Result, bool, error) { - ctx, _ := contextutil.Detached(r.Context()) - ret, errorsInBulk, err := ProcessBulk(ctx, backend.LedgerFromContext(r.Context()), b, sharedapi.QueryParamBool(r, "continueOnFailure")) - if err != nil || errorsInBulk { - w.WriteHeader(http.StatusBadRequest) + ret := make([]Result, 0, len(bulk)) + + errorsInBulk := false + var bulkError = func(action, code string, err error) { + ret = append(ret, Result{ + ErrorCode: code, + ErrorDescription: err.Error(), + ResponseType: "ERROR", + }) + errorsInBulk = true } - if err := json.NewEncoder(w).Encode(sharedapi.BaseResponse[[]Result]{ - Data: &ret, - }); err != nil { - panic(err) + for i, element := range bulk { + switch element.Action { + case ActionCreateTransaction: + req := &TransactionRequest{} + if err := json.Unmarshal(element.Data, req); err != nil { + return nil, errorsInBulk, fmt.Errorf("error parsing element %d: %s", i, err) + } + rs, err := req.ToRunScript(false) + if err != nil { + return nil, errorsInBulk, fmt.Errorf("error parsing element %d: %s", i, err) + } + + createTransactionResult, err := l.CreateTransaction(ctx, ledgercontroller.Parameters[ledgercontroller.RunScript]{ + DryRun: false, + IdempotencyKey: element.IdempotencyKey, + Input: *rs, + }) + if err != nil { + var code string + + switch { + case errors.Is(err, &ledgercontroller.ErrInsufficientFunds{}): + code = ErrInsufficientFund + case errors.Is(err, &ledgercontroller.ErrInvalidVars{}) || errors.Is(err, ledgercontroller.ErrCompilationFailed{}): + code = ErrCompilationFailed + case errors.Is(err, &ledgercontroller.ErrMetadataOverride{}): + code = ErrMetadataOverride + case errors.Is(err, ledgercontroller.ErrNoPostings): + code = ErrNoPostings + case errors.Is(err, ledgercontroller.ErrTransactionReferenceConflict{}): + code = ErrConflict + default: + code = api.ErrorInternal + } + + bulkError(element.Action, code, err) + if !continueOnFailure { + return ret, errorsInBulk, nil + } + } else { + ret = append(ret, Result{ + Data: createTransactionResult.Transaction, + ResponseType: element.Action, + }) + } + case ActionAddMetadata: + type addMetadataRequest struct { + TargetType string `json:"targetType"` + TargetID json.RawMessage `json:"targetId"` + Metadata metadata.Metadata `json:"metadata"` + } + req := &addMetadataRequest{} + if err := json.Unmarshal(element.Data, req); err != nil { + return nil, errorsInBulk, fmt.Errorf("error parsing element %d: %s", i, err) + } + + var err error + switch req.TargetType { + case ledger.MetaTargetTypeAccount: + address := "" + if err := json.Unmarshal(req.TargetID, &address); err != nil { + return nil, errorsInBulk, err + } + err = l.SaveAccountMetadata(ctx, ledgercontroller.Parameters[ledgercontroller.SaveAccountMetadata]{ + DryRun: false, + IdempotencyKey: 
element.IdempotencyKey, + Input: ledgercontroller.SaveAccountMetadata{ + Address: address, + Metadata: req.Metadata, + }, + }) + case ledger.MetaTargetTypeTransaction: + transactionID := 0 + if err := json.Unmarshal(req.TargetID, &transactionID); err != nil { + return nil, errorsInBulk, err + } + err = l.SaveTransactionMetadata(ctx, ledgercontroller.Parameters[ledgercontroller.SaveTransactionMetadata]{ + DryRun: false, + IdempotencyKey: element.IdempotencyKey, + Input: ledgercontroller.SaveTransactionMetadata{ + TransactionID: transactionID, + Metadata: req.Metadata, + }, + }) + } + if err != nil { + var code string + switch { + case errors.Is(err, ledgercontroller.ErrNotFound): + code = api.ErrorCodeNotFound + default: + code = api.ErrorInternal + } + bulkError(element.Action, code, err) + if !continueOnFailure { + return ret, errorsInBulk, nil + } + } else { + ret = append(ret, Result{ + ResponseType: element.Action, + }) + } + case ActionRevertTransaction: + type revertTransactionRequest struct { + ID int `json:"id"` + Force bool `json:"force"` + AtEffectiveDate bool `json:"atEffectiveDate"` + } + req := &revertTransactionRequest{} + if err := json.Unmarshal(element.Data, req); err != nil { + return nil, errorsInBulk, fmt.Errorf("error parsing element %d: %s", i, err) + } + + revertTransactionResult, err := l.RevertTransaction(ctx, ledgercontroller.Parameters[ledgercontroller.RevertTransaction]{ + DryRun: false, + IdempotencyKey: element.IdempotencyKey, + Input: ledgercontroller.RevertTransaction{ + Force: req.Force, + AtEffectiveDate: req.AtEffectiveDate, + TransactionID: req.ID, + }, + }) + if err != nil { + var code string + switch { + case errors.Is(err, ledgercontroller.ErrNotFound): + code = api.ErrorCodeNotFound + default: + code = api.ErrorInternal + } + bulkError(element.Action, code, err) + if !continueOnFailure { + return ret, errorsInBulk, nil + } + } else { + ret = append(ret, Result{ + Data: revertTransactionResult.RevertTransaction, + ResponseType: element.Action, + }) + } + case ActionDeleteMetadata: + type deleteMetadataRequest struct { + TargetType string `json:"targetType"` + TargetID json.RawMessage `json:"targetId"` + Key string `json:"key"` + } + req := &deleteMetadataRequest{} + if err := json.Unmarshal(element.Data, req); err != nil { + return nil, errorsInBulk, fmt.Errorf("error parsing element %d: %s", i, err) + } + + var err error + switch req.TargetType { + case ledger.MetaTargetTypeAccount: + address := "" + if err := json.Unmarshal(req.TargetID, &address); err != nil { + return nil, errorsInBulk, err + } + err = l.DeleteAccountMetadata(ctx, ledgercontroller.Parameters[ledgercontroller.DeleteAccountMetadata]{ + DryRun: false, + IdempotencyKey: element.IdempotencyKey, + Input: ledgercontroller.DeleteAccountMetadata{ + Address: address, + Key: req.Key, + }, + }) + case ledger.MetaTargetTypeTransaction: + transactionID := 0 + if err := json.Unmarshal(req.TargetID, &transactionID); err != nil { + return nil, errorsInBulk, err + } + err = l.DeleteTransactionMetadata(ctx, ledgercontroller.Parameters[ledgercontroller.DeleteTransactionMetadata]{ + DryRun: false, + IdempotencyKey: element.IdempotencyKey, + Input: ledgercontroller.DeleteTransactionMetadata{ + TransactionID: transactionID, + Key: req.Key, + }, + }) + } + if err != nil { + var code string + switch { + case errors.Is(err, ledgercontroller.ErrNotFound): + code = api.ErrorCodeNotFound + default: + code = api.ErrorInternal + } + bulkError(element.Action, code, err) + if !continueOnFailure { + return ret, 
errorsInBulk, nil + } + } else { + ret = append(ret, Result{ + ResponseType: element.Action, + }) + } + } } + return ret, errorsInBulk, nil } diff --git a/internal/api/v2/controllers_bulk_test.go b/internal/api/v2/controllers_bulk_test.go index 4a3e5692d..aa6b6f38b 100644 --- a/internal/api/v2/controllers_bulk_test.go +++ b/internal/api/v2/controllers_bulk_test.go @@ -1,25 +1,26 @@ -package v2_test +package v2 import ( "bytes" "fmt" + "github.com/formancehq/ledger/internal/api/common" "math/big" "net/http" "net/http/httptest" "net/url" + "os" "testing" - "github.com/formancehq/go-libs/time" + "github.com/formancehq/go-libs/v2/collectionutils" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" - sharedapi "github.com/formancehq/go-libs/api" - "github.com/formancehq/go-libs/auth" - "github.com/formancehq/go-libs/metadata" + "github.com/formancehq/go-libs/v2/time" + + "errors" + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/auth" + "github.com/formancehq/go-libs/v2/metadata" ledger "github.com/formancehq/ledger/internal" - "github.com/formancehq/ledger/internal/api/backend" - v2 "github.com/formancehq/ledger/internal/api/v2" - "github.com/formancehq/ledger/internal/engine/command" - "github.com/formancehq/ledger/internal/opentelemetry/metrics" - "github.com/pkg/errors" "github.com/stretchr/testify/require" "go.uber.org/mock/gomock" ) @@ -33,9 +34,9 @@ func TestBulk(t *testing.T) { name string queryParams url.Values body string - expectations func(mockLedger *backend.MockLedger) + expectations func(mockLedger *LedgerController) expectError bool - expectResults []v2.Result + expectResults []Result } testCases := []bulkTestCase{ @@ -53,7 +54,7 @@ func TestBulk(t *testing.T) { "timestamp": "%s" } }]`, now.Format(time.RFC3339Nano)), - expectations: func(mockLedger *backend.MockLedger) { + expectations: func(mockLedger *LedgerController) { postings := []ledger.Posting{{ Source: "world", Destination: "bank", @@ -61,20 +62,23 @@ func TestBulk(t *testing.T) { Asset: "USD/2", }} mockLedger.EXPECT(). - CreateTransaction(gomock.Any(), command.Parameters{}, ledger.TxToScriptData(ledger.TransactionData{ - Postings: postings, - Timestamp: now, - }, false)). - Return(&ledger.Transaction{ - TransactionData: ledger.TransactionData{ + CreateTransaction(gomock.Any(), ledgercontroller.Parameters[ledgercontroller.RunScript]{ + Input: common.TxToScriptData(ledger.TransactionData{ Postings: postings, - Metadata: metadata.Metadata{}, Timestamp: now, + }, false), + }). + Return(&ledger.CreatedTransaction{ + Transaction: ledger.Transaction{ + TransactionData: ledger.TransactionData{ + Postings: postings, + Metadata: metadata.Metadata{}, + Timestamp: now, + }, }, - ID: big.NewInt(0), }, nil) }, - expectResults: []v2.Result{{ + expectResults: []Result{{ Data: map[string]any{ "postings": []any{ map[string]any{ @@ -89,7 +93,7 @@ func TestBulk(t *testing.T) { "reverted": false, "id": float64(0), }, - ResponseType: v2.ActionCreateTransaction, + ResponseType: ActionCreateTransaction, }}, }, { @@ -104,15 +108,20 @@ func TestBulk(t *testing.T) { } } }]`, - expectations: func(mockLedger *backend.MockLedger) { + expectations: func(mockLedger *LedgerController) { mockLedger.EXPECT(). 
- SaveMeta(gomock.Any(), command.Parameters{}, ledger.MetaTargetTypeTransaction, big.NewInt(1), metadata.Metadata{ - "foo": "bar", + SaveTransactionMetadata(gomock.Any(), ledgercontroller.Parameters[ledgercontroller.SaveTransactionMetadata]{ + Input: ledgercontroller.SaveTransactionMetadata{ + TransactionID: 1, + Metadata: metadata.Metadata{ + "foo": "bar", + }, + }, }). Return(nil) }, - expectResults: []v2.Result{{ - ResponseType: v2.ActionAddMetadata, + expectResults: []Result{{ + ResponseType: ActionAddMetadata, }}, }, { @@ -127,15 +136,20 @@ func TestBulk(t *testing.T) { } } }]`, - expectations: func(mockLedger *backend.MockLedger) { + expectations: func(mockLedger *LedgerController) { mockLedger.EXPECT(). - SaveMeta(gomock.Any(), command.Parameters{}, ledger.MetaTargetTypeAccount, "world", metadata.Metadata{ - "foo": "bar", + SaveAccountMetadata(gomock.Any(), ledgercontroller.Parameters[ledgercontroller.SaveAccountMetadata]{ + Input: ledgercontroller.SaveAccountMetadata{ + Address: "world", + Metadata: metadata.Metadata{ + "foo": "bar", + }, + }, }). Return(nil) }, - expectResults: []v2.Result{{ - ResponseType: v2.ActionAddMetadata, + expectResults: []Result{{ + ResponseType: ActionAddMetadata, }}, }, { @@ -146,20 +160,24 @@ func TestBulk(t *testing.T) { "id": 1 } }]`, - expectations: func(mockLedger *backend.MockLedger) { + expectations: func(mockLedger *LedgerController) { mockLedger.EXPECT(). - RevertTransaction(gomock.Any(), command.Parameters{}, big.NewInt(1), false, false). - Return(&ledger.Transaction{}, nil) + RevertTransaction(gomock.Any(), ledgercontroller.Parameters[ledgercontroller.RevertTransaction]{ + Input: ledgercontroller.RevertTransaction{ + TransactionID: 1, + }, + }). + Return(&ledger.RevertedTransaction{}, nil) }, - expectResults: []v2.Result{{ + expectResults: []Result{{ Data: map[string]any{ - "id": nil, + "id": float64(0), "metadata": nil, "postings": nil, "reverted": false, "timestamp": "0001-01-01T00:00:00Z", }, - ResponseType: v2.ActionRevertTransaction, + ResponseType: ActionRevertTransaction, }}, }, { @@ -172,13 +190,18 @@ func TestBulk(t *testing.T) { "key": "foo" } }]`, - expectations: func(mockLedger *backend.MockLedger) { + expectations: func(mockLedger *LedgerController) { mockLedger.EXPECT(). - DeleteMetadata(gomock.Any(), command.Parameters{}, ledger.MetaTargetTypeTransaction, big.NewInt(1), "foo"). + DeleteTransactionMetadata(gomock.Any(), ledgercontroller.Parameters[ledgercontroller.DeleteTransactionMetadata]{ + Input: ledgercontroller.DeleteTransactionMetadata{ + TransactionID: 1, + Key: "foo", + }, + }). Return(nil) }, - expectResults: []v2.Result{{ - ResponseType: v2.ActionDeleteMetadata, + expectResults: []Result{{ + ResponseType: ActionDeleteMetadata, }}, }, { @@ -215,22 +238,32 @@ func TestBulk(t *testing.T) { } } ]`, - expectations: func(mockLedger *backend.MockLedger) { + expectations: func(mockLedger *LedgerController) { mockLedger.EXPECT(). - SaveMeta(gomock.Any(), command.Parameters{}, ledger.MetaTargetTypeAccount, "world", metadata.Metadata{ - "foo": "bar", + SaveAccountMetadata(gomock.Any(), ledgercontroller.Parameters[ledgercontroller.SaveAccountMetadata]{ + Input: ledgercontroller.SaveAccountMetadata{ + Address: "world", + Metadata: metadata.Metadata{ + "foo": "bar", + }, + }, }). Return(nil) mockLedger.EXPECT(). 
- SaveMeta(gomock.Any(), command.Parameters{}, ledger.MetaTargetTypeAccount, "world", metadata.Metadata{ - "foo2": "bar2", + SaveAccountMetadata(gomock.Any(), ledgercontroller.Parameters[ledgercontroller.SaveAccountMetadata]{ + Input: ledgercontroller.SaveAccountMetadata{ + Address: "world", + Metadata: metadata.Metadata{ + "foo2": "bar2", + }, + }, }). Return(errors.New("unexpected error")) }, - expectResults: []v2.Result{{ - ResponseType: v2.ActionAddMetadata, + expectResults: []Result{{ + ResponseType: ActionAddMetadata, }, { - ErrorCode: "INTERNAL", + ErrorCode: api.ErrorInternal, ErrorDescription: "unexpected error", ResponseType: "ERROR", }}, @@ -273,31 +306,46 @@ func TestBulk(t *testing.T) { queryParams: map[string][]string{ "continueOnFailure": {"true"}, }, - expectations: func(mockLedger *backend.MockLedger) { + expectations: func(mockLedger *LedgerController) { mockLedger.EXPECT(). - SaveMeta(gomock.Any(), command.Parameters{}, ledger.MetaTargetTypeAccount, "world", metadata.Metadata{ - "foo": "bar", + SaveAccountMetadata(gomock.Any(), ledgercontroller.Parameters[ledgercontroller.SaveAccountMetadata]{ + Input: ledgercontroller.SaveAccountMetadata{ + Address: "world", + Metadata: metadata.Metadata{ + "foo": "bar", + }, + }, }). Return(nil) mockLedger.EXPECT(). - SaveMeta(gomock.Any(), command.Parameters{}, ledger.MetaTargetTypeAccount, "world", metadata.Metadata{ - "foo2": "bar2", + SaveAccountMetadata(gomock.Any(), ledgercontroller.Parameters[ledgercontroller.SaveAccountMetadata]{ + Input: ledgercontroller.SaveAccountMetadata{ + Address: "world", + Metadata: metadata.Metadata{ + "foo2": "bar2", + }, + }, }). Return(errors.New("unexpected error")) mockLedger.EXPECT(). - SaveMeta(gomock.Any(), command.Parameters{}, ledger.MetaTargetTypeAccount, "world", metadata.Metadata{ - "foo3": "bar3", + SaveAccountMetadata(gomock.Any(), ledgercontroller.Parameters[ledgercontroller.SaveAccountMetadata]{ + Input: ledgercontroller.SaveAccountMetadata{ + Address: "world", + Metadata: metadata.Metadata{ + "foo3": "bar3", + }, + }, }). 
Return(nil) }, - expectResults: []v2.Result{{ - ResponseType: v2.ActionAddMetadata, + expectResults: []Result{{ + ResponseType: ActionAddMetadata, }, { ResponseType: "ERROR", - ErrorCode: "INTERNAL", + ErrorCode: api.ErrorInternal, ErrorDescription: "unexpected error", }, { - ResponseType: v2.ActionAddMetadata, + ResponseType: ActionAddMetadata, }}, expectError: true, }, @@ -306,10 +354,10 @@ func TestBulk(t *testing.T) { testCase := testCase t.Run(testCase.name, func(t *testing.T) { - backend, mock := newTestingBackend(t, true) - testCase.expectations(mock) + systemController, ledgerController := newTestingSystemController(t, true) + testCase.expectations(ledgerController) - router := v2.NewRouter(backend, nil, metrics.NewNoOpRegistry(), auth.NewNoAuth(), testing.Verbose()) + router := NewRouter(systemController, auth.NewNoAuth(), os.Getenv("DEBUG") == "true") req := httptest.NewRequest(http.MethodPost, "/xxx/_bulk", bytes.NewBufferString(testCase.body)) rec := httptest.NewRecorder() @@ -325,7 +373,14 @@ func TestBulk(t *testing.T) { require.Equal(t, http.StatusOK, rec.Code) } - ret, _ := sharedapi.DecodeSingleResponse[[]v2.Result](t, rec.Body) + ret, _ := api.DecodeSingleResponse[[]Result](t, rec.Body) + ret = collectionutils.Map(ret, func(from Result) Result { + switch data := from.Data.(type) { + case map[string]any: + delete(data, "insertedAt") + } + return from + }) require.Equal(t, testCase.expectResults, ret) }) } diff --git a/internal/api/v2/controllers_config.go b/internal/api/v2/controllers_config.go deleted file mode 100644 index a9b610e17..000000000 --- a/internal/api/v2/controllers_config.go +++ /dev/null @@ -1,23 +0,0 @@ -package v2 - -import ( - _ "embed" - "net/http" - - sharedapi "github.com/formancehq/go-libs/api" - "github.com/formancehq/ledger/internal/api/backend" -) - -type ConfigInfo struct { - Server string `json:"server"` - Version string `json:"version"` -} - -func getInfo(backend backend.Backend) func(w http.ResponseWriter, r *http.Request) { - return func(w http.ResponseWriter, r *http.Request) { - sharedapi.RawOk(w, ConfigInfo{ - Server: "ledger", - Version: backend.GetVersion(), - }) - } -} diff --git a/internal/api/v2/controllers_config_test.go b/internal/api/v2/controllers_config_test.go deleted file mode 100644 index 5c10297e2..000000000 --- a/internal/api/v2/controllers_config_test.go +++ /dev/null @@ -1,40 +0,0 @@ -package v2_test - -import ( - "encoding/json" - "net/http" - "net/http/httptest" - "testing" - - "github.com/formancehq/go-libs/auth" - v2 "github.com/formancehq/ledger/internal/api/v2" - "github.com/formancehq/ledger/internal/opentelemetry/metrics" - "github.com/stretchr/testify/require" -) - -func TestGetInfo(t *testing.T) { - t.Parallel() - - backend, _ := newTestingBackend(t, false) - router := v2.NewRouter(backend, nil, metrics.NewNoOpRegistry(), auth.NewNoAuth(), testing.Verbose()) - - backend. - EXPECT(). - GetVersion(). 
- Return("latest") - - req := httptest.NewRequest(http.MethodGet, "/_info", nil) - rec := httptest.NewRecorder() - - router.ServeHTTP(rec, req) - - require.Equal(t, http.StatusOK, rec.Code) - - info := v2.ConfigInfo{} - require.NoError(t, json.NewDecoder(rec.Body).Decode(&info)) - - require.EqualValues(t, v2.ConfigInfo{ - Server: "ledger", - Version: "latest", - }, info) -} diff --git a/internal/api/v2/controllers_create_ledger.go b/internal/api/v2/controllers_create_ledger.go deleted file mode 100644 index 84a05af6e..000000000 --- a/internal/api/v2/controllers_create_ledger.go +++ /dev/null @@ -1,45 +0,0 @@ -package v2 - -import ( - "encoding/json" - "io" - "net/http" - - "github.com/go-chi/chi/v5" - - "github.com/formancehq/ledger/internal/storage/driver" - - sharedapi "github.com/formancehq/go-libs/api" - "github.com/formancehq/ledger/internal/api/backend" - "github.com/pkg/errors" -) - -func createLedger(b backend.Backend) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - configuration := driver.LedgerConfiguration{} - - data, err := io.ReadAll(r.Body) - if err != nil && !errors.Is(err, io.EOF) { - sharedapi.InternalServerError(w, r, err) - return - } - - if len(data) > 0 { - if err := json.Unmarshal(data, &configuration); err != nil { - sharedapi.BadRequest(w, ErrValidation, err) - return - } - } - - if err := b.CreateLedger(r.Context(), chi.URLParam(r, "ledger"), configuration); err != nil { - switch { - case errors.Is(err, driver.ErrLedgerAlreadyExists): - sharedapi.BadRequest(w, ErrValidation, err) - default: - sharedapi.InternalServerError(w, r, err) - } - return - } - sharedapi.NoContent(w) - } -} diff --git a/internal/api/v2/controllers_create_ledger_test.go b/internal/api/v2/controllers_create_ledger_test.go deleted file mode 100644 index 286143dd3..000000000 --- a/internal/api/v2/controllers_create_ledger_test.go +++ /dev/null @@ -1,70 +0,0 @@ -package v2_test - -import ( - "net/http" - "net/http/httptest" - "testing" - - "github.com/formancehq/go-libs/api" - "github.com/formancehq/go-libs/auth" - v2 "github.com/formancehq/ledger/internal/api/v2" - "github.com/formancehq/ledger/internal/opentelemetry/metrics" - "github.com/google/uuid" - "github.com/stretchr/testify/require" - "go.uber.org/mock/gomock" - - "github.com/formancehq/ledger/internal/storage/driver" -) - -func TestConfigureLedger(t *testing.T) { - t.Parallel() - - type testCase struct { - configuration driver.LedgerConfiguration - name string - } - - testCases := []testCase{ - { - name: "nominal", - configuration: driver.LedgerConfiguration{}, - }, - { - name: "with alternative bucket", - configuration: driver.LedgerConfiguration{ - Bucket: "bucket0", - }, - }, - { - name: "with metadata", - configuration: driver.LedgerConfiguration{ - Metadata: map[string]string{ - "foo": "bar", - }, - }, - }, - } - - for _, testCase := range testCases { - testCase := testCase - t.Run(testCase.name, func(t *testing.T) { - t.Parallel() - - b, _ := newTestingBackend(t, false) - router := v2.NewRouter(b, nil, metrics.NewNoOpRegistry(), auth.NewNoAuth(), testing.Verbose()) - - name := uuid.NewString() - b. - EXPECT(). - CreateLedger(gomock.Any(), name, testCase.configuration). 
- Return(nil) - - req := httptest.NewRequest(http.MethodPost, "/"+name, api.Buffer(t, testCase.configuration)) - rec := httptest.NewRecorder() - - router.ServeHTTP(rec, req) - - require.Equal(t, http.StatusNoContent, rec.Code) - }) - } -} diff --git a/internal/api/v2/controllers_get_ledger.go b/internal/api/v2/controllers_get_ledger.go deleted file mode 100644 index 23c01fe32..000000000 --- a/internal/api/v2/controllers_get_ledger.go +++ /dev/null @@ -1,46 +0,0 @@ -package v2 - -import ( - "encoding/json" - "io" - "net/http" - - "github.com/go-chi/chi/v5" - - sharedapi "github.com/formancehq/go-libs/api" - "github.com/formancehq/ledger/internal/api/backend" - "github.com/formancehq/ledger/internal/storage/driver" - "github.com/formancehq/ledger/internal/storage/sqlutils" - "github.com/pkg/errors" -) - -func getLedger(b backend.Backend) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - configuration := driver.LedgerState{} - - data, err := io.ReadAll(r.Body) - if err != nil && !errors.Is(err, io.EOF) { - sharedapi.InternalServerError(w, r, err) - return - } - - if len(data) > 0 { - if err := json.Unmarshal(data, &configuration); err != nil { - sharedapi.BadRequest(w, ErrValidation, err) - return - } - } - - ledger, err := b.GetLedger(r.Context(), chi.URLParam(r, "ledger")) - if err != nil { - switch { - case sqlutils.IsNotFoundError(err): - sharedapi.NotFound(w, err) - default: - sharedapi.InternalServerError(w, r, err) - } - return - } - sharedapi.Ok(w, ledger) - } -} diff --git a/internal/api/v2/controllers_get_ledger_test.go b/internal/api/v2/controllers_get_ledger_test.go deleted file mode 100644 index f0d998cbf..000000000 --- a/internal/api/v2/controllers_get_ledger_test.go +++ /dev/null @@ -1,47 +0,0 @@ -package v2_test - -import ( - "net/http" - "net/http/httptest" - "testing" - - "github.com/formancehq/go-libs/time" - - "github.com/formancehq/go-libs/api" - "github.com/formancehq/go-libs/auth" - "github.com/formancehq/ledger/internal/storage/systemstore" - - v2 "github.com/formancehq/ledger/internal/api/v2" - "github.com/formancehq/ledger/internal/opentelemetry/metrics" - "github.com/google/uuid" - "github.com/stretchr/testify/require" - "go.uber.org/mock/gomock" -) - -func TestGetLedger(t *testing.T) { - t.Parallel() - - b, _ := newTestingBackend(t, false) - router := v2.NewRouter(b, nil, metrics.NewNoOpRegistry(), auth.NewNoAuth(), testing.Verbose()) - - name := uuid.NewString() - now := time.Now() - ledger := systemstore.Ledger{ - Name: name, - AddedAt: now, - Bucket: "bucket0", - } - b. - EXPECT(). - GetLedger(gomock.Any(), name). 
- Return(&ledger, nil) - - req := httptest.NewRequest(http.MethodGet, "/"+name, nil) - rec := httptest.NewRecorder() - - router.ServeHTTP(rec, req) - - require.Equal(t, http.StatusOK, rec.Code) - ledgerFromAPI, _ := api.DecodeSingleResponse[systemstore.Ledger](t, rec.Body) - require.Equal(t, ledger, ledgerFromAPI) -} diff --git a/internal/api/v2/controllers_get_logs.go b/internal/api/v2/controllers_get_logs.go deleted file mode 100644 index 3ba86b32b..000000000 --- a/internal/api/v2/controllers_get_logs.go +++ /dev/null @@ -1,52 +0,0 @@ -package v2 - -import ( - "fmt" - "net/http" - - sharedapi "github.com/formancehq/go-libs/api" - "github.com/formancehq/go-libs/bun/bunpaginate" - "github.com/formancehq/ledger/internal/api/backend" - "github.com/formancehq/ledger/internal/storage/ledgerstore" -) - -func getLogs(w http.ResponseWriter, r *http.Request) { - l := backend.LedgerFromContext(r.Context()) - - query := ledgerstore.GetLogsQuery{} - - if r.URL.Query().Get(QueryKeyCursor) != "" { - err := bunpaginate.UnmarshalCursor(r.URL.Query().Get(QueryKeyCursor), &query) - if err != nil { - sharedapi.BadRequest(w, ErrValidation, fmt.Errorf("invalid '%s' query param", QueryKeyCursor)) - return - } - } else { - var err error - - pageSize, err := bunpaginate.GetPageSize(r) - if err != nil { - sharedapi.BadRequest(w, ErrValidation, err) - return - } - - qb, err := getQueryBuilder(r) - if err != nil { - sharedapi.BadRequest(w, ErrValidation, err) - return - } - - query = ledgerstore.NewGetLogsQuery(ledgerstore.PaginatedQueryOptions[any]{ - QueryBuilder: qb, - PageSize: pageSize, - }) - } - - cursor, err := l.GetLogs(r.Context(), query) - if err != nil { - sharedapi.InternalServerError(w, r, err) - return - } - - sharedapi.RenderCursor(w, *cursor) -} diff --git a/internal/api/v2/controllers_info_test.go b/internal/api/v2/controllers_info_test.go deleted file mode 100644 index 075f26644..000000000 --- a/internal/api/v2/controllers_info_test.go +++ /dev/null @@ -1,202 +0,0 @@ -package v2_test - -import ( - "bytes" - "encoding/json" - "fmt" - "net/http" - "net/http/httptest" - "net/url" - "testing" - - "github.com/formancehq/go-libs/time" - - "github.com/formancehq/go-libs/auth" - "github.com/formancehq/go-libs/bun/bunpaginate" - - sharedapi "github.com/formancehq/go-libs/api" - "github.com/formancehq/go-libs/metadata" - "github.com/formancehq/go-libs/migrations" - "github.com/formancehq/go-libs/query" - ledger "github.com/formancehq/ledger/internal" - v2 "github.com/formancehq/ledger/internal/api/v2" - "github.com/formancehq/ledger/internal/engine" - "github.com/formancehq/ledger/internal/opentelemetry/metrics" - "github.com/formancehq/ledger/internal/storage/ledgerstore" - "github.com/stretchr/testify/require" - "go.uber.org/mock/gomock" -) - -func TestGetLedgerInfo(t *testing.T) { - t.Parallel() - - backend, mock := newTestingBackend(t, false) - router := v2.NewRouter(backend, nil, metrics.NewNoOpRegistry(), auth.NewNoAuth(), testing.Verbose()) - - migrationInfo := []migrations.Info{ - { - Version: "1", - Name: "init", - State: "ready", - Date: time.Now().Add(-2 * time.Minute).Round(time.Second).UTC(), - }, - { - Version: "2", - Name: "fix", - State: "ready", - Date: time.Now().Add(-time.Minute).Round(time.Second).UTC(), - }, - } - - mock.EXPECT(). - GetMigrationsInfo(gomock.Any()). 
- Return(migrationInfo, nil) - - req := httptest.NewRequest(http.MethodGet, "/xxx/_info", nil) - rec := httptest.NewRecorder() - - router.ServeHTTP(rec, req) - - require.Equal(t, http.StatusOK, rec.Code) - - info, ok := sharedapi.DecodeSingleResponse[v2.Info](t, rec.Body) - require.True(t, ok) - - require.EqualValues(t, v2.Info{ - Name: "xxx", - Storage: v2.StorageInfo{ - Migrations: migrationInfo, - }, - }, info) -} - -func TestGetStats(t *testing.T) { - t.Parallel() - - backend, mock := newTestingBackend(t, true) - router := v2.NewRouter(backend, nil, metrics.NewNoOpRegistry(), auth.NewNoAuth(), testing.Verbose()) - - expectedStats := engine.Stats{ - Transactions: 10, - Accounts: 5, - } - - mock.EXPECT(). - Stats(gomock.Any()). - Return(expectedStats, nil) - - req := httptest.NewRequest(http.MethodGet, "/xxx/stats", nil) - rec := httptest.NewRecorder() - - router.ServeHTTP(rec, req) - - require.Equal(t, http.StatusOK, rec.Code) - - stats, ok := sharedapi.DecodeSingleResponse[engine.Stats](t, rec.Body) - require.True(t, ok) - - require.EqualValues(t, expectedStats, stats) -} - -func TestGetLogs(t *testing.T) { - t.Parallel() - - type testCase struct { - name string - queryParams url.Values - body string - expectQuery ledgerstore.PaginatedQueryOptions[any] - expectStatusCode int - expectedErrorCode string - } - - now := time.Now() - testCases := []testCase{ - { - name: "nominal", - expectQuery: ledgerstore.NewPaginatedQueryOptions[any](nil), - }, - { - name: "using start time", - body: fmt.Sprintf(`{"$gte": {"date": "%s"}}`, now.Format(time.DateFormat)), - expectQuery: ledgerstore.NewPaginatedQueryOptions[any](nil).WithQueryBuilder(query.Gte("date", now.Format(time.DateFormat))), - }, - { - name: "using end time", - body: fmt.Sprintf(`{"$lt": {"date": "%s"}}`, now.Format(time.DateFormat)), - expectQuery: ledgerstore.NewPaginatedQueryOptions[any](nil). - WithQueryBuilder(query.Lt("date", now.Format(time.DateFormat))), - }, - { - name: "using empty cursor", - queryParams: url.Values{ - "cursor": []string{bunpaginate.EncodeCursor(ledgerstore.NewGetLogsQuery(ledgerstore.NewPaginatedQueryOptions[any](nil)))}, - }, - expectQuery: ledgerstore.NewPaginatedQueryOptions[any](nil), - }, - { - name: "using invalid cursor", - queryParams: url.Values{ - "cursor": []string{"xxx"}, - }, - expectStatusCode: http.StatusBadRequest, - expectedErrorCode: v2.ErrValidation, - }, - } - for _, testCase := range testCases { - testCase := testCase - t.Run(testCase.name, func(t *testing.T) { - - if testCase.expectStatusCode == 0 { - testCase.expectStatusCode = http.StatusOK - } - - expectedCursor := bunpaginate.Cursor[ledger.ChainedLog]{ - Data: []ledger.ChainedLog{ - *ledger.NewTransactionLog(ledger.NewTransaction(), map[string]metadata.Metadata{}). - ChainLog(nil), - }, - } - - backend, mockLedger := newTestingBackend(t, true) - if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { - mockLedger.EXPECT(). - GetLogs(gomock.Any(), ledgerstore.NewGetLogsQuery(testCase.expectQuery)). 
- Return(&expectedCursor, nil) - } - - router := v2.NewRouter(backend, nil, metrics.NewNoOpRegistry(), auth.NewNoAuth(), testing.Verbose()) - - req := httptest.NewRequest(http.MethodGet, "/xxx/logs", bytes.NewBufferString(testCase.body)) - rec := httptest.NewRecorder() - if testCase.queryParams != nil { - req.URL.RawQuery = testCase.queryParams.Encode() - } - - router.ServeHTTP(rec, req) - - require.Equal(t, testCase.expectStatusCode, rec.Code) - if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { - cursor := sharedapi.DecodeCursorResponse[ledger.ChainedLog](t, rec.Body) - - cursorData, err := json.Marshal(cursor) - require.NoError(t, err) - - cursorAsMap := make(map[string]any) - require.NoError(t, json.Unmarshal(cursorData, &cursorAsMap)) - - expectedCursorData, err := json.Marshal(expectedCursor) - require.NoError(t, err) - - expectedCursorAsMap := make(map[string]any) - require.NoError(t, json.Unmarshal(expectedCursorData, &expectedCursorAsMap)) - - require.Equal(t, expectedCursorAsMap, cursorAsMap) - } else { - err := sharedapi.ErrorResponse{} - sharedapi.Decode(t, rec.Body, &err) - require.EqualValues(t, testCase.expectedErrorCode, err.ErrorCode) - } - }) - } -} diff --git a/internal/api/v2/controllers_ledgers.go b/internal/api/v2/controllers_ledgers.go deleted file mode 100644 index 67e7d0212..000000000 --- a/internal/api/v2/controllers_ledgers.go +++ /dev/null @@ -1,75 +0,0 @@ -package v2 - -import ( - "encoding/json" - "net/http" - - "github.com/go-chi/chi/v5" - - "github.com/formancehq/go-libs/metadata" - "github.com/pkg/errors" - - "github.com/formancehq/go-libs/pointer" - - "github.com/formancehq/go-libs/bun/bunpaginate" - - sharedapi "github.com/formancehq/go-libs/api" - "github.com/formancehq/ledger/internal/storage/systemstore" - - "github.com/formancehq/ledger/internal/api/backend" -) - -func listLedgers(b backend.Backend) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - - query, err := bunpaginate.Extract[systemstore.ListLedgersQuery](r, func() (*systemstore.ListLedgersQuery, error) { - pageSize, err := bunpaginate.GetPageSize(r) - if err != nil { - return nil, err - } - - return pointer.For(systemstore.NewListLedgersQuery(pageSize)), nil - }) - if err != nil { - sharedapi.BadRequest(w, ErrValidation, err) - return - } - - ledgers, err := b.ListLedgers(r.Context(), *query) - if err != nil { - sharedapi.InternalServerError(w, r, err) - return - } - - sharedapi.RenderCursor(w, *ledgers) - } -} - -func updateLedgerMetadata(b backend.Backend) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - - m := metadata.Metadata{} - if err := json.NewDecoder(r.Body).Decode(&m); err != nil { - sharedapi.BadRequest(w, "VALIDATION", errors.New("invalid format")) - return - } - - if err := b.UpdateLedgerMetadata(r.Context(), chi.URLParam(r, "ledger"), m); err != nil { - sharedapi.InternalServerError(w, r, err) - return - } - - sharedapi.NoContent(w) - } -} - -func deleteLedgerMetadata(b backend.Backend) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - if err := b.DeleteLedgerMetadata(r.Context(), chi.URLParam(r, "ledger"), chi.URLParam(r, "key")); err != nil { - sharedapi.InternalServerError(w, r, err) - return - } - - sharedapi.NoContent(w) - } -} diff --git a/internal/api/v2/controllers_ledgers_create.go b/internal/api/v2/controllers_ledgers_create.go new file mode 100644 index 000000000..ace33cbc0 --- /dev/null +++ b/internal/api/v2/controllers_ledgers_create.go @@ -0,0 +1,48 @@ +package 
v2 + +import ( + "encoding/json" + "github.com/formancehq/ledger/internal/api/common" + "io" + "net/http" + + "github.com/formancehq/ledger/internal/controller/system" + + ledger "github.com/formancehq/ledger/internal" + + "errors" + "github.com/formancehq/go-libs/v2/api" + "github.com/go-chi/chi/v5" +) + +func createLedger(systemController system.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + configuration := ledger.Configuration{} + data, err := io.ReadAll(r.Body) + if err != nil && !errors.Is(err, io.EOF) { + api.InternalServerError(w, r, err) + return + } + + if len(data) > 0 { + if err := json.Unmarshal(data, &configuration); err != nil { + api.BadRequest(w, ErrValidation, err) + return + } + } + + if err := systemController.CreateLedger(r.Context(), chi.URLParam(r, "ledger"), configuration); err != nil { + switch { + case errors.Is(err, system.ErrInvalidLedgerConfiguration{}) || + errors.Is(err, system.ErrLedgerAlreadyExists) || + errors.Is(err, ledger.ErrInvalidLedgerName{}) || + errors.Is(err, ledger.ErrInvalidBucketName{}): + api.BadRequest(w, ErrValidation, err) + default: + common.HandleCommonErrors(w, r, err) + } + return + } + api.NoContent(w) + } +} diff --git a/internal/api/v2/controllers_ledgers_create_test.go b/internal/api/v2/controllers_ledgers_create_test.go new file mode 100644 index 000000000..7c302f0bf --- /dev/null +++ b/internal/api/v2/controllers_ledgers_create_test.go @@ -0,0 +1,130 @@ +package v2 + +import ( + "bytes" + "encoding/json" + "net/http" + "net/http/httptest" + "os" + "testing" + + "github.com/formancehq/ledger/internal/controller/system" + + "errors" + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/logging" + ledger "github.com/formancehq/ledger/internal" + + "github.com/formancehq/go-libs/v2/auth" + "github.com/google/uuid" + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" +) + +func TestLedgersCreate(t *testing.T) { + t.Parallel() + + ctx := logging.TestingContext() + + type testCase struct { + configuration string + name string + expectedBackendCall bool + returnErr error + expectStatusCode int + expectErrorCode string + } + + testCases := []testCase{ + { + name: "nominal", + expectedBackendCall: true, + }, + { + name: "with alternative bucket", + configuration: `{"bucket": "bucket0"}`, + expectedBackendCall: true, + }, + { + name: "with metadata", + configuration: `{"metadata": {"foo": "bar"}}`, + expectedBackendCall: true, + }, + { + name: "ledger already exists", + expectedBackendCall: true, + returnErr: system.ErrLedgerAlreadyExists, + expectStatusCode: http.StatusBadRequest, + expectErrorCode: ErrValidation, + }, + { + name: "invalid ledger name", + expectedBackendCall: true, + returnErr: ledger.ErrInvalidLedgerName{}, + expectStatusCode: http.StatusBadRequest, + expectErrorCode: ErrValidation, + }, + { + name: "invalid bucket name", + expectedBackendCall: true, + returnErr: ledger.ErrInvalidBucketName{}, + expectStatusCode: http.StatusBadRequest, + expectErrorCode: ErrValidation, + }, + { + name: "invalid ledger configuration", + expectedBackendCall: true, + returnErr: system.ErrInvalidLedgerConfiguration{}, + expectStatusCode: http.StatusBadRequest, + expectErrorCode: ErrValidation, + }, + { + name: "unexpected error", + expectedBackendCall: true, + returnErr: errors.New("unexpected error"), + expectStatusCode: http.StatusInternalServerError, + expectErrorCode: api.ErrorInternal, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t 
*testing.T) { + t.Parallel() + + systemController, _ := newTestingSystemController(t, false) + router := NewRouter(systemController, auth.NewNoAuth(), os.Getenv("DEBUG") == "true") + + name := uuid.NewString() + + if tc.expectedBackendCall { + configuration := ledger.Configuration{} + if tc.configuration != "" { + require.NoError(t, json.Unmarshal([]byte(tc.configuration), &configuration)) + } + systemController. + EXPECT(). + CreateLedger(gomock.Any(), name, configuration). + Return(tc.returnErr) + } + + buf := bytes.NewBuffer(nil) + if tc.configuration != "" { + buf.Write([]byte(tc.configuration)) + } + + req := httptest.NewRequest(http.MethodPost, "/"+name, buf) + req = req.WithContext(ctx) + rec := httptest.NewRecorder() + + router.ServeHTTP(rec, req) + + if tc.expectStatusCode == 0 || tc.expectStatusCode == http.StatusNoContent { + require.Equal(t, http.StatusNoContent, rec.Code) + } else { + err := api.ErrorResponse{} + api.Decode(t, rec.Body, &err) + require.EqualValues(t, tc.expectErrorCode, err.ErrorCode) + } + }) + } +} diff --git a/internal/api/v2/controllers_ledgers_delete_metadata.go b/internal/api/v2/controllers_ledgers_delete_metadata.go new file mode 100644 index 000000000..060f7861b --- /dev/null +++ b/internal/api/v2/controllers_ledgers_delete_metadata.go @@ -0,0 +1,22 @@ +package v2 + +import ( + "github.com/formancehq/ledger/internal/api/common" + "net/http" + + "github.com/formancehq/ledger/internal/controller/system" + + "github.com/formancehq/go-libs/v2/api" + "github.com/go-chi/chi/v5" +) + +func deleteLedgerMetadata(b system.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + if err := b.DeleteLedgerMetadata(r.Context(), chi.URLParam(r, "ledger"), chi.URLParam(r, "key")); err != nil { + common.HandleCommonErrors(w, r, err) + return + } + + api.NoContent(w) + } +} diff --git a/internal/api/v2/controllers_ledgers_delete_metadata_test.go b/internal/api/v2/controllers_ledgers_delete_metadata_test.go new file mode 100644 index 000000000..d1e742a66 --- /dev/null +++ b/internal/api/v2/controllers_ledgers_delete_metadata_test.go @@ -0,0 +1,75 @@ +package v2 + +import ( + "encoding/json" + "net/http" + "net/http/httptest" + "os" + "testing" + + "errors" + "github.com/formancehq/go-libs/v2/api" + + "github.com/formancehq/go-libs/v2/auth" + "github.com/formancehq/go-libs/v2/logging" + "github.com/google/uuid" + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" +) + +func TestLedgersDeleteMetadata(t *testing.T) { + t.Parallel() + + ctx := logging.TestingContext() + + type testCase struct { + name string + returnErr error + expectedStatusCode int + expectedErrorCode string + expectBackendCall bool + } + + for _, tc := range []testCase{ + { + name: "nominal", + expectBackendCall: true, + }, + { + name: "unexpected backend error", + expectBackendCall: true, + returnErr: errors.New("undefined error"), + expectedStatusCode: http.StatusInternalServerError, + expectedErrorCode: api.ErrorInternal, + }, + } { + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + name := uuid.NewString() + systemController, _ := newTestingSystemController(t, false) + if tc.expectBackendCall { + systemController.EXPECT(). + DeleteLedgerMetadata(gomock.Any(), name, "foo"). 
+ Return(tc.returnErr) + } + + router := NewRouter(systemController, auth.NewNoAuth(), os.Getenv("DEBUG") == "true") + + req := httptest.NewRequest(http.MethodDelete, "/"+name+"/metadata/foo", nil) + req = req.WithContext(ctx) + rec := httptest.NewRecorder() + + router.ServeHTTP(rec, req) + + if tc.expectedStatusCode == 0 || tc.expectedStatusCode == http.StatusOK { + require.Equal(t, http.StatusNoContent, rec.Code) + } else { + require.Equal(t, tc.expectedStatusCode, rec.Code) + errorResponse := api.ErrorResponse{} + require.NoError(t, json.Unmarshal(rec.Body.Bytes(), &errorResponse)) + require.Equal(t, tc.expectedErrorCode, errorResponse.ErrorCode) + } + }) + } +} diff --git a/internal/api/v2/controllers_info.go b/internal/api/v2/controllers_ledgers_info.go similarity index 50% rename from internal/api/v2/controllers_info.go rename to internal/api/v2/controllers_ledgers_info.go index 74bf4f779..688c81da0 100644 --- a/internal/api/v2/controllers_info.go +++ b/internal/api/v2/controllers_ledgers_info.go @@ -5,9 +5,9 @@ import ( "github.com/go-chi/chi/v5" - sharedapi "github.com/formancehq/go-libs/api" - "github.com/formancehq/go-libs/migrations" - "github.com/formancehq/ledger/internal/api/backend" + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/migrations" + "github.com/formancehq/ledger/internal/api/common" ) type Info struct { @@ -20,7 +20,7 @@ type StorageInfo struct { } func getLedgerInfo(w http.ResponseWriter, r *http.Request) { - ledger := backend.LedgerFromContext(r.Context()) + ledger := common.LedgerFromContext(r.Context()) var err error res := Info{ @@ -29,21 +29,9 @@ func getLedgerInfo(w http.ResponseWriter, r *http.Request) { } res.Storage.Migrations, err = ledger.GetMigrationsInfo(r.Context()) if err != nil { - sharedapi.InternalServerError(w, r, err) + common.HandleCommonErrors(w, r, err) return } - sharedapi.Ok(w, res) -} - -func getStats(w http.ResponseWriter, r *http.Request) { - l := backend.LedgerFromContext(r.Context()) - - stats, err := l.Stats(r.Context()) - if err != nil { - sharedapi.InternalServerError(w, r, err) - return - } - - sharedapi.Ok(w, stats) + api.Ok(w, res) } diff --git a/internal/api/v2/controllers_ledgers_info_test.go b/internal/api/v2/controllers_ledgers_info_test.go new file mode 100644 index 000000000..5b72b204e --- /dev/null +++ b/internal/api/v2/controllers_ledgers_info_test.go @@ -0,0 +1,59 @@ +package v2 + +import ( + "net/http" + "net/http/httptest" + "os" + "testing" + + "github.com/formancehq/go-libs/v2/time" + + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/auth" + "github.com/formancehq/go-libs/v2/migrations" + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" +) + +func TestLedgersInfo(t *testing.T) { + t.Parallel() + + systemController, ledgerController := newTestingSystemController(t, false) + router := NewRouter(systemController, auth.NewNoAuth(), os.Getenv("DEBUG") == "true") + + migrationInfo := []migrations.Info{ + { + Version: "1", + Name: "init", + State: "ready", + Date: time.Now().Add(-2 * time.Minute).Round(time.Second).UTC(), + }, + { + Version: "2", + Name: "fix", + State: "ready", + Date: time.Now().Add(-time.Minute).Round(time.Second).UTC(), + }, + } + + ledgerController.EXPECT(). + GetMigrationsInfo(gomock.Any()). 
+ Return(migrationInfo, nil) + + req := httptest.NewRequest(http.MethodGet, "/xxx/_info", nil) + rec := httptest.NewRecorder() + + router.ServeHTTP(rec, req) + + require.Equal(t, http.StatusOK, rec.Code) + + info, ok := api.DecodeSingleResponse[Info](t, rec.Body) + require.True(t, ok) + + require.EqualValues(t, Info{ + Name: "xxx", + Storage: StorageInfo{ + Migrations: migrationInfo, + }, + }, info) +} diff --git a/internal/api/v2/controllers_ledgers_list.go b/internal/api/v2/controllers_ledgers_list.go new file mode 100644 index 000000000..6ef87fd35 --- /dev/null +++ b/internal/api/v2/controllers_ledgers_list.go @@ -0,0 +1,44 @@ +package v2 + +import ( + "github.com/formancehq/ledger/internal/api/common" + "net/http" + + "errors" + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/bun/bunpaginate" + "github.com/formancehq/go-libs/v2/pointer" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + "github.com/formancehq/ledger/internal/controller/system" +) + +func listLedgers(b system.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + + query, err := bunpaginate.Extract[ledgercontroller.ListLedgersQuery](r, func() (*ledgercontroller.ListLedgersQuery, error) { + pageSize, err := bunpaginate.GetPageSize(r) + if err != nil { + return nil, err + } + + return pointer.For(ledgercontroller.NewListLedgersQuery(pageSize)), nil + }) + if err != nil { + api.BadRequest(w, ErrValidation, err) + return + } + + ledgers, err := b.ListLedgers(r.Context(), *query) + if err != nil { + switch { + case errors.Is(err, ledgercontroller.ErrInvalidQuery{}) || errors.Is(err, ledgercontroller.ErrMissingFeature{}): + api.BadRequest(w, ErrValidation, err) + default: + common.HandleCommonErrors(w, r, err) + } + return + } + + api.RenderCursor(w, *ledgers) + } +} diff --git a/internal/api/v2/controllers_ledgers_list_test.go b/internal/api/v2/controllers_ledgers_list_test.go new file mode 100644 index 000000000..ca8627348 --- /dev/null +++ b/internal/api/v2/controllers_ledgers_list_test.go @@ -0,0 +1,120 @@ +package v2 + +import ( + "encoding/json" + "net/http" + "net/http/httptest" + "net/url" + "os" + "testing" + + "errors" + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/auth" + "github.com/formancehq/go-libs/v2/bun/bunpaginate" + "github.com/formancehq/go-libs/v2/logging" + ledger "github.com/formancehq/ledger/internal" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + "github.com/google/uuid" + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" +) + +func TestListLedgers(t *testing.T) { + t.Parallel() + + ctx := logging.TestingContext() + + type testCase struct { + name string + expectQuery ledgercontroller.ListLedgersQuery + queryParams url.Values + returnData []ledger.Ledger + returnErr error + expectedStatusCode int + expectedErrorCode string + expectBackendCall bool + } + + for _, tc := range []testCase{ + { + name: "nominal", + expectQuery: ledgercontroller.NewListLedgersQuery(15), + returnData: []ledger.Ledger{ + ledger.MustNewWithDefault(uuid.NewString()), + ledger.MustNewWithDefault(uuid.NewString()), + }, + expectBackendCall: true, + }, + { + name: "invalid page size", + expectQuery: ledgercontroller.NewListLedgersQuery(15), + queryParams: url.Values{ + "pageSize": {"-1"}, + }, + expectedStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + expectBackendCall: false, + }, + { + name: "error from backend", + expectQuery: 
ledgercontroller.NewListLedgersQuery(15), + expectedStatusCode: http.StatusInternalServerError, + expectedErrorCode: api.ErrorInternal, + expectBackendCall: true, + returnErr: errors.New("undefined error"), + }, + { + name: "with invalid query from core point of view", + expectedStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + expectBackendCall: true, + returnErr: ledgercontroller.ErrInvalidQuery{}, + expectQuery: ledgercontroller.NewListLedgersQuery(DefaultPageSize), + }, + { + name: "with missing feature", + expectedStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + expectBackendCall: true, + returnErr: ledgercontroller.ErrMissingFeature{}, + expectQuery: ledgercontroller.NewListLedgersQuery(DefaultPageSize), + }, + } { + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + systemController, _ := newTestingSystemController(t, false) + + if tc.expectBackendCall { + systemController.EXPECT(). + ListLedgers(gomock.Any(), ledgercontroller.NewListLedgersQuery(15)). + Return(&bunpaginate.Cursor[ledger.Ledger]{ + Data: tc.returnData, + }, tc.returnErr) + } + + router := NewRouter(systemController, auth.NewNoAuth(), os.Getenv("DEBUG") == "true") + + req := httptest.NewRequest(http.MethodGet, "/", nil) + req = req.WithContext(ctx) + req.URL.RawQuery = tc.queryParams.Encode() + + rec := httptest.NewRecorder() + + router.ServeHTTP(rec, req) + + if tc.expectedStatusCode == 0 || tc.expectedStatusCode == http.StatusOK { + require.Equal(t, http.StatusOK, rec.Code) + cursor := api.DecodeCursorResponse[ledger.Ledger](t, rec.Body) + + require.Equal(t, tc.returnData, cursor.Data) + } else { + require.Equal(t, tc.expectedStatusCode, rec.Code) + errorResponse := api.ErrorResponse{} + require.NoError(t, json.Unmarshal(rec.Body.Bytes(), &errorResponse)) + require.Equal(t, tc.expectedErrorCode, errorResponse.ErrorCode) + } + }) + } +} diff --git a/internal/api/v2/controllers_ledgers_read.go b/internal/api/v2/controllers_ledgers_read.go new file mode 100644 index 000000000..59d94e7b1 --- /dev/null +++ b/internal/api/v2/controllers_ledgers_read.go @@ -0,0 +1,29 @@ +package v2 + +import ( + "github.com/formancehq/ledger/internal/api/common" + "net/http" + + "github.com/formancehq/ledger/internal/controller/system" + + "github.com/formancehq/go-libs/v2/platform/postgres" + + "github.com/formancehq/go-libs/v2/api" + "github.com/go-chi/chi/v5" +) + +func readLedger(b system.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ledger, err := b.GetLedger(r.Context(), chi.URLParam(r, "ledger")) + if err != nil { + switch { + case postgres.IsNotFoundError(err): + api.NotFound(w, err) + default: + common.HandleCommonErrors(w, r, err) + } + return + } + api.Ok(w, ledger) + } +} diff --git a/internal/api/v2/controllers_ledgers_read_test.go b/internal/api/v2/controllers_ledgers_read_test.go new file mode 100644 index 000000000..6b5156cb0 --- /dev/null +++ b/internal/api/v2/controllers_ledgers_read_test.go @@ -0,0 +1,49 @@ +package v2 + +import ( + "net/http" + "net/http/httptest" + "os" + "testing" + + ledger "github.com/formancehq/ledger/internal" + + "github.com/formancehq/go-libs/v2/time" + + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/auth" + + "github.com/google/uuid" + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" +) + +func TestLedgersRead(t *testing.T) { + t.Parallel() + + systemController, _ := newTestingSystemController(t, false) + router := NewRouter(systemController, 
auth.NewNoAuth(), os.Getenv("DEBUG") == "true") + + name := uuid.NewString() + now := time.Now() + l := ledger.Ledger{ + Name: name, + AddedAt: now, + Configuration: ledger.Configuration{ + Bucket: "bucket0", + }, + } + systemController. + EXPECT(). + GetLedger(gomock.Any(), name). + Return(&l, nil) + + req := httptest.NewRequest(http.MethodGet, "/"+name, nil) + rec := httptest.NewRecorder() + + router.ServeHTTP(rec, req) + + require.Equal(t, http.StatusOK, rec.Code) + ledgerFromAPI, _ := api.DecodeSingleResponse[ledger.Ledger](t, rec.Body) + require.Equal(t, l, ledgerFromAPI) +} diff --git a/internal/api/v2/controllers_ledgers_test.go b/internal/api/v2/controllers_ledgers_test.go deleted file mode 100644 index 8dfedef4d..000000000 --- a/internal/api/v2/controllers_ledgers_test.go +++ /dev/null @@ -1,59 +0,0 @@ -package v2_test - -import ( - "net/http" - "net/http/httptest" - "testing" - - sharedapi "github.com/formancehq/go-libs/api" - "github.com/formancehq/go-libs/auth" - "github.com/formancehq/go-libs/logging" - v2 "github.com/formancehq/ledger/internal/api/v2" - "github.com/formancehq/ledger/internal/opentelemetry/metrics" - "github.com/google/uuid" - "github.com/stretchr/testify/require" - "go.uber.org/mock/gomock" -) - -func TestUpdateLedgerMetadata(t *testing.T) { - ctx := logging.TestingContext() - - name := uuid.NewString() - metadata := map[string]string{ - "foo": "bar", - } - backend, _ := newTestingBackend(t, false) - backend.EXPECT(). - UpdateLedgerMetadata(gomock.Any(), name, metadata). - Return(nil) - - router := v2.NewRouter(backend, nil, metrics.NewNoOpRegistry(), auth.NewNoAuth(), testing.Verbose()) - - req := httptest.NewRequest(http.MethodPut, "/"+name+"/metadata", sharedapi.Buffer(t, metadata)) - req = req.WithContext(ctx) - rec := httptest.NewRecorder() - - router.ServeHTTP(rec, req) - - require.Equal(t, http.StatusNoContent, rec.Code) -} - -func TestDeleteLedgerMetadata(t *testing.T) { - ctx := logging.TestingContext() - - name := uuid.NewString() - backend, _ := newTestingBackend(t, false) - backend.EXPECT(). - DeleteLedgerMetadata(gomock.Any(), name, "foo"). 
- Return(nil) - - router := v2.NewRouter(backend, nil, metrics.NewNoOpRegistry(), auth.NewNoAuth(), testing.Verbose()) - - req := httptest.NewRequest(http.MethodDelete, "/"+name+"/metadata/foo", nil) - req = req.WithContext(ctx) - rec := httptest.NewRecorder() - - router.ServeHTTP(rec, req) - - require.Equal(t, http.StatusNoContent, rec.Code) -} diff --git a/internal/api/v2/controllers_ledgers_update_metadata.go b/internal/api/v2/controllers_ledgers_update_metadata.go new file mode 100644 index 000000000..d7f487775 --- /dev/null +++ b/internal/api/v2/controllers_ledgers_update_metadata.go @@ -0,0 +1,31 @@ +package v2 + +import ( + "encoding/json" + "github.com/formancehq/ledger/internal/api/common" + "net/http" + + "errors" + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/metadata" + systemcontroller "github.com/formancehq/ledger/internal/controller/system" + "github.com/go-chi/chi/v5" +) + +func updateLedgerMetadata(systemController systemcontroller.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + + m := metadata.Metadata{} + if err := json.NewDecoder(r.Body).Decode(&m); err != nil { + api.BadRequest(w, ErrValidation, errors.New("invalid format")) + return + } + + if err := systemController.UpdateLedgerMetadata(r.Context(), chi.URLParam(r, "ledger"), m); err != nil { + common.HandleCommonErrors(w, r, err) + return + } + + api.NoContent(w) + } +} diff --git a/internal/api/v2/controllers_ledgers_update_metadata_test.go b/internal/api/v2/controllers_ledgers_update_metadata_test.go new file mode 100644 index 000000000..b65884af1 --- /dev/null +++ b/internal/api/v2/controllers_ledgers_update_metadata_test.go @@ -0,0 +1,38 @@ +package v2 + +import ( + "net/http" + "net/http/httptest" + "os" + "testing" + + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/auth" + "github.com/formancehq/go-libs/v2/logging" + "github.com/google/uuid" + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" +) + +func TestLedgersUpdateMetadata(t *testing.T) { + ctx := logging.TestingContext() + + name := uuid.NewString() + metadata := map[string]string{ + "foo": "bar", + } + systemController, _ := newTestingSystemController(t, false) + systemController.EXPECT(). + UpdateLedgerMetadata(gomock.Any(), name, metadata). 
+ Return(nil) + + router := NewRouter(systemController, auth.NewNoAuth(), os.Getenv("DEBUG") == "true") + + req := httptest.NewRequest(http.MethodPut, "/"+name+"/metadata", api.Buffer(t, metadata)) + req = req.WithContext(ctx) + rec := httptest.NewRecorder() + + router.ServeHTTP(rec, req) + + require.Equal(t, http.StatusNoContent, rec.Code) +} diff --git a/internal/api/v2/controllers_logs_export.go b/internal/api/v2/controllers_logs_export.go new file mode 100644 index 000000000..b9a0bf299 --- /dev/null +++ b/internal/api/v2/controllers_logs_export.go @@ -0,0 +1,23 @@ +package v2 + +import ( + "context" + "encoding/json" + "net/http" + + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + + ledger "github.com/formancehq/ledger/internal" + "github.com/formancehq/ledger/internal/api/common" +) + +func exportLogs(w http.ResponseWriter, r *http.Request) { + enc := json.NewEncoder(w) + w.Header().Set("Content-Type", "application/octet-stream") + if err := common.LedgerFromContext(r.Context()).Export(r.Context(), ledgercontroller.ExportWriterFn(func(ctx context.Context, log ledger.Log) error { + return enc.Encode(log) + })); err != nil { + common.HandleCommonErrors(w, r, err) + return + } +} diff --git a/internal/api/v2/controllers_logs_export_test.go b/internal/api/v2/controllers_logs_export_test.go new file mode 100644 index 000000000..93d75f458 --- /dev/null +++ b/internal/api/v2/controllers_logs_export_test.go @@ -0,0 +1,84 @@ +package v2 + +import ( + "context" + "encoding/json" + "net/http" + "net/http/httptest" + "os" + "testing" + + "errors" + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/auth" + ledger "github.com/formancehq/ledger/internal" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" +) + +func TestLogsExport(t *testing.T) { + t.Parallel() + + type testCase struct { + name string + expectStatusCode int + expectedErrorCode string + returnErr error + } + + testCases := []testCase{ + { + name: "nominal", + }, + { + name: "undefined error", + returnErr: errors.New("unexpected error"), + expectStatusCode: http.StatusInternalServerError, + expectedErrorCode: api.ErrorInternal, + }, + } + for _, testCase := range testCases { + tc := testCase + t.Run(tc.name, func(t *testing.T) { + + if tc.expectStatusCode == 0 { + tc.expectStatusCode = http.StatusOK + } + + log := ledger.NewLog(ledger.CreatedTransaction{ + Transaction: ledger.NewTransaction(), + AccountMetadata: ledger.AccountMetadata{}, + }) + + systemController, ledgerController := newTestingSystemController(t, true) + ledgerController.EXPECT(). + Export(gomock.Any(), gomock.Any()). 
+ DoAndReturn(func(ctx context.Context, exporter ledgercontroller.ExportWriter) error { + if tc.returnErr != nil { + return tc.returnErr + } + require.NoError(t, exporter.Write(ctx, log)) + return nil + }) + + router := NewRouter(systemController, auth.NewNoAuth(), os.Getenv("DEBUG") == "true") + + req := httptest.NewRequest(http.MethodPost, "/xxx/logs/export", nil) + rec := httptest.NewRecorder() + + router.ServeHTTP(rec, req) + + require.Equal(t, tc.expectStatusCode, rec.Code) + if tc.expectStatusCode < 300 && tc.expectStatusCode >= 200 { + logFromExport := ledger.Log{} + require.NoError(t, json.NewDecoder(rec.Body).Decode(&logFromExport)) + require.Equal(t, log, logFromExport) + } else { + err := api.ErrorResponse{} + api.Decode(t, rec.Body, &err) + require.EqualValues(t, tc.expectedErrorCode, err.ErrorCode) + } + }) + } +} diff --git a/internal/api/v2/controller_import_logs.go b/internal/api/v2/controllers_logs_import.go similarity index 62% rename from internal/api/v2/controller_import_logs.go rename to internal/api/v2/controllers_logs_import.go index 32b50f8ed..e8df74a33 100644 --- a/internal/api/v2/controller_import_logs.go +++ b/internal/api/v2/controllers_logs_import.go @@ -5,33 +5,33 @@ import ( "io" "net/http" - "github.com/formancehq/ledger/internal/engine" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" - "github.com/formancehq/go-libs/api" + "errors" + "github.com/formancehq/go-libs/v2/api" ledger "github.com/formancehq/ledger/internal" - "github.com/formancehq/ledger/internal/api/backend" - "github.com/pkg/errors" + "github.com/formancehq/ledger/internal/api/common" ) func importLogs(w http.ResponseWriter, r *http.Request) { - stream := make(chan *ledger.ChainedLog) + stream := make(chan ledger.Log) errChan := make(chan error, 1) go func() { - errChan <- backend.LedgerFromContext(r.Context()).Import(r.Context(), stream) + errChan <- common.LedgerFromContext(r.Context()).Import(r.Context(), stream) }() dec := json.NewDecoder(r.Body) handleError := func(err error) { switch { - case errors.Is(err, engine.ImportError{}): - api.WriteErrorResponse(w, http.StatusBadRequest, "IMPORT", err) + case errors.Is(err, ledgercontroller.ErrImport{}): + api.BadRequest(w, "IMPORT", err) default: - api.InternalServerError(w, r, err) + common.HandleCommonErrors(w, r, err) } } for { - l := &ledger.ChainedLog{} - if err := dec.Decode(l); err != nil { + l := ledger.Log{} + if err := dec.Decode(&l); err != nil { if errors.Is(err, io.EOF) { close(stream) break diff --git a/internal/api/v2/controllers_logs_import_test.go b/internal/api/v2/controllers_logs_import_test.go new file mode 100644 index 000000000..c8474637a --- /dev/null +++ b/internal/api/v2/controllers_logs_import_test.go @@ -0,0 +1,94 @@ +package v2 + +import ( + "bytes" + "context" + "encoding/json" + "net/http" + "net/http/httptest" + "os" + "testing" + "time" + + "errors" + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/auth" + ledger "github.com/formancehq/ledger/internal" + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" +) + +func TestLogsImport(t *testing.T) { + t.Parallel() + + type testCase struct { + name string + expectStatusCode int + expectedErrorCode string + returnErr error + } + + testCases := []testCase{ + { + name: "nominal", + }, + { + name: "undefined error", + returnErr: errors.New("unexpected error"), + expectStatusCode: http.StatusInternalServerError, + expectedErrorCode: api.ErrorInternal, + }, + } + for _, testCase := range testCases { + tc 
:= testCase + t.Run(tc.name, func(t *testing.T) { + + if tc.expectStatusCode == 0 { + tc.expectStatusCode = http.StatusNoContent + } + + log := ledger.NewLog(ledger.CreatedTransaction{ + Transaction: ledger.NewTransaction(), + AccountMetadata: ledger.AccountMetadata{}, + }) + + systemController, ledgerController := newTestingSystemController(t, true) + ledgerController.EXPECT(). + Import(gomock.Any(), gomock.Any()). + DoAndReturn(func(ctx context.Context, stream chan ledger.Log) error { + if tc.returnErr != nil { + return tc.returnErr + } + select { + case <-ctx.Done(): + return ctx.Err() + case logFromStream := <-stream: + require.Equal(t, log, logFromStream) + select { + case <-time.After(time.Second): + require.Fail(t, "stream should have been closed") + case <-stream: + } + return nil + } + }) + + router := NewRouter(systemController, auth.NewNoAuth(), os.Getenv("DEBUG") == "true") + + buf := bytes.NewBuffer(nil) + require.NoError(t, json.NewEncoder(buf).Encode(log)) + + req := httptest.NewRequest(http.MethodPost, "/xxx/logs/import", buf) + rec := httptest.NewRecorder() + + router.ServeHTTP(rec, req) + + require.Equal(t, tc.expectStatusCode, rec.Code) + if tc.expectStatusCode > 300 { + err := api.ErrorResponse{} + api.Decode(t, rec.Body, &err) + require.EqualValues(t, tc.expectedErrorCode, err.ErrorCode) + } + }) + } +} diff --git a/internal/api/v2/controllers_logs_list.go b/internal/api/v2/controllers_logs_list.go new file mode 100644 index 000000000..8fa863c02 --- /dev/null +++ b/internal/api/v2/controllers_logs_list.go @@ -0,0 +1,59 @@ +package v2 + +import ( + "fmt" + "net/http" + + "errors" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/bun/bunpaginate" + "github.com/formancehq/ledger/internal/api/common" +) + +func listLogs(w http.ResponseWriter, r *http.Request) { + l := common.LedgerFromContext(r.Context()) + + query := ledgercontroller.GetLogsQuery{} + + if r.URL.Query().Get(QueryKeyCursor) != "" { + err := bunpaginate.UnmarshalCursor(r.URL.Query().Get(QueryKeyCursor), &query) + if err != nil { + api.BadRequest(w, ErrValidation, fmt.Errorf("invalid '%s' query param", QueryKeyCursor)) + return + } + } else { + var err error + + pageSize, err := bunpaginate.GetPageSize(r) + if err != nil { + api.BadRequest(w, ErrValidation, err) + return + } + + qb, err := getQueryBuilder(r) + if err != nil { + api.BadRequest(w, ErrValidation, err) + return + } + + query = ledgercontroller.NewListLogsQuery(ledgercontroller.PaginatedQueryOptions[any]{ + QueryBuilder: qb, + PageSize: pageSize, + }) + } + + cursor, err := l.ListLogs(r.Context(), query) + if err != nil { + switch { + case errors.Is(err, ledgercontroller.ErrInvalidQuery{}): + api.BadRequest(w, ErrValidation, err) + default: + common.HandleCommonErrors(w, r, err) + } + return + } + + api.RenderCursor(w, *cursor) +} diff --git a/internal/api/v2/controllers_logs_list_test.go b/internal/api/v2/controllers_logs_list_test.go new file mode 100644 index 000000000..a301a7694 --- /dev/null +++ b/internal/api/v2/controllers_logs_list_test.go @@ -0,0 +1,165 @@ +package v2 + +import ( + "bytes" + "encoding/json" + "fmt" + "net/http" + "net/http/httptest" + "net/url" + "os" + "testing" + + "errors" + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/auth" + "github.com/formancehq/go-libs/v2/bun/bunpaginate" + "github.com/formancehq/go-libs/v2/query" + "github.com/formancehq/go-libs/v2/time" + ledger 
"github.com/formancehq/ledger/internal" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" +) + +func TestGetLogs(t *testing.T) { + t.Parallel() + + type testCase struct { + name string + queryParams url.Values + body string + expectQuery ledgercontroller.PaginatedQueryOptions[any] + expectStatusCode int + expectedErrorCode string + expectBackendCall bool + returnErr error + } + + now := time.Now() + testCases := []testCase{ + { + name: "nominal", + expectQuery: ledgercontroller.NewPaginatedQueryOptions[any](nil), + expectBackendCall: true, + }, + { + name: "using start time", + body: fmt.Sprintf(`{"$gte": {"date": "%s"}}`, now.Format(time.DateFormat)), + expectQuery: ledgercontroller.NewPaginatedQueryOptions[any](nil).WithQueryBuilder(query.Gte("date", now.Format(time.DateFormat))), + expectBackendCall: true, + }, + { + name: "using end time", + body: fmt.Sprintf(`{"$lt": {"date": "%s"}}`, now.Format(time.DateFormat)), + expectQuery: ledgercontroller.NewPaginatedQueryOptions[any](nil). + WithQueryBuilder(query.Lt("date", now.Format(time.DateFormat))), + expectBackendCall: true, + }, + { + name: "using empty cursor", + queryParams: url.Values{ + "cursor": []string{bunpaginate.EncodeCursor(ledgercontroller.NewListLogsQuery(ledgercontroller.NewPaginatedQueryOptions[any](nil)))}, + }, + expectQuery: ledgercontroller.NewPaginatedQueryOptions[any](nil), + expectBackendCall: true, + }, + { + name: "using invalid cursor", + queryParams: url.Values{ + "cursor": []string{"xxx"}, + }, + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + }, + { + name: "using invalid page size", + queryParams: url.Values{ + "pageSize": []string{"-1"}, + }, + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + }, + { + name: "using malformed query", + body: `[]`, + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + }, + { + name: "with invalid query", + expectStatusCode: http.StatusBadRequest, + expectQuery: ledgercontroller.NewPaginatedQueryOptions[any](nil), + expectedErrorCode: ErrValidation, + expectBackendCall: true, + returnErr: ledgercontroller.ErrInvalidQuery{}, + }, + { + name: "with unexpected error", + expectStatusCode: http.StatusInternalServerError, + expectQuery: ledgercontroller.NewPaginatedQueryOptions[any](nil), + expectedErrorCode: api.ErrorInternal, + expectBackendCall: true, + returnErr: errors.New("unexpected error"), + }, + } + for _, testCase := range testCases { + testCase := testCase + t.Run(testCase.name, func(t *testing.T) { + + if testCase.expectStatusCode == 0 { + testCase.expectStatusCode = http.StatusOK + } + + expectedCursor := bunpaginate.Cursor[ledger.Log]{ + Data: []ledger.Log{ + ledger.NewLog(ledger.CreatedTransaction{ + Transaction: ledger.NewTransaction(), + AccountMetadata: ledger.AccountMetadata{}, + }). + ChainLog(nil), + }, + } + + systemController, ledgerController := newTestingSystemController(t, true) + if testCase.expectBackendCall { + ledgerController.EXPECT(). + ListLogs(gomock.Any(), ledgercontroller.NewListLogsQuery(testCase.expectQuery)). 
+ Return(&expectedCursor, testCase.returnErr) + } + + router := NewRouter(systemController, auth.NewNoAuth(), os.Getenv("DEBUG") == "true") + + req := httptest.NewRequest(http.MethodGet, "/xxx/logs", bytes.NewBufferString(testCase.body)) + rec := httptest.NewRecorder() + if testCase.queryParams != nil { + req.URL.RawQuery = testCase.queryParams.Encode() + } + + router.ServeHTTP(rec, req) + + require.Equal(t, testCase.expectStatusCode, rec.Code) + if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { + cursor := api.DecodeCursorResponse[ledger.Log](t, rec.Body) + + cursorData, err := json.Marshal(cursor) + require.NoError(t, err) + + cursorAsMap := make(map[string]any) + require.NoError(t, json.Unmarshal(cursorData, &cursorAsMap)) + + expectedCursorData, err := json.Marshal(expectedCursor) + require.NoError(t, err) + + expectedCursorAsMap := make(map[string]any) + require.NoError(t, json.Unmarshal(expectedCursorData, &expectedCursorAsMap)) + + require.Equal(t, expectedCursorAsMap, cursorAsMap) + } else { + err := api.ErrorResponse{} + api.Decode(t, rec.Body, &err) + require.EqualValues(t, testCase.expectedErrorCode, err.ErrorCode) + } + }) + } +} diff --git a/internal/api/v2/controllers_stats.go b/internal/api/v2/controllers_stats.go new file mode 100644 index 000000000..569cd82f3 --- /dev/null +++ b/internal/api/v2/controllers_stats.go @@ -0,0 +1,20 @@ +package v2 + +import ( + "net/http" + + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/ledger/internal/api/common" +) + +func readStats(w http.ResponseWriter, r *http.Request) { + l := common.LedgerFromContext(r.Context()) + + stats, err := l.GetStats(r.Context()) + if err != nil { + common.HandleCommonErrors(w, r, err) + return + } + + api.Ok(w, stats) +} diff --git a/internal/api/v2/controllers_stats_test.go b/internal/api/v2/controllers_stats_test.go new file mode 100644 index 000000000..6ec35deb1 --- /dev/null +++ b/internal/api/v2/controllers_stats_test.go @@ -0,0 +1,42 @@ +package v2 + +import ( + "net/http" + "net/http/httptest" + "os" + "testing" + + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/auth" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" +) + +func TestStats(t *testing.T) { + t.Parallel() + + systemController, ledgerController := newTestingSystemController(t, true) + router := NewRouter(systemController, auth.NewNoAuth(), os.Getenv("DEBUG") == "true") + + expectedStats := ledgercontroller.Stats{ + Transactions: 10, + Accounts: 5, + } + + ledgerController.EXPECT(). + GetStats(gomock.Any()). 
+ Return(expectedStats, nil) + + req := httptest.NewRequest(http.MethodGet, "/xxx/stats", nil) + rec := httptest.NewRecorder() + + router.ServeHTTP(rec, req) + + require.Equal(t, http.StatusOK, rec.Code) + + stats, ok := api.DecodeSingleResponse[ledgercontroller.Stats](t, rec.Body) + require.True(t, ok) + + require.EqualValues(t, expectedStats, stats) +} diff --git a/internal/api/v2/controllers_transactions.go b/internal/api/v2/controllers_transactions.go deleted file mode 100644 index c33dc5f59..000000000 --- a/internal/api/v2/controllers_transactions.go +++ /dev/null @@ -1,271 +0,0 @@ -package v2 - -import ( - "encoding/json" - "fmt" - "math/big" - "net/http" - - "github.com/go-chi/chi/v5" - - "github.com/formancehq/go-libs/contextutil" - "github.com/formancehq/go-libs/pointer" - - "github.com/formancehq/go-libs/bun/bunpaginate" - "github.com/formancehq/ledger/internal/api/backend" - "github.com/formancehq/ledger/internal/engine" - "github.com/formancehq/ledger/internal/engine/command" - "github.com/formancehq/ledger/internal/machine" - storageerrors "github.com/formancehq/ledger/internal/storage/sqlutils" - "github.com/pkg/errors" - - sharedapi "github.com/formancehq/go-libs/api" - "github.com/formancehq/go-libs/collectionutils" - "github.com/formancehq/go-libs/metadata" - ledger "github.com/formancehq/ledger/internal" - "github.com/formancehq/ledger/internal/storage/ledgerstore" -) - -func countTransactions(w http.ResponseWriter, r *http.Request) { - - options, err := getPaginatedQueryOptionsOfPITFilterWithVolumes(r) - if err != nil { - sharedapi.BadRequest(w, ErrValidation, err) - return - } - - count, err := backend.LedgerFromContext(r.Context()). - CountTransactions(r.Context(), ledgerstore.NewGetTransactionsQuery(*options)) - if err != nil { - switch { - case ledgerstore.IsErrInvalidQuery(err): - sharedapi.BadRequest(w, ErrValidation, err) - default: - sharedapi.InternalServerError(w, r, err) - } - return - } - - w.Header().Set("Count", fmt.Sprint(count)) - sharedapi.NoContent(w) -} - -func getTransactions(w http.ResponseWriter, r *http.Request) { - l := backend.LedgerFromContext(r.Context()) - - query, err := bunpaginate.Extract[ledgerstore.GetTransactionsQuery](r, func() (*ledgerstore.GetTransactionsQuery, error) { - options, err := getPaginatedQueryOptionsOfPITFilterWithVolumes(r) - if err != nil { - return nil, err - } - q := ledgerstore.NewGetTransactionsQuery(*options) - - if r.URL.Query().Get("order") == "effective" { - q.Column = "timestamp" - } - if r.URL.Query().Get("reverse") == "true" { - q.Order = bunpaginate.OrderAsc - } - - return pointer.For(q), nil - }) - if err != nil { - sharedapi.BadRequest(w, ErrValidation, err) - return - } - - cursor, err := l.GetTransactions(r.Context(), *query) - if err != nil { - switch { - case ledgerstore.IsErrInvalidQuery(err): - sharedapi.BadRequest(w, ErrValidation, err) - default: - sharedapi.InternalServerError(w, r, err) - } - return - } - - sharedapi.RenderCursor(w, *cursor) -} - -func postTransaction(w http.ResponseWriter, r *http.Request) { - l := backend.LedgerFromContext(r.Context()) - - payload := ledger.TransactionRequest{} - if err := json.NewDecoder(r.Body).Decode(&payload); err != nil { - sharedapi.BadRequest(w, ErrValidation, errors.New("invalid transaction format")) - return - } - - if len(payload.Postings) > 0 && payload.Script.Plain != "" { - sharedapi.BadRequest(w, ErrValidation, errors.New("cannot pass postings and numscript in the same request")) - return - } - - ctx, _ := contextutil.Detached(r.Context()) - - res, 
err := l.CreateTransaction(ctx, getCommandParameters(r), *payload.ToRunScript()) - if err != nil { - switch { - case engine.IsCommandError(err): - switch { - case command.IsErrMachine(err): - switch { - case machine.IsInsufficientFundError(err): - sharedapi.BadRequest(w, ErrInsufficientFund, err) - return - case machine.IsMetadataOverride(err): - sharedapi.BadRequest(w, ErrMetadataOverride, err) - return - } - case command.IsInvalidTransactionError(err, command.ErrInvalidTransactionCodeConflict): - sharedapi.BadRequest(w, ErrConflict, err) - return - case command.IsInvalidTransactionError(err, command.ErrInvalidTransactionCodeNoPostings): - sharedapi.BadRequest(w, ErrNoPostings, err) - return - case command.IsInvalidTransactionError(err, command.ErrInvalidTransactionCodeNoScript): - sharedapi.BadRequest(w, ErrNoScript, err) - return - case command.IsInvalidTransactionError(err, command.ErrInvalidTransactionCodeCompilationFailed): - sharedapi.BadRequestWithDetails(w, ErrCompilationFailed, err, backend.EncodeLink(errors.Cause(err).Error())) - return - } - } - sharedapi.InternalServerError(w, r, err) - return - } - - sharedapi.Ok(w, res) -} - -func getTransaction(w http.ResponseWriter, r *http.Request) { - l := backend.LedgerFromContext(r.Context()) - - txId, ok := big.NewInt(0).SetString(chi.URLParam(r, "id"), 10) - if !ok { - sharedapi.BadRequest(w, ErrValidation, errors.New("invalid transaction id")) - return - } - - query := ledgerstore.NewGetTransactionQuery(txId) - if collectionutils.Contains(r.URL.Query()["expand"], "volumes") { - query = query.WithExpandVolumes() - } - if collectionutils.Contains(r.URL.Query()["expand"], "effectiveVolumes") { - query = query.WithExpandEffectiveVolumes() - } - - pitFilter, err := getPITFilter(r) - if err != nil { - sharedapi.BadRequest(w, ErrValidation, err) - return - } - query.PITFilter = *pitFilter - - tx, err := l.GetTransactionWithVolumes(r.Context(), query) - if err != nil { - switch { - case storageerrors.IsNotFoundError(err): - sharedapi.NotFound(w, err) - default: - sharedapi.InternalServerError(w, r, err) - } - return - } - - sharedapi.Ok(w, tx) -} - -func revertTransaction(w http.ResponseWriter, r *http.Request) { - l := backend.LedgerFromContext(r.Context()) - - transactionID, ok := big.NewInt(0).SetString(chi.URLParam(r, "id"), 10) - if !ok { - sharedapi.NotFound(w, errors.New("invalid transaction ID")) - return - } - - tx, err := l.RevertTransaction(r.Context(), getCommandParameters(r), transactionID, - sharedapi.QueryParamBool(r, "force"), - sharedapi.QueryParamBool(r, "atEffectiveDate"), - ) - if err != nil { - switch { - case engine.IsCommandError(err): - switch { - case command.IsErrMachine(err): - switch { - case machine.IsInsufficientFundError(err): - sharedapi.BadRequest(w, ErrInsufficientFund, err) - return - } - case command.IsRevertError(err, command.ErrRevertTransactionCodeNotFound): - sharedapi.NotFound(w, err) - return - case command.IsRevertError(err, command.ErrRevertTransactionCodeOccurring): - sharedapi.BadRequest(w, ErrRevertOccurring, err) - return - case command.IsRevertError(err, command.ErrRevertTransactionCodeAlreadyReverted): - sharedapi.BadRequest(w, ErrAlreadyRevert, err) - return - } - } - sharedapi.InternalServerError(w, r, err) - return - } - - sharedapi.Created(w, tx) -} - -func postTransactionMetadata(w http.ResponseWriter, r *http.Request) { - l := backend.LedgerFromContext(r.Context()) - - var m metadata.Metadata - if err := json.NewDecoder(r.Body).Decode(&m); err != nil { - sharedapi.BadRequest(w, 
ErrValidation, errors.New("invalid metadata format")) - return - } - - txID, ok := big.NewInt(0).SetString(chi.URLParam(r, "id"), 10) - if !ok { - sharedapi.NotFound(w, errors.New("invalid transaction ID")) - return - } - - if err := l.SaveMeta(r.Context(), getCommandParameters(r), ledger.MetaTargetTypeTransaction, txID, m); err != nil { - switch { - case command.IsSaveMetaError(err, command.ErrSaveMetaCodeTransactionNotFound): - sharedapi.NotFound(w, err) - default: - sharedapi.InternalServerError(w, r, err) - } - return - } - - sharedapi.NoContent(w) -} - -func deleteTransactionMetadata(w http.ResponseWriter, r *http.Request) { - l := backend.LedgerFromContext(r.Context()) - - transactionID, ok := big.NewInt(0).SetString(chi.URLParam(r, "id"), 10) - if !ok { - sharedapi.BadRequest(w, ErrValidation, errors.New("invalid transaction ID")) - return - } - - metadataKey := chi.URLParam(r, "key") - - if err := l.DeleteMetadata(r.Context(), getCommandParameters(r), ledger.MetaTargetTypeTransaction, transactionID, metadataKey); err != nil { - switch { - case command.IsSaveMetaError(err, command.ErrSaveMetaCodeTransactionNotFound): - sharedapi.NotFound(w, err) - default: - sharedapi.InternalServerError(w, r, err) - } - return - } - - sharedapi.NoContent(w) -} diff --git a/internal/api/v2/controllers_transactions_add_metadata.go b/internal/api/v2/controllers_transactions_add_metadata.go new file mode 100644 index 000000000..fa81f7292 --- /dev/null +++ b/internal/api/v2/controllers_transactions_add_metadata.go @@ -0,0 +1,46 @@ +package v2 + +import ( + "encoding/json" + "net/http" + "strconv" + + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + + "errors" + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/metadata" + "github.com/formancehq/ledger/internal/api/common" + "github.com/go-chi/chi/v5" +) + +func addTransactionMetadata(w http.ResponseWriter, r *http.Request) { + l := common.LedgerFromContext(r.Context()) + + var m metadata.Metadata + if err := json.NewDecoder(r.Body).Decode(&m); err != nil { + api.BadRequest(w, ErrValidation, errors.New("invalid metadata format")) + return + } + + txID, err := strconv.ParseInt(chi.URLParam(r, "id"), 10, 64) + if err != nil { + api.BadRequest(w, ErrValidation, err) + return + } + + if err := l.SaveTransactionMetadata(r.Context(), getCommandParameters(r, ledgercontroller.SaveTransactionMetadata{ + TransactionID: int(txID), + Metadata: m, + })); err != nil { + switch { + case errors.Is(err, ledgercontroller.ErrNotFound): + api.NotFound(w, err) + default: + common.HandleCommonErrors(w, r, err) + } + return + } + + api.NoContent(w) +} diff --git a/internal/api/v2/controllers_transactions_add_metadata_test.go b/internal/api/v2/controllers_transactions_add_metadata_test.go new file mode 100644 index 000000000..eadc9de5a --- /dev/null +++ b/internal/api/v2/controllers_transactions_add_metadata_test.go @@ -0,0 +1,117 @@ +package v2 + +import ( + "fmt" + "net/http" + "net/http/httptest" + "net/url" + "os" + "testing" + + "errors" + + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/auth" + "github.com/formancehq/go-libs/v2/metadata" + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" +) + +func TestTransactionsAddMetadata(t *testing.T) { + t.Parallel() + + type testCase struct { + name string + queryParams url.Values + expectStatusCode int + expectedErrorCode string + body any + id any + 
expectBackendCall bool + returnErr error + } + + testCases := []testCase{ + { + name: "nominal", + body: metadata.Metadata{ + "foo": "bar", + }, + expectBackendCall: true, + }, + { + name: "invalid body", + body: "invalid - not an object", + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + }, + { + name: "invalid id", + id: "abc", + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + }, + { + name: "not found", + body: metadata.Metadata{ + "foo": "bar", + }, + expectBackendCall: true, + returnErr: ledgercontroller.ErrNotFound, + expectStatusCode: http.StatusNotFound, + expectedErrorCode: api.ErrorCodeNotFound, + }, + { + name: "unexpected error", + body: metadata.Metadata{ + "foo": "bar", + }, + expectBackendCall: true, + returnErr: errors.New("unexpected error"), + expectStatusCode: http.StatusInternalServerError, + expectedErrorCode: api.ErrorInternal, + }, + } + for _, testCase := range testCases { + testCase := testCase + t.Run(testCase.name, func(t *testing.T) { + + if testCase.expectStatusCode == 0 { + testCase.expectStatusCode = http.StatusNoContent + } + + if testCase.id == nil { + testCase.id = 1 + } + + systemController, ledgerController := newTestingSystemController(t, true) + if testCase.expectBackendCall { + ledgerController.EXPECT(). + SaveTransactionMetadata(gomock.Any(), ledgercontroller.Parameters[ledgercontroller.SaveTransactionMetadata]{ + Input: ledgercontroller.SaveTransactionMetadata{ + TransactionID: 1, + Metadata: testCase.body.(metadata.Metadata), + }, + }). + Return(testCase.returnErr) + } + + router := NewRouter(systemController, auth.NewNoAuth(), os.Getenv("DEBUG") == "true") + + req := httptest.NewRequest(http.MethodPost, fmt.Sprintf("/xxx/transactions/%v/metadata", testCase.id), api.Buffer(t, testCase.body)) + rec := httptest.NewRecorder() + req.URL.RawQuery = testCase.queryParams.Encode() + + router.ServeHTTP(rec, req) + + require.Equal(t, testCase.expectStatusCode, rec.Code) + if testCase.expectStatusCode >= 300 || testCase.expectStatusCode < 200 { + err := api.ErrorResponse{} + api.Decode(t, rec.Body, &err) + require.EqualValues(t, testCase.expectedErrorCode, err.ErrorCode) + } + }) + } +} diff --git a/internal/api/v2/controllers_transactions_count.go b/internal/api/v2/controllers_transactions_count.go new file mode 100644 index 000000000..d256d2973 --- /dev/null +++ b/internal/api/v2/controllers_transactions_count.go @@ -0,0 +1,35 @@ +package v2 + +import ( + "fmt" + "net/http" + + "errors" + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/ledger/internal/api/common" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" +) + +func countTransactions(w http.ResponseWriter, r *http.Request) { + + options, err := getPaginatedQueryOptionsOfPITFilterWithVolumes(r) + if err != nil { + api.BadRequest(w, ErrValidation, err) + return + } + + count, err := common.LedgerFromContext(r.Context()). 
+ CountTransactions(r.Context(), ledgercontroller.NewListTransactionsQuery(*options)) + if err != nil { + switch { + case errors.Is(err, ledgercontroller.ErrInvalidQuery{}) || errors.Is(err, ledgercontroller.ErrMissingFeature{}): + api.BadRequest(w, ErrValidation, err) + default: + common.HandleCommonErrors(w, r, err) + } + return + } + + w.Header().Set("Count", fmt.Sprint(count)) + api.NoContent(w) +} diff --git a/internal/api/v2/controllers_transactions_count_test.go b/internal/api/v2/controllers_transactions_count_test.go new file mode 100644 index 000000000..52fc52e14 --- /dev/null +++ b/internal/api/v2/controllers_transactions_count_test.go @@ -0,0 +1,197 @@ +package v2 + +import ( + "bytes" + "fmt" + "net/http" + "net/http/httptest" + "net/url" + "os" + "testing" + + "errors" + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/auth" + "github.com/formancehq/go-libs/v2/query" + "github.com/formancehq/go-libs/v2/time" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" +) + +func TestTransactionsCount(t *testing.T) { + t.Parallel() + + before := time.Now() + + type testCase struct { + name string + queryParams url.Values + body string + expectQuery ledgercontroller.PaginatedQueryOptions[ledgercontroller.PITFilterWithVolumes] + expectStatusCode int + expectedErrorCode string + expectBackendCall bool + returnErr error + } + now := time.Now() + + testCases := []testCase{ + { + name: "nominal", + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &before, + }, + }), + expectBackendCall: true, + }, + { + name: "using metadata", + body: `{"$match": {"metadata[roles]": "admin"}}`, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &before, + }, + }). + WithQueryBuilder(query.Match("metadata[roles]", "admin")), + expectBackendCall: true, + }, + { + name: "using startTime", + body: fmt.Sprintf(`{"$gte": {"date": "%s"}}`, now.Format(time.DateFormat)), + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &before, + }, + }). + WithQueryBuilder(query.Gte("date", now.Format(time.DateFormat))), + expectBackendCall: true, + }, + { + name: "using endTime", + body: fmt.Sprintf(`{"$gte": {"date": "%s"}}`, now.Format(time.DateFormat)), + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &before, + }, + }). + WithQueryBuilder(query.Gte("date", now.Format(time.DateFormat))), + expectBackendCall: true, + }, + { + name: "using account", + body: `{"$match": {"account": "xxx"}}`, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &before, + }, + }). + WithQueryBuilder(query.Match("account", "xxx")), + expectBackendCall: true, + }, + { + name: "using reference", + body: `{"$match": {"reference": "xxx"}}`, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &before, + }, + }). 
+ WithQueryBuilder(query.Match("reference", "xxx")), + expectBackendCall: true, + }, + { + name: "using destination", + body: `{"$match": {"destination": "xxx"}}`, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &before, + }, + }). + WithQueryBuilder(query.Match("destination", "xxx")), + expectBackendCall: true, + }, + { + name: "using source", + body: `{"$match": {"source": "xxx"}}`, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &before, + }, + }). + WithQueryBuilder(query.Match("source", "xxx")), + expectBackendCall: true, + }, + { + name: "error from backend", + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &before, + }, + }), + expectStatusCode: http.StatusInternalServerError, + expectedErrorCode: api.ErrorInternal, + expectBackendCall: true, + returnErr: errors.New("undefined error"), + }, + { + name: "with invalid query from core point of view", + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + expectBackendCall: true, + returnErr: ledgercontroller.ErrInvalidQuery{}, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &before, + }, + }), + }, + { + name: "with missing feature", + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + expectBackendCall: true, + returnErr: ledgercontroller.ErrMissingFeature{}, + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &before, + }, + }), + }, + } + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + + if tc.expectStatusCode == 0 { + tc.expectStatusCode = http.StatusNoContent + } + + systemController, ledgerController := newTestingSystemController(t, true) + if tc.expectBackendCall { + ledgerController.EXPECT(). + CountTransactions(gomock.Any(), ledgercontroller.NewListTransactionsQuery(tc.expectQuery)). 
+ Return(10, tc.returnErr) + } + + router := NewRouter(systemController, auth.NewNoAuth(), os.Getenv("DEBUG") == "true") + + req := httptest.NewRequest(http.MethodHead, "/xxx/transactions?pit="+before.Format(time.RFC3339Nano), bytes.NewBufferString(tc.body)) + rec := httptest.NewRecorder() + if tc.queryParams != nil { + req.URL.RawQuery = tc.queryParams.Encode() + } + + router.ServeHTTP(rec, req) + + require.Equal(t, tc.expectStatusCode, rec.Code) + if tc.expectStatusCode < 300 && tc.expectStatusCode >= 200 { + require.Equal(t, "10", rec.Header().Get("Count")) + } else { + err := api.ErrorResponse{} + api.Decode(t, rec.Body, &err) + require.EqualValues(t, tc.expectedErrorCode, err.ErrorCode) + } + }) + } +} diff --git a/internal/api/v2/controllers_transactions_create.go b/internal/api/v2/controllers_transactions_create.go new file mode 100644 index 000000000..b54129fbd --- /dev/null +++ b/internal/api/v2/controllers_transactions_create.go @@ -0,0 +1,60 @@ +package v2 + +import ( + "encoding/json" + "net/http" + + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + + "errors" + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/ledger/internal/api/common" +) + +func createTransaction(w http.ResponseWriter, r *http.Request) { + l := common.LedgerFromContext(r.Context()) + + payload := TransactionRequest{} + if err := json.NewDecoder(r.Body).Decode(&payload); err != nil { + api.BadRequest(w, ErrValidation, errors.New("invalid transaction format")) + return + } + + if len(payload.Postings) > 0 && payload.Script.Plain != "" { + api.BadRequest(w, ErrValidation, errors.New("cannot pass postings and numscript in the same request")) + return + } + + if len(payload.Postings) == 0 && payload.Script.Plain == "" { + api.BadRequest(w, ErrNoPostings, errors.New("you need to pass either a posting array or a numscript script")) + return + } + runScript, err := payload.ToRunScript(api.QueryParamBool(r, "force")) + if err != nil { + api.BadRequest(w, ErrValidation, err) + return + } + + res, err := l.CreateTransaction(r.Context(), getCommandParameters(r, *runScript)) + if err != nil { + switch { + case errors.Is(err, &ledgercontroller.ErrInsufficientFunds{}): + api.BadRequest(w, ErrInsufficientFund, err) + case errors.Is(err, &ledgercontroller.ErrInvalidVars{}) || errors.Is(err, ledgercontroller.ErrCompilationFailed{}): + api.BadRequest(w, ErrCompilationFailed, err) + case errors.Is(err, &ledgercontroller.ErrMetadataOverride{}): + api.BadRequest(w, ErrMetadataOverride, err) + case errors.Is(err, ledgercontroller.ErrNoPostings): + api.BadRequest(w, ErrNoPostings, err) + case errors.Is(err, ledgercontroller.ErrTransactionReferenceConflict{}): + api.WriteErrorResponse(w, http.StatusConflict, ErrConflict, err) + case errors.Is(err, ledgercontroller.ErrInvalidIdempotencyInput{}): + api.BadRequest(w, ErrValidation, err) + default: + common.HandleCommonErrors(w, r, err) + } + return + } + + api.Ok(w, res.Transaction) +} diff --git a/internal/api/v2/controllers_transactions_create_test.go b/internal/api/v2/controllers_transactions_create_test.go new file mode 100644 index 000000000..d243b675e --- /dev/null +++ b/internal/api/v2/controllers_transactions_create_test.go @@ -0,0 +1,438 @@ +package v2 + +import ( + "github.com/formancehq/ledger/internal/api/common" + "math/big" + "net/http" + "net/http/httptest" + "net/url" + "os" + "testing" + + "github.com/formancehq/go-libs/v2/time" + + "errors" + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/auth" + 
ledger "github.com/formancehq/ledger/internal" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" +) + +func TestTransactionCreate(t *testing.T) { + type testCase struct { + name string + expectedDryRun bool + expectedRunScript ledgercontroller.RunScript + returnError error + payload any + expectedStatusCode int + expectedErrorCode string + queryParams url.Values + expectControllerCall bool + } + + testCases := []testCase{ + { + name: "using plain numscript", + payload: TransactionRequest{ + Script: ledgercontroller.ScriptV1{ + Script: ledgercontroller.Script{ + Plain: `XXX`, + }, + }, + }, + expectedRunScript: ledgercontroller.RunScript{ + Script: ledgercontroller.Script{ + Plain: `XXX`, + Vars: map[string]string{}, + }, + }, + expectControllerCall: true, + }, + { + name: "using plain numscript with variables", + payload: TransactionRequest{ + Script: ledgercontroller.ScriptV1{ + Script: ledgercontroller.Script{ + Plain: `vars { + monetary $val + } + + send $val ( + source = @world + destination = @bank + )`, + }, + Vars: map[string]any{ + "val": "USD/2 100", + }, + }, + }, + expectControllerCall: true, + expectedRunScript: ledgercontroller.RunScript{ + Script: ledgercontroller.Script{ + Plain: `vars { + monetary $val + } + + send $val ( + source = @world + destination = @bank + )`, + Vars: map[string]string{ + "val": "USD/2 100", + }, + }, + }, + }, + { + name: "using plain numscript with variables (legacy format)", + expectControllerCall: true, + payload: TransactionRequest{ + Script: ledgercontroller.ScriptV1{ + Script: ledgercontroller.Script{ + Plain: `vars { + monetary $val + } + + send $val ( + source = @world + destination = @bank + )`, + }, + Vars: map[string]any{ + "val": map[string]any{ + "asset": "USD/2", + "amount": 100, + }, + }, + }, + }, + expectedRunScript: ledgercontroller.RunScript{ + Script: ledgercontroller.Script{ + Plain: `vars { + monetary $val + } + + send $val ( + source = @world + destination = @bank + )`, + Vars: map[string]string{ + "val": "USD/2 100", + }, + }, + }, + }, + { + name: "using plain numscript and dry run", + expectControllerCall: true, + payload: TransactionRequest{ + Script: ledgercontroller.ScriptV1{ + Script: ledgercontroller.Script{ + Plain: `send ( + source = @world + destination = @bank + )`, + }, + }, + }, + expectedRunScript: ledgercontroller.RunScript{ + Script: ledgercontroller.Script{ + Plain: `send ( + source = @world + destination = @bank + )`, + Vars: map[string]string{}, + }, + }, + expectedDryRun: true, + queryParams: url.Values{ + "dryRun": []string{"true"}, + }, + }, + { + name: "using JSON postings", + expectControllerCall: true, + payload: TransactionRequest{ + Postings: []ledger.Posting{ + ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), + }, + }, + expectedRunScript: common.TxToScriptData(ledger.NewTransactionData().WithPostings( + ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), + ), false), + }, + { + name: "using JSON postings and dry run", + expectControllerCall: true, + queryParams: url.Values{ + "dryRun": []string{"true"}, + }, + payload: TransactionRequest{ + Postings: []ledger.Posting{ + ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), + }, + }, + expectedDryRun: true, + expectedRunScript: common.TxToScriptData(ledger.NewTransactionData().WithPostings( + ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), + ), false), + }, + { + name: "no postings or script", + payload: 
TransactionRequest{ + Metadata: map[string]string{}, + }, + expectedStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrNoPostings, + returnError: errors.New("you need to pass either a posting array or a numscript script"), + }, + { + name: "postings and script", + payload: TransactionRequest{ + Postings: ledger.Postings{ + { + Source: "world", + Destination: "alice", + Amount: big.NewInt(100), + Asset: "COIN", + }, + }, + Script: ledgercontroller.ScriptV1{ + Script: ledgercontroller.Script{ + Plain: ` + send [COIN 100] ( + source = @world + destination = @bob + )`, + }, + }, + }, + expectedStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + }, + { + name: "using invalid body", + payload: "not a valid payload", + expectedStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + }, + { + name: "with insufficient funds", + expectControllerCall: true, + payload: TransactionRequest{ + Script: ledgercontroller.ScriptV1{ + Script: ledgercontroller.Script{ + Plain: `XXX`, + }, + }, + }, + expectedRunScript: ledgercontroller.RunScript{ + Script: ledgercontroller.Script{ + Plain: `XXX`, + Vars: map[string]string{}, + }, + }, + returnError: &ledgercontroller.ErrInsufficientFunds{}, + expectedStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrInsufficientFund, + }, + { + name: "using JSON postings and negative amount", + payload: TransactionRequest{ + Postings: []ledger.Posting{ + ledger.NewPosting("world", "bank", "USD", big.NewInt(-100)), + }, + }, + expectedStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + }, + { + expectControllerCall: true, + name: "numscript and negative amount", + payload: TransactionRequest{ + Script: ledgercontroller.ScriptV1{ + Script: ledgercontroller.Script{ + Plain: `send [COIN -100] ( + source = @world + destination = @bob + )`, + }, + }, + }, + expectedStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrCompilationFailed, + expectedRunScript: ledgercontroller.RunScript{ + Script: ledgercontroller.Script{ + Plain: `send [COIN -100] ( + source = @world + destination = @bob + )`, + Vars: map[string]string{}, + }, + }, + returnError: &ledgercontroller.ErrInvalidVars{}, + }, + { + name: "numscript and compilation failed", + expectControllerCall: true, + payload: TransactionRequest{ + Script: ledgercontroller.ScriptV1{ + Script: ledgercontroller.Script{ + Plain: `send [COIN XXX] ( + source = @world + destination = @bob + )`, + }, + }, + }, + expectedStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrCompilationFailed, + expectedRunScript: ledgercontroller.RunScript{ + Script: ledgercontroller.Script{ + Plain: `send [COIN XXX] ( + source = @world + destination = @bob + )`, + Vars: map[string]string{}, + }, + }, + returnError: ledgercontroller.ErrCompilationFailed{}, + }, + { + name: "numscript and no postings", + expectControllerCall: true, + payload: TransactionRequest{ + Script: ledgercontroller.ScriptV1{ + Script: ledgercontroller.Script{ + Plain: `vars {}`, + }, + }, + }, + expectedStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrNoPostings, + expectedRunScript: ledgercontroller.RunScript{ + Script: ledgercontroller.Script{ + Plain: `vars {}`, + Vars: map[string]string{}, + }, + }, + returnError: ledgercontroller.ErrNoPostings, + }, + { + name: "numscript and metadata override", + expectControllerCall: true, + payload: TransactionRequest{ + Script: ledgercontroller.ScriptV1{ + Script: ledgercontroller.Script{ + Plain: `send [COIN 100] ( + source = @world + destination = @bob 
+ ) + set_tx_meta("foo", "bar")`, + }, + }, + Reference: "xxx", + Metadata: map[string]string{ + "foo": "baz", + }, + }, + expectedStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrMetadataOverride, + expectedRunScript: ledgercontroller.RunScript{ + Script: ledgercontroller.Script{ + Plain: `send [COIN 100] ( + source = @world + destination = @bob + ) + set_tx_meta("foo", "bar")`, + Vars: map[string]string{}, + }, + Reference: "xxx", + Metadata: map[string]string{ + "foo": "baz", + }, + }, + returnError: &ledgercontroller.ErrMetadataOverride{}, + }, + { + name: "unexpected error", + expectControllerCall: true, + payload: TransactionRequest{ + Script: ledgercontroller.ScriptV1{ + Script: ledgercontroller.Script{ + Plain: `send [COIN 100] ( + source = @world + destination = @bob + )`, + }, + }, + }, + expectedStatusCode: http.StatusInternalServerError, + expectedErrorCode: api.ErrorInternal, + expectedRunScript: ledgercontroller.RunScript{ + Script: ledgercontroller.Script{ + Plain: `send [COIN 100] ( + source = @world + destination = @bob + )`, + Vars: map[string]string{}, + }, + }, + returnError: errors.New("unexpected error"), + }, + } + + for _, testCase := range testCases { + tc := testCase + t.Run(tc.name, func(t *testing.T) { + if testCase.expectedStatusCode == 0 { + testCase.expectedStatusCode = http.StatusOK + } + + expectedTx := ledger.NewTransaction().WithPostings( + ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), + ) + + systemController, ledgerController := newTestingSystemController(t, true) + if testCase.expectControllerCall { + testCase.expectedRunScript.Timestamp = time.Time{} + expect := ledgerController.EXPECT(). + CreateTransaction(gomock.Any(), ledgercontroller.Parameters[ledgercontroller.RunScript]{ + DryRun: tc.expectedDryRun, + Input: testCase.expectedRunScript, + }) + + if tc.returnError == nil { + expect.Return(&ledger.CreatedTransaction{ + Transaction: expectedTx, + }, nil) + } else { + expect.Return(nil, tc.returnError) + } + } + + router := NewRouter(systemController, auth.NewNoAuth(), os.Getenv("DEBUG") == "true") + + req := httptest.NewRequest(http.MethodPost, "/xxx/transactions", api.Buffer(t, testCase.payload)) + rec := httptest.NewRecorder() + req.URL.RawQuery = testCase.queryParams.Encode() + + router.ServeHTTP(rec, req) + + require.Equal(t, testCase.expectedStatusCode, rec.Code) + if testCase.expectedStatusCode < 300 && testCase.expectedStatusCode >= 200 { + tx, ok := api.DecodeSingleResponse[ledger.Transaction](t, rec.Body) + require.True(t, ok) + require.Equal(t, expectedTx, tx) + } else { + err := api.ErrorResponse{} + api.Decode(t, rec.Body, &err) + require.EqualValues(t, testCase.expectedErrorCode, err.ErrorCode) + } + }) + } +} diff --git a/internal/api/v2/controllers_transactions_delete_metadata.go b/internal/api/v2/controllers_transactions_delete_metadata.go new file mode 100644 index 000000000..0d7c281a1 --- /dev/null +++ b/internal/api/v2/controllers_transactions_delete_metadata.go @@ -0,0 +1,42 @@ +package v2 + +import ( + "net/http" + "strconv" + + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + + "github.com/go-chi/chi/v5" + + "errors" + "github.com/formancehq/ledger/internal/api/common" + + "github.com/formancehq/go-libs/v2/api" +) + +func deleteTransactionMetadata(w http.ResponseWriter, r *http.Request) { + l := common.LedgerFromContext(r.Context()) + + txID, err := strconv.ParseInt(chi.URLParam(r, "id"), 10, 64) + if err != nil { + api.BadRequest(w, ErrValidation, err) + return + } + + 
metadataKey := chi.URLParam(r, "key") + + if err := l.DeleteTransactionMetadata(r.Context(), getCommandParameters(r, ledgercontroller.DeleteTransactionMetadata{ + TransactionID: int(txID), + Key: metadataKey, + })); err != nil { + switch { + case errors.Is(err, ledgercontroller.ErrNotFound): + api.NotFound(w, err) + default: + common.HandleCommonErrors(w, r, err) + } + return + } + + api.NoContent(w) +} diff --git a/internal/api/v2/controllers_transactions_delete_metadata_test.go b/internal/api/v2/controllers_transactions_delete_metadata_test.go new file mode 100644 index 000000000..882ca685e --- /dev/null +++ b/internal/api/v2/controllers_transactions_delete_metadata_test.go @@ -0,0 +1,90 @@ +package v2 + +import ( + "encoding/json" + "net/http" + "net/http/httptest" + "net/url" + "os" + "testing" + + "errors" + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/auth" + "github.com/formancehq/go-libs/v2/logging" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" +) + +func TestTransactionsDeleteMetadata(t *testing.T) { + t.Parallel() + + ctx := logging.TestingContext() + + type testCase struct { + name string + queryParams url.Values + returnErr error + expectedStatusCode int + expectedErrorCode string + expectBackendCall bool + } + + for _, tc := range []testCase{ + { + name: "nominal", + expectBackendCall: true, + }, + { + name: "unexpected backend error", + expectBackendCall: true, + returnErr: errors.New("undefined error"), + expectedStatusCode: http.StatusInternalServerError, + expectedErrorCode: api.ErrorInternal, + }, + { + name: "not found", + expectBackendCall: true, + returnErr: ledgercontroller.ErrNotFound, + expectedStatusCode: http.StatusNotFound, + expectedErrorCode: api.ErrorCodeNotFound, + }, + } { + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + systemController, ledgerController := newTestingSystemController(t, true) + + if tc.expectBackendCall { + ledgerController.EXPECT(). + DeleteTransactionMetadata(gomock.Any(), ledgercontroller.Parameters[ledgercontroller.DeleteTransactionMetadata]{ + Input: ledgercontroller.DeleteTransactionMetadata{ + TransactionID: 1, + Key: "foo", + }, + }). 
+ Return(tc.returnErr) + } + + router := NewRouter(systemController, auth.NewNoAuth(), os.Getenv("DEBUG") == "true") + + req := httptest.NewRequest(http.MethodDelete, "/ledger0/transactions/1/metadata/foo", nil) + req = req.WithContext(ctx) + req.URL.RawQuery = tc.queryParams.Encode() + + rec := httptest.NewRecorder() + + router.ServeHTTP(rec, req) + + if tc.expectedStatusCode == 0 || tc.expectedStatusCode == http.StatusOK { + require.Equal(t, http.StatusNoContent, rec.Code) + } else { + require.Equal(t, tc.expectedStatusCode, rec.Code) + errorResponse := api.ErrorResponse{} + require.NoError(t, json.Unmarshal(rec.Body.Bytes(), &errorResponse)) + require.Equal(t, tc.expectedErrorCode, errorResponse.ErrorCode) + } + }) + } +} diff --git a/internal/api/v2/controllers_transactions_list.go b/internal/api/v2/controllers_transactions_list.go new file mode 100644 index 000000000..e0ee50f75 --- /dev/null +++ b/internal/api/v2/controllers_transactions_list.go @@ -0,0 +1,50 @@ +package v2 + +import ( + "net/http" + + "errors" + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/bun/bunpaginate" + "github.com/formancehq/go-libs/v2/pointer" + "github.com/formancehq/ledger/internal/api/common" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" +) + +func listTransactions(w http.ResponseWriter, r *http.Request) { + l := common.LedgerFromContext(r.Context()) + + query, err := bunpaginate.Extract[ledgercontroller.ListTransactionsQuery](r, func() (*ledgercontroller.ListTransactionsQuery, error) { + options, err := getPaginatedQueryOptionsOfPITFilterWithVolumes(r) + if err != nil { + return nil, err + } + q := ledgercontroller.NewListTransactionsQuery(*options) + + if r.URL.Query().Get("order") == "effective" { + q.Column = "timestamp" + } + if r.URL.Query().Get("reverse") == "true" { + q.Order = bunpaginate.OrderAsc + } + + return pointer.For(q), nil + }) + if err != nil { + api.BadRequest(w, ErrValidation, err) + return + } + + cursor, err := l.ListTransactions(r.Context(), *query) + if err != nil { + switch { + case errors.Is(err, ledgercontroller.ErrInvalidQuery{}) || errors.Is(err, ledgercontroller.ErrMissingFeature{}): + api.BadRequest(w, ErrValidation, err) + default: + common.HandleCommonErrors(w, r, err) + } + return + } + + api.RenderCursor(w, *cursor) +} diff --git a/internal/api/v2/controllers_transactions_list_test.go b/internal/api/v2/controllers_transactions_list_test.go new file mode 100644 index 000000000..d4fc10e01 --- /dev/null +++ b/internal/api/v2/controllers_transactions_list_test.go @@ -0,0 +1,229 @@ +package v2 + +import ( + "bytes" + "fmt" + "math/big" + "net/http" + "net/http/httptest" + "net/url" + "os" + "testing" + + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/auth" + "github.com/formancehq/go-libs/v2/bun/bunpaginate" + "github.com/formancehq/go-libs/v2/query" + "github.com/formancehq/go-libs/v2/time" + ledger "github.com/formancehq/ledger/internal" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" +) + +func TestTransactionsList(t *testing.T) { + t.Parallel() + + type testCase struct { + name string + queryParams url.Values + body string + expectQuery ledgercontroller.ListTransactionsQuery + expectStatusCode int + expectedErrorCode string + } + now := time.Now() + + testCases := []testCase{ + { + name: "nominal", + expectQuery: 
ledgercontroller.NewListTransactionsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &now, + }, + })), + }, + { + name: "using metadata", + body: `{"$match": {"metadata[roles]": "admin"}}`, + expectQuery: ledgercontroller.NewListTransactionsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &now, + }, + }). + WithQueryBuilder(query.Match("metadata[roles]", "admin"))), + }, + { + name: "using startTime", + body: fmt.Sprintf(`{"$gte": {"start_time": "%s"}}`, now.Format(time.DateFormat)), + expectQuery: ledgercontroller.NewListTransactionsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &now, + }, + }). + WithQueryBuilder(query.Gte("start_time", now.Format(time.DateFormat)))), + }, + { + name: "using endTime", + body: fmt.Sprintf(`{"$lte": {"end_time": "%s"}}`, now.Format(time.DateFormat)), + expectQuery: ledgercontroller.NewListTransactionsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &now, + }, + }). + WithQueryBuilder(query.Lte("end_time", now.Format(time.DateFormat)))), + }, + { + name: "using account", + body: `{"$match": {"account": "xxx"}}`, + expectQuery: ledgercontroller.NewListTransactionsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &now, + }, + }). + WithQueryBuilder(query.Match("account", "xxx"))), + }, + { + name: "using reference", + body: `{"$match": {"reference": "xxx"}}`, + expectQuery: ledgercontroller.NewListTransactionsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &now, + }, + }). + WithQueryBuilder(query.Match("reference", "xxx"))), + }, + { + name: "using destination", + body: `{"$match": {"destination": "xxx"}}`, + expectQuery: ledgercontroller.NewListTransactionsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &now, + }, + }). + WithQueryBuilder(query.Match("destination", "xxx"))), + }, + { + name: "using source", + body: `{"$match": {"source": "xxx"}}`, + expectQuery: ledgercontroller.NewListTransactionsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &now, + }, + }). 
+ WithQueryBuilder(query.Match("source", "xxx"))), + }, + { + name: "using empty cursor", + queryParams: url.Values{ + "cursor": []string{bunpaginate.EncodeCursor(ledgercontroller.NewListTransactionsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{})))}, + }, + expectQuery: ledgercontroller.NewListTransactionsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{}, + })), + }, + { + name: "using invalid cursor", + queryParams: url.Values{ + "cursor": []string{"XXX"}, + }, + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + }, + { + name: "invalid page size", + queryParams: url.Values{ + "pageSize": []string{"nan"}, + }, + expectStatusCode: http.StatusBadRequest, + expectedErrorCode: ErrValidation, + }, + { + name: "page size over maximum", + queryParams: url.Values{ + "pageSize": []string{"1000000"}, + }, + expectQuery: ledgercontroller.NewListTransactionsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &now, + }, + }). + WithPageSize(MaxPageSize)), + }, + { + name: "using cursor", + queryParams: url.Values{ + "cursor": []string{"eyJwYWdlU2l6ZSI6MTUsImJvdHRvbSI6bnVsbCwiY29sdW1uIjoiaWQiLCJwYWdpbmF0aW9uSUQiOm51bGwsIm9yZGVyIjoxLCJmaWx0ZXJzIjp7InFiIjp7fSwicGFnZVNpemUiOjE1LCJvcHRpb25zIjp7InBpdCI6bnVsbCwidm9sdW1lcyI6ZmFsc2UsImVmZmVjdGl2ZVZvbHVtZXMiOmZhbHNlfX0sInJldmVyc2UiOmZhbHNlfQ"}, + }, + expectQuery: ledgercontroller.NewListTransactionsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{})), + }, + { + name: "using $exists metadata filter", + body: `{"$exists": {"metadata": "foo"}}`, + expectQuery: ledgercontroller.NewListTransactionsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &now, + }, + }). + WithQueryBuilder(query.Exists("metadata", "foo"))), + }, + { + name: "paginate using effective order", + queryParams: map[string][]string{"order": {"effective"}}, + expectQuery: ledgercontroller.NewListTransactionsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &now, + }, + })). + WithColumn("timestamp"), + }, + } + for _, testCase := range testCases { + testCase := testCase + t.Run(testCase.name, func(t *testing.T) { + + if testCase.expectStatusCode == 0 { + testCase.expectStatusCode = http.StatusOK + } + + expectedCursor := bunpaginate.Cursor[ledger.Transaction]{ + Data: []ledger.Transaction{ + ledger.NewTransaction().WithPostings( + ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), + ), + }, + } + + systemController, ledgerController := newTestingSystemController(t, true) + if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { + ledgerController.EXPECT(). + ListTransactions(gomock.Any(), testCase.expectQuery). 
+ Return(&expectedCursor, nil) + } + + router := NewRouter(systemController, auth.NewNoAuth(), os.Getenv("DEBUG") == "true") + + req := httptest.NewRequest(http.MethodGet, "/xxx/transactions", bytes.NewBufferString(testCase.body)) + rec := httptest.NewRecorder() + params := url.Values{} + if testCase.queryParams != nil { + params = testCase.queryParams + } + params.Set("pit", now.Format(time.RFC3339Nano)) + req.URL.RawQuery = params.Encode() + + router.ServeHTTP(rec, req) + + require.Equal(t, testCase.expectStatusCode, rec.Code) + if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { + cursor := api.DecodeCursorResponse[ledger.Transaction](t, rec.Body) + require.Equal(t, expectedCursor, *cursor) + } else { + err := api.ErrorResponse{} + api.Decode(t, rec.Body, &err) + require.EqualValues(t, testCase.expectedErrorCode, err.ErrorCode) + + } + }) + } +} diff --git a/internal/api/v2/controllers_transactions_read.go b/internal/api/v2/controllers_transactions_read.go new file mode 100644 index 000000000..6899c3bb2 --- /dev/null +++ b/internal/api/v2/controllers_transactions_read.go @@ -0,0 +1,50 @@ +package v2 + +import ( + "net/http" + "strconv" + + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/platform/postgres" + "github.com/formancehq/ledger/internal/api/common" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + "github.com/go-chi/chi/v5" +) + +func readTransaction(w http.ResponseWriter, r *http.Request) { + l := common.LedgerFromContext(r.Context()) + + txId, err := strconv.ParseInt(chi.URLParam(r, "id"), 10, 64) + if err != nil { + api.BadRequest(w, ErrValidation, err) + return + } + + query := ledgercontroller.NewGetTransactionQuery(int(txId)) + if hasExpandVolumes(r) { + query = query.WithExpandVolumes() + } + if hasExpandEffectiveVolumes(r) { + query = query.WithExpandEffectiveVolumes() + } + + pitFilter, err := getPITFilter(r) + if err != nil { + api.BadRequest(w, ErrValidation, err) + return + } + query.PITFilter = *pitFilter + + tx, err := l.GetTransaction(r.Context(), query) + if err != nil { + switch { + case postgres.IsNotFoundError(err): + api.NotFound(w, err) + default: + common.HandleCommonErrors(w, r, err) + } + return + } + + api.Ok(w, tx) +} diff --git a/internal/api/v2/controllers_transactions_read_test.go b/internal/api/v2/controllers_transactions_read_test.go new file mode 100644 index 000000000..8eba1283f --- /dev/null +++ b/internal/api/v2/controllers_transactions_read_test.go @@ -0,0 +1,46 @@ +package v2 + +import ( + "math/big" + "net/http" + "net/http/httptest" + "os" + "testing" + + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/auth" + "github.com/formancehq/go-libs/v2/time" + ledger "github.com/formancehq/ledger/internal" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" +) + +func TestTransactionsRead(t *testing.T) { + t.Parallel() + + now := time.Now() + + tx := ledger.NewTransaction().WithPostings( + ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), + ) + + query := ledgercontroller.NewGetTransactionQuery(0) + query.PIT = &now + + systemController, ledgerController := newTestingSystemController(t, true) + ledgerController.EXPECT(). + GetTransaction(gomock.Any(), query). 
+ Return(&tx, nil) + + router := NewRouter(systemController, auth.NewNoAuth(), os.Getenv("DEBUG") == "true") + + req := httptest.NewRequest(http.MethodGet, "/xxx/transactions/0?pit="+now.Format(time.RFC3339Nano), nil) + rec := httptest.NewRecorder() + + router.ServeHTTP(rec, req) + + require.Equal(t, http.StatusOK, rec.Code) + response, _ := api.DecodeSingleResponse[ledger.Transaction](t, rec.Body) + require.Equal(t, tx, response) +} diff --git a/internal/api/v2/controllers_transactions_revert.go b/internal/api/v2/controllers_transactions_revert.go new file mode 100644 index 000000000..2d7af3255 --- /dev/null +++ b/internal/api/v2/controllers_transactions_revert.go @@ -0,0 +1,47 @@ +package v2 + +import ( + "net/http" + "strconv" + + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + + "errors" + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/ledger/internal/api/common" + "github.com/go-chi/chi/v5" +) + +func revertTransaction(w http.ResponseWriter, r *http.Request) { + l := common.LedgerFromContext(r.Context()) + + txId, err := strconv.ParseInt(chi.URLParam(r, "id"), 10, 64) + if err != nil { + api.BadRequest(w, ErrValidation, err) + return + } + + ret, err := l.RevertTransaction( + r.Context(), + getCommandParameters(r, ledgercontroller.RevertTransaction{ + Force: api.QueryParamBool(r, "force"), + AtEffectiveDate: api.QueryParamBool(r, "atEffectiveDate"), + TransactionID: int(txId), + }), + ) + if err != nil { + switch { + case errors.Is(err, &ledgercontroller.ErrInsufficientFunds{}): + api.BadRequest(w, ErrInsufficientFund, err) + case errors.Is(err, ledgercontroller.ErrAlreadyReverted{}): + api.BadRequest(w, ErrAlreadyRevert, err) + case errors.Is(err, ledgercontroller.ErrNotFound): + api.NotFound(w, err) + default: + common.HandleCommonErrors(w, r, err) + } + return + } + + api.Created(w, ret.RevertTransaction) +} diff --git a/internal/api/v2/controllers_transactions_revert_test.go b/internal/api/v2/controllers_transactions_revert_test.go new file mode 100644 index 000000000..987c4d513 --- /dev/null +++ b/internal/api/v2/controllers_transactions_revert_test.go @@ -0,0 +1,106 @@ +package v2 + +import ( + "math/big" + "net/http" + "net/http/httptest" + "net/url" + "os" + "testing" + + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/auth" + ledger "github.com/formancehq/ledger/internal" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" +) + +func TestTransactionsRevert(t *testing.T) { + t.Parallel() + type testCase struct { + name string + queryParams url.Values + returnTx ledger.Transaction + returnErr error + expectForce bool + expectStatusCode int + expectErrorCode string + } + + testCases := []testCase{ + { + name: "nominal", + returnTx: ledger.NewTransaction().WithPostings( + ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), + ), + }, + { + name: "force revert", + returnTx: ledger.NewTransaction().WithPostings( + ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), + ), + expectForce: true, + queryParams: map[string][]string{"force": {"true"}}, + }, + { + name: "with insufficient fund", + returnErr: &ledgercontroller.ErrInsufficientFunds{}, + expectStatusCode: http.StatusBadRequest, + expectErrorCode: ErrInsufficientFund, + }, + { + name: "with already revert", + returnErr: &ledgercontroller.ErrAlreadyReverted{}, + expectStatusCode: http.StatusBadRequest, + expectErrorCode: ErrAlreadyRevert, + }, + { + 
name: "with transaction not found", + returnErr: ledgercontroller.ErrNotFound, + expectStatusCode: http.StatusNotFound, + expectErrorCode: api.ErrorCodeNotFound, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + systemController, ledgerController := newTestingSystemController(t, true) + ledgerController. + EXPECT(). + RevertTransaction(gomock.Any(), ledgercontroller.Parameters[ledgercontroller.RevertTransaction]{ + Input: ledgercontroller.RevertTransaction{ + Force: tc.expectForce, + }, + }). + Return(&ledger.RevertedTransaction{ + RevertTransaction: tc.returnTx, + }, tc.returnErr) + + router := NewRouter(systemController, auth.NewNoAuth(), os.Getenv("DEBUG") == "true") + + req := httptest.NewRequest(http.MethodPost, "/xxx/transactions/0/revert", nil) + if tc.queryParams != nil { + req.URL.RawQuery = tc.queryParams.Encode() + } + rec := httptest.NewRecorder() + + router.ServeHTTP(rec, req) + + if tc.expectStatusCode == 0 { + require.Equal(t, http.StatusCreated, rec.Code) + tx, ok := api.DecodeSingleResponse[ledger.Transaction](t, rec.Body) + require.True(t, ok) + require.Equal(t, tc.returnTx, tx) + } else { + require.Equal(t, tc.expectStatusCode, rec.Code) + err := api.ErrorResponse{} + api.Decode(t, rec.Body, &err) + require.EqualValues(t, tc.expectErrorCode, err.ErrorCode) + } + }) + } +} diff --git a/internal/api/v2/controllers_transactions_test.go b/internal/api/v2/controllers_transactions_test.go deleted file mode 100644 index cbc146994..000000000 --- a/internal/api/v2/controllers_transactions_test.go +++ /dev/null @@ -1,1002 +0,0 @@ -package v2_test - -import ( - "bytes" - "fmt" - "math/big" - "net/http" - "net/http/httptest" - "net/url" - "testing" - - "github.com/formancehq/go-libs/time" - - "github.com/formancehq/go-libs/auth" - "github.com/formancehq/go-libs/bun/bunpaginate" - - "github.com/formancehq/ledger/internal/api/backend" - "github.com/pkg/errors" - - "github.com/formancehq/ledger/internal/engine" - - "github.com/formancehq/ledger/internal/machine" - - sharedapi "github.com/formancehq/go-libs/api" - "github.com/formancehq/go-libs/metadata" - "github.com/formancehq/go-libs/query" - ledger "github.com/formancehq/ledger/internal" - v2 "github.com/formancehq/ledger/internal/api/v2" - "github.com/formancehq/ledger/internal/engine/command" - "github.com/formancehq/ledger/internal/opentelemetry/metrics" - "github.com/formancehq/ledger/internal/storage/ledgerstore" - "github.com/stretchr/testify/require" - "go.uber.org/mock/gomock" -) - -func TestPostTransactions(t *testing.T) { - type testCase struct { - name string - expectedDryRun bool - expectedRunScript ledger.RunScript - returnError error - payload any - expectedStatusCode int - expectedErrorCode string - expectedErrorDetails string - queryParams url.Values - expectEngineCall bool - } - - testCases := []testCase{ - { - name: "using plain numscript", - payload: ledger.TransactionRequest{ - Script: ledger.ScriptV1{ - Script: ledger.Script{ - Plain: `XXX`, - }, - }, - }, - expectedRunScript: ledger.RunScript{ - Script: ledger.Script{ - Plain: `XXX`, - Vars: map[string]string{}, - }, - }, - expectEngineCall: true, - }, - { - name: "using plain numscript with variables", - payload: ledger.TransactionRequest{ - Script: ledger.ScriptV1{ - Script: ledger.Script{ - Plain: `vars { - monetary $val - } - - send $val ( - source = @world - destination = @bank - )`, - }, - Vars: map[string]any{ - "val": "USD/2 100", - }, - }, - }, - expectEngineCall: true, - expectedRunScript: 
ledger.RunScript{ - Script: ledger.Script{ - Plain: `vars { - monetary $val - } - - send $val ( - source = @world - destination = @bank - )`, - Vars: map[string]string{ - "val": "USD/2 100", - }, - }, - }, - }, - { - name: "using plain numscript with variables (legacy format)", - expectEngineCall: true, - payload: ledger.TransactionRequest{ - Script: ledger.ScriptV1{ - Script: ledger.Script{ - Plain: `vars { - monetary $val - } - - send $val ( - source = @world - destination = @bank - )`, - }, - Vars: map[string]any{ - "val": map[string]any{ - "asset": "USD/2", - "amount": 100, - }, - }, - }, - }, - expectedRunScript: ledger.RunScript{ - Script: ledger.Script{ - Plain: `vars { - monetary $val - } - - send $val ( - source = @world - destination = @bank - )`, - Vars: map[string]string{ - "val": "USD/2 100", - }, - }, - }, - }, - { - name: "using plain numscript and dry run", - expectEngineCall: true, - payload: ledger.TransactionRequest{ - Script: ledger.ScriptV1{ - Script: ledger.Script{ - Plain: `send ( - source = @world - destination = @bank - )`, - }, - }, - }, - expectedRunScript: ledger.RunScript{ - Script: ledger.Script{ - Plain: `send ( - source = @world - destination = @bank - )`, - Vars: map[string]string{}, - }, - }, - expectedDryRun: true, - queryParams: url.Values{ - "dryRun": []string{"true"}, - }, - }, - { - name: "using JSON postings", - expectEngineCall: true, - payload: ledger.TransactionRequest{ - Postings: []ledger.Posting{ - ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), - }, - }, - expectedRunScript: ledger.TxToScriptData(ledger.NewTransactionData().WithPostings( - ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), - ), false), - }, - { - name: "using JSON postings and dry run", - expectEngineCall: true, - queryParams: url.Values{ - "dryRun": []string{"true"}, - }, - payload: ledger.TransactionRequest{ - Postings: []ledger.Posting{ - ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), - }, - }, - expectedDryRun: true, - expectedRunScript: ledger.TxToScriptData(ledger.NewTransactionData().WithPostings( - ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), - ), false), - }, - { - name: "no postings or script", - expectEngineCall: true, - payload: ledger.TransactionRequest{ - Script: ledger.ScriptV1{ - Script: ledger.TxToScriptData(ledger.NewTransactionData(), false).Script, - }, - Metadata: map[string]string{}, - }, - expectedRunScript: ledger.TxToScriptData(ledger.NewTransactionData(), false), - expectedStatusCode: http.StatusBadRequest, - expectedErrorCode: v2.ErrNoPostings, - returnError: engine.NewCommandError(command.NewErrNoPostings()), - }, - { - name: "postings and script", - payload: ledger.TransactionRequest{ - Postings: ledger.Postings{ - { - Source: "world", - Destination: "alice", - Amount: big.NewInt(100), - Asset: "COIN", - }, - }, - Script: ledger.ScriptV1{ - Script: ledger.Script{ - Plain: ` - send [COIN 100] ( - source = @world - destination = @bob - )`, - }, - }, - }, - expectedStatusCode: http.StatusBadRequest, - expectedErrorCode: v2.ErrValidation, - }, - { - name: "using invalid body", - payload: "not a valid payload", - expectedStatusCode: http.StatusBadRequest, - expectedErrorCode: v2.ErrValidation, - }, - { - name: "with insufficient funds", - expectEngineCall: true, - payload: ledger.TransactionRequest{ - Script: ledger.ScriptV1{ - Script: ledger.Script{ - Plain: `XXX`, - }, - }, - }, - expectedRunScript: ledger.RunScript{ - Script: ledger.Script{ - Plain: `XXX`, - Vars: map[string]string{}, - }, - }, - 
returnError: engine.NewCommandError(command.NewErrMachine(&machine.ErrInsufficientFund{})), - expectedStatusCode: http.StatusBadRequest, - expectedErrorCode: v2.ErrInsufficientFund, - }, - { - name: "using JSON postings and negative amount", - payload: ledger.TransactionRequest{ - Postings: []ledger.Posting{ - ledger.NewPosting("world", "bank", "USD", big.NewInt(-100)), - }, - }, - expectEngineCall: true, - expectedStatusCode: http.StatusBadRequest, - expectedErrorCode: v2.ErrCompilationFailed, - expectedRunScript: ledger.TxToScriptData(ledger.NewTransactionData().WithPostings( - ledger.NewPosting("world", "bank", "USD", big.NewInt(-100)), - ), false), - expectedErrorDetails: backend.EncodeLink(`compilation failed`), - returnError: engine.NewCommandError( - command.NewErrInvalidTransaction(command.ErrInvalidTransactionCodeCompilationFailed, errors.New("compilation failed")), - ), - }, - { - expectEngineCall: true, - name: "numscript and negative amount", - payload: ledger.TransactionRequest{ - Script: ledger.ScriptV1{ - Script: ledger.Script{ - Plain: `send [COIN -100] ( - source = @world - destination = @bob - )`, - }, - }, - }, - expectedStatusCode: http.StatusBadRequest, - expectedErrorCode: v2.ErrCompilationFailed, - expectedErrorDetails: backend.EncodeLink("compilation failed"), - expectedRunScript: ledger.RunScript{ - Script: ledger.Script{ - Plain: `send [COIN -100] ( - source = @world - destination = @bob - )`, - Vars: map[string]string{}, - }, - }, - returnError: engine.NewCommandError( - command.NewErrInvalidTransaction(command.ErrInvalidTransactionCodeCompilationFailed, errors.New("compilation failed")), - ), - }, - { - name: "numscript and compilation failed", - expectEngineCall: true, - payload: ledger.TransactionRequest{ - Script: ledger.ScriptV1{ - Script: ledger.Script{ - Plain: `send [COIN XXX] ( - source = @world - destination = @bob - )`, - }, - }, - }, - expectedStatusCode: http.StatusBadRequest, - expectedErrorCode: v2.ErrCompilationFailed, - expectedRunScript: ledger.RunScript{ - Script: ledger.Script{ - Plain: `send [COIN XXX] ( - source = @world - destination = @bob - )`, - Vars: map[string]string{}, - }, - }, - expectedErrorDetails: backend.EncodeLink("compilation failed"), - returnError: engine.NewCommandError( - command.NewErrCompilationFailed(fmt.Errorf("compilation failed")), - ), - }, - { - name: "numscript and no postings", - expectEngineCall: true, - payload: ledger.TransactionRequest{ - Script: ledger.ScriptV1{ - Script: ledger.Script{ - Plain: `vars {}`, - }, - }, - }, - expectedStatusCode: http.StatusBadRequest, - expectedErrorCode: v2.ErrNoPostings, - expectedRunScript: ledger.RunScript{ - Script: ledger.Script{ - Plain: `vars {}`, - Vars: map[string]string{}, - }, - }, - returnError: engine.NewCommandError( - command.NewErrNoPostings(), - ), - }, - { - name: "numscript and conflict", - expectEngineCall: true, - payload: ledger.TransactionRequest{ - Script: ledger.ScriptV1{ - Script: ledger.Script{ - Plain: `vars {}`, - }, - }, - Reference: "xxx", - }, - expectedStatusCode: http.StatusBadRequest, - expectedErrorCode: v2.ErrConflict, - expectedRunScript: ledger.RunScript{ - Script: ledger.Script{ - Plain: `vars {}`, - Vars: map[string]string{}, - }, - Reference: "xxx", - }, - returnError: engine.NewCommandError( - command.NewErrConflict(), - ), - }, - { - name: "numscript and metadata override", - expectEngineCall: true, - payload: ledger.TransactionRequest{ - Script: ledger.ScriptV1{ - Script: ledger.Script{ - Plain: `send [COIN 100] ( - source = @world 
- destination = @bob - ) - set_tx_meta("foo", "bar")`, - }, - }, - Reference: "xxx", - Metadata: map[string]string{ - "foo": "baz", - }, - }, - expectedStatusCode: http.StatusBadRequest, - expectedErrorCode: v2.ErrMetadataOverride, - expectedRunScript: ledger.RunScript{ - Script: ledger.Script{ - Plain: `send [COIN 100] ( - source = @world - destination = @bob - ) - set_tx_meta("foo", "bar")`, - Vars: map[string]string{}, - }, - Reference: "xxx", - Metadata: map[string]string{ - "foo": "baz", - }, - }, - returnError: engine.NewCommandError( - command.NewErrMachine(&machine.ErrMetadataOverride{}), - ), - }, - } - - for _, testCase := range testCases { - tc := testCase - t.Run(tc.name, func(t *testing.T) { - if testCase.expectedStatusCode == 0 { - testCase.expectedStatusCode = http.StatusOK - } - - expectedTx := ledger.NewTransaction().WithPostings( - ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), - ) - - backend, mockLedger := newTestingBackend(t, true) - if testCase.expectEngineCall { - expect := mockLedger.EXPECT(). - CreateTransaction(gomock.Any(), command.Parameters{ - DryRun: tc.expectedDryRun, - }, testCase.expectedRunScript) - - if tc.returnError == nil { - expect.Return(expectedTx, nil) - } else { - expect.Return(nil, tc.returnError) - } - } - - router := v2.NewRouter(backend, nil, metrics.NewNoOpRegistry(), auth.NewNoAuth(), testing.Verbose()) - - req := httptest.NewRequest(http.MethodPost, "/xxx/transactions", sharedapi.Buffer(t, testCase.payload)) - rec := httptest.NewRecorder() - req.URL.RawQuery = testCase.queryParams.Encode() - - router.ServeHTTP(rec, req) - - require.Equal(t, testCase.expectedStatusCode, rec.Code) - if testCase.expectedStatusCode < 300 && testCase.expectedStatusCode >= 200 { - tx, ok := sharedapi.DecodeSingleResponse[ledger.Transaction](t, rec.Body) - require.True(t, ok) - require.Equal(t, *expectedTx, tx) - } else { - err := sharedapi.ErrorResponse{} - sharedapi.Decode(t, rec.Body, &err) - require.EqualValues(t, testCase.expectedErrorCode, err.ErrorCode) - require.EqualValues(t, testCase.expectedErrorDetails, err.Details) - - } - }) - } -} - -func TestPostTransactionMetadata(t *testing.T) { - t.Parallel() - - type testCase struct { - name string - queryParams url.Values - expectStatusCode int - expectedErrorCode string - body any - } - - testCases := []testCase{ - { - name: "nominal", - body: metadata.Metadata{ - "foo": "bar", - }, - }, - { - name: "invalid body", - body: "invalid - not an object", - expectStatusCode: http.StatusBadRequest, - expectedErrorCode: v2.ErrValidation, - }, - } - for _, testCase := range testCases { - testCase := testCase - t.Run(testCase.name, func(t *testing.T) { - - if testCase.expectStatusCode == 0 { - testCase.expectStatusCode = http.StatusNoContent - } - - backend, mock := newTestingBackend(t, true) - if testCase.expectStatusCode == http.StatusNoContent { - mock.EXPECT(). - SaveMeta(gomock.Any(), command.Parameters{}, ledger.MetaTargetTypeTransaction, big.NewInt(0), testCase.body). 
- Return(nil) - } - - router := v2.NewRouter(backend, nil, metrics.NewNoOpRegistry(), auth.NewNoAuth(), testing.Verbose()) - - req := httptest.NewRequest(http.MethodPost, "/xxx/transactions/0/metadata", sharedapi.Buffer(t, testCase.body)) - rec := httptest.NewRecorder() - req.URL.RawQuery = testCase.queryParams.Encode() - - router.ServeHTTP(rec, req) - - require.Equal(t, testCase.expectStatusCode, rec.Code) - if testCase.expectStatusCode >= 300 || testCase.expectStatusCode < 200 { - err := sharedapi.ErrorResponse{} - sharedapi.Decode(t, rec.Body, &err) - require.EqualValues(t, testCase.expectedErrorCode, err.ErrorCode) - } - }) - } -} - -func TestGetTransaction(t *testing.T) { - t.Parallel() - - now := time.Now() - - tx := ledger.ExpandTransaction( - ledger.NewTransaction().WithPostings( - ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), - ), - nil, - ) - - query := ledgerstore.NewGetTransactionQuery(big.NewInt(0)) - query.PIT = &now - - backend, mock := newTestingBackend(t, true) - mock.EXPECT(). - GetTransactionWithVolumes(gomock.Any(), query). - Return(&tx, nil) - - router := v2.NewRouter(backend, nil, metrics.NewNoOpRegistry(), auth.NewNoAuth(), testing.Verbose()) - - req := httptest.NewRequest(http.MethodGet, "/xxx/transactions/0?pit="+now.Format(time.RFC3339Nano), nil) - rec := httptest.NewRecorder() - - router.ServeHTTP(rec, req) - - require.Equal(t, http.StatusOK, rec.Code) - response, _ := sharedapi.DecodeSingleResponse[ledger.ExpandedTransaction](t, rec.Body) - require.Equal(t, tx, response) -} - -func TestGetTransactions(t *testing.T) { - t.Parallel() - - type testCase struct { - name string - queryParams url.Values - body string - expectQuery ledgerstore.GetTransactionsQuery - expectStatusCode int - expectedErrorCode string - } - now := time.Now() - - testCases := []testCase{ - { - name: "nominal", - expectQuery: ledgerstore.NewGetTransactionsQuery(ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{ - PITFilter: ledgerstore.PITFilter{ - PIT: &now, - }, - })), - }, - { - name: "using metadata", - body: `{"$match": {"metadata[roles]": "admin"}}`, - expectQuery: ledgerstore.NewGetTransactionsQuery(ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{ - PITFilter: ledgerstore.PITFilter{ - PIT: &now, - }, - }). - WithQueryBuilder(query.Match("metadata[roles]", "admin"))), - }, - { - name: "using startTime", - body: fmt.Sprintf(`{"$gte": {"start_time": "%s"}}`, now.Format(time.DateFormat)), - expectQuery: ledgerstore.NewGetTransactionsQuery(ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{ - PITFilter: ledgerstore.PITFilter{ - PIT: &now, - }, - }). - WithQueryBuilder(query.Gte("start_time", now.Format(time.DateFormat)))), - }, - { - name: "using endTime", - body: fmt.Sprintf(`{"$lte": {"end_time": "%s"}}`, now.Format(time.DateFormat)), - expectQuery: ledgerstore.NewGetTransactionsQuery(ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{ - PITFilter: ledgerstore.PITFilter{ - PIT: &now, - }, - }). - WithQueryBuilder(query.Lte("end_time", now.Format(time.DateFormat)))), - }, - { - name: "using account", - body: `{"$match": {"account": "xxx"}}`, - expectQuery: ledgerstore.NewGetTransactionsQuery(ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{ - PITFilter: ledgerstore.PITFilter{ - PIT: &now, - }, - }). 
- WithQueryBuilder(query.Match("account", "xxx"))), - }, - { - name: "using reference", - body: `{"$match": {"reference": "xxx"}}`, - expectQuery: ledgerstore.NewGetTransactionsQuery(ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{ - PITFilter: ledgerstore.PITFilter{ - PIT: &now, - }, - }). - WithQueryBuilder(query.Match("reference", "xxx"))), - }, - { - name: "using destination", - body: `{"$match": {"destination": "xxx"}}`, - expectQuery: ledgerstore.NewGetTransactionsQuery(ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{ - PITFilter: ledgerstore.PITFilter{ - PIT: &now, - }, - }). - WithQueryBuilder(query.Match("destination", "xxx"))), - }, - { - name: "using source", - body: `{"$match": {"source": "xxx"}}`, - expectQuery: ledgerstore.NewGetTransactionsQuery(ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{ - PITFilter: ledgerstore.PITFilter{ - PIT: &now, - }, - }). - WithQueryBuilder(query.Match("source", "xxx"))), - }, - { - name: "using empty cursor", - queryParams: url.Values{ - "cursor": []string{bunpaginate.EncodeCursor(ledgerstore.NewGetTransactionsQuery(ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{})))}, - }, - expectQuery: ledgerstore.NewGetTransactionsQuery(ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{ - PITFilter: ledgerstore.PITFilter{}, - })), - }, - { - name: "using invalid cursor", - queryParams: url.Values{ - "cursor": []string{"XXX"}, - }, - expectStatusCode: http.StatusBadRequest, - expectedErrorCode: v2.ErrValidation, - }, - { - name: "invalid page size", - queryParams: url.Values{ - "pageSize": []string{"nan"}, - }, - expectStatusCode: http.StatusBadRequest, - expectedErrorCode: v2.ErrValidation, - }, - { - name: "page size over maximum", - queryParams: url.Values{ - "pageSize": []string{"1000000"}, - }, - expectQuery: ledgerstore.NewGetTransactionsQuery(ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{ - PITFilter: ledgerstore.PITFilter{ - PIT: &now, - }, - }). - WithPageSize(v2.MaxPageSize)), - }, - { - name: "using cursor", - queryParams: url.Values{ - "cursor": []string{"eyJwYWdlU2l6ZSI6MTUsImJvdHRvbSI6bnVsbCwiY29sdW1uIjoiaWQiLCJwYWdpbmF0aW9uSUQiOm51bGwsIm9yZGVyIjoxLCJmaWx0ZXJzIjp7InFiIjp7fSwicGFnZVNpemUiOjE1LCJvcHRpb25zIjp7InBpdCI6bnVsbCwidm9sdW1lcyI6ZmFsc2UsImVmZmVjdGl2ZVZvbHVtZXMiOmZhbHNlfX0sInJldmVyc2UiOmZhbHNlfQ"}, - }, - expectQuery: ledgerstore.NewGetTransactionsQuery(ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{})), - }, - { - name: "using $exists metadata filter", - body: `{"$exists": {"metadata": "foo"}}`, - expectQuery: ledgerstore.NewGetTransactionsQuery(ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{ - PITFilter: ledgerstore.PITFilter{ - PIT: &now, - }, - }). - WithQueryBuilder(query.Exists("metadata", "foo"))), - }, - { - name: "paginate using effective order", - queryParams: map[string][]string{"order": {"effective"}}, - expectQuery: ledgerstore.NewGetTransactionsQuery(ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{ - PITFilter: ledgerstore.PITFilter{ - PIT: &now, - }, - })). 
- WithColumn("timestamp"), - }, - } - for _, testCase := range testCases { - testCase := testCase - t.Run(testCase.name, func(t *testing.T) { - - if testCase.expectStatusCode == 0 { - testCase.expectStatusCode = http.StatusOK - } - - expectedCursor := bunpaginate.Cursor[ledger.ExpandedTransaction]{ - Data: []ledger.ExpandedTransaction{ - ledger.ExpandTransaction( - ledger.NewTransaction().WithPostings( - ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), - ), - nil, - ), - }, - } - - backend, mockLedger := newTestingBackend(t, true) - if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { - mockLedger.EXPECT(). - GetTransactions(gomock.Any(), testCase.expectQuery). - Return(&expectedCursor, nil) - } - - router := v2.NewRouter(backend, nil, metrics.NewNoOpRegistry(), auth.NewNoAuth(), testing.Verbose()) - - req := httptest.NewRequest(http.MethodGet, "/xxx/transactions", bytes.NewBufferString(testCase.body)) - rec := httptest.NewRecorder() - params := url.Values{} - if testCase.queryParams != nil { - params = testCase.queryParams - } - params.Set("pit", now.Format(time.RFC3339Nano)) - req.URL.RawQuery = params.Encode() - - router.ServeHTTP(rec, req) - - require.Equal(t, testCase.expectStatusCode, rec.Code) - if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { - cursor := sharedapi.DecodeCursorResponse[ledger.ExpandedTransaction](t, rec.Body) - require.Equal(t, expectedCursor, *cursor) - } else { - err := sharedapi.ErrorResponse{} - sharedapi.Decode(t, rec.Body, &err) - require.EqualValues(t, testCase.expectedErrorCode, err.ErrorCode) - - } - }) - } -} - -func TestCountTransactions(t *testing.T) { - t.Parallel() - - before := time.Now() - - type testCase struct { - name string - queryParams url.Values - body string - expectQuery ledgerstore.PaginatedQueryOptions[ledgerstore.PITFilterWithVolumes] - expectStatusCode int - expectedErrorCode string - } - now := time.Now() - - testCases := []testCase{ - { - name: "nominal", - expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{ - PITFilter: ledgerstore.PITFilter{ - PIT: &before, - }, - }), - }, - { - name: "using metadata", - body: `{"$match": {"metadata[roles]": "admin"}}`, - expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{ - PITFilter: ledgerstore.PITFilter{ - PIT: &before, - }, - }). - WithQueryBuilder(query.Match("metadata[roles]", "admin")), - }, - { - name: "using startTime", - body: fmt.Sprintf(`{"$gte": {"date": "%s"}}`, now.Format(time.DateFormat)), - expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{ - PITFilter: ledgerstore.PITFilter{ - PIT: &before, - }, - }). - WithQueryBuilder(query.Gte("date", now.Format(time.DateFormat))), - }, - { - name: "using endTime", - body: fmt.Sprintf(`{"$gte": {"date": "%s"}}`, now.Format(time.DateFormat)), - expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{ - PITFilter: ledgerstore.PITFilter{ - PIT: &before, - }, - }). - WithQueryBuilder(query.Gte("date", now.Format(time.DateFormat))), - }, - { - name: "using account", - body: `{"$match": {"account": "xxx"}}`, - expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{ - PITFilter: ledgerstore.PITFilter{ - PIT: &before, - }, - }). 
- WithQueryBuilder(query.Match("account", "xxx")), - }, - { - name: "using reference", - body: `{"$match": {"reference": "xxx"}}`, - expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{ - PITFilter: ledgerstore.PITFilter{ - PIT: &before, - }, - }). - WithQueryBuilder(query.Match("reference", "xxx")), - }, - { - name: "using destination", - body: `{"$match": {"destination": "xxx"}}`, - expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{ - PITFilter: ledgerstore.PITFilter{ - PIT: &before, - }, - }). - WithQueryBuilder(query.Match("destination", "xxx")), - }, - { - name: "using source", - body: `{"$match": {"source": "xxx"}}`, - expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{ - PITFilter: ledgerstore.PITFilter{ - PIT: &before, - }, - }). - WithQueryBuilder(query.Match("source", "xxx")), - }, - } - for _, testCase := range testCases { - testCase := testCase - t.Run(testCase.name, func(t *testing.T) { - - if testCase.expectStatusCode == 0 { - testCase.expectStatusCode = http.StatusNoContent - } - - backend, mockLedger := newTestingBackend(t, true) - if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { - mockLedger.EXPECT(). - CountTransactions(gomock.Any(), ledgerstore.NewGetTransactionsQuery(testCase.expectQuery)). - Return(10, nil) - } - - router := v2.NewRouter(backend, nil, metrics.NewNoOpRegistry(), auth.NewNoAuth(), testing.Verbose()) - - req := httptest.NewRequest(http.MethodHead, "/xxx/transactions?pit="+before.Format(time.RFC3339Nano), bytes.NewBufferString(testCase.body)) - rec := httptest.NewRecorder() - if testCase.queryParams != nil { - req.URL.RawQuery = testCase.queryParams.Encode() - } - - router.ServeHTTP(rec, req) - - require.Equal(t, testCase.expectStatusCode, rec.Code) - if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { - require.Equal(t, "10", rec.Header().Get("Count")) - } else { - err := sharedapi.ErrorResponse{} - sharedapi.Decode(t, rec.Body, &err) - require.EqualValues(t, testCase.expectedErrorCode, err.ErrorCode) - - } - }) - } -} - -func TestRevert(t *testing.T) { - t.Parallel() - type testCase struct { - name string - queryParams url.Values - returnTx *ledger.Transaction - returnErr error - expectForce bool - expectStatusCode int - expectErrorCode string - } - - testCases := []testCase{ - { - name: "nominal", - returnTx: ledger.NewTransaction().WithPostings( - ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), - ), - }, - { - name: "force revert", - returnTx: ledger.NewTransaction().WithPostings( - ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), - ), - expectForce: true, - queryParams: map[string][]string{"force": {"true"}}, - }, - { - name: "with insufficient fund", - returnErr: engine.NewCommandError( - command.NewErrMachine(&machine.ErrInsufficientFund{}), - ), - expectStatusCode: http.StatusBadRequest, - expectErrorCode: v2.ErrInsufficientFund, - }, - { - name: "with revert already occurring", - returnErr: engine.NewCommandError( - command.NewErrRevertTransactionOccurring(), - ), - expectStatusCode: http.StatusBadRequest, - expectErrorCode: v2.ErrRevertOccurring, - }, - { - name: "with already revert", - returnErr: engine.NewCommandError( - command.NewErrRevertTransactionAlreadyReverted(), - ), - expectStatusCode: http.StatusBadRequest, - expectErrorCode: v2.ErrAlreadyRevert, - }, - { - name: "with transaction not found", - returnErr: engine.NewCommandError( - command.NewErrRevertTransactionNotFound(), 
- ), - expectStatusCode: http.StatusNotFound, - expectErrorCode: sharedapi.ErrorCodeNotFound, - }, - } - - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - - backend, mockLedger := newTestingBackend(t, true) - mockLedger. - EXPECT(). - RevertTransaction(gomock.Any(), command.Parameters{}, big.NewInt(0), tc.expectForce, false). - Return(tc.returnTx, tc.returnErr) - - router := v2.NewRouter(backend, nil, metrics.NewNoOpRegistry(), auth.NewNoAuth(), testing.Verbose()) - - req := httptest.NewRequest(http.MethodPost, "/xxx/transactions/0/revert", nil) - if tc.queryParams != nil { - req.URL.RawQuery = tc.queryParams.Encode() - } - rec := httptest.NewRecorder() - - router.ServeHTTP(rec, req) - - if tc.expectStatusCode == 0 { - require.Equal(t, http.StatusCreated, rec.Code) - tx, ok := sharedapi.DecodeSingleResponse[ledger.Transaction](t, rec.Body) - require.True(t, ok) - require.Equal(t, *tc.returnTx, tx) - } else { - require.Equal(t, tc.expectStatusCode, rec.Code) - err := sharedapi.ErrorResponse{} - sharedapi.Decode(t, rec.Body, &err) - require.EqualValues(t, tc.expectErrorCode, err.ErrorCode) - - } - }) - } -} diff --git a/internal/api/v2/controllers_volumes.go b/internal/api/v2/controllers_volumes.go index 4316c4491..ce6a85253 100644 --- a/internal/api/v2/controllers_volumes.go +++ b/internal/api/v2/controllers_volumes.go @@ -3,32 +3,34 @@ package v2 import ( "net/http" - sharedapi "github.com/formancehq/go-libs/api" - "github.com/formancehq/ledger/internal/api/backend" - "github.com/formancehq/ledger/internal/storage/ledgerstore" + "errors" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" - "github.com/formancehq/go-libs/pointer" + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/ledger/internal/api/common" - "github.com/formancehq/go-libs/bun/bunpaginate" + "github.com/formancehq/go-libs/v2/pointer" + + "github.com/formancehq/go-libs/v2/bun/bunpaginate" ) -func getVolumesWithBalances(w http.ResponseWriter, r *http.Request) { +func readVolumes(w http.ResponseWriter, r *http.Request) { - l := backend.LedgerFromContext(r.Context()) + l := common.LedgerFromContext(r.Context()) - query, err := bunpaginate.Extract[ledgerstore.GetVolumesWithBalancesQuery](r, func() (*ledgerstore.GetVolumesWithBalancesQuery, error) { + query, err := bunpaginate.Extract[ledgercontroller.GetVolumesWithBalancesQuery](r, func() (*ledgercontroller.GetVolumesWithBalancesQuery, error) { options, err := getPaginatedQueryOptionsOfFiltersForVolumes(r) if err != nil { return nil, err } - getVolumesWithBalancesQuery := ledgerstore.NewGetVolumesWithBalancesQuery(*options) + getVolumesWithBalancesQuery := ledgercontroller.NewGetVolumesWithBalancesQuery(*options) return pointer.For(getVolumesWithBalancesQuery), nil }) if err != nil { - sharedapi.BadRequest(w, ErrValidation, err) + api.BadRequest(w, ErrValidation, err) return } @@ -36,14 +38,14 @@ func getVolumesWithBalances(w http.ResponseWriter, r *http.Request) { if err != nil { switch { - case ledgerstore.IsErrInvalidQuery(err): - sharedapi.BadRequest(w, ErrValidation, err) + case errors.Is(err, ledgercontroller.ErrInvalidQuery{}) || errors.Is(err, ledgercontroller.ErrMissingFeature{}): + api.BadRequest(w, ErrValidation, err) default: - sharedapi.InternalServerError(w, r, err) + common.HandleCommonErrors(w, r, err) } return } - sharedapi.RenderCursor(w, *cursor) + api.RenderCursor(w, *cursor) } diff --git a/internal/api/v2/controllers_volumes_test.go 
b/internal/api/v2/controllers_volumes_test.go index 8ff837c5e..7dc7777ed 100644 --- a/internal/api/v2/controllers_volumes_test.go +++ b/internal/api/v2/controllers_volumes_test.go @@ -1,25 +1,24 @@ -package v2_test +package v2 import ( "bytes" - "math/big" "net/http" "net/http/httptest" "net/url" + "os" "testing" - "github.com/formancehq/go-libs/auth" - "github.com/formancehq/go-libs/bun/bunpaginate" - "github.com/formancehq/go-libs/time" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + + "github.com/formancehq/go-libs/v2/auth" + "github.com/formancehq/go-libs/v2/bun/bunpaginate" + "github.com/formancehq/go-libs/v2/time" - sharedapi "github.com/formancehq/go-libs/api" + "github.com/formancehq/go-libs/v2/api" ledger "github.com/formancehq/ledger/internal" - v2 "github.com/formancehq/ledger/internal/api/v2" - "github.com/formancehq/ledger/internal/opentelemetry/metrics" - "github.com/formancehq/ledger/internal/storage/ledgerstore" - "github.com/formancehq/go-libs/query" + "github.com/formancehq/go-libs/v2/query" "github.com/stretchr/testify/require" "go.uber.org/mock/gomock" ) @@ -31,55 +30,51 @@ func TestGetVolumes(t *testing.T) { name string queryParams url.Values body string - expectQuery ledgerstore.PaginatedQueryOptions[ledgerstore.FiltersForVolumes] + expectQuery ledgercontroller.PaginatedQueryOptions[ledgercontroller.FiltersForVolumes] expectStatusCode int expectedErrorCode string } before := time.Now() - zero := time.Time{} testCases := []testCase{ { name: "basic", - expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.FiltersForVolumes{ - PITFilter: ledgerstore.PITFilter{ + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.FiltersForVolumes{ + PITFilter: ledgercontroller.PITFilter{ PIT: &before, - OOT: &zero, }, UseInsertionDate: false, }). - WithPageSize(v2.DefaultPageSize), + WithPageSize(DefaultPageSize), }, { name: "using metadata", body: `{"$match": { "metadata[roles]": "admin" }}`, - expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.FiltersForVolumes{ - PITFilter: ledgerstore.PITFilter{ + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.FiltersForVolumes{ + PITFilter: ledgercontroller.PITFilter{ PIT: &before, - OOT: &zero, }, }). WithQueryBuilder(query.Match("metadata[roles]", "admin")). - WithPageSize(v2.DefaultPageSize), + WithPageSize(DefaultPageSize), }, { name: "using account", body: `{"$match": { "account": "foo" }}`, - expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.FiltersForVolumes{ - PITFilter: ledgerstore.PITFilter{ + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.FiltersForVolumes{ + PITFilter: ledgercontroller.PITFilter{ PIT: &before, - OOT: &zero, }, }). WithQueryBuilder(query.Match("account", "foo")). 
- WithPageSize(v2.DefaultPageSize), + WithPageSize(DefaultPageSize), }, { name: "using invalid query payload", body: `[]`, expectStatusCode: http.StatusBadRequest, - expectedErrorCode: v2.ErrValidation, + expectedErrorCode: ErrValidation, }, { name: "using pit", @@ -87,34 +82,31 @@ func TestGetVolumes(t *testing.T) { "pit": []string{before.Format(time.RFC3339Nano)}, "groupBy": []string{"3"}, }, - expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.FiltersForVolumes{ - PITFilter: ledgerstore.PITFilter{ + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.FiltersForVolumes{ + PITFilter: ledgercontroller.PITFilter{ PIT: &before, - OOT: &zero, }, GroupLvl: 3, - }).WithPageSize(v2.DefaultPageSize), + }).WithPageSize(DefaultPageSize), }, { name: "using Exists metadata filter", body: `{"$exists": { "metadata": "foo" }}`, - expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.FiltersForVolumes{ - PITFilter: ledgerstore.PITFilter{ + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.FiltersForVolumes{ + PITFilter: ledgercontroller.PITFilter{ PIT: &before, - OOT: &zero, }, - }).WithPageSize(v2.DefaultPageSize).WithQueryBuilder(query.Exists("metadata", "foo")), + }).WithPageSize(DefaultPageSize).WithQueryBuilder(query.Exists("metadata", "foo")), }, { name: "using balance filter", body: `{"$gte": { "balance[EUR]": 50 }}`, - expectQuery: ledgerstore.NewPaginatedQueryOptions(ledgerstore.FiltersForVolumes{ - PITFilter: ledgerstore.PITFilter{ + expectQuery: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.FiltersForVolumes{ + PITFilter: ledgercontroller.PITFilter{ PIT: &before, - OOT: &zero, }, }).WithQueryBuilder(query.Gte("balance[EUR]", float64(50))). - WithPageSize(v2.DefaultPageSize), + WithPageSize(DefaultPageSize), }, } @@ -140,14 +132,14 @@ func TestGetVolumes(t *testing.T) { }, } - backend, mockLedger := newTestingBackend(t, true) + systemController, ledgerController := newTestingSystemController(t, true) if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { - mockLedger.EXPECT(). - GetVolumesWithBalances(gomock.Any(), ledgerstore.NewGetVolumesWithBalancesQuery(testCase.expectQuery)). + ledgerController.EXPECT(). + GetVolumesWithBalances(gomock.Any(), ledgercontroller.NewGetVolumesWithBalancesQuery(testCase.expectQuery)). 
Return(&expectedCursor, nil) } - router := v2.NewRouter(backend, nil, metrics.NewNoOpRegistry(), auth.NewNoAuth(), testing.Verbose()) + router := NewRouter(systemController, auth.NewNoAuth(), os.Getenv("DEBUG") == "true") req := httptest.NewRequest(http.MethodGet, "/xxx/volumes?endTime="+before.Format(time.RFC3339Nano), bytes.NewBufferString(testCase.body)) rec := httptest.NewRecorder() @@ -163,11 +155,11 @@ func TestGetVolumes(t *testing.T) { require.Equal(t, testCase.expectStatusCode, rec.Code) if testCase.expectStatusCode < 300 && testCase.expectStatusCode >= 200 { - cursor := sharedapi.DecodeCursorResponse[ledger.VolumesWithBalanceByAssetByAccount](t, rec.Body) + cursor := api.DecodeCursorResponse[ledger.VolumesWithBalanceByAssetByAccount](t, rec.Body) require.Equal(t, expectedCursor, *cursor) } else { - err := sharedapi.ErrorResponse{} - sharedapi.Decode(t, rec.Body, &err) + err := api.ErrorResponse{} + api.Decode(t, rec.Body, &err) require.EqualValues(t, testCase.expectedErrorCode, err.ErrorCode) } }) diff --git a/internal/api/v2/errors.go b/internal/api/v2/errors.go index dbfae245a..0e88416c6 100644 --- a/internal/api/v2/errors.go +++ b/internal/api/v2/errors.go @@ -4,10 +4,9 @@ const ( ErrConflict = "CONFLICT" ErrInsufficientFund = "INSUFFICIENT_FUND" ErrValidation = "VALIDATION" - ErrRevertOccurring = "REVERT_OCCURRING" ErrAlreadyRevert = "ALREADY_REVERT" ErrNoPostings = "NO_POSTINGS" ErrCompilationFailed = "COMPILATION_FAILED" ErrMetadataOverride = "METADATA_OVERRIDE" - ErrNoScript = "NO_SCRIPT" + ErrBulkSizeExceeded = "BULK_SIZE_EXCEEDED" ) diff --git a/internal/api/v2/middlewares_metrics.go b/internal/api/v2/middlewares_metrics.go deleted file mode 100644 index 23cfa6ac4..000000000 --- a/internal/api/v2/middlewares_metrics.go +++ /dev/null @@ -1,55 +0,0 @@ -package v2 - -import ( - "net/http" - - "github.com/go-chi/chi/v5" - - "github.com/formancehq/go-libs/time" - - "github.com/formancehq/ledger/internal/opentelemetry/metrics" - "go.opentelemetry.io/otel/attribute" - "go.opentelemetry.io/otel/metric" -) - -type statusRecorder struct { - http.ResponseWriter - Status int -} - -func newStatusRecorder(w http.ResponseWriter) *statusRecorder { - return &statusRecorder{ResponseWriter: w} -} - -func (r *statusRecorder) WriteHeader(status int) { - r.Status = status - r.ResponseWriter.WriteHeader(status) -} - -func MetricsMiddleware(globalMetricsRegistry metrics.GlobalRegistry) func(h http.Handler) http.Handler { - return func(h http.Handler) http.Handler { - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - attrs := []attribute.KeyValue{} - - ctx := r.Context() - name := chi.URLParam(r, "ledger") - if name != "" { - attrs = append(attrs, attribute.String("ledger", name)) - } - - recorder := newStatusRecorder(w) - - start := time.Now() - h.ServeHTTP(recorder, r) - latency := time.Since(start) - - attrs = append(attrs, - attribute.String("route", chi.RouteContext(r.Context()).RoutePattern())) - - globalMetricsRegistry.APILatencies().Record(ctx, latency.Milliseconds(), metric.WithAttributes(attrs...)) - - attrs = append(attrs, attribute.Int("status", recorder.Status)) - globalMetricsRegistry.StatusCodes().Add(ctx, 1, metric.WithAttributes(attrs...)) - }) - } -} diff --git a/internal/api/v2/mocks.go b/internal/api/v2/mocks.go new file mode 100644 index 000000000..c082bd6a6 --- /dev/null +++ b/internal/api/v2/mocks.go @@ -0,0 +1,3 @@ +//go:generate mockgen -write_source_comment=false -write_package_comment=false -source ../../controller/system/controller.go 
-destination mocks_system_controller_test.go -package v2 --mock_names Controller=SystemController . Controller +//go:generate mockgen -write_source_comment=false -write_package_comment=false -source ../../controller/ledger/controller.go -destination mocks_ledger_controller_test.go -package v2 --mock_names Controller=LedgerController . Controller +package v2 \ No newline at end of file diff --git a/internal/api/v2/mocks_ledger_controller_test.go b/internal/api/v2/mocks_ledger_controller_test.go new file mode 100644 index 000000000..28d3291a2 --- /dev/null +++ b/internal/api/v2/mocks_ledger_controller_test.go @@ -0,0 +1,334 @@ +// Code generated by MockGen. DO NOT EDIT. +// +// Generated by this command: +// +// mockgen -write_source_comment=false -write_package_comment=false -source ../../controller/ledger/controller.go -destination mocks_ledger_controller_test.go -package v2 --mock_names Controller=LedgerController . Controller +package v2 + +import ( + context "context" + reflect "reflect" + + bunpaginate "github.com/formancehq/go-libs/v2/bun/bunpaginate" + migrations "github.com/formancehq/go-libs/v2/migrations" + ledger "github.com/formancehq/ledger/internal" + ledger0 "github.com/formancehq/ledger/internal/controller/ledger" + gomock "go.uber.org/mock/gomock" +) + +// LedgerController is a mock of Controller interface. +type LedgerController struct { + ctrl *gomock.Controller + recorder *LedgerControllerMockRecorder +} + +// LedgerControllerMockRecorder is the mock recorder for LedgerController. +type LedgerControllerMockRecorder struct { + mock *LedgerController +} + +// NewLedgerController creates a new mock instance. +func NewLedgerController(ctrl *gomock.Controller) *LedgerController { + mock := &LedgerController{ctrl: ctrl} + mock.recorder = &LedgerControllerMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *LedgerController) EXPECT() *LedgerControllerMockRecorder { + return m.recorder +} + +// CountAccounts mocks base method. +func (m *LedgerController) CountAccounts(ctx context.Context, query ledger0.ListAccountsQuery) (int, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CountAccounts", ctx, query) + ret0, _ := ret[0].(int) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// CountAccounts indicates an expected call of CountAccounts. +func (mr *LedgerControllerMockRecorder) CountAccounts(ctx, query any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CountAccounts", reflect.TypeOf((*LedgerController)(nil).CountAccounts), ctx, query) +} + +// CountTransactions mocks base method. +func (m *LedgerController) CountTransactions(ctx context.Context, query ledger0.ListTransactionsQuery) (int, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CountTransactions", ctx, query) + ret0, _ := ret[0].(int) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// CountTransactions indicates an expected call of CountTransactions. +func (mr *LedgerControllerMockRecorder) CountTransactions(ctx, query any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CountTransactions", reflect.TypeOf((*LedgerController)(nil).CountTransactions), ctx, query) +} + +// CreateTransaction mocks base method. 
+func (m *LedgerController) CreateTransaction(ctx context.Context, parameters ledger0.Parameters[ledger0.RunScript]) (*ledger.CreatedTransaction, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CreateTransaction", ctx, parameters) + ret0, _ := ret[0].(*ledger.CreatedTransaction) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// CreateTransaction indicates an expected call of CreateTransaction. +func (mr *LedgerControllerMockRecorder) CreateTransaction(ctx, parameters any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateTransaction", reflect.TypeOf((*LedgerController)(nil).CreateTransaction), ctx, parameters) +} + +// DeleteAccountMetadata mocks base method. +func (m *LedgerController) DeleteAccountMetadata(ctx context.Context, parameters ledger0.Parameters[ledger0.DeleteAccountMetadata]) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "DeleteAccountMetadata", ctx, parameters) + ret0, _ := ret[0].(error) + return ret0 +} + +// DeleteAccountMetadata indicates an expected call of DeleteAccountMetadata. +func (mr *LedgerControllerMockRecorder) DeleteAccountMetadata(ctx, parameters any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteAccountMetadata", reflect.TypeOf((*LedgerController)(nil).DeleteAccountMetadata), ctx, parameters) +} + +// DeleteTransactionMetadata mocks base method. +func (m *LedgerController) DeleteTransactionMetadata(ctx context.Context, parameters ledger0.Parameters[ledger0.DeleteTransactionMetadata]) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "DeleteTransactionMetadata", ctx, parameters) + ret0, _ := ret[0].(error) + return ret0 +} + +// DeleteTransactionMetadata indicates an expected call of DeleteTransactionMetadata. +func (mr *LedgerControllerMockRecorder) DeleteTransactionMetadata(ctx, parameters any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteTransactionMetadata", reflect.TypeOf((*LedgerController)(nil).DeleteTransactionMetadata), ctx, parameters) +} + +// Export mocks base method. +func (m *LedgerController) Export(ctx context.Context, w ledger0.ExportWriter) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Export", ctx, w) + ret0, _ := ret[0].(error) + return ret0 +} + +// Export indicates an expected call of Export. +func (mr *LedgerControllerMockRecorder) Export(ctx, w any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Export", reflect.TypeOf((*LedgerController)(nil).Export), ctx, w) +} + +// GetAccount mocks base method. +func (m *LedgerController) GetAccount(ctx context.Context, query ledger0.GetAccountQuery) (*ledger.Account, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetAccount", ctx, query) + ret0, _ := ret[0].(*ledger.Account) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetAccount indicates an expected call of GetAccount. +func (mr *LedgerControllerMockRecorder) GetAccount(ctx, query any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAccount", reflect.TypeOf((*LedgerController)(nil).GetAccount), ctx, query) +} + +// GetAggregatedBalances mocks base method. 
+func (m *LedgerController) GetAggregatedBalances(ctx context.Context, q ledger0.GetAggregatedBalanceQuery) (ledger.BalancesByAssets, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetAggregatedBalances", ctx, q) + ret0, _ := ret[0].(ledger.BalancesByAssets) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetAggregatedBalances indicates an expected call of GetAggregatedBalances. +func (mr *LedgerControllerMockRecorder) GetAggregatedBalances(ctx, q any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAggregatedBalances", reflect.TypeOf((*LedgerController)(nil).GetAggregatedBalances), ctx, q) +} + +// GetMigrationsInfo mocks base method. +func (m *LedgerController) GetMigrationsInfo(ctx context.Context) ([]migrations.Info, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetMigrationsInfo", ctx) + ret0, _ := ret[0].([]migrations.Info) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetMigrationsInfo indicates an expected call of GetMigrationsInfo. +func (mr *LedgerControllerMockRecorder) GetMigrationsInfo(ctx any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetMigrationsInfo", reflect.TypeOf((*LedgerController)(nil).GetMigrationsInfo), ctx) +} + +// GetStats mocks base method. +func (m *LedgerController) GetStats(ctx context.Context) (ledger0.Stats, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetStats", ctx) + ret0, _ := ret[0].(ledger0.Stats) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetStats indicates an expected call of GetStats. +func (mr *LedgerControllerMockRecorder) GetStats(ctx any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetStats", reflect.TypeOf((*LedgerController)(nil).GetStats), ctx) +} + +// GetTransaction mocks base method. +func (m *LedgerController) GetTransaction(ctx context.Context, query ledger0.GetTransactionQuery) (*ledger.Transaction, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetTransaction", ctx, query) + ret0, _ := ret[0].(*ledger.Transaction) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetTransaction indicates an expected call of GetTransaction. +func (mr *LedgerControllerMockRecorder) GetTransaction(ctx, query any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTransaction", reflect.TypeOf((*LedgerController)(nil).GetTransaction), ctx, query) +} + +// GetVolumesWithBalances mocks base method. +func (m *LedgerController) GetVolumesWithBalances(ctx context.Context, q ledger0.GetVolumesWithBalancesQuery) (*bunpaginate.Cursor[ledger.VolumesWithBalanceByAssetByAccount], error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetVolumesWithBalances", ctx, q) + ret0, _ := ret[0].(*bunpaginate.Cursor[ledger.VolumesWithBalanceByAssetByAccount]) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetVolumesWithBalances indicates an expected call of GetVolumesWithBalances. +func (mr *LedgerControllerMockRecorder) GetVolumesWithBalances(ctx, q any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetVolumesWithBalances", reflect.TypeOf((*LedgerController)(nil).GetVolumesWithBalances), ctx, q) +} + +// Import mocks base method. 
+func (m *LedgerController) Import(ctx context.Context, stream chan ledger.Log) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Import", ctx, stream) + ret0, _ := ret[0].(error) + return ret0 +} + +// Import indicates an expected call of Import. +func (mr *LedgerControllerMockRecorder) Import(ctx, stream any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Import", reflect.TypeOf((*LedgerController)(nil).Import), ctx, stream) +} + +// IsDatabaseUpToDate mocks base method. +func (m *LedgerController) IsDatabaseUpToDate(ctx context.Context) (bool, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "IsDatabaseUpToDate", ctx) + ret0, _ := ret[0].(bool) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// IsDatabaseUpToDate indicates an expected call of IsDatabaseUpToDate. +func (mr *LedgerControllerMockRecorder) IsDatabaseUpToDate(ctx any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "IsDatabaseUpToDate", reflect.TypeOf((*LedgerController)(nil).IsDatabaseUpToDate), ctx) +} + +// ListAccounts mocks base method. +func (m *LedgerController) ListAccounts(ctx context.Context, query ledger0.ListAccountsQuery) (*bunpaginate.Cursor[ledger.Account], error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ListAccounts", ctx, query) + ret0, _ := ret[0].(*bunpaginate.Cursor[ledger.Account]) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ListAccounts indicates an expected call of ListAccounts. +func (mr *LedgerControllerMockRecorder) ListAccounts(ctx, query any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListAccounts", reflect.TypeOf((*LedgerController)(nil).ListAccounts), ctx, query) +} + +// ListLogs mocks base method. +func (m *LedgerController) ListLogs(ctx context.Context, query ledger0.GetLogsQuery) (*bunpaginate.Cursor[ledger.Log], error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ListLogs", ctx, query) + ret0, _ := ret[0].(*bunpaginate.Cursor[ledger.Log]) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ListLogs indicates an expected call of ListLogs. +func (mr *LedgerControllerMockRecorder) ListLogs(ctx, query any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListLogs", reflect.TypeOf((*LedgerController)(nil).ListLogs), ctx, query) +} + +// ListTransactions mocks base method. +func (m *LedgerController) ListTransactions(ctx context.Context, query ledger0.ListTransactionsQuery) (*bunpaginate.Cursor[ledger.Transaction], error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ListTransactions", ctx, query) + ret0, _ := ret[0].(*bunpaginate.Cursor[ledger.Transaction]) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ListTransactions indicates an expected call of ListTransactions. +func (mr *LedgerControllerMockRecorder) ListTransactions(ctx, query any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListTransactions", reflect.TypeOf((*LedgerController)(nil).ListTransactions), ctx, query) +} + +// RevertTransaction mocks base method. 
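The generated LedgerController above is a standard gomock mock: every method forwards to ctrl.Call and the paired recorder method registers an expectation. A minimal sketch of driving it directly, using only methods that appear in this diff (the test name is illustrative):

package v2

import (
    "context"
    "testing"

    "github.com/stretchr/testify/require"
    "go.uber.org/mock/gomock"
)

// Sketch only: exercises the generated LedgerController mock on its own.
func TestLedgerControllerMockSketch(t *testing.T) {
    ctrl := gomock.NewController(t)
    ledgerController := NewLedgerController(ctrl)

    // Register one expected call; gomock fails the test if it is never made.
    ledgerController.EXPECT().
        IsDatabaseUpToDate(gomock.Any()).
        Return(true, nil)

    upToDate, err := ledgerController.IsDatabaseUpToDate(context.Background())
    require.NoError(t, err)
    require.True(t, upToDate)
}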
+func (m *LedgerController) RevertTransaction(ctx context.Context, parameters ledger0.Parameters[ledger0.RevertTransaction]) (*ledger.RevertedTransaction, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "RevertTransaction", ctx, parameters) + ret0, _ := ret[0].(*ledger.RevertedTransaction) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// RevertTransaction indicates an expected call of RevertTransaction. +func (mr *LedgerControllerMockRecorder) RevertTransaction(ctx, parameters any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RevertTransaction", reflect.TypeOf((*LedgerController)(nil).RevertTransaction), ctx, parameters) +} + +// SaveAccountMetadata mocks base method. +func (m *LedgerController) SaveAccountMetadata(ctx context.Context, parameters ledger0.Parameters[ledger0.SaveAccountMetadata]) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "SaveAccountMetadata", ctx, parameters) + ret0, _ := ret[0].(error) + return ret0 +} + +// SaveAccountMetadata indicates an expected call of SaveAccountMetadata. +func (mr *LedgerControllerMockRecorder) SaveAccountMetadata(ctx, parameters any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SaveAccountMetadata", reflect.TypeOf((*LedgerController)(nil).SaveAccountMetadata), ctx, parameters) +} + +// SaveTransactionMetadata mocks base method. +func (m *LedgerController) SaveTransactionMetadata(ctx context.Context, parameters ledger0.Parameters[ledger0.SaveTransactionMetadata]) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "SaveTransactionMetadata", ctx, parameters) + ret0, _ := ret[0].(error) + return ret0 +} + +// SaveTransactionMetadata indicates an expected call of SaveTransactionMetadata. +func (mr *LedgerControllerMockRecorder) SaveTransactionMetadata(ctx, parameters any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SaveTransactionMetadata", reflect.TypeOf((*LedgerController)(nil).SaveTransactionMetadata), ctx, parameters) +} diff --git a/internal/api/v2/mocks_system_controller_test.go b/internal/api/v2/mocks_system_controller_test.go new file mode 100644 index 000000000..45d1eaaa5 --- /dev/null +++ b/internal/api/v2/mocks_system_controller_test.go @@ -0,0 +1,126 @@ +// Code generated by MockGen. DO NOT EDIT. +// +// Generated by this command: +// +// mockgen -write_source_comment=false -write_package_comment=false -source ../../controller/system/controller.go -destination mocks_system_controller_test.go -package v2 --mock_names Controller=SystemController . Controller +package v2 + +import ( + context "context" + reflect "reflect" + + bunpaginate "github.com/formancehq/go-libs/v2/bun/bunpaginate" + ledger "github.com/formancehq/ledger/internal" + ledger0 "github.com/formancehq/ledger/internal/controller/ledger" + gomock "go.uber.org/mock/gomock" +) + +// SystemController is a mock of Controller interface. +type SystemController struct { + ctrl *gomock.Controller + recorder *SystemControllerMockRecorder +} + +// SystemControllerMockRecorder is the mock recorder for SystemController. +type SystemControllerMockRecorder struct { + mock *SystemController +} + +// NewSystemController creates a new mock instance. +func NewSystemController(ctrl *gomock.Controller) *SystemController { + mock := &SystemController{ctrl: ctrl} + mock.recorder = &SystemControllerMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. 
+func (m *SystemController) EXPECT() *SystemControllerMockRecorder { + return m.recorder +} + +// CreateLedger mocks base method. +func (m *SystemController) CreateLedger(ctx context.Context, name string, configuration ledger.Configuration) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CreateLedger", ctx, name, configuration) + ret0, _ := ret[0].(error) + return ret0 +} + +// CreateLedger indicates an expected call of CreateLedger. +func (mr *SystemControllerMockRecorder) CreateLedger(ctx, name, configuration any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateLedger", reflect.TypeOf((*SystemController)(nil).CreateLedger), ctx, name, configuration) +} + +// DeleteLedgerMetadata mocks base method. +func (m *SystemController) DeleteLedgerMetadata(ctx context.Context, param, key string) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "DeleteLedgerMetadata", ctx, param, key) + ret0, _ := ret[0].(error) + return ret0 +} + +// DeleteLedgerMetadata indicates an expected call of DeleteLedgerMetadata. +func (mr *SystemControllerMockRecorder) DeleteLedgerMetadata(ctx, param, key any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteLedgerMetadata", reflect.TypeOf((*SystemController)(nil).DeleteLedgerMetadata), ctx, param, key) +} + +// GetLedger mocks base method. +func (m *SystemController) GetLedger(ctx context.Context, name string) (*ledger.Ledger, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetLedger", ctx, name) + ret0, _ := ret[0].(*ledger.Ledger) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetLedger indicates an expected call of GetLedger. +func (mr *SystemControllerMockRecorder) GetLedger(ctx, name any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetLedger", reflect.TypeOf((*SystemController)(nil).GetLedger), ctx, name) +} + +// GetLedgerController mocks base method. +func (m *SystemController) GetLedgerController(ctx context.Context, name string) (ledger0.Controller, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetLedgerController", ctx, name) + ret0, _ := ret[0].(ledger0.Controller) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetLedgerController indicates an expected call of GetLedgerController. +func (mr *SystemControllerMockRecorder) GetLedgerController(ctx, name any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetLedgerController", reflect.TypeOf((*SystemController)(nil).GetLedgerController), ctx, name) +} + +// ListLedgers mocks base method. +func (m *SystemController) ListLedgers(ctx context.Context, query ledger0.ListLedgersQuery) (*bunpaginate.Cursor[ledger.Ledger], error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ListLedgers", ctx, query) + ret0, _ := ret[0].(*bunpaginate.Cursor[ledger.Ledger]) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ListLedgers indicates an expected call of ListLedgers. +func (mr *SystemControllerMockRecorder) ListLedgers(ctx, query any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListLedgers", reflect.TypeOf((*SystemController)(nil).ListLedgers), ctx, query) +} + +// UpdateLedgerMetadata mocks base method. 
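The v2 handler tests in this diff take both mocks from newTestingSystemController, whose body sits outside this excerpt. A hedged sketch of one plausible wiring, in which resolving any ledger through the system mock hands back the ledger mock (the helper name is hypothetical):

package v2

import (
    "testing"

    "go.uber.org/mock/gomock"
)

// Sketch only: one plausible way to wire the two generated mocks together.
func newWiredControllers(t *testing.T) (*SystemController, *LedgerController) {
    ctrl := gomock.NewController(t)
    systemController := NewSystemController(ctrl)
    ledgerController := NewLedgerController(ctrl)

    // Whenever a handler resolves a ledger by name, return the ledger mock.
    systemController.EXPECT().
        GetLedgerController(gomock.Any(), gomock.Any()).
        Return(ledgerController, nil).
        AnyTimes()

    return systemController, ledgerController
}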
+func (m_2 *SystemController) UpdateLedgerMetadata(ctx context.Context, name string, m map[string]string) error { + m_2.ctrl.T.Helper() + ret := m_2.ctrl.Call(m_2, "UpdateLedgerMetadata", ctx, name, m) + ret0, _ := ret[0].(error) + return ret0 +} + +// UpdateLedgerMetadata indicates an expected call of UpdateLedgerMetadata. +func (mr *SystemControllerMockRecorder) UpdateLedgerMetadata(ctx, name, m any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateLedgerMetadata", reflect.TypeOf((*SystemController)(nil).UpdateLedgerMetadata), ctx, name, m) +} diff --git a/internal/api/v2/query.go b/internal/api/v2/query.go index addd05762..22cc3b868 100644 --- a/internal/api/v2/query.go +++ b/internal/api/v2/query.go @@ -2,13 +2,11 @@ package v2 import ( "net/http" - "strings" - "github.com/formancehq/go-libs/api" - "github.com/formancehq/go-libs/bun/bunpaginate" + "github.com/formancehq/ledger/internal/controller/ledger" - "github.com/formancehq/ledger/internal/engine/command" - "github.com/pkg/errors" + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/go-libs/v2/bun/bunpaginate" ) const ( @@ -18,19 +16,10 @@ const ( QueryKeyCursor = "cursor" ) -var ( - ErrInvalidBalanceOperator = errors.New( - "invalid parameter 'balanceOperator', should be one of 'e, ne, gt, gte, lt, lte'") - ErrInvalidStartTime = errors.New("invalid 'startTime' query param") - ErrInvalidEndTime = errors.New("invalid 'endTime' query param") -) - -func getCommandParameters(r *http.Request) command.Parameters { - dryRunAsString := r.URL.Query().Get("dryRun") - dryRun := strings.ToUpper(dryRunAsString) == "YES" || strings.ToUpper(dryRunAsString) == "TRUE" || dryRunAsString == "1" - - return command.Parameters{ - DryRun: dryRun, +func getCommandParameters[INPUT any](r *http.Request, input INPUT) ledger.Parameters[INPUT] { + return ledger.Parameters[INPUT]{ + DryRun: api.QueryParamBool(r, "dryRun"), IdempotencyKey: api.IdempotencyKeyFromRequest(r), + Input: input, } } diff --git a/internal/api/v2/routes.go b/internal/api/v2/routes.go index 5c13204c9..0bc4394b7 100644 --- a/internal/api/v2/routes.go +++ b/internal/api/v2/routes.go @@ -1,51 +1,40 @@ package v2 import ( + nooptracer "go.opentelemetry.io/otel/trace/noop" "net/http" - "github.com/go-chi/chi/v5" + "github.com/formancehq/ledger/internal/controller/system" "go.opentelemetry.io/otel/attribute" "go.opentelemetry.io/otel/trace" - "github.com/formancehq/go-libs/service" + "github.com/formancehq/go-libs/v2/service" - "github.com/formancehq/go-libs/auth" - "github.com/formancehq/go-libs/health" - "github.com/formancehq/ledger/internal/api/backend" - "github.com/formancehq/ledger/internal/opentelemetry/metrics" - "github.com/go-chi/chi/v5/middleware" - "github.com/go-chi/cors" + "github.com/formancehq/go-libs/v2/auth" + "github.com/formancehq/ledger/internal/api/common" + "github.com/go-chi/chi/v5" ) func NewRouter( - b backend.Backend, - healthController *health.HealthController, - globalMetricsRegistry metrics.GlobalRegistry, + systemController system.Controller, authenticator auth.Authenticator, debug bool, + opts ...RouterOption, ) chi.Router { - router := chi.NewMux() - - router.Use( - cors.New(cors.Options{ - AllowOriginFunc: func(r *http.Request, origin string) bool { - return true - }, - AllowCredentials: true, - }).Handler, - MetricsMiddleware(globalMetricsRegistry), - middleware.Recoverer, - ) + routerOptions := routerOptions{} + for _, opt := range append(defaultRouterOptions, opts...) 
{ + opt(&routerOptions) + } - router.Get("/_healthcheck", healthController.Check) - router.Get("/_info", getInfo(b)) + router := chi.NewMux() router.Group(func(router chi.Router) { + router.Use(routerOptions.middlewares...) router.Use(auth.Middleware(authenticator)) router.Use(service.OTLPMiddleware("ledger", debug)) - router.Get("/", listLedgers(b)) + router.Get("/", listLedgers(systemController)) router.Route("/{ledger}", func(router chi.Router) { router.Use(func(handler http.Handler) http.Handler { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { @@ -55,45 +44,77 @@ func NewRouter( handler.ServeHTTP(w, r) }) }) - router.Post("/", createLedger(b)) - router.Get("/", getLedger(b)) - router.Put("/metadata", updateLedgerMetadata(b)) - router.Delete("/metadata/{key}", deleteLedgerMetadata(b)) + router.Post("/", createLedger(systemController)) + router.Get("/", readLedger(systemController)) + router.Put("/metadata", updateLedgerMetadata(systemController)) + router.Delete("/metadata/{key}", deleteLedgerMetadata(systemController)) - router.With(backend.LedgerMiddleware(b, []string{"/_info"})).Group(func(router chi.Router) { - router.Post("/_bulk", bulkHandler) + router.With(common.LedgerMiddleware(systemController, func(r *http.Request) string { + return chi.URLParam(r, "ledger") + }, routerOptions.tracer, "/_info")).Group(func(router chi.Router) { + router.Post("/_bulk", bulkHandler(routerOptions.bulkMaxSize)) // LedgerController router.Get("/_info", getLedgerInfo) - router.Get("/stats", getStats) - router.Get("/logs", getLogs) + router.Get("/stats", readStats) + router.Get("/logs", listLogs) router.Post("/logs/import", importLogs) router.Post("/logs/export", exportLogs) // AccountController - router.Get("/accounts", getAccounts) + router.Get("/accounts", listAccounts) router.Head("/accounts", countAccounts) - router.Get("/accounts/{address}", getAccount) - router.Post("/accounts/{address}/metadata", postAccountMetadata) + router.Get("/accounts/{address}", readAccount) + router.Post("/accounts/{address}/metadata", addAccountMetadata) router.Delete("/accounts/{address}/metadata/{key}", deleteAccountMetadata) // TransactionController - router.Get("/transactions", getTransactions) + router.Get("/transactions", listTransactions) router.Head("/transactions", countTransactions) - router.Post("/transactions", postTransaction) + router.Post("/transactions", createTransaction) - router.Get("/transactions/{id}", getTransaction) + router.Get("/transactions/{id}", readTransaction) router.Post("/transactions/{id}/revert", revertTransaction) - router.Post("/transactions/{id}/metadata", postTransactionMetadata) + router.Post("/transactions/{id}/metadata", addTransactionMetadata) router.Delete("/transactions/{id}/metadata/{key}", deleteTransactionMetadata) - router.Get("/aggregate/balances", getBalancesAggregated) + router.Get("/aggregate/balances", readBalancesAggregated) - router.Get("/volumes", getVolumesWithBalances) + router.Get("/volumes", readVolumes) }) }) }) return router } + +type routerOptions struct { + tracer trace.Tracer + middlewares []func(http.Handler) http.Handler + bulkMaxSize int +} + +type RouterOption func(ro *routerOptions) + +func WithTracer(tracer trace.Tracer) RouterOption { + return func(ro *routerOptions) { + ro.tracer = tracer + } +} + +func WithMiddlewares(middlewares ...func(http.Handler) http.Handler) RouterOption { + return func(ro *routerOptions) { + ro.middlewares = append(ro.middlewares, middlewares...) 
+ } +} + +func WithBulkMaxSize(bulkMaxSize int) RouterOption { + return func(ro *routerOptions) { + ro.bulkMaxSize = bulkMaxSize + } +} + +var defaultRouterOptions = []RouterOption{ + WithTracer(nooptracer.Tracer{}), +} diff --git a/internal/api/v2/utils.go b/internal/api/v2/utils.go deleted file mode 100644 index 22facac1a..000000000 --- a/internal/api/v2/utils.go +++ /dev/null @@ -1,159 +0,0 @@ -package v2 - -import ( - "io" - "net/http" - "strconv" - - "github.com/formancehq/go-libs/bun/bunpaginate" - "github.com/formancehq/go-libs/time" - - sharedapi "github.com/formancehq/go-libs/api" - "github.com/formancehq/go-libs/collectionutils" - "github.com/formancehq/go-libs/pointer" - "github.com/formancehq/go-libs/query" - "github.com/formancehq/ledger/internal/storage/ledgerstore" -) - -func getPITOOTFilter(r *http.Request) (*ledgerstore.PITFilter, error) { - pitString := r.URL.Query().Get("endTime") - ootString := r.URL.Query().Get("startTime") - - pit := time.Now() - oot := time.Time{} - - if pitString != "" { - var err error - pit, err = time.ParseTime(pitString) - if err != nil { - return nil, err - } - } - - if ootString != "" { - var err error - oot, err = time.ParseTime(ootString) - if err != nil { - return nil, err - } - } - - return &ledgerstore.PITFilter{ - PIT: &pit, - OOT: &oot, - }, nil -} - -func getPITFilter(r *http.Request) (*ledgerstore.PITFilter, error) { - pitString := r.URL.Query().Get("pit") - - pit := time.Now() - - if pitString != "" { - var err error - pit, err = time.ParseTime(pitString) - if err != nil { - return nil, err - } - } - - return &ledgerstore.PITFilter{ - PIT: &pit, - }, nil -} - -func getPITFilterWithVolumes(r *http.Request) (*ledgerstore.PITFilterWithVolumes, error) { - pit, err := getPITFilter(r) - if err != nil { - return nil, err - } - return &ledgerstore.PITFilterWithVolumes{ - PITFilter: *pit, - ExpandVolumes: collectionutils.Contains(r.URL.Query()["expand"], "volumes"), - ExpandEffectiveVolumes: collectionutils.Contains(r.URL.Query()["expand"], "effectiveVolumes"), - }, nil -} - -func getFiltersForVolumes(r *http.Request) (*ledgerstore.FiltersForVolumes, error) { - pit, err := getPITOOTFilter(r) - if err != nil { - return nil, err - } - - useInsertionDate := sharedapi.QueryParamBool(r, "insertionDate") - groupLvl := 0 - - groupLvlStr := r.URL.Query().Get("groupBy") - if groupLvlStr != "" { - groupLvlInt, err := strconv.Atoi(groupLvlStr) - if err != nil { - return nil, err - } - if groupLvlInt > 0 { - groupLvl = groupLvlInt - } - } - return &ledgerstore.FiltersForVolumes{ - PITFilter: *pit, - UseInsertionDate: useInsertionDate, - GroupLvl: uint(groupLvl), - }, nil -} - -func getQueryBuilder(r *http.Request) (query.Builder, error) { - q := r.URL.Query().Get("query") - if q == "" { - data, err := io.ReadAll(r.Body) - if err != nil { - return nil, err - } - q = string(data) - } - - if len(q) > 0 { - return query.ParseJSON(q) - } - return nil, nil -} - -func getPaginatedQueryOptionsOfPITFilterWithVolumes(r *http.Request) (*ledgerstore.PaginatedQueryOptions[ledgerstore.PITFilterWithVolumes], error) { - qb, err := getQueryBuilder(r) - if err != nil { - return nil, err - } - - pitFilter, err := getPITFilterWithVolumes(r) - if err != nil { - return nil, err - } - - pageSize, err := bunpaginate.GetPageSize(r) - if err != nil { - return nil, err - } - - return pointer.For(ledgerstore.NewPaginatedQueryOptions(*pitFilter). - WithQueryBuilder(qb). 
- WithPageSize(pageSize)), nil -} - -func getPaginatedQueryOptionsOfFiltersForVolumes(r *http.Request) (*ledgerstore.PaginatedQueryOptions[ledgerstore.FiltersForVolumes], error) { - qb, err := getQueryBuilder(r) - if err != nil { - return nil, err - } - - filtersForVolumes, err := getFiltersForVolumes(r) - if err != nil { - return nil, err - } - - pageSize, err := bunpaginate.GetPageSize(r) - if err != nil { - return nil, err - } - - return pointer.For(ledgerstore.NewPaginatedQueryOptions(*filtersForVolumes). - WithPageSize(pageSize). - WithQueryBuilder(qb)), nil -} diff --git a/internal/bus/listener.go b/internal/bus/listener.go new file mode 100644 index 000000000..8fb268a6f --- /dev/null +++ b/internal/bus/listener.go @@ -0,0 +1,76 @@ +package bus + +import ( + "context" + + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + + "github.com/ThreeDotsLabs/watermill/message" + "github.com/formancehq/go-libs/v2/logging" + "github.com/formancehq/go-libs/v2/metadata" + "github.com/formancehq/go-libs/v2/publish" + ledger "github.com/formancehq/ledger/internal" + "github.com/formancehq/ledger/pkg/events" +) + +type LedgerListener struct { + publisher message.Publisher +} + +var _ ledgercontroller.Listener = &LedgerListener{} + +func NewLedgerListener(publisher message.Publisher) *LedgerListener { + return &LedgerListener{ + publisher: publisher, + } +} + +func (lis *LedgerListener) CommittedTransactions(ctx context.Context, l string, txs ledger.Transaction, accountMetadata ledger.AccountMetadata) { + lis.publish(ctx, events.EventTypeCommittedTransactions, + newEventCommittedTransactions(CommittedTransactions{ + Ledger: l, + Transactions: []ledger.Transaction{txs}, + AccountMetadata: accountMetadata, + })) +} + +func (lis *LedgerListener) SavedMetadata(ctx context.Context, l string, targetType, targetID string, metadata metadata.Metadata) { + lis.publish(ctx, events.EventTypeSavedMetadata, + newEventSavedMetadata(SavedMetadata{ + Ledger: l, + TargetType: targetType, + TargetID: targetID, + Metadata: metadata, + })) +} + +func (lis *LedgerListener) RevertedTransaction(ctx context.Context, l string, reverted, revert ledger.Transaction) { + lis.publish(ctx, events.EventTypeRevertedTransaction, + newEventRevertedTransaction(RevertedTransaction{ + Ledger: l, + RevertedTransaction: reverted, + RevertTransaction: revert, + })) +} + +func (lis *LedgerListener) DeletedMetadata(ctx context.Context, l string, targetType string, targetID any, key string) { + lis.publish(ctx, events.EventTypeDeletedMetadata, + newEventDeletedMetadata(DeletedMetadata{ + Ledger: l, + TargetType: targetType, + TargetID: targetID, + Key: key, + })) +} + +func (lis *LedgerListener) publish(ctx context.Context, topic string, ev publish.EventMessage) { + msg := publish.NewMessage(ctx, ev) + logging.FromContext(ctx).WithFields(map[string]any{ + "payload": string(msg.Payload), + "topic": topic, + }).Debugf("send event %s", ev.Type) + if err := lis.publisher.Publish(topic, msg); err != nil { + logging.FromContext(ctx).Errorf("publishing message: %s", err) + return + } +} diff --git a/internal/bus/monitor_test.go b/internal/bus/listener_test.go similarity index 71% rename from internal/bus/monitor_test.go rename to internal/bus/listener_test.go index 6749cab98..aca4ecacc 100644 --- a/internal/bus/monitor_test.go +++ b/internal/bus/listener_test.go @@ -2,6 +2,7 @@ package bus import ( "context" + "os" "testing" "time" @@ -9,7 +10,7 @@ import ( "github.com/ThreeDotsLabs/watermill" 
"github.com/ThreeDotsLabs/watermill/pubsub/gochannel" - topicmapper "github.com/formancehq/go-libs/publish/topic_mapper" + topicmapper "github.com/formancehq/go-libs/v2/publish/topic_mapper" "github.com/pborman/uuid" "github.com/stretchr/testify/require" ) @@ -20,15 +21,15 @@ func TestMonitor(t *testing.T) { gochannel.Config{ BlockPublishUntilSubscriberAck: true, }, - watermill.NewStdLogger(testing.Verbose(), testing.Verbose()), + watermill.NewStdLogger(os.Getenv("DEBUG") == "true", os.Getenv("DEBUG") == "true"), ) messages, err := pubSub.Subscribe(context.Background(), "testing") require.NoError(t, err) p := topicmapper.NewPublisherDecorator(pubSub, map[string]string{ "*": "testing", }) - m := NewLedgerMonitor(p, uuid.New()) - go m.CommittedTransactions(context.Background(), ledger.Transaction{}, nil) + m := NewLedgerListener(p) + go m.CommittedTransactions(context.Background(), uuid.New(), ledger.Transaction{}, nil) select { case m := <-messages: @@ -36,5 +37,4 @@ func TestMonitor(t *testing.T) { case <-time.After(time.Second): t.Fatal("should have a message") } - } diff --git a/internal/bus/message.go b/internal/bus/message.go index b76bfb7aa..a268c00de 100644 --- a/internal/bus/message.go +++ b/internal/bus/message.go @@ -1,9 +1,9 @@ package bus import ( - "github.com/formancehq/go-libs/metadata" - "github.com/formancehq/go-libs/publish" - "github.com/formancehq/go-libs/time" + "github.com/formancehq/go-libs/v2/metadata" + "github.com/formancehq/go-libs/v2/publish" + "github.com/formancehq/go-libs/v2/time" ledger "github.com/formancehq/ledger/internal" "github.com/formancehq/ledger/pkg/events" ) @@ -31,13 +31,13 @@ type SavedMetadata struct { Metadata metadata.Metadata `json:"metadata"` } -func newEventSavedMetadata(metadata SavedMetadata) publish.EventMessage { +func newEventSavedMetadata(savedMetadata SavedMetadata) publish.EventMessage { return publish.EventMessage{ Date: time.Now().Time, App: events.EventApp, Version: events.EventVersion, Type: events.EventTypeSavedMetadata, - Payload: metadata, + Payload: savedMetadata, } } @@ -47,13 +47,13 @@ type RevertedTransaction struct { RevertTransaction ledger.Transaction `json:"revertTransaction"` } -func newEventRevertedTransaction(tx RevertedTransaction) publish.EventMessage { +func newEventRevertedTransaction(revertedTransaction RevertedTransaction) publish.EventMessage { return publish.EventMessage{ Date: time.Now().Time, App: events.EventApp, Version: events.EventVersion, Type: events.EventTypeRevertedTransaction, - Payload: tx, + Payload: revertedTransaction, } } @@ -64,12 +64,12 @@ type DeletedMetadata struct { Key string `json:"key"` } -func newEventDeletedMetadata(tx DeletedMetadata) publish.EventMessage { +func newEventDeletedMetadata(deletedMetadata DeletedMetadata) publish.EventMessage { return publish.EventMessage{ Date: time.Now().Time, App: events.EventApp, Version: events.EventVersion, Type: events.EventTypeDeletedMetadata, - Payload: tx, + Payload: deletedMetadata, } } diff --git a/internal/bus/module.go b/internal/bus/module.go new file mode 100644 index 000000000..bf21c682c --- /dev/null +++ b/internal/bus/module.go @@ -0,0 +1,12 @@ +package bus + +import ( + "github.com/formancehq/ledger/internal/controller/ledger" + "go.uber.org/fx" +) + +func NewFxModule() fx.Option { + return fx.Options( + fx.Provide(fx.Annotate(NewLedgerListener, fx.As(new(ledger.Listener)))), + ) +} diff --git a/internal/bus/monitor.go b/internal/bus/monitor.go deleted file mode 100644 index 7d3d4545f..000000000 --- a/internal/bus/monitor.go +++ 
/dev/null @@ -1,97 +0,0 @@ -package bus - -import ( - "context" - - "github.com/ThreeDotsLabs/watermill/message" - "github.com/formancehq/go-libs/logging" - "github.com/formancehq/go-libs/metadata" - "github.com/formancehq/go-libs/publish" - ledger "github.com/formancehq/ledger/internal" - "github.com/formancehq/ledger/pkg/events" -) - -type Monitor interface { - CommittedTransactions(ctx context.Context, res ledger.Transaction, accountMetadata map[string]metadata.Metadata) - SavedMetadata(ctx context.Context, targetType, id string, metadata metadata.Metadata) - RevertedTransaction(ctx context.Context, reverted, revert *ledger.Transaction) - DeletedMetadata(ctx context.Context, targetType string, targetID any, key string) -} - -type noOpMonitor struct{} - -func (n noOpMonitor) DeletedMetadata(ctx context.Context, targetType string, targetID any, key string) { -} - -func (n noOpMonitor) CommittedTransactions(ctx context.Context, res ledger.Transaction, accountMetadata map[string]metadata.Metadata) { -} -func (n noOpMonitor) SavedMetadata(ctx context.Context, targetType string, id string, metadata metadata.Metadata) { -} -func (n noOpMonitor) RevertedTransaction(ctx context.Context, reverted, revert *ledger.Transaction) { -} - -var _ Monitor = &noOpMonitor{} - -func NewNoOpMonitor() *noOpMonitor { - return &noOpMonitor{} -} - -type ledgerMonitor struct { - publisher message.Publisher - ledgerName string -} - -var _ Monitor = &ledgerMonitor{} - -func NewLedgerMonitor(publisher message.Publisher, ledgerName string) *ledgerMonitor { - m := &ledgerMonitor{ - publisher: publisher, - ledgerName: ledgerName, - } - return m -} - -func (l *ledgerMonitor) CommittedTransactions(ctx context.Context, txs ledger.Transaction, accountMetadata map[string]metadata.Metadata) { - l.publish(ctx, events.EventTypeCommittedTransactions, - newEventCommittedTransactions(CommittedTransactions{ - Ledger: l.ledgerName, - Transactions: []ledger.Transaction{txs}, - AccountMetadata: accountMetadata, - })) -} - -func (l *ledgerMonitor) SavedMetadata(ctx context.Context, targetType, targetID string, metadata metadata.Metadata) { - l.publish(ctx, events.EventTypeSavedMetadata, - newEventSavedMetadata(SavedMetadata{ - Ledger: l.ledgerName, - TargetType: targetType, - TargetID: targetID, - Metadata: metadata, - })) -} - -func (l *ledgerMonitor) RevertedTransaction(ctx context.Context, reverted, revert *ledger.Transaction) { - l.publish(ctx, events.EventTypeRevertedTransaction, - newEventRevertedTransaction(RevertedTransaction{ - Ledger: l.ledgerName, - RevertedTransaction: *reverted, - RevertTransaction: *revert, - })) -} - -func (l *ledgerMonitor) DeletedMetadata(ctx context.Context, targetType string, targetID any, key string) { - l.publish(ctx, events.EventTypeDeletedMetadata, - newEventDeletedMetadata(DeletedMetadata{ - Ledger: l.ledgerName, - TargetType: targetType, - TargetID: targetID, - Key: key, - })) -} - -func (l *ledgerMonitor) publish(ctx context.Context, topic string, ev publish.EventMessage) { - if err := l.publisher.Publish(topic, publish.NewMessage(ctx, ev)); err != nil { - logging.FromContext(ctx).Errorf("publishing message: %s", err) - return - } -} diff --git a/internal/controller/ledger/README.md b/internal/controller/ledger/README.md new file mode 100644 index 000000000..274f02178 --- /dev/null +++ b/internal/controller/ledger/README.md @@ -0,0 +1,1897 @@ + + +# ledger + +```go +import "github.com/formancehq/ledger/internal/controller/ledger" +``` + +Ledger Controller doc + +## Index + +- 
[Variables](<#variables>) +- [type Balance](<#Balance>) +- [type BalanceQuery](<#BalanceQuery>) +- [type Balances](<#Balances>) +- [type CacheConfiguration](<#CacheConfiguration>) +- [type CachedCompiler](<#CachedCompiler>) + - [func NewCachedCompiler\(compiler Compiler, configuration CacheConfiguration\) \*CachedCompiler](<#NewCachedCompiler>) + - [func \(c \*CachedCompiler\) Compile\(script string\) \(\*program.Program, error\)](<#CachedCompiler.Compile>) +- [type Compiler](<#Compiler>) +- [type CompilerFn](<#CompilerFn>) + - [func NewDefaultCompiler\(\) CompilerFn](<#NewDefaultCompiler>) + - [func \(fn CompilerFn\) Compile\(script string\) \(\*program.Program, error\)](<#CompilerFn.Compile>) +- [type Controller](<#Controller>) +- [type ControllerWithCache](<#ControllerWithCache>) + - [func NewControllerWithCache\(ledger ledger.Ledger, underlying Controller, registry \*StateRegistry\) \*ControllerWithCache](<#NewControllerWithCache>) + - [func \(c \*ControllerWithCache\) IsDatabaseUpToDate\(ctx context.Context\) \(bool, error\)](<#ControllerWithCache.IsDatabaseUpToDate>) +- [type ControllerWithEvents](<#ControllerWithEvents>) + - [func NewControllerWithEvents\(ledger ledger.Ledger, underlying Controller, listener Listener\) \*ControllerWithEvents](<#NewControllerWithEvents>) + - [func \(ctrl \*ControllerWithEvents\) CreateTransaction\(ctx context.Context, parameters Parameters\[RunScript\]\) \(\*ledger.CreatedTransaction, error\)](<#ControllerWithEvents.CreateTransaction>) + - [func \(ctrl \*ControllerWithEvents\) DeleteAccountMetadata\(ctx context.Context, parameters Parameters\[DeleteAccountMetadata\]\) error](<#ControllerWithEvents.DeleteAccountMetadata>) + - [func \(ctrl \*ControllerWithEvents\) DeleteTransactionMetadata\(ctx context.Context, parameters Parameters\[DeleteTransactionMetadata\]\) error](<#ControllerWithEvents.DeleteTransactionMetadata>) + - [func \(ctrl \*ControllerWithEvents\) RevertTransaction\(ctx context.Context, parameters Parameters\[RevertTransaction\]\) \(\*ledger.RevertedTransaction, error\)](<#ControllerWithEvents.RevertTransaction>) + - [func \(ctrl \*ControllerWithEvents\) SaveAccountMetadata\(ctx context.Context, parameters Parameters\[SaveAccountMetadata\]\) error](<#ControllerWithEvents.SaveAccountMetadata>) + - [func \(ctrl \*ControllerWithEvents\) SaveTransactionMetadata\(ctx context.Context, parameters Parameters\[SaveTransactionMetadata\]\) error](<#ControllerWithEvents.SaveTransactionMetadata>) +- [type ControllerWithTraces](<#ControllerWithTraces>) + - [func NewControllerWithTraces\(underlying Controller\) \*ControllerWithTraces](<#NewControllerWithTraces>) + - [func \(ctrl \*ControllerWithTraces\) CountAccounts\(ctx context.Context, a ListAccountsQuery\) \(int, error\)](<#ControllerWithTraces.CountAccounts>) + - [func \(ctrl \*ControllerWithTraces\) CountTransactions\(ctx context.Context, q ListTransactionsQuery\) \(int, error\)](<#ControllerWithTraces.CountTransactions>) + - [func \(ctrl \*ControllerWithTraces\) CreateTransaction\(ctx context.Context, parameters Parameters\[RunScript\]\) \(\*ledger.CreatedTransaction, error\)](<#ControllerWithTraces.CreateTransaction>) + - [func \(ctrl \*ControllerWithTraces\) DeleteAccountMetadata\(ctx context.Context, parameters Parameters\[DeleteAccountMetadata\]\) error](<#ControllerWithTraces.DeleteAccountMetadata>) + - [func \(ctrl \*ControllerWithTraces\) DeleteTransactionMetadata\(ctx context.Context, parameters Parameters\[DeleteTransactionMetadata\]\) 
error](<#ControllerWithTraces.DeleteTransactionMetadata>) + - [func \(ctrl \*ControllerWithTraces\) Export\(ctx context.Context, w ExportWriter\) error](<#ControllerWithTraces.Export>) + - [func \(ctrl \*ControllerWithTraces\) GetAccount\(ctx context.Context, q GetAccountQuery\) \(\*ledger.Account, error\)](<#ControllerWithTraces.GetAccount>) + - [func \(ctrl \*ControllerWithTraces\) GetAggregatedBalances\(ctx context.Context, q GetAggregatedBalanceQuery\) \(ledger.BalancesByAssets, error\)](<#ControllerWithTraces.GetAggregatedBalances>) + - [func \(ctrl \*ControllerWithTraces\) GetMigrationsInfo\(ctx context.Context\) \(\[\]migrations.Info, error\)](<#ControllerWithTraces.GetMigrationsInfo>) + - [func \(ctrl \*ControllerWithTraces\) GetStats\(ctx context.Context\) \(Stats, error\)](<#ControllerWithTraces.GetStats>) + - [func \(ctrl \*ControllerWithTraces\) GetTransaction\(ctx context.Context, query GetTransactionQuery\) \(\*ledger.Transaction, error\)](<#ControllerWithTraces.GetTransaction>) + - [func \(ctrl \*ControllerWithTraces\) GetVolumesWithBalances\(ctx context.Context, q GetVolumesWithBalancesQuery\) \(\*bunpaginate.Cursor\[ledger.VolumesWithBalanceByAssetByAccount\], error\)](<#ControllerWithTraces.GetVolumesWithBalances>) + - [func \(ctrl \*ControllerWithTraces\) Import\(ctx context.Context, stream chan ledger.Log\) error](<#ControllerWithTraces.Import>) + - [func \(ctrl \*ControllerWithTraces\) IsDatabaseUpToDate\(ctx context.Context\) \(bool, error\)](<#ControllerWithTraces.IsDatabaseUpToDate>) + - [func \(ctrl \*ControllerWithTraces\) ListAccounts\(ctx context.Context, a ListAccountsQuery\) \(\*bunpaginate.Cursor\[ledger.Account\], error\)](<#ControllerWithTraces.ListAccounts>) + - [func \(ctrl \*ControllerWithTraces\) ListLogs\(ctx context.Context, q GetLogsQuery\) \(\*bunpaginate.Cursor\[ledger.Log\], error\)](<#ControllerWithTraces.ListLogs>) + - [func \(ctrl \*ControllerWithTraces\) ListTransactions\(ctx context.Context, q ListTransactionsQuery\) \(\*bunpaginate.Cursor\[ledger.Transaction\], error\)](<#ControllerWithTraces.ListTransactions>) + - [func \(ctrl \*ControllerWithTraces\) RevertTransaction\(ctx context.Context, parameters Parameters\[RevertTransaction\]\) \(\*ledger.RevertedTransaction, error\)](<#ControllerWithTraces.RevertTransaction>) + - [func \(ctrl \*ControllerWithTraces\) SaveAccountMetadata\(ctx context.Context, parameters Parameters\[SaveAccountMetadata\]\) error](<#ControllerWithTraces.SaveAccountMetadata>) + - [func \(ctrl \*ControllerWithTraces\) SaveTransactionMetadata\(ctx context.Context, parameters Parameters\[SaveTransactionMetadata\]\) error](<#ControllerWithTraces.SaveTransactionMetadata>) +- [type DefaultController](<#DefaultController>) + - [func NewDefaultController\(ledger ledger.Ledger, store Store, machineFactory MachineFactory\) \*DefaultController](<#NewDefaultController>) + - [func \(ctrl \*DefaultController\) CountAccounts\(ctx context.Context, a ListAccountsQuery\) \(int, error\)](<#DefaultController.CountAccounts>) + - [func \(ctrl \*DefaultController\) CountTransactions\(ctx context.Context, q ListTransactionsQuery\) \(int, error\)](<#DefaultController.CountTransactions>) + - [func \(ctrl \*DefaultController\) CreateTransaction\(ctx context.Context, parameters Parameters\[RunScript\]\) \(\*ledger.CreatedTransaction, error\)](<#DefaultController.CreateTransaction>) + - [func \(ctrl \*DefaultController\) DeleteAccountMetadata\(ctx context.Context, parameters Parameters\[DeleteAccountMetadata\]\) 
error](<#DefaultController.DeleteAccountMetadata>) + - [func \(ctrl \*DefaultController\) DeleteTransactionMetadata\(ctx context.Context, parameters Parameters\[DeleteTransactionMetadata\]\) error](<#DefaultController.DeleteTransactionMetadata>) + - [func \(ctrl \*DefaultController\) Export\(ctx context.Context, w ExportWriter\) error](<#DefaultController.Export>) + - [func \(ctrl \*DefaultController\) GetAccount\(ctx context.Context, q GetAccountQuery\) \(\*ledger.Account, error\)](<#DefaultController.GetAccount>) + - [func \(ctrl \*DefaultController\) GetAggregatedBalances\(ctx context.Context, q GetAggregatedBalanceQuery\) \(ledger.BalancesByAssets, error\)](<#DefaultController.GetAggregatedBalances>) + - [func \(ctrl \*DefaultController\) GetMigrationsInfo\(ctx context.Context\) \(\[\]migrations.Info, error\)](<#DefaultController.GetMigrationsInfo>) + - [func \(ctrl \*DefaultController\) GetStats\(ctx context.Context\) \(Stats, error\)](<#DefaultController.GetStats>) + - [func \(ctrl \*DefaultController\) GetTransaction\(ctx context.Context, query GetTransactionQuery\) \(\*ledger.Transaction, error\)](<#DefaultController.GetTransaction>) + - [func \(ctrl \*DefaultController\) GetVolumesWithBalances\(ctx context.Context, q GetVolumesWithBalancesQuery\) \(\*bunpaginate.Cursor\[ledger.VolumesWithBalanceByAssetByAccount\], error\)](<#DefaultController.GetVolumesWithBalances>) + - [func \(ctrl \*DefaultController\) Import\(ctx context.Context, stream chan ledger.Log\) error](<#DefaultController.Import>) + - [func \(ctrl \*DefaultController\) IsDatabaseUpToDate\(ctx context.Context\) \(bool, error\)](<#DefaultController.IsDatabaseUpToDate>) + - [func \(ctrl \*DefaultController\) ListAccounts\(ctx context.Context, a ListAccountsQuery\) \(\*bunpaginate.Cursor\[ledger.Account\], error\)](<#DefaultController.ListAccounts>) + - [func \(ctrl \*DefaultController\) ListLogs\(ctx context.Context, q GetLogsQuery\) \(\*bunpaginate.Cursor\[ledger.Log\], error\)](<#DefaultController.ListLogs>) + - [func \(ctrl \*DefaultController\) ListTransactions\(ctx context.Context, q ListTransactionsQuery\) \(\*bunpaginate.Cursor\[ledger.Transaction\], error\)](<#DefaultController.ListTransactions>) + - [func \(ctrl \*DefaultController\) RevertTransaction\(ctx context.Context, parameters Parameters\[RevertTransaction\]\) \(\*ledger.RevertedTransaction, error\)](<#DefaultController.RevertTransaction>) + - [func \(ctrl \*DefaultController\) SaveAccountMetadata\(ctx context.Context, parameters Parameters\[SaveAccountMetadata\]\) error](<#DefaultController.SaveAccountMetadata>) + - [func \(ctrl \*DefaultController\) SaveTransactionMetadata\(ctx context.Context, parameters Parameters\[SaveTransactionMetadata\]\) error](<#DefaultController.SaveTransactionMetadata>) +- [type DefaultMachineAdapter](<#DefaultMachineAdapter>) + - [func NewDefaultMachine\(p program.Program\) \*DefaultMachineAdapter](<#NewDefaultMachine>) + - [func \(d \*DefaultMachineAdapter\) Execute\(ctx context.Context, tx TX, vars map\[string\]string\) \(\*MachineResult, error\)](<#DefaultMachineAdapter.Execute>) +- [type DefaultMachineFactory](<#DefaultMachineFactory>) + - [func NewDefaultMachineFactory\(compiler Compiler\) \*DefaultMachineFactory](<#NewDefaultMachineFactory>) + - [func \(d \*DefaultMachineFactory\) Make\(script string\) \(Machine, error\)](<#DefaultMachineFactory.Make>) +- [type DeleteAccountMetadata](<#DeleteAccountMetadata>) +- [type DeleteTransactionMetadata](<#DeleteTransactionMetadata>) +- [type 
ErrAlreadyReverted](<#ErrAlreadyReverted>) + - [func \(e ErrAlreadyReverted\) Error\(\) string](<#ErrAlreadyReverted.Error>) + - [func \(e ErrAlreadyReverted\) Is\(err error\) bool](<#ErrAlreadyReverted.Is>) +- [type ErrCompilationFailed](<#ErrCompilationFailed>) + - [func \(e ErrCompilationFailed\) Error\(\) string](<#ErrCompilationFailed.Error>) + - [func \(e ErrCompilationFailed\) Is\(err error\) bool](<#ErrCompilationFailed.Is>) +- [type ErrIdempotencyKeyConflict](<#ErrIdempotencyKeyConflict>) + - [func NewErrIdempotencyKeyConflict\(ik string\) ErrIdempotencyKeyConflict](<#NewErrIdempotencyKeyConflict>) + - [func \(e ErrIdempotencyKeyConflict\) Error\(\) string](<#ErrIdempotencyKeyConflict.Error>) + - [func \(e ErrIdempotencyKeyConflict\) Is\(err error\) bool](<#ErrIdempotencyKeyConflict.Is>) +- [type ErrImport](<#ErrImport>) + - [func \(i ErrImport\) Error\(\) string](<#ErrImport.Error>) + - [func \(i ErrImport\) Is\(err error\) bool](<#ErrImport.Is>) +- [type ErrInsufficientFunds](<#ErrInsufficientFunds>) +- [type ErrInvalidHash](<#ErrInvalidHash>) + - [func \(i ErrInvalidHash\) Error\(\) string](<#ErrInvalidHash.Error>) +- [type ErrInvalidIdempotencyInput](<#ErrInvalidIdempotencyInput>) + - [func \(e ErrInvalidIdempotencyInput\) Error\(\) string](<#ErrInvalidIdempotencyInput.Error>) + - [func \(e ErrInvalidIdempotencyInput\) Is\(err error\) bool](<#ErrInvalidIdempotencyInput.Is>) +- [type ErrInvalidQuery](<#ErrInvalidQuery>) + - [func NewErrInvalidQuery\(msg string, args ...any\) ErrInvalidQuery](<#NewErrInvalidQuery>) + - [func \(e ErrInvalidQuery\) Error\(\) string](<#ErrInvalidQuery.Error>) + - [func \(e ErrInvalidQuery\) Is\(err error\) bool](<#ErrInvalidQuery.Is>) +- [type ErrInvalidVars](<#ErrInvalidVars>) +- [type ErrMetadataOverride](<#ErrMetadataOverride>) + - [func \(e \*ErrMetadataOverride\) Error\(\) string](<#ErrMetadataOverride.Error>) + - [func \(e \*ErrMetadataOverride\) Is\(err error\) bool](<#ErrMetadataOverride.Is>) +- [type ErrMissingFeature](<#ErrMissingFeature>) + - [func NewErrMissingFeature\(feature string\) ErrMissingFeature](<#NewErrMissingFeature>) + - [func \(e ErrMissingFeature\) Error\(\) string](<#ErrMissingFeature.Error>) + - [func \(e ErrMissingFeature\) Is\(err error\) bool](<#ErrMissingFeature.Is>) +- [type ErrTransactionReferenceConflict](<#ErrTransactionReferenceConflict>) + - [func NewErrTransactionReferenceConflict\(reference string\) ErrTransactionReferenceConflict](<#NewErrTransactionReferenceConflict>) + - [func \(e ErrTransactionReferenceConflict\) Error\(\) string](<#ErrTransactionReferenceConflict.Error>) + - [func \(e ErrTransactionReferenceConflict\) Is\(err error\) bool](<#ErrTransactionReferenceConflict.Is>) +- [type ExportWriter](<#ExportWriter>) +- [type ExportWriterFn](<#ExportWriterFn>) + - [func \(fn ExportWriterFn\) Write\(ctx context.Context, log ledger.Log\) error](<#ExportWriterFn.Write>) +- [type FiltersForVolumes](<#FiltersForVolumes>) +- [type GetAccountQuery](<#GetAccountQuery>) + - [func NewGetAccountQuery\(addr string\) GetAccountQuery](<#NewGetAccountQuery>) + - [func \(q GetAccountQuery\) WithExpandEffectiveVolumes\(\) GetAccountQuery](<#GetAccountQuery.WithExpandEffectiveVolumes>) + - [func \(q GetAccountQuery\) WithExpandVolumes\(\) GetAccountQuery](<#GetAccountQuery.WithExpandVolumes>) + - [func \(q GetAccountQuery\) WithPIT\(pit time.Time\) GetAccountQuery](<#GetAccountQuery.WithPIT>) +- [type GetAggregatedBalanceQuery](<#GetAggregatedBalanceQuery>) + - [func NewGetAggregatedBalancesQuery\(filter PITFilter, qb 
query.Builder, useInsertionDate bool\) GetAggregatedBalanceQuery](<#NewGetAggregatedBalancesQuery>) +- [type GetLogsQuery](<#GetLogsQuery>) + - [func NewListLogsQuery\(options PaginatedQueryOptions\[any\]\) GetLogsQuery](<#NewListLogsQuery>) + - [func \(q GetLogsQuery\) WithOrder\(order bunpaginate.Order\) GetLogsQuery](<#GetLogsQuery.WithOrder>) +- [type GetTransactionQuery](<#GetTransactionQuery>) + - [func NewGetTransactionQuery\(id int\) GetTransactionQuery](<#NewGetTransactionQuery>) + - [func \(q GetTransactionQuery\) WithExpandEffectiveVolumes\(\) GetTransactionQuery](<#GetTransactionQuery.WithExpandEffectiveVolumes>) + - [func \(q GetTransactionQuery\) WithExpandVolumes\(\) GetTransactionQuery](<#GetTransactionQuery.WithExpandVolumes>) +- [type GetVolumesWithBalancesQuery](<#GetVolumesWithBalancesQuery>) + - [func NewGetVolumesWithBalancesQuery\(opts PaginatedQueryOptions\[FiltersForVolumes\]\) GetVolumesWithBalancesQuery](<#NewGetVolumesWithBalancesQuery>) +- [type ListAccountsQuery](<#ListAccountsQuery>) + - [func NewListAccountsQuery\(opts PaginatedQueryOptions\[PITFilterWithVolumes\]\) ListAccountsQuery](<#NewListAccountsQuery>) + - [func \(q ListAccountsQuery\) WithExpandEffectiveVolumes\(\) ListAccountsQuery](<#ListAccountsQuery.WithExpandEffectiveVolumes>) + - [func \(q ListAccountsQuery\) WithExpandVolumes\(\) ListAccountsQuery](<#ListAccountsQuery.WithExpandVolumes>) +- [type ListLedgersQuery](<#ListLedgersQuery>) + - [func NewListLedgersQuery\(pageSize uint64\) ListLedgersQuery](<#NewListLedgersQuery>) +- [type ListTransactionsQuery](<#ListTransactionsQuery>) + - [func NewListTransactionsQuery\(options PaginatedQueryOptions\[PITFilterWithVolumes\]\) ListTransactionsQuery](<#NewListTransactionsQuery>) + - [func \(q ListTransactionsQuery\) WithColumn\(column string\) ListTransactionsQuery](<#ListTransactionsQuery.WithColumn>) +- [type Listener](<#Listener>) +- [type Machine](<#Machine>) +- [type MachineFactory](<#MachineFactory>) +- [type MachineResult](<#MachineResult>) +- [type PITFilter](<#PITFilter>) +- [type PITFilterWithVolumes](<#PITFilterWithVolumes>) +- [type PaginatedQueryOptions](<#PaginatedQueryOptions>) + - [func NewPaginatedQueryOptions\[T any\]\(options T\) PaginatedQueryOptions\[T\]](<#NewPaginatedQueryOptions>) + - [func \(opts \*PaginatedQueryOptions\[T\]\) UnmarshalJSON\(data \[\]byte\) error](<#PaginatedQueryOptions[T].UnmarshalJSON>) + - [func \(opts PaginatedQueryOptions\[T\]\) WithPageSize\(pageSize uint64\) PaginatedQueryOptions\[T\]](<#PaginatedQueryOptions[T].WithPageSize>) + - [func \(opts PaginatedQueryOptions\[T\]\) WithQueryBuilder\(qb query.Builder\) PaginatedQueryOptions\[T\]](<#PaginatedQueryOptions[T].WithQueryBuilder>) +- [type Parameters](<#Parameters>) +- [type RevertTransaction](<#RevertTransaction>) +- [type RunScript](<#RunScript>) +- [type SaveAccountMetadata](<#SaveAccountMetadata>) +- [type SaveTransactionMetadata](<#SaveTransactionMetadata>) +- [type Script](<#Script>) +- [type ScriptV1](<#ScriptV1>) +- [type State](<#State>) +- [type StateRegistry](<#StateRegistry>) + - [func NewStateRegistry\(\) \*StateRegistry](<#NewStateRegistry>) + - [func \(r \*StateRegistry\) IsUpToDate\(name string\) bool](<#StateRegistry.IsUpToDate>) + - [func \(r \*StateRegistry\) SetUpToDate\(name string\)](<#StateRegistry.SetUpToDate>) + - [func \(r \*StateRegistry\) Upsert\(l ledger.Ledger\) bool](<#StateRegistry.Upsert>) +- [type Stats](<#Stats>) +- [type Store](<#Store>) +- [type TX](<#TX>) + + +## Variables + + + +```go +var ErrNoPostings = 
errors.New("numscript execution returned no postings") +``` + + + +```go +var ErrNotFound = postgres.ErrNotFound +``` + + +## type [Balance]() + + + +```go +type Balance struct { + Asset string + Balance *big.Int +} +``` + + +## type [BalanceQuery]() + + + +```go +type BalanceQuery = vm.BalanceQuery +``` + + +## type [Balances]() + + + +```go +type Balances = vm.Balances +``` + + +## type [CacheConfiguration]() + + + +```go +type CacheConfiguration struct { + MaxCount uint +} +``` + + +## type [CachedCompiler]() + + + +```go +type CachedCompiler struct { + // contains filtered or unexported fields +} +``` + + +### func [NewCachedCompiler]() + +```go +func NewCachedCompiler(compiler Compiler, configuration CacheConfiguration) *CachedCompiler +``` + + + + +### func \(\*CachedCompiler\) [Compile]() + +```go +func (c *CachedCompiler) Compile(script string) (*program.Program, error) +``` + + + + +## type [Compiler]() + +Compiler can return following errors: + +- ErrCompilationFailed + +```go +type Compiler interface { + Compile(script string) (*program.Program, error) +} +``` + + +## type [CompilerFn]() + + + +```go +type CompilerFn func(script string) (*program.Program, error) +``` + + +### func [NewDefaultCompiler]() + +```go +func NewDefaultCompiler() CompilerFn +``` + + + + +### func \(CompilerFn\) [Compile]() + +```go +func (fn CompilerFn) Compile(script string) (*program.Program, error) +``` + + + + +## type [Controller]() + + + +```go +type Controller interface { + // IsDatabaseUpToDate check if the ledger store is up to date, including the bucket and the ledger specifics + // It returns true if up to date + IsDatabaseUpToDate(ctx context.Context) (bool, error) + GetMigrationsInfo(ctx context.Context) ([]migrations.Info, error) + GetStats(ctx context.Context) (Stats, error) + + GetAccount(ctx context.Context, query GetAccountQuery) (*ledger.Account, error) + ListAccounts(ctx context.Context, query ListAccountsQuery) (*bunpaginate.Cursor[ledger.Account], error) + CountAccounts(ctx context.Context, query ListAccountsQuery) (int, error) + ListLogs(ctx context.Context, query GetLogsQuery) (*bunpaginate.Cursor[ledger.Log], error) + CountTransactions(ctx context.Context, query ListTransactionsQuery) (int, error) + ListTransactions(ctx context.Context, query ListTransactionsQuery) (*bunpaginate.Cursor[ledger.Transaction], error) + GetTransaction(ctx context.Context, query GetTransactionQuery) (*ledger.Transaction, error) + GetVolumesWithBalances(ctx context.Context, q GetVolumesWithBalancesQuery) (*bunpaginate.Cursor[ledger.VolumesWithBalanceByAssetByAccount], error) + GetAggregatedBalances(ctx context.Context, q GetAggregatedBalanceQuery) (ledger.BalancesByAssets, error) + + // CreateTransaction accept a numscript script and returns a transaction + // It can return following errors: + // * ErrCompilationFailed + // * ErrMetadataOverride + // * ErrInvalidVars + // * ErrTransactionReferenceConflict + // * ErrIdempotencyKeyConflict + // * ErrInsufficientFunds + CreateTransaction(ctx context.Context, parameters Parameters[RunScript]) (*ledger.CreatedTransaction, error) + // RevertTransaction allow to revert a transaction. 
+ // It can return following errors: + // * ErrInsufficientFunds + // * ErrAlreadyReverted + // * ErrNotFound + // Parameter force indicate we want to force revert the transaction even if the accounts does not have funds + // Parameter atEffectiveDate indicate we want to set the timestamp of the newly created transaction on the timestamp of the reverted transaction + RevertTransaction(ctx context.Context, parameters Parameters[RevertTransaction]) (*ledger.RevertedTransaction, error) + // SaveTransactionMetadata allow to add metadata to an existing transaction + // It can return following errors: + // * ErrNotFound + SaveTransactionMetadata(ctx context.Context, parameters Parameters[SaveTransactionMetadata]) error + // SaveAccountMetadata allow to add metadata to an account + // If the account does not exist, it is created + SaveAccountMetadata(ctx context.Context, parameters Parameters[SaveAccountMetadata]) error + // DeleteTransactionMetadata allow to remove metadata of a transaction + // It can return following errors: + // * ErrNotFound : indicate the transaction was not found OR the metadata does not exist on the transaction + DeleteTransactionMetadata(ctx context.Context, parameters Parameters[DeleteTransactionMetadata]) error + // DeleteAccountMetadata allow to remove metadata of an account + // It can return following errors: + // * ErrNotFound : indicate the account was not found OR the metadata does not exist on the account + DeleteAccountMetadata(ctx context.Context, parameters Parameters[DeleteAccountMetadata]) error + // Import allow to import the logs of an existing ledger + // It can return following errors: + // * ErrImport + // Logs hash must be valid and the ledger.Ledger must be in 'initializing' state + Import(ctx context.Context, stream chan ledger.Log) error + // Export allow to export the logs of a ledger + Export(ctx context.Context, w ExportWriter) error +} +``` + + +## type [ControllerWithCache]() + + + +```go +type ControllerWithCache struct { + Controller + // contains filtered or unexported fields +} +``` + + +### func [NewControllerWithCache]() + +```go +func NewControllerWithCache(ledger ledger.Ledger, underlying Controller, registry *StateRegistry) *ControllerWithCache +``` + + + + +### func \(\*ControllerWithCache\) [IsDatabaseUpToDate]() + +```go +func (c *ControllerWithCache) IsDatabaseUpToDate(ctx context.Context) (bool, error) +``` + + + + +## type [ControllerWithEvents]() + + + +```go +type ControllerWithEvents struct { + Controller + // contains filtered or unexported fields +} +``` + + +### func [NewControllerWithEvents]() + +```go +func NewControllerWithEvents(ledger ledger.Ledger, underlying Controller, listener Listener) *ControllerWithEvents +``` + + + + +### func \(\*ControllerWithEvents\) [CreateTransaction]() + +```go +func (ctrl *ControllerWithEvents) CreateTransaction(ctx context.Context, parameters Parameters[RunScript]) (*ledger.CreatedTransaction, error) +``` + + + + +### func \(\*ControllerWithEvents\) [DeleteAccountMetadata]() + +```go +func (ctrl *ControllerWithEvents) DeleteAccountMetadata(ctx context.Context, parameters Parameters[DeleteAccountMetadata]) error +``` + + + + +### func \(\*ControllerWithEvents\) [DeleteTransactionMetadata]() + +```go +func (ctrl *ControllerWithEvents) DeleteTransactionMetadata(ctx context.Context, parameters Parameters[DeleteTransactionMetadata]) error +``` + + + + +### func \(\*ControllerWithEvents\) [RevertTransaction]() + +```go +func (ctrl *ControllerWithEvents) RevertTransaction(ctx context.Context, 
parameters Parameters[RevertTransaction]) (*ledger.RevertedTransaction, error) +``` + + + + +### func \(\*ControllerWithEvents\) [SaveAccountMetadata]() + +```go +func (ctrl *ControllerWithEvents) SaveAccountMetadata(ctx context.Context, parameters Parameters[SaveAccountMetadata]) error +``` + + + + +### func \(\*ControllerWithEvents\) [SaveTransactionMetadata]() + +```go +func (ctrl *ControllerWithEvents) SaveTransactionMetadata(ctx context.Context, parameters Parameters[SaveTransactionMetadata]) error +``` + + + + +## type [ControllerWithTraces]() + + + +```go +type ControllerWithTraces struct { + // contains filtered or unexported fields +} +``` + + +### func [NewControllerWithTraces]() + +```go +func NewControllerWithTraces(underlying Controller) *ControllerWithTraces +``` + + + + +### func \(\*ControllerWithTraces\) [CountAccounts]() + +```go +func (ctrl *ControllerWithTraces) CountAccounts(ctx context.Context, a ListAccountsQuery) (int, error) +``` + + + + +### func \(\*ControllerWithTraces\) [CountTransactions]() + +```go +func (ctrl *ControllerWithTraces) CountTransactions(ctx context.Context, q ListTransactionsQuery) (int, error) +``` + + + + +### func \(\*ControllerWithTraces\) [CreateTransaction]() + +```go +func (ctrl *ControllerWithTraces) CreateTransaction(ctx context.Context, parameters Parameters[RunScript]) (*ledger.CreatedTransaction, error) +``` + + + + +### func \(\*ControllerWithTraces\) [DeleteAccountMetadata]() + +```go +func (ctrl *ControllerWithTraces) DeleteAccountMetadata(ctx context.Context, parameters Parameters[DeleteAccountMetadata]) error +``` + + + + +### func \(\*ControllerWithTraces\) [DeleteTransactionMetadata]() + +```go +func (ctrl *ControllerWithTraces) DeleteTransactionMetadata(ctx context.Context, parameters Parameters[DeleteTransactionMetadata]) error +``` + + + + +### func \(\*ControllerWithTraces\) [Export]() + +```go +func (ctrl *ControllerWithTraces) Export(ctx context.Context, w ExportWriter) error +``` + + + + +### func \(\*ControllerWithTraces\) [GetAccount]() + +```go +func (ctrl *ControllerWithTraces) GetAccount(ctx context.Context, q GetAccountQuery) (*ledger.Account, error) +``` + + + + +### func \(\*ControllerWithTraces\) [GetAggregatedBalances]() + +```go +func (ctrl *ControllerWithTraces) GetAggregatedBalances(ctx context.Context, q GetAggregatedBalanceQuery) (ledger.BalancesByAssets, error) +``` + + + + +### func \(\*ControllerWithTraces\) [GetMigrationsInfo]() + +```go +func (ctrl *ControllerWithTraces) GetMigrationsInfo(ctx context.Context) ([]migrations.Info, error) +``` + + + + +### func \(\*ControllerWithTraces\) [GetStats]() + +```go +func (ctrl *ControllerWithTraces) GetStats(ctx context.Context) (Stats, error) +``` + + + + +### func \(\*ControllerWithTraces\) [GetTransaction]() + +```go +func (ctrl *ControllerWithTraces) GetTransaction(ctx context.Context, query GetTransactionQuery) (*ledger.Transaction, error) +``` + + + + +### func \(\*ControllerWithTraces\) [GetVolumesWithBalances]() + +```go +func (ctrl *ControllerWithTraces) GetVolumesWithBalances(ctx context.Context, q GetVolumesWithBalancesQuery) (*bunpaginate.Cursor[ledger.VolumesWithBalanceByAssetByAccount], error) +``` + + + + +### func \(\*ControllerWithTraces\) [Import]() + +```go +func (ctrl *ControllerWithTraces) Import(ctx context.Context, stream chan ledger.Log) error +``` + + + + +### func \(\*ControllerWithTraces\) [IsDatabaseUpToDate]() + +```go +func (ctrl *ControllerWithTraces) IsDatabaseUpToDate(ctx context.Context) (bool, error) +``` + + + + +### func 
\(\*ControllerWithTraces\) [ListAccounts]() + +```go +func (ctrl *ControllerWithTraces) ListAccounts(ctx context.Context, a ListAccountsQuery) (*bunpaginate.Cursor[ledger.Account], error) +``` + + + + +### func \(\*ControllerWithTraces\) [ListLogs]() + +```go +func (ctrl *ControllerWithTraces) ListLogs(ctx context.Context, q GetLogsQuery) (*bunpaginate.Cursor[ledger.Log], error) +``` + + + + +### func \(\*ControllerWithTraces\) [ListTransactions]() + +```go +func (ctrl *ControllerWithTraces) ListTransactions(ctx context.Context, q ListTransactionsQuery) (*bunpaginate.Cursor[ledger.Transaction], error) +``` + + + + +### func \(\*ControllerWithTraces\) [RevertTransaction]() + +```go +func (ctrl *ControllerWithTraces) RevertTransaction(ctx context.Context, parameters Parameters[RevertTransaction]) (*ledger.RevertedTransaction, error) +``` + + + + +### func \(\*ControllerWithTraces\) [SaveAccountMetadata]() + +```go +func (ctrl *ControllerWithTraces) SaveAccountMetadata(ctx context.Context, parameters Parameters[SaveAccountMetadata]) error +``` + + + + +### func \(\*ControllerWithTraces\) [SaveTransactionMetadata]() + +```go +func (ctrl *ControllerWithTraces) SaveTransactionMetadata(ctx context.Context, parameters Parameters[SaveTransactionMetadata]) error +``` + + + + +## type [DefaultController]() + + + +```go +type DefaultController struct { + // contains filtered or unexported fields +} +``` + + +### func [NewDefaultController]() + +```go +func NewDefaultController(ledger ledger.Ledger, store Store, machineFactory MachineFactory) *DefaultController +``` + + + + +### func \(\*DefaultController\) [CountAccounts]() + +```go +func (ctrl *DefaultController) CountAccounts(ctx context.Context, a ListAccountsQuery) (int, error) +``` + + + + +### func \(\*DefaultController\) [CountTransactions]() + +```go +func (ctrl *DefaultController) CountTransactions(ctx context.Context, q ListTransactionsQuery) (int, error) +``` + + + + +### func \(\*DefaultController\) [CreateTransaction]() + +```go +func (ctrl *DefaultController) CreateTransaction(ctx context.Context, parameters Parameters[RunScript]) (*ledger.CreatedTransaction, error) +``` + + + + +### func \(\*DefaultController\) [DeleteAccountMetadata]() + +```go +func (ctrl *DefaultController) DeleteAccountMetadata(ctx context.Context, parameters Parameters[DeleteAccountMetadata]) error +``` + + + + +### func \(\*DefaultController\) [DeleteTransactionMetadata]() + +```go +func (ctrl *DefaultController) DeleteTransactionMetadata(ctx context.Context, parameters Parameters[DeleteTransactionMetadata]) error +``` + + + + +### func \(\*DefaultController\) [Export]() + +```go +func (ctrl *DefaultController) Export(ctx context.Context, w ExportWriter) error +``` + + + + +### func \(\*DefaultController\) [GetAccount]() + +```go +func (ctrl *DefaultController) GetAccount(ctx context.Context, q GetAccountQuery) (*ledger.Account, error) +``` + + + + +### func \(\*DefaultController\) [GetAggregatedBalances]() + +```go +func (ctrl *DefaultController) GetAggregatedBalances(ctx context.Context, q GetAggregatedBalanceQuery) (ledger.BalancesByAssets, error) +``` + + + + +### func \(\*DefaultController\) [GetMigrationsInfo]() + +```go +func (ctrl *DefaultController) GetMigrationsInfo(ctx context.Context) ([]migrations.Info, error) +``` + + + + +### func \(\*DefaultController\) [GetStats]() + +```go +func (ctrl *DefaultController) GetStats(ctx context.Context) (Stats, error) +``` + + + + +### func \(\*DefaultController\) [GetTransaction]() + +```go +func (ctrl 
*DefaultController) GetTransaction(ctx context.Context, query GetTransactionQuery) (*ledger.Transaction, error) +``` + + + + +### func \(\*DefaultController\) [GetVolumesWithBalances]() + +```go +func (ctrl *DefaultController) GetVolumesWithBalances(ctx context.Context, q GetVolumesWithBalancesQuery) (*bunpaginate.Cursor[ledger.VolumesWithBalanceByAssetByAccount], error) +``` + + + + +### func \(\*DefaultController\) [Import]() + +```go +func (ctrl *DefaultController) Import(ctx context.Context, stream chan ledger.Log) error +``` + + + + +### func \(\*DefaultController\) [IsDatabaseUpToDate]() + +```go +func (ctrl *DefaultController) IsDatabaseUpToDate(ctx context.Context) (bool, error) +``` + + + + +### func \(\*DefaultController\) [ListAccounts]() + +```go +func (ctrl *DefaultController) ListAccounts(ctx context.Context, a ListAccountsQuery) (*bunpaginate.Cursor[ledger.Account], error) +``` + + + + +### func \(\*DefaultController\) [ListLogs]() + +```go +func (ctrl *DefaultController) ListLogs(ctx context.Context, q GetLogsQuery) (*bunpaginate.Cursor[ledger.Log], error) +``` + + + + +### func \(\*DefaultController\) [ListTransactions]() + +```go +func (ctrl *DefaultController) ListTransactions(ctx context.Context, q ListTransactionsQuery) (*bunpaginate.Cursor[ledger.Transaction], error) +``` + + + + +### func \(\*DefaultController\) [RevertTransaction]() + +```go +func (ctrl *DefaultController) RevertTransaction(ctx context.Context, parameters Parameters[RevertTransaction]) (*ledger.RevertedTransaction, error) +``` + + + + +### func \(\*DefaultController\) [SaveAccountMetadata]() + +```go +func (ctrl *DefaultController) SaveAccountMetadata(ctx context.Context, parameters Parameters[SaveAccountMetadata]) error +``` + + + + +### func \(\*DefaultController\) [SaveTransactionMetadata]() + +```go +func (ctrl *DefaultController) SaveTransactionMetadata(ctx context.Context, parameters Parameters[SaveTransactionMetadata]) error +``` + + + + +## type [DefaultMachineAdapter]() + + + +```go +type DefaultMachineAdapter struct { + // contains filtered or unexported fields +} +``` + + +### func [NewDefaultMachine]() + +```go +func NewDefaultMachine(p program.Program) *DefaultMachineAdapter +``` + + + + +### func \(\*DefaultMachineAdapter\) [Execute]() + +```go +func (d *DefaultMachineAdapter) Execute(ctx context.Context, tx TX, vars map[string]string) (*MachineResult, error) +``` + + + + +## type [DefaultMachineFactory]() + + + +```go +type DefaultMachineFactory struct { + // contains filtered or unexported fields +} +``` + + +### func [NewDefaultMachineFactory]() + +```go +func NewDefaultMachineFactory(compiler Compiler) *DefaultMachineFactory +``` + + + + +### func \(\*DefaultMachineFactory\) [Make]() + +```go +func (d *DefaultMachineFactory) Make(script string) (Machine, error) +``` + + + + +## type [DeleteAccountMetadata]() + + + +```go +type DeleteAccountMetadata struct { + Address string + Key string +} +``` + + +## type [DeleteTransactionMetadata]() + + + +```go +type DeleteTransactionMetadata struct { + TransactionID int + Key string +} +``` + + +## type [ErrAlreadyReverted]() + + + +```go +type ErrAlreadyReverted struct { + // contains filtered or unexported fields +} +``` + + +### func \(ErrAlreadyReverted\) [Error]() + +```go +func (e ErrAlreadyReverted) Error() string +``` + + + + +### func \(ErrAlreadyReverted\) [Is]() + +```go +func (e ErrAlreadyReverted) Is(err error) bool +``` + + + + +## type [ErrCompilationFailed]() + +ErrCompilationFailed is used for any errors returned by the 
numscript interpreter + +```go +type ErrCompilationFailed struct { + // contains filtered or unexported fields +} +``` + + +### func \(ErrCompilationFailed\) [Error]() + +```go +func (e ErrCompilationFailed) Error() string +``` + + + + +### func \(ErrCompilationFailed\) [Is]() + +```go +func (e ErrCompilationFailed) Is(err error) bool +``` + + + + +## type [ErrIdempotencyKeyConflict]() + + + +```go +type ErrIdempotencyKeyConflict struct { + // contains filtered or unexported fields +} +``` + + +### func [NewErrIdempotencyKeyConflict]() + +```go +func NewErrIdempotencyKeyConflict(ik string) ErrIdempotencyKeyConflict +``` + + + + +### func \(ErrIdempotencyKeyConflict\) [Error]() + +```go +func (e ErrIdempotencyKeyConflict) Error() string +``` + + + + +### func \(ErrIdempotencyKeyConflict\) [Is]() + +```go +func (e ErrIdempotencyKeyConflict) Is(err error) bool +``` + + + + +## type [ErrImport]() + + + +```go +type ErrImport struct { + // contains filtered or unexported fields +} +``` + + +### func \(ErrImport\) [Error]() + +```go +func (i ErrImport) Error() string +``` + + + + +### func \(ErrImport\) [Is]() + +```go +func (i ErrImport) Is(err error) bool +``` + + + + +## type [ErrInsufficientFunds]() + +todo\(waiting\): need a more precise underlying error notes\(gfyrag\): Waiting new interpreter + +```go +type ErrInsufficientFunds = machine.ErrInsufficientFund +``` + + +## type [ErrInvalidHash]() + + + +```go +type ErrInvalidHash struct { + // contains filtered or unexported fields +} +``` + + +### func \(ErrInvalidHash\) [Error]() + +```go +func (i ErrInvalidHash) Error() string +``` + + + + +## type [ErrInvalidIdempotencyInput]() + +ErrInvalidIdempotencyInput is used when a IK is used with an inputs different from the original one. For example, try to use the same IK with a different numscript script will result with that error. 
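A short, hand-written illustration of that rule (an editor's sketch, not part of the generated reference; `ctrl`, `scriptA` and `scriptB` are placeholders, and the snippet assumes it lives inside the ledger module since the package is internal):

```go
package example

import (
	"context"
	"errors"

	ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger"
)

// reusingAnIdempotencyKey submits two different scripts under the same
// idempotency key. The first call goes through; the second is rejected,
// assuming the package's Is methods match errors by type as usual.
func reusingAnIdempotencyKey(ctx context.Context, ctrl ledgercontroller.Controller, scriptA, scriptB ledgercontroller.RunScript) error {
	withIK := func(script ledgercontroller.RunScript) ledgercontroller.Parameters[ledgercontroller.RunScript] {
		return ledgercontroller.Parameters[ledgercontroller.RunScript]{
			IdempotencyKey: "ik-42", // hypothetical key
			Input:          script,
		}
	}

	// First use of "ik-42": only fails if the script itself is invalid.
	if _, err := ctrl.CreateTransaction(ctx, withIK(scriptA)); err != nil {
		return err
	}

	// Same key, different input: the controller refuses to treat it as a replay.
	_, err := ctrl.CreateTransaction(ctx, withIK(scriptB))
	if errors.Is(err, ledgercontroller.ErrInvalidIdempotencyInput{}) {
		// surface the conflict to the caller instead of retrying
	}
	return err
}
```

The type itself is shown below.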
+ +```go +type ErrInvalidIdempotencyInput struct { + // contains filtered or unexported fields +} +``` + + +### func \(ErrInvalidIdempotencyInput\) [Error]() + +```go +func (e ErrInvalidIdempotencyInput) Error() string +``` + + + + +### func \(ErrInvalidIdempotencyInput\) [Is]() + +```go +func (e ErrInvalidIdempotencyInput) Is(err error) bool +``` + + + + +## type [ErrInvalidQuery]() + + + +```go +type ErrInvalidQuery struct { + // contains filtered or unexported fields +} +``` + + +### func [NewErrInvalidQuery]() + +```go +func NewErrInvalidQuery(msg string, args ...any) ErrInvalidQuery +``` + + + + +### func \(ErrInvalidQuery\) [Error]() + +```go +func (e ErrInvalidQuery) Error() string +``` + + + + +### func \(ErrInvalidQuery\) [Is]() + +```go +func (e ErrInvalidQuery) Is(err error) bool +``` + + + + +## type [ErrInvalidVars]() + + + +```go +type ErrInvalidVars = machine.ErrInvalidVars +``` + + +## type [ErrMetadataOverride]() + +ErrMetadataOverride is used when a metadata is defined at numscript level AND at the input level + +```go +type ErrMetadataOverride struct { + // contains filtered or unexported fields +} +``` + + +### func \(\*ErrMetadataOverride\) [Error]() + +```go +func (e *ErrMetadataOverride) Error() string +``` + + + + +### func \(\*ErrMetadataOverride\) [Is]() + +```go +func (e *ErrMetadataOverride) Is(err error) bool +``` + + + + +## type [ErrMissingFeature]() + + + +```go +type ErrMissingFeature struct { + // contains filtered or unexported fields +} +``` + + +### func [NewErrMissingFeature]() + +```go +func NewErrMissingFeature(feature string) ErrMissingFeature +``` + + + + +### func \(ErrMissingFeature\) [Error]() + +```go +func (e ErrMissingFeature) Error() string +``` + + + + +### func \(ErrMissingFeature\) [Is]() + +```go +func (e ErrMissingFeature) Is(err error) bool +``` + + + + +## type [ErrTransactionReferenceConflict]() + + + +```go +type ErrTransactionReferenceConflict struct { + // contains filtered or unexported fields +} +``` + + +### func [NewErrTransactionReferenceConflict]() + +```go +func NewErrTransactionReferenceConflict(reference string) ErrTransactionReferenceConflict +``` + + + + +### func \(ErrTransactionReferenceConflict\) [Error]() + +```go +func (e ErrTransactionReferenceConflict) Error() string +``` + + + + +### func \(ErrTransactionReferenceConflict\) [Is]() + +```go +func (e ErrTransactionReferenceConflict) Is(err error) bool +``` + + + + +## type [ExportWriter]() + + + +```go +type ExportWriter interface { + Write(ctx context.Context, log ledger.Log) error +} +``` + + +## type [ExportWriterFn]() + + + +```go +type ExportWriterFn func(ctx context.Context, log ledger.Log) error +``` + + +### func \(ExportWriterFn\) [Write]() + +```go +func (fn ExportWriterFn) Write(ctx context.Context, log ledger.Log) error +``` + + + + +## type [FiltersForVolumes]() + + + +```go +type FiltersForVolumes struct { + PITFilter + UseInsertionDate bool + GroupLvl int +} +``` + + +## type [GetAccountQuery]() + + + +```go +type GetAccountQuery struct { + PITFilterWithVolumes + Addr string +} +``` + + +### func [NewGetAccountQuery]() + +```go +func NewGetAccountQuery(addr string) GetAccountQuery +``` + + + + +### func \(GetAccountQuery\) [WithExpandEffectiveVolumes]() + +```go +func (q GetAccountQuery) WithExpandEffectiveVolumes() GetAccountQuery +``` + + + + +### func \(GetAccountQuery\) [WithExpandVolumes]() + +```go +func (q GetAccountQuery) WithExpandVolumes() GetAccountQuery +``` + + + + +### func \(GetAccountQuery\) [WithPIT]() + +```go +func (q 
GetAccountQuery) WithPIT(pit time.Time) GetAccountQuery +``` + + + + +## type [GetAggregatedBalanceQuery]() + + + +```go +type GetAggregatedBalanceQuery struct { + PITFilter + QueryBuilder query.Builder + UseInsertionDate bool +} +``` + + +### func [NewGetAggregatedBalancesQuery]() + +```go +func NewGetAggregatedBalancesQuery(filter PITFilter, qb query.Builder, useInsertionDate bool) GetAggregatedBalanceQuery +``` + + + + +## type [GetLogsQuery]() + + + +```go +type GetLogsQuery bunpaginate.ColumnPaginatedQuery[PaginatedQueryOptions[any]] +``` + + +### func [NewListLogsQuery]() + +```go +func NewListLogsQuery(options PaginatedQueryOptions[any]) GetLogsQuery +``` + + + + +### func \(GetLogsQuery\) [WithOrder]() + +```go +func (q GetLogsQuery) WithOrder(order bunpaginate.Order) GetLogsQuery +``` + + + + +## type [GetTransactionQuery]() + + + +```go +type GetTransactionQuery struct { + PITFilterWithVolumes + ID int +} +``` + + +### func [NewGetTransactionQuery]() + +```go +func NewGetTransactionQuery(id int) GetTransactionQuery +``` + + + + +### func \(GetTransactionQuery\) [WithExpandEffectiveVolumes]() + +```go +func (q GetTransactionQuery) WithExpandEffectiveVolumes() GetTransactionQuery +``` + + + + +### func \(GetTransactionQuery\) [WithExpandVolumes]() + +```go +func (q GetTransactionQuery) WithExpandVolumes() GetTransactionQuery +``` + + + + +## type [GetVolumesWithBalancesQuery]() + + + +```go +type GetVolumesWithBalancesQuery bunpaginate.OffsetPaginatedQuery[PaginatedQueryOptions[FiltersForVolumes]] +``` + + +### func [NewGetVolumesWithBalancesQuery]() + +```go +func NewGetVolumesWithBalancesQuery(opts PaginatedQueryOptions[FiltersForVolumes]) GetVolumesWithBalancesQuery +``` + + + + +## type [ListAccountsQuery]() + + + +```go +type ListAccountsQuery bunpaginate.OffsetPaginatedQuery[PaginatedQueryOptions[PITFilterWithVolumes]] +``` + + +### func [NewListAccountsQuery]() + +```go +func NewListAccountsQuery(opts PaginatedQueryOptions[PITFilterWithVolumes]) ListAccountsQuery +``` + + + + +### func \(ListAccountsQuery\) [WithExpandEffectiveVolumes]() + +```go +func (q ListAccountsQuery) WithExpandEffectiveVolumes() ListAccountsQuery +``` + + + + +### func \(ListAccountsQuery\) [WithExpandVolumes]() + +```go +func (q ListAccountsQuery) WithExpandVolumes() ListAccountsQuery +``` + + + + +## type [ListLedgersQuery]() + + + +```go +type ListLedgersQuery bunpaginate.OffsetPaginatedQuery[PaginatedQueryOptions[struct{}]] +``` + + +### func [NewListLedgersQuery]() + +```go +func NewListLedgersQuery(pageSize uint64) ListLedgersQuery +``` + + + + +## type [ListTransactionsQuery]() + + + +```go +type ListTransactionsQuery bunpaginate.ColumnPaginatedQuery[PaginatedQueryOptions[PITFilterWithVolumes]] +``` + + +### func [NewListTransactionsQuery]() + +```go +func NewListTransactionsQuery(options PaginatedQueryOptions[PITFilterWithVolumes]) ListTransactionsQuery +``` + + + + +### func \(ListTransactionsQuery\) [WithColumn]() + +```go +func (q ListTransactionsQuery) WithColumn(column string) ListTransactionsQuery +``` + + + + +## type [Listener]() + + + +```go +type Listener interface { + CommittedTransactions(ctx context.Context, ledger string, res ledger.Transaction, accountMetadata ledger.AccountMetadata) + SavedMetadata(ctx context.Context, ledger string, targetType, id string, metadata metadata.Metadata) + RevertedTransaction(ctx context.Context, ledger string, reverted, revert ledger.Transaction) + DeletedMetadata(ctx context.Context, ledger string, targetType string, targetID any, key string) +} 
+``` + + +## type [Machine]() + + + +```go +type Machine interface { + Execute(context.Context, TX, map[string]string) (*MachineResult, error) +} +``` + + +## type [MachineFactory]() + + + +```go +type MachineFactory interface { + // Make can return following errors: + // * ErrCompilationFailed + Make(script string) (Machine, error) +} +``` + + +## type [MachineResult]() + + + +```go +type MachineResult struct { + Postings ledger.Postings `json:"postings"` + Metadata metadata.Metadata `json:"metadata"` + AccountMetadata map[string]metadata.Metadata +} +``` + + +## type [PITFilter]() + + + +```go +type PITFilter struct { + PIT *time.Time `json:"pit"` + OOT *time.Time `json:"oot"` +} +``` + + +## type [PITFilterWithVolumes]() + + + +```go +type PITFilterWithVolumes struct { + PITFilter + ExpandVolumes bool `json:"volumes"` + ExpandEffectiveVolumes bool `json:"effectiveVolumes"` +} +``` + + +## type [PaginatedQueryOptions]() + + + +```go +type PaginatedQueryOptions[T any] struct { + QueryBuilder query.Builder `json:"qb"` + PageSize uint64 `json:"pageSize"` + Options T `json:"options"` +} +``` + + +### func [NewPaginatedQueryOptions]() + +```go +func NewPaginatedQueryOptions[T any](options T) PaginatedQueryOptions[T] +``` + + + + +### func \(\*PaginatedQueryOptions\[T\]\) [UnmarshalJSON]() + +```go +func (opts *PaginatedQueryOptions[T]) UnmarshalJSON(data []byte) error +``` + + + + +### func \(PaginatedQueryOptions\[T\]\) [WithPageSize]() + +```go +func (opts PaginatedQueryOptions[T]) WithPageSize(pageSize uint64) PaginatedQueryOptions[T] +``` + + + + +### func \(PaginatedQueryOptions\[T\]\) [WithQueryBuilder]() + +```go +func (opts PaginatedQueryOptions[T]) WithQueryBuilder(qb query.Builder) PaginatedQueryOptions[T] +``` + + + + +## type [Parameters]() + + + +```go +type Parameters[INPUT any] struct { + DryRun bool + IdempotencyKey string + Input INPUT +} +``` + + +## type [RevertTransaction]() + + + +```go +type RevertTransaction struct { + Force bool + AtEffectiveDate bool + TransactionID int +} +``` + + +## type [RunScript]() + + + +```go +type RunScript = vm.RunScript +``` + + +## type [SaveAccountMetadata]() + + + +```go +type SaveAccountMetadata struct { + Address string + Metadata metadata.Metadata +} +``` + + +## type [SaveTransactionMetadata]() + + + +```go +type SaveTransactionMetadata struct { + TransactionID int + Metadata metadata.Metadata +} +``` + + +## type [Script]() + + + +```go +type Script = vm.Script +``` + + +## type [ScriptV1]() + + + +```go +type ScriptV1 = vm.ScriptV1 +``` + + +## type [State]() + + + +```go +type State struct { + // contains filtered or unexported fields +} +``` + + +## type [StateRegistry]() + + + +```go +type StateRegistry struct { + // contains filtered or unexported fields +} +``` + + +### func [NewStateRegistry]() + +```go +func NewStateRegistry() *StateRegistry +``` + + + + +### func \(\*StateRegistry\) [IsUpToDate]() + +```go +func (r *StateRegistry) IsUpToDate(name string) bool +``` + + + + +### func \(\*StateRegistry\) [SetUpToDate]() + +```go +func (r *StateRegistry) SetUpToDate(name string) +``` + + + + +### func \(\*StateRegistry\) [Upsert]() + +```go +func (r *StateRegistry) Upsert(l ledger.Ledger) bool +``` + + + + +## type [Stats]() + + + +```go +type Stats struct { + Transactions int `json:"transactions"` + Accounts int `json:"accounts"` +} +``` + + +## type [Store]() + + + +```go +type Store interface { + WithTX(context.Context, *sql.TxOptions, func(TX) (bool, error)) error + GetDB() bun.IDB + ListLogs(ctx context.Context, q 
GetLogsQuery) (*bunpaginate.Cursor[ledger.Log], error) + ReadLogWithIdempotencyKey(ctx context.Context, ik string) (*ledger.Log, error) + + ListTransactions(ctx context.Context, q ListTransactionsQuery) (*bunpaginate.Cursor[ledger.Transaction], error) + CountTransactions(ctx context.Context, q ListTransactionsQuery) (int, error) + GetTransaction(ctx context.Context, query GetTransactionQuery) (*ledger.Transaction, error) + CountAccounts(ctx context.Context, a ListAccountsQuery) (int, error) + ListAccounts(ctx context.Context, a ListAccountsQuery) (*bunpaginate.Cursor[ledger.Account], error) + GetAccount(ctx context.Context, q GetAccountQuery) (*ledger.Account, error) + GetAggregatedBalances(ctx context.Context, q GetAggregatedBalanceQuery) (ledger.BalancesByAssets, error) + GetVolumesWithBalances(ctx context.Context, q GetVolumesWithBalancesQuery) (*bunpaginate.Cursor[ledger.VolumesWithBalanceByAssetByAccount], error) + IsUpToDate(ctx context.Context) (bool, error) + GetMigrationsInfo(ctx context.Context) ([]migrations.Info, error) +} +``` + + +## type [TX]() + + + +```go +type TX interface { + GetAccount(ctx context.Context, query GetAccountQuery) (*ledger.Account, error) + // GetBalances must return the balances and lock the accounts until the end of the TX + GetBalances(ctx context.Context, query BalanceQuery) (Balances, error) + CommitTransaction(ctx context.Context, transaction *ledger.Transaction) error + // RevertTransaction reverts the transaction with identifier id + // It returns: + // * the reverted transaction + // * a boolean indicating whether the transaction has been reverted; false indicates an already reverted transaction (unless error != nil) + // * an error + RevertTransaction(ctx context.Context, id int) (*ledger.Transaction, bool, error) + UpdateTransactionMetadata(ctx context.Context, transactionID int, m metadata.Metadata) (*ledger.Transaction, bool, error) + DeleteTransactionMetadata(ctx context.Context, transactionID int, key string) (*ledger.Transaction, bool, error) + UpdateAccountsMetadata(ctx context.Context, m map[string]metadata.Metadata) error + // UpsertAccount returns a boolean indicating whether the account was upserted + UpsertAccount(ctx context.Context, account *ledger.Account) (bool, error) + DeleteAccountMetadata(ctx context.Context, address, key string) error + InsertLog(ctx context.Context, log *ledger.Log) error + + LockLedger(ctx context.Context) error + ListLogs(ctx context.Context, q GetLogsQuery) (*bunpaginate.Cursor[ledger.Log], error) +} +``` + +Generated by [gomarkdoc]() diff --git a/internal/controller/ledger/controller.go b/internal/controller/ledger/controller.go new file mode 100644 index 000000000..fc1ca32ba --- /dev/null +++ b/internal/controller/ledger/controller.go @@ -0,0 +1,101 @@ +package ledger + +import ( + "context" + "github.com/formancehq/go-libs/v2/metadata" + "github.com/formancehq/ledger/internal/machine/vm" + + "github.com/formancehq/go-libs/v2/bun/bunpaginate" + "github.com/formancehq/go-libs/v2/migrations" + ledger "github.com/formancehq/ledger/internal" +) + +//go:generate mockgen -write_source_comment=false -write_package_comment=false -source controller.go -destination controller_generated_test.go -package ledger .
Controller + +type Controller interface { + // IsDatabaseUpToDate checks whether the ledger store is up to date, including the bucket and the ledger specifics + // It returns true if up to date + IsDatabaseUpToDate(ctx context.Context) (bool, error) + GetMigrationsInfo(ctx context.Context) ([]migrations.Info, error) + GetStats(ctx context.Context) (Stats, error) + + GetAccount(ctx context.Context, query GetAccountQuery) (*ledger.Account, error) + ListAccounts(ctx context.Context, query ListAccountsQuery) (*bunpaginate.Cursor[ledger.Account], error) + CountAccounts(ctx context.Context, query ListAccountsQuery) (int, error) + ListLogs(ctx context.Context, query GetLogsQuery) (*bunpaginate.Cursor[ledger.Log], error) + CountTransactions(ctx context.Context, query ListTransactionsQuery) (int, error) + ListTransactions(ctx context.Context, query ListTransactionsQuery) (*bunpaginate.Cursor[ledger.Transaction], error) + GetTransaction(ctx context.Context, query GetTransactionQuery) (*ledger.Transaction, error) + GetVolumesWithBalances(ctx context.Context, q GetVolumesWithBalancesQuery) (*bunpaginate.Cursor[ledger.VolumesWithBalanceByAssetByAccount], error) + GetAggregatedBalances(ctx context.Context, q GetAggregatedBalanceQuery) (ledger.BalancesByAssets, error) + + // CreateTransaction accepts a numscript script and returns a transaction + // It can return the following errors: + // * ErrCompilationFailed + // * ErrMetadataOverride + // * ErrInvalidVars + // * ErrTransactionReferenceConflict + // * ErrIdempotencyKeyConflict + // * ErrInsufficientFunds + CreateTransaction(ctx context.Context, parameters Parameters[RunScript]) (*ledger.CreatedTransaction, error) + // RevertTransaction allows reverting a transaction. + // It can return the following errors: + // * ErrInsufficientFunds + // * ErrAlreadyReverted + // * ErrNotFound + // Parameter force indicates that we want to force revert the transaction even if the accounts do not have sufficient funds + // Parameter atEffectiveDate indicates that we want to set the timestamp of the newly created transaction to the timestamp of the reverted transaction + RevertTransaction(ctx context.Context, parameters Parameters[RevertTransaction]) (*ledger.RevertedTransaction, error) + // SaveTransactionMetadata allows adding metadata to an existing transaction + // It can return the following errors: + // * ErrNotFound + SaveTransactionMetadata(ctx context.Context, parameters Parameters[SaveTransactionMetadata]) error + // SaveAccountMetadata allows adding metadata to an account + // If the account does not exist, it is created + SaveAccountMetadata(ctx context.Context, parameters Parameters[SaveAccountMetadata]) error + // DeleteTransactionMetadata allows removing metadata from a transaction + // It can return the following errors: + // * ErrNotFound: indicates the transaction was not found OR the metadata does not exist on the transaction + DeleteTransactionMetadata(ctx context.Context, parameters Parameters[DeleteTransactionMetadata]) error + // DeleteAccountMetadata allows removing metadata from an account + // It can return the following errors: + // * ErrNotFound: indicates the account was not found OR the metadata does not exist on the account + DeleteAccountMetadata(ctx context.Context, parameters Parameters[DeleteAccountMetadata]) error + // Import allows importing the logs of an existing ledger + // It can return the following errors: + // * ErrImport + // Log hashes must be valid and the ledger.Ledger must be in the 'initializing' state + Import(ctx context.Context, stream chan ledger.Log) error + // Export allows callers to
export the logs of a ledger + Export(ctx context.Context, w ExportWriter) error +} + +type RunScript = vm.RunScript +type Script = vm.Script +type ScriptV1 = vm.ScriptV1 + +type RevertTransaction struct { + Force bool + AtEffectiveDate bool + TransactionID int +} + +type SaveTransactionMetadata struct { + TransactionID int + Metadata metadata.Metadata +} + +type SaveAccountMetadata struct { + Address string + Metadata metadata.Metadata +} + +type DeleteTransactionMetadata struct { + TransactionID int + Key string +} + +type DeleteAccountMetadata struct { + Address string + Key string +} diff --git a/internal/controller/ledger/controller_default.go b/internal/controller/ledger/controller_default.go new file mode 100644 index 000000000..d5d8bdf56 --- /dev/null +++ b/internal/controller/ledger/controller_default.go @@ -0,0 +1,475 @@ +package ledger + +import ( + "context" + "database/sql" + "fmt" + "math/big" + "reflect" + + . "github.com/formancehq/go-libs/v2/collectionutils" + "go.opentelemetry.io/otel/metric" + noopmetrics "go.opentelemetry.io/otel/metric/noop" + "go.opentelemetry.io/otel/trace" + nooptracer "go.opentelemetry.io/otel/trace/noop" + + "github.com/formancehq/go-libs/v2/migrations" + "github.com/formancehq/ledger/internal/tracing" + + "github.com/formancehq/ledger/internal/machine" + + "github.com/formancehq/go-libs/v2/platform/postgres" + + "errors" + + "github.com/formancehq/go-libs/v2/bun/bunpaginate" + "github.com/formancehq/go-libs/v2/logging" + "github.com/formancehq/go-libs/v2/metadata" + "github.com/google/uuid" + + ledger "github.com/formancehq/ledger/internal" +) + +type DefaultController struct { + store Store + parser NumscriptParser + ledger ledger.Ledger + + tracer trace.Tracer + meter metric.Meter + + executeMachineHistogram metric.Int64Histogram + deadLockCounter metric.Int64Counter + + createTransactionLp *logProcessor[RunScript, ledger.CreatedTransaction] + revertTransactionLp *logProcessor[RevertTransaction, ledger.RevertedTransaction] + saveTransactionMetadataLp *logProcessor[SaveTransactionMetadata, ledger.SavedMetadata] + saveAccountMetadataLp *logProcessor[SaveAccountMetadata, ledger.SavedMetadata] + deleteTransactionMetadataLp *logProcessor[DeleteTransactionMetadata, ledger.DeletedMetadata] + deleteAccountMetadataLp *logProcessor[DeleteAccountMetadata, ledger.DeletedMetadata] +} + +func NewDefaultController( + l ledger.Ledger, + store Store, + numscriptParser NumscriptParser, + opts ...DefaultControllerOption, +) *DefaultController { + ret := &DefaultController{ + store: store, + ledger: l, + parser: numscriptParser, + } + + for _, opt := range append(defaultOptions, opts...) 
{ + opt(ret) + } + + var err error + ret.executeMachineHistogram, err = ret.meter.Int64Histogram("numscript.run") + if err != nil { + panic(err) + } + ret.deadLockCounter, err = ret.meter.Int64Counter("deadlocks") + if err != nil { + panic(err) + } + + ret.createTransactionLp = newLogProcessor[RunScript, ledger.CreatedTransaction]("CreateTransaction", ret.deadLockCounter) + ret.revertTransactionLp = newLogProcessor[RevertTransaction, ledger.RevertedTransaction]("RevertTransaction", ret.deadLockCounter) + ret.saveTransactionMetadataLp = newLogProcessor[SaveTransactionMetadata, ledger.SavedMetadata]("SaveTransactionMetadata", ret.deadLockCounter) + ret.saveAccountMetadataLp = newLogProcessor[SaveAccountMetadata, ledger.SavedMetadata]("SaveAccountMetadata", ret.deadLockCounter) + ret.deleteTransactionMetadataLp = newLogProcessor[DeleteTransactionMetadata, ledger.DeletedMetadata]("DeleteTransactionMetadata", ret.deadLockCounter) + ret.deleteAccountMetadataLp = newLogProcessor[DeleteAccountMetadata, ledger.DeletedMetadata]("DeleteAccountMetadata", ret.deadLockCounter) + + return ret +} + +func (ctrl *DefaultController) GetMigrationsInfo(ctx context.Context) ([]migrations.Info, error) { + return ctrl.store.GetMigrationsInfo(ctx) +} + +func (ctrl *DefaultController) ListTransactions(ctx context.Context, q ListTransactionsQuery) (*bunpaginate.Cursor[ledger.Transaction], error) { + return ctrl.store.ListTransactions(ctx, q) +} + +func (ctrl *DefaultController) CountTransactions(ctx context.Context, q ListTransactionsQuery) (int, error) { + return ctrl.store.CountTransactions(ctx, q) +} + +func (ctrl *DefaultController) GetTransaction(ctx context.Context, query GetTransactionQuery) (*ledger.Transaction, error) { + return ctrl.store.GetTransaction(ctx, query) +} + +func (ctrl *DefaultController) CountAccounts(ctx context.Context, a ListAccountsQuery) (int, error) { + return ctrl.store.CountAccounts(ctx, a) +} + +func (ctrl *DefaultController) ListAccounts(ctx context.Context, a ListAccountsQuery) (*bunpaginate.Cursor[ledger.Account], error) { + return ctrl.store.ListAccounts(ctx, a) +} + +func (ctrl *DefaultController) GetAccount(ctx context.Context, q GetAccountQuery) (*ledger.Account, error) { + return ctrl.store.GetAccount(ctx, q) +} + +func (ctrl *DefaultController) GetAggregatedBalances(ctx context.Context, q GetAggregatedBalanceQuery) (ledger.BalancesByAssets, error) { + return ctrl.store.GetAggregatedBalances(ctx, q) +} + +func (ctrl *DefaultController) ListLogs(ctx context.Context, q GetLogsQuery) (*bunpaginate.Cursor[ledger.Log], error) { + return ctrl.store.ListLogs(ctx, q) +} + +func (ctrl *DefaultController) Import(ctx context.Context, stream chan ledger.Log) error { + // Use the serializable isolation level to ensure no concurrent request uses the store. + // If a concurrent transaction is made while we are importing logs, the importing transaction will + // be canceled with a serialization error. + err := ctrl.store.WithTX(ctx, &sql.TxOptions{Isolation: sql.LevelSerializable}, func(sqlTx TX) (bool, error) { + + // Due to the serializable isolation level, and since we explicitly ask for the ledger state in the sql transaction context, + // if the state changes, the sql transaction will be aborted with a serialization error + if err := sqlTx.LockLedger(ctx); err != nil { + return false, fmt.Errorf("failed to lock ledger: %w", err) + } + + // We can import only if the ledger is empty.
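+ // Listing a single log is enough to detect prior activity: any existing entry means the ledger already has history, and the import is rejected below.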
+ logs, err := sqlTx.ListLogs(ctx, NewListLogsQuery(PaginatedQueryOptions[any]{ + PageSize: 1, + })) + if err != nil { + return false, fmt.Errorf("error listing logs: %w", err) + } + + if len(logs.Data) > 0 { + return false, newErrImport(errors.New("ledger must be empty")) + } + + for log := range stream { + if err := ctrl.importLog(ctx, sqlTx, log); err != nil { + return false, fmt.Errorf("importing log %d: %w", log.ID, err) + } + } + + return true, nil + }) + if err != nil { + if errors.Is(err, postgres.ErrSerialization) { + return newErrImport(errors.New("concurrent transaction occur" + + "red, cannot import the ledger")) + } + } + + return err +} + +func (ctrl *DefaultController) importLog(ctx context.Context, sqlTx TX, log ledger.Log) error { + switch payload := log.Data.(type) { + case ledger.CreatedTransaction: + if err := sqlTx.CommitTransaction(ctx, &payload.Transaction); err != nil { + return fmt.Errorf("failed to commit transaction: %w", err) + } + if len(payload.AccountMetadata) > 0 { + if err := sqlTx.UpdateAccountsMetadata(ctx, payload.AccountMetadata); err != nil { + return fmt.Errorf("updating metadata of accounts '%s': %w", Keys(payload.AccountMetadata), err) + } + } + case ledger.RevertedTransaction: + _, _, err := sqlTx.RevertTransaction(ctx, payload.RevertedTransaction.ID) + if err != nil { + return fmt.Errorf("failed to revert transaction: %w", err) + } + case ledger.SavedMetadata: + switch payload.TargetType { + case ledger.MetaTargetTypeTransaction: + if _, _, err := sqlTx.UpdateTransactionMetadata(ctx, payload.TargetID.(int), payload.Metadata); err != nil { + return fmt.Errorf("failed to update transaction metadata: %w", err) + } + case ledger.MetaTargetTypeAccount: + if err := sqlTx.UpdateAccountsMetadata(ctx, ledger.AccountMetadata{ + payload.TargetID.(string): payload.Metadata, + }); err != nil { + return fmt.Errorf("failed to update account metadata: %w", err) + } + } + case ledger.DeletedMetadata: + switch payload.TargetType { + case ledger.MetaTargetTypeTransaction: + if _, _, err := sqlTx.DeleteTransactionMetadata(ctx, payload.TargetID.(int), payload.Key); err != nil { + return fmt.Errorf("failed to delete transaction metadata: %w", err) + } + case ledger.MetaTargetTypeAccount: + if err := sqlTx.DeleteAccountMetadata(ctx, payload.TargetID.(string), payload.Key); err != nil { + return fmt.Errorf("failed to delete account metadata: %w", err) + } + } + } + + logCopy := log + if err := sqlTx.InsertLog(ctx, &log); err != nil { + return fmt.Errorf("failed to insert log: %w", err) + } + + if ctrl.ledger.HasFeature(ledger.FeatureHashLogs, "SYNC") { + if !reflect.DeepEqual(log.Hash, logCopy.Hash) { + return newErrInvalidHash(log.ID, logCopy.Hash, log.Hash) + } + } + + return nil +} + +func (ctrl *DefaultController) Export(ctx context.Context, w ExportWriter) error { + return bunpaginate.Iterate( + ctx, + NewListLogsQuery(NewPaginatedQueryOptions[any](nil).WithPageSize(100)). 
+ WithOrder(bunpaginate.OrderAsc), + func(ctx context.Context, q GetLogsQuery) (*bunpaginate.Cursor[ledger.Log], error) { + return ctrl.store.ListLogs(ctx, q) + }, + func(cursor *bunpaginate.Cursor[ledger.Log]) error { + for _, data := range cursor.Data { + if err := w.Write(ctx, data); err != nil { + return err + } + } + return nil + }, + ) +} + +func (ctrl *DefaultController) IsDatabaseUpToDate(ctx context.Context) (bool, error) { + return ctrl.store.IsUpToDate(ctx) +} + +func (ctrl *DefaultController) GetVolumesWithBalances(ctx context.Context, q GetVolumesWithBalancesQuery) (*bunpaginate.Cursor[ledger.VolumesWithBalanceByAssetByAccount], error) { + return ctrl.store.GetVolumesWithBalances(ctx, q) +} + +func (ctrl *DefaultController) createTransaction(ctx context.Context, sqlTX TX, parameters Parameters[RunScript]) (*ledger.CreatedTransaction, error) { + + logger := logging.FromContext(ctx).WithField("req", uuid.NewString()[:8]) + ctx = logging.ContextWithLogger(ctx, logger) + + m, err := ctrl.parser.Parse(parameters.Input.Plain) + if err != nil { + return nil, fmt.Errorf("failed to compile script: %w", err) + } + + result, err := tracing.TraceWithMetric( + ctx, + "ExecuteMachine", + ctrl.tracer, + ctrl.executeMachineHistogram, + func(ctx context.Context) (*NumscriptExecutionResult, error) { + return m.Execute(ctx, sqlTX, parameters.Input.Vars) + }, + ) + if err != nil { + return nil, fmt.Errorf("failed to execute program: %w", err) + } + + if len(result.Postings) == 0 { + return nil, ErrNoPostings + } + + finalMetadata := result.Metadata + if finalMetadata == nil { + finalMetadata = metadata.Metadata{} + } + for k, v := range parameters.Input.Metadata { + if finalMetadata[k] != "" { + return nil, newErrMetadataOverride(k) + } + finalMetadata[k] = v + } + + transaction := ledger.NewTransaction(). + WithPostings(result.Postings...). + WithMetadata(finalMetadata). + WithTimestamp(parameters.Input.Timestamp). 
+ WithReference(parameters.Input.Reference) + err = sqlTX.CommitTransaction(ctx, &transaction) + if err != nil { + return nil, err + } + + if len(result.AccountMetadata) > 0 { + if err := sqlTX.UpdateAccountsMetadata(ctx, result.AccountMetadata); err != nil { + return nil, fmt.Errorf("updating metadata of accounts '%s': %w", Keys(result.AccountMetadata), err) + } + } + + return &ledger.CreatedTransaction{ + Transaction: transaction, + AccountMetadata: result.AccountMetadata, + }, err +} + +func (ctrl *DefaultController) CreateTransaction(ctx context.Context, parameters Parameters[RunScript]) (*ledger.CreatedTransaction, error) { + return ctrl.createTransactionLp.forgeLog(ctx, ctrl.store, parameters, ctrl.createTransaction) +} + +func (ctrl *DefaultController) revertTransaction(ctx context.Context, sqlTX TX, parameters Parameters[RevertTransaction]) (*ledger.RevertedTransaction, error) { + var ( + hasBeenReverted bool + err error + ) + originalTransaction, hasBeenReverted, err := sqlTX.RevertTransaction(ctx, parameters.Input.TransactionID) + if err != nil { + return nil, err + } + if !hasBeenReverted { + return nil, newErrAlreadyReverted(parameters.Input.TransactionID) + } + + bq := originalTransaction.InvolvedAccountAndAssets() + + balances, err := sqlTX.GetBalances(ctx, bq) + if err != nil { + return nil, fmt.Errorf("failed to get balances: %w", err) + } + + reversedTx := originalTransaction.Reverse() + if parameters.Input.AtEffectiveDate { + reversedTx = reversedTx.WithTimestamp(originalTransaction.Timestamp) + } else { + reversedTx = reversedTx.WithTimestamp(*originalTransaction.RevertedAt) + } + + // Check balances after the revert; all balances must be greater than or equal to 0 + if !parameters.Input.Force { + for _, posting := range reversedTx.Postings { + balances[posting.Source][posting.Asset] = balances[posting.Source][posting.Asset].Add( + balances[posting.Source][posting.Asset], + big.NewInt(0).Neg(posting.Amount), + ) + balances[posting.Destination][posting.Asset] = balances[posting.Destination][posting.Asset].Add( + balances[posting.Destination][posting.Asset], + big.NewInt(0).Set(posting.Amount), + ) + } + + for account, forAccount := range balances { + for asset, finalBalance := range forAccount { + if finalBalance.Cmp(new(big.Int)) < 0 { + // todo(waiting): break dependency on machine package + // notes(gfyrag): wait for the new interpreter + return nil, machine.NewErrInsufficientFund("insufficient fund for %s/%s", account, asset) + } + } + } + } + + err = sqlTX.CommitTransaction(ctx, &reversedTx) + if err != nil { + return nil, fmt.Errorf("failed to insert transaction: %w", err) + } + + return &ledger.RevertedTransaction{ + RevertedTransaction: *originalTransaction, + RevertTransaction: reversedTx, + }, nil +} + +func (ctrl *DefaultController) RevertTransaction(ctx context.Context, parameters Parameters[RevertTransaction]) (*ledger.RevertedTransaction, error) { + return ctrl.revertTransactionLp.forgeLog(ctx, ctrl.store, parameters, ctrl.revertTransaction) +} + +func (ctrl *DefaultController) saveTransactionMetadata(ctx context.Context, sqlTX TX, parameters Parameters[SaveTransactionMetadata]) (*ledger.SavedMetadata, error) { + if _, _, err := sqlTX.UpdateTransactionMetadata(ctx, parameters.Input.TransactionID, parameters.Input.Metadata); err != nil { + return nil, err + } + + return &ledger.SavedMetadata{ + TargetType: ledger.MetaTargetTypeTransaction, + TargetID: parameters.Input.TransactionID, + Metadata: parameters.Input.Metadata, + }, nil +} + +func (ctrl *DefaultController)
SaveTransactionMetadata(ctx context.Context, parameters Parameters[SaveTransactionMetadata]) error { + _, err := ctrl.saveTransactionMetadataLp.forgeLog(ctx, ctrl.store, parameters, ctrl.saveTransactionMetadata) + return err +} + +func (ctrl *DefaultController) saveAccountMetadata(ctx context.Context, sqlTX TX, parameters Parameters[SaveAccountMetadata]) (*ledger.SavedMetadata, error) { + if _, err := sqlTX.UpsertAccount(ctx, &ledger.Account{ + Address: parameters.Input.Address, + Metadata: parameters.Input.Metadata, + }); err != nil { + return nil, err + } + + return &ledger.SavedMetadata{ + TargetType: ledger.MetaTargetTypeAccount, + TargetID: parameters.Input.Address, + Metadata: parameters.Input.Metadata, + }, nil +} + +func (ctrl *DefaultController) SaveAccountMetadata(ctx context.Context, parameters Parameters[SaveAccountMetadata]) error { + _, err := ctrl.saveAccountMetadataLp.forgeLog(ctx, ctrl.store, parameters, ctrl.saveAccountMetadata) + + return err +} + +func (ctrl *DefaultController) deleteTransactionMetadata(ctx context.Context, sqlTX TX, parameters Parameters[DeleteTransactionMetadata]) (*ledger.DeletedMetadata, error) { + _, modified, err := sqlTX.DeleteTransactionMetadata(ctx, parameters.Input.TransactionID, parameters.Input.Key) + if err != nil { + return nil, err + } + + if !modified { + return nil, postgres.ErrNotFound + } + + return &ledger.DeletedMetadata{ + TargetType: ledger.MetaTargetTypeTransaction, + TargetID: parameters.Input.TransactionID, + Key: parameters.Input.Key, + }, nil +} + +func (ctrl *DefaultController) DeleteTransactionMetadata(ctx context.Context, parameters Parameters[DeleteTransactionMetadata]) error { + _, err := ctrl.deleteTransactionMetadataLp.forgeLog(ctx, ctrl.store, parameters, ctrl.deleteTransactionMetadata) + return err +} + +func (ctrl *DefaultController) deleteAccountMetadata(ctx context.Context, sqlTX TX, parameters Parameters[DeleteAccountMetadata]) (*ledger.DeletedMetadata, error) { + err := sqlTX.DeleteAccountMetadata(ctx, parameters.Input.Address, parameters.Input.Key) + if err != nil { + return nil, err + } + + return &ledger.DeletedMetadata{ + TargetType: ledger.MetaTargetTypeAccount, + TargetID: parameters.Input.Address, + Key: parameters.Input.Key, + }, nil +} + +func (ctrl *DefaultController) DeleteAccountMetadata(ctx context.Context, parameters Parameters[DeleteAccountMetadata]) error { + _, err := ctrl.deleteAccountMetadataLp.forgeLog(ctx, ctrl.store, parameters, ctrl.deleteAccountMetadata) + return err +} + +var _ Controller = (*DefaultController)(nil) + +type DefaultControllerOption func(controller *DefaultController) + +var defaultOptions = []DefaultControllerOption{ + WithMeter(noopmetrics.Meter{}), + WithTracer(nooptracer.Tracer{}), +} + +func WithMeter(meter metric.Meter) DefaultControllerOption { + return func(controller *DefaultController) { + controller.meter = meter + } +} +func WithTracer(tracer trace.Tracer) DefaultControllerOption { + return func(controller *DefaultController) { + controller.tracer = tracer + } +} diff --git a/internal/controller/ledger/controller_default_test.go b/internal/controller/ledger/controller_default_test.go new file mode 100644 index 000000000..e6165068f --- /dev/null +++ b/internal/controller/ledger/controller_default_test.go @@ -0,0 +1,405 @@ +package ledger + +import ( + "context" + "database/sql" + "math/big" + "testing" + + "github.com/formancehq/go-libs/v2/pointer" + "github.com/formancehq/go-libs/v2/time" + + "github.com/formancehq/go-libs/v2/bun/bunpaginate" + 
"github.com/formancehq/go-libs/v2/migrations" + + "github.com/formancehq/go-libs/v2/logging" + "github.com/formancehq/go-libs/v2/metadata" + ledger "github.com/formancehq/ledger/internal" + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" +) + +func TestCreateTransaction(t *testing.T) { + t.Parallel() + ctrl := gomock.NewController(t) + + store := NewMockStore(ctrl) + numscriptRuntime := NewMockNumscriptRuntime(ctrl) + parser := NewMockNumscriptParser(ctrl) + sqlTX := NewMockTX(ctrl) + + l := NewDefaultController(ledger.Ledger{}, store, parser) + + runScript := RunScript{} + + parser.EXPECT(). + Parse(runScript.Plain). + Return(numscriptRuntime, nil) + + store.EXPECT(). + WithTX(gomock.Any(), nil, gomock.Any()). + DoAndReturn(func(_ context.Context, _ *sql.TxOptions, fn func(tx TX) (bool, error)) error { + _, err := fn(sqlTX) + return err + }) + + posting := ledger.NewPosting("world", "bank", "USD", big.NewInt(100)) + numscriptRuntime.EXPECT(). + Execute(gomock.Any(), sqlTX, runScript.Vars). + Return(&NumscriptExecutionResult{ + Postings: ledger.Postings{posting}, + }, nil) + + sqlTX.EXPECT(). + CommitTransaction(gomock.Any(), gomock.Any()). + Return(nil) + + sqlTX.EXPECT(). + InsertLog(gomock.Any(), gomock.Cond(func(x any) bool { + return x.(*ledger.Log).Type == ledger.NewLogType + })). + DoAndReturn(func(_ context.Context, x any) any { + return x + }) + + _, err := l.CreateTransaction(context.Background(), Parameters[RunScript]{ + Input: runScript, + }) + require.NoError(t, err) +} + +func TestRevertTransaction(t *testing.T) { + t.Parallel() + ctrl := gomock.NewController(t) + + store := NewMockStore(ctrl) + parser := NewMockNumscriptParser(ctrl) + sqlTX := NewMockTX(ctrl) + ctx := logging.TestingContext() + + l := NewDefaultController(ledger.Ledger{}, store, parser) + + store.EXPECT(). + WithTX(gomock.Any(), nil, gomock.Any()). + DoAndReturn(func(_ context.Context, _ *sql.TxOptions, fn func(tx TX) (bool, error)) error { + _, err := fn(sqlTX) + return err + }) + + txToRevert := ledger.Transaction{} + sqlTX.EXPECT(). + RevertTransaction(gomock.Any(), 1). + DoAndReturn(func(_ context.Context, _ int) (*ledger.Transaction, bool, error) { + txToRevert.RevertedAt = pointer.For(time.Now()) + return &txToRevert, true, nil + }) + + sqlTX.EXPECT(). + GetBalances(gomock.Any(), gomock.Any()). + Return(map[string]map[string]*big.Int{}, nil) + + sqlTX.EXPECT(). + CommitTransaction(gomock.Any(), gomock.Any()). + Return(nil) + + sqlTX.EXPECT(). + InsertLog(gomock.Any(), gomock.Cond(func(x any) bool { + return x.(*ledger.Log).Type == ledger.RevertedTransactionLogType + })). + Return(nil) + + _, err := l.RevertTransaction(ctx, Parameters[RevertTransaction]{ + Input: RevertTransaction{ + TransactionID: 1, + }, + }) + require.NoError(t, err) +} + +func TestSaveTransactionMetadata(t *testing.T) { + t.Parallel() + ctrl := gomock.NewController(t) + + store := NewMockStore(ctrl) + parser := NewMockNumscriptParser(ctrl) + sqlTX := NewMockTX(ctrl) + ctx := logging.TestingContext() + + l := NewDefaultController(ledger.Ledger{}, store, parser) + + store.EXPECT(). + WithTX(gomock.Any(), nil, gomock.Any()). + DoAndReturn(func(ctx context.Context, _ *sql.TxOptions, fn func(tx TX) (bool, error)) error { + _, err := fn(sqlTX) + return err + }) + + m := metadata.Metadata{ + "foo": "bar", + } + sqlTX.EXPECT(). + UpdateTransactionMetadata(gomock.Any(), 1, m). + Return(&ledger.Transaction{}, true, nil) + + sqlTX.EXPECT(). 
+ InsertLog(gomock.Any(), gomock.Cond(func(x any) bool { + return x.(*ledger.Log).Type == ledger.SetMetadataLogType + })). + Return(nil) + + err := l.SaveTransactionMetadata(ctx, Parameters[SaveTransactionMetadata]{ + Input: SaveTransactionMetadata{ + Metadata: m, + TransactionID: 1, + }, + }) + require.NoError(t, err) +} + +func TestDeleteTransactionMetadata(t *testing.T) { + t.Parallel() + ctrl := gomock.NewController(t) + + store := NewMockStore(ctrl) + parser := NewMockNumscriptParser(ctrl) + sqlTX := NewMockTX(ctrl) + ctx := logging.TestingContext() + + l := NewDefaultController(ledger.Ledger{}, store, parser) + + store.EXPECT(). + WithTX(gomock.Any(), nil, gomock.Any()). + DoAndReturn(func(ctx context.Context, _ *sql.TxOptions, fn func(tx TX) (bool, error)) error { + _, err := fn(sqlTX) + return err + }) + + sqlTX.EXPECT(). + DeleteTransactionMetadata(gomock.Any(), 1, "foo"). + Return(&ledger.Transaction{}, true, nil) + + sqlTX.EXPECT(). + InsertLog(gomock.Any(), gomock.Cond(func(x any) bool { + return x.(*ledger.Log).Type == ledger.DeleteMetadataLogType + })). + Return(nil) + + err := l.DeleteTransactionMetadata(ctx, Parameters[DeleteTransactionMetadata]{ + Input: DeleteTransactionMetadata{ + TransactionID: 1, + Key: "foo", + }, + }) + require.NoError(t, err) +} + +func TestListTransactions(t *testing.T) { + t.Parallel() + ctrl := gomock.NewController(t) + + store := NewMockStore(ctrl) + parser := NewMockNumscriptParser(ctrl) + ctx := logging.TestingContext() + + cursor := &bunpaginate.Cursor[ledger.Transaction]{} + query := NewListTransactionsQuery(NewPaginatedQueryOptions[PITFilterWithVolumes](PITFilterWithVolumes{})) + store.EXPECT(). + ListTransactions(gomock.Any(), query). + Return(cursor, nil) + + l := NewDefaultController(ledger.Ledger{}, store, parser) + ret, err := l.ListTransactions(ctx, query) + require.NoError(t, err) + require.Equal(t, cursor, ret) +} + +func TestCountAccounts(t *testing.T) { + t.Parallel() + ctrl := gomock.NewController(t) + + store := NewMockStore(ctrl) + parser := NewMockNumscriptParser(ctrl) + ctx := logging.TestingContext() + + query := NewListAccountsQuery(NewPaginatedQueryOptions[PITFilterWithVolumes](PITFilterWithVolumes{})) + store.EXPECT().CountAccounts(gomock.Any(), query).Return(1, nil) + + l := NewDefaultController(ledger.Ledger{}, store, parser) + count, err := l.CountAccounts(ctx, query) + require.NoError(t, err) + require.Equal(t, 1, count) +} + +func TestGetTransaction(t *testing.T) { + t.Parallel() + ctrl := gomock.NewController(t) + + store := NewMockStore(ctrl) + parser := NewMockNumscriptParser(ctrl) + ctx := logging.TestingContext() + + tx := ledger.Transaction{} + query := NewGetTransactionQuery(0) + store.EXPECT(). + GetTransaction(gomock.Any(), query). + Return(&tx, nil) + + l := NewDefaultController(ledger.Ledger{}, store, parser) + ret, err := l.GetTransaction(ctx, query) + require.NoError(t, err) + require.Equal(t, tx, *ret) +} + +func TestGetAccount(t *testing.T) { + t.Parallel() + ctrl := gomock.NewController(t) + + store := NewMockStore(ctrl) + parser := NewMockNumscriptParser(ctrl) + ctx := logging.TestingContext() + + account := ledger.Account{} + query := NewGetAccountQuery("world") + store.EXPECT(). + GetAccount(gomock.Any(), query). 
+ Return(&account, nil) + + l := NewDefaultController(ledger.Ledger{}, store, parser) + ret, err := l.GetAccount(ctx, query) + require.NoError(t, err) + require.Equal(t, account, *ret) +} + +func TestCountTransactions(t *testing.T) { + t.Parallel() + ctrl := gomock.NewController(t) + + store := NewMockStore(ctrl) + parser := NewMockNumscriptParser(ctrl) + ctx := logging.TestingContext() + + query := NewListTransactionsQuery(NewPaginatedQueryOptions[PITFilterWithVolumes](PITFilterWithVolumes{})) + store.EXPECT().CountTransactions(gomock.Any(), query).Return(1, nil) + + l := NewDefaultController(ledger.Ledger{}, store, parser) + count, err := l.CountTransactions(ctx, query) + require.NoError(t, err) + require.Equal(t, 1, count) +} + +func TestListAccounts(t *testing.T) { + t.Parallel() + ctrl := gomock.NewController(t) + + store := NewMockStore(ctrl) + parser := NewMockNumscriptParser(ctrl) + ctx := logging.TestingContext() + + cursor := &bunpaginate.Cursor[ledger.Account]{} + query := NewListAccountsQuery(NewPaginatedQueryOptions[PITFilterWithVolumes](PITFilterWithVolumes{})) + store.EXPECT(). + ListAccounts(gomock.Any(), query). + Return(cursor, nil) + + l := NewDefaultController(ledger.Ledger{}, store, parser) + ret, err := l.ListAccounts(ctx, query) + require.NoError(t, err) + require.Equal(t, cursor, ret) +} + +func TestGetAggregatedBalances(t *testing.T) { + t.Parallel() + ctrl := gomock.NewController(t) + + store := NewMockStore(ctrl) + parser := NewMockNumscriptParser(ctrl) + ctx := logging.TestingContext() + + balancesByAssets := ledger.BalancesByAssets{} + query := NewGetAggregatedBalancesQuery(PITFilter{}, nil, false) + store.EXPECT(). + GetAggregatedBalances(gomock.Any(), query). + Return(balancesByAssets, nil) + + l := NewDefaultController(ledger.Ledger{}, store, parser) + ret, err := l.GetAggregatedBalances(ctx, query) + require.NoError(t, err) + require.Equal(t, balancesByAssets, ret) +} + +func TestListLogs(t *testing.T) { + t.Parallel() + ctrl := gomock.NewController(t) + + store := NewMockStore(ctrl) + parser := NewMockNumscriptParser(ctrl) + ctx := logging.TestingContext() + + cursor := &bunpaginate.Cursor[ledger.Log]{} + query := NewListLogsQuery(NewPaginatedQueryOptions[any](nil)) + store.EXPECT(). + ListLogs(gomock.Any(), query). + Return(cursor, nil) + + l := NewDefaultController(ledger.Ledger{}, store, parser) + ret, err := l.ListLogs(ctx, query) + require.NoError(t, err) + require.Equal(t, cursor, ret) +} + +func TestGetVolumesWithBalances(t *testing.T) { + t.Parallel() + ctrl := gomock.NewController(t) + + store := NewMockStore(ctrl) + parser := NewMockNumscriptParser(ctrl) + ctx := logging.TestingContext() + + balancesByAssets := &bunpaginate.Cursor[ledger.VolumesWithBalanceByAssetByAccount]{} + query := NewGetVolumesWithBalancesQuery(NewPaginatedQueryOptions[FiltersForVolumes](FiltersForVolumes{})) + store.EXPECT(). + GetVolumesWithBalances(gomock.Any(), query). + Return(balancesByAssets, nil) + + l := NewDefaultController(ledger.Ledger{}, store, parser) + ret, err := l.GetVolumesWithBalances(ctx, query) + require.NoError(t, err) + require.Equal(t, balancesByAssets, ret) +} + +func TestGetMigrationsInfo(t *testing.T) { + t.Parallel() + ctrl := gomock.NewController(t) + + store := NewMockStore(ctrl) + parser := NewMockNumscriptParser(ctrl) + ctx := logging.TestingContext() + + migrationsInfo := make([]migrations.Info, 0) + store.EXPECT(). + GetMigrationsInfo(gomock.Any()). 
+ Return(migrationsInfo, nil) + + l := NewDefaultController(ledger.Ledger{}, store, parser) + ret, err := l.GetMigrationsInfo(ctx) + require.NoError(t, err) + require.Equal(t, migrationsInfo, ret) +} + +func TestIsDatabaseUpToDate(t *testing.T) { + t.Parallel() + ctrl := gomock.NewController(t) + + store := NewMockStore(ctrl) + parser := NewMockNumscriptParser(ctrl) + ctx := logging.TestingContext() + + store.EXPECT(). + IsUpToDate(gomock.Any()). + Return(true, nil) + + l := NewDefaultController(ledger.Ledger{}, store, parser) + ret, err := l.IsDatabaseUpToDate(ctx) + require.NoError(t, err) + require.True(t, ret) +} diff --git a/internal/controller/ledger/controller_generated_test.go b/internal/controller/ledger/controller_generated_test.go new file mode 100644 index 000000000..4fae9acac --- /dev/null +++ b/internal/controller/ledger/controller_generated_test.go @@ -0,0 +1,333 @@ +// Code generated by MockGen. DO NOT EDIT. +// +// Generated by this command: +// +// mockgen -write_source_comment=false -write_package_comment=false -source controller.go -destination controller_generated_test.go -package ledger . Controller +package ledger + +import ( + context "context" + reflect "reflect" + + bunpaginate "github.com/formancehq/go-libs/v2/bun/bunpaginate" + migrations "github.com/formancehq/go-libs/v2/migrations" + ledger "github.com/formancehq/ledger/internal" + gomock "go.uber.org/mock/gomock" +) + +// MockController is a mock of Controller interface. +type MockController struct { + ctrl *gomock.Controller + recorder *MockControllerMockRecorder +} + +// MockControllerMockRecorder is the mock recorder for MockController. +type MockControllerMockRecorder struct { + mock *MockController +} + +// NewMockController creates a new mock instance. +func NewMockController(ctrl *gomock.Controller) *MockController { + mock := &MockController{ctrl: ctrl} + mock.recorder = &MockControllerMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockController) EXPECT() *MockControllerMockRecorder { + return m.recorder +} + +// CountAccounts mocks base method. +func (m *MockController) CountAccounts(ctx context.Context, query ListAccountsQuery) (int, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CountAccounts", ctx, query) + ret0, _ := ret[0].(int) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// CountAccounts indicates an expected call of CountAccounts. +func (mr *MockControllerMockRecorder) CountAccounts(ctx, query any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CountAccounts", reflect.TypeOf((*MockController)(nil).CountAccounts), ctx, query) +} + +// CountTransactions mocks base method. +func (m *MockController) CountTransactions(ctx context.Context, query ListTransactionsQuery) (int, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CountTransactions", ctx, query) + ret0, _ := ret[0].(int) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// CountTransactions indicates an expected call of CountTransactions. +func (mr *MockControllerMockRecorder) CountTransactions(ctx, query any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CountTransactions", reflect.TypeOf((*MockController)(nil).CountTransactions), ctx, query) +} + +// CreateTransaction mocks base method. 
+func (m *MockController) CreateTransaction(ctx context.Context, parameters Parameters[RunScript]) (*ledger.CreatedTransaction, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CreateTransaction", ctx, parameters) + ret0, _ := ret[0].(*ledger.CreatedTransaction) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// CreateTransaction indicates an expected call of CreateTransaction. +func (mr *MockControllerMockRecorder) CreateTransaction(ctx, parameters any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateTransaction", reflect.TypeOf((*MockController)(nil).CreateTransaction), ctx, parameters) +} + +// DeleteAccountMetadata mocks base method. +func (m *MockController) DeleteAccountMetadata(ctx context.Context, parameters Parameters[DeleteAccountMetadata]) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "DeleteAccountMetadata", ctx, parameters) + ret0, _ := ret[0].(error) + return ret0 +} + +// DeleteAccountMetadata indicates an expected call of DeleteAccountMetadata. +func (mr *MockControllerMockRecorder) DeleteAccountMetadata(ctx, parameters any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteAccountMetadata", reflect.TypeOf((*MockController)(nil).DeleteAccountMetadata), ctx, parameters) +} + +// DeleteTransactionMetadata mocks base method. +func (m *MockController) DeleteTransactionMetadata(ctx context.Context, parameters Parameters[DeleteTransactionMetadata]) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "DeleteTransactionMetadata", ctx, parameters) + ret0, _ := ret[0].(error) + return ret0 +} + +// DeleteTransactionMetadata indicates an expected call of DeleteTransactionMetadata. +func (mr *MockControllerMockRecorder) DeleteTransactionMetadata(ctx, parameters any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteTransactionMetadata", reflect.TypeOf((*MockController)(nil).DeleteTransactionMetadata), ctx, parameters) +} + +// Export mocks base method. +func (m *MockController) Export(ctx context.Context, w ExportWriter) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Export", ctx, w) + ret0, _ := ret[0].(error) + return ret0 +} + +// Export indicates an expected call of Export. +func (mr *MockControllerMockRecorder) Export(ctx, w any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Export", reflect.TypeOf((*MockController)(nil).Export), ctx, w) +} + +// GetAccount mocks base method. +func (m *MockController) GetAccount(ctx context.Context, query GetAccountQuery) (*ledger.Account, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetAccount", ctx, query) + ret0, _ := ret[0].(*ledger.Account) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetAccount indicates an expected call of GetAccount. +func (mr *MockControllerMockRecorder) GetAccount(ctx, query any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAccount", reflect.TypeOf((*MockController)(nil).GetAccount), ctx, query) +} + +// GetAggregatedBalances mocks base method. 
+func (m *MockController) GetAggregatedBalances(ctx context.Context, q GetAggregatedBalanceQuery) (ledger.BalancesByAssets, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetAggregatedBalances", ctx, q) + ret0, _ := ret[0].(ledger.BalancesByAssets) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetAggregatedBalances indicates an expected call of GetAggregatedBalances. +func (mr *MockControllerMockRecorder) GetAggregatedBalances(ctx, q any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAggregatedBalances", reflect.TypeOf((*MockController)(nil).GetAggregatedBalances), ctx, q) +} + +// GetMigrationsInfo mocks base method. +func (m *MockController) GetMigrationsInfo(ctx context.Context) ([]migrations.Info, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetMigrationsInfo", ctx) + ret0, _ := ret[0].([]migrations.Info) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetMigrationsInfo indicates an expected call of GetMigrationsInfo. +func (mr *MockControllerMockRecorder) GetMigrationsInfo(ctx any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetMigrationsInfo", reflect.TypeOf((*MockController)(nil).GetMigrationsInfo), ctx) +} + +// GetStats mocks base method. +func (m *MockController) GetStats(ctx context.Context) (Stats, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetStats", ctx) + ret0, _ := ret[0].(Stats) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetStats indicates an expected call of GetStats. +func (mr *MockControllerMockRecorder) GetStats(ctx any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetStats", reflect.TypeOf((*MockController)(nil).GetStats), ctx) +} + +// GetTransaction mocks base method. +func (m *MockController) GetTransaction(ctx context.Context, query GetTransactionQuery) (*ledger.Transaction, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetTransaction", ctx, query) + ret0, _ := ret[0].(*ledger.Transaction) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetTransaction indicates an expected call of GetTransaction. +func (mr *MockControllerMockRecorder) GetTransaction(ctx, query any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTransaction", reflect.TypeOf((*MockController)(nil).GetTransaction), ctx, query) +} + +// GetVolumesWithBalances mocks base method. +func (m *MockController) GetVolumesWithBalances(ctx context.Context, q GetVolumesWithBalancesQuery) (*bunpaginate.Cursor[ledger.VolumesWithBalanceByAssetByAccount], error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetVolumesWithBalances", ctx, q) + ret0, _ := ret[0].(*bunpaginate.Cursor[ledger.VolumesWithBalanceByAssetByAccount]) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetVolumesWithBalances indicates an expected call of GetVolumesWithBalances. +func (mr *MockControllerMockRecorder) GetVolumesWithBalances(ctx, q any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetVolumesWithBalances", reflect.TypeOf((*MockController)(nil).GetVolumesWithBalances), ctx, q) +} + +// Import mocks base method. +func (m *MockController) Import(ctx context.Context, stream chan ledger.Log) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Import", ctx, stream) + ret0, _ := ret[0].(error) + return ret0 +} + +// Import indicates an expected call of Import. 
+func (mr *MockControllerMockRecorder) Import(ctx, stream any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Import", reflect.TypeOf((*MockController)(nil).Import), ctx, stream) +} + +// IsDatabaseUpToDate mocks base method. +func (m *MockController) IsDatabaseUpToDate(ctx context.Context) (bool, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "IsDatabaseUpToDate", ctx) + ret0, _ := ret[0].(bool) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// IsDatabaseUpToDate indicates an expected call of IsDatabaseUpToDate. +func (mr *MockControllerMockRecorder) IsDatabaseUpToDate(ctx any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "IsDatabaseUpToDate", reflect.TypeOf((*MockController)(nil).IsDatabaseUpToDate), ctx) +} + +// ListAccounts mocks base method. +func (m *MockController) ListAccounts(ctx context.Context, query ListAccountsQuery) (*bunpaginate.Cursor[ledger.Account], error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ListAccounts", ctx, query) + ret0, _ := ret[0].(*bunpaginate.Cursor[ledger.Account]) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ListAccounts indicates an expected call of ListAccounts. +func (mr *MockControllerMockRecorder) ListAccounts(ctx, query any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListAccounts", reflect.TypeOf((*MockController)(nil).ListAccounts), ctx, query) +} + +// ListLogs mocks base method. +func (m *MockController) ListLogs(ctx context.Context, query GetLogsQuery) (*bunpaginate.Cursor[ledger.Log], error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ListLogs", ctx, query) + ret0, _ := ret[0].(*bunpaginate.Cursor[ledger.Log]) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ListLogs indicates an expected call of ListLogs. +func (mr *MockControllerMockRecorder) ListLogs(ctx, query any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListLogs", reflect.TypeOf((*MockController)(nil).ListLogs), ctx, query) +} + +// ListTransactions mocks base method. +func (m *MockController) ListTransactions(ctx context.Context, query ListTransactionsQuery) (*bunpaginate.Cursor[ledger.Transaction], error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ListTransactions", ctx, query) + ret0, _ := ret[0].(*bunpaginate.Cursor[ledger.Transaction]) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ListTransactions indicates an expected call of ListTransactions. +func (mr *MockControllerMockRecorder) ListTransactions(ctx, query any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListTransactions", reflect.TypeOf((*MockController)(nil).ListTransactions), ctx, query) +} + +// RevertTransaction mocks base method. +func (m *MockController) RevertTransaction(ctx context.Context, parameters Parameters[RevertTransaction]) (*ledger.RevertedTransaction, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "RevertTransaction", ctx, parameters) + ret0, _ := ret[0].(*ledger.RevertedTransaction) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// RevertTransaction indicates an expected call of RevertTransaction. 
+func (mr *MockControllerMockRecorder) RevertTransaction(ctx, parameters any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RevertTransaction", reflect.TypeOf((*MockController)(nil).RevertTransaction), ctx, parameters) +} + +// SaveAccountMetadata mocks base method. +func (m *MockController) SaveAccountMetadata(ctx context.Context, parameters Parameters[SaveAccountMetadata]) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "SaveAccountMetadata", ctx, parameters) + ret0, _ := ret[0].(error) + return ret0 +} + +// SaveAccountMetadata indicates an expected call of SaveAccountMetadata. +func (mr *MockControllerMockRecorder) SaveAccountMetadata(ctx, parameters any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SaveAccountMetadata", reflect.TypeOf((*MockController)(nil).SaveAccountMetadata), ctx, parameters) +} + +// SaveTransactionMetadata mocks base method. +func (m *MockController) SaveTransactionMetadata(ctx context.Context, parameters Parameters[SaveTransactionMetadata]) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "SaveTransactionMetadata", ctx, parameters) + ret0, _ := ret[0].(error) + return ret0 +} + +// SaveTransactionMetadata indicates an expected call of SaveTransactionMetadata. +func (mr *MockControllerMockRecorder) SaveTransactionMetadata(ctx, parameters any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SaveTransactionMetadata", reflect.TypeOf((*MockController)(nil).SaveTransactionMetadata), ctx, parameters) +} diff --git a/internal/controller/ledger/controller_with_cache.go b/internal/controller/ledger/controller_with_cache.go new file mode 100644 index 000000000..ec5f7532b --- /dev/null +++ b/internal/controller/ledger/controller_with_cache.go @@ -0,0 +1,43 @@ +package ledger + +import ( + "context" + "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/trace" + + ledger "github.com/formancehq/ledger/internal" +) + +type ControllerWithCache struct { + registry *StateRegistry + ledger ledger.Ledger + Controller +} + +func (c *ControllerWithCache) IsDatabaseUpToDate(ctx context.Context) (bool, error) { + + if c.registry.IsUpToDate(c.ledger.Name) { + trace.SpanFromContext(ctx).SetAttributes(attribute.Bool("cache-hit", true)) + return true, nil + } + + upToDate, err := c.Controller.IsDatabaseUpToDate(ctx) + if err != nil { + return false, err + } + + _ = c.registry.Upsert(c.ledger) + if upToDate { + c.registry.SetUpToDate(c.ledger.Name) + } + + return upToDate, nil +} + +func NewControllerWithCache(ledger ledger.Ledger, underlying Controller, registry *StateRegistry) *ControllerWithCache { + return &ControllerWithCache{ + ledger: ledger, + Controller: underlying, + registry: registry, + } +} diff --git a/internal/controller/ledger/controller_with_events.go b/internal/controller/ledger/controller_with_events.go new file mode 100644 index 000000000..16d4886bd --- /dev/null +++ b/internal/controller/ledger/controller_with_events.go @@ -0,0 +1,123 @@ +package ledger + +import ( + "context" + "fmt" + ledger "github.com/formancehq/ledger/internal" +) + +type ControllerWithEvents struct { + Controller + ledger ledger.Ledger + listener Listener +} + +func NewControllerWithEvents(ledger ledger.Ledger, underlying Controller, listener Listener) *ControllerWithEvents { + return &ControllerWithEvents{ + Controller: underlying, + ledger: ledger, + listener: listener, + } +} +func (ctrl *ControllerWithEvents) 
CreateTransaction(ctx context.Context, parameters Parameters[RunScript]) (*ledger.CreatedTransaction, error) { + ret, err := ctrl.Controller.CreateTransaction(ctx, parameters) + if err != nil { + return nil, err + } + if !parameters.DryRun { + ctrl.listener.CommittedTransactions(ctx, ctrl.ledger.Name, ret.Transaction, ret.AccountMetadata) + } + + return ret, nil +} + +func (ctrl *ControllerWithEvents) RevertTransaction(ctx context.Context, parameters Parameters[RevertTransaction]) (*ledger.RevertedTransaction, error) { + ret, err := ctrl.Controller.RevertTransaction(ctx, parameters) + if err != nil { + return nil, err + } + if !parameters.DryRun { + ctrl.listener.RevertedTransaction( + ctx, + ctrl.ledger.Name, + ret.RevertedTransaction, + ret.RevertedTransaction, + ) + } + + return ret, nil +} + +func (ctrl *ControllerWithEvents) SaveTransactionMetadata(ctx context.Context, parameters Parameters[SaveTransactionMetadata]) error { + err := ctrl.Controller.SaveTransactionMetadata(ctx, parameters) + if err != nil { + return err + } + if !parameters.DryRun { + ctrl.listener.SavedMetadata( + ctx, + ctrl.ledger.Name, + ledger.MetaTargetTypeTransaction, + fmt.Sprint(parameters.Input.TransactionID), + parameters.Input.Metadata, + ) + } + + return nil +} + +func (ctrl *ControllerWithEvents) SaveAccountMetadata(ctx context.Context, parameters Parameters[SaveAccountMetadata]) error { + err := ctrl.Controller.SaveAccountMetadata(ctx, parameters) + if err != nil { + return err + } + if !parameters.DryRun { + ctrl.listener.SavedMetadata( + ctx, + ctrl.ledger.Name, + ledger.MetaTargetTypeAccount, + parameters.Input.Address, + parameters.Input.Metadata, + ) + } + + return nil +} + +func (ctrl *ControllerWithEvents) DeleteTransactionMetadata(ctx context.Context, parameters Parameters[DeleteTransactionMetadata]) error { + err := ctrl.Controller.DeleteTransactionMetadata(ctx, parameters) + if err != nil { + return err + } + if !parameters.DryRun { + ctrl.listener.DeletedMetadata( + ctx, + ctrl.ledger.Name, + ledger.MetaTargetTypeTransaction, + fmt.Sprint(parameters.Input.TransactionID), + parameters.Input.Key, + ) + } + + return nil +} + +func (ctrl *ControllerWithEvents) DeleteAccountMetadata(ctx context.Context, parameters Parameters[DeleteAccountMetadata]) error { + err := ctrl.Controller.DeleteAccountMetadata(ctx, parameters) + if err != nil { + return err + } + if !parameters.DryRun { + ctrl.listener.DeletedMetadata( + ctx, + ctrl.ledger.Name, + ledger.MetaTargetTypeAccount, + parameters.Input.Address, + parameters.Input.Key, + ) + } + + return nil +} + +var _ Controller = (*ControllerWithEvents)(nil) diff --git a/internal/controller/ledger/controller_with_too_many_client_handling.go b/internal/controller/ledger/controller_with_too_many_client_handling.go new file mode 100644 index 000000000..911fcb6fb --- /dev/null +++ b/internal/controller/ledger/controller_with_too_many_client_handling.go @@ -0,0 +1,109 @@ +package ledger + +import ( + "context" + "errors" + "github.com/formancehq/go-libs/v2/platform/postgres" + ledger "github.com/formancehq/ledger/internal" + "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/trace" + "time" +) + +//go:generate mockgen -write_source_comment=false -write_package_comment=false -source controller_with_too_many_client_handling.go -destination controller_with_too_many_client_handling_generated_test.go -package ledger . 
DelayCalculator -typed +type DelayCalculator interface { + Next(int) time.Duration +} +type DelayCalculatorFn func(int) time.Duration + +func (fn DelayCalculatorFn) Next(iteration int) time.Duration { + return fn(iteration) +} + +type ControllerWithTooManyClientHandling struct { + Controller + delayCalculator DelayCalculator + tracer trace.Tracer +} + +func NewControllerWithTooManyClientHandling( + underlying Controller, + tracer trace.Tracer, + delayCalculator DelayCalculator, +) *ControllerWithTooManyClientHandling { + return &ControllerWithTooManyClientHandling{ + Controller: underlying, + delayCalculator: delayCalculator, + tracer: tracer, + } +} + +func (ctrl *ControllerWithTooManyClientHandling) CreateTransaction(ctx context.Context, parameters Parameters[RunScript]) (*ledger.CreatedTransaction, error) { + return handleRetry(ctx, ctrl.tracer, ctrl.delayCalculator, parameters, ctrl.Controller.CreateTransaction) +} + +func (ctrl *ControllerWithTooManyClientHandling) RevertTransaction(ctx context.Context, parameters Parameters[RevertTransaction]) (*ledger.RevertedTransaction, error) { + return handleRetry(ctx, ctrl.tracer, ctrl.delayCalculator, parameters, ctrl.Controller.RevertTransaction) +} + +func (ctrl *ControllerWithTooManyClientHandling) SaveTransactionMetadata(ctx context.Context, parameters Parameters[SaveTransactionMetadata]) error { + _, err := handleRetry(ctx, ctrl.tracer, ctrl.delayCalculator, parameters, func(ctx context.Context, parameters Parameters[SaveTransactionMetadata]) (*struct{}, error) { + return nil, ctrl.Controller.SaveTransactionMetadata(ctx, parameters) + }) + return err +} + +func (ctrl *ControllerWithTooManyClientHandling) SaveAccountMetadata(ctx context.Context, parameters Parameters[SaveAccountMetadata]) error { + _, err := handleRetry(ctx, ctrl.tracer, ctrl.delayCalculator, parameters, func(ctx context.Context, parameters Parameters[SaveAccountMetadata]) (*struct{}, error) { + return nil, ctrl.Controller.SaveAccountMetadata(ctx, parameters) + }) + return err +} + +func (ctrl *ControllerWithTooManyClientHandling) DeleteTransactionMetadata(ctx context.Context, parameters Parameters[DeleteTransactionMetadata]) error { + _, err := handleRetry(ctx, ctrl.tracer, ctrl.delayCalculator, parameters, func(ctx context.Context, parameters Parameters[DeleteTransactionMetadata]) (*struct{}, error) { + return nil, ctrl.Controller.DeleteTransactionMetadata(ctx, parameters) + }) + return err +} + +func (ctrl *ControllerWithTooManyClientHandling) DeleteAccountMetadata(ctx context.Context, parameters Parameters[DeleteAccountMetadata]) error { + _, err := handleRetry(ctx, ctrl.tracer, ctrl.delayCalculator, parameters, func(ctx context.Context, parameters Parameters[DeleteAccountMetadata]) (*struct{}, error) { + return nil, ctrl.Controller.DeleteAccountMetadata(ctx, parameters) + }) + return err +} + +var _ Controller = (*ControllerWithTooManyClientHandling)(nil) + +func handleRetry[INPUT, OUTPUT any]( + ctx context.Context, + tracer trace.Tracer, + delayCalculator DelayCalculator, + parameters Parameters[INPUT], + fn func(ctx context.Context, parameters Parameters[INPUT]) (*OUTPUT, error), +) (*OUTPUT, error) { + + ctx, span := tracer.Start(ctx, "TooManyClientRetrier") + defer span.End() + + count := 0 + for { + output, err := fn(ctx, parameters) + if err != nil && errors.Is(err, postgres.ErrTooManyClient{}) { + delay := delayCalculator.Next(count) + if delay == 0 { + return nil, err + } + select { + case <-ctx.Done(): + return nil, ctx.Err() + case <-time.After(delay): + 
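+				// backoff elapsed without the context being cancelled: record the
+				// attempt on the span and retry the underlying call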
count++ + span.SetAttributes(attribute.Int("retry", count)) + continue + } + } + return output, err + } +} diff --git a/internal/controller/ledger/controller_with_too_many_client_handling_generated_test.go b/internal/controller/ledger/controller_with_too_many_client_handling_generated_test.go new file mode 100644 index 000000000..2f0c421cb --- /dev/null +++ b/internal/controller/ledger/controller_with_too_many_client_handling_generated_test.go @@ -0,0 +1,50 @@ +// Code generated by MockGen. DO NOT EDIT. +// +// Generated by this command: +// +// mockgen -write_source_comment=false -write_package_comment=false -source controller_with_too_many_client_handling.go -destination controller_with_too_many_client_handling_generated_test.go -package ledger . DelayCalculator -typed +package ledger + +import ( + reflect "reflect" + time "time" + + gomock "go.uber.org/mock/gomock" +) + +// MockDelayCalculator is a mock of DelayCalculator interface. +type MockDelayCalculator struct { + ctrl *gomock.Controller + recorder *MockDelayCalculatorMockRecorder +} + +// MockDelayCalculatorMockRecorder is the mock recorder for MockDelayCalculator. +type MockDelayCalculatorMockRecorder struct { + mock *MockDelayCalculator +} + +// NewMockDelayCalculator creates a new mock instance. +func NewMockDelayCalculator(ctrl *gomock.Controller) *MockDelayCalculator { + mock := &MockDelayCalculator{ctrl: ctrl} + mock.recorder = &MockDelayCalculatorMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockDelayCalculator) EXPECT() *MockDelayCalculatorMockRecorder { + return m.recorder +} + +// Next mocks base method. +func (m *MockDelayCalculator) Next(arg0 int) time.Duration { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Next", arg0) + ret0, _ := ret[0].(time.Duration) + return ret0 +} + +// Next indicates an expected call of Next. +func (mr *MockDelayCalculatorMockRecorder) Next(arg0 any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Next", reflect.TypeOf((*MockDelayCalculator)(nil).Next), arg0) +} diff --git a/internal/controller/ledger/controller_with_too_many_client_handling_test.go b/internal/controller/ledger/controller_with_too_many_client_handling_test.go new file mode 100644 index 000000000..2dca515f8 --- /dev/null +++ b/internal/controller/ledger/controller_with_too_many_client_handling_test.go @@ -0,0 +1,80 @@ +package ledger + +import ( + "errors" + "github.com/formancehq/go-libs/v2/logging" + "github.com/formancehq/go-libs/v2/platform/postgres" + "github.com/formancehq/go-libs/v2/time" + ledger "github.com/formancehq/ledger/internal" + "github.com/stretchr/testify/require" + "go.opentelemetry.io/otel/trace/noop" + "go.uber.org/mock/gomock" + "testing" +) + +func TestNewControllerWithTooManyClientHandling(t *testing.T) { + t.Parallel() + + t.Run("finally passing", func(t *testing.T) { + t.Parallel() + + ctrl := gomock.NewController(t) + underlyingLedgerController := NewMockController(ctrl) + delayCalculator := NewMockDelayCalculator(ctrl) + ctx := logging.TestingContext() + + parameters := Parameters[RunScript]{} + + underlyingLedgerController.EXPECT(). + CreateTransaction(gomock.Any(), parameters). + Return(nil, postgres.ErrTooManyClient{}). + Times(2) + + underlyingLedgerController.EXPECT(). + CreateTransaction(gomock.Any(), parameters). + Return(&ledger.CreatedTransaction{ + Transaction: ledger.NewTransaction(), + }, nil) + + delayCalculator.EXPECT(). + Next(0). 
+ Return(time.Millisecond) + + delayCalculator.EXPECT(). + Next(1). + Return(10 * time.Millisecond) + + ledgerController := NewControllerWithTooManyClientHandling(underlyingLedgerController, noop.Tracer{}, delayCalculator) + _, err := ledgerController.CreateTransaction(ctx, parameters) + require.NoError(t, err) + }) + + t.Run("finally failing", func(t *testing.T) { + t.Parallel() + + ctrl := gomock.NewController(t) + underlyingLedgerController := NewMockController(ctrl) + delayCalculator := NewMockDelayCalculator(ctrl) + ctx := logging.TestingContext() + + parameters := Parameters[RunScript]{} + + underlyingLedgerController.EXPECT(). + CreateTransaction(gomock.Any(), parameters). + Return(nil, postgres.ErrTooManyClient{}). + Times(2) + + delayCalculator.EXPECT(). + Next(0). + Return(time.Millisecond) + + delayCalculator.EXPECT(). + Next(1). + Return(time.Duration(0)) + + ledgerController := NewControllerWithTooManyClientHandling(underlyingLedgerController, noop.Tracer{}, delayCalculator) + _, err := ledgerController.CreateTransaction(ctx, parameters) + require.Error(t, err) + require.True(t, errors.Is(err, postgres.ErrTooManyClient{})) + }) +} diff --git a/internal/controller/ledger/controller_with_traces.go b/internal/controller/ledger/controller_with_traces.go new file mode 100644 index 000000000..9c25bea1a --- /dev/null +++ b/internal/controller/ledger/controller_with_traces.go @@ -0,0 +1,143 @@ +package ledger + +import ( + "context" + "github.com/formancehq/go-libs/v2/migrations" + "github.com/formancehq/ledger/internal/tracing" + "go.opentelemetry.io/otel/trace" + + "github.com/formancehq/go-libs/v2/bun/bunpaginate" + ledger "github.com/formancehq/ledger/internal" +) + +type ControllerWithTraces struct { + underlying Controller + tracer trace.Tracer +} + +func NewControllerWithTraces(underlying Controller, tracer trace.Tracer) *ControllerWithTraces { + return &ControllerWithTraces{ + underlying: underlying, + tracer: tracer, + } +} + +func (ctrl *ControllerWithTraces) GetMigrationsInfo(ctx context.Context) ([]migrations.Info, error) { + return ctrl.underlying.GetMigrationsInfo(ctx) +} + +func (ctrl *ControllerWithTraces) ListTransactions(ctx context.Context, q ListTransactionsQuery) (*bunpaginate.Cursor[ledger.Transaction], error) { + return tracing.Trace(ctx, ctrl.tracer, "ListTransactions", func(ctx context.Context) (*bunpaginate.Cursor[ledger.Transaction], error) { + return ctrl.underlying.ListTransactions(ctx, q) + }) +} + +func (ctrl *ControllerWithTraces) CountTransactions(ctx context.Context, q ListTransactionsQuery) (int, error) { + return tracing.Trace(ctx, ctrl.tracer, "CountTransactions", func(ctx context.Context) (int, error) { + return ctrl.underlying.CountTransactions(ctx, q) + }) +} + +func (ctrl *ControllerWithTraces) GetTransaction(ctx context.Context, query GetTransactionQuery) (*ledger.Transaction, error) { + return tracing.Trace(ctx, ctrl.tracer, "GetTransaction", func(ctx context.Context) (*ledger.Transaction, error) { + return ctrl.underlying.GetTransaction(ctx, query) + }) +} + +func (ctrl *ControllerWithTraces) CountAccounts(ctx context.Context, a ListAccountsQuery) (int, error) { + return tracing.Trace(ctx, ctrl.tracer, "CountAccounts", func(ctx context.Context) (int, error) { + return ctrl.underlying.CountAccounts(ctx, a) + }) +} + +func (ctrl *ControllerWithTraces) ListAccounts(ctx context.Context, a ListAccountsQuery) (*bunpaginate.Cursor[ledger.Account], error) { + return tracing.Trace(ctx, ctrl.tracer, "ListAccounts", func(ctx context.Context) 
(*bunpaginate.Cursor[ledger.Account], error) { + return ctrl.underlying.ListAccounts(ctx, a) + }) +} + +func (ctrl *ControllerWithTraces) GetAccount(ctx context.Context, q GetAccountQuery) (*ledger.Account, error) { + return tracing.Trace(ctx, ctrl.tracer, "GetAccount", func(ctx context.Context) (*ledger.Account, error) { + return ctrl.underlying.GetAccount(ctx, q) + }) +} + +func (ctrl *ControllerWithTraces) GetAggregatedBalances(ctx context.Context, q GetAggregatedBalanceQuery) (ledger.BalancesByAssets, error) { + return tracing.Trace(ctx, ctrl.tracer, "GetAggregatedBalances", func(ctx context.Context) (ledger.BalancesByAssets, error) { + return ctrl.underlying.GetAggregatedBalances(ctx, q) + }) +} + +func (ctrl *ControllerWithTraces) ListLogs(ctx context.Context, q GetLogsQuery) (*bunpaginate.Cursor[ledger.Log], error) { + return tracing.Trace(ctx, ctrl.tracer, "ListLogs", func(ctx context.Context) (*bunpaginate.Cursor[ledger.Log], error) { + return ctrl.underlying.ListLogs(ctx, q) + }) +} + +func (ctrl *ControllerWithTraces) Import(ctx context.Context, stream chan ledger.Log) error { + return tracing.SkipResult(tracing.Trace(ctx, ctrl.tracer, "Import", tracing.NoResult(func(ctx context.Context) error { + return ctrl.underlying.Import(ctx, stream) + }))) +} + +func (ctrl *ControllerWithTraces) Export(ctx context.Context, w ExportWriter) error { + return tracing.SkipResult(tracing.Trace(ctx, ctrl.tracer, "Export", tracing.NoResult(func(ctx context.Context) error { + return ctrl.underlying.Export(ctx, w) + }))) +} + +func (ctrl *ControllerWithTraces) IsDatabaseUpToDate(ctx context.Context) (bool, error) { + return tracing.Trace(ctx, ctrl.tracer, "IsDatabaseUpToDate", func(ctx context.Context) (bool, error) { + return ctrl.underlying.IsDatabaseUpToDate(ctx) + }) +} + +func (ctrl *ControllerWithTraces) GetVolumesWithBalances(ctx context.Context, q GetVolumesWithBalancesQuery) (*bunpaginate.Cursor[ledger.VolumesWithBalanceByAssetByAccount], error) { + return tracing.Trace(ctx, ctrl.tracer, "GetVolumesWithBalances", func(ctx context.Context) (*bunpaginate.Cursor[ledger.VolumesWithBalanceByAssetByAccount], error) { + return ctrl.underlying.GetVolumesWithBalances(ctx, q) + }) +} + +func (ctrl *ControllerWithTraces) CreateTransaction(ctx context.Context, parameters Parameters[RunScript]) (*ledger.CreatedTransaction, error) { + return tracing.Trace(ctx, ctrl.tracer, "CreateTransaction", func(ctx context.Context) (*ledger.CreatedTransaction, error) { + return ctrl.underlying.CreateTransaction(ctx, parameters) + }) +} + +func (ctrl *ControllerWithTraces) RevertTransaction(ctx context.Context, parameters Parameters[RevertTransaction]) (*ledger.RevertedTransaction, error) { + return tracing.Trace(ctx, ctrl.tracer, "RevertTransaction", func(ctx context.Context) (*ledger.RevertedTransaction, error) { + return ctrl.underlying.RevertTransaction(ctx, parameters) + }) +} + +func (ctrl *ControllerWithTraces) SaveTransactionMetadata(ctx context.Context, parameters Parameters[SaveTransactionMetadata]) error { + return tracing.SkipResult(tracing.Trace(ctx, ctrl.tracer, "SaveTransactionMetadata", tracing.NoResult(func(ctx context.Context) error { + return ctrl.underlying.SaveTransactionMetadata(ctx, parameters) + }))) +} + +func (ctrl *ControllerWithTraces) SaveAccountMetadata(ctx context.Context, parameters Parameters[SaveAccountMetadata]) error { + return tracing.SkipResult(tracing.Trace(ctx, ctrl.tracer, "SaveAccountMetadata", tracing.NoResult(func(ctx context.Context) error { + return 
ctrl.underlying.SaveAccountMetadata(ctx, parameters) + }))) +} + +func (ctrl *ControllerWithTraces) DeleteTransactionMetadata(ctx context.Context, parameters Parameters[DeleteTransactionMetadata]) error { + return tracing.SkipResult(tracing.Trace(ctx, ctrl.tracer, "DeleteTransactionMetadata", tracing.NoResult(func(ctx context.Context) error { + return ctrl.underlying.DeleteTransactionMetadata(ctx, parameters) + }))) +} + +func (ctrl *ControllerWithTraces) DeleteAccountMetadata(ctx context.Context, parameters Parameters[DeleteAccountMetadata]) error { + return tracing.SkipResult(tracing.Trace(ctx, ctrl.tracer, "DeleteAccountMetadata", tracing.NoResult(func(ctx context.Context) error { + return ctrl.underlying.DeleteAccountMetadata(ctx, parameters) + }))) +} + +func (ctrl *ControllerWithTraces) GetStats(ctx context.Context) (Stats, error) { + return tracing.Trace(ctx, ctrl.tracer, "GetStats", func(ctx context.Context) (Stats, error) { + return ctrl.underlying.GetStats(ctx) + }) +} + +var _ Controller = (*ControllerWithTraces)(nil) diff --git a/internal/controller/ledger/errors.go b/internal/controller/ledger/errors.go new file mode 100644 index 000000000..e2572cbb9 --- /dev/null +++ b/internal/controller/ledger/errors.go @@ -0,0 +1,239 @@ +package ledger + +import ( + "encoding/base64" + "fmt" + + "github.com/formancehq/go-libs/v2/platform/postgres" + + "github.com/formancehq/ledger/internal/machine" + + "errors" +) + +var ErrNotFound = postgres.ErrNotFound + +type ErrTooManyClient = postgres.ErrTooManyClient + +type ErrImport struct { + err error +} + +func (i ErrImport) Error() string { + return i.err.Error() +} + +func (i ErrImport) Is(err error) bool { + _, ok := err.(ErrImport) + return ok +} + +var _ error = (*ErrImport)(nil) + +func newErrImport(err error) ErrImport { + return ErrImport{ + err: err, + } +} + +var _ error = (*ErrInvalidHash)(nil) + +type ErrInvalidHash struct { + logID int + expected []byte + got []byte +} + +func (i ErrInvalidHash) Error() string { + return fmt.Sprintf( + "invalid hash, expected %s got %s for log %d", + base64.StdEncoding.EncodeToString(i.expected), + base64.StdEncoding.EncodeToString(i.got), + i.logID, + ) +} + +var _ error = (*ErrInvalidHash)(nil) + +func newErrInvalidHash(logID int, got, expected []byte) ErrImport { + return newErrImport(ErrInvalidHash{ + expected: expected, + got: got, + logID: logID, + }) +} + +// todo(waiting): need a more precise underlying error +// notes(gfyrag): Waiting new interpreter +type ErrInsufficientFunds = machine.ErrInsufficientFund + +var ErrNoPostings = errors.New("numscript execution returned no postings") + +type ErrAlreadyReverted struct { + id int +} + +func (e ErrAlreadyReverted) Error() string { + return fmt.Sprintf("already reverted, id: %d", e.id) +} + +func (e ErrAlreadyReverted) Is(err error) bool { + _, ok := err.(ErrAlreadyReverted) + return ok +} + +var _ error = (*ErrAlreadyReverted)(nil) + +func newErrAlreadyReverted(id int) ErrAlreadyReverted { + return ErrAlreadyReverted{ + id: id, + } +} + +type ErrInvalidQuery struct { + msg string +} + +func (e ErrInvalidQuery) Error() string { + return e.msg +} + +func (e ErrInvalidQuery) Is(err error) bool { + _, ok := err.(ErrInvalidQuery) + return ok +} + +func NewErrInvalidQuery(msg string, args ...any) ErrInvalidQuery { + return ErrInvalidQuery{ + msg: fmt.Sprintf(msg, args...), + } +} + +type ErrMissingFeature struct { + feature string +} + +func (e ErrMissingFeature) Error() string { + return fmt.Sprintf("missing feature %q", e.feature) +} + +func (e 
ErrMissingFeature) Is(err error) bool { + _, ok := err.(ErrMissingFeature) + return ok +} + +func NewErrMissingFeature(feature string) ErrMissingFeature { + return ErrMissingFeature{ + feature: feature, + } +} + +type ErrIdempotencyKeyConflict struct { + ik string +} + +func (e ErrIdempotencyKeyConflict) Error() string { + return fmt.Sprintf("duplicate idempotency key %q", e.ik) +} + +func (e ErrIdempotencyKeyConflict) Is(err error) bool { + _, ok := err.(ErrIdempotencyKeyConflict) + return ok +} + +func NewErrIdempotencyKeyConflict(ik string) ErrIdempotencyKeyConflict { + return ErrIdempotencyKeyConflict{ + ik: ik, + } +} + +type ErrTransactionReferenceConflict struct { + reference string +} + +func (e ErrTransactionReferenceConflict) Error() string { + return fmt.Sprintf("duplicate reference %q", e.reference) +} + +func (e ErrTransactionReferenceConflict) Is(err error) bool { + _, ok := err.(ErrTransactionReferenceConflict) + return ok +} + +func NewErrTransactionReferenceConflict(reference string) ErrTransactionReferenceConflict { + return ErrTransactionReferenceConflict{ + reference: reference, + } +} + +type ErrInvalidVars = machine.ErrInvalidVars + +// ErrCompilationFailed is used for any errors returned by the numscript interpreter +type ErrCompilationFailed struct { + err error +} + +func (e ErrCompilationFailed) Error() string { + return fmt.Sprintf("compilation error: %s", e.err) +} + +func (e ErrCompilationFailed) Is(err error) bool { + _, ok := err.(ErrCompilationFailed) + return ok +} + +func newErrCompilationFailed(err error) ErrCompilationFailed { + return ErrCompilationFailed{ + err: err, + } +} + +// ErrMetadataOverride is used when a metadata is defined at numscript level AND at the input level +type ErrMetadataOverride struct { + key string +} + +func (e *ErrMetadataOverride) Error() string { + return fmt.Sprintf("cannot override metadata '%s'", e.key) +} + +func (e *ErrMetadataOverride) Is(err error) bool { + _, ok := err.(*ErrMetadataOverride) + return ok +} + +func newErrMetadataOverride(key string) *ErrMetadataOverride { + return &ErrMetadataOverride{ + key: key, + } +} + +// ErrInvalidIdempotencyInput is used when a IK is used with an inputs different from the original one. +// For example, try to use the same IK with a different numscript script will result with that error. 
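+// The check compares the idempotency hash computed from the new input against the hash stored with
+// the original log; when they differ, the call is rejected with this error.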
+type ErrInvalidIdempotencyInput struct { + idempotencyKey string + expectedIdempotencyHash string + computedIdempotencyHash string +} + +func (e ErrInvalidIdempotencyInput) Error() string { + return fmt.Sprintf( + "invalid idempotency hash when using idempotency key '%s', has computed '%s' but '%s' is stored", + e.idempotencyKey, + e.computedIdempotencyHash, + e.expectedIdempotencyHash, + ) +} + +func (e ErrInvalidIdempotencyInput) Is(err error) bool { + _, ok := err.(ErrInvalidIdempotencyInput) + return ok +} + +func newErrInvalidIdempotencyInputs(idempotencyKey, expectedIdempotencyHash, gotIdempotencyHash string) ErrInvalidIdempotencyInput { + return ErrInvalidIdempotencyInput{ + idempotencyKey: idempotencyKey, + expectedIdempotencyHash: expectedIdempotencyHash, + computedIdempotencyHash: gotIdempotencyHash, + } +} diff --git a/internal/controller/ledger/export.go b/internal/controller/ledger/export.go new file mode 100644 index 000000000..3d0c6f3ab --- /dev/null +++ b/internal/controller/ledger/export.go @@ -0,0 +1,17 @@ +package ledger + +import ( + "context" + + ledger "github.com/formancehq/ledger/internal" +) + +type ExportWriter interface { + Write(ctx context.Context, log ledger.Log) error +} + +type ExportWriterFn func(ctx context.Context, log ledger.Log) error + +func (fn ExportWriterFn) Write(ctx context.Context, log ledger.Log) error { + return fn(ctx, log) +} diff --git a/internal/controller/ledger/listener.go b/internal/controller/ledger/listener.go new file mode 100644 index 000000000..4a9a878e5 --- /dev/null +++ b/internal/controller/ledger/listener.go @@ -0,0 +1,16 @@ +package ledger + +import ( + "context" + + "github.com/formancehq/go-libs/v2/metadata" + ledger "github.com/formancehq/ledger/internal" +) + +//go:generate mockgen -write_source_comment=false -write_package_comment=false -source listener.go -destination listener_generated_test.go -package ledger . Listener +type Listener interface { + CommittedTransactions(ctx context.Context, ledger string, res ledger.Transaction, accountMetadata ledger.AccountMetadata) + SavedMetadata(ctx context.Context, ledger string, targetType, id string, metadata metadata.Metadata) + RevertedTransaction(ctx context.Context, ledger string, reverted, revert ledger.Transaction) + DeletedMetadata(ctx context.Context, ledger string, targetType string, targetID any, key string) +} diff --git a/internal/controller/ledger/listener_generated_test.go b/internal/controller/ledger/listener_generated_test.go new file mode 100644 index 000000000..44df6a6a7 --- /dev/null +++ b/internal/controller/ledger/listener_generated_test.go @@ -0,0 +1,86 @@ +// Code generated by MockGen. DO NOT EDIT. +// +// Generated by this command: +// +// mockgen -write_source_comment=false -write_package_comment=false -source listener.go -destination listener_generated_test.go -package ledger . Listener +package ledger + +import ( + context "context" + reflect "reflect" + + metadata "github.com/formancehq/go-libs/v2/metadata" + ledger "github.com/formancehq/ledger/internal" + gomock "go.uber.org/mock/gomock" +) + +// MockListener is a mock of Listener interface. +type MockListener struct { + ctrl *gomock.Controller + recorder *MockListenerMockRecorder +} + +// MockListenerMockRecorder is the mock recorder for MockListener. +type MockListenerMockRecorder struct { + mock *MockListener +} + +// NewMockListener creates a new mock instance. 
+func NewMockListener(ctrl *gomock.Controller) *MockListener { + mock := &MockListener{ctrl: ctrl} + mock.recorder = &MockListenerMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockListener) EXPECT() *MockListenerMockRecorder { + return m.recorder +} + +// CommittedTransactions mocks base method. +func (m *MockListener) CommittedTransactions(ctx context.Context, ledger string, res ledger.Transaction, accountMetadata ledger.AccountMetadata) { + m.ctrl.T.Helper() + m.ctrl.Call(m, "CommittedTransactions", ctx, ledger, res, accountMetadata) +} + +// CommittedTransactions indicates an expected call of CommittedTransactions. +func (mr *MockListenerMockRecorder) CommittedTransactions(ctx, ledger, res, accountMetadata any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CommittedTransactions", reflect.TypeOf((*MockListener)(nil).CommittedTransactions), ctx, ledger, res, accountMetadata) +} + +// DeletedMetadata mocks base method. +func (m *MockListener) DeletedMetadata(ctx context.Context, ledger, targetType string, targetID any, key string) { + m.ctrl.T.Helper() + m.ctrl.Call(m, "DeletedMetadata", ctx, ledger, targetType, targetID, key) +} + +// DeletedMetadata indicates an expected call of DeletedMetadata. +func (mr *MockListenerMockRecorder) DeletedMetadata(ctx, ledger, targetType, targetID, key any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeletedMetadata", reflect.TypeOf((*MockListener)(nil).DeletedMetadata), ctx, ledger, targetType, targetID, key) +} + +// RevertedTransaction mocks base method. +func (m *MockListener) RevertedTransaction(ctx context.Context, ledger string, reverted, revert ledger.Transaction) { + m.ctrl.T.Helper() + m.ctrl.Call(m, "RevertedTransaction", ctx, ledger, reverted, revert) +} + +// RevertedTransaction indicates an expected call of RevertedTransaction. +func (mr *MockListenerMockRecorder) RevertedTransaction(ctx, ledger, reverted, revert any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RevertedTransaction", reflect.TypeOf((*MockListener)(nil).RevertedTransaction), ctx, ledger, reverted, revert) +} + +// SavedMetadata mocks base method. +func (m *MockListener) SavedMetadata(ctx context.Context, ledger, targetType, id string, metadata metadata.Metadata) { + m.ctrl.T.Helper() + m.ctrl.Call(m, "SavedMetadata", ctx, ledger, targetType, id, metadata) +} + +// SavedMetadata indicates an expected call of SavedMetadata. 
+func (mr *MockListenerMockRecorder) SavedMetadata(ctx, ledger, targetType, id, metadata any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SavedMetadata", reflect.TypeOf((*MockListener)(nil).SavedMetadata), ctx, ledger, targetType, id, metadata) +} diff --git a/internal/controller/ledger/log_process.go b/internal/controller/ledger/log_process.go new file mode 100644 index 000000000..cb09770d9 --- /dev/null +++ b/internal/controller/ledger/log_process.go @@ -0,0 +1,123 @@ +package ledger + +import ( + "context" + "errors" + "fmt" + "github.com/formancehq/go-libs/v2/logging" + "github.com/formancehq/go-libs/v2/platform/postgres" + "github.com/formancehq/go-libs/v2/pointer" + ledger "github.com/formancehq/ledger/internal" + "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/metric" + "go.opentelemetry.io/otel/trace" +) + +type logProcessor[INPUT any, OUTPUT ledger.LogPayload] struct { + deadLockCounter metric.Int64Counter + operation string +} + +func newLogProcessor[INPUT any, OUTPUT ledger.LogPayload](operation string, deadlockCounter metric.Int64Counter) *logProcessor[INPUT, OUTPUT] { + return &logProcessor[INPUT, OUTPUT]{ + operation: operation, + deadLockCounter: deadlockCounter, + } +} + +func (lp *logProcessor[INPUT, OUTPUT]) runTx( + ctx context.Context, + store Store, + parameters Parameters[INPUT], + fn func(ctx context.Context, sqlTX TX, parameters Parameters[INPUT]) (*OUTPUT, error), +) (*OUTPUT, error) { + var payload *OUTPUT + err := store.WithTX(ctx, nil, func(tx TX) (commit bool, err error) { + payload, err = fn(ctx, tx, parameters) + if err != nil { + return false, err + } + log := ledger.NewLog(*payload) + log.IdempotencyKey = parameters.IdempotencyKey + log.IdempotencyHash = ledger.ComputeIdempotencyHash(parameters.Input) + + err = tx.InsertLog(ctx, &log) + if err != nil { + return false, fmt.Errorf("failed to insert log: %w", err) + } + logging.FromContext(ctx).Debugf("log inserted with id %d", log.ID) + + if parameters.DryRun { + return false, nil + } + + return true, nil + }) + return payload, err +} + +func (lp *logProcessor[INPUT, OUTPUT]) forgeLog( + ctx context.Context, + store Store, + parameters Parameters[INPUT], + fn func(ctx context.Context, sqlTX TX, parameters Parameters[INPUT]) (*OUTPUT, error), +) (*OUTPUT, error) { + if parameters.IdempotencyKey != "" { + output, err := lp.fetchLogWithIK(ctx, store, parameters) + if err != nil { + return nil, err + } + if output != nil { + return output, nil + } + } + + for { + output, err := lp.runTx(ctx, store, parameters, fn) + if err != nil { + switch { + case errors.Is(err, postgres.ErrDeadlockDetected): + trace.SpanFromContext(ctx).SetAttributes(attribute.Bool("deadlock", true)) + logging.FromContext(ctx).Info("deadlock detected, retrying...") + lp.deadLockCounter.Add(ctx, 1, metric.WithAttributes( + attribute.String("operation", lp.operation), + )) + continue + // A log with the IK could have been inserted in the meantime, read again the database to retrieve it + case errors.Is(err, ErrIdempotencyKeyConflict{}): + output, err := lp.fetchLogWithIK(ctx, store, parameters) + if err != nil { + return nil, err + } + if output == nil { + panic("incoherent error, received duplicate IK but log not found in database") + } + + return output, nil + default: + return nil, fmt.Errorf("unexpected error while forging log: %w", err) + } + } + + return output, nil + } +} + +func (lp *logProcessor[INPUT, OUTPUT]) fetchLogWithIK(ctx context.Context, store Store, 
parameters Parameters[INPUT]) (*OUTPUT, error) { + log, err := store.ReadLogWithIdempotencyKey(ctx, parameters.IdempotencyKey) + if err != nil && !errors.Is(err, postgres.ErrNotFound) { + return nil, err + } + if err == nil { + // notes(gfyrag): idempotency hash should never be empty in this case, but data from previous + // ledger version does not have this field and it cannot be recomputed + if log.IdempotencyHash != "" { + if computedHash := ledger.ComputeIdempotencyHash(parameters.Input); log.IdempotencyHash != computedHash { + return nil, newErrInvalidIdempotencyInputs(log.IdempotencyKey, log.IdempotencyHash, computedHash) + } + } + + return pointer.For(log.Data.(OUTPUT)), nil + } + return nil, nil +} diff --git a/internal/controller/ledger/log_process_test.go b/internal/controller/ledger/log_process_test.go new file mode 100644 index 000000000..8b58981e2 --- /dev/null +++ b/internal/controller/ledger/log_process_test.go @@ -0,0 +1,61 @@ +package ledger + +import ( + "github.com/formancehq/go-libs/v2/logging" + "github.com/formancehq/go-libs/v2/platform/postgres" + ledger "github.com/formancehq/ledger/internal" + "github.com/stretchr/testify/require" + "go.opentelemetry.io/otel/metric/noop" + "go.uber.org/mock/gomock" + "testing" +) + +func TestForgeLogWithIKConflict(t *testing.T) { + + t.Parallel() + ctx := logging.TestingContext() + ctrl := gomock.NewController(t) + store := NewMockStore(ctrl) + + store.EXPECT(). + ReadLogWithIdempotencyKey(gomock.Any(), "foo"). + Return(nil, postgres.ErrNotFound) + + store.EXPECT(). + WithTX(gomock.Any(), gomock.Any(), gomock.Any()). + Return(ErrIdempotencyKeyConflict{}) + + store.EXPECT(). + ReadLogWithIdempotencyKey(gomock.Any(), "foo"). + Return(&ledger.Log{ + Data: ledger.CreatedTransaction{}, + }, nil) + + lp := newLogProcessor[RunScript, ledger.CreatedTransaction]("foo", noop.Int64Counter{}) + _, err := lp.forgeLog(ctx, store, Parameters[RunScript]{ + IdempotencyKey: "foo", + }, nil) + require.NoError(t, err) +} + +func TestForgeLogWithDeadlock(t *testing.T) { + + t.Parallel() + ctx := logging.TestingContext() + ctrl := gomock.NewController(t) + store := NewMockStore(ctrl) + + // First call returns a deadlock + store.EXPECT(). + WithTX(gomock.Any(), gomock.Any(), gomock.Any()). + Return(postgres.ErrDeadlockDetected) + + // Second call is ok + store.EXPECT(). + WithTX(gomock.Any(), gomock.Any(), gomock.Any()). + Return(nil) + + lp := newLogProcessor[RunScript, ledger.CreatedTransaction]("foo", noop.Int64Counter{}) + _, err := lp.forgeLog(ctx, store, Parameters[RunScript]{}, nil) + require.NoError(t, err) +} diff --git a/internal/controller/ledger/numscript_parser.go b/internal/controller/ledger/numscript_parser.go new file mode 100644 index 000000000..a06d2e081 --- /dev/null +++ b/internal/controller/ledger/numscript_parser.go @@ -0,0 +1,74 @@ +package ledger + +import ( + "crypto/sha256" + "encoding/base64" + + "github.com/bluele/gcache" + "github.com/formancehq/ledger/internal/machine/script/compiler" +) + +//go:generate mockgen -write_source_comment=false -write_package_comment=false -source numscript_parser.go -destination numscript_parser_generated_test.go -package ledger . 
NumscriptParser + +type NumscriptParser interface { + // Parse can return following errors: + // * ErrCompilationFailed + Parse(script string) (NumscriptRuntime, error) +} + +type DefaultNumscriptParser struct{} + +func (d *DefaultNumscriptParser) Parse(script string) (NumscriptRuntime, error) { + ret, err := compiler.Compile(script) + if err != nil { + return nil, newErrCompilationFailed(err) + } + return NewMachineNumscriptRuntimeAdapter(*ret), nil +} + +func NewDefaultNumscriptParser() *DefaultNumscriptParser { + return &DefaultNumscriptParser{} +} + +var _ NumscriptParser = (*DefaultNumscriptParser)(nil) + +type CacheConfiguration struct { + MaxCount uint +} + +type CachedParser struct { + underlying NumscriptParser + cache gcache.Cache +} + +func (c *CachedParser) Parse(script string) (NumscriptRuntime, error) { + digest := sha256.New() + _, err := digest.Write([]byte(script)) + if err != nil { + return nil, err + } + + cacheKey := base64.StdEncoding.EncodeToString(digest.Sum(nil)) + v, err := c.cache.Get(cacheKey) + if err == nil { + return v.(NumscriptRuntime), nil + } + + program, err := c.underlying.Parse(script) + if err != nil { + return nil, err + } + + _ = c.cache.Set(cacheKey, program) + + return program, nil +} + +func NewCachedNumscriptParser(parser NumscriptParser, configuration CacheConfiguration) *CachedParser { + return &CachedParser{ + underlying: parser, + cache: gcache.New(int(configuration.MaxCount)).LFU().Build(), + } +} + +var _ NumscriptParser = (*CachedParser)(nil) diff --git a/internal/controller/ledger/numscript_parser_generated_test.go b/internal/controller/ledger/numscript_parser_generated_test.go new file mode 100644 index 000000000..f319d367d --- /dev/null +++ b/internal/controller/ledger/numscript_parser_generated_test.go @@ -0,0 +1,50 @@ +// Code generated by MockGen. DO NOT EDIT. +// +// Generated by this command: +// +// mockgen -write_source_comment=false -write_package_comment=false -source numscript_parser.go -destination numscript_parser_generated_test.go -package ledger . NumscriptParser +package ledger + +import ( + reflect "reflect" + + gomock "go.uber.org/mock/gomock" +) + +// MockNumscriptParser is a mock of NumscriptParser interface. +type MockNumscriptParser struct { + ctrl *gomock.Controller + recorder *MockNumscriptParserMockRecorder +} + +// MockNumscriptParserMockRecorder is the mock recorder for MockNumscriptParser. +type MockNumscriptParserMockRecorder struct { + mock *MockNumscriptParser +} + +// NewMockNumscriptParser creates a new mock instance. +func NewMockNumscriptParser(ctrl *gomock.Controller) *MockNumscriptParser { + mock := &MockNumscriptParser{ctrl: ctrl} + mock.recorder = &MockNumscriptParserMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockNumscriptParser) EXPECT() *MockNumscriptParserMockRecorder { + return m.recorder +} + +// Parse mocks base method. +func (m *MockNumscriptParser) Parse(script string) (NumscriptRuntime, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Parse", script) + ret0, _ := ret[0].(NumscriptRuntime) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Parse indicates an expected call of Parse. 
+func (mr *MockNumscriptParserMockRecorder) Parse(script any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Parse", reflect.TypeOf((*MockNumscriptParser)(nil).Parse), script) +} diff --git a/internal/controller/ledger/numscript_runtime.go b/internal/controller/ledger/numscript_runtime.go new file mode 100644 index 000000000..0effc17fd --- /dev/null +++ b/internal/controller/ledger/numscript_runtime.go @@ -0,0 +1,87 @@ +package ledger + +import ( + "context" + "fmt" + + "github.com/formancehq/ledger/internal/machine" + + "errors" + + "github.com/formancehq/go-libs/v2/collectionutils" + "github.com/formancehq/go-libs/v2/metadata" + ledger "github.com/formancehq/ledger/internal" + "github.com/formancehq/ledger/internal/machine/vm" + "github.com/formancehq/ledger/internal/machine/vm/program" +) + +type NumscriptExecutionResult struct { + Postings ledger.Postings `json:"postings"` + Metadata metadata.Metadata `json:"metadata"` + AccountMetadata map[string]metadata.Metadata +} + +//go:generate mockgen -write_source_comment=false -write_package_comment=false -source numscript_runtime.go -destination numscript_runtime_generated_test.go -package ledger . NumscriptRuntime +type NumscriptRuntime interface { + Execute(context.Context, TX, map[string]string) (*NumscriptExecutionResult, error) +} + +type MachineNumscriptRuntimeAdapter struct { + program program.Program +} + +func (d *MachineNumscriptRuntimeAdapter) Execute(ctx context.Context, tx TX, vars map[string]string) (*NumscriptExecutionResult, error) { + store := newVmStoreAdapter(tx) + + machineInstance := vm.NewMachine(d.program) + + // notes(gfyrag): machines modify the map, copy it to keep our original parameters unchanged + varsCopy := make(map[string]string) + for k, v := range vars { + varsCopy[k] = v + } + + if err := machineInstance.SetVarsFromJSON(varsCopy); err != nil { + return nil, fmt.Errorf("failed to set vars from JSON: %w", err) + } + err := machineInstance.ResolveResources(ctx, store) + if err != nil { + return nil, fmt.Errorf("failed to resolve resources: %w", err) + } + + if err := machineInstance.ResolveBalances(ctx, store); err != nil { + return nil, fmt.Errorf("failed to resolve balances: %w", err) + } + + if err := machineInstance.Execute(); err != nil { + switch { + case errors.Is(err, &machine.ErrMetadataOverride{}): + errMetadataOverride := &machine.ErrMetadataOverride{} + _ = errors.As(err, &errMetadataOverride) + return nil, newErrMetadataOverride(errMetadataOverride.Key()) + default: + return nil, fmt.Errorf("failed to execute machine: %w", err) + } + } + + return &NumscriptExecutionResult{ + Postings: collectionutils.Map(machineInstance.Postings, func(from vm.Posting) ledger.Posting { + return ledger.Posting{ + Source: from.Source, + Destination: from.Destination, + Amount: from.Amount.ToBigInt(), + Asset: from.Asset, + } + }), + Metadata: machineInstance.GetTxMetaJSON(), + AccountMetadata: machineInstance.GetAccountsMetaJSON(), + }, nil +} + +func NewMachineNumscriptRuntimeAdapter(p program.Program) *MachineNumscriptRuntimeAdapter { + return &MachineNumscriptRuntimeAdapter{ + program: p, + } +} + +var _ NumscriptRuntime = (*MachineNumscriptRuntimeAdapter)(nil) diff --git a/internal/controller/ledger/numscript_runtime_generated_test.go b/internal/controller/ledger/numscript_runtime_generated_test.go new file mode 100644 index 000000000..a116d7d89 --- /dev/null +++ b/internal/controller/ledger/numscript_runtime_generated_test.go @@ -0,0 +1,51 @@ +// Code generated 
by MockGen. DO NOT EDIT. +// +// Generated by this command: +// +// mockgen -write_source_comment=false -write_package_comment=false -source numscript_runtime.go -destination numscript_runtime_generated_test.go -package ledger . NumscriptRuntime +package ledger + +import ( + context "context" + reflect "reflect" + + gomock "go.uber.org/mock/gomock" +) + +// MockNumscriptRuntime is a mock of NumscriptRuntime interface. +type MockNumscriptRuntime struct { + ctrl *gomock.Controller + recorder *MockNumscriptRuntimeMockRecorder +} + +// MockNumscriptRuntimeMockRecorder is the mock recorder for MockNumscriptRuntime. +type MockNumscriptRuntimeMockRecorder struct { + mock *MockNumscriptRuntime +} + +// NewMockNumscriptRuntime creates a new mock instance. +func NewMockNumscriptRuntime(ctrl *gomock.Controller) *MockNumscriptRuntime { + mock := &MockNumscriptRuntime{ctrl: ctrl} + mock.recorder = &MockNumscriptRuntimeMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockNumscriptRuntime) EXPECT() *MockNumscriptRuntimeMockRecorder { + return m.recorder +} + +// Execute mocks base method. +func (m *MockNumscriptRuntime) Execute(arg0 context.Context, arg1 TX, arg2 map[string]string) (*NumscriptExecutionResult, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Execute", arg0, arg1, arg2) + ret0, _ := ret[0].(*NumscriptExecutionResult) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Execute indicates an expected call of Execute. +func (mr *MockNumscriptRuntimeMockRecorder) Execute(arg0, arg1, arg2 any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Execute", reflect.TypeOf((*MockNumscriptRuntime)(nil).Execute), arg0, arg1, arg2) +} diff --git a/internal/controller/ledger/parameters.go b/internal/controller/ledger/parameters.go new file mode 100644 index 000000000..1a97236f0 --- /dev/null +++ b/internal/controller/ledger/parameters.go @@ -0,0 +1,7 @@ +package ledger + +type Parameters[INPUT any] struct { + DryRun bool + IdempotencyKey string + Input INPUT +} diff --git a/internal/controller/ledger/state_registry.go b/internal/controller/ledger/state_registry.go new file mode 100644 index 000000000..429dd56d5 --- /dev/null +++ b/internal/controller/ledger/state_registry.go @@ -0,0 +1,55 @@ +package ledger + +import ( + "sync" + + ledger "github.com/formancehq/ledger/internal" +) + +type State struct { + bucket string + upToDate bool +} + +type StateRegistry struct { + mu sync.Mutex + ledgers map[string]*State +} + +func (r *StateRegistry) Upsert(l ledger.Ledger) bool { + r.mu.Lock() + defer r.mu.Unlock() + + if _, ok := r.ledgers[l.Name]; !ok { + r.ledgers[l.Name] = &State{ + bucket: l.Bucket, + } + return true + } + return false +} + +func (r *StateRegistry) SetUpToDate(name string) { + r.mu.Lock() + defer r.mu.Unlock() + + r.ledgers[name].upToDate = true +} + +func (r *StateRegistry) IsUpToDate(name string) bool { + r.mu.Lock() + defer r.mu.Unlock() + + l, ok := r.ledgers[name] + if !ok { + return false + } + + return l.upToDate +} + +func NewStateRegistry() *StateRegistry { + return &StateRegistry{ + ledgers: make(map[string]*State), + } +} diff --git a/internal/controller/ledger/stats.go b/internal/controller/ledger/stats.go new file mode 100644 index 000000000..46d9c92c4 --- /dev/null +++ b/internal/controller/ledger/stats.go @@ -0,0 +1,30 @@ +package ledger + +import ( + "context" + "fmt" +) + +type Stats struct { + Transactions int `json:"transactions"` + Accounts 
int `json:"accounts"` +} + +func (ctrl *DefaultController) GetStats(ctx context.Context) (Stats, error) { + var stats Stats + + transactions, err := ctrl.store.CountTransactions(ctx, NewListTransactionsQuery(NewPaginatedQueryOptions(PITFilterWithVolumes{}))) + if err != nil { + return stats, fmt.Errorf("counting transactions: %w", err) + } + + accounts, err := ctrl.store.CountAccounts(ctx, NewListAccountsQuery(NewPaginatedQueryOptions(PITFilterWithVolumes{}))) + if err != nil { + return stats, fmt.Errorf("counting accounts: %w", err) + } + + return Stats{ + Transactions: transactions, + Accounts: accounts, + }, nil +} diff --git a/internal/controller/ledger/stats_test.go b/internal/controller/ledger/stats_test.go new file mode 100644 index 000000000..e314c6d02 --- /dev/null +++ b/internal/controller/ledger/stats_test.go @@ -0,0 +1,36 @@ +package ledger + +import ( + "testing" + + "github.com/formancehq/go-libs/v2/logging" + ledger "github.com/formancehq/ledger/internal" + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" +) + +func TestStats(t *testing.T) { + + ctx := logging.TestingContext() + ctrl := gomock.NewController(t) + store := NewMockStore(ctrl) + parser := NewMockNumscriptParser(ctrl) + + store.EXPECT(). + CountTransactions(gomock.Any(), NewListTransactionsQuery(NewPaginatedQueryOptions(PITFilterWithVolumes{}))). + Return(10, nil) + + store.EXPECT(). + CountAccounts(gomock.Any(), NewListAccountsQuery(NewPaginatedQueryOptions(PITFilterWithVolumes{}))). + Return(10, nil) + + ledgerController := NewDefaultController( + ledger.MustNewWithDefault("foo"), + store, + parser, + ) + stats, err := ledgerController.GetStats(ctx) + require.NoError(t, err) + require.Equal(t, 10, stats.Transactions) + require.Equal(t, 10, stats.Accounts) +} diff --git a/internal/controller/ledger/store.go b/internal/controller/ledger/store.go new file mode 100644 index 000000000..86a2d6be3 --- /dev/null +++ b/internal/controller/ledger/store.go @@ -0,0 +1,299 @@ +package ledger + +import ( + "context" + "database/sql" + "encoding/json" + "math/big" + + "github.com/formancehq/go-libs/v2/migrations" + + "github.com/formancehq/go-libs/v2/bun/bunpaginate" + "github.com/formancehq/go-libs/v2/metadata" + "github.com/formancehq/go-libs/v2/pointer" + "github.com/formancehq/go-libs/v2/query" + "github.com/formancehq/go-libs/v2/time" + ledger "github.com/formancehq/ledger/internal" + "github.com/formancehq/ledger/internal/machine/vm" + "github.com/uptrace/bun" +) + +type Balance struct { + Asset string + Balance *big.Int +} + +type BalanceQuery = vm.BalanceQuery +type Balances = vm.Balances + +//go:generate mockgen -write_source_comment=false -write_package_comment=false -source store.go -destination store_generated_test.go -package ledger . TX +type TX interface { + GetAccount(ctx context.Context, query GetAccountQuery) (*ledger.Account, error) + // GetBalances must returns balance and lock account until the end of the TX + GetBalances(ctx context.Context, query BalanceQuery) (Balances, error) + CommitTransaction(ctx context.Context, transaction *ledger.Transaction) error + // RevertTransaction revert the transaction with identifier id + // It returns : + // * the reverted transaction + // * a boolean indicating if the transaction has been reverted. 
false indicates an already reverted transaction (unless error != nil) + // * an error + RevertTransaction(ctx context.Context, id int) (*ledger.Transaction, bool, error) + UpdateTransactionMetadata(ctx context.Context, transactionID int, m metadata.Metadata) (*ledger.Transaction, bool, error) + DeleteTransactionMetadata(ctx context.Context, transactionID int, key string) (*ledger.Transaction, bool, error) + UpdateAccountsMetadata(ctx context.Context, m map[string]metadata.Metadata) error + // UpsertAccount returns a boolean indicating if the account was upserted + UpsertAccount(ctx context.Context, account *ledger.Account) (bool, error) + DeleteAccountMetadata(ctx context.Context, address, key string) error + InsertLog(ctx context.Context, log *ledger.Log) error + + LockLedger(ctx context.Context) error + ListLogs(ctx context.Context, q GetLogsQuery) (*bunpaginate.Cursor[ledger.Log], error) +} + +type Store interface { + WithTX(context.Context, *sql.TxOptions, func(TX) (bool, error)) error + GetDB() bun.IDB + ListLogs(ctx context.Context, q GetLogsQuery) (*bunpaginate.Cursor[ledger.Log], error) + ReadLogWithIdempotencyKey(ctx context.Context, ik string) (*ledger.Log, error) + + ListTransactions(ctx context.Context, q ListTransactionsQuery) (*bunpaginate.Cursor[ledger.Transaction], error) + CountTransactions(ctx context.Context, q ListTransactionsQuery) (int, error) + GetTransaction(ctx context.Context, query GetTransactionQuery) (*ledger.Transaction, error) + CountAccounts(ctx context.Context, a ListAccountsQuery) (int, error) + ListAccounts(ctx context.Context, a ListAccountsQuery) (*bunpaginate.Cursor[ledger.Account], error) + GetAccount(ctx context.Context, q GetAccountQuery) (*ledger.Account, error) + GetAggregatedBalances(ctx context.Context, q GetAggregatedBalanceQuery) (ledger.BalancesByAssets, error) + GetVolumesWithBalances(ctx context.Context, q GetVolumesWithBalancesQuery) (*bunpaginate.Cursor[ledger.VolumesWithBalanceByAssetByAccount], error) + IsUpToDate(ctx context.Context) (bool, error) + GetMigrationsInfo(ctx context.Context) ([]migrations.Info, error) +} + +type ListTransactionsQuery bunpaginate.ColumnPaginatedQuery[PaginatedQueryOptions[PITFilterWithVolumes]] + +func (q ListTransactionsQuery) WithColumn(column string) ListTransactionsQuery { + ret := pointer.For((bunpaginate.ColumnPaginatedQuery[PaginatedQueryOptions[PITFilterWithVolumes]])(q)) + ret = ret.WithColumn(column) + + return ListTransactionsQuery(*ret) +} + +func NewListTransactionsQuery(options PaginatedQueryOptions[PITFilterWithVolumes]) ListTransactionsQuery { + return ListTransactionsQuery{ + PageSize: options.PageSize, + Column: "id", + Order: bunpaginate.OrderDesc, + Options: options, + } +} + +type GetTransactionQuery struct { + PITFilterWithVolumes + ID int +} + +func (q GetTransactionQuery) WithExpandVolumes() GetTransactionQuery { + q.ExpandVolumes = true + + return q +} + +func (q GetTransactionQuery) WithExpandEffectiveVolumes() GetTransactionQuery { + q.ExpandEffectiveVolumes = true + + return q +} + +func NewGetTransactionQuery(id int) GetTransactionQuery { + return GetTransactionQuery{ + PITFilterWithVolumes: PITFilterWithVolumes{}, + ID: id, + } +} + +type ListAccountsQuery bunpaginate.OffsetPaginatedQuery[PaginatedQueryOptions[PITFilterWithVolumes]] + +func (q ListAccountsQuery) WithExpandVolumes() ListAccountsQuery { + q.Options.Options.ExpandVolumes = true + + return q +} + +func (q ListAccountsQuery) WithExpandEffectiveVolumes() ListAccountsQuery { + q.Options.Options.ExpandEffectiveVolumes 
= true + + return q +} + +func NewListAccountsQuery(opts PaginatedQueryOptions[PITFilterWithVolumes]) ListAccountsQuery { + return ListAccountsQuery{ + PageSize: opts.PageSize, + Order: bunpaginate.OrderAsc, + Options: opts, + } +} + +type GetAccountQuery struct { + PITFilterWithVolumes + Addr string +} + +func (q GetAccountQuery) WithPIT(pit time.Time) GetAccountQuery { + q.PIT = &pit + + return q +} + +func (q GetAccountQuery) WithExpandVolumes() GetAccountQuery { + q.ExpandVolumes = true + + return q +} + +func (q GetAccountQuery) WithExpandEffectiveVolumes() GetAccountQuery { + q.ExpandEffectiveVolumes = true + + return q +} + +func NewGetAccountQuery(addr string) GetAccountQuery { + return GetAccountQuery{ + Addr: addr, + } +} + +type GetAggregatedBalanceQuery struct { + PITFilter + QueryBuilder query.Builder + UseInsertionDate bool +} + +func NewGetAggregatedBalancesQuery(filter PITFilter, qb query.Builder, useInsertionDate bool) GetAggregatedBalanceQuery { + return GetAggregatedBalanceQuery{ + PITFilter: filter, + QueryBuilder: qb, + UseInsertionDate: useInsertionDate, + } +} + +type GetVolumesWithBalancesQuery bunpaginate.OffsetPaginatedQuery[PaginatedQueryOptions[FiltersForVolumes]] + +func NewGetVolumesWithBalancesQuery(opts PaginatedQueryOptions[FiltersForVolumes]) GetVolumesWithBalancesQuery { + return GetVolumesWithBalancesQuery{ + PageSize: opts.PageSize, + Order: bunpaginate.OrderAsc, + Options: opts, + } +} + +type PaginatedQueryOptions[T any] struct { + QueryBuilder query.Builder `json:"qb"` + PageSize uint64 `json:"pageSize"` + Options T `json:"options"` +} + +func (opts *PaginatedQueryOptions[T]) UnmarshalJSON(data []byte) error { + type aux struct { + QueryBuilder json.RawMessage `json:"qb"` + PageSize uint64 `json:"pageSize"` + Options T `json:"options"` + } + x := &aux{} + if err := json.Unmarshal(data, x); err != nil { + return err + } + + *opts = PaginatedQueryOptions[T]{ + PageSize: x.PageSize, + Options: x.Options, + } + + var err error + if x.QueryBuilder != nil { + opts.QueryBuilder, err = query.ParseJSON(string(x.QueryBuilder)) + if err != nil { + return err + } + } + + return nil +} + +func (opts PaginatedQueryOptions[T]) WithQueryBuilder(qb query.Builder) PaginatedQueryOptions[T] { + opts.QueryBuilder = qb + + return opts +} + +func (opts PaginatedQueryOptions[T]) WithPageSize(pageSize uint64) PaginatedQueryOptions[T] { + opts.PageSize = pageSize + + return opts +} + +func NewPaginatedQueryOptions[T any](options T) PaginatedQueryOptions[T] { + return PaginatedQueryOptions[T]{ + Options: options, + PageSize: bunpaginate.QueryDefaultPageSize, + } +} + +type PITFilter struct { + PIT *time.Time `json:"pit"` + OOT *time.Time `json:"oot"` +} + +type PITFilterWithVolumes struct { + PITFilter + ExpandVolumes bool `json:"volumes"` + ExpandEffectiveVolumes bool `json:"effectiveVolumes"` +} + +type FiltersForVolumes struct { + PITFilter + UseInsertionDate bool + GroupLvl int +} + +type GetLogsQuery bunpaginate.ColumnPaginatedQuery[PaginatedQueryOptions[any]] + +func (q GetLogsQuery) WithOrder(order bunpaginate.Order) GetLogsQuery { + q.Order = order + return q +} + +func NewListLogsQuery(options PaginatedQueryOptions[any]) GetLogsQuery { + return GetLogsQuery{ + PageSize: options.PageSize, + Column: "id", + Order: bunpaginate.OrderDesc, + Options: options, + } +} + +type vmStoreAdapter struct { + TX +} + +func (v *vmStoreAdapter) GetAccount(ctx context.Context, address string) (*ledger.Account, error) { + account, err := v.TX.GetAccount(ctx, NewGetAccountQuery(address)) 
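+	// note: NewGetAccountQuery only carries the address, so accounts are resolved through this
+	// adapter without any point-in-time filter or volume expansion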
+ if err != nil { + return nil, err + } + return account, nil +} + +var _ vm.Store = (*vmStoreAdapter)(nil) + +func newVmStoreAdapter(tx TX) *vmStoreAdapter { + return &vmStoreAdapter{ + TX: tx, + } +} + +type ListLedgersQuery bunpaginate.OffsetPaginatedQuery[PaginatedQueryOptions[struct{}]] + +func NewListLedgersQuery(pageSize uint64) ListLedgersQuery { + return ListLedgersQuery{ + PageSize: pageSize, + } +} diff --git a/internal/controller/ledger/store_generated_test.go b/internal/controller/ledger/store_generated_test.go new file mode 100644 index 000000000..1618ae7c3 --- /dev/null +++ b/internal/controller/ledger/store_generated_test.go @@ -0,0 +1,451 @@ +// Code generated by MockGen. DO NOT EDIT. +// +// Generated by this command: +// +// mockgen -write_source_comment=false -write_package_comment=false -source store.go -destination store_generated_test.go -package ledger . TX +package ledger + +import ( + context "context" + sql "database/sql" + reflect "reflect" + + bunpaginate "github.com/formancehq/go-libs/v2/bun/bunpaginate" + metadata "github.com/formancehq/go-libs/v2/metadata" + migrations "github.com/formancehq/go-libs/v2/migrations" + ledger "github.com/formancehq/ledger/internal" + bun "github.com/uptrace/bun" + gomock "go.uber.org/mock/gomock" +) + +// MockTX is a mock of TX interface. +type MockTX struct { + ctrl *gomock.Controller + recorder *MockTXMockRecorder +} + +// MockTXMockRecorder is the mock recorder for MockTX. +type MockTXMockRecorder struct { + mock *MockTX +} + +// NewMockTX creates a new mock instance. +func NewMockTX(ctrl *gomock.Controller) *MockTX { + mock := &MockTX{ctrl: ctrl} + mock.recorder = &MockTXMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockTX) EXPECT() *MockTXMockRecorder { + return m.recorder +} + +// CommitTransaction mocks base method. +func (m *MockTX) CommitTransaction(ctx context.Context, transaction *ledger.Transaction) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CommitTransaction", ctx, transaction) + ret0, _ := ret[0].(error) + return ret0 +} + +// CommitTransaction indicates an expected call of CommitTransaction. +func (mr *MockTXMockRecorder) CommitTransaction(ctx, transaction any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CommitTransaction", reflect.TypeOf((*MockTX)(nil).CommitTransaction), ctx, transaction) +} + +// DeleteAccountMetadata mocks base method. +func (m *MockTX) DeleteAccountMetadata(ctx context.Context, address, key string) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "DeleteAccountMetadata", ctx, address, key) + ret0, _ := ret[0].(error) + return ret0 +} + +// DeleteAccountMetadata indicates an expected call of DeleteAccountMetadata. +func (mr *MockTXMockRecorder) DeleteAccountMetadata(ctx, address, key any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteAccountMetadata", reflect.TypeOf((*MockTX)(nil).DeleteAccountMetadata), ctx, address, key) +} + +// DeleteTransactionMetadata mocks base method. 
+func (m *MockTX) DeleteTransactionMetadata(ctx context.Context, transactionID int, key string) (*ledger.Transaction, bool, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "DeleteTransactionMetadata", ctx, transactionID, key) + ret0, _ := ret[0].(*ledger.Transaction) + ret1, _ := ret[1].(bool) + ret2, _ := ret[2].(error) + return ret0, ret1, ret2 +} + +// DeleteTransactionMetadata indicates an expected call of DeleteTransactionMetadata. +func (mr *MockTXMockRecorder) DeleteTransactionMetadata(ctx, transactionID, key any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteTransactionMetadata", reflect.TypeOf((*MockTX)(nil).DeleteTransactionMetadata), ctx, transactionID, key) +} + +// GetAccount mocks base method. +func (m *MockTX) GetAccount(ctx context.Context, query GetAccountQuery) (*ledger.Account, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetAccount", ctx, query) + ret0, _ := ret[0].(*ledger.Account) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetAccount indicates an expected call of GetAccount. +func (mr *MockTXMockRecorder) GetAccount(ctx, query any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAccount", reflect.TypeOf((*MockTX)(nil).GetAccount), ctx, query) +} + +// GetBalances mocks base method. +func (m *MockTX) GetBalances(ctx context.Context, query BalanceQuery) (Balances, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetBalances", ctx, query) + ret0, _ := ret[0].(Balances) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetBalances indicates an expected call of GetBalances. +func (mr *MockTXMockRecorder) GetBalances(ctx, query any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetBalances", reflect.TypeOf((*MockTX)(nil).GetBalances), ctx, query) +} + +// InsertLog mocks base method. +func (m *MockTX) InsertLog(ctx context.Context, log *ledger.Log) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "InsertLog", ctx, log) + ret0, _ := ret[0].(error) + return ret0 +} + +// InsertLog indicates an expected call of InsertLog. +func (mr *MockTXMockRecorder) InsertLog(ctx, log any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "InsertLog", reflect.TypeOf((*MockTX)(nil).InsertLog), ctx, log) +} + +// ListLogs mocks base method. +func (m *MockTX) ListLogs(ctx context.Context, q GetLogsQuery) (*bunpaginate.Cursor[ledger.Log], error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ListLogs", ctx, q) + ret0, _ := ret[0].(*bunpaginate.Cursor[ledger.Log]) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ListLogs indicates an expected call of ListLogs. +func (mr *MockTXMockRecorder) ListLogs(ctx, q any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListLogs", reflect.TypeOf((*MockTX)(nil).ListLogs), ctx, q) +} + +// LockLedger mocks base method. +func (m *MockTX) LockLedger(ctx context.Context) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "LockLedger", ctx) + ret0, _ := ret[0].(error) + return ret0 +} + +// LockLedger indicates an expected call of LockLedger. +func (mr *MockTXMockRecorder) LockLedger(ctx any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "LockLedger", reflect.TypeOf((*MockTX)(nil).LockLedger), ctx) +} + +// RevertTransaction mocks base method. 
+func (m *MockTX) RevertTransaction(ctx context.Context, id int) (*ledger.Transaction, bool, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "RevertTransaction", ctx, id) + ret0, _ := ret[0].(*ledger.Transaction) + ret1, _ := ret[1].(bool) + ret2, _ := ret[2].(error) + return ret0, ret1, ret2 +} + +// RevertTransaction indicates an expected call of RevertTransaction. +func (mr *MockTXMockRecorder) RevertTransaction(ctx, id any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RevertTransaction", reflect.TypeOf((*MockTX)(nil).RevertTransaction), ctx, id) +} + +// UpdateAccountsMetadata mocks base method. +func (m_2 *MockTX) UpdateAccountsMetadata(ctx context.Context, m map[string]metadata.Metadata) error { + m_2.ctrl.T.Helper() + ret := m_2.ctrl.Call(m_2, "UpdateAccountsMetadata", ctx, m) + ret0, _ := ret[0].(error) + return ret0 +} + +// UpdateAccountsMetadata indicates an expected call of UpdateAccountsMetadata. +func (mr *MockTXMockRecorder) UpdateAccountsMetadata(ctx, m any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateAccountsMetadata", reflect.TypeOf((*MockTX)(nil).UpdateAccountsMetadata), ctx, m) +} + +// UpdateTransactionMetadata mocks base method. +func (m_2 *MockTX) UpdateTransactionMetadata(ctx context.Context, transactionID int, m metadata.Metadata) (*ledger.Transaction, bool, error) { + m_2.ctrl.T.Helper() + ret := m_2.ctrl.Call(m_2, "UpdateTransactionMetadata", ctx, transactionID, m) + ret0, _ := ret[0].(*ledger.Transaction) + ret1, _ := ret[1].(bool) + ret2, _ := ret[2].(error) + return ret0, ret1, ret2 +} + +// UpdateTransactionMetadata indicates an expected call of UpdateTransactionMetadata. +func (mr *MockTXMockRecorder) UpdateTransactionMetadata(ctx, transactionID, m any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateTransactionMetadata", reflect.TypeOf((*MockTX)(nil).UpdateTransactionMetadata), ctx, transactionID, m) +} + +// UpsertAccount mocks base method. +func (m *MockTX) UpsertAccount(ctx context.Context, account *ledger.Account) (bool, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "UpsertAccount", ctx, account) + ret0, _ := ret[0].(bool) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// UpsertAccount indicates an expected call of UpsertAccount. +func (mr *MockTXMockRecorder) UpsertAccount(ctx, account any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpsertAccount", reflect.TypeOf((*MockTX)(nil).UpsertAccount), ctx, account) +} + +// MockStore is a mock of Store interface. +type MockStore struct { + ctrl *gomock.Controller + recorder *MockStoreMockRecorder +} + +// MockStoreMockRecorder is the mock recorder for MockStore. +type MockStoreMockRecorder struct { + mock *MockStore +} + +// NewMockStore creates a new mock instance. +func NewMockStore(ctrl *gomock.Controller) *MockStore { + mock := &MockStore{ctrl: ctrl} + mock.recorder = &MockStoreMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockStore) EXPECT() *MockStoreMockRecorder { + return m.recorder +} + +// CountAccounts mocks base method. 
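A minimal usage sketch (not part of the diff) of how the generated MockTX can be wired into a test with go.uber.org/mock: the stubbed GetAccount call, the zero-value GetAccountQuery and the empty ledger.Account returned here are placeholders, but the controller/EXPECT/Return wiring is the standard gomock pattern.

package ledger

import (
	"context"
	"testing"

	ledger "github.com/formancehq/ledger/internal"
	"github.com/stretchr/testify/require"
	"go.uber.org/mock/gomock"
)

func TestGetAccountWithMockTX(t *testing.T) {
	ctrl := gomock.NewController(t)
	tx := NewMockTX(ctrl)

	// Stub GetAccount: accept any context and query, return a canned
	// account instead of touching the real storage layer.
	tx.EXPECT().
		GetAccount(gomock.Any(), gomock.Any()).
		Return(&ledger.Account{}, nil)

	account, err := tx.GetAccount(context.Background(), GetAccountQuery{})
	require.NoError(t, err)
	require.NotNil(t, account)
}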
+func (m *MockStore) CountAccounts(ctx context.Context, a ListAccountsQuery) (int, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CountAccounts", ctx, a) + ret0, _ := ret[0].(int) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// CountAccounts indicates an expected call of CountAccounts. +func (mr *MockStoreMockRecorder) CountAccounts(ctx, a any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CountAccounts", reflect.TypeOf((*MockStore)(nil).CountAccounts), ctx, a) +} + +// CountTransactions mocks base method. +func (m *MockStore) CountTransactions(ctx context.Context, q ListTransactionsQuery) (int, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CountTransactions", ctx, q) + ret0, _ := ret[0].(int) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// CountTransactions indicates an expected call of CountTransactions. +func (mr *MockStoreMockRecorder) CountTransactions(ctx, q any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CountTransactions", reflect.TypeOf((*MockStore)(nil).CountTransactions), ctx, q) +} + +// GetAccount mocks base method. +func (m *MockStore) GetAccount(ctx context.Context, q GetAccountQuery) (*ledger.Account, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetAccount", ctx, q) + ret0, _ := ret[0].(*ledger.Account) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetAccount indicates an expected call of GetAccount. +func (mr *MockStoreMockRecorder) GetAccount(ctx, q any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAccount", reflect.TypeOf((*MockStore)(nil).GetAccount), ctx, q) +} + +// GetAggregatedBalances mocks base method. +func (m *MockStore) GetAggregatedBalances(ctx context.Context, q GetAggregatedBalanceQuery) (ledger.BalancesByAssets, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetAggregatedBalances", ctx, q) + ret0, _ := ret[0].(ledger.BalancesByAssets) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetAggregatedBalances indicates an expected call of GetAggregatedBalances. +func (mr *MockStoreMockRecorder) GetAggregatedBalances(ctx, q any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAggregatedBalances", reflect.TypeOf((*MockStore)(nil).GetAggregatedBalances), ctx, q) +} + +// GetDB mocks base method. +func (m *MockStore) GetDB() bun.IDB { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetDB") + ret0, _ := ret[0].(bun.IDB) + return ret0 +} + +// GetDB indicates an expected call of GetDB. +func (mr *MockStoreMockRecorder) GetDB() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetDB", reflect.TypeOf((*MockStore)(nil).GetDB)) +} + +// GetMigrationsInfo mocks base method. +func (m *MockStore) GetMigrationsInfo(ctx context.Context) ([]migrations.Info, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetMigrationsInfo", ctx) + ret0, _ := ret[0].([]migrations.Info) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetMigrationsInfo indicates an expected call of GetMigrationsInfo. +func (mr *MockStoreMockRecorder) GetMigrationsInfo(ctx any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetMigrationsInfo", reflect.TypeOf((*MockStore)(nil).GetMigrationsInfo), ctx) +} + +// GetTransaction mocks base method. 
+func (m *MockStore) GetTransaction(ctx context.Context, query GetTransactionQuery) (*ledger.Transaction, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetTransaction", ctx, query) + ret0, _ := ret[0].(*ledger.Transaction) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetTransaction indicates an expected call of GetTransaction. +func (mr *MockStoreMockRecorder) GetTransaction(ctx, query any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTransaction", reflect.TypeOf((*MockStore)(nil).GetTransaction), ctx, query) +} + +// GetVolumesWithBalances mocks base method. +func (m *MockStore) GetVolumesWithBalances(ctx context.Context, q GetVolumesWithBalancesQuery) (*bunpaginate.Cursor[ledger.VolumesWithBalanceByAssetByAccount], error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetVolumesWithBalances", ctx, q) + ret0, _ := ret[0].(*bunpaginate.Cursor[ledger.VolumesWithBalanceByAssetByAccount]) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetVolumesWithBalances indicates an expected call of GetVolumesWithBalances. +func (mr *MockStoreMockRecorder) GetVolumesWithBalances(ctx, q any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetVolumesWithBalances", reflect.TypeOf((*MockStore)(nil).GetVolumesWithBalances), ctx, q) +} + +// IsUpToDate mocks base method. +func (m *MockStore) IsUpToDate(ctx context.Context) (bool, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "IsUpToDate", ctx) + ret0, _ := ret[0].(bool) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// IsUpToDate indicates an expected call of IsUpToDate. +func (mr *MockStoreMockRecorder) IsUpToDate(ctx any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "IsUpToDate", reflect.TypeOf((*MockStore)(nil).IsUpToDate), ctx) +} + +// ListAccounts mocks base method. +func (m *MockStore) ListAccounts(ctx context.Context, a ListAccountsQuery) (*bunpaginate.Cursor[ledger.Account], error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ListAccounts", ctx, a) + ret0, _ := ret[0].(*bunpaginate.Cursor[ledger.Account]) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ListAccounts indicates an expected call of ListAccounts. +func (mr *MockStoreMockRecorder) ListAccounts(ctx, a any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListAccounts", reflect.TypeOf((*MockStore)(nil).ListAccounts), ctx, a) +} + +// ListLogs mocks base method. +func (m *MockStore) ListLogs(ctx context.Context, q GetLogsQuery) (*bunpaginate.Cursor[ledger.Log], error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ListLogs", ctx, q) + ret0, _ := ret[0].(*bunpaginate.Cursor[ledger.Log]) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ListLogs indicates an expected call of ListLogs. +func (mr *MockStoreMockRecorder) ListLogs(ctx, q any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListLogs", reflect.TypeOf((*MockStore)(nil).ListLogs), ctx, q) +} + +// ListTransactions mocks base method. +func (m *MockStore) ListTransactions(ctx context.Context, q ListTransactionsQuery) (*bunpaginate.Cursor[ledger.Transaction], error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ListTransactions", ctx, q) + ret0, _ := ret[0].(*bunpaginate.Cursor[ledger.Transaction]) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ListTransactions indicates an expected call of ListTransactions. 
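The same pattern works for MockStore and paginated reads; a sketch (same package and imports as the previous example, plus the bunpaginate import already used in this file) in which ListAccounts is stubbed to return a pre-built cursor. The single account with address "world", the Data field of bunpaginate.Cursor and the zero-value ListAccountsQuery are assumptions made for illustration.

func TestListAccountsWithMockStore(t *testing.T) {
	ctrl := gomock.NewController(t)
	store := NewMockStore(ctrl)

	// Fake one page of results; no database is involved.
	store.EXPECT().
		ListAccounts(gomock.Any(), gomock.Any()).
		Return(&bunpaginate.Cursor[ledger.Account]{
			Data: []ledger.Account{{Address: "world"}},
		}, nil)

	cursor, err := store.ListAccounts(context.Background(), ListAccountsQuery{})
	require.NoError(t, err)
	require.Len(t, cursor.Data, 1)
}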
+func (mr *MockStoreMockRecorder) ListTransactions(ctx, q any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListTransactions", reflect.TypeOf((*MockStore)(nil).ListTransactions), ctx, q) +} + +// ReadLogWithIdempotencyKey mocks base method. +func (m *MockStore) ReadLogWithIdempotencyKey(ctx context.Context, ik string) (*ledger.Log, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ReadLogWithIdempotencyKey", ctx, ik) + ret0, _ := ret[0].(*ledger.Log) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ReadLogWithIdempotencyKey indicates an expected call of ReadLogWithIdempotencyKey. +func (mr *MockStoreMockRecorder) ReadLogWithIdempotencyKey(ctx, ik any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ReadLogWithIdempotencyKey", reflect.TypeOf((*MockStore)(nil).ReadLogWithIdempotencyKey), ctx, ik) +} + +// WithTX mocks base method. +func (m *MockStore) WithTX(arg0 context.Context, arg1 *sql.TxOptions, arg2 func(TX) (bool, error)) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "WithTX", arg0, arg1, arg2) + ret0, _ := ret[0].(error) + return ret0 +} + +// WithTX indicates an expected call of WithTX. +func (mr *MockStoreMockRecorder) WithTX(arg0, arg1, arg2 any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "WithTX", reflect.TypeOf((*MockStore)(nil).WithTX), arg0, arg1, arg2) +} diff --git a/internal/controller/system/controller.go b/internal/controller/system/controller.go new file mode 100644 index 000000000..e721e2ac8 --- /dev/null +++ b/internal/controller/system/controller.go @@ -0,0 +1,178 @@ +package system + +import ( + "context" + "reflect" + "time" + + "go.opentelemetry.io/otel/metric" + noopmetrics "go.opentelemetry.io/otel/metric/noop" + "go.opentelemetry.io/otel/trace" + nooptracer "go.opentelemetry.io/otel/trace/noop" + + "github.com/formancehq/ledger/internal/tracing" + + "github.com/formancehq/go-libs/v2/bun/bunpaginate" + ledger "github.com/formancehq/ledger/internal" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" +) + +type Controller interface { + GetLedgerController(ctx context.Context, name string) (ledgercontroller.Controller, error) + GetLedger(ctx context.Context, name string) (*ledger.Ledger, error) + ListLedgers(ctx context.Context, query ledgercontroller.ListLedgersQuery) (*bunpaginate.Cursor[ledger.Ledger], error) + // CreateLedger can return following errors: + // * ErrLedgerAlreadyExists + // * ledger.ErrInvalidLedgerName + // It create the ledger in system store and the underlying storage + CreateLedger(ctx context.Context, name string, configuration ledger.Configuration) error + UpdateLedgerMetadata(ctx context.Context, name string, m map[string]string) error + DeleteLedgerMetadata(ctx context.Context, param string, key string) error +} + +type DefaultController struct { + store Store + listener ledgercontroller.Listener + parser ledgercontroller.NumscriptParser + registry *ledgercontroller.StateRegistry + databaseRetryConfiguration DatabaseRetryConfiguration + + tracer trace.Tracer + meter metric.Meter + enableFeatures bool +} + +func (ctrl *DefaultController) GetLedgerController(ctx context.Context, name string) (ledgercontroller.Controller, error) { + return tracing.Trace(ctx, ctrl.tracer, "GetLedgerController", func(ctx context.Context) (ledgercontroller.Controller, error) { + store, l, err := ctrl.store.OpenLedger(ctx, name) + if err != nil { + return nil, err + 
} + + var ledgerController ledgercontroller.Controller = ledgercontroller.NewDefaultController( + *l, + store, + ctrl.parser, + ledgercontroller.WithMeter(ctrl.meter), + ) + + // Add too many client error handling + ledgerController = ledgercontroller.NewControllerWithTooManyClientHandling( + ledgerController, + ctrl.tracer, + ledgercontroller.DelayCalculatorFn(func(i int) time.Duration { + if i < ctrl.databaseRetryConfiguration.MaxRetry { + return time.Duration(i+1) * ctrl.databaseRetryConfiguration.Delay + } + + return 0 + }), + ) + + // Add cache regarding database state + ledgerController = ledgercontroller.NewControllerWithCache(*l, ledgerController, ctrl.registry) + + // Add traces + ledgerController = ledgercontroller.NewControllerWithTraces(ledgerController, ctrl.tracer) + + // Add events listener + if ctrl.listener != nil { + ledgerController = ledgercontroller.NewControllerWithEvents(*l, ledgerController, ctrl.listener) + } + + return ledgerController, nil + }) +} + +func (ctrl *DefaultController) CreateLedger(ctx context.Context, name string, configuration ledger.Configuration) error { + return tracing.SkipResult(tracing.Trace(ctx, ctrl.tracer, "CreateLedger", tracing.NoResult(func(ctx context.Context) error { + configuration.SetDefaults() + + if !ctrl.enableFeatures { + if !reflect.DeepEqual(configuration.Features, ledger.DefaultFeatures) { + return ErrExperimentalFeaturesDisabled + } + } + + l, err := ledger.New(name, configuration) + if err != nil { + return newErrInvalidLedgerConfiguration(err) + } + + return ctrl.store.CreateLedger(ctx, l) + }))) +} + +func (ctrl *DefaultController) GetLedger(ctx context.Context, name string) (*ledger.Ledger, error) { + return tracing.Trace(ctx, ctrl.tracer, "GetLedger", func(ctx context.Context) (*ledger.Ledger, error) { + return ctrl.store.GetLedger(ctx, name) + }) +} + +func (ctrl *DefaultController) ListLedgers(ctx context.Context, query ledgercontroller.ListLedgersQuery) (*bunpaginate.Cursor[ledger.Ledger], error) { + return tracing.Trace(ctx, ctrl.tracer, "ListLedgers", func(ctx context.Context) (*bunpaginate.Cursor[ledger.Ledger], error) { + return ctrl.store.ListLedgers(ctx, query) + }) +} + +func (ctrl *DefaultController) UpdateLedgerMetadata(ctx context.Context, name string, m map[string]string) error { + return tracing.SkipResult(tracing.Trace(ctx, ctrl.tracer, "UpdateLedgerMetadata", tracing.NoResult(func(ctx context.Context) error { + return ctrl.store.UpdateLedgerMetadata(ctx, name, m) + }))) +} + +func (ctrl *DefaultController) DeleteLedgerMetadata(ctx context.Context, param string, key string) error { + return tracing.SkipResult(tracing.Trace(ctx, ctrl.tracer, "DeleteLedgerMetadata", tracing.NoResult(func(ctx context.Context) error { + return ctrl.store.DeleteLedgerMetadata(ctx, param, key) + }))) +} + +func NewDefaultController(store Store, listener ledgercontroller.Listener, opts ...Option) *DefaultController { + ret := &DefaultController{ + store: store, + listener: listener, + registry: ledgercontroller.NewStateRegistry(), + } + for _, opt := range append(defaultOptions, opts...) 
{ + opt(ret) + } + return ret +} + +type Option func(ctrl *DefaultController) + +func WithParser(parser ledgercontroller.NumscriptParser) Option { + return func(ctrl *DefaultController) { + ctrl.parser = parser + } +} + +func WithDatabaseRetryConfiguration(configuration DatabaseRetryConfiguration) Option { + return func(ctrl *DefaultController) { + ctrl.databaseRetryConfiguration = configuration + } +} + +func WithMeter(m metric.Meter) Option { + return func(ctrl *DefaultController) { + ctrl.meter = m + } +} + +func WithTracer(t trace.Tracer) Option { + return func(ctrl *DefaultController) { + ctrl.tracer = t + } +} + +func WithEnableFeatures(v bool) Option { + return func(ctrl *DefaultController) { + ctrl.enableFeatures = v + } +} + +var defaultOptions = []Option{ + WithParser(ledgercontroller.NewDefaultNumscriptParser()), + WithMeter(noopmetrics.Meter{}), + WithTracer(nooptracer.Tracer{}), +} diff --git a/internal/controller/system/errors.go b/internal/controller/system/errors.go new file mode 100644 index 000000000..d5b5e1da7 --- /dev/null +++ b/internal/controller/system/errors.go @@ -0,0 +1,30 @@ +package system + +import ( + "errors" + "fmt" +) + +var ( + ErrLedgerAlreadyExists = errors.New("ledger already exists") + ErrExperimentalFeaturesDisabled = errors.New("experimental features are disabled") +) + +type ErrInvalidLedgerConfiguration struct { + err error +} + +func (e ErrInvalidLedgerConfiguration) Error() string { + return fmt.Sprintf("invalid ledger configuration: %s", e.err) +} + +func (e ErrInvalidLedgerConfiguration) Is(err error) bool { + _, ok := err.(ErrInvalidLedgerConfiguration) + return ok +} + +func newErrInvalidLedgerConfiguration(err error) ErrInvalidLedgerConfiguration { + return ErrInvalidLedgerConfiguration{ + err: err, + } +} \ No newline at end of file diff --git a/internal/controller/system/module.go b/internal/controller/system/module.go new file mode 100644 index 000000000..18f839583 --- /dev/null +++ b/internal/controller/system/module.go @@ -0,0 +1,54 @@ +package system + +import ( + "time" + + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + "go.opentelemetry.io/otel/metric" + "go.opentelemetry.io/otel/trace" + "go.uber.org/fx" +) + +type DatabaseRetryConfiguration struct { + MaxRetry int + Delay time.Duration +} + +type ModuleConfiguration struct { + NSCacheConfiguration ledgercontroller.CacheConfiguration + DatabaseRetryConfiguration DatabaseRetryConfiguration + EnableFeatures bool +} + +func NewFXModule(configuration ModuleConfiguration) fx.Option { + return fx.Options( + fx.Provide(func(controller *DefaultController) Controller { + return controller + }), + fx.Provide(func( + store Store, + listener ledgercontroller.Listener, + meterProvider metric.MeterProvider, + tracerProvider trace.TracerProvider, + ) *DefaultController { + options := make([]Option, 0) + if configuration.NSCacheConfiguration.MaxCount != 0 { + options = append(options, WithParser(ledgercontroller.NewCachedNumscriptParser( + ledgercontroller.NewDefaultNumscriptParser(), + configuration.NSCacheConfiguration, + ))) + } + + return NewDefaultController( + store, + listener, + append(options, + WithDatabaseRetryConfiguration(configuration.DatabaseRetryConfiguration), + WithMeter(meterProvider.Meter("core")), + WithTracer(tracerProvider.Tracer("core")), + WithEnableFeatures(configuration.EnableFeatures), + )..., + ) + }), + ) +} diff --git a/internal/controller/system/store.go b/internal/controller/system/store.go new file mode 100644 index 
000000000..bea6c3f27 --- /dev/null +++ b/internal/controller/system/store.go @@ -0,0 +1,20 @@ +package system + +import ( + "context" + + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + + "github.com/formancehq/go-libs/v2/bun/bunpaginate" + "github.com/formancehq/go-libs/v2/metadata" + ledger "github.com/formancehq/ledger/internal" +) + +type Store interface { + GetLedger(ctx context.Context, name string) (*ledger.Ledger, error) + ListLedgers(ctx context.Context, query ledgercontroller.ListLedgersQuery) (*bunpaginate.Cursor[ledger.Ledger], error) + UpdateLedgerMetadata(ctx context.Context, name string, m metadata.Metadata) error + DeleteLedgerMetadata(ctx context.Context, param string, key string) error + OpenLedger(context.Context, string) (ledgercontroller.Store, *ledger.Ledger, error) + CreateLedger(context.Context, *ledger.Ledger) error +} diff --git a/internal/doc.go b/internal/doc.go new file mode 100644 index 000000000..0d2884577 --- /dev/null +++ b/internal/doc.go @@ -0,0 +1,2 @@ +//go:generate gomarkdoc -o README.md +package ledger diff --git a/internal/engine/chain/chain.go b/internal/engine/chain/chain.go deleted file mode 100644 index 9f0aaabd4..000000000 --- a/internal/engine/chain/chain.go +++ /dev/null @@ -1,79 +0,0 @@ -package chain - -import ( - "context" - "math/big" - "sync" - - ledger "github.com/formancehq/ledger/internal" - storageerrors "github.com/formancehq/ledger/internal/storage/sqlutils" -) - -type Chain struct { - mu sync.Mutex - lastLog *ledger.ChainedLog - lastTXID *big.Int - store Store -} - -func (chain *Chain) ChainLog(log *ledger.Log) *ledger.ChainedLog { - chain.mu.Lock() - defer chain.mu.Unlock() - - chain.lastLog = log.ChainLog(chain.lastLog) - return chain.lastLog -} - -func (chain *Chain) Init(ctx context.Context) error { - lastTx, err := chain.store.GetLastTransaction(ctx) - if err != nil && !storageerrors.IsNotFoundError(err) { - return err - } - if lastTx != nil { - chain.lastTXID = lastTx.ID - } - - chain.lastLog, err = chain.store.GetLastLog(ctx) - if err != nil && !storageerrors.IsNotFoundError(err) { - return err - } - return nil -} - -func (chain *Chain) AllocateNewTxID() *big.Int { - chain.mu.Lock() - defer chain.mu.Unlock() - - chain.lastTXID = chain.predictNextTxID() - - return chain.lastTXID -} - -func (chain *Chain) PredictNextTxID() *big.Int { - chain.mu.Lock() - defer chain.mu.Unlock() - - return chain.predictNextTxID() -} - -func (chain *Chain) predictNextTxID() *big.Int { - return big.NewInt(0).Add(chain.lastTXID, big.NewInt(1)) -} - -func (chain *Chain) ReplaceLast(log *ledger.ChainedLog) { - if log.Type == ledger.NewTransactionLogType { - chain.lastTXID = log.Data.(ledger.NewTransactionLogPayload).Transaction.ID - } - chain.lastLog = log -} - -func (chain *Chain) GetLastLog() *ledger.ChainedLog { - return chain.lastLog -} - -func New(store Store) *Chain { - return &Chain{ - lastTXID: big.NewInt(-1), - store: store, - } -} diff --git a/internal/engine/chain/store.go b/internal/engine/chain/store.go deleted file mode 100644 index 9e5771b8f..000000000 --- a/internal/engine/chain/store.go +++ /dev/null @@ -1,12 +0,0 @@ -package chain - -import ( - "context" - - ledger "github.com/formancehq/ledger/internal" -) - -type Store interface { - GetLastLog(ctx context.Context) (*ledger.ChainedLog, error) - GetLastTransaction(ctx context.Context) (*ledger.ExpandedTransaction, error) -} diff --git a/internal/engine/command/commander.go b/internal/engine/command/commander.go deleted file mode 100644 index 
4fdd0d86f..000000000 --- a/internal/engine/command/commander.go +++ /dev/null @@ -1,347 +0,0 @@ -package command - -import ( - "context" - "fmt" - "math/big" - "sync" - - "github.com/formancehq/ledger/internal/machine/vm/program" - "github.com/formancehq/ledger/internal/opentelemetry/tracer" - - "github.com/formancehq/go-libs/time" - - storageerrors "github.com/formancehq/ledger/internal/storage/sqlutils" - - "github.com/formancehq/go-libs/metadata" - ledger "github.com/formancehq/ledger/internal" - "github.com/formancehq/ledger/internal/bus" - "github.com/formancehq/ledger/internal/engine/utils/batching" - "github.com/formancehq/ledger/internal/machine/vm" - "github.com/pkg/errors" -) - -type Parameters struct { - DryRun bool - IdempotencyKey string -} - -type Chainer interface { - ChainLog(log *ledger.Log) *ledger.ChainedLog - AllocateNewTxID() *big.Int - PredictNextTxID() *big.Int -} - -type Commander struct { - *batching.Batcher[*ledger.ChainedLog] - store Store - locker Locker - compiler *Compiler - running sync.WaitGroup - referencer *Referencer - - monitor bus.Monitor - chain Chainer -} - -func New( - store Store, - locker Locker, - compiler *Compiler, - referencer *Referencer, - monitor bus.Monitor, - chain Chainer, - batchSize int, -) *Commander { - return &Commander{ - store: store, - locker: locker, - compiler: compiler, - chain: chain, - referencer: referencer, - Batcher: batching.NewBatcher(store.InsertLogs, 1, batchSize), - monitor: monitor, - } -} - -func (commander *Commander) GetLedgerStore() Store { - return commander.store -} - -func (commander *Commander) exec(ctx context.Context, parameters Parameters, script ledger.RunScript, - logComputer func(tx *ledger.Transaction, accountMetadata map[string]metadata.Metadata) *ledger.Log) (*ledger.ChainedLog, error) { - - if script.Script.Plain == "" { - return nil, NewErrNoScript() - } - - if script.Timestamp.IsZero() { - script.Timestamp = time.Now() - } - - execContext := newExecutionContext(commander, parameters) - return execContext.run(ctx, func(executionContext *executionContext) (*ledger.ChainedLog, error) { - if script.Reference != "" { - if err := commander.referencer.take(referenceTxReference, script.Reference); err != nil { - return nil, NewErrConflict() - } - defer commander.referencer.release(referenceTxReference, script.Reference) - - err := func() error { - ctx, span := tracer.Start(ctx, "CheckReference") - defer span.End() - - _, err := commander.store.GetTransactionByReference(ctx, script.Reference) - if err == nil { - return NewErrConflict() - } - if err != nil && !storageerrors.IsNotFoundError(err) { - return err - } - return nil - }() - if err != nil { - return nil, err - } - } - - program, err := func() (*program.Program, error) { - _, span := tracer.Start(ctx, "CompileNumscript") - defer span.End() - - program, err := commander.compiler.Compile(script.Plain) - if err != nil { - return nil, NewErrCompilationFailed(err) - } - - return program, nil - }() - if err != nil { - return nil, err - } - - m := vm.NewMachine(*program) - if err := m.SetVarsFromJSON(script.Vars); err != nil { - return nil, NewErrCompilationFailed(err) - } - - readLockAccounts, writeLockAccounts, err := m.ResolveResources(ctx, commander.store) - if err != nil { - return nil, NewErrCompilationFailed(err) - } - lockAccounts := Accounts{ - Read: readLockAccounts, - Write: writeLockAccounts, - } - - unlock, err := func() (Unlock, error) { - _, span := tracer.Start(ctx, "Lock") - defer span.End() - - unlock, err := commander.locker.Lock(ctx, 
lockAccounts) - if err != nil { - return nil, errors.Wrap(err, "locking accounts for tx processing") - } - - return unlock, nil - }() - if err != nil { - return nil, err - } - defer unlock(ctx) - - err = func() error { - ctx, span := tracer.Start(ctx, "ResolveBalances") - defer span.End() - - err = m.ResolveBalances(ctx, commander.store) - if err != nil { - return errors.Wrap(err, "could not resolve balances") - } - - return nil - }() - if err != nil { - return nil, err - } - result, err := func() (*vm.Result, error) { - _, span := tracer.Start(ctx, "RunNumscript") - defer span.End() - - result, err := vm.Run(m, script) - if err != nil { - return nil, NewErrMachine(err) - } - - return result, nil - }() - if err != nil { - return nil, err - } - - if len(result.Postings) == 0 { - return nil, NewErrNoPostings() - } - - txID := commander.chain.PredictNextTxID() - if !parameters.DryRun { - txID = commander.chain.AllocateNewTxID() - } - - tx := ledger.NewTransaction(). - WithPostings(result.Postings...). - WithMetadata(result.Metadata). - WithDate(script.Timestamp). - WithID(txID). - WithReference(script.Reference) - - log := logComputer(tx, result.AccountMetadata) - if parameters.IdempotencyKey != "" { - log = log.WithIdempotencyKey(parameters.IdempotencyKey) - } - - return executionContext.AppendLog(ctx, log) - }) -} - -func (commander *Commander) CreateTransaction(ctx context.Context, parameters Parameters, script ledger.RunScript) (*ledger.Transaction, error) { - - ctx, span := tracer.Start(ctx, "CreateTransaction") - defer span.End() - - log, err := commander.exec(ctx, parameters, script, ledger.NewTransactionLog) - if err != nil { - - return nil, err - } - - commander.monitor.CommittedTransactions(ctx, *log.Data.(ledger.NewTransactionLogPayload).Transaction, log.Data.(ledger.NewTransactionLogPayload).AccountMetadata) - - return log.Data.(ledger.NewTransactionLogPayload).Transaction, nil -} - -func (commander *Commander) SaveMeta(ctx context.Context, parameters Parameters, targetType string, targetID interface{}, m metadata.Metadata) error { - execContext := newExecutionContext(commander, parameters) - _, err := execContext.run(ctx, func(executionContext *executionContext) (*ledger.ChainedLog, error) { - var ( - log *ledger.Log - at = time.Now() - ) - switch targetType { - case ledger.MetaTargetTypeTransaction: - _, err := commander.store.GetTransaction(ctx, targetID.(*big.Int)) - if err != nil { - if storageerrors.IsNotFoundError(err) { - return nil, newErrSaveMetadataTransactionNotFound() - } - } - log = ledger.NewSetMetadataLog(at, ledger.SetMetadataLogPayload{ - TargetType: ledger.MetaTargetTypeTransaction, - TargetID: targetID.(*big.Int), - Metadata: m, - }) - case ledger.MetaTargetTypeAccount: - log = ledger.NewSetMetadataLog(at, ledger.SetMetadataLogPayload{ - TargetType: ledger.MetaTargetTypeAccount, - TargetID: targetID.(string), - Metadata: m, - }) - default: - panic(errors.Errorf("unknown target type '%s'", targetType)) - } - - return executionContext.AppendLog(ctx, log) - }) - if err != nil { - return err - } - - commander.monitor.SavedMetadata(ctx, targetType, fmt.Sprint(targetID), m) - return nil -} - -func (commander *Commander) RevertTransaction(ctx context.Context, parameters Parameters, id *big.Int, force, atEffectiveDate bool) (*ledger.Transaction, error) { - - if err := commander.referencer.take(referenceReverts, id); err != nil { - return nil, NewErrRevertTransactionOccurring() - } - defer commander.referencer.release(referenceReverts, id) - - transactionToRevert, err 
:= commander.store.GetTransaction(ctx, id) - if err != nil { - if storageerrors.IsNotFoundError(err) { - return nil, NewErrRevertTransactionNotFound() - } - return nil, err - } - if transactionToRevert.Reverted { - return nil, NewErrRevertTransactionAlreadyReverted() - } - - rt := transactionToRevert.Reverse() - rt.Metadata = ledger.MarkReverts(metadata.Metadata{}, transactionToRevert.ID) - - script := ledger.TxToScriptData(ledger.TransactionData{ - Postings: rt.Postings, - Metadata: rt.Metadata, - }, force) - if atEffectiveDate { - script.Timestamp = transactionToRevert.Timestamp - } - - log, err := commander.exec(ctx, parameters, script, - func(tx *ledger.Transaction, accountMetadata map[string]metadata.Metadata) *ledger.Log { - return ledger.NewRevertedTransactionLog(tx.Timestamp, transactionToRevert.ID, tx) - }) - if err != nil { - return nil, err - } - - commander.monitor.RevertedTransaction(ctx, log.Data.(ledger.RevertedTransactionLogPayload).RevertTransaction, transactionToRevert) - - return log.Data.(ledger.RevertedTransactionLogPayload).RevertTransaction, nil -} - -func (commander *Commander) Close() { - commander.Batcher.Close() - commander.running.Wait() -} - -func (commander *Commander) DeleteMetadata(ctx context.Context, parameters Parameters, targetType string, targetID any, key string) error { - execContext := newExecutionContext(commander, parameters) - _, err := execContext.run(ctx, func(executionContext *executionContext) (*ledger.ChainedLog, error) { - var ( - log *ledger.Log - at = time.Now() - ) - switch targetType { - case ledger.MetaTargetTypeTransaction: - _, err := commander.store.GetTransaction(ctx, targetID.(*big.Int)) - if err != nil { - return nil, newErrDeleteMetadataTransactionNotFound() - } - log = ledger.NewDeleteMetadataLog(at, ledger.DeleteMetadataLogPayload{ - TargetType: ledger.MetaTargetTypeTransaction, - TargetID: targetID.(*big.Int), - Key: key, - }) - case ledger.MetaTargetTypeAccount: - log = ledger.NewDeleteMetadataLog(at, ledger.DeleteMetadataLogPayload{ - TargetType: ledger.MetaTargetTypeAccount, - TargetID: targetID.(string), - Key: key, - }) - default: - panic(errors.Errorf("unknown target type '%s'", targetType)) - } - - return executionContext.AppendLog(ctx, log) - }) - if err != nil { - return err - } - - commander.monitor.DeletedMetadata(ctx, targetType, targetID, key) - - return nil -} diff --git a/internal/engine/command/commander_test.go b/internal/engine/command/commander_test.go deleted file mode 100644 index a3fac0682..000000000 --- a/internal/engine/command/commander_test.go +++ /dev/null @@ -1,419 +0,0 @@ -package command - -import ( - "context" - "math/big" - "sync" - "testing" - - "github.com/formancehq/go-libs/testing/docker" - - "github.com/formancehq/go-libs/bun/bundebug" - "github.com/uptrace/bun" - - "github.com/formancehq/ledger/internal/engine/chain" - - "github.com/formancehq/go-libs/time" - - "github.com/formancehq/go-libs/bun/bunconnect" - "github.com/formancehq/go-libs/testing/platform/pgtesting" - "github.com/formancehq/ledger/internal/storage/ledgerstore" - "github.com/google/uuid" - - "github.com/formancehq/ledger/internal/machine" - - "github.com/formancehq/go-libs/logging" - "github.com/formancehq/go-libs/metadata" - ledger "github.com/formancehq/ledger/internal" - "github.com/formancehq/ledger/internal/bus" - storageerrors "github.com/formancehq/ledger/internal/storage" - internaltesting "github.com/formancehq/ledger/internal/testing" - "github.com/pkg/errors" - "github.com/stretchr/testify/require" -) - -var ( - 
now = time.Now() -) - -type testCase struct { - name string - setup func(t *testing.T, r Store) - script string - reference string - expectedErrorCode string - expectedTx *ledger.Transaction - expectedLogs []*ledger.Log - parameters Parameters -} - -var testCases = []testCase{ - { - name: "nominal", - script: ` - send [GEM 100] ( - source = @world - destination = @mint - )`, - expectedTx: ledger.NewTransaction().WithPostings( - ledger.NewPosting("world", "mint", "GEM", big.NewInt(100)), - ), - expectedLogs: []*ledger.Log{ - ledger.NewTransactionLog( - ledger.NewTransaction().WithPostings( - ledger.NewPosting("world", "mint", "GEM", big.NewInt(100))), - map[string]metadata.Metadata{}, - ), - }, - }, - { - name: "no script", - script: ``, - expectedErrorCode: ErrInvalidTransactionCodeNoScript, - }, - { - name: "invalid script", - script: `XXX`, - expectedErrorCode: ErrInvalidTransactionCodeCompilationFailed, - }, - { - name: "set reference conflict", - setup: func(t *testing.T, store Store) { - tx := ledger.NewTransaction(). - WithPostings(ledger.NewPosting("world", "mint", "GEM", big.NewInt(100))). - WithReference("tx_ref") - log := ledger.NewTransactionLog(tx, nil) - err := store.InsertLogs(context.Background(), log.ChainLog(nil)) - require.NoError(t, err) - }, - script: ` - send [GEM 100] ( - source = @world - destination = @mint - )`, - reference: "tx_ref", - expectedErrorCode: ErrInvalidTransactionCodeConflict, - }, - { - name: "set reference", - script: ` - send [GEM 100] ( - source = @world - destination = @mint - )`, - reference: "tx_ref", - expectedTx: ledger.NewTransaction(). - WithPostings( - ledger.NewPosting("world", "mint", "GEM", big.NewInt(100)), - ). - WithReference("tx_ref"), - expectedLogs: []*ledger.Log{ - ledger.NewTransactionLog( - ledger.NewTransaction(). - WithPostings( - ledger.NewPosting("world", "mint", "GEM", big.NewInt(100)), - ). - WithReference("tx_ref"), - map[string]metadata.Metadata{}, - ), - }, - }, - { - name: "using idempotency", - script: ` - send [GEM 100] ( - source = @world - destination = @mint - )`, - reference: "tx_ref", - expectedTx: ledger.NewTransaction(). - WithPostings( - ledger.NewPosting("world", "mint", "GEM", big.NewInt(100)), - ), - expectedLogs: []*ledger.Log{ - ledger.NewTransactionLog( - ledger.NewTransaction(). - WithPostings( - ledger.NewPosting("world", "mint", "GEM", big.NewInt(100)), - ), - map[string]metadata.Metadata{}, - ).WithIdempotencyKey("testing"), - }, - setup: func(t *testing.T, r Store) { - log := ledger.NewTransactionLog( - ledger.NewTransaction(). - WithPostings( - ledger.NewPosting("world", "mint", "GEM", big.NewInt(100)), - ). 
- WithDate(now), - map[string]metadata.Metadata{}, - ).WithIdempotencyKey("testing") - err := r.InsertLogs(context.Background(), log.ChainLog(nil)) - require.NoError(t, err) - }, - parameters: Parameters{ - IdempotencyKey: "testing", - }, - }, -} - -func TestCreateTransaction(t *testing.T) { - t.Parallel() - - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(t *testing.T) { - - store := storageerrors.NewInMemoryStore() - ctx := logging.TestingContext() - - commander := New(store, NoOpLocker, NewCompiler(1024), NewReferencer(), bus.NewNoOpMonitor(), chain.New(store), 50) - go commander.Run(ctx) - defer commander.Close() - - if tc.setup != nil { - tc.setup(t, store) - } - ret, err := commander.CreateTransaction(ctx, tc.parameters, ledger.RunScript{ - Script: ledger.Script{ - Plain: tc.script, - }, - Timestamp: now, - Reference: tc.reference, - }) - - if tc.expectedErrorCode != "" { - require.True(t, IsInvalidTransactionError(err, tc.expectedErrorCode)) - } else { - require.NoError(t, err) - require.NotNil(t, ret) - tc.expectedTx.Timestamp = now - internaltesting.RequireEqual(t, tc.expectedTx, ret) - - for ind := range tc.expectedLogs { - expectedLog := tc.expectedLogs[ind] - switch v := expectedLog.Data.(type) { - case ledger.NewTransactionLogPayload: - v.Transaction.Timestamp = now - expectedLog.Data = v - } - expectedLog.Date = now - } - } - }) - } -} - -func TestRevert(t *testing.T) { - txID := big.NewInt(0) - store := storageerrors.NewInMemoryStore() - ctx := logging.TestingContext() - - log := ledger.NewTransactionLog( - ledger.NewTransaction().WithPostings( - ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), - ), - map[string]metadata.Metadata{}, - ).ChainLog(nil) - err := store.InsertLogs(context.Background(), log) - require.NoError(t, err) - - commander := New(store, NoOpLocker, NewCompiler(1024), NewReferencer(), bus.NewNoOpMonitor(), chain.New(store), 50) - go commander.Run(ctx) - defer commander.Close() - - _, err = commander.RevertTransaction(ctx, Parameters{}, txID, false, false) - require.NoError(t, err) -} - -func TestRevertWithAlreadyReverted(t *testing.T) { - - store := storageerrors.NewInMemoryStore() - ctx := logging.TestingContext() - - tx := ledger.NewTransaction().WithPostings(ledger.NewPosting("world", "bank", "USD", big.NewInt(100))) - err := store.InsertLogs(context.Background(), - ledger.NewTransactionLog(tx, map[string]metadata.Metadata{}).ChainLog(nil), - ledger.NewRevertedTransactionLog(time.Now(), tx.ID, ledger.NewTransaction()).ChainLog(nil), - ) - require.NoError(t, err) - - commander := New(store, NoOpLocker, NewCompiler(1024), NewReferencer(), bus.NewNoOpMonitor(), chain.New(store), 50) - go commander.Run(ctx) - defer commander.Close() - - _, err = commander.RevertTransaction(context.Background(), Parameters{}, tx.ID, false, false) - require.True(t, IsRevertError(err, ErrRevertTransactionCodeAlreadyReverted)) -} - -func TestRevertWithRevertOccurring(t *testing.T) { - - store := storageerrors.NewInMemoryStore() - ctx := logging.TestingContext() - - tx := ledger.NewTransaction().WithPostings( - ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), - ) - log := ledger.NewTransactionLog(tx, map[string]metadata.Metadata{}) - err := store.InsertLogs(ctx, log.ChainLog(nil)) - require.NoError(t, err) - - referencer := NewReferencer() - commander := New(store, NoOpLocker, NewCompiler(1024), referencer, bus.NewNoOpMonitor(), chain.New(store), 50) - go commander.Run(ctx) - defer commander.Close() - - referencer.take(referenceReverts, 
big.NewInt(0)) - - _, err = commander.RevertTransaction(ctx, Parameters{}, tx.ID, false, false) - require.True(t, IsRevertError(err, ErrRevertTransactionCodeOccurring)) -} - -func TestForceRevert(t *testing.T) { - - store := storageerrors.NewInMemoryStore() - ctx := logging.TestingContext() - - tx1 := ledger.NewTransaction().WithPostings( - ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), - ) - tx2 := ledger.NewTransaction().WithPostings( - ledger.NewPosting("bank", "foo", "USD", big.NewInt(100)), - ) - err := store.InsertLogs(ctx, ledger.ChainLogs( - ledger.NewTransactionLog(tx1, map[string]metadata.Metadata{}), - ledger.NewTransactionLog(tx2, map[string]metadata.Metadata{}), - )...) - require.NoError(t, err) - - commander := New(store, NoOpLocker, NewCompiler(1024), NewReferencer(), bus.NewNoOpMonitor(), chain.New(store), 50) - go commander.Run(ctx) - defer commander.Close() - - _, err = commander.RevertTransaction(ctx, Parameters{}, tx1.ID, false, false) - require.NotNil(t, err) - require.True(t, errors.Is(err, &machine.ErrInsufficientFund{})) - balance, err := store.GetBalance(ctx, "bank", "USD") - require.NoError(t, err) - require.Equal(t, uint64(0), balance.Uint64()) - - _, err = commander.RevertTransaction(ctx, Parameters{}, tx1.ID, true, false) - require.Nil(t, err) - - balance, err = store.GetBalance(ctx, "bank", "USD") - require.NoError(t, err) - require.Equal(t, big.NewInt(-100), balance) - - balance, err = store.GetBalance(ctx, "world", "USD") - require.NoError(t, err) - require.Equal(t, uint64(0), balance.Uint64()) -} - -func TestRevertAtEffectiveDate(t *testing.T) { - - store := storageerrors.NewInMemoryStore() - ctx := logging.TestingContext() - now := time.Now() - - tx1 := ledger.NewTransaction().WithPostings( - ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), - ).WithDate(now) - tx2 := ledger.NewTransaction().WithPostings( - ledger.NewPosting("world", "foo", "USD", big.NewInt(100)), - ).WithDate(now.Add(time.Second)) - err := store.InsertLogs(ctx, ledger.ChainLogs( - ledger.NewTransactionLog(tx1, map[string]metadata.Metadata{}), - ledger.NewTransactionLog(tx2, map[string]metadata.Metadata{}), - )...) - require.NoError(t, err) - - commander := New(store, NoOpLocker, NewCompiler(1024), NewReferencer(), bus.NewNoOpMonitor(), chain.New(store), 50) - go commander.Run(ctx) - defer commander.Close() - - revertTx, err := commander.RevertTransaction(ctx, Parameters{}, tx1.ID, false, true) - require.Nil(t, err) - require.Equal(t, tx1.Timestamp, revertTx.Timestamp) - - balance, err := store.GetBalance(ctx, "bank", "USD") - require.NoError(t, err) - internaltesting.RequireEqual(t, big.NewInt(0), balance) - - balance, err = store.GetBalance(ctx, "world", "USD") - require.NoError(t, err) - internaltesting.RequireEqual(t, big.NewInt(-100), balance) -} - -func TestParallelTransactions(t *testing.T) { - dockerPool := docker.NewPool(t, logging.Testing()) - srv := pgtesting.CreatePostgresServer(t, dockerPool) - ctx := logging.TestingContext() - - pgDB := srv.NewDatabase(t) - - connectionOptions := bunconnect.ConnectionOptions{ - DatabaseSourceName: pgDB.ConnString(), - } - - hooks := make([]bun.QueryHook, 0) - if testing.Verbose() { - hooks = append(hooks, bundebug.NewQueryHook()) - } - - sqlDB, err := bunconnect.OpenSQLDB(ctx, connectionOptions, hooks...) 
- require.NoError(t, err) - t.Cleanup(func() { - require.NoError(t, sqlDB.Close()) - }) - - bucketName := uuid.NewString() - - bucket, err := ledgerstore.ConnectToBucket(ctx, connectionOptions, bucketName) - require.NoError(t, err) - t.Cleanup(func() { - require.NoError(t, bucket.Close()) - }) - - err = ledgerstore.MigrateBucket(ctx, sqlDB, bucketName) - require.NoError(t, err) - - store, err := ledgerstore.New(bucket, "default") - require.NoError(t, err) - - commander := New(store, NewDefaultLocker(), NewCompiler(1024), NewReferencer(), bus.NewNoOpMonitor(), chain.New(store), 50) - go commander.Run(ctx) - defer commander.Close() - - _, err = commander.CreateTransaction(ctx, Parameters{}, ledger.TxToScriptData(ledger.TransactionData{ - Postings: []ledger.Posting{{ - Source: "world", - Destination: "foo", - Amount: big.NewInt(1000), - Asset: "USD", - }}, - }, false)) - require.NoError(t, err) - - count := 100 - wg := sync.WaitGroup{} - wg.Add(count) - for i := 0; i < count; i++ { - go func() { - _, _ = commander.CreateTransaction(ctx, Parameters{}, ledger.TxToScriptData(ledger.TransactionData{ - Postings: []ledger.Posting{{ - Source: "foo", - Destination: "bar", - Amount: big.NewInt(100), - Asset: "USD", - }}, - }, false)) - wg.Done() - }() - - } - wg.Wait() - - account, err := store.GetAccountWithVolumes(ctx, ledgerstore.NewGetAccountQuery("bar").WithExpandVolumes()) - require.NoError(t, err) - internaltesting.RequireEqual(t, big.NewInt(1000), account.Volumes.Balances()["USD"]) -} diff --git a/internal/engine/command/compiler.go b/internal/engine/command/compiler.go deleted file mode 100644 index df609eea1..000000000 --- a/internal/engine/command/compiler.go +++ /dev/null @@ -1,45 +0,0 @@ -package command - -import ( - "crypto/sha256" - "encoding/base64" - - "github.com/bluele/gcache" - "github.com/formancehq/ledger/internal/machine/script/compiler" - "github.com/formancehq/ledger/internal/machine/vm/program" -) - -type Compiler struct { - cache gcache.Cache -} - -func (c *Compiler) Compile(script string) (*program.Program, error) { - - digest := sha256.New() - _, err := digest.Write([]byte(script)) - if err != nil { - return nil, err - } - - cacheKey := base64.StdEncoding.EncodeToString(digest.Sum(nil)) - v, err := c.cache.Get(cacheKey) - if err == nil { - return v.(*program.Program), nil - } - - program, err := compiler.Compile(script) - if err != nil { - return nil, err - } - _ = c.cache.Set(cacheKey, program) - - return program, nil -} - -func NewCompiler(maxCacheCount int) *Compiler { - return &Compiler{ - cache: gcache.New(maxCacheCount). - LFU(). 
- Build(), - } -} diff --git a/internal/engine/command/compiler_test.go b/internal/engine/command/compiler_test.go deleted file mode 100644 index a8074133e..000000000 --- a/internal/engine/command/compiler_test.go +++ /dev/null @@ -1,24 +0,0 @@ -package command - -import ( - "testing" - - "github.com/stretchr/testify/require" -) - -func TestCompiler(t *testing.T) { - - script := `send [USD/2 100] ( - source = @world - destination = @bank -)` - - compiler := NewCompiler(1024) - p1, err := compiler.Compile(script) - require.NoError(t, err) - - p2, err := compiler.Compile(script) - require.NoError(t, err) - - require.Equal(t, p1, p2) -} diff --git a/internal/engine/command/context.go b/internal/engine/command/context.go deleted file mode 100644 index e2f93c5ae..000000000 --- a/internal/engine/command/context.go +++ /dev/null @@ -1,87 +0,0 @@ -package command - -import ( - "context" - - "github.com/formancehq/ledger/internal/opentelemetry/tracer" - - storageerrors "github.com/formancehq/ledger/internal/storage/sqlutils" - - ledger "github.com/formancehq/ledger/internal" -) - -type executionContext struct { - commander *Commander - parameters Parameters -} - -func (e *executionContext) AppendLog(ctx context.Context, log *ledger.Log) (*ledger.ChainedLog, error) { - ctx, span := tracer.Start(ctx, "AppendLog") - defer span.End() - - if e.parameters.DryRun { - return log.ChainLog(nil), nil - } - - chainedLog := func() *ledger.ChainedLog { - _, span := tracer.Start(ctx, "ChainLog") - defer span.End() - - return e.commander.chain.ChainLog(log) - }() - - done := make(chan struct{}) - func() { - _, span := tracer.Start(ctx, "AppendLogToQueue") - defer span.End() - - e.commander.Append(chainedLog, func() { - close(done) - }) - }() - - err := func() error { - _, span := tracer.Start(ctx, "WaitLogAck") - defer span.End() - - select { - case <-ctx.Done(): - return ctx.Err() - case <-done: - return nil - } - }() - if err != nil { - return nil, err - } - - return chainedLog, nil -} - -func (e *executionContext) run(ctx context.Context, executor func(e *executionContext) (*ledger.ChainedLog, error)) (*ledger.ChainedLog, error) { - if ik := e.parameters.IdempotencyKey; ik != "" { - if err := e.commander.referencer.take(referenceIks, ik); err != nil { - return nil, err - } - defer e.commander.referencer.release(referenceIks, ik) - - ctx, span := tracer.Start(ctx, "CheckIK") - defer span.End() - - chainedLog, err := e.commander.store.ReadLogWithIdempotencyKey(ctx, ik) - if err == nil { - return chainedLog, nil - } - if err != nil && !storageerrors.IsNotFoundError(err) { - return nil, err - } - } - return executor(e) -} - -func newExecutionContext(commander *Commander, parameters Parameters) *executionContext { - return &executionContext{ - commander: commander, - parameters: parameters, - } -} diff --git a/internal/engine/command/errors.go b/internal/engine/command/errors.go deleted file mode 100644 index 16ad75c8c..000000000 --- a/internal/engine/command/errors.go +++ /dev/null @@ -1,212 +0,0 @@ -package command - -import ( - "fmt" - - "github.com/pkg/errors" -) - -const ( - ErrSaveMetaCodeTransactionNotFound = "TRANSACTION_NOT_FOUND" -) - -type errSaveMeta struct { - code string -} - -func (e *errSaveMeta) Error() string { - return fmt.Sprintf("invalid transaction: %s", e.code) -} - -func (e *errSaveMeta) Is(err error) bool { - _, ok := err.(*errSaveMeta) - return ok -} - -func newErrSaveMeta(code string) *errSaveMeta { - return &errSaveMeta{ - code: code, - } -} - -func newErrSaveMetadataTransactionNotFound() 
*errSaveMeta { - return newErrSaveMeta(ErrSaveMetaCodeTransactionNotFound) -} - -func IsSaveMetaError(err error, code string) bool { - e := &errSaveMeta{} - if errors.As(err, &e) { - return e.code == code - } - - return false -} - -const ( - ErrDeleteMetaCodeTransactionNotFound = "TRANSACTION_NOT_FOUND" -) - -type errDeleteMeta struct { - code string -} - -func (e *errDeleteMeta) Error() string { - return fmt.Sprintf("invalid transaction: %s", e.code) -} - -func (e *errDeleteMeta) Is(err error) bool { - _, ok := err.(*errDeleteMeta) - return ok -} - -func newErrDeleteMeta(code string) *errDeleteMeta { - return &errDeleteMeta{ - code: code, - } -} - -func IsDeleteMetaError(err error, code string) bool { - e := &errDeleteMeta{} - if errors.As(err, &e) { - return e.code == code - } - - return false -} - -func newErrDeleteMetadataTransactionNotFound() *errDeleteMeta { - return newErrDeleteMeta(ErrDeleteMetaCodeTransactionNotFound) -} - -type errRevert struct { - code string -} - -func (e *errRevert) Error() string { - return fmt.Sprintf("invalid transaction: %s", e.code) -} - -func (e *errRevert) Is(err error) bool { - _, ok := err.(*errRevert) - return ok -} - -func NewErrRevert(code string) *errRevert { - return &errRevert{ - code: code, - } -} - -const ( - ErrRevertTransactionCodeAlreadyReverted = "ALREADY_REVERTED" - ErrRevertTransactionCodeOccurring = "REVERT_OCCURRING" - ErrRevertTransactionCodeNotFound = "NOT_FOUND" -) - -func NewErrRevertTransactionOccurring() *errRevert { - return NewErrRevert(ErrRevertTransactionCodeOccurring) -} - -func NewErrRevertTransactionAlreadyReverted() *errRevert { - return NewErrRevert(ErrRevertTransactionCodeAlreadyReverted) -} - -func NewErrRevertTransactionNotFound() *errRevert { - return NewErrRevert(ErrRevertTransactionCodeNotFound) -} - -func IsRevertError(err error, code string) bool { - e := &errRevert{} - if errors.As(err, &e) { - return e.code == code - } - - return false -} - -type errInvalidTransaction struct { - code string - err error -} - -func (e *errInvalidTransaction) Error() string { - if e.err == nil { - return fmt.Sprintf("invalid transaction: %s", e.code) - } - return fmt.Sprintf("invalid transaction: %s (%s)", e.code, e.err) -} - -func (e *errInvalidTransaction) Is(err error) bool { - _, ok := err.(*errInvalidTransaction) - return ok -} - -func (e *errInvalidTransaction) Cause() error { - return e.err -} - -func NewErrInvalidTransaction(code string, err error) *errInvalidTransaction { - return &errInvalidTransaction{ - code: code, - err: err, - } -} - -const ( - ErrInvalidTransactionCodeCompilationFailed = "COMPILATION_FAILED" - ErrInvalidTransactionCodeNoScript = "NO_SCRIPT" - ErrInvalidTransactionCodeNoPostings = "NO_POSTINGS" - ErrInvalidTransactionCodeConflict = "CONFLICT" -) - -func NewErrCompilationFailed(err error) *errInvalidTransaction { - return NewErrInvalidTransaction(ErrInvalidTransactionCodeCompilationFailed, err) -} - -func NewErrNoScript() *errInvalidTransaction { - return NewErrInvalidTransaction(ErrInvalidTransactionCodeNoScript, nil) -} - -func NewErrNoPostings() *errInvalidTransaction { - return NewErrInvalidTransaction(ErrInvalidTransactionCodeNoPostings, nil) -} - -func NewErrConflict() *errInvalidTransaction { - return NewErrInvalidTransaction(ErrInvalidTransactionCodeConflict, nil) -} - -func IsInvalidTransactionError(err error, code string) bool { - e := &errInvalidTransaction{} - if errors.As(err, &e) { - return e.code == code - } - - return false -} - -type errMachine struct { - err error -} - -func (e 
*errMachine) Error() string { - return errors.Wrap(e.err, "running numscript").Error() -} - -func (e *errMachine) Is(err error) bool { - _, ok := err.(*errMachine) - return ok -} - -func (e *errMachine) Unwrap() error { - return e.err -} - -func NewErrMachine(err error) *errMachine { - return &errMachine{ - err: err, - } -} - -func IsErrMachine(err error) bool { - return errors.Is(err, &errMachine{}) -} diff --git a/internal/engine/command/lock.go b/internal/engine/command/lock.go deleted file mode 100644 index 57a197d16..000000000 --- a/internal/engine/command/lock.go +++ /dev/null @@ -1,151 +0,0 @@ -package command - -import ( - "context" - "sync" - "sync/atomic" - "time" - - "github.com/formancehq/go-libs/collectionutils" - "github.com/pkg/errors" -) - -type Unlock func(ctx context.Context) - -type Locker interface { - Lock(ctx context.Context, accounts Accounts) (Unlock, error) -} -type LockerFn func(ctx context.Context, accounts Accounts) (Unlock, error) - -func (fn LockerFn) Lock(ctx context.Context, accounts Accounts) (Unlock, error) { - return fn(ctx, accounts) -} - -var NoOpLocker = LockerFn(func(ctx context.Context, accounts Accounts) (Unlock, error) { - return func(ctx context.Context) {}, nil -}) - -type Accounts struct { - Read []string - Write []string -} - -type lockIntent struct { - accounts Accounts - acquired chan struct{} - at time.Time -} - -func (intent *lockIntent) tryLock(chain *DefaultLocker) bool { - - for _, account := range intent.accounts.Read { - _, ok := chain.writeLocks[account] - if ok { - return false - } - } - - for _, account := range intent.accounts.Write { - _, ok := chain.readLocks[account] - if ok { - return false - } - _, ok = chain.writeLocks[account] - if ok { - return false - } - } - - for _, account := range intent.accounts.Read { - atomicValue, ok := chain.readLocks[account] - if !ok { - atomicValue = &atomic.Int64{} - chain.readLocks[account] = atomicValue - } - atomicValue.Add(1) - } - for _, account := range intent.accounts.Write { - chain.writeLocks[account] = struct{}{} - } - - return true -} - -func (intent *lockIntent) unlock(chain *DefaultLocker) { - for _, account := range intent.accounts.Read { - atomicValue := chain.readLocks[account] - if atomicValue.Add(-1) == 0 { - delete(chain.readLocks, account) - } - } - for _, account := range intent.accounts.Write { - delete(chain.writeLocks, account) - } -} - -type DefaultLocker struct { - intents *collectionutils.LinkedList[*lockIntent] - mu sync.Mutex - readLocks map[string]*atomic.Int64 - writeLocks map[string]struct{} -} - -func (defaultLocker *DefaultLocker) Lock(ctx context.Context, accounts Accounts) (Unlock, error) { - defaultLocker.mu.Lock() - - intent := &lockIntent{ - accounts: accounts, - acquired: make(chan struct{}), - at: time.Now(), - } - - recheck := func() { - node := defaultLocker.intents.FirstNode() - for { - if node == nil { - return - } - if node.Value().tryLock(defaultLocker) { - node.Remove() - close(node.Value().acquired) - return - } - node = node.Next() - } - } - - releaseIntent := func(ctx context.Context) { - defaultLocker.mu.Lock() - defer defaultLocker.mu.Unlock() - - intent.unlock(defaultLocker) - - recheck() - } - - acquired := intent.tryLock(defaultLocker) - if acquired { - defaultLocker.mu.Unlock() - - return releaseIntent, nil - } - - defaultLocker.intents.Append(intent) - defaultLocker.mu.Unlock() - - select { - case <-ctx.Done(): - defaultLocker.intents.RemoveValue(intent) - return nil, errors.Wrapf(ctx.Err(), "locking accounts: %s as read, and %s as 
write", accounts.Read, accounts.Write) - case <-intent.acquired: - return releaseIntent, nil - } -} - -func NewDefaultLocker() *DefaultLocker { - return &DefaultLocker{ - intents: collectionutils.NewLinkedList[*lockIntent](), - readLocks: map[string]*atomic.Int64{}, - writeLocks: map[string]struct{}{}, - } -} diff --git a/internal/engine/command/lock_test.go b/internal/engine/command/lock_test.go deleted file mode 100644 index bc6fe5f7d..000000000 --- a/internal/engine/command/lock_test.go +++ /dev/null @@ -1,46 +0,0 @@ -package command - -import ( - "fmt" - "math/rand" - "sync" - "testing" - "time" - - "github.com/formancehq/go-libs/logging" - "github.com/stretchr/testify/require" -) - -func TestLock(t *testing.T) { - locker := NewDefaultLocker() - var accounts []string - for i := 0; i < 10; i++ { - accounts = append(accounts, fmt.Sprintf("accounts:%d", i)) - } - - r := rand.New(rand.NewSource(time.Now().Unix())) - ctx := logging.TestingContext() - - const nbLoop = 1000 - wg := sync.WaitGroup{} - wg.Add(nbLoop) - - for i := 0; i < nbLoop; i++ { - read := accounts[r.Int31n(10)] - write := accounts[r.Int31n(10)] - go func() { - unlock, err := locker.Lock(ctx, Accounts{ - Read: []string{read}, - Write: []string{write}, - }) - require.NoError(t, err) - defer unlock(ctx) - - <-time.After(10 * time.Millisecond) - wg.Add(-1) - }() - } - - wg.Wait() - -} diff --git a/internal/engine/command/reference.go b/internal/engine/command/reference.go deleted file mode 100644 index 86d4a10c6..000000000 --- a/internal/engine/command/reference.go +++ /dev/null @@ -1,42 +0,0 @@ -package command - -import ( - "fmt" - "sync" - - "github.com/pkg/errors" -) - -type Reference int - -const ( - referenceReverts = iota - referenceIks - referenceTxReference -) - -type Referencer struct { - references map[Reference]*sync.Map -} - -func (r *Referencer) take(ref Reference, key any) error { - _, loaded := r.references[ref].LoadOrStore(fmt.Sprintf("%d/%s", ref, key), struct{}{}) - if loaded { - return errors.New("already taken") - } - return nil -} - -func (r *Referencer) release(ref Reference, key any) { - r.references[ref].Delete(fmt.Sprintf("%d/%s", ref, key)) -} - -func NewReferencer() *Referencer { - return &Referencer{ - references: map[Reference]*sync.Map{ - referenceReverts: {}, - referenceIks: {}, - referenceTxReference: {}, - }, - } -} diff --git a/internal/engine/command/store.go b/internal/engine/command/store.go deleted file mode 100644 index 25569e56f..000000000 --- a/internal/engine/command/store.go +++ /dev/null @@ -1,19 +0,0 @@ -package command - -import ( - "context" - "math/big" - - ledger "github.com/formancehq/ledger/internal" - "github.com/formancehq/ledger/internal/machine/vm" -) - -type Store interface { - vm.Store - InsertLogs(ctx context.Context, logs ...*ledger.ChainedLog) error - GetLastLog(ctx context.Context) (*ledger.ChainedLog, error) - GetLastTransaction(ctx context.Context) (*ledger.ExpandedTransaction, error) - ReadLogWithIdempotencyKey(ctx context.Context, key string) (*ledger.ChainedLog, error) - GetTransactionByReference(ctx context.Context, ref string) (*ledger.ExpandedTransaction, error) - GetTransaction(ctx context.Context, txID *big.Int) (*ledger.Transaction, error) -} diff --git a/internal/engine/errors.go b/internal/engine/errors.go deleted file mode 100644 index 930c358f9..000000000 --- a/internal/engine/errors.go +++ /dev/null @@ -1,73 +0,0 @@ -package engine - -import ( - "fmt" - - "github.com/pkg/errors" -) - -type storageError struct { - err error - msg string -} - -func (e 
*storageError) Error() string { - return fmt.Sprintf("%s: %s", e.msg, e.err) -} - -func (e *storageError) Is(err error) bool { - _, ok := err.(*storageError) - return ok -} - -func (e *storageError) Unwrap() error { - return e.err -} - -func newStorageError(err error, msg string) error { - if err == nil { - return nil - } - return &storageError{ - err: err, - msg: msg, - } -} - -func IsStorageError(err error) bool { - return errors.Is(err, &storageError{}) -} - -type commandError struct { - err error -} - -func (e *commandError) Error() string { - return e.err.Error() -} - -func (e *commandError) Is(err error) bool { - _, ok := err.(*commandError) - return ok -} - -func (e *commandError) Unwrap() error { - return e.err -} - -func (e *commandError) Cause() error { - return e.err -} - -func NewCommandError(err error) error { - if err == nil { - return nil - } - return &commandError{ - err: err, - } -} - -func IsCommandError(err error) bool { - return errors.Is(err, &commandError{}) -} diff --git a/internal/engine/export.go b/internal/engine/export.go deleted file mode 100644 index 07f04a1af..000000000 --- a/internal/engine/export.go +++ /dev/null @@ -1,39 +0,0 @@ -package engine - -import ( - "context" - - "github.com/formancehq/go-libs/bun/bunpaginate" - ledger "github.com/formancehq/ledger/internal" - "github.com/formancehq/ledger/internal/storage/ledgerstore" -) - -type ExportWriter interface { - Write(ctx context.Context, log *ledger.ChainedLog) error -} - -type ExportWriterFn func(ctx context.Context, log *ledger.ChainedLog) error - -func (fn ExportWriterFn) Write(ctx context.Context, log *ledger.ChainedLog) error { - return fn(ctx, log) -} - -func (l *Ledger) Export(ctx context.Context, w ExportWriter) error { - return bunpaginate.Iterate( - ctx, - ledgerstore. - NewGetLogsQuery(ledgerstore.NewPaginatedQueryOptions[any](nil).WithPageSize(100)). 
- WithOrder(bunpaginate.OrderAsc), - func(ctx context.Context, q ledgerstore.GetLogsQuery) (*bunpaginate.Cursor[ledger.ChainedLog], error) { - return l.store.GetLogs(ctx, q) - }, - func(cursor *bunpaginate.Cursor[ledger.ChainedLog]) error { - for _, data := range cursor.Data { - if err := w.Write(ctx, &data); err != nil { - return err - } - } - return nil - }, - ) -} diff --git a/internal/engine/import.go b/internal/engine/import.go deleted file mode 100644 index e62722264..000000000 --- a/internal/engine/import.go +++ /dev/null @@ -1,128 +0,0 @@ -package engine - -import ( - "context" - "encoding/base64" - "fmt" - "math/big" - "reflect" - - ledger "github.com/formancehq/ledger/internal" - "github.com/pkg/errors" -) - -type ImportError struct { - err error - logID *big.Int -} - -func (i ImportError) Error() string { - return i.err.Error() -} - -func (i ImportError) Is(err error) bool { - _, ok := err.(ImportError) - return ok -} - -var _ error = (*ImportError)(nil) - -func newImportError(logID *big.Int, err error) ImportError { - return ImportError{ - logID: logID, - err: err, - } -} - -type InvalidIdError struct { - Expected *big.Int - Got *big.Int -} - -func (i InvalidIdError) Error() string { - return fmt.Sprintf("invalid id, got %s, expected %s", i.Got, i.Expected) -} - -func (i InvalidIdError) Is(err error) bool { - _, ok := err.(InvalidIdError) - return ok -} - -var _ error = (*InvalidIdError)(nil) - -func newInvalidIdError(got, expected *big.Int) ImportError { - return newImportError(got, InvalidIdError{ - Expected: expected, - Got: got, - }) -} - -type InvalidHashError struct { - Expected []byte - Got []byte -} - -func (i InvalidHashError) Error() string { - return fmt.Sprintf( - "invalid hash, expected %s got %s", - base64.StdEncoding.EncodeToString(i.Expected), - base64.StdEncoding.EncodeToString(i.Got), - ) -} - -func (i InvalidHashError) Is(err error) bool { - _, ok := err.(InvalidHashError) - return ok -} - -var _ error = (*InvalidHashError)(nil) - -func newInvalidHashError(logID *big.Int, got, expected []byte) ImportError { - return newImportError(logID, InvalidHashError{ - Expected: expected, - Got: got, - }) -} - -func (l *Ledger) Import(ctx context.Context, stream chan *ledger.ChainedLog) error { - if l.config.LedgerState.State != "initializing" { - return errors.New("ledger must be in initializing state to be imported") - } - batch := make([]*ledger.ChainedLog, 0) - for log := range stream { - lastLog := l.chain.GetLastLog() - nextLogID := big.NewInt(0) - if lastLog != nil { - nextLogID = nextLogID.Add(lastLog.ID, big.NewInt(1)) - } - if log.ID.String() != nextLogID.String() { - return newInvalidIdError(log.ID, nextLogID) - } - logHash := log.Hash - log.Hash = nil - log.ID = big.NewInt(0) - log.ComputeHash(lastLog) - - if !reflect.DeepEqual(log.Hash, logHash) { - return newInvalidHashError(log.ID, log.Hash, logHash) - } - - log.ID = nextLogID - l.chain.ReplaceLast(log) - - batch = append(batch, log) - if len(batch) == 100 { // notes(gfyrag): maybe we could parameterize that, but i don't think it will be useful - if err := l.store.InsertLogs(ctx, batch...); err != nil { - return err - } - batch = make([]*ledger.ChainedLog, 0) - } - } - if len(batch) > 0 { - if err := l.store.InsertLogs(ctx, batch...); err != nil { - return err - } - } - - return nil -} diff --git a/internal/engine/ledger.go b/internal/engine/ledger.go deleted file mode 100644 index 0b5271fef..000000000 --- a/internal/engine/ledger.go +++ /dev/null @@ -1,195 +0,0 @@ -package engine - -import ( - "context" - 
"math/big" - "sync" - - "github.com/formancehq/go-libs/bun/bunpaginate" - "github.com/formancehq/ledger/internal/engine/chain" - "github.com/formancehq/ledger/internal/storage/driver" - "github.com/formancehq/ledger/internal/storage/systemstore" - - "github.com/ThreeDotsLabs/watermill/message" - "github.com/formancehq/go-libs/logging" - "github.com/formancehq/go-libs/metadata" - ledger "github.com/formancehq/ledger/internal" - "github.com/formancehq/ledger/internal/bus" - "github.com/formancehq/ledger/internal/engine/command" - "github.com/formancehq/ledger/internal/storage/ledgerstore" -) - -type Ledger struct { - commander *command.Commander - systemStore *systemstore.Store - store *ledgerstore.Store - mu sync.Mutex - config LedgerConfig - chain *chain.Chain -} - -type GlobalLedgerConfig struct { - batchSize int -} - -type LedgerConfig struct { - GlobalLedgerConfig - driver.LedgerState - isSchemaUpToDate bool -} - -var ( - defaultLedgerConfig = GlobalLedgerConfig{ - batchSize: 50, - } -) - -func New( - systemStore *systemstore.Store, - store *ledgerstore.Store, - publisher message.Publisher, - compiler *command.Compiler, - ledgerConfig LedgerConfig, -) *Ledger { - var monitor bus.Monitor = bus.NewNoOpMonitor() - if publisher != nil { - monitor = bus.NewLedgerMonitor(publisher, store.Name()) - } - chain := chain.New(store) - ret := &Ledger{ - commander: command.New( - store, - command.NewDefaultLocker(), - compiler, - command.NewReferencer(), - monitor, - chain, - ledgerConfig.batchSize, - ), - store: store, - config: ledgerConfig, - systemStore: systemStore, - chain: chain, - } - return ret -} - -func (l *Ledger) Start(ctx context.Context) { - if err := l.chain.Init(ctx); err != nil { - panic(err) - } - go l.commander.Run(logging.ContextWithField(ctx, "component", "commander")) -} - -func (l *Ledger) Close(ctx context.Context) { - logging.FromContext(ctx).Debugf("Close commander") - l.commander.Close() -} - -func (l *Ledger) GetTransactions(ctx context.Context, q ledgerstore.GetTransactionsQuery) (*bunpaginate.Cursor[ledger.ExpandedTransaction], error) { - txs, err := l.store.GetTransactions(ctx, q) - return txs, newStorageError(err, "getting transactions") -} - -func (l *Ledger) CountTransactions(ctx context.Context, q ledgerstore.GetTransactionsQuery) (int, error) { - count, err := l.store.CountTransactions(ctx, q) - return count, newStorageError(err, "counting transactions") -} - -func (l *Ledger) GetTransactionWithVolumes(ctx context.Context, query ledgerstore.GetTransactionQuery) (*ledger.ExpandedTransaction, error) { - tx, err := l.store.GetTransactionWithVolumes(ctx, query) - return tx, newStorageError(err, "getting transaction") -} - -func (l *Ledger) CountAccounts(ctx context.Context, a ledgerstore.GetAccountsQuery) (int, error) { - count, err := l.store.CountAccounts(ctx, a) - return count, newStorageError(err, "counting accounts") -} - -func (l *Ledger) GetAccountsWithVolumes(ctx context.Context, a ledgerstore.GetAccountsQuery) (*bunpaginate.Cursor[ledger.ExpandedAccount], error) { - accounts, err := l.store.GetAccountsWithVolumes(ctx, a) - return accounts, newStorageError(err, "getting accounts") -} - -func (l *Ledger) GetAccountWithVolumes(ctx context.Context, q ledgerstore.GetAccountQuery) (*ledger.ExpandedAccount, error) { - accounts, err := l.store.GetAccountWithVolumes(ctx, q) - return accounts, newStorageError(err, "getting account") -} - -func (l *Ledger) GetAggregatedBalances(ctx context.Context, q ledgerstore.GetAggregatedBalanceQuery) (ledger.BalancesByAssets, 
error) { - balances, err := l.store.GetAggregatedBalances(ctx, q) - return balances, newStorageError(err, "getting balances aggregated") -} - -func (l *Ledger) GetLogs(ctx context.Context, q ledgerstore.GetLogsQuery) (*bunpaginate.Cursor[ledger.ChainedLog], error) { - logs, err := l.store.GetLogs(ctx, q) - return logs, newStorageError(err, "getting logs") -} - -func (l *Ledger) markInUseIfNeeded(ctx context.Context) { - if l.config.LedgerState.State == systemstore.StateInitializing { - if err := l.systemStore.UpdateLedgerState(ctx, l.store.Name(), systemstore.StateInUse); err != nil { - logging.FromContext(ctx).Error("Unable to declare ledger as in use") - return - } - l.config.LedgerState.State = systemstore.StateInUse - } -} - -func (l *Ledger) CreateTransaction(ctx context.Context, parameters command.Parameters, data ledger.RunScript) (*ledger.Transaction, error) { - ret, err := l.commander.CreateTransaction(ctx, parameters, data) - if err != nil { - return nil, NewCommandError(err) - } - l.markInUseIfNeeded(ctx) - return ret, nil -} - -func (l *Ledger) RevertTransaction(ctx context.Context, parameters command.Parameters, id *big.Int, force, atEffectiveDate bool) (*ledger.Transaction, error) { - ret, err := l.commander.RevertTransaction(ctx, parameters, id, force, atEffectiveDate) - if err != nil { - return nil, NewCommandError(err) - } - l.markInUseIfNeeded(ctx) - return ret, nil -} - -func (l *Ledger) SaveMeta(ctx context.Context, parameters command.Parameters, targetType string, targetID any, m metadata.Metadata) error { - if err := l.commander.SaveMeta(ctx, parameters, targetType, targetID, m); err != nil { - return NewCommandError(err) - } - - l.markInUseIfNeeded(ctx) - return nil -} - -func (l *Ledger) DeleteMetadata(ctx context.Context, parameters command.Parameters, targetType string, targetID any, key string) error { - if err := l.commander.DeleteMetadata(ctx, parameters, targetType, targetID, key); err != nil { - return NewCommandError(err) - } - - l.markInUseIfNeeded(ctx) - return nil -} - -func (l *Ledger) IsDatabaseUpToDate(ctx context.Context) (bool, error) { - if l.config.isSchemaUpToDate { - return true, nil - } - l.mu.Lock() - defer l.mu.Unlock() - - if l.config.isSchemaUpToDate { - return true, nil - } - - var err error - l.config.isSchemaUpToDate, err = l.store.IsUpToDate(ctx) - - return l.config.isSchemaUpToDate, err -} - -func (l *Ledger) GetVolumesWithBalances(ctx context.Context, q ledgerstore.GetVolumesWithBalancesQuery) (*bunpaginate.Cursor[ledger.VolumesWithBalanceByAssetByAccount], error) { - volumes, err := l.store.GetVolumesWithBalances(ctx, q) - return volumes, newStorageError(err, "getting Volumes with balances") -} diff --git a/internal/engine/migrations.go b/internal/engine/migrations.go deleted file mode 100644 index 47eda0bcd..000000000 --- a/internal/engine/migrations.go +++ /dev/null @@ -1,11 +0,0 @@ -package engine - -import ( - "context" - - "github.com/formancehq/go-libs/migrations" -) - -func (l *Ledger) GetMigrationsInfo(ctx context.Context) ([]migrations.Info, error) { - return l.store.GetMigrationsInfo(ctx) -} diff --git a/internal/engine/module.go b/internal/engine/module.go deleted file mode 100644 index 608d9422e..000000000 --- a/internal/engine/module.go +++ /dev/null @@ -1,58 +0,0 @@ -package engine - -import ( - "context" - - "github.com/ThreeDotsLabs/watermill/message" - "github.com/formancehq/go-libs/logging" - "github.com/formancehq/ledger/internal/bus" - "github.com/formancehq/ledger/internal/engine/command" - 
"github.com/formancehq/ledger/internal/opentelemetry/metrics" - "github.com/formancehq/ledger/internal/storage/driver" - "go.uber.org/fx" -) - -type NumscriptCacheConfiguration struct { - MaxCount int -} - -type Configuration struct { - NumscriptCache NumscriptCacheConfiguration - LedgerBatchSize int -} - -func Module(configuration Configuration) fx.Option { - return fx.Options( - fx.Provide(func( - storageDriver *driver.Driver, - publisher message.Publisher, - metricsRegistry metrics.GlobalRegistry, - logger logging.Logger, - ) *Resolver { - options := []option{ - WithMessagePublisher(publisher), - WithMetricsRegistry(metricsRegistry), - WithLogger(logger), - } - if configuration.NumscriptCache.MaxCount != 0 { - options = append(options, WithCompiler(command.NewCompiler(configuration.NumscriptCache.MaxCount))) - } - if configuration.LedgerBatchSize != 0 { - options = append(options, WithLedgerConfig(GlobalLedgerConfig{ - batchSize: configuration.LedgerBatchSize, - })) - } - return NewResolver(storageDriver, options...) - }), - fx.Provide(fx.Annotate(bus.NewNoOpMonitor, fx.As(new(bus.Monitor)))), - fx.Provide(fx.Annotate(metrics.NewNoOpRegistry, fx.As(new(metrics.GlobalRegistry)))), - //TODO(gfyrag): Move in pkg/ledger package - fx.Invoke(func(lc fx.Lifecycle, resolver *Resolver) { - lc.Append(fx.Hook{ - OnStop: func(ctx context.Context) error { - return resolver.CloseLedgers(ctx) - }, - }) - }), - ) -} diff --git a/internal/engine/resolver.go b/internal/engine/resolver.go deleted file mode 100644 index 738b01d1f..000000000 --- a/internal/engine/resolver.go +++ /dev/null @@ -1,171 +0,0 @@ -package engine - -import ( - "context" - "sync" - - "github.com/formancehq/ledger/internal/storage/ledgerstore" - "github.com/formancehq/ledger/internal/storage/systemstore" - - "github.com/pkg/errors" - - "github.com/ThreeDotsLabs/watermill/message" - "github.com/formancehq/go-libs/logging" - "github.com/formancehq/ledger/internal/engine/command" - "github.com/formancehq/ledger/internal/opentelemetry/metrics" - "github.com/formancehq/ledger/internal/storage/driver" - "github.com/sirupsen/logrus" -) - -type option func(r *Resolver) - -func WithMessagePublisher(publisher message.Publisher) option { - return func(r *Resolver) { - r.publisher = publisher - } -} - -func WithMetricsRegistry(registry metrics.GlobalRegistry) option { - return func(r *Resolver) { - r.metricsRegistry = registry - } -} - -func WithCompiler(compiler *command.Compiler) option { - return func(r *Resolver) { - r.compiler = compiler - } -} - -func WithLogger(logger logging.Logger) option { - return func(r *Resolver) { - r.logger = logger - } -} - -func WithLedgerConfig(config GlobalLedgerConfig) option { - return func(r *Resolver) { - r.ledgerConfig = config - } -} - -var defaultOptions = []option{ - WithMetricsRegistry(metrics.NewNoOpRegistry()), - WithCompiler(command.NewCompiler(1024)), - WithLogger(logging.NewLogrus(logrus.New())), -} - -type Resolver struct { - storageDriver *driver.Driver - lock sync.RWMutex - metricsRegistry metrics.GlobalRegistry - //TODO(gfyrag): add a routine to clean old ledger - ledgers map[string]*Ledger - ledgerConfig GlobalLedgerConfig - compiler *command.Compiler - logger logging.Logger - publisher message.Publisher -} - -func NewResolver(storageDriver *driver.Driver, options ...option) *Resolver { - r := &Resolver{ - storageDriver: storageDriver, - ledgers: map[string]*Ledger{}, - ledgerConfig: defaultLedgerConfig, - } - for _, opt := range append(defaultOptions, options...) 
{ - opt(r) - } - - return r -} - -func (r *Resolver) startLedger(ctx context.Context, name string, store *ledgerstore.Store, state driver.LedgerState) (*Ledger, error) { - - ledger := New(r.storageDriver.GetSystemStore(), store, r.publisher, r.compiler, LedgerConfig{ - GlobalLedgerConfig: r.ledgerConfig, - LedgerState: state, - }) - ledger.Start(logging.ContextWithLogger(context.Background(), r.logger)) - r.ledgers[name] = ledger - r.metricsRegistry.ActiveLedgers().Add(ctx, +1) - - return ledger, nil -} - -func (r *Resolver) GetLedger(ctx context.Context, name string) (*Ledger, error) { - if name == "" { - return nil, errors.New("empty name") - } - r.lock.RLock() - ledger, ok := r.ledgers[name] - r.lock.RUnlock() - - if !ok { - r.lock.Lock() - defer r.lock.Unlock() - - ledger, ok = r.ledgers[name] - if ok { - return ledger, nil - } - - ledgerConfiguration, err := r.storageDriver.GetSystemStore().GetLedger(ctx, name) - if err != nil { - return nil, err - } - - store, err := r.storageDriver.GetLedgerStore(ctx, name, driver.LedgerState{ - LedgerConfiguration: driver.LedgerConfiguration{ - Bucket: ledgerConfiguration.Bucket, - Metadata: ledgerConfiguration.Metadata, - }, - State: ledgerConfiguration.State, - }) - if err != nil { - return nil, err - } - - return r.startLedger(ctx, name, store, driver.LedgerState{ - LedgerConfiguration: driver.LedgerConfiguration{ - Bucket: ledgerConfiguration.Bucket, - Metadata: ledgerConfiguration.Metadata, - }, - }) - } - - return ledger, nil -} - -func (r *Resolver) CreateLedger(ctx context.Context, name string, configuration driver.LedgerConfiguration) (*Ledger, error) { - if name == "" { - return nil, errors.New("empty name") - } - - r.lock.Lock() - defer r.lock.Unlock() - - store, err := r.storageDriver.CreateLedgerStore(ctx, name, configuration) - if err != nil { - return nil, err - } - - return r.startLedger(ctx, name, store, driver.LedgerState{ - LedgerConfiguration: configuration, - State: systemstore.StateInitializing, - }) -} - -func (r *Resolver) CloseLedgers(ctx context.Context) error { - r.logger.Info("Close all ledgers") - defer func() { - r.logger.Info("All ledgers closed") - }() - for name, ledger := range r.ledgers { - r.logger.Infof("Close ledger %s", name) - ledger.Close(logging.ContextWithLogger(ctx, r.logger.WithField("ledger", name))) - delete(r.ledgers, name) - } - - return nil -} diff --git a/internal/engine/stats.go b/internal/engine/stats.go deleted file mode 100644 index 34a0cdb38..000000000 --- a/internal/engine/stats.go +++ /dev/null @@ -1,32 +0,0 @@ -package engine - -import ( - "context" - - "github.com/formancehq/ledger/internal/storage/ledgerstore" - "github.com/pkg/errors" -) - -type Stats struct { - Transactions int `json:"transactions"` - Accounts int `json:"accounts"` -} - -func (l *Ledger) Stats(ctx context.Context) (Stats, error) { - var stats Stats - - transactions, err := l.store.CountTransactions(ctx, ledgerstore.NewGetTransactionsQuery(ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}))) - if err != nil { - return stats, errors.Wrap(err, "counting transactions") - } - - accounts, err := l.store.CountAccounts(ctx, ledgerstore.NewGetAccountsQuery(ledgerstore.NewPaginatedQueryOptions(ledgerstore.PITFilterWithVolumes{}))) - if err != nil { - return stats, errors.Wrap(err, "counting accounts") - } - - return Stats{ - Transactions: transactions, - Accounts: accounts, - }, nil -} diff --git a/internal/engine/utils/batching/batcher.go b/internal/engine/utils/batching/batcher.go deleted file mode 100644 
index c41a85301..000000000 --- a/internal/engine/utils/batching/batcher.go +++ /dev/null @@ -1,85 +0,0 @@ -package batching - -import ( - "context" - "fmt" - "sync" - - "github.com/formancehq/go-libs/collectionutils" - "github.com/formancehq/ledger/internal/engine/utils/job" -) - -type OnBatchProcessed[T any] func(...T) - -func NoOpOnBatchProcessed[T any]() func(...T) { - return func(t ...T) {} -} - -type pending[T any] struct { - object T - callback func() -} - -type batcherJob[T any] struct { - items []*pending[T] -} - -func (b batcherJob[T]) String() string { - return fmt.Sprintf("processing %d items", len(b.items)) -} - -func (b batcherJob[T]) Terminated() { - for _, v := range b.items { - v.callback() - } -} - -type Batcher[T any] struct { - *job.Runner[batcherJob[T]] - pending []*pending[T] - mu sync.Mutex - maxBatchSize int -} - -func (s *Batcher[T]) Append(object T, callback func()) { - s.mu.Lock() - s.pending = append(s.pending, &pending[T]{ - callback: callback, - object: object, - }) - s.mu.Unlock() - s.Runner.Next() -} - -func (s *Batcher[T]) nextBatch() *batcherJob[T] { - s.mu.Lock() - defer s.mu.Unlock() - - if len(s.pending) == 0 { - return nil - } - if len(s.pending) > s.maxBatchSize { - batch := s.pending[:s.maxBatchSize] - s.pending = s.pending[s.maxBatchSize:] - return &batcherJob[T]{ - items: batch, - } - } - batch := s.pending - s.pending = make([]*pending[T], 0) - return &batcherJob[T]{ - items: batch, - } -} - -func NewBatcher[T any](runner func(context.Context, ...T) error, nbWorkers, maxBatchSize int) *Batcher[T] { - ret := &Batcher[T]{ - maxBatchSize: maxBatchSize, - } - ret.Runner = job.NewJobRunner[batcherJob[T]](func(ctx context.Context, job *batcherJob[T]) error { - return runner(ctx, collectionutils.Map(job.items, func(from *pending[T]) T { - return from.object - })...) 
- }, ret.nextBatch, nbWorkers) - return ret -} diff --git a/internal/engine/utils/job/jobs.go b/internal/engine/utils/job/jobs.go deleted file mode 100644 index e695ae573..000000000 --- a/internal/engine/utils/job/jobs.go +++ /dev/null @@ -1,143 +0,0 @@ -package job - -import ( - "context" - "fmt" - "runtime/debug" - "sync/atomic" - - "github.com/alitto/pond" - "github.com/formancehq/go-libs/logging" - "github.com/pkg/errors" -) - -type Job interface { - Terminated() -} - -type builtJob struct { - terminatedFn func() -} - -func (j builtJob) Terminated() { - j.terminatedFn() -} - -func newJob(terminatedFn func()) *builtJob { - return &builtJob{ - terminatedFn: terminatedFn, - } -} - -type Runner[JOB Job] struct { - stopChan chan chan struct{} - runner func(context.Context, *JOB) error - nbWorkers int - parkedWorkers atomic.Int64 - nextJob func() *JOB - jobs chan *JOB - newJobsAvailable chan struct{} -} - -func (r *Runner[JOB]) Next() { - r.newJobsAvailable <- struct{}{} -} - -func (r *Runner[JOB]) Close() { - done := make(chan struct{}) - r.stopChan <- done - <-done -} - -func (r *Runner[JOB]) Run(ctx context.Context) { - - logger := logging.FromContext(ctx) - logger.Infof("Start worker") - - defer func() { - if e := recover(); e != nil { - logger.Error(e) - debug.PrintStack() - panic(e) - } - }() - - terminatedJobs := make(chan *JOB, r.nbWorkers) - jobsErrors := make(chan error, r.nbWorkers) - - w := pond.New(r.nbWorkers, r.nbWorkers) - for i := 0; i < r.nbWorkers; i++ { - i := i - w.Submit(func() { - defer func() { - if e := recover(); e != nil { - if err, isError := e.(error); isError { - jobsErrors <- errors.WithStack(err) - return - } - jobsErrors <- errors.WithStack(fmt.Errorf("%s", e)) - } - }() - logger := logger.WithFields(map[string]any{ - "worker": i, - }) - for { - select { - case job, ok := <-r.jobs: - if !ok { - logger.Debugf("Worker %d stopped", i) - return - } - logger := logger.WithField("job", job) - logger.Debugf("Got new job") - if err := r.runner(ctx, job); err != nil { - panic(err) - } - logger.Debugf("Job terminated") - terminatedJobs <- job - } - } - }) - } - - for { - select { - case jobError := <-jobsErrors: - panic(jobError) - case done := <-r.stopChan: - close(r.jobs) - w.StopAndWait() - close(terminatedJobs) - close(done) - return - case <-r.newJobsAvailable: - if r.parkedWorkers.Load() > 0 { - if job := r.nextJob(); job != nil { - r.jobs <- job - r.parkedWorkers.Add(-1) - } - } - case job := <-terminatedJobs: - (*job).Terminated() - if job := r.nextJob(); job != nil { - r.jobs <- job - } else { - r.parkedWorkers.Add(1) - } - } - } -} - -func NewJobRunner[JOB Job](runner func(context.Context, *JOB) error, nextJob func() *JOB, nbWorkers int) *Runner[JOB] { - parkedWorkers := atomic.Int64{} - parkedWorkers.Add(int64(nbWorkers)) - return &Runner[JOB]{ - stopChan: make(chan chan struct{}), - runner: runner, - nbWorkers: nbWorkers, - parkedWorkers: parkedWorkers, - nextJob: nextJob, - jobs: make(chan *JOB, nbWorkers), - newJobsAvailable: make(chan struct{}), - } -} diff --git a/internal/engine/utils/job/jobs_test.go b/internal/engine/utils/job/jobs_test.go deleted file mode 100644 index 6b955e377..000000000 --- a/internal/engine/utils/job/jobs_test.go +++ /dev/null @@ -1,44 +0,0 @@ -package job - -import ( - "context" - "sync/atomic" - "testing" - "time" - - "github.com/formancehq/go-libs/logging" - "github.com/stretchr/testify/require" -) - -func TestWorkerPool(t *testing.T) { - t.Parallel() - - const countJobs = 10000 - createdJobs := atomic.Int64{} - 
terminatedJobs := atomic.Int64{} - nextJob := func() *builtJob { - if createdJobs.Load() == 10000 { - return nil - } - createdJobs.Add(1) - return newJob(func() { - terminatedJobs.Add(1) - }) - } - runner := func(ctx context.Context, job *builtJob) error { - return nil - } - ctx := logging.TestingContext() - - pool := NewJobRunner[builtJob](runner, nextJob, 5) - go pool.Run(ctx) - defer pool.Close() - - for i := 0; i < 100; i++ { - go pool.Next() // Simulate random input - } - - require.Eventually(t, func() bool { - return countJobs == createdJobs.Load() - }, 5*time.Second, time.Millisecond*100) -} diff --git a/internal/errors.go b/internal/errors.go new file mode 100644 index 000000000..f51e711c4 --- /dev/null +++ b/internal/errors.go @@ -0,0 +1,39 @@ +package ledger + +import "fmt" + +type ErrInvalidLedgerName struct { + err error + name string +} + +func (e ErrInvalidLedgerName) Error() string { + return fmt.Sprintf("invalid ledger name '%s': %s", e.name, e.err) +} + +func (e ErrInvalidLedgerName) Is(err error) bool { + _, ok := err.(ErrInvalidLedgerName) + return ok +} + +func newErrInvalidLedgerName(name string, err error) ErrInvalidLedgerName { + return ErrInvalidLedgerName{err: err, name: name} +} + +type ErrInvalidBucketName struct { + err error + bucket string +} + +func (e ErrInvalidBucketName) Error() string { + return fmt.Sprintf("invalid bucket name '%s': %s", e.bucket, e.err) +} + +func (e ErrInvalidBucketName) Is(err error) bool { + _, ok := err.(ErrInvalidBucketName) + return ok +} + +func newErrInvalidBucketName(bucket string, err error) ErrInvalidBucketName { + return ErrInvalidBucketName{err: err, bucket: bucket} +} \ No newline at end of file diff --git a/internal/ledger.go b/internal/ledger.go new file mode 100644 index 000000000..e80b4cc72 --- /dev/null +++ b/internal/ledger.go @@ -0,0 +1,217 @@ +package ledger + +import ( + "fmt" + . 
"github.com/formancehq/go-libs/v2/collectionutils" + "github.com/formancehq/go-libs/v2/time" + "github.com/uptrace/bun" + "regexp" + "slices" + "strings" + + "github.com/formancehq/go-libs/v2/metadata" +) + +type Ledger struct { + bun.BaseModel `bun:"_system.ledgers,alias:ledgers"` + + Configuration + ID int `json:"id" bun:"id,type:int,scanonly"` + Name string `json:"name" bun:"name,type:varchar(255),pk"` + AddedAt time.Time `json:"addedAt" bun:"added_at,type:timestamp,nullzero"` +} + +func (l Ledger) HasFeature(feature, value string) bool { + if err := validateFeatureWithValue(feature, value); err != nil { + panic(err) + } + + return l.Features[feature] == value +} + +func (l Ledger) WithMetadata(m metadata.Metadata) Ledger { + l.Metadata = m + return l +} + +func New(name string, configuration Configuration) (*Ledger, error) { + + if err := configuration.Validate(); err != nil { + return nil, err + } + + if !ledgerNameFormat.MatchString(name) { + return nil, newErrInvalidLedgerName(name, fmt.Errorf("name must match format '%s'", ledgerNameFormat.String())) + } + if slices.Contains(reservedLedgerName, name) { + return nil, newErrInvalidLedgerName(name, fmt.Errorf("name '%s' is reserved", name)) + } + if !bucketNameFormat.MatchString(configuration.Bucket) { + return nil, newErrInvalidBucketName(configuration.Bucket, fmt.Errorf("name must match format '%s'", bucketNameFormat.String())) + } + + return &Ledger{ + Configuration: configuration, + Name: name, + }, nil +} + +func NewWithDefaults(name string) (*Ledger, error) { + return New(name, NewDefaultConfiguration()) +} + +func MustNewWithDefault(name string) Ledger { + ledger, err := NewWithDefaults(name) + if err != nil { + panic(err) + } + return *ledger +} + +const ( + // FeatureMovesHistory is used to define if the ledger has to save funds movements history. + // Value is either ON or OFF + FeatureMovesHistory = "MOVES_HISTORY" + // FeatureMovesHistoryPostCommitEffectiveVolumes is used to define if the pvce property of funds movements history + // has to be updated with back dated transaction. + // Value is either SYNC or DISABLED. + // todo: depends on FeatureMovesHistory (dependency should be checked) + FeatureMovesHistoryPostCommitEffectiveVolumes = "MOVES_HISTORY_POST_COMMIT_EFFECTIVE_VOLUMES" + // FeatureHashLogs is used to defined it the logs has to be hashed. + FeatureHashLogs = "HASH_LOGS" + // FeatureAccountMetadataHistory is used to defined it the account metadata must be historized. + FeatureAccountMetadataHistory = "ACCOUNT_METADATA_HISTORY" + // FeatureTransactionMetadataHistory is used to defined it the transaction metadata must be historized. + FeatureTransactionMetadataHistory = "TRANSACTION_METADATA_HISTORY" + // FeatureIndexAddressSegments is used to defined it we want to index segments of accounts address. + // Without this feature, the ledger will not allow filtering on partial account address. + FeatureIndexAddressSegments = "INDEX_ADDRESS_SEGMENTS" + // FeatureIndexTransactionAccounts is used to defined it we want to index accounts used in a transaction. 
+ FeatureIndexTransactionAccounts = "INDEX_TRANSACTION_ACCOUNTS" + + DefaultBucket = "_default" +) + +var ( + DefaultFeatures = FeatureSet{ + FeatureMovesHistory: "ON", + FeatureMovesHistoryPostCommitEffectiveVolumes: "SYNC", + FeatureHashLogs: "SYNC", + FeatureAccountMetadataHistory: "SYNC", + FeatureTransactionMetadataHistory: "SYNC", + FeatureIndexAddressSegments: "ON", + FeatureIndexTransactionAccounts: "ON", + } + MinimalFeatureSet = FeatureSet{ + FeatureMovesHistory: "OFF", + FeatureMovesHistoryPostCommitEffectiveVolumes: "DISABLED", + FeatureHashLogs: "DISABLED", + FeatureAccountMetadataHistory: "DISABLED", + FeatureTransactionMetadataHistory: "DISABLED", + FeatureIndexAddressSegments: "OFF", + FeatureIndexTransactionAccounts: "OFF", + } + FeatureConfigurations = map[string][]string{ + FeatureMovesHistory: {"ON", "OFF"}, + FeatureMovesHistoryPostCommitEffectiveVolumes: {"SYNC", "DISABLED"}, + FeatureHashLogs: {"SYNC", "DISABLED"}, + FeatureAccountMetadataHistory: {"SYNC", "DISABLED"}, + FeatureTransactionMetadataHistory: {"SYNC", "DISABLED"}, + FeatureIndexAddressSegments: {"ON", "OFF"}, + FeatureIndexTransactionAccounts: {"ON", "OFF"}, + } + + ledgerNameFormat = regexp.MustCompile("^[0-9a-zA-Z_-]{1,63}$") + bucketNameFormat = regexp.MustCompile("^[0-9a-zA-Z_-]{1,63}$") + + reservedLedgerName = []string{ + // Used for debug in urls... + "_", + "_info", + "_healthcheck", + } +) + +func validateFeatureWithValue(feature, value string) error { + possibleConfigurations, ok := FeatureConfigurations[feature] + if !ok { + return fmt.Errorf("feature %q not exists", feature) + } + if !slices.Contains(possibleConfigurations, value) { + return fmt.Errorf("configuration %s it not possible for feature %s", value, feature) + } + + return nil +} + +type FeatureSet map[string]string + +func (f FeatureSet) With(feature, value string) FeatureSet { + ret := FeatureSet{} + for k, v := range f { + ret[k] = v + } + ret[feature] = value + + return ret +} + +func (f FeatureSet) String() string { + if len(f) == 0 { + return "" + } + keys := Keys(f) + slices.Sort(keys) + + ret := "" + for _, key := range keys { + ret = ret + "," + shortenFeature(key) + "=" + f[key] + } + + return ret[1:] +} + +func shortenFeature(feature string) string { + return strings.Join(Map(strings.Split(feature, "_"), func(from string) string { + return from[:1] + }), "") +} + +type Configuration struct { + Bucket string `json:"bucket" bun:"bucket,type:varchar(255)"` + Metadata metadata.Metadata `json:"metadata" bun:"metadata,type:jsonb"` + Features FeatureSet `json:"features" bun:"features,type:jsonb"` +} + +func (c *Configuration) SetDefaults() { + if c.Bucket == "" { + c.Bucket = DefaultBucket + } + if c.Features == nil { + c.Features = map[string]string{} + } + + for key, value := range DefaultFeatures { + if _, ok := c.Features[key]; !ok { + c.Features[key] = value + } + } +} + +func (c *Configuration) Validate() error { + for feature, value := range c.Features { + if err := validateFeatureWithValue(feature, value); err != nil { + return err + } + } + + return nil +} + +func NewDefaultConfiguration() Configuration { + return Configuration{ + Bucket: DefaultBucket, + Metadata: metadata.Metadata{}, + Features: DefaultFeatures, + } +} diff --git a/internal/ledger_test.go b/internal/ledger_test.go new file mode 100644 index 000000000..91c6534a8 --- /dev/null +++ b/internal/ledger_test.go @@ -0,0 +1,12 @@ +package ledger + +import ( + "github.com/stretchr/testify/require" + "testing" +) + +func TestFeatures(t *testing.T) { + f := 
MinimalFeatureSet.With(FeatureMovesHistory, "DISABLED") + require.Equal(t, "DISABLED", f[FeatureMovesHistory]) + require.Equal(t, "AMH=DISABLED,HL=DISABLED,IAS=OFF,ITA=OFF,MH=DISABLED,MHPCEV=DISABLED,TMH=DISABLED", f.String()) +} diff --git a/internal/log.go b/internal/log.go index 87cb1772c..d611e3a9e 100644 --- a/internal/log.go +++ b/internal/log.go @@ -1,35 +1,59 @@ package ledger import ( - "context" "crypto/sha256" + "database/sql/driver" + "encoding/base64" "encoding/json" - "math/big" + "fmt" + "github.com/uptrace/bun" "reflect" "strconv" "strings" - "github.com/formancehq/go-libs/time" + "github.com/formancehq/go-libs/v2/time" - "github.com/formancehq/go-libs/metadata" - "github.com/pkg/errors" + "github.com/formancehq/go-libs/v2/metadata" ) -type LogType int16 - const ( - // TODO(gfyrag): Create dedicated log type for account and metadata SetMetadataLogType LogType = iota // "SET_METADATA" - NewTransactionLogType // "NEW_TRANSACTION" + NewLogType // "NEW_TRANSACTION" RevertedTransactionLogType // "REVERTED_TRANSACTION" DeleteMetadataLogType ) -func (l LogType) String() string { - switch l { +type LogType int16 + +func (lt LogType) Value() (driver.Value, error) { + return lt.String(), nil +} + +func (lt *LogType) Scan(src interface{}) error { + *lt = LogTypeFromString(src.(string)) + return nil +} + +func (lt LogType) MarshalJSON() ([]byte, error) { + return json.Marshal(lt.String()) +} + +func (lt *LogType) UnmarshalJSON(data []byte) error { + var s string + if err := json.Unmarshal(data, &s); err != nil { + return err + } + + *lt = LogTypeFromString(s) + + return nil +} + +func (lt LogType) String() string { + switch lt { case SetMetadataLogType: return "SET_METADATA" - case NewTransactionLogType: + case NewLogType: return "NEW_TRANSACTION" case RevertedTransactionLogType: return "REVERTED_TRANSACTION" @@ -45,51 +69,49 @@ func LogTypeFromString(logType string) LogType { case "SET_METADATA": return SetMetadataLogType case "NEW_TRANSACTION": - return NewTransactionLogType + return NewLogType case "REVERTED_TRANSACTION": return RevertedTransactionLogType case "DELETE_METADATA": return DeleteMetadataLogType } - panic(errors.New("invalid log type")) -} - -// Needed in order to keep the compatibility with the openapi response for -// ListLogs. -func (lt LogType) MarshalJSON() ([]byte, error) { - return json.Marshal(lt.String()) -} - -func (lt *LogType) UnmarshalJSON(data []byte) error { - var s string - if err := json.Unmarshal(data, &s); err != nil { - return err - } - - *lt = LogTypeFromString(s) - - return nil + panic("invalid log type") } -type ChainedLogWithContext struct { - ChainedLog - Context context.Context +// Log represents atomic actions made on the ledger. +type Log struct { + bun.BaseModel `bun:"table:logs,alias:logs"` + + Type LogType `json:"type" bun:"type,type:log_type"` + Data LogPayload `json:"data" bun:"data,type:jsonb"` + Date time.Time `json:"date" bun:"date,type:timestamptz,nullzero"` + IdempotencyKey string `json:"idempotencyKey" bun:"idempotency_key,type:varchar(256),unique,nullzero"` + // IdempotencyHash is a signature used when using IdempotencyKey. + // It allows to check if the usage of IdempotencyKey match inputs given on the first idempotency key usage. 
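// Illustrative sketch (not part of this diff): the JSON and database/sql round-trip that
// the LogType methods above provide. Assumes encoding/json is imported and ledger is
// "github.com/formancehq/ledger/internal".
func logTypeRoundTrip() (ledger.LogType, error) {
	raw, err := json.Marshal(ledger.NewLogType) // encodes as the string "NEW_TRANSACTION"
	if err != nil {
		return 0, err
	}

	var lt ledger.LogType
	if err := json.Unmarshal(raw, &lt); err != nil { // parsed back via LogTypeFromString
		return 0, err
	}

	// database/sql takes the same path: Value() stores the string form, Scan() parses it.
	if _, err := lt.Value(); err != nil {
		return 0, err
	}
	return lt, nil
}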
+ IdempotencyHash string `json:"idempotencyHash" bun:"idempotency_hash,unique,nullzero"` + ID int `json:"id" bun:"id,unique,type:numeric"` + Hash []byte `json:"hash" bun:"hash,type:bytea,scanonly"` } -type ChainedLog struct { - Log - ID *big.Int `json:"id"` - Hash []byte `json:"hash"` +func (l Log) WithIdempotencyKey(key string) Log { + l.IdempotencyKey = key + return l } -func (l *ChainedLog) WithID(id uint64) *ChainedLog { - l.ID = big.NewInt(int64(id)) - return l +func (l Log) ChainLog(previous *Log) Log { + ret := l + ret.ComputeHash(previous) + if previous != nil { + ret.ID = previous.ID + 1 + } else { + ret.ID = 1 + } + return ret } -func (l *ChainedLog) UnmarshalJSON(data []byte) error { - type auxLog ChainedLog +func (l *Log) UnmarshalJSON(data []byte) error { + type auxLog Log type log struct { auxLog Data json.RawMessage `json:"data"` @@ -104,11 +126,11 @@ func (l *ChainedLog) UnmarshalJSON(data []byte) error { if err != nil { return err } - *l = ChainedLog(rawLog.auxLog) + *l = Log(rawLog.auxLog) return err } -func (l *ChainedLog) ComputeHash(previous *ChainedLog) { +func (l *Log) ComputeHash(previous *Log) { digest := sha256.New() enc := json.NewEncoder(digest) @@ -118,73 +140,103 @@ func (l *ChainedLog) ComputeHash(previous *ChainedLog) { } } - if err := enc.Encode(l); err != nil { + payload := l.Data.(any) + if hv, ok := payload.(Memento); ok { + payload = hv.GetMemento() + } + + if err := enc.Encode(struct { + // notes(gfyrag): Keep keys ordered! the order matter when hashing the log. + Type LogType `json:"type"` + Data any `json:"data"` + Date time.Time `json:"date"` + IdempotencyKey string `json:"idempotencyKey"` + ID int `json:"id"` + Hash []byte `json:"hash"` + }{ + Type: l.Type, + Data: payload, + Date: l.Date, + IdempotencyKey: l.IdempotencyKey, + ID: l.ID, + Hash: l.Hash, + }); err != nil { panic(err) } l.Hash = digest.Sum(nil) } -type Log struct { - Type LogType `json:"type"` - Data any `json:"data"` - Date time.Time `json:"date"` - IdempotencyKey string `json:"idempotencyKey"` -} - -func (l *Log) WithDate(date time.Time) *Log { - l.Date = date - return l +func NewLog(payload LogPayload) Log { + return Log{ + Type: payload.Type(), + Data: payload, + } } -func (l *Log) WithIdempotencyKey(key string) *Log { - l.IdempotencyKey = key - return l +type LogPayload interface { + Type() LogType } -func (l *Log) ChainLog(previous *ChainedLog) *ChainedLog { - ret := &ChainedLog{ - Log: *l, - ID: big.NewInt(0), - } - ret.ComputeHash(previous) - if previous != nil { - ret.ID = ret.ID.Add(previous.ID, big.NewInt(1)) - } - return ret +type Memento interface { + GetMemento() any } type AccountMetadata map[string]metadata.Metadata -type NewTransactionLogPayload struct { - Transaction *Transaction `json:"transaction"` +type CreatedTransaction struct { + Transaction Transaction `json:"transaction"` AccountMetadata AccountMetadata `json:"accountMetadata"` } -func NewTransactionLogWithDate(tx *Transaction, accountMetadata map[string]metadata.Metadata, time time.Time) *Log { - // Since the id is unique and the hash is a hash of the previous log, they - // will be filled at insertion time during the batch process. 
- return &Log{ - Type: NewTransactionLogType, - Date: time, - Data: NewTransactionLogPayload{ - Transaction: tx, - AccountMetadata: accountMetadata, +func (p CreatedTransaction) Type() LogType { + return NewLogType +} + +var _ LogPayload = (*CreatedTransaction)(nil) + +func (p CreatedTransaction) GetMemento() any { + // Exclude postCommitVolumes and postCommitEffectiveVolumes fields from transactions. + // We don't want those fields to be part of the hash as they are not part of the decision-making process. + type transactionResume struct { + Postings Postings `json:"postings"` + Metadata metadata.Metadata `json:"metadata"` + Timestamp time.Time `json:"timestamp"` + Reference string `json:"reference,omitempty"` + ID int `json:"id"` + Reverted bool `json:"reverted"` + } + + return struct { + Transaction transactionResume `json:"transaction"` + AccountMetadata AccountMetadata `json:"accountMetadata"` + }{ + Transaction: transactionResume{ + Postings: p.Transaction.Postings, + Metadata: p.Transaction.Metadata, + Timestamp: p.Transaction.Timestamp, + Reference: p.Transaction.Reference, + ID: p.Transaction.ID, }, + AccountMetadata: p.AccountMetadata, } } -func NewTransactionLog(tx *Transaction, accountMetadata map[string]metadata.Metadata) *Log { - return NewTransactionLogWithDate(tx, accountMetadata, time.Now()) -} +var _ Memento = (*CreatedTransaction)(nil) -type SetMetadataLogPayload struct { +type SavedMetadata struct { TargetType string `json:"targetType"` TargetID any `json:"targetId"` Metadata metadata.Metadata `json:"metadata"` } -func (s *SetMetadataLogPayload) UnmarshalJSON(data []byte) error { +func (s SavedMetadata) Type() LogType { + return SetMetadataLogType +} + +var _ LogPayload = (*SavedMetadata)(nil) + +func (s *SavedMetadata) UnmarshalJSON(data []byte) error { type X struct { TargetType string `json:"targetType"` TargetID json.RawMessage `json:"targetId"` @@ -201,7 +253,8 @@ func (s *SetMetadataLogPayload) UnmarshalJSON(data []byte) error { id = "" err = json.Unmarshal(x.TargetID, &id) case strings.ToUpper(MetaTargetTypeTransaction): - id, err = strconv.ParseUint(string(x.TargetID), 10, 64) + id, err = strconv.ParseInt(string(x.TargetID), 10, 64) + id = int(id.(int64)) default: panic("unknown type") } @@ -209,7 +262,7 @@ func (s *SetMetadataLogPayload) UnmarshalJSON(data []byte) error { return err } - *s = SetMetadataLogPayload{ + *s = SavedMetadata{ TargetType: x.TargetType, TargetID: id, Metadata: x.Metadata, @@ -217,101 +270,120 @@ func (s *SetMetadataLogPayload) UnmarshalJSON(data []byte) error { return nil } -func NewSetMetadataLog(at time.Time, metadata SetMetadataLogPayload) *Log { - // Since the id is unique and the hash is a hash of the previous log, they - // will be filled at insertion time during the batch process. - return &Log{ - Type: SetMetadataLogType, - Date: at, - Data: metadata, - } -} - -type DeleteMetadataLogPayload struct { +type DeletedMetadata struct { TargetType string `json:"targetType"` TargetID any `json:"targetId"` Key string `json:"key"` } -func NewDeleteMetadataLog(at time.Time, payload DeleteMetadataLogPayload) *Log { - // Since the id is unique and the hash is a hash of the previous log, they - // will be filled at insertion time during the batch process. 
- return &Log{ - Type: DeleteMetadataLogType, - Date: at, - Data: payload, - } +func (s DeletedMetadata) Type() LogType { + return DeleteMetadataLogType } -func NewSetMetadataOnAccountLog(at time.Time, account string, metadata metadata.Metadata) *Log { - return &Log{ - Type: SetMetadataLogType, - Date: at, - Data: SetMetadataLogPayload{ - TargetType: MetaTargetTypeAccount, - TargetID: account, - Metadata: metadata, - }, +var _ LogPayload = (*DeletedMetadata)(nil) + +func (s *DeletedMetadata) UnmarshalJSON(data []byte) error { + type X struct { + TargetType string `json:"targetType"` + TargetID json.RawMessage `json:"targetId"` + Key string `json:"key"` + } + x := X{} + err := json.Unmarshal(data, &x) + if err != nil { + return err + } + var id interface{} + switch strings.ToUpper(x.TargetType) { + case strings.ToUpper(MetaTargetTypeAccount): + id = "" + err = json.Unmarshal(x.TargetID, &id) + case strings.ToUpper(MetaTargetTypeTransaction): + id, err = strconv.ParseInt(string(x.TargetID), 10, 64) + id = int(id.(int64)) + default: + return fmt.Errorf("unknown type '%s'", x.TargetType) + } + if err != nil { + return err } -} -func NewSetMetadataOnTransactionLog(at time.Time, txID *big.Int, metadata metadata.Metadata) *Log { - return &Log{ - Type: SetMetadataLogType, - Date: at, - Data: SetMetadataLogPayload{ - TargetType: MetaTargetTypeTransaction, - TargetID: txID, - Metadata: metadata, - }, + *s = DeletedMetadata{ + TargetType: x.TargetType, + TargetID: id, + Key: x.Key, } + return nil +} + +type RevertedTransaction struct { + RevertedTransaction Transaction `json:"revertedTransaction"` + RevertTransaction Transaction `json:"transaction"` } -type RevertedTransactionLogPayload struct { - RevertedTransactionID *big.Int `json:"revertedTransactionID"` - RevertTransaction *Transaction `json:"transaction"` +func (r RevertedTransaction) Type() LogType { + return RevertedTransactionLogType } -func NewRevertedTransactionLog(at time.Time, revertedTxID *big.Int, tx *Transaction) *Log { - return &Log{ - Type: RevertedTransactionLogType, - Date: at, - Data: RevertedTransactionLogPayload{ - RevertedTransactionID: revertedTxID, - RevertTransaction: tx, +var _ LogPayload = (*RevertedTransaction)(nil) + +func (r RevertedTransaction) GetMemento() any { + + type transactionResume struct { + Postings Postings `json:"postings"` + Metadata metadata.Metadata `json:"metadata"` + Timestamp time.Time `json:"timestamp"` + Reference string `json:"reference,omitempty"` + ID int `json:"id"` + Reverted bool `json:"reverted"` + } + + return struct { + RevertedTransactionID int `json:"revertedTransactionID"` + RevertTransaction transactionResume `json:"transaction"` + }{ + RevertedTransactionID: r.RevertedTransaction.ID, + RevertTransaction: transactionResume{ + Postings: r.RevertTransaction.Postings, + Metadata: r.RevertTransaction.Metadata, + Timestamp: r.RevertTransaction.Timestamp, + Reference: r.RevertTransaction.Reference, + ID: r.RevertTransaction.ID, }, } } -func HydrateLog(_type LogType, data []byte) (any, error) { +var _ Memento = (*RevertedTransaction)(nil) + +func HydrateLog(_type LogType, data []byte) (LogPayload, error) { var payload any switch _type { - case NewTransactionLogType: - payload = &NewTransactionLogPayload{} + case NewLogType: + payload = &CreatedTransaction{} case SetMetadataLogType: - payload = &SetMetadataLogPayload{} + payload = &SavedMetadata{} + case DeleteMetadataLogType: + payload = &DeletedMetadata{} case RevertedTransactionLogType: - payload = &RevertedTransactionLogPayload{} + payload 
= &RevertedTransaction{} default: - panic("unknown type " + _type.String()) + return nil, fmt.Errorf("unknown type '%s'", _type) } err := json.Unmarshal(data, &payload) if err != nil { return nil, err } - return reflect.ValueOf(payload).Elem().Interface(), nil + return reflect.ValueOf(payload).Elem().Interface().(LogPayload), nil } -type Accounts map[string]Account +func ComputeIdempotencyHash(inputs any) string { + digest := sha256.New() + enc := json.NewEncoder(digest) -func ChainLogs(logs ...*Log) []*ChainedLog { - var previous *ChainedLog - ret := make([]*ChainedLog, 0) - for _, log := range logs { - next := log.ChainLog(previous) - ret = append(ret, next) - previous = next + if err := enc.Encode(inputs); err != nil { + panic(err) } - return ret + + return base64.URLEncoding.EncodeToString(digest.Sum(nil)) } diff --git a/internal/machine/account.go b/internal/machine/account.go index bf98e74b3..59a9d27e0 100644 --- a/internal/machine/account.go +++ b/internal/machine/account.go @@ -2,8 +2,7 @@ package machine import ( "fmt" - - "github.com/formancehq/ledger/pkg/core/accounts" + "github.com/formancehq/ledger/pkg/accounts" ) type AccountAddress string diff --git a/internal/machine/asset.go b/internal/machine/asset.go index ceb03ac65..2e63e64cf 100644 --- a/internal/machine/asset.go +++ b/internal/machine/asset.go @@ -2,8 +2,7 @@ package machine import ( "fmt" - - "github.com/formancehq/ledger/pkg/core/assets" + "github.com/formancehq/ledger/pkg/assets" ) type Asset string diff --git a/internal/machine/errors.go b/internal/machine/errors.go index fd6c8b41b..47191a129 100644 --- a/internal/machine/errors.go +++ b/internal/machine/errors.go @@ -3,7 +3,7 @@ package machine import ( "fmt" - "github.com/pkg/errors" + "errors" ) var ( @@ -116,6 +116,10 @@ type ErrMetadataOverride struct { key string } +func (e *ErrMetadataOverride) Key() string { + return e.key +} + func (e *ErrMetadataOverride) Error() string { return fmt.Sprintf("cannot override metadata '%s'", e.key) } diff --git a/internal/machine/examples/basic.go b/internal/machine/examples/basic.go index ba87e3b19..b1bf59f7c 100644 --- a/internal/machine/examples/basic.go +++ b/internal/machine/examples/basic.go @@ -5,10 +5,10 @@ import ( "fmt" "math/big" - "github.com/formancehq/go-libs/metadata" + "github.com/formancehq/go-libs/v2/metadata" "github.com/formancehq/ledger/internal" "github.com/formancehq/ledger/internal/machine/script/compiler" - vm2 "github.com/formancehq/ledger/internal/machine/vm" + "github.com/formancehq/ledger/internal/machine/vm" ) func main() { @@ -32,7 +32,7 @@ func main() { } fmt.Print(program) - m := vm2.NewMachine(*program) + m := vm.NewMachine(*program) m.Debug = true if err = m.SetVarsFromJSON(map[string]string{ @@ -50,9 +50,9 @@ func main() { }, } - store := vm2.StaticStore{} + store := vm.StaticStore{} for account, balances := range initialVolumes { - store[account] = &vm2.AccountWithBalances{ + store[account] = &vm.AccountWithBalances{ Account: ledger.Account{ Address: account, Metadata: metadata.Metadata{}, @@ -61,7 +61,7 @@ func main() { } } - _, _, err = m.ResolveResources(context.Background(), vm2.EmptyStore) + err = m.ResolveResources(context.Background(), vm.EmptyStore) if err != nil { panic(err) } diff --git a/internal/machine/funding.go b/internal/machine/funding.go index f3b49e176..97f633fe5 100644 --- a/internal/machine/funding.go +++ b/internal/machine/funding.go @@ -5,7 +5,7 @@ import ( "fmt" "strings" - collec "github.com/formancehq/go-libs/collectionutils" + collec 
"github.com/formancehq/go-libs/v2/collectionutils" ) type FundingPart struct { @@ -97,7 +97,7 @@ func (f Funding) Take(amount *MonetaryInt) (Funding, Funding, error) { return fp.Account.String() }) - return Funding{}, Funding{}, NewErrInsufficientFund(fmt.Sprintf("account(s) %s had/have insufficient funds", strings.Join(lstAccounts, "|"))) + return Funding{}, Funding{}, NewErrInsufficientFund("account(s) %s had/have insufficient funds", strings.Join(lstAccounts, "|")) } return result, remainder, nil } diff --git a/internal/machine/json.go b/internal/machine/json.go index 7f9387472..8c4d9c79e 100644 --- a/internal/machine/json.go +++ b/internal/machine/json.go @@ -5,7 +5,7 @@ import ( "fmt" "strings" - "github.com/pkg/errors" + "errors" ) type ValueJSON struct { @@ -18,12 +18,12 @@ func NewValueFromString(typ Type, data string) (Value, error) { switch typ { case TypeAccount: if err := ValidateAccountAddress(AccountAddress(data)); err != nil { - return nil, errors.Wrapf(err, "value %s", data) + return nil, fmt.Errorf("value %s: %w", data, err) } value = AccountAddress(data) case TypeAsset: if err := ValidateAsset(Asset(data)); err != nil { - return nil, errors.Wrapf(err, "value %s", data) + return nil, fmt.Errorf("value %s: %v", data, err) } value = Asset(data) case TypeNumber: @@ -46,7 +46,7 @@ func NewValueFromString(typ Type, data string) (Value, error) { Amount: mi, } if err := ParseMonetary(mon); err != nil { - return nil, errors.Wrapf(err, "value %s", mon.String()) + return nil, fmt.Errorf("value %s: %w", mon.String(), err) } value = mon case TypePortion: diff --git a/internal/machine/monetary.go b/internal/machine/monetary.go index 489f75e0a..ebd92f55d 100644 --- a/internal/machine/monetary.go +++ b/internal/machine/monetary.go @@ -4,7 +4,7 @@ import ( "fmt" "math/big" - "github.com/pkg/errors" + "errors" ) type Monetary struct { @@ -28,13 +28,13 @@ var Zero = NewMonetaryInt(0) func ParseMonetary(mon Monetary) error { if err := ValidateAsset(mon.Asset); err != nil { - return errors.Wrapf(err, "asset '%s'", mon.Asset) + return fmt.Errorf("asset '%s': %w", mon.Asset, err) } if mon.Amount == nil { - return errors.Errorf("nil amount") + return fmt.Errorf("nil amount") } if mon.Amount.Ltz() { - return errors.Errorf("negative amount") + return fmt.Errorf("negative amount") } return nil } @@ -142,6 +142,10 @@ func (a *MonetaryInt) UnmarshalText(b []byte) error { return (*big.Int)(a).UnmarshalText(b) } +func (a *MonetaryInt) ToBigInt() *big.Int { + return (*big.Int)(a) +} + func NewMonetaryInt(i int64) *MonetaryInt { return (*MonetaryInt)(big.NewInt(i)) } diff --git a/internal/machine/script/compiler/allotment.go b/internal/machine/script/compiler/allotment.go index 40bb0b6d6..341f748f1 100644 --- a/internal/machine/script/compiler/allotment.go +++ b/internal/machine/script/compiler/allotment.go @@ -9,7 +9,7 @@ import ( "github.com/antlr/antlr4/runtime/Go/antlr" "github.com/formancehq/ledger/internal/machine/script/parser" - program2 "github.com/formancehq/ledger/internal/machine/vm/program" + "github.com/formancehq/ledger/internal/machine/vm/program" ) func (p *parseVisitor) VisitAllotment(c antlr.ParserRuleContext, portions []parser.IAllotmentPortionContext) *CompileError { @@ -26,7 +26,7 @@ func (p *parseVisitor) VisitAllotment(c antlr.ParserRuleContext, portions []pars } rat := *portion.Specific total.Add(&rat, total) - addr, err := p.AllocateResource(program2.Constant{Inner: *portion}) + addr, err := p.AllocateResource(program.Constant{Inner: *portion}) if err != nil { return 
LogicError(c, err) } @@ -48,7 +48,7 @@ func (p *parseVisitor) VisitAllotment(c antlr.ParserRuleContext, portions []pars errors.New("two uses of `remaining` in the same allocation"), ) } - addr, err := p.AllocateResource(program2.Constant{Inner: machine.NewPortionRemaining()}) + addr, err := p.AllocateResource(program.Constant{Inner: machine.NewPortionRemaining()}) if err != nil { return LogicError(c, err) } @@ -80,6 +80,6 @@ func (p *parseVisitor) VisitAllotment(c antlr.ParserRuleContext, portions []pars if err != nil { return LogicError(c, err) } - p.AppendInstruction(program2.OP_MAKE_ALLOTMENT) + p.AppendInstruction(program.OP_MAKE_ALLOTMENT) return nil } diff --git a/internal/machine/script/compiler/compiler.go b/internal/machine/script/compiler/compiler.go index 750796323..53800a837 100644 --- a/internal/machine/script/compiler/compiler.go +++ b/internal/machine/script/compiler/compiler.go @@ -2,16 +2,15 @@ package compiler import ( "fmt" - "sort" "strconv" "strings" "github.com/formancehq/ledger/internal/machine" + "errors" "github.com/antlr/antlr4/runtime/Go/antlr" - parser "github.com/formancehq/ledger/internal/machine/script/parser" - program "github.com/formancehq/ledger/internal/machine/vm/program" - "github.com/pkg/errors" + "github.com/formancehq/ledger/internal/machine/script/parser" + "github.com/formancehq/ledger/internal/machine/vm/program" ) type parseVisitor struct { @@ -26,15 +25,6 @@ type parseVisitor struct { varIdx map[string]machine.Address // needBalances store for each account, the set of assets needed neededBalances map[machine.Address]map[machine.Address]struct{} - - // The sources accounts that aren't unbounded - // that is, @world or sources that appear within a - // '.. allowing unboundeed overdraft' clause - writeLockAccounts map[machine.Address]struct{} - - // all the accounts that appear in either the destination - // or in the balance() function - readLockAccounts map[machine.Address]struct{} } // Allocates constants if it hasn't already been, @@ -536,8 +526,7 @@ func (p *parseVisitor) VisitVars(c *parser.VarListDeclContext) *CompileError { addr, err = p.AllocateResource(program.Variable{Typ: ty, Name: name}) if err != nil { return &CompileError{ - Msg: errors.Wrap(err, - "allocating variable resource").Error(), + Msg: fmt.Errorf("allocating variable resource: %w", err).Error(), } } p.varIdx[name] = *addr @@ -589,7 +578,6 @@ func (p *parseVisitor) VisitVars(c *parser.VarListDeclContext) *CompileError { Account: *accAddr, Asset: *assAddr, }) - p.readLockAccounts[*accAddr] = struct{}{} if err != nil { return LogicError(c, err) } @@ -684,14 +672,12 @@ func CompileFull(input string) CompileArtifacts { } visitor := parseVisitor{ - errListener: errListener, - instructions: make([]byte, 0), - resources: make([]program.Resource, 0), - varIdx: make(map[string]machine.Address), - neededBalances: make(map[machine.Address]map[machine.Address]struct{}), - sources: map[machine.Address]struct{}{}, - writeLockAccounts: map[machine.Address]struct{}{}, - readLockAccounts: map[machine.Address]struct{}{}, + errListener: errListener, + instructions: make([]byte, 0), + resources: make([]program.Resource, 0), + varIdx: make(map[string]machine.Address), + neededBalances: make(map[machine.Address]map[machine.Address]struct{}), + sources: map[machine.Address]struct{}{}, } err := visitor.VisitScript(tree) @@ -700,24 +686,10 @@ func CompileFull(input string) CompileArtifacts { return artifacts } - readLockAccounts := make(machine.Addresses, 0) - for address := range 
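The error-handling hunks above (errors.go, json.go, monetary.go, compiler.go) replace github.com/pkg/errors with the standard library. A brief hedged sketch of the equivalence, using made-up values rather than code from the patch: errors.Wrapf(err, "value %s", data) becomes fmt.Errorf("value %s: %w", data, err), and the wrapped error stays reachable through errors.Is / errors.As.

package main

import (
	"errors"
	"fmt"
)

// errInvalidAsset stands in for a validation error returned by the ledger's
// helpers; the real sentinel values differ.
var errInvalidAsset = errors.New("invalid asset")

func validateAsset(data string) error {
	if data == "" {
		return errInvalidAsset
	}
	return nil
}

func main() {
	if err := validateAsset(""); err != nil {
		// %w is the stdlib replacement for errors.Wrapf: it adds context while
		// keeping the original error in the chain.
		wrapped := fmt.Errorf("value %s: %w", "USD", err)
		fmt.Println(errors.Is(wrapped, errInvalidAsset)) // true
	}
}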
visitor.readLockAccounts { - readLockAccounts = append(readLockAccounts, address) - } - sort.Stable(readLockAccounts) - - writeLockAccounts := make(machine.Addresses, 0) - for address := range visitor.writeLockAccounts { - writeLockAccounts = append(writeLockAccounts, address) - } - sort.Stable(writeLockAccounts) - artifacts.Program = &program.Program{ - Instructions: visitor.instructions, - Resources: visitor.resources, - NeededBalances: visitor.neededBalances, - ReadLockAccounts: readLockAccounts, - WriteLockAccounts: writeLockAccounts, + Instructions: visitor.instructions, + Resources: visitor.resources, + NeededBalances: visitor.neededBalances, } return artifacts diff --git a/internal/machine/script/compiler/compiler_test.go b/internal/machine/script/compiler/compiler_test.go index a3b694022..f76cd0f00 100644 --- a/internal/machine/script/compiler/compiler_test.go +++ b/internal/machine/script/compiler/compiler_test.go @@ -9,7 +9,7 @@ import ( "github.com/formancehq/ledger/internal/machine" - program2 "github.com/formancehq/ledger/internal/machine/vm/program" + "github.com/formancehq/ledger/internal/machine/vm/program" "github.com/stretchr/testify/require" ) @@ -20,7 +20,7 @@ type TestCase struct { type CaseResult struct { Instructions []byte - Resources []program2.Resource + Resources []program.Resource Variables []string Error string } @@ -62,27 +62,27 @@ func test(t *testing.T, c TestCase) { } } -func checkResourcesEqual(actual, expected program2.Resource) bool { +func checkResourcesEqual(actual, expected program.Resource) bool { if reflect.TypeOf(actual) != reflect.TypeOf(expected) { return false } switch res := actual.(type) { - case program2.Constant: - return machine.ValueEquals(res.Inner, expected.(program2.Constant).Inner) - case program2.Variable: - e := expected.(program2.Variable) + case program.Constant: + return machine.ValueEquals(res.Inner, expected.(program.Constant).Inner) + case program.Variable: + e := expected.(program.Variable) return res.Typ == e.Typ && res.Name == e.Name - case program2.VariableAccountMetadata: - e := expected.(program2.VariableAccountMetadata) + case program.VariableAccountMetadata: + e := expected.(program.VariableAccountMetadata) return res.Account == e.Account && res.Key == e.Key && res.Typ == e.Typ - case program2.VariableAccountBalance: - e := expected.(program2.VariableAccountBalance) + case program.VariableAccountBalance: + e := expected.(program.VariableAccountBalance) return res.Account == e.Account && res.Asset == e.Asset - case program2.Monetary: - e := expected.(program2.Monetary) + case program.Monetary: + e := expected.(program.Monetary) return res.Amount.Equal(e.Amount) && res.Asset == e.Asset default: panic(fmt.Errorf("invalid resource of type '%T'", res)) @@ -94,11 +94,11 @@ func TestSimplePrint(t *testing.T) { Case: "print 1", Expected: CaseResult{ Instructions: []byte{ - program2.OP_APUSH, 00, 00, - program2.OP_PRINT, + program.OP_APUSH, 00, 00, + program.OP_PRINT, }, - Resources: []program2.Resource{ - program2.Constant{Inner: machine.NewMonetaryInt(1)}, + Resources: []program.Resource{ + program.Constant{Inner: machine.NewMonetaryInt(1)}, }, }, }) @@ -109,17 +109,17 @@ func TestCompositeExpr(t *testing.T) { Case: "print 29 + 15 - 2", Expected: CaseResult{ Instructions: []byte{ - program2.OP_APUSH, 00, 00, - program2.OP_APUSH, 01, 00, - program2.OP_IADD, - program2.OP_APUSH, 02, 00, - program2.OP_ISUB, - program2.OP_PRINT, + program.OP_APUSH, 00, 00, + program.OP_APUSH, 01, 00, + program.OP_IADD, + program.OP_APUSH, 02, 00, 
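As context for the CompileFull change above, a hedged usage sketch: CompileFull, the Program field on the returned artifacts, and the Instructions / Resources / NeededBalances fields are taken from the diff, while the surrounding function is an assumption, and the snippet presumes it lives inside the ledger module where the internal packages are importable.

package example

import (
	"github.com/formancehq/ledger/internal/machine/script/compiler"
	"github.com/formancehq/ledger/internal/machine/vm/program"
)

// compileScript shows what a caller sees after this change: the compiled
// Program carries only Instructions, Resources and NeededBalances; the
// read/write lock account lists have been removed.
func compileScript(src string) *program.Program {
	artifacts := compiler.CompileFull(src)
	if artifacts.Program == nil {
		return nil // compilation failed
	}
	p := artifacts.Program
	_ = p.Instructions   // bytecode executed by the VM
	_ = p.Resources      // constants, variables, monetaries
	_ = p.NeededBalances // balances the VM resolves before execution
	return p
}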
+ program.OP_ISUB, + program.OP_PRINT, }, - Resources: []program2.Resource{ - program2.Constant{Inner: machine.NewMonetaryInt(29)}, - program2.Constant{Inner: machine.NewMonetaryInt(15)}, - program2.Constant{Inner: machine.NewMonetaryInt(2)}, + Resources: []program.Resource{ + program.Constant{Inner: machine.NewMonetaryInt(29)}, + program.Constant{Inner: machine.NewMonetaryInt(15)}, + program.Constant{Inner: machine.NewMonetaryInt(2)}, }, }, }) @@ -129,8 +129,8 @@ func TestFail(t *testing.T) { test(t, TestCase{ Case: "fail", Expected: CaseResult{ - Instructions: []byte{program2.OP_FAIL}, - Resources: []program2.Resource{}, + Instructions: []byte{program.OP_FAIL}, + Resources: []program.Resource{}, }, }) } @@ -140,14 +140,14 @@ func TestCRLF(t *testing.T) { Case: "print @a\r\nprint @b", Expected: CaseResult{ Instructions: []byte{ - program2.OP_APUSH, 00, 00, - program2.OP_PRINT, - program2.OP_APUSH, 01, 00, - program2.OP_PRINT, + program.OP_APUSH, 00, 00, + program.OP_PRINT, + program.OP_APUSH, 01, 00, + program.OP_PRINT, }, - Resources: []program2.Resource{ - program2.Constant{Inner: machine.AccountAddress("a")}, - program2.Constant{Inner: machine.AccountAddress("b")}, + Resources: []program.Resource{ + program.Constant{Inner: machine.AccountAddress("a")}, + program.Constant{Inner: machine.AccountAddress("b")}, }, }, }) @@ -159,10 +159,10 @@ func TestConstant(t *testing.T) { Case: "print @user:U001", Expected: CaseResult{ Instructions: []byte{ - program2.OP_APUSH, 00, 00, - program2.OP_PRINT, + program.OP_APUSH, 00, 00, + program.OP_PRINT, }, - Resources: []program2.Resource{program2.Constant{Inner: user}}, + Resources: []program.Resource{program.Constant{Inner: user}}, }, }) } @@ -179,42 +179,42 @@ func TestSetTxMeta(t *testing.T) { `, Expected: CaseResult{ Instructions: []byte{ - program2.OP_APUSH, 00, 00, - program2.OP_APUSH, 01, 00, - program2.OP_TX_META, - program2.OP_APUSH, 02, 00, - program2.OP_APUSH, 03, 00, - program2.OP_TX_META, - program2.OP_APUSH, 04, 00, - program2.OP_APUSH, 05, 00, - program2.OP_TX_META, - program2.OP_APUSH, 06, 00, - program2.OP_APUSH, 07, 00, - program2.OP_TX_META, - program2.OP_APUSH, 9, 00, - program2.OP_APUSH, 10, 00, - program2.OP_TX_META, - program2.OP_APUSH, 11, 00, - program2.OP_APUSH, 12, 00, - program2.OP_TX_META, + program.OP_APUSH, 00, 00, + program.OP_APUSH, 01, 00, + program.OP_TX_META, + program.OP_APUSH, 02, 00, + program.OP_APUSH, 03, 00, + program.OP_TX_META, + program.OP_APUSH, 04, 00, + program.OP_APUSH, 05, 00, + program.OP_TX_META, + program.OP_APUSH, 06, 00, + program.OP_APUSH, 07, 00, + program.OP_TX_META, + program.OP_APUSH, 9, 00, + program.OP_APUSH, 10, 00, + program.OP_TX_META, + program.OP_APUSH, 11, 00, + program.OP_APUSH, 12, 00, + program.OP_TX_META, }, - Resources: []program2.Resource{ - program2.Constant{Inner: machine.AccountAddress("platform")}, - program2.Constant{Inner: machine.String("aaa")}, - program2.Constant{Inner: machine.Asset("GEM")}, - program2.Constant{Inner: machine.String("bbb")}, - program2.Constant{Inner: machine.NewNumber(42)}, - program2.Constant{Inner: machine.String("ccc")}, - program2.Constant{Inner: machine.String("test")}, - program2.Constant{Inner: machine.String("ddd")}, - program2.Constant{Inner: machine.Asset("COIN")}, - program2.Monetary{Asset: 8, Amount: machine.NewMonetaryInt(30)}, - program2.Constant{Inner: machine.String("eee")}, - program2.Constant{Inner: machine.Portion{ + Resources: []program.Resource{ + program.Constant{Inner: machine.AccountAddress("platform")}, + 
program.Constant{Inner: machine.String("aaa")}, + program.Constant{Inner: machine.Asset("GEM")}, + program.Constant{Inner: machine.String("bbb")}, + program.Constant{Inner: machine.NewNumber(42)}, + program.Constant{Inner: machine.String("ccc")}, + program.Constant{Inner: machine.String("test")}, + program.Constant{Inner: machine.String("ddd")}, + program.Constant{Inner: machine.Asset("COIN")}, + program.Monetary{Asset: 8, Amount: machine.NewMonetaryInt(30)}, + program.Constant{Inner: machine.String("eee")}, + program.Constant{Inner: machine.Portion{ Remaining: false, Specific: big.NewRat(15, 100), }}, - program2.Constant{Inner: machine.String("fff")}, + program.Constant{Inner: machine.String("fff")}, }, }, }) @@ -230,13 +230,13 @@ func TestSetTxMetaVars(t *testing.T) { `, Expected: CaseResult{ Instructions: []byte{ - program2.OP_APUSH, 00, 00, - program2.OP_APUSH, 01, 00, - program2.OP_TX_META, + program.OP_APUSH, 00, 00, + program.OP_APUSH, 01, 00, + program.OP_TX_META, }, - Resources: []program2.Resource{ - program2.Variable{Typ: machine.TypePortion, Name: "commission"}, - program2.Constant{Inner: machine.String("fee")}, + Resources: []program.Resource{ + program.Variable{Typ: machine.TypePortion, Name: "commission"}, + program.Constant{Inner: machine.String("fee")}, }, }, }) @@ -255,11 +255,11 @@ func TestComments(t *testing.T) { `, Expected: CaseResult{ Instructions: []byte{ - program2.OP_APUSH, 00, 00, - program2.OP_PRINT, + program.OP_APUSH, 00, 00, + program.OP_PRINT, }, - Resources: []program2.Resource{ - program2.Variable{Typ: machine.TypeAccount, Name: "a"}, + Resources: []program.Resource{ + program.Variable{Typ: machine.TypeAccount, Name: "a"}, }, }, }) @@ -317,63 +317,63 @@ func TestDestinationAllotment(t *testing.T) { )`, Expected: CaseResult{ Instructions: []byte{ - program2.OP_APUSH, 02, 00, // @foo - program2.OP_APUSH, 01, 00, // @foo, [EUR/2 43] - program2.OP_ASSET, // @foo, EUR/2 - program2.OP_APUSH, 03, 00, // @foo, EUR/2, 0 - program2.OP_MONETARY_NEW, // @foo, [EUR/2 0] - program2.OP_TAKE_ALL, // [EUR/2 @foo ] - program2.OP_APUSH, 01, 00, // [EUR/2 @foo ], [EUR/2 43] - program2.OP_TAKE, // [EUR/2 @foo ], [EUR/2 @foo 43] - program2.OP_APUSH, 04, 00, // [EUR/2 @foo ], [EUR/2 @foo 43] 1 - program2.OP_BUMP, // [EUR/2 @foo 43], [EUR/2 @foo ] - program2.OP_REPAY, // [EUR/2 @foo 43] - program2.OP_FUNDING_SUM, // [EUR/2 @foo 43], [EUR/2 43] - program2.OP_APUSH, 05, 00, // [EUR/2 @foo 43], [EUR/2 43], 7/8 - program2.OP_APUSH, 06, 00, // [EUR/2 @foo 43], [EUR/2 43], 7/8, 1/8 - program2.OP_APUSH, 07, 00, // [EUR/2 @foo 43], [EUR/2 43], 7/8, 1/8, 2 - program2.OP_MAKE_ALLOTMENT, // [EUR/2 @foo 43], [EUR/2 43], {1/8 : 7/8} - program2.OP_ALLOC, // [EUR/2 @foo 43], [EUR/2 37], [EUR/2 6] - program2.OP_APUSH, 07, 00, // [EUR/2 @foo 43], [EUR/2 37] [EUR/2 6], 2 - program2.OP_BUMP, // [EUR/2 37], [EUR/2 6], [EUR/2 @foo 43] - program2.OP_APUSH, 04, 00, // [EUR/2 37], [EUR/2 6], [EUR/2 @foo 43] 1 - program2.OP_BUMP, // [EUR/2 37], [EUR/2 @foo 43], [EUR/2 6] - program2.OP_TAKE, // [EUR/2 37], [EUR/2 @foo 37], [EUR/2 @foo 6] - program2.OP_FUNDING_SUM, // [EUR/2 37], [EUR/2 @foo 37], [EUR/2 @foo 6] [EUR/2 6] - program2.OP_TAKE, // [EUR/2 37], [EUR/2 @foo 37], [EUR/2] [EUR/2 @foo 6] - program2.OP_APUSH, 8, 00, // [EUR/2 37], [EUR/2 @foo 37], [EUR/2] [EUR/2 @foo 6], @bar - program2.OP_SEND, // [EUR/2 37], [EUR/2 @foo 37], [EUR/2] - program2.OP_APUSH, 04, 00, // [EUR/2 37], [EUR/2 @foo 37], [EUR/2] 1 - program2.OP_BUMP, // [EUR/2 37], [EUR/2], [EUR/2 @foo 37] - program2.OP_APUSH, 07, 00, // 
[EUR/2 37], [EUR/2], [EUR/2 @foo 37] 2 - program2.OP_FUNDING_ASSEMBLE, // [EUR/2 37], [EUR/2 @foo 37] - program2.OP_APUSH, 04, 00, // [EUR/2 37], [EUR/2 @foo 37], 1 - program2.OP_BUMP, // [EUR/2 @foo 37], [EUR/2 37] - program2.OP_TAKE, // [EUR/2], [EUR/2 @foo 37] - program2.OP_FUNDING_SUM, // [EUR/2], [EUR/2 @foo 37], [EUR/2 37] - program2.OP_TAKE, // [EUR/2], [EUR/2], [EUR/2 @foo 37] - program2.OP_APUSH, 9, 00, // [EUR/2], [EUR/2], [EUR/2 @foo 37], @baz - program2.OP_SEND, // [EUR/2], [EUR/2] - program2.OP_APUSH, 04, 00, // [EUR/2], [EUR/2], 1 - program2.OP_BUMP, // [EUR/2], [EUR/2] - program2.OP_APUSH, 07, 00, // [EUR/2], [EUR/2], 2 - program2.OP_FUNDING_ASSEMBLE, // [EUR/2] - program2.OP_REPAY, // + program.OP_APUSH, 02, 00, // @foo + program.OP_APUSH, 01, 00, // @foo, [EUR/2 43] + program.OP_ASSET, // @foo, EUR/2 + program.OP_APUSH, 03, 00, // @foo, EUR/2, 0 + program.OP_MONETARY_NEW, // @foo, [EUR/2 0] + program.OP_TAKE_ALL, // [EUR/2 @foo ] + program.OP_APUSH, 01, 00, // [EUR/2 @foo ], [EUR/2 43] + program.OP_TAKE, // [EUR/2 @foo ], [EUR/2 @foo 43] + program.OP_APUSH, 04, 00, // [EUR/2 @foo ], [EUR/2 @foo 43] 1 + program.OP_BUMP, // [EUR/2 @foo 43], [EUR/2 @foo ] + program.OP_REPAY, // [EUR/2 @foo 43] + program.OP_FUNDING_SUM, // [EUR/2 @foo 43], [EUR/2 43] + program.OP_APUSH, 05, 00, // [EUR/2 @foo 43], [EUR/2 43], 7/8 + program.OP_APUSH, 06, 00, // [EUR/2 @foo 43], [EUR/2 43], 7/8, 1/8 + program.OP_APUSH, 07, 00, // [EUR/2 @foo 43], [EUR/2 43], 7/8, 1/8, 2 + program.OP_MAKE_ALLOTMENT, // [EUR/2 @foo 43], [EUR/2 43], {1/8 : 7/8} + program.OP_ALLOC, // [EUR/2 @foo 43], [EUR/2 37], [EUR/2 6] + program.OP_APUSH, 07, 00, // [EUR/2 @foo 43], [EUR/2 37] [EUR/2 6], 2 + program.OP_BUMP, // [EUR/2 37], [EUR/2 6], [EUR/2 @foo 43] + program.OP_APUSH, 04, 00, // [EUR/2 37], [EUR/2 6], [EUR/2 @foo 43] 1 + program.OP_BUMP, // [EUR/2 37], [EUR/2 @foo 43], [EUR/2 6] + program.OP_TAKE, // [EUR/2 37], [EUR/2 @foo 37], [EUR/2 @foo 6] + program.OP_FUNDING_SUM, // [EUR/2 37], [EUR/2 @foo 37], [EUR/2 @foo 6] [EUR/2 6] + program.OP_TAKE, // [EUR/2 37], [EUR/2 @foo 37], [EUR/2] [EUR/2 @foo 6] + program.OP_APUSH, 8, 00, // [EUR/2 37], [EUR/2 @foo 37], [EUR/2] [EUR/2 @foo 6], @bar + program.OP_SEND, // [EUR/2 37], [EUR/2 @foo 37], [EUR/2] + program.OP_APUSH, 04, 00, // [EUR/2 37], [EUR/2 @foo 37], [EUR/2] 1 + program.OP_BUMP, // [EUR/2 37], [EUR/2], [EUR/2 @foo 37] + program.OP_APUSH, 07, 00, // [EUR/2 37], [EUR/2], [EUR/2 @foo 37] 2 + program.OP_FUNDING_ASSEMBLE, // [EUR/2 37], [EUR/2 @foo 37] + program.OP_APUSH, 04, 00, // [EUR/2 37], [EUR/2 @foo 37], 1 + program.OP_BUMP, // [EUR/2 @foo 37], [EUR/2 37] + program.OP_TAKE, // [EUR/2], [EUR/2 @foo 37] + program.OP_FUNDING_SUM, // [EUR/2], [EUR/2 @foo 37], [EUR/2 37] + program.OP_TAKE, // [EUR/2], [EUR/2], [EUR/2 @foo 37] + program.OP_APUSH, 9, 00, // [EUR/2], [EUR/2], [EUR/2 @foo 37], @baz + program.OP_SEND, // [EUR/2], [EUR/2] + program.OP_APUSH, 04, 00, // [EUR/2], [EUR/2], 1 + program.OP_BUMP, // [EUR/2], [EUR/2] + program.OP_APUSH, 07, 00, // [EUR/2], [EUR/2], 2 + program.OP_FUNDING_ASSEMBLE, // [EUR/2] + program.OP_REPAY, // }, - Resources: []program2.Resource{ - program2.Constant{Inner: machine.Asset("EUR/2")}, - program2.Monetary{ + Resources: []program.Resource{ + program.Constant{Inner: machine.Asset("EUR/2")}, + program.Monetary{ Asset: 0, Amount: machine.NewMonetaryInt(43), }, - program2.Constant{Inner: machine.AccountAddress("foo")}, - program2.Constant{Inner: machine.NewMonetaryInt(0)}, - program2.Constant{Inner: machine.NewMonetaryInt(1)}, - 
program2.Constant{Inner: machine.Portion{Specific: big.NewRat(7, 8)}}, - program2.Constant{Inner: machine.Portion{Specific: big.NewRat(1, 8)}}, - program2.Constant{Inner: machine.NewMonetaryInt(2)}, - program2.Constant{Inner: machine.AccountAddress("bar")}, - program2.Constant{Inner: machine.AccountAddress("baz")}, + program.Constant{Inner: machine.AccountAddress("foo")}, + program.Constant{Inner: machine.NewMonetaryInt(0)}, + program.Constant{Inner: machine.NewMonetaryInt(1)}, + program.Constant{Inner: machine.Portion{Specific: big.NewRat(7, 8)}}, + program.Constant{Inner: machine.Portion{Specific: big.NewRat(1, 8)}}, + program.Constant{Inner: machine.NewMonetaryInt(2)}, + program.Constant{Inner: machine.AccountAddress("bar")}, + program.Constant{Inner: machine.AccountAddress("baz")}, }, }, }) @@ -390,77 +390,77 @@ func TestDestinationInOrder(t *testing.T) { )`, Expected: CaseResult{ Instructions: []byte{ - program2.OP_APUSH, 02, 00, // @a - program2.OP_APUSH, 01, 00, // @a, [COIN 50] - program2.OP_ASSET, // @a, COIN - program2.OP_APUSH, 03, 00, // @a, COIN, 0 - program2.OP_MONETARY_NEW, // @a, [COIN 0] - program2.OP_TAKE_ALL, // [COIN @a ] - program2.OP_APUSH, 01, 00, // [COIN @a ], [COIN 50] - program2.OP_TAKE, // [COIN @a ], [COIN @a 50] - program2.OP_APUSH, 04, 00, // [COIN @a ], [COIN @a 50], 1 - program2.OP_BUMP, // [COIN @a 50], [COIN @a ] - program2.OP_REPAY, // [COIN @a 50] - program2.OP_FUNDING_SUM, // [COIN @a 50], [COIN 50] <- start of DestinationInOrder - program2.OP_ASSET, // [COIN @a 50], COIN - program2.OP_APUSH, 03, 00, // [COIN @a 50], COIN, 0 - program2.OP_MONETARY_NEW, // [COIN @a 50], [COIN 0] - program2.OP_APUSH, 04, 00, // [COIN @a 50], [COIN 0], 1 - program2.OP_BUMP, // [COIN 0], [COIN @a 50] - program2.OP_APUSH, 05, 00, // [COIN 0], [COIN @a 50], [COIN 10] <- start processing max subdestinations - program2.OP_TAKE_MAX, // [COIN 0], [COIN 0], [COIN @a 40], [COIN @a 10] - program2.OP_APUSH, 06, 00, // [COIN 0], [COIN 0], [COIN @a 40], [COIN @a 10], 2 - program2.OP_BUMP, // [COIN 0], [COIN @a 40], [COIN @a 10], [COIN 0] - program2.OP_DELETE, // [COIN 0], [COIN @a 40], [COIN @a 10] - program2.OP_FUNDING_SUM, // [COIN 0], [COIN @a 40], [COIN @a 10], [COIN 10] - program2.OP_TAKE, // [COIN 0], [COIN @a 40], [COIN], [COIN @a 10] - program2.OP_APUSH, 07, 00, // [COIN 0], [COIN @a 40], [COIN], [COIN @a 10], @b - program2.OP_SEND, // [COIN 0], [COIN @a 40], [COIN] - program2.OP_FUNDING_SUM, // [COIN 0], [COIN @a 40], [COIN], [COIN 0] - program2.OP_APUSH, 8, 00, // [COIN 0], [COIN @a 40], [COIN], [COIN 0], 3 - program2.OP_BUMP, // [COIN @a 40], [COIN], [COIN 0], [COIN 0] - program2.OP_MONETARY_ADD, // [COIN @a 40], [COIN], [COIN 0] - program2.OP_APUSH, 04, 00, // [COIN @a 40], [COIN], [COIN 0], 1 - program2.OP_BUMP, // [COIN @a 40], [COIN 0], [COIN] - program2.OP_APUSH, 06, 00, // [COIN @a 40], [COIN 0], [COIN] 2 - program2.OP_BUMP, // [COIN 0], [COIN], [COIN @a 40] - program2.OP_APUSH, 06, 00, // [COIN 0], [COIN], [COIN @a 40], 2 - program2.OP_FUNDING_ASSEMBLE, // [COIN 0], [COIN @a 40] - program2.OP_FUNDING_REVERSE, // [COIN 0], [COIN @a 40] <- start processing remaining subdestination - program2.OP_APUSH, 04, 00, // [COIN 0], [COIN @a 40], 1 - program2.OP_BUMP, // [COIN @a 40], [COIN 0] - program2.OP_TAKE, // [COIN @a 40], [COIN] - program2.OP_FUNDING_REVERSE, // [COIN @a 40], [COIN] - program2.OP_APUSH, 04, 00, // [COIN @a 40], [COIN], 1 - program2.OP_BUMP, // [COIN], [COIN @a 40] - program2.OP_FUNDING_REVERSE, // [COIN], [COIN @a 40] - program2.OP_FUNDING_SUM, // [COIN], 
[COIN @a 40], [COIN 40] - program2.OP_TAKE, // [COIN], [COIN], [COIN @a 40] - program2.OP_APUSH, 9, 00, // [COIN], [COIN], [COIN @a 40], @c - program2.OP_SEND, // [COIN], [COIN] - program2.OP_APUSH, 04, 00, // [COIN], [COIN], 1 - program2.OP_BUMP, // [COIN], [COIN] - program2.OP_APUSH, 06, 00, // [COIN], [COIN], 2 - program2.OP_FUNDING_ASSEMBLE, // [COIN] - program2.OP_REPAY, // + program.OP_APUSH, 02, 00, // @a + program.OP_APUSH, 01, 00, // @a, [COIN 50] + program.OP_ASSET, // @a, COIN + program.OP_APUSH, 03, 00, // @a, COIN, 0 + program.OP_MONETARY_NEW, // @a, [COIN 0] + program.OP_TAKE_ALL, // [COIN @a ] + program.OP_APUSH, 01, 00, // [COIN @a ], [COIN 50] + program.OP_TAKE, // [COIN @a ], [COIN @a 50] + program.OP_APUSH, 04, 00, // [COIN @a ], [COIN @a 50], 1 + program.OP_BUMP, // [COIN @a 50], [COIN @a ] + program.OP_REPAY, // [COIN @a 50] + program.OP_FUNDING_SUM, // [COIN @a 50], [COIN 50] <- start of DestinationInOrder + program.OP_ASSET, // [COIN @a 50], COIN + program.OP_APUSH, 03, 00, // [COIN @a 50], COIN, 0 + program.OP_MONETARY_NEW, // [COIN @a 50], [COIN 0] + program.OP_APUSH, 04, 00, // [COIN @a 50], [COIN 0], 1 + program.OP_BUMP, // [COIN 0], [COIN @a 50] + program.OP_APUSH, 05, 00, // [COIN 0], [COIN @a 50], [COIN 10] <- start processing max subdestinations + program.OP_TAKE_MAX, // [COIN 0], [COIN 0], [COIN @a 40], [COIN @a 10] + program.OP_APUSH, 06, 00, // [COIN 0], [COIN 0], [COIN @a 40], [COIN @a 10], 2 + program.OP_BUMP, // [COIN 0], [COIN @a 40], [COIN @a 10], [COIN 0] + program.OP_DELETE, // [COIN 0], [COIN @a 40], [COIN @a 10] + program.OP_FUNDING_SUM, // [COIN 0], [COIN @a 40], [COIN @a 10], [COIN 10] + program.OP_TAKE, // [COIN 0], [COIN @a 40], [COIN], [COIN @a 10] + program.OP_APUSH, 07, 00, // [COIN 0], [COIN @a 40], [COIN], [COIN @a 10], @b + program.OP_SEND, // [COIN 0], [COIN @a 40], [COIN] + program.OP_FUNDING_SUM, // [COIN 0], [COIN @a 40], [COIN], [COIN 0] + program.OP_APUSH, 8, 00, // [COIN 0], [COIN @a 40], [COIN], [COIN 0], 3 + program.OP_BUMP, // [COIN @a 40], [COIN], [COIN 0], [COIN 0] + program.OP_MONETARY_ADD, // [COIN @a 40], [COIN], [COIN 0] + program.OP_APUSH, 04, 00, // [COIN @a 40], [COIN], [COIN 0], 1 + program.OP_BUMP, // [COIN @a 40], [COIN 0], [COIN] + program.OP_APUSH, 06, 00, // [COIN @a 40], [COIN 0], [COIN] 2 + program.OP_BUMP, // [COIN 0], [COIN], [COIN @a 40] + program.OP_APUSH, 06, 00, // [COIN 0], [COIN], [COIN @a 40], 2 + program.OP_FUNDING_ASSEMBLE, // [COIN 0], [COIN @a 40] + program.OP_FUNDING_REVERSE, // [COIN 0], [COIN @a 40] <- start processing remaining subdestination + program.OP_APUSH, 04, 00, // [COIN 0], [COIN @a 40], 1 + program.OP_BUMP, // [COIN @a 40], [COIN 0] + program.OP_TAKE, // [COIN @a 40], [COIN] + program.OP_FUNDING_REVERSE, // [COIN @a 40], [COIN] + program.OP_APUSH, 04, 00, // [COIN @a 40], [COIN], 1 + program.OP_BUMP, // [COIN], [COIN @a 40] + program.OP_FUNDING_REVERSE, // [COIN], [COIN @a 40] + program.OP_FUNDING_SUM, // [COIN], [COIN @a 40], [COIN 40] + program.OP_TAKE, // [COIN], [COIN], [COIN @a 40] + program.OP_APUSH, 9, 00, // [COIN], [COIN], [COIN @a 40], @c + program.OP_SEND, // [COIN], [COIN] + program.OP_APUSH, 04, 00, // [COIN], [COIN], 1 + program.OP_BUMP, // [COIN], [COIN] + program.OP_APUSH, 06, 00, // [COIN], [COIN], 2 + program.OP_FUNDING_ASSEMBLE, // [COIN] + program.OP_REPAY, // }, - Resources: []program2.Resource{ - program2.Constant{Inner: machine.Asset("COIN")}, - program2.Monetary{ + Resources: []program.Resource{ + program.Constant{Inner: machine.Asset("COIN")}, + 
program.Monetary{ Asset: 0, Amount: machine.NewMonetaryInt(50), }, - program2.Constant{Inner: machine.AccountAddress("a")}, - program2.Constant{Inner: machine.NewMonetaryInt(0)}, - program2.Constant{Inner: machine.NewMonetaryInt(1)}, - program2.Monetary{ + program.Constant{Inner: machine.AccountAddress("a")}, + program.Constant{Inner: machine.NewMonetaryInt(0)}, + program.Constant{Inner: machine.NewMonetaryInt(1)}, + program.Monetary{ Asset: 0, Amount: machine.NewMonetaryInt(10), }, - program2.Constant{Inner: machine.NewMonetaryInt(2)}, - program2.Constant{Inner: machine.AccountAddress("b")}, - program2.Constant{Inner: machine.NewMonetaryInt(3)}, - program2.Constant{Inner: machine.AccountAddress("c")}, + program.Constant{Inner: machine.NewMonetaryInt(2)}, + program.Constant{Inner: machine.AccountAddress("b")}, + program.Constant{Inner: machine.NewMonetaryInt(3)}, + program.Constant{Inner: machine.AccountAddress("c")}, }, }, }) @@ -477,23 +477,23 @@ func TestAllocationPercentages(t *testing.T) { } )`, Expected: CaseResult{ - Resources: []program2.Resource{ - program2.Constant{Inner: machine.Asset("EUR/2")}, - program2.Monetary{ + Resources: []program.Resource{ + program.Constant{Inner: machine.Asset("EUR/2")}, + program.Monetary{ Asset: 0, Amount: machine.NewMonetaryInt(43), }, - program2.Constant{Inner: machine.AccountAddress("foo")}, - program2.Constant{Inner: machine.NewMonetaryInt(0)}, - program2.Constant{Inner: machine.NewMonetaryInt(1)}, - program2.Constant{Inner: machine.Portion{Specific: big.NewRat(1, 2)}}, - program2.Constant{Inner: machine.Portion{Specific: big.NewRat(3, 8)}}, - program2.Constant{Inner: machine.Portion{Specific: big.NewRat(1, 8)}}, - program2.Constant{Inner: machine.NewMonetaryInt(3)}, - program2.Constant{Inner: machine.AccountAddress("bar")}, - program2.Constant{Inner: machine.NewMonetaryInt(2)}, - program2.Constant{Inner: machine.AccountAddress("baz")}, - program2.Constant{Inner: machine.AccountAddress("qux")}, + program.Constant{Inner: machine.AccountAddress("foo")}, + program.Constant{Inner: machine.NewMonetaryInt(0)}, + program.Constant{Inner: machine.NewMonetaryInt(1)}, + program.Constant{Inner: machine.Portion{Specific: big.NewRat(1, 2)}}, + program.Constant{Inner: machine.Portion{Specific: big.NewRat(3, 8)}}, + program.Constant{Inner: machine.Portion{Specific: big.NewRat(1, 8)}}, + program.Constant{Inner: machine.NewMonetaryInt(3)}, + program.Constant{Inner: machine.AccountAddress("bar")}, + program.Constant{Inner: machine.NewMonetaryInt(2)}, + program.Constant{Inner: machine.AccountAddress("baz")}, + program.Constant{Inner: machine.AccountAddress("qux")}, }, }, }) @@ -511,32 +511,32 @@ func TestSend(t *testing.T) { Case: script, Expected: CaseResult{ Instructions: []byte{ - program2.OP_APUSH, 02, 00, // @alice - program2.OP_APUSH, 01, 00, // @alice, [EUR/2 99] - program2.OP_ASSET, // @alice, EUR/2 - program2.OP_APUSH, 03, 00, // @alice, EUR/2, 0 - program2.OP_MONETARY_NEW, // @alice, [EUR/2 0] - program2.OP_TAKE_ALL, // [EUR/2 @alice ] - program2.OP_APUSH, 01, 00, // [EUR/2 @alice ], [EUR/2 99] - program2.OP_TAKE, // [EUR/2 @alice ], [EUR/2 @alice 99] - program2.OP_APUSH, 04, 00, // [EUR/2 @alice ], [EUR/2 @alice 99], 1 - program2.OP_BUMP, // [EUR/2 @alice 99], [EUR/2 @alice ] - program2.OP_REPAY, // [EUR/2 @alice 99] - program2.OP_FUNDING_SUM, // [EUR/2 @alice 99], [EUR/2 99] - program2.OP_TAKE, // [EUR/2], [EUR/2 @alice 99] - program2.OP_APUSH, 05, 00, // [EUR/2], [EUR/2 @alice 99], @bob - program2.OP_SEND, // [EUR/2] - program2.OP_REPAY, // - }, 
Resources: []program2.Resource{ - program2.Constant{Inner: machine.Asset("EUR/2")}, - program2.Monetary{ + program.OP_APUSH, 02, 00, // @alice + program.OP_APUSH, 01, 00, // @alice, [EUR/2 99] + program.OP_ASSET, // @alice, EUR/2 + program.OP_APUSH, 03, 00, // @alice, EUR/2, 0 + program.OP_MONETARY_NEW, // @alice, [EUR/2 0] + program.OP_TAKE_ALL, // [EUR/2 @alice ] + program.OP_APUSH, 01, 00, // [EUR/2 @alice ], [EUR/2 99] + program.OP_TAKE, // [EUR/2 @alice ], [EUR/2 @alice 99] + program.OP_APUSH, 04, 00, // [EUR/2 @alice ], [EUR/2 @alice 99], 1 + program.OP_BUMP, // [EUR/2 @alice 99], [EUR/2 @alice ] + program.OP_REPAY, // [EUR/2 @alice 99] + program.OP_FUNDING_SUM, // [EUR/2 @alice 99], [EUR/2 99] + program.OP_TAKE, // [EUR/2], [EUR/2 @alice 99] + program.OP_APUSH, 05, 00, // [EUR/2], [EUR/2 @alice 99], @bob + program.OP_SEND, // [EUR/2] + program.OP_REPAY, // + }, Resources: []program.Resource{ + program.Constant{Inner: machine.Asset("EUR/2")}, + program.Monetary{ Asset: 0, Amount: machine.NewMonetaryInt(99), }, - program2.Constant{Inner: alice}, - program2.Constant{Inner: machine.NewMonetaryInt(0)}, - program2.Constant{Inner: machine.NewMonetaryInt(1)}, - program2.Constant{Inner: bob}}, + program.Constant{Inner: alice}, + program.Constant{Inner: machine.NewMonetaryInt(0)}, + program.Constant{Inner: machine.NewMonetaryInt(1)}, + program.Constant{Inner: bob}}, }, }) } @@ -549,21 +549,21 @@ func TestSendAll(t *testing.T) { )`, Expected: CaseResult{ Instructions: []byte{ - program2.OP_APUSH, 01, 00, // @alice - program2.OP_APUSH, 00, 00, // @alice, EUR/2 - program2.OP_APUSH, 02, 00, // @alice, EUR/2, 0 - program2.OP_MONETARY_NEW, // @alice, [EUR/2 0] - program2.OP_TAKE_ALL, // [EUR/2 @alice ] - program2.OP_FUNDING_SUM, // [EUR/2 @alice ], [EUR/2 ] - program2.OP_TAKE, // [EUR/2], [EUR/2 @alice ] - program2.OP_APUSH, 03, 00, // [EUR/2], [EUR/2 @alice ], @b - program2.OP_SEND, // [EUR/2] - program2.OP_REPAY, // - }, Resources: []program2.Resource{ - program2.Constant{Inner: machine.Asset("EUR/2")}, - program2.Constant{Inner: machine.AccountAddress("alice")}, - program2.Constant{Inner: machine.NewMonetaryInt(0)}, - program2.Constant{Inner: machine.AccountAddress("bob")}}, + program.OP_APUSH, 01, 00, // @alice + program.OP_APUSH, 00, 00, // @alice, EUR/2 + program.OP_APUSH, 02, 00, // @alice, EUR/2, 0 + program.OP_MONETARY_NEW, // @alice, [EUR/2 0] + program.OP_TAKE_ALL, // [EUR/2 @alice ] + program.OP_FUNDING_SUM, // [EUR/2 @alice ], [EUR/2 ] + program.OP_TAKE, // [EUR/2], [EUR/2 @alice ] + program.OP_APUSH, 03, 00, // [EUR/2], [EUR/2 @alice ], @b + program.OP_SEND, // [EUR/2] + program.OP_REPAY, // + }, Resources: []program.Resource{ + program.Constant{Inner: machine.Asset("EUR/2")}, + program.Constant{Inner: machine.AccountAddress("alice")}, + program.Constant{Inner: machine.NewMonetaryInt(0)}, + program.Constant{Inner: machine.AccountAddress("bob")}}, }, }) } @@ -584,28 +584,28 @@ func TestMetadata(t *testing.T) { } )`, Expected: CaseResult{ - Resources: []program2.Resource{ - program2.Variable{Typ: machine.TypeAccount, Name: "sale"}, - program2.VariableAccountMetadata{ + Resources: []program.Resource{ + program.Variable{Typ: machine.TypeAccount, Name: "sale"}, + program.VariableAccountMetadata{ Typ: machine.TypeAccount, Account: machine.NewAddress(0), Key: "seller", }, - program2.VariableAccountMetadata{ + program.VariableAccountMetadata{ Typ: machine.TypePortion, Account: machine.NewAddress(1), Key: "commission", }, - program2.Constant{Inner: machine.Asset("EUR/2")}, - program2.Monetary{ 
+ program.Constant{Inner: machine.Asset("EUR/2")}, + program.Monetary{ Asset: 3, Amount: machine.NewMonetaryInt(53), }, - program2.Constant{Inner: machine.NewMonetaryInt(0)}, - program2.Constant{Inner: machine.NewMonetaryInt(1)}, - program2.Constant{Inner: machine.NewPortionRemaining()}, - program2.Constant{Inner: machine.NewMonetaryInt(2)}, - program2.Constant{Inner: machine.AccountAddress("platform")}, + program.Constant{Inner: machine.NewMonetaryInt(0)}, + program.Constant{Inner: machine.NewMonetaryInt(1)}, + program.Constant{Inner: machine.NewPortionRemaining()}, + program.Constant{Inner: machine.NewMonetaryInt(2)}, + program.Constant{Inner: machine.AccountAddress("platform")}, }, }, }) @@ -977,52 +977,52 @@ func TestSetAccountMeta(t *testing.T) { `, Expected: CaseResult{ Instructions: []byte{ - program2.OP_APUSH, 00, 00, - program2.OP_APUSH, 01, 00, - program2.OP_APUSH, 02, 00, - program2.OP_ACCOUNT_META, - program2.OP_APUSH, 03, 00, - program2.OP_APUSH, 04, 00, - program2.OP_APUSH, 02, 00, - program2.OP_ACCOUNT_META, - program2.OP_APUSH, 05, 00, - program2.OP_APUSH, 06, 00, - program2.OP_APUSH, 02, 00, - program2.OP_ACCOUNT_META, - program2.OP_APUSH, 7, 00, - program2.OP_APUSH, 8, 00, - program2.OP_APUSH, 02, 00, - program2.OP_ACCOUNT_META, - program2.OP_APUSH, 10, 00, - program2.OP_APUSH, 11, 00, - program2.OP_APUSH, 02, 00, - program2.OP_ACCOUNT_META, - program2.OP_APUSH, 12, 00, - program2.OP_APUSH, 13, 00, - program2.OP_APUSH, 02, 00, - program2.OP_ACCOUNT_META, + program.OP_APUSH, 00, 00, + program.OP_APUSH, 01, 00, + program.OP_APUSH, 02, 00, + program.OP_ACCOUNT_META, + program.OP_APUSH, 03, 00, + program.OP_APUSH, 04, 00, + program.OP_APUSH, 02, 00, + program.OP_ACCOUNT_META, + program.OP_APUSH, 05, 00, + program.OP_APUSH, 06, 00, + program.OP_APUSH, 02, 00, + program.OP_ACCOUNT_META, + program.OP_APUSH, 7, 00, + program.OP_APUSH, 8, 00, + program.OP_APUSH, 02, 00, + program.OP_ACCOUNT_META, + program.OP_APUSH, 10, 00, + program.OP_APUSH, 11, 00, + program.OP_APUSH, 02, 00, + program.OP_ACCOUNT_META, + program.OP_APUSH, 12, 00, + program.OP_APUSH, 13, 00, + program.OP_APUSH, 02, 00, + program.OP_ACCOUNT_META, }, - Resources: []program2.Resource{ - program2.Constant{Inner: machine.AccountAddress("platform")}, - program2.Constant{Inner: machine.String("aaa")}, - program2.Constant{Inner: machine.AccountAddress("alice")}, - program2.Constant{Inner: machine.Asset("GEM")}, - program2.Constant{Inner: machine.String("bbb")}, - program2.Constant{Inner: machine.NewNumber(42)}, - program2.Constant{Inner: machine.String("ccc")}, - program2.Constant{Inner: machine.String("test")}, - program2.Constant{Inner: machine.String("ddd")}, - program2.Constant{Inner: machine.Asset("COIN")}, - program2.Monetary{ + Resources: []program.Resource{ + program.Constant{Inner: machine.AccountAddress("platform")}, + program.Constant{Inner: machine.String("aaa")}, + program.Constant{Inner: machine.AccountAddress("alice")}, + program.Constant{Inner: machine.Asset("GEM")}, + program.Constant{Inner: machine.String("bbb")}, + program.Constant{Inner: machine.NewNumber(42)}, + program.Constant{Inner: machine.String("ccc")}, + program.Constant{Inner: machine.String("test")}, + program.Constant{Inner: machine.String("ddd")}, + program.Constant{Inner: machine.Asset("COIN")}, + program.Monetary{ Asset: 9, Amount: machine.NewMonetaryInt(30), }, - program2.Constant{Inner: machine.String("eee")}, - program2.Constant{Inner: machine.Portion{ + program.Constant{Inner: machine.String("eee")}, + program.Constant{Inner: 
machine.Portion{ Remaining: false, Specific: big.NewRat(15, 100), }}, - program2.Constant{Inner: machine.String("fff")}, + program.Constant{Inner: machine.String("fff")}, }, }, }) @@ -1040,49 +1040,49 @@ func TestSetAccountMeta(t *testing.T) { set_account_meta($acc, "fees", 1%)`, Expected: CaseResult{ Instructions: []byte{ - program2.OP_APUSH, 03, 00, - program2.OP_APUSH, 02, 00, - program2.OP_ASSET, - program2.OP_APUSH, 04, 00, - program2.OP_MONETARY_NEW, - program2.OP_TAKE_ALWAYS, - program2.OP_APUSH, 02, 00, - program2.OP_TAKE_MAX, - program2.OP_APUSH, 05, 00, - program2.OP_BUMP, - program2.OP_REPAY, - program2.OP_APUSH, 03, 00, - program2.OP_APUSH, 06, 00, - program2.OP_BUMP, - program2.OP_TAKE_ALWAYS, - program2.OP_APUSH, 06, 00, - program2.OP_FUNDING_ASSEMBLE, - program2.OP_FUNDING_SUM, - program2.OP_TAKE, - program2.OP_APUSH, 00, 00, - program2.OP_SEND, - program2.OP_REPAY, - program2.OP_APUSH, 07, 00, - program2.OP_APUSH, 8, 00, - program2.OP_APUSH, 00, 00, - program2.OP_ACCOUNT_META, + program.OP_APUSH, 03, 00, + program.OP_APUSH, 02, 00, + program.OP_ASSET, + program.OP_APUSH, 04, 00, + program.OP_MONETARY_NEW, + program.OP_TAKE_ALWAYS, + program.OP_APUSH, 02, 00, + program.OP_TAKE_MAX, + program.OP_APUSH, 05, 00, + program.OP_BUMP, + program.OP_REPAY, + program.OP_APUSH, 03, 00, + program.OP_APUSH, 06, 00, + program.OP_BUMP, + program.OP_TAKE_ALWAYS, + program.OP_APUSH, 06, 00, + program.OP_FUNDING_ASSEMBLE, + program.OP_FUNDING_SUM, + program.OP_TAKE, + program.OP_APUSH, 00, 00, + program.OP_SEND, + program.OP_REPAY, + program.OP_APUSH, 07, 00, + program.OP_APUSH, 8, 00, + program.OP_APUSH, 00, 00, + program.OP_ACCOUNT_META, }, - Resources: []program2.Resource{ - program2.Variable{Typ: machine.TypeAccount, Name: "acc"}, - program2.Constant{Inner: machine.Asset("EUR/2")}, - program2.Monetary{ + Resources: []program.Resource{ + program.Variable{Typ: machine.TypeAccount, Name: "acc"}, + program.Constant{Inner: machine.Asset("EUR/2")}, + program.Monetary{ Asset: 1, Amount: machine.NewMonetaryInt(100), }, - program2.Constant{Inner: machine.AccountAddress("world")}, - program2.Constant{Inner: machine.NewMonetaryInt(0)}, - program2.Constant{Inner: machine.NewMonetaryInt(1)}, - program2.Constant{Inner: machine.NewMonetaryInt(2)}, - program2.Constant{Inner: machine.Portion{ + program.Constant{Inner: machine.AccountAddress("world")}, + program.Constant{Inner: machine.NewMonetaryInt(0)}, + program.Constant{Inner: machine.NewMonetaryInt(1)}, + program.Constant{Inner: machine.NewMonetaryInt(2)}, + program.Constant{Inner: machine.Portion{ Remaining: false, Specific: big.NewRat(1, 100), }}, - program2.Constant{Inner: machine.String("fees")}, + program.Constant{Inner: machine.String("fees")}, }, }, }) @@ -1131,30 +1131,30 @@ func TestVariableBalance(t *testing.T) { )`, Expected: CaseResult{ Instructions: []byte{ - program2.OP_APUSH, 00, 00, - program2.OP_APUSH, 02, 00, - program2.OP_ASSET, - program2.OP_APUSH, 03, 00, - program2.OP_MONETARY_NEW, - program2.OP_TAKE_ALL, - program2.OP_APUSH, 02, 00, - program2.OP_TAKE, - program2.OP_APUSH, 04, 00, - program2.OP_BUMP, - program2.OP_REPAY, - program2.OP_FUNDING_SUM, - program2.OP_TAKE, - program2.OP_APUSH, 05, 00, - program2.OP_SEND, - program2.OP_REPAY, + program.OP_APUSH, 00, 00, + program.OP_APUSH, 02, 00, + program.OP_ASSET, + program.OP_APUSH, 03, 00, + program.OP_MONETARY_NEW, + program.OP_TAKE_ALL, + program.OP_APUSH, 02, 00, + program.OP_TAKE, + program.OP_APUSH, 04, 00, + program.OP_BUMP, + program.OP_REPAY, + program.OP_FUNDING_SUM, + 
program.OP_TAKE, + program.OP_APUSH, 05, 00, + program.OP_SEND, + program.OP_REPAY, }, - Resources: []program2.Resource{ - program2.Constant{Inner: machine.AccountAddress("alice")}, - program2.Constant{Inner: machine.Asset("COIN")}, - program2.VariableAccountBalance{Account: 0, Asset: 1}, - program2.Constant{Inner: machine.NewMonetaryInt(0)}, - program2.Constant{Inner: machine.NewMonetaryInt(1)}, - program2.Constant{Inner: machine.AccountAddress("bob")}, + Resources: []program.Resource{ + program.Constant{Inner: machine.AccountAddress("alice")}, + program.Constant{Inner: machine.Asset("COIN")}, + program.VariableAccountBalance{Account: 0, Asset: 1}, + program.Constant{Inner: machine.NewMonetaryInt(0)}, + program.Constant{Inner: machine.NewMonetaryInt(1)}, + program.Constant{Inner: machine.AccountAddress("bob")}, }, }, }) @@ -1172,38 +1172,38 @@ func TestVariableBalance(t *testing.T) { )`, Expected: CaseResult{ Instructions: []byte{ - program2.OP_APUSH, 03, 00, - program2.OP_APUSH, 02, 00, - program2.OP_ASSET, - program2.OP_APUSH, 04, 00, - program2.OP_MONETARY_NEW, - program2.OP_TAKE_ALWAYS, - program2.OP_APUSH, 02, 00, - program2.OP_TAKE_MAX, - program2.OP_APUSH, 05, 00, - program2.OP_BUMP, - program2.OP_REPAY, - program2.OP_APUSH, 03, 00, - program2.OP_APUSH, 06, 00, - program2.OP_BUMP, - program2.OP_TAKE_ALWAYS, - program2.OP_APUSH, 06, 00, - program2.OP_FUNDING_ASSEMBLE, - program2.OP_FUNDING_SUM, - program2.OP_TAKE, - program2.OP_APUSH, 07, 00, - program2.OP_SEND, - program2.OP_REPAY, + program.OP_APUSH, 03, 00, + program.OP_APUSH, 02, 00, + program.OP_ASSET, + program.OP_APUSH, 04, 00, + program.OP_MONETARY_NEW, + program.OP_TAKE_ALWAYS, + program.OP_APUSH, 02, 00, + program.OP_TAKE_MAX, + program.OP_APUSH, 05, 00, + program.OP_BUMP, + program.OP_REPAY, + program.OP_APUSH, 03, 00, + program.OP_APUSH, 06, 00, + program.OP_BUMP, + program.OP_TAKE_ALWAYS, + program.OP_APUSH, 06, 00, + program.OP_FUNDING_ASSEMBLE, + program.OP_FUNDING_SUM, + program.OP_TAKE, + program.OP_APUSH, 07, 00, + program.OP_SEND, + program.OP_REPAY, }, - Resources: []program2.Resource{ - program2.Variable{Typ: machine.TypeAccount, Name: "acc"}, - program2.Constant{Inner: machine.Asset("COIN")}, - program2.VariableAccountBalance{Account: 0, Asset: 1}, - program2.Constant{Inner: machine.AccountAddress("world")}, - program2.Constant{Inner: machine.NewMonetaryInt(0)}, - program2.Constant{Inner: machine.NewMonetaryInt(1)}, - program2.Constant{Inner: machine.NewMonetaryInt(2)}, - program2.Constant{Inner: machine.AccountAddress("alice")}, + Resources: []program.Resource{ + program.Variable{Typ: machine.TypeAccount, Name: "acc"}, + program.Constant{Inner: machine.Asset("COIN")}, + program.VariableAccountBalance{Account: 0, Asset: 1}, + program.Constant{Inner: machine.AccountAddress("world")}, + program.Constant{Inner: machine.NewMonetaryInt(0)}, + program.Constant{Inner: machine.NewMonetaryInt(1)}, + program.Constant{Inner: machine.NewMonetaryInt(2)}, + program.Constant{Inner: machine.AccountAddress("alice")}, }, }, }) @@ -1322,64 +1322,64 @@ func TestVariableAsset(t *testing.T) { Case: script, Expected: CaseResult{ Instructions: []byte{ - program2.OP_APUSH, 01, 00, - program2.OP_APUSH, 00, 00, - program2.OP_APUSH, 03, 00, - program2.OP_MONETARY_NEW, - program2.OP_TAKE_ALL, - program2.OP_FUNDING_SUM, - program2.OP_TAKE, - program2.OP_APUSH, 04, 00, - program2.OP_SEND, - program2.OP_REPAY, - program2.OP_APUSH, 04, 00, - program2.OP_APUSH, 05, 00, - program2.OP_ASSET, - program2.OP_APUSH, 03, 00, - 
program2.OP_MONETARY_NEW, - program2.OP_TAKE_ALL, - program2.OP_APUSH, 05, 00, - program2.OP_TAKE, - program2.OP_APUSH, 06, 00, - program2.OP_BUMP, - program2.OP_REPAY, - program2.OP_FUNDING_SUM, - program2.OP_TAKE, - program2.OP_APUSH, 01, 00, - program2.OP_SEND, - program2.OP_REPAY, - program2.OP_APUSH, 01, 00, - program2.OP_APUSH, 02, 00, - program2.OP_ASSET, - program2.OP_APUSH, 03, 00, - program2.OP_MONETARY_NEW, - program2.OP_TAKE_ALL, - program2.OP_APUSH, 02, 00, - program2.OP_TAKE, - program2.OP_APUSH, 06, 00, - program2.OP_BUMP, - program2.OP_REPAY, - program2.OP_FUNDING_SUM, - program2.OP_TAKE, - program2.OP_APUSH, 04, 00, - program2.OP_SEND, - program2.OP_REPAY, + program.OP_APUSH, 01, 00, + program.OP_APUSH, 00, 00, + program.OP_APUSH, 03, 00, + program.OP_MONETARY_NEW, + program.OP_TAKE_ALL, + program.OP_FUNDING_SUM, + program.OP_TAKE, + program.OP_APUSH, 04, 00, + program.OP_SEND, + program.OP_REPAY, + program.OP_APUSH, 04, 00, + program.OP_APUSH, 05, 00, + program.OP_ASSET, + program.OP_APUSH, 03, 00, + program.OP_MONETARY_NEW, + program.OP_TAKE_ALL, + program.OP_APUSH, 05, 00, + program.OP_TAKE, + program.OP_APUSH, 06, 00, + program.OP_BUMP, + program.OP_REPAY, + program.OP_FUNDING_SUM, + program.OP_TAKE, + program.OP_APUSH, 01, 00, + program.OP_SEND, + program.OP_REPAY, + program.OP_APUSH, 01, 00, + program.OP_APUSH, 02, 00, + program.OP_ASSET, + program.OP_APUSH, 03, 00, + program.OP_MONETARY_NEW, + program.OP_TAKE_ALL, + program.OP_APUSH, 02, 00, + program.OP_TAKE, + program.OP_APUSH, 06, 00, + program.OP_BUMP, + program.OP_REPAY, + program.OP_FUNDING_SUM, + program.OP_TAKE, + program.OP_APUSH, 04, 00, + program.OP_SEND, + program.OP_REPAY, }, - Resources: []program2.Resource{ - program2.Variable{Typ: machine.TypeAsset, Name: "ass"}, - program2.Constant{Inner: machine.AccountAddress("alice")}, - program2.VariableAccountBalance{ + Resources: []program.Resource{ + program.Variable{Typ: machine.TypeAsset, Name: "ass"}, + program.Constant{Inner: machine.AccountAddress("alice")}, + program.VariableAccountBalance{ Name: "bal", Account: 1, Asset: 0, }, - program2.Constant{Inner: machine.NewMonetaryInt(0)}, - program2.Constant{Inner: machine.AccountAddress("bob")}, - program2.Monetary{ + program.Constant{Inner: machine.NewMonetaryInt(0)}, + program.Constant{Inner: machine.AccountAddress("bob")}, + program.Monetary{ Asset: 0, Amount: machine.NewMonetaryInt(1), }, - program2.Constant{Inner: machine.NewMonetaryInt(1)}, + program.Constant{Inner: machine.NewMonetaryInt(1)}, }, }, }) @@ -1391,17 +1391,17 @@ func TestPrint(t *testing.T) { Case: script, Expected: CaseResult{ Instructions: []byte{ - program2.OP_APUSH, 00, 00, - program2.OP_APUSH, 01, 00, - program2.OP_IADD, - program2.OP_APUSH, 02, 00, - program2.OP_IADD, - program2.OP_PRINT, + program.OP_APUSH, 00, 00, + program.OP_APUSH, 01, 00, + program.OP_IADD, + program.OP_APUSH, 02, 00, + program.OP_IADD, + program.OP_PRINT, }, - Resources: []program2.Resource{ - program2.Constant{Inner: machine.NewMonetaryInt(1)}, - program2.Constant{Inner: machine.NewMonetaryInt(2)}, - program2.Constant{Inner: machine.NewMonetaryInt(3)}, + Resources: []program.Resource{ + program.Constant{Inner: machine.NewMonetaryInt(1)}, + program.Constant{Inner: machine.NewMonetaryInt(2)}, + program.Constant{Inner: machine.NewMonetaryInt(3)}, }, }, }) @@ -1423,55 +1423,55 @@ func TestSendWithArithmetic(t *testing.T) { Case: script, Expected: CaseResult{ Instructions: []byte{ - program2.OP_APUSH, 06, 00, - program2.OP_APUSH, 03, 00, - program2.OP_ASSET, - 
program2.OP_APUSH, 07, 00, - program2.OP_MONETARY_NEW, - program2.OP_TAKE_ALL, - program2.OP_APUSH, 03, 00, - program2.OP_APUSH, 01, 00, - program2.OP_MONETARY_ADD, - program2.OP_APUSH, 04, 00, - program2.OP_MONETARY_ADD, - program2.OP_APUSH, 05, 00, - program2.OP_MONETARY_SUB, - program2.OP_TAKE, - program2.OP_APUSH, 8, 00, - program2.OP_BUMP, - program2.OP_REPAY, - program2.OP_FUNDING_SUM, - program2.OP_TAKE, - program2.OP_APUSH, 9, 00, - program2.OP_SEND, - program2.OP_REPAY, + program.OP_APUSH, 06, 00, + program.OP_APUSH, 03, 00, + program.OP_ASSET, + program.OP_APUSH, 07, 00, + program.OP_MONETARY_NEW, + program.OP_TAKE_ALL, + program.OP_APUSH, 03, 00, + program.OP_APUSH, 01, 00, + program.OP_MONETARY_ADD, + program.OP_APUSH, 04, 00, + program.OP_MONETARY_ADD, + program.OP_APUSH, 05, 00, + program.OP_MONETARY_SUB, + program.OP_TAKE, + program.OP_APUSH, 8, 00, + program.OP_BUMP, + program.OP_REPAY, + program.OP_FUNDING_SUM, + program.OP_TAKE, + program.OP_APUSH, 9, 00, + program.OP_SEND, + program.OP_REPAY, }, - Resources: []program2.Resource{ - program2.Variable{ + Resources: []program.Resource{ + program.Variable{ Typ: machine.TypeAsset, Name: "ass", }, - program2.Variable{ + program.Variable{ Typ: machine.TypeMonetary, Name: "mon", }, - program2.Constant{Inner: machine.Asset("EUR")}, - program2.Monetary{ + program.Constant{Inner: machine.Asset("EUR")}, + program.Monetary{ Asset: 2, Amount: machine.NewMonetaryInt(1), }, - program2.Monetary{ + program.Monetary{ Asset: 0, Amount: machine.NewMonetaryInt(3), }, - program2.Monetary{ + program.Monetary{ Asset: 2, Amount: machine.NewMonetaryInt(4), }, - program2.Constant{Inner: machine.AccountAddress("a")}, - program2.Constant{Inner: machine.NewMonetaryInt(0)}, - program2.Constant{Inner: machine.NewMonetaryInt(1)}, - program2.Constant{Inner: machine.AccountAddress("b")}, + program.Constant{Inner: machine.AccountAddress("a")}, + program.Constant{Inner: machine.NewMonetaryInt(0)}, + program.Constant{Inner: machine.NewMonetaryInt(1)}, + program.Constant{Inner: machine.AccountAddress("b")}, }, }, }) @@ -1487,7 +1487,7 @@ func TestSendWithArithmetic(t *testing.T) { Case: script, Expected: CaseResult{ Instructions: []byte{}, - Resources: []program2.Resource{}, + Resources: []program.Resource{}, Error: "tried to do an arithmetic operation with incompatible left and right-hand side operand types: monetary and number", }, }) @@ -1507,7 +1507,7 @@ func TestSendWithArithmetic(t *testing.T) { Case: script, Expected: CaseResult{ Instructions: []byte{}, - Resources: []program2.Resource{}, + Resources: []program.Resource{}, Error: "tried to do an arithmetic operation with incompatible left and right-hand side operand types: monetary and number", }, }) @@ -1526,40 +1526,40 @@ func TestSaveFromAccount(t *testing.T) { )`, Expected: CaseResult{ Instructions: []byte{ - program2.OP_APUSH, 01, 00, - program2.OP_APUSH, 02, 00, - program2.OP_SAVE, - program2.OP_APUSH, 02, 00, - program2.OP_APUSH, 03, 00, - program2.OP_ASSET, - program2.OP_APUSH, 04, 00, - program2.OP_MONETARY_NEW, - program2.OP_TAKE_ALL, - program2.OP_APUSH, 03, 00, - program2.OP_TAKE, - program2.OP_APUSH, 05, 00, - program2.OP_BUMP, - program2.OP_REPAY, - program2.OP_FUNDING_SUM, - program2.OP_TAKE, - program2.OP_APUSH, 06, 00, - program2.OP_SEND, - program2.OP_REPAY, + program.OP_APUSH, 01, 00, + program.OP_APUSH, 02, 00, + program.OP_SAVE, + program.OP_APUSH, 02, 00, + program.OP_APUSH, 03, 00, + program.OP_ASSET, + program.OP_APUSH, 04, 00, + program.OP_MONETARY_NEW, + program.OP_TAKE_ALL, + 
program.OP_APUSH, 03, 00, + program.OP_TAKE, + program.OP_APUSH, 05, 00, + program.OP_BUMP, + program.OP_REPAY, + program.OP_FUNDING_SUM, + program.OP_TAKE, + program.OP_APUSH, 06, 00, + program.OP_SEND, + program.OP_REPAY, }, - Resources: []program2.Resource{ - program2.Constant{Inner: machine.Asset("EUR")}, - program2.Monetary{ + Resources: []program.Resource{ + program.Constant{Inner: machine.Asset("EUR")}, + program.Monetary{ Asset: 0, Amount: machine.NewMonetaryInt(10), }, - program2.Constant{Inner: machine.AccountAddress("alice")}, - program2.Monetary{ + program.Constant{Inner: machine.AccountAddress("alice")}, + program.Monetary{ Asset: 0, Amount: machine.NewMonetaryInt(20), }, - program2.Constant{Inner: machine.NewMonetaryInt(0)}, - program2.Constant{Inner: machine.NewMonetaryInt(1)}, - program2.Constant{Inner: machine.AccountAddress("bob")}, + program.Constant{Inner: machine.NewMonetaryInt(0)}, + program.Constant{Inner: machine.NewMonetaryInt(1)}, + program.Constant{Inner: machine.AccountAddress("bob")}, }, }, }) @@ -1576,36 +1576,36 @@ func TestSaveFromAccount(t *testing.T) { )`, Expected: CaseResult{ Instructions: []byte{ - program2.OP_APUSH, 00, 00, - program2.OP_APUSH, 01, 00, - program2.OP_SAVE, - program2.OP_APUSH, 01, 00, - program2.OP_APUSH, 02, 00, - program2.OP_ASSET, - program2.OP_APUSH, 03, 00, - program2.OP_MONETARY_NEW, - program2.OP_TAKE_ALL, - program2.OP_APUSH, 02, 00, - program2.OP_TAKE, - program2.OP_APUSH, 04, 00, - program2.OP_BUMP, - program2.OP_REPAY, - program2.OP_FUNDING_SUM, - program2.OP_TAKE, - program2.OP_APUSH, 05, 00, - program2.OP_SEND, - program2.OP_REPAY, + program.OP_APUSH, 00, 00, + program.OP_APUSH, 01, 00, + program.OP_SAVE, + program.OP_APUSH, 01, 00, + program.OP_APUSH, 02, 00, + program.OP_ASSET, + program.OP_APUSH, 03, 00, + program.OP_MONETARY_NEW, + program.OP_TAKE_ALL, + program.OP_APUSH, 02, 00, + program.OP_TAKE, + program.OP_APUSH, 04, 00, + program.OP_BUMP, + program.OP_REPAY, + program.OP_FUNDING_SUM, + program.OP_TAKE, + program.OP_APUSH, 05, 00, + program.OP_SEND, + program.OP_REPAY, }, - Resources: []program2.Resource{ - program2.Constant{Inner: machine.Asset("EUR")}, - program2.Constant{Inner: machine.AccountAddress("alice")}, - program2.Monetary{ + Resources: []program.Resource{ + program.Constant{Inner: machine.Asset("EUR")}, + program.Constant{Inner: machine.AccountAddress("alice")}, + program.Monetary{ Asset: 0, Amount: machine.NewMonetaryInt(20), }, - program2.Constant{Inner: machine.NewMonetaryInt(0)}, - program2.Constant{Inner: machine.NewMonetaryInt(1)}, - program2.Constant{Inner: machine.AccountAddress("bob")}, + program.Constant{Inner: machine.NewMonetaryInt(0)}, + program.Constant{Inner: machine.NewMonetaryInt(1)}, + program.Constant{Inner: machine.AccountAddress("bob")}, }, }, }) @@ -1626,40 +1626,40 @@ func TestSaveFromAccount(t *testing.T) { )`, Expected: CaseResult{ Instructions: []byte{ - program2.OP_APUSH, 01, 00, - program2.OP_APUSH, 02, 00, - program2.OP_SAVE, - program2.OP_APUSH, 02, 00, - program2.OP_APUSH, 03, 00, - program2.OP_ASSET, - program2.OP_APUSH, 04, 00, - program2.OP_MONETARY_NEW, - program2.OP_TAKE_ALL, - program2.OP_APUSH, 03, 00, - program2.OP_TAKE, - program2.OP_APUSH, 05, 00, - program2.OP_BUMP, - program2.OP_REPAY, - program2.OP_FUNDING_SUM, - program2.OP_TAKE, - program2.OP_APUSH, 06, 00, - program2.OP_SEND, - program2.OP_REPAY, + program.OP_APUSH, 01, 00, + program.OP_APUSH, 02, 00, + program.OP_SAVE, + program.OP_APUSH, 02, 00, + program.OP_APUSH, 03, 00, + program.OP_ASSET, + 
program.OP_APUSH, 04, 00, + program.OP_MONETARY_NEW, + program.OP_TAKE_ALL, + program.OP_APUSH, 03, 00, + program.OP_TAKE, + program.OP_APUSH, 05, 00, + program.OP_BUMP, + program.OP_REPAY, + program.OP_FUNDING_SUM, + program.OP_TAKE, + program.OP_APUSH, 06, 00, + program.OP_SEND, + program.OP_REPAY, }, - Resources: []program2.Resource{ - program2.Variable{Typ: machine.TypeAsset, Name: "ass"}, - program2.Monetary{ + Resources: []program.Resource{ + program.Variable{Typ: machine.TypeAsset, Name: "ass"}, + program.Monetary{ Asset: 0, Amount: machine.NewMonetaryInt(10), }, - program2.Constant{Inner: machine.AccountAddress("alice")}, - program2.Monetary{ + program.Constant{Inner: machine.AccountAddress("alice")}, + program.Monetary{ Asset: 0, Amount: machine.NewMonetaryInt(20), }, - program2.Constant{Inner: machine.NewMonetaryInt(0)}, - program2.Constant{Inner: machine.NewMonetaryInt(1)}, - program2.Constant{Inner: machine.AccountAddress("bob")}, + program.Constant{Inner: machine.NewMonetaryInt(0)}, + program.Constant{Inner: machine.NewMonetaryInt(1)}, + program.Constant{Inner: machine.AccountAddress("bob")}, }, }, }) @@ -1680,37 +1680,37 @@ func TestSaveFromAccount(t *testing.T) { )`, Expected: CaseResult{ Instructions: []byte{ - program2.OP_APUSH, 00, 00, - program2.OP_APUSH, 01, 00, - program2.OP_SAVE, - program2.OP_APUSH, 01, 00, - program2.OP_APUSH, 03, 00, - program2.OP_ASSET, - program2.OP_APUSH, 04, 00, - program2.OP_MONETARY_NEW, - program2.OP_TAKE_ALL, - program2.OP_APUSH, 03, 00, - program2.OP_TAKE, - program2.OP_APUSH, 05, 00, - program2.OP_BUMP, - program2.OP_REPAY, - program2.OP_FUNDING_SUM, - program2.OP_TAKE, - program2.OP_APUSH, 06, 00, - program2.OP_SEND, - program2.OP_REPAY, + program.OP_APUSH, 00, 00, + program.OP_APUSH, 01, 00, + program.OP_SAVE, + program.OP_APUSH, 01, 00, + program.OP_APUSH, 03, 00, + program.OP_ASSET, + program.OP_APUSH, 04, 00, + program.OP_MONETARY_NEW, + program.OP_TAKE_ALL, + program.OP_APUSH, 03, 00, + program.OP_TAKE, + program.OP_APUSH, 05, 00, + program.OP_BUMP, + program.OP_REPAY, + program.OP_FUNDING_SUM, + program.OP_TAKE, + program.OP_APUSH, 06, 00, + program.OP_SEND, + program.OP_REPAY, }, - Resources: []program2.Resource{ - program2.Variable{Typ: machine.TypeMonetary, Name: "mon"}, - program2.Constant{Inner: machine.AccountAddress("alice")}, - program2.Constant{Inner: machine.Asset("EUR")}, - program2.Monetary{ + Resources: []program.Resource{ + program.Variable{Typ: machine.TypeMonetary, Name: "mon"}, + program.Constant{Inner: machine.AccountAddress("alice")}, + program.Constant{Inner: machine.Asset("EUR")}, + program.Monetary{ Asset: 2, Amount: machine.NewMonetaryInt(20), }, - program2.Constant{Inner: machine.NewMonetaryInt(0)}, - program2.Constant{Inner: machine.NewMonetaryInt(1)}, - program2.Constant{Inner: machine.AccountAddress("bob")}, + program.Constant{Inner: machine.NewMonetaryInt(0)}, + program.Constant{Inner: machine.NewMonetaryInt(1)}, + program.Constant{Inner: machine.AccountAddress("bob")}, }, }, }) @@ -1723,7 +1723,7 @@ func TestSaveFromAccount(t *testing.T) { `, Expected: CaseResult{ Instructions: []byte{}, - Resources: []program2.Resource{}, + Resources: []program.Resource{}, Error: "save monetary from account: the first expression should be of type 'monetary' instead of 'number'", }, }) @@ -1736,7 +1736,7 @@ func TestSaveFromAccount(t *testing.T) { `, Expected: CaseResult{ Instructions: []byte{}, - Resources: []program2.Resource{}, + Resources: []program.Resource{}, Error: "save monetary from account: the second 
expression should be of type 'account' instead of 'asset'", }, }) diff --git a/internal/machine/script/compiler/destination.go b/internal/machine/script/compiler/destination.go index 1b454c13f..6b941143e 100644 --- a/internal/machine/script/compiler/destination.go +++ b/internal/machine/script/compiler/destination.go @@ -23,7 +23,7 @@ func (p *parseVisitor) VisitDestinationRecursive(c parser.IDestinationContext) * case *parser.DestAccountContext: p.AppendInstruction(program.OP_FUNDING_SUM) p.AppendInstruction(program.OP_TAKE) - ty, destAddr, err := p.VisitExpr(c.Expression(), true) + ty, _, err := p.VisitExpr(c.Expression(), true) if err != nil { return err } @@ -32,9 +32,6 @@ func (p *parseVisitor) VisitDestinationRecursive(c parser.IDestinationContext) * errors.New("wrong type: expected account as destination"), ) } - if !p.isWorld(*destAddr) { - p.readLockAccounts[*destAddr] = struct{}{} - } p.AppendInstruction(program.OP_SEND) return nil case *parser.DestInOrderContext: diff --git a/internal/machine/script/compiler/error.go b/internal/machine/script/compiler/error.go index 43ec901dd..37c6407ec 100644 --- a/internal/machine/script/compiler/error.go +++ b/internal/machine/script/compiler/error.go @@ -20,6 +20,11 @@ type CompileErrorList struct { Source string } +func (c *CompileErrorList) Is(err error) bool { + _, ok := err.(*CompileErrorList) + return ok +} + func (c *CompileErrorList) Error() string { source := strings.ReplaceAll(c.Source, "\t", " ") lines := strings.SplitAfter(strings.ReplaceAll(source, "\r\n", "\n"), "\n") diff --git a/internal/machine/script/compiler/program.go b/internal/machine/script/compiler/program.go index 0d1692be8..3f7f24639 100644 --- a/internal/machine/script/compiler/program.go +++ b/internal/machine/script/compiler/program.go @@ -2,7 +2,7 @@ package compiler import ( "github.com/formancehq/ledger/internal/machine" - program2 "github.com/formancehq/ledger/internal/machine/vm/program" + "github.com/formancehq/ledger/internal/machine/vm/program" ) func (p *parseVisitor) AppendInstruction(instruction byte) { @@ -10,17 +10,17 @@ func (p *parseVisitor) AppendInstruction(instruction byte) { } func (p *parseVisitor) PushAddress(addr machine.Address) { - p.instructions = append(p.instructions, program2.OP_APUSH) + p.instructions = append(p.instructions, program.OP_APUSH) bytes := addr.ToBytes() p.instructions = append(p.instructions, bytes...) } func (p *parseVisitor) PushInteger(val machine.Number) error { - addr, err := p.AllocateResource(program2.Constant{Inner: val}) + addr, err := p.AllocateResource(program.Constant{Inner: val}) if err != nil { return err } - p.instructions = append(p.instructions, program2.OP_APUSH) + p.instructions = append(p.instructions, program.OP_APUSH) bytes := addr.ToBytes() p.instructions = append(p.instructions, bytes...) 
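The new Is method on CompileErrorList above lets callers match any compile error list by type through errors.Is, even after wrapping. A short hedged sketch: the wrapping shown is an assumed example, and the snippet presumes it sits inside the ledger module so the internal compiler package is importable.

package main

import (
	"errors"
	"fmt"

	"github.com/formancehq/ledger/internal/machine/script/compiler"
)

func main() {
	// Stand-in for an error returned by the compiler.
	var compileErr error = &compiler.CompileErrorList{}

	// Callers typically add context before returning the error upward.
	wrapped := fmt.Errorf("compiling script: %w", compileErr)

	// The type-based Is method makes a zero-value sentinel sufficient as target.
	fmt.Println(errors.Is(wrapped, &compiler.CompileErrorList{})) // true
}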
return nil @@ -31,6 +31,6 @@ func (p *parseVisitor) Bump(n int64) error { if err != nil { return err } - p.instructions = append(p.instructions, program2.OP_BUMP) + p.instructions = append(p.instructions, program.OP_BUMP) return nil } diff --git a/internal/machine/script/compiler/source.go b/internal/machine/script/compiler/source.go index a3d3dc0fb..170f769fa 100644 --- a/internal/machine/script/compiler/source.go +++ b/internal/machine/script/compiler/source.go @@ -182,7 +182,6 @@ func (p *parseVisitor) VisitSource(c parser.ISourceContext, pushAsset func(), is isUnboundedOverdraft := p.isWorld(*accAddr) || p.isOverdraftUnbounded(overdraft) if !isUnboundedOverdraft { - p.writeLockAccounts[*accAddr] = struct{}{} neededAccounts[*accAddr] = struct{}{} } diff --git a/internal/machine/vm/machine.go b/internal/machine/vm/machine.go index 34a88049a..65f333f4b 100644 --- a/internal/machine/vm/machine.go +++ b/internal/machine/vm/machine.go @@ -13,15 +13,14 @@ import ( "encoding/binary" "fmt" "math/big" - "slices" + "github.com/formancehq/go-libs/v2/metadata" + ledger "github.com/formancehq/ledger/internal" "github.com/formancehq/ledger/internal/machine" - "github.com/formancehq/go-libs/metadata" - ledger "github.com/formancehq/ledger/internal" + "errors" "github.com/formancehq/ledger/internal/machine/vm/program" "github.com/logrusorgru/aurora" - "github.com/pkg/errors" ) type Machine struct { @@ -286,7 +285,7 @@ func (m *Machine) tick() (bool, error) { } allotment, err := machine.NewAllotment(portions) if err != nil { - return true, machine.NewErrInvalidScript(err.Error()) + return true, machine.NewErrInvalidScript("%s", err) } m.pushValue(*allotment) @@ -295,7 +294,7 @@ func (m *Machine) tick() (bool, error) { account := pop[machine.AccountAddress](m) funding, err := m.withdrawAll(account, overdraft.Asset, overdraft.Amount) if err != nil { - return true, machine.NewErrInvalidScript(err.Error()) + return true, machine.NewErrInvalidScript("%s", err) } m.pushValue(*funding) @@ -304,7 +303,7 @@ func (m *Machine) tick() (bool, error) { account := pop[machine.AccountAddress](m) funding, err := m.withdrawAlways(account, mon) if err != nil { - return true, machine.NewErrInvalidScript(err.Error()) + return true, machine.NewErrInvalidScript("%s", err) } m.pushValue(*funding) @@ -316,7 +315,7 @@ func (m *Machine) tick() (bool, error) { } result, remainder, err := funding.Take(mon.Amount) if err != nil { - return true, machine.NewErrInsufficientFund(err.Error()) + return true, machine.NewErrInsufficientFund("%s", err) } m.pushValue(remainder) m.pushValue(result) @@ -367,7 +366,7 @@ func (m *Machine) tick() (bool, error) { for i := 0; i < n; i++ { res, err := result.Concat(fundings_rev[n-1-i]) if err != nil { - return true, machine.NewErrInvalidScript(err.Error()) + return true, machine.NewErrInvalidScript("%s", err) } result = res } @@ -478,29 +477,20 @@ func (m *Machine) Execute() error { } } -type BalanceRequest struct { - Account string - Asset string - Response chan *machine.MonetaryInt - Error error -} - func (m *Machine) ResolveBalances(ctx context.Context, store Store) error { - m.Balances = make(map[machine.AccountAddress]map[machine.Asset]*machine.MonetaryInt) + // map account/asset/resourceIndex + assignBalanceAsResource := map[string]map[string]int{} + balancesQuery := BalanceQuery{} for address, resourceIndex := range m.UnresolvedResourceBalances { monetary := m.Resources[resourceIndex].(machine.Monetary) - balance, err := store.GetBalance(ctx, address, string(monetary.Asset)) - if err != nil { 
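The error constructors in the machine.go hunk above (NewErrInvalidScript, NewErrInsufficientFund, NewErrInvalidVars) now receive "%s" plus the wrapped error instead of err.Error() as the format string. Presumably this avoids having any '%' inside the underlying message reinterpreted as a printf verb; the same pitfall can be reproduced with plain fmt.Errorf, used here only as a stand-in for those constructors:

package main

import (
	"errors"
	"fmt"
)

func main() {
	// An underlying error whose message happens to contain a '%'.
	underlying := errors.New("allotment must sum to 100%")

	// Using the message itself as the format string mangles it (don't do this).
	bad := fmt.Errorf(underlying.Error())
	fmt.Println(bad) // allotment must sum to 100%!(NOVERB)

	// Passing it as an argument to a constant format keeps it intact.
	good := fmt.Errorf("%s", underlying)
	fmt.Println(good) // allotment must sum to 100%
}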
- return err - } - if balance.Cmp(ledger.Zero) < 0 { - return machine.NewErrNegativeAmount("tried to request the balance of account %s for asset %s: received %s: monetary amounts must be non-negative", - address, monetary.Asset, balance) + balancesQuery[address] = append(balancesQuery[address], string(monetary.Asset)) + + if _, ok := assignBalanceAsResource[address]; !ok { + assignBalanceAsResource[address] = map[string]int{} } - monetary.Amount = machine.NewMonetaryIntFromBigInt(balance) - m.Resources[resourceIndex] = monetary + assignBalanceAsResource[address][string(monetary.Asset)] = resourceIndex } // for every account that we need balances of, check if it's there @@ -511,10 +501,7 @@ func (m *Machine) ResolveBalances(ctx context.Context, store Store) error { } accountAddress := (*account).(machine.AccountAddress) - if _, ok := m.Balances[accountAddress]; !ok { - m.Balances[accountAddress] = make(map[machine.Asset]*machine.MonetaryInt) - } - // for every asset, send request + // for every asset, register the query for addr := range neededAssets { mon, ok := m.getResource(addr) if !ok { @@ -527,21 +514,46 @@ func (m *Machine) ResolveBalances(ctx context.Context, store Store) error { continue } - balance, err := store.GetBalance(ctx, string(accountAddress), string(asset)) - if err != nil { - return errors.Wrap(err, fmt.Sprintf("could not get balance for account %q", addr)) - } + balancesQuery[string(accountAddress)] = append(balancesQuery[string(accountAddress)], string(asset)) + } + } - m.Balances[accountAddress][asset] = machine.NewMonetaryIntFromBigInt(balance) + m.Balances = make(map[machine.AccountAddress]map[machine.Asset]*machine.MonetaryInt) + if len(balancesQuery) > 0 { + balances, err := store.GetBalances(ctx, balancesQuery) + if err != nil { + return fmt.Errorf("could not get balances: %w", err) + } + + for account, forAssets := range balances { + for asset, balance := range forAssets { + if assignBalanceAsResource[account] != nil { + resourceIndex, ok := assignBalanceAsResource[account][asset] + if ok { + if balance.Cmp(ledger.Zero) < 0 { + return machine.NewErrNegativeAmount("tried to request the balance of account %s for asset %s: received %s: monetary amounts must be non-negative", + account, asset, balance) + } + monetary := m.Resources[resourceIndex].(machine.Monetary) + monetary.Amount = machine.NewMonetaryIntFromBigInt(balance) + m.Resources[resourceIndex] = monetary + } + } + + if _, ok := m.Balances[machine.AccountAddress(account)]; !ok { + m.Balances[machine.AccountAddress(account)] = make(map[machine.Asset]*machine.MonetaryInt) + } + m.Balances[machine.AccountAddress(account)][machine.Asset(asset)] = machine.NewMonetaryIntFromBigInt(balance) + } } } + return nil } -func (m *Machine) ResolveResources(ctx context.Context, store Store) ([]string, []string, error) { - //TODO(gfyrag): Is that really required? Feel like defensive programming. 
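The rewritten ResolveBalances above first collects every (account, asset) pair it needs into a single BalanceQuery and then hits the store once through GetBalances, instead of issuing one GetBalance call per pair. A stand-alone sketch of that grouping step, with local copies of the query and result types; buildQuery and need are illustrative names, not part of the ledger code:

package main

import (
	"fmt"
	"math/big"
)

// Local stand-ins for vm.BalanceQuery and vm.Balances.
type BalanceQuery map[string][]string        // account -> assets to read
type Balances map[string]map[string]*big.Int // account -> asset -> balance

// need is one (account, asset) pair discovered while resolving resources.
type need struct{ account, asset string }

// buildQuery groups all needed pairs so the store is queried a single time.
func buildQuery(needs []need) BalanceQuery {
	q := BalanceQuery{}
	for _, n := range needs {
		q[n.account] = append(q[n.account], n.asset)
	}
	return q
}

func main() {
	q := buildQuery([]need{
		{"users:001", "COIN"},
		{"users:001", "EUR/2"},
		{"world", "COIN"},
	})
	fmt.Println(q) // map[users:001:[COIN EUR/2] world:[COIN]]
}

As the hunk above shows, the balances returned by the single GetBalances call are then written both into m.Balances and back into the monetary resources that referenced them.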
+func (m *Machine) ResolveResources(ctx context.Context, store Store) error { if m.resolveCalled { - return nil, nil, errors.New("tried to call ResolveResources twice") + return errors.New("tried to call ResolveResources twice") } m.resolveCalled = true @@ -560,7 +572,7 @@ func (m *Machine) ResolveResources(ctx context.Context, store Store) ([]string, var ok bool val, ok = m.Vars[res.Name] if !ok { - return nil, nil, fmt.Errorf("missing variable '%s'", res.Name) + return fmt.Errorf("missing variable '%s'", res.Name) } if val.GetType() == machine.TypeAccount { involvedAccountsMap[machine.Address(idx)] = string(val.(machine.AccountAddress)) @@ -571,17 +583,17 @@ func (m *Machine) ResolveResources(ctx context.Context, store Store) ([]string, account, err := store.GetAccount(ctx, addr) if err != nil { - return nil, nil, err + return err } metadata, ok := account.Metadata[res.Key] if !ok { - return nil, nil, machine.NewErrMissingMetadata("missing key %v in metadata for account %s", res.Key, addr) + return machine.NewErrMissingMetadata("missing key %v in metadata for account %s", res.Key, addr) } val, err = machine.NewValueFromString(res.Typ, metadata) if err != nil { - return nil, nil, err + return err } if val.GetType() == machine.TypeAccount { involvedAccountsMap[machine.Address(idx)] = string(val.(machine.AccountAddress)) @@ -594,12 +606,12 @@ func (m *Machine) ResolveResources(ctx context.Context, store Store) ([]string, ass, ok := m.getResource(res.Asset) if !ok { - return nil, nil, fmt.Errorf( + return fmt.Errorf( "variable '%s': tried to request account balance of an asset which has not yet been solved", res.Name) } if (*ass).GetType() != machine.TypeAsset { - return nil, nil, fmt.Errorf( + return fmt.Errorf( "variable '%s': tried to request account balance for an asset on wrong entity: %v instead of asset", res.Name, (*ass).GetType()) } @@ -619,25 +631,13 @@ func (m *Machine) ResolveResources(ctx context.Context, store Store) ([]string, m.Resources = append(m.Resources, val) } - readLockAccounts := make([]string, 0) - for _, accountAddress := range m.Program.ReadLockAccounts { - readLockAccounts = append(readLockAccounts, involvedAccountsMap[accountAddress]) - } - - writeLockAccounts := make([]string, 0) - for _, machineAddress := range m.Program.WriteLockAccounts { - writeLockAccounts = append(writeLockAccounts, involvedAccountsMap[machineAddress]) - } - - slices.Sort(readLockAccounts) - slices.Sort(writeLockAccounts) - return readLockAccounts, writeLockAccounts, nil + return nil } func (m *Machine) SetVarsFromJSON(vars map[string]string) error { v, err := m.Program.ParseVariablesJSON(vars) if err != nil { - return machine.NewErrInvalidVars(err.Error()) + return machine.NewErrInvalidVars("%s", err) } m.Vars = v return nil diff --git a/internal/machine/vm/machine_test.go b/internal/machine/vm/machine_test.go index 5df051f43..5240cc13a 100644 --- a/internal/machine/vm/machine_test.go +++ b/internal/machine/vm/machine_test.go @@ -11,11 +11,11 @@ import ( "github.com/formancehq/ledger/internal/machine" - "github.com/formancehq/go-libs/metadata" + "errors" + "github.com/formancehq/go-libs/v2/metadata" ledger "github.com/formancehq/ledger/internal" "github.com/formancehq/ledger/internal/machine/script/compiler" "github.com/formancehq/ledger/internal/machine/vm/program" - "github.com/pkg/errors" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -100,7 +100,7 @@ func test(t *testing.T, testCase TestCase) { } } - _, _, err := 
m.ResolveResources(context.Background(), store) + err := m.ResolveResources(context.Background(), store) if err != nil { return err } @@ -1009,10 +1009,8 @@ func TestNeededBalances(t *testing.T) { if err != nil { t.Fatalf("did not expect error on SetVars, got: %v", err) } - readLockAccounts, writeLockAccounts, err := m.ResolveResources(context.Background(), EmptyStore) + err = m.ResolveResources(context.Background(), EmptyStore) require.NoError(t, err) - require.Equalf(t, []string{"c"}, readLockAccounts, "readlock") - require.Equalf(t, []string{"a", "b", "bounded"}, writeLockAccounts, "writelock") store := mockStore{} err = m.ResolveBalances(context.Background(), &store) @@ -1038,10 +1036,8 @@ func TestNeededBalances2(t *testing.T) { } m := NewMachine(*p) - _, involvedSources, err := m.ResolveResources(context.Background(), EmptyStore) + err = m.ResolveResources(context.Background(), EmptyStore) require.NoError(t, err) - require.Equal(t, []string{"a"}, involvedSources) - } func TestNeededBalancesBalanceFn(t *testing.T) { @@ -1059,10 +1055,8 @@ send $balance ( } m := NewMachine(*p) - rlAccounts, wlAccounts, err := m.ResolveResources(context.Background(), EmptyStore) + err = m.ResolveResources(context.Background(), EmptyStore) require.NoError(t, err) - require.Equal(t, []string{"a"}, wlAccounts) - require.Equal(t, []string{"acc", "b"}, rlAccounts) store := mockStore{} err = m.ResolveBalances(context.Background(), &store) @@ -1096,10 +1090,8 @@ send [COIN 1] ( Balances: map[string]*big.Int{}, }, } - rlAccounts, wlAccounts, err := m.ResolveResources(context.Background(), staticStore) + err = m.ResolveResources(context.Background(), staticStore) require.NoError(t, err) - require.Equal(t, []string{"src"}, wlAccounts) - require.Equal(t, []string{"dest"}, rlAccounts) store := mockStore{} err = m.ResolveBalances(context.Background(), &store) @@ -1120,7 +1112,7 @@ func TestSetTxMeta(t *testing.T) { m := NewMachine(*p) - _, _, err = m.ResolveResources(context.Background(), EmptyStore) + err = m.ResolveResources(context.Background(), EmptyStore) require.NoError(t, err) err = m.ResolveBalances(context.Background(), EmptyStore) require.NoError(t, err) @@ -1160,7 +1152,7 @@ func TestSetAccountMeta(t *testing.T) { m := NewMachine(*p) - _, _, err = m.ResolveResources(context.Background(), EmptyStore) + err = m.ResolveResources(context.Background(), EmptyStore) require.NoError(t, err) err = m.ResolveBalances(context.Background(), EmptyStore) @@ -1210,7 +1202,7 @@ func TestSetAccountMeta(t *testing.T) { "acc": "test", })) - _, _, err = m.ResolveResources(context.Background(), EmptyStore) + err = m.ResolveResources(context.Background(), EmptyStore) require.NoError(t, err) err = m.ResolveBalances(context.Background(), EmptyStore) @@ -1737,7 +1729,6 @@ func TestSetVarsFromJSON(t *testing.T) { err = m.SetVarsFromJSON(tc.vars) if tc.expectedError != nil { require.Error(t, err) - //TODO(gfyrag): refine error handling of SetVars/ResolveResources/ResolveBalances require.Equal(t, tc.expectedError.Error(), err.Error()) } else { require.Nil(t, err) @@ -1779,7 +1770,7 @@ func TestResolveResources(t *testing.T) { m := NewMachine(*p) require.NoError(t, m.SetVarsFromJSON(tc.vars)) - _, _, err = m.ResolveResources(context.Background(), EmptyStore) + err = m.ResolveResources(context.Background(), EmptyStore) if tc.expectedError != nil { require.Error(t, err) require.True(t, errors.Is(err, tc.expectedError)) @@ -1832,7 +1823,7 @@ func TestResolveBalances(t *testing.T) { m := NewMachine(*p) require.NoError(t, 
m.SetVarsFromJSON(tc.vars)) - _, _, err = m.ResolveResources(context.Background(), EmptyStore) + err = m.ResolveResources(context.Background(), EmptyStore) require.NoError(t, err) store := tc.store @@ -1871,7 +1862,7 @@ func TestMachine(t *testing.T) { }) require.NoError(t, err) - _, _, err := m.ResolveResources(context.Background(), EmptyStore) + err = m.ResolveResources(context.Background(), EmptyStore) require.NoError(t, err) err = m.ResolveBalances(context.Background(), EmptyStore) @@ -1895,7 +1886,7 @@ func TestMachine(t *testing.T) { }) require.NoError(t, err) - _, _, err := m.ResolveResources(context.Background(), EmptyStore) + err = m.ResolveResources(context.Background(), EmptyStore) require.NoError(t, err) err = m.Execute() @@ -1910,17 +1901,17 @@ func TestMachine(t *testing.T) { }) require.NoError(t, err) - _, _, err := m.ResolveResources(context.Background(), EmptyStore) + err = m.ResolveResources(context.Background(), EmptyStore) require.NoError(t, err) - _, _, err = m.ResolveResources(context.Background(), EmptyStore) + err := m.ResolveResources(context.Background(), EmptyStore) require.ErrorContains(t, err, "tried to call ResolveResources twice") }) t.Run("err missing var", func(t *testing.T) { m := NewMachine(*p) - _, _, err := m.ResolveResources(context.Background(), EmptyStore) + err = m.ResolveResources(context.Background(), EmptyStore) require.Error(t, err) }) } @@ -2407,9 +2398,19 @@ func (s *mockStore) GetRequestedAccounts() []string { return s.requestedAccounts } -func (s *mockStore) GetBalance(ctx context.Context, address, asset string) (*big.Int, error) { - s.requestedAccounts = append(s.requestedAccounts, address) - return big.NewInt(0), nil +func (s *mockStore) GetBalances(_ context.Context, query BalanceQuery) (Balances, error) { + ret := Balances{} + for account, assets := range query { + for _, asset := range assets { + s.requestedAccounts = append(s.requestedAccounts, account) + _, ok := ret[account] + if !ok { + ret[account] = map[string]*big.Int{} + } + ret[account][asset] = new(big.Int) + } + } + return ret, nil } func (s *mockStore) GetAccount(ctx context.Context, address string) (*ledger.Account, error) { diff --git a/internal/machine/vm/program/program.go b/internal/machine/vm/program/program.go index fb93e8f9a..253839397 100644 --- a/internal/machine/vm/program/program.go +++ b/internal/machine/vm/program/program.go @@ -5,17 +5,12 @@ import ( "fmt" "github.com/formancehq/ledger/internal/machine" - - "github.com/pkg/errors" ) type Program struct { Instructions []byte Resources []Resource NeededBalances map[machine.Address]map[machine.Address]struct{} - - ReadLockAccounts []machine.Address - WriteLockAccounts []machine.Address } func (p Program) String() string { @@ -51,23 +46,23 @@ func (p *Program) ParseVariables(vars map[string]machine.Value) (map[string]mach switch val.GetType() { case machine.TypeAccount: if err := machine.ValidateAccountAddress(val.(machine.AccountAddress)); err != nil { - return nil, errors.Wrapf(err, "invalid variable $%s value '%s'", - variable.Name, string(val.(machine.AccountAddress))) + return nil, fmt.Errorf("invalid variable $%s value '%s': %w", + variable.Name, string(val.(machine.AccountAddress)), err) } case machine.TypeAsset: if err := machine.ValidateAsset(val.(machine.Asset)); err != nil { - return nil, errors.Wrapf(err, "invalid variable $%s value '%s'", - variable.Name, string(val.(machine.Asset))) + return nil, fmt.Errorf("invalid variable $%s value '%s': %w", + variable.Name, string(val.(machine.Asset)), err) } 
case machine.TypeMonetary: if err := machine.ParseMonetary(val.(machine.Monetary)); err != nil { - return nil, errors.Wrapf(err, "invalid variable $%s value '%s'", - variable.Name, val.(machine.Monetary).String()) + return nil, fmt.Errorf("invalid variable $%s value '%s': %w", + variable.Name, val.(machine.Monetary).String(), err) } case machine.TypePortion: if err := machine.ValidatePortionSpecific(val.(machine.Portion)); err != nil { - return nil, errors.Wrapf(err, "invalid variable $%s value '%s'", - variable.Name, val.(machine.Portion).String()) + return nil, fmt.Errorf("invalid variable $%s value '%s': %w", + variable.Name, val.(machine.Portion).String(), err) } case machine.TypeString: case machine.TypeNumber: diff --git a/internal/machine/vm/run.go b/internal/machine/vm/run.go index ae2cf0da4..a437806b8 100644 --- a/internal/machine/vm/run.go +++ b/internal/machine/vm/run.go @@ -1,25 +1,58 @@ package vm import ( + "fmt" + "github.com/formancehq/go-libs/v2/time" "math/big" "github.com/formancehq/ledger/internal/machine" - "github.com/formancehq/go-libs/metadata" + "github.com/formancehq/go-libs/v2/metadata" ledger "github.com/formancehq/ledger/internal" - "github.com/pkg/errors" ) +type RunScript struct { + Script + Timestamp time.Time `json:"timestamp"` + Metadata metadata.Metadata `json:"metadata"` + Reference string `json:"reference"` +} + +type Script struct { + Plain string `json:"plain"` + Vars map[string]string `json:"vars" swaggertype:"object"` +} + +type ScriptV1 struct { + Script + Vars map[string]any `json:"vars"` +} + +func (s ScriptV1) ToCore() Script { + s.Script.Vars = map[string]string{} + for k, v := range s.Vars { + switch v := v.(type) { + case string: + s.Script.Vars[k] = v + case map[string]any: + s.Script.Vars[k] = fmt.Sprintf("%s %v", v["asset"], v["amount"]) + default: + s.Script.Vars[k] = fmt.Sprint(v) + } + } + return s.Script +} + type Result struct { Postings ledger.Postings Metadata metadata.Metadata AccountMetadata map[string]metadata.Metadata } -func Run(m *Machine, script ledger.RunScript) (*Result, error) { +func Run(m *Machine, script RunScript) (*Result, error) { err := m.Execute() if err != nil { - return nil, errors.Wrap(err, "script execution failed") + return nil, fmt.Errorf("script execution failed: %w", err) } result := Result{ diff --git a/internal/machine/vm/run_test.go b/internal/machine/vm/run_test.go index c5b4af34f..f74b6b756 100644 --- a/internal/machine/vm/run_test.go +++ b/internal/machine/vm/run_test.go @@ -8,7 +8,7 @@ import ( "github.com/formancehq/ledger/internal/machine" - "github.com/formancehq/go-libs/metadata" + "github.com/formancehq/go-libs/v2/metadata" ledger "github.com/formancehq/ledger/internal" "github.com/formancehq/ledger/internal/machine/script/compiler" "github.com/stretchr/testify/require" @@ -430,12 +430,12 @@ func TestRun(t *testing.T) { m := NewMachine(*program) require.NoError(t, m.SetVarsFromJSON(tc.vars)) - _, _, err = m.ResolveResources(context.Background(), tc.store) + err = m.ResolveResources(context.Background(), tc.store) require.NoError(t, err) require.NoError(t, m.ResolveBalances(context.Background(), tc.store)) - result, err := Run(m, ledger.RunScript{ - Script: ledger.Script{ + result, err := Run(m, RunScript{ + Script: Script{ Plain: tc.script, Vars: tc.vars, }, diff --git a/internal/machine/vm/store.go b/internal/machine/vm/store.go index 188f65e94..2e498dfd0 100644 --- a/internal/machine/vm/store.go +++ b/internal/machine/vm/store.go @@ -4,22 +4,28 @@ import ( "context" "math/big" - 
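ScriptV1.ToCore, moved into the vm package in the run.go hunk above, flattens the loosely typed v1 variables: plain strings pass through, and {"asset": ..., "amount": ...} objects become "ASSET AMOUNT" strings. The sketch below copies that logic onto simplified local types (JSON tags and the swaggertype annotation omitted) so it can run on its own:

package main

import "fmt"

type Script struct {
	Plain string
	Vars  map[string]string
}

type ScriptV1 struct {
	Script
	Vars map[string]any
}

// ToCore mirrors the conversion shown in run.go: strings pass through,
// {"asset": ..., "amount": ...} objects are flattened to "ASSET AMOUNT",
// anything else is rendered with fmt.Sprint.
func (s ScriptV1) ToCore() Script {
	s.Script.Vars = map[string]string{}
	for k, v := range s.Vars {
		switch v := v.(type) {
		case string:
			s.Script.Vars[k] = v
		case map[string]any:
			s.Script.Vars[k] = fmt.Sprintf("%s %v", v["asset"], v["amount"])
		default:
			s.Script.Vars[k] = fmt.Sprint(v)
		}
	}
	return s.Script
}

func main() {
	v1 := ScriptV1{
		Script: Script{Plain: "send $mon (\n  source = @world\n  destination = @users:001\n)"},
		Vars: map[string]any{
			"mon": map[string]any{"asset": "USD/2", "amount": 100},
		},
	}
	fmt.Println(v1.ToCore().Vars) // map[mon:USD/2 100]
}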
"github.com/formancehq/go-libs/metadata" + "github.com/formancehq/go-libs/v2/metadata" ledger "github.com/formancehq/ledger/internal" ) +// BalanceQuery is a map of account/asset +type BalanceQuery map[string][]string + +// Balances is a map of account/asset/balance +type Balances map[string]map[string]*big.Int + type Store interface { - GetBalance(ctx context.Context, address, asset string) (*big.Int, error) + GetBalances(ctx context.Context, query BalanceQuery) (Balances, error) GetAccount(ctx context.Context, address string) (*ledger.Account, error) } type emptyStore struct{} -func (e *emptyStore) GetBalance(ctx context.Context, address, asset string) (*big.Int, error) { - return new(big.Int), nil +func (e *emptyStore) GetBalances(context.Context, BalanceQuery) (Balances, error) { + return Balances{}, nil } -func (e *emptyStore) GetAccount(ctx context.Context, address string) (*ledger.Account, error) { +func (e *emptyStore) GetAccount(_ context.Context, address string) (*ledger.Account, error) { return &ledger.Account{ Address: address, Metadata: metadata.Metadata{}, @@ -37,20 +43,31 @@ type AccountWithBalances struct { type StaticStore map[string]*AccountWithBalances -func (s StaticStore) GetBalance(ctx context.Context, address, asset string) (*big.Int, error) { - account, ok := s[address] - if !ok { - return new(big.Int), nil - } - balance, ok := account.Balances[asset] - if !ok { - return new(big.Int), nil +func (s StaticStore) GetBalances(_ context.Context, query BalanceQuery) (Balances, error) { + ret := Balances{} + for accountAddress, assets := range query { + for _, asset := range assets { + ret[accountAddress] = make(map[string]*big.Int) + account, ok := s[accountAddress] + if !ok { + ret[accountAddress] = map[string]*big.Int{ + asset: new(big.Int), + } + continue + } + balance, ok := account.Balances[asset] + if !ok { + ret[accountAddress][asset] = new(big.Int) + continue + } + ret[accountAddress][asset] = balance + } } - return balance, nil + return ret, nil } -func (s StaticStore) GetAccount(ctx context.Context, address string) (*ledger.Account, error) { +func (s StaticStore) GetAccount(_ context.Context, address string) (*ledger.Account, error) { account, ok := s[address] if !ok { return &ledger.Account{ diff --git a/internal/metadata.go b/internal/metadata.go index 564ff1b55..baa1687cd 100644 --- a/internal/metadata.go +++ b/internal/metadata.go @@ -1,37 +1,6 @@ package ledger -import ( - "math/big" - - "github.com/formancehq/go-libs/metadata" -) - const ( - formanceNamespace = "com.formance.spec/" - revertKey = "state/reverts" - MetaTargetTypeAccount = "ACCOUNT" MetaTargetTypeTransaction = "TRANSACTION" ) - -func SpecMetadata(name string) string { - return formanceNamespace + name -} - -func MarkReverts(m metadata.Metadata, txID *big.Int) metadata.Metadata { - return m.Merge(RevertMetadata(txID)) -} - -func RevertMetadataSpecKey() string { - return SpecMetadata(revertKey) -} - -func ComputeMetadata(key, value string) metadata.Metadata { - return metadata.Metadata{ - key: value, - } -} - -func RevertMetadata(tx *big.Int) metadata.Metadata { - return ComputeMetadata(RevertMetadataSpecKey(), tx.String()) -} diff --git a/internal/moves.go b/internal/moves.go new file mode 100644 index 000000000..1c84635ca --- /dev/null +++ b/internal/moves.go @@ -0,0 +1,60 @@ +package ledger + +import ( + "slices" + + "github.com/formancehq/go-libs/v2/bun/bunpaginate" + "github.com/formancehq/go-libs/v2/collectionutils" + "github.com/formancehq/go-libs/v2/time" + "github.com/uptrace/bun" +) + 
+type Move struct { + bun.BaseModel `bun:"table:moves"` + + TransactionID int `bun:"transactions_id,type:bigint"` + IsSource bool `bun:"is_source,type:bool"` + Account string `bun:"accounts_address,type:varchar"` + Amount *bunpaginate.BigInt `bun:"amount,type:numeric"` + Asset string `bun:"asset,type:varchar"` + InsertionDate time.Time `bun:"insertion_date,type:timestamp,nullzero"` + EffectiveDate time.Time `bun:"effective_date,type:timestamp,nullzero"` + PostCommitVolumes *Volumes `bun:"post_commit_volumes,type:jsonb"` + PostCommitEffectiveVolumes *Volumes `bun:"post_commit_effective_volumes,type:jsonb,scanonly"` +} + +type Moves []*Move + +func (m Moves) ComputePostCommitEffectiveVolumes() PostCommitVolumes { + type key struct { + Account string + Asset string + } + + visited := collectionutils.Set[key]{} + + // We need to find the more recent move for each account/asset. + // We will iterate on moves by starting by the more recent. + slices.Reverse(m) + + ret := PostCommitVolumes{} + for _, move := range m { + if visited.Contains(key{ + Account: move.Account, + Asset: move.Asset, + }) { + continue + } + ret = ret.Merge(PostCommitVolumes{ + move.Account: VolumesByAssets{ + move.Asset: *move.PostCommitEffectiveVolumes, + }, + }) + visited.Put(key{ + Account: move.Account, + Asset: move.Asset, + }) + } + + return ret +} diff --git a/internal/opentelemetry/metrics/metrics.go b/internal/opentelemetry/metrics/metrics.go deleted file mode 100644 index 1cc65dea0..000000000 --- a/internal/opentelemetry/metrics/metrics.go +++ /dev/null @@ -1,89 +0,0 @@ -package metrics - -import ( - "go.opentelemetry.io/otel/metric" - "go.opentelemetry.io/otel/metric/noop" -) - -type GlobalRegistry interface { - APILatencies() metric.Int64Histogram - StatusCodes() metric.Int64Counter - ActiveLedgers() metric.Int64UpDownCounter -} - -type globalRegistry struct { - // API Latencies - apiLatencies metric.Int64Histogram - statusCodes metric.Int64Counter - activeLedgers metric.Int64UpDownCounter -} - -func RegisterGlobalRegistry(meterProvider metric.MeterProvider) (GlobalRegistry, error) { - meter := meterProvider.Meter("global") - - apiLatencies, err := meter.Int64Histogram( - "ledger.api.time", - metric.WithUnit("ms"), - metric.WithDescription("Latency of API calls"), - ) - if err != nil { - return nil, err - } - - statusCodes, err := meter.Int64Counter( - "ledger.api.status", - metric.WithUnit("1"), - metric.WithDescription("Status codes of API calls"), - ) - if err != nil { - return nil, err - } - - activeLedgers, err := meter.Int64UpDownCounter( - "ledger.api.ledgers", - metric.WithUnit("1"), - metric.WithDescription("Number of active ledgers"), - ) - if err != nil { - return nil, err - } - - return &globalRegistry{ - apiLatencies: apiLatencies, - statusCodes: statusCodes, - activeLedgers: activeLedgers, - }, nil -} - -func (gm *globalRegistry) APILatencies() metric.Int64Histogram { - return gm.apiLatencies -} - -func (gm *globalRegistry) StatusCodes() metric.Int64Counter { - return gm.statusCodes -} - -func (gm *globalRegistry) ActiveLedgers() metric.Int64UpDownCounter { - return gm.activeLedgers -} - -type noOpRegistry struct{} - -func NewNoOpRegistry() *noOpRegistry { - return &noOpRegistry{} -} - -func (nm *noOpRegistry) APILatencies() metric.Int64Histogram { - histogram, _ := noop.NewMeterProvider().Meter("ledger").Int64Histogram("api_latencies") - return histogram -} - -func (nm *noOpRegistry) StatusCodes() metric.Int64Counter { - counter, _ := 
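Moves.ComputePostCommitEffectiveVolumes in the new moves.go above keeps, for each (account, asset) pair, only the post-commit effective volumes of the most recent move. The following stand-alone sketch shows the same "latest move wins" reduction, iterating from the end of the slice instead of reversing it; move, key and latestVolumes are simplified stand-ins, not the ledger types:

package main

import "fmt"

// move is a pared-down stand-in for ledger.Move: just enough to show
// how the latest post-commit volumes per (account, asset) are kept.
type move struct {
	Account, Asset   string
	EffectiveInputs  int64 // stand-in for the *Volumes field
	EffectiveOutputs int64
}

type key struct{ account, asset string }

// latestVolumes walks the slice from newest to oldest and keeps the first
// (i.e. most recent) move seen for each (account, asset) pair.
func latestVolumes(moves []move) map[key][2]int64 {
	ret := map[key][2]int64{}
	for i := len(moves) - 1; i >= 0; i-- {
		m := moves[i]
		k := key{m.Account, m.Asset}
		if _, seen := ret[k]; seen {
			continue // a newer move for this pair was already captured
		}
		ret[k] = [2]int64{m.EffectiveInputs, m.EffectiveOutputs}
	}
	return ret
}

func main() {
	moves := []move{ // ordered oldest -> newest
		{"users:001", "COIN", 100, 0},
		{"users:001", "COIN", 100, 30},
		{"world", "COIN", 0, 100},
	}
	fmt.Println(latestVolumes(moves)[key{"users:001", "COIN"}]) // [100 30]
}

Note that the method in the diff reverses the Moves slice in place with slices.Reverse before iterating, so callers should not rely on the original ordering afterwards.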
noop.NewMeterProvider().Meter("ledger").Int64Counter("status_codes") - return counter -} - -func (nm *noOpRegistry) ActiveLedgers() metric.Int64UpDownCounter { - counter, _ := noop.NewMeterProvider().Meter("ledger").Int64UpDownCounter("active_ledgers") - return counter -} diff --git a/internal/opentelemetry/tracer/tracer.go b/internal/opentelemetry/tracer/tracer.go deleted file mode 100644 index 97ab0fe1b..000000000 --- a/internal/opentelemetry/tracer/tracer.go +++ /dev/null @@ -1,14 +0,0 @@ -package tracer - -import ( - "context" - - "go.opentelemetry.io/otel" - "go.opentelemetry.io/otel/trace" -) - -var Tracer = otel.Tracer("com.formance.ledger") - -func Start(ctx context.Context, name string, opts ...trace.SpanStartOption) (context.Context, trace.Span) { - return Tracer.Start(ctx, name, opts...) -} diff --git a/internal/posting.go b/internal/posting.go index 6a48194a8..ec467f689 100644 --- a/internal/posting.go +++ b/internal/posting.go @@ -1,14 +1,11 @@ package ledger import ( - "database/sql/driver" - "encoding/json" + "github.com/formancehq/ledger/pkg/accounts" + "github.com/formancehq/ledger/pkg/assets" "math/big" - "github.com/formancehq/ledger/pkg/core/accounts" - "github.com/formancehq/ledger/pkg/core/assets" - - "github.com/pkg/errors" + "errors" ) type Posting struct { @@ -29,35 +26,19 @@ func NewPosting(source string, destination string, asset string, amount *big.Int type Postings []Posting -func (p Postings) Reverse() { +func (p Postings) Reverse() Postings { + postings := make(Postings, len(p)) + copy(postings, p) + for i := range p { - p[i].Source, p[i].Destination = p[i].Destination, p[i].Source + postings[i].Source, postings[i].Destination = postings[i].Destination, postings[i].Source } for i := 0; i < len(p)/2; i++ { - p[i], p[len(p)-i-1] = p[len(p)-i-1], p[i] + postings[i], postings[len(postings)-i-1] = postings[len(postings)-i-1], postings[i] } -} -// Scan - Implement the database/sql scanner interface -func (p *Postings) Scan(value interface{}) error { - if value == nil { - return nil - } - v, err := driver.String.ConvertValue(value) - if err != nil { - return err - } - - *p = Postings{} - switch vv := v.(type) { - case []uint8: - return json.Unmarshal(vv, p) - case string: - return json.Unmarshal([]byte(vv), p) - default: - panic("not supported type") - } + return postings } func (p Postings) Validate() (int, error) { diff --git a/internal/posting_test.go b/internal/posting_test.go deleted file mode 100644 index 13114815c..000000000 --- a/internal/posting_test.go +++ /dev/null @@ -1,66 +0,0 @@ -package ledger - -import ( - "math/big" - "testing" - - "github.com/stretchr/testify/require" -) - -func TestReverseMultiple(t *testing.T) { - p := Postings{ - { - Source: "world", - Destination: "users:001", - Amount: big.NewInt(100), - Asset: "COIN", - }, - { - Source: "users:001", - Destination: "payments:001", - Amount: big.NewInt(100), - Asset: "COIN", - }, - } - - expected := Postings{ - { - Source: "payments:001", - Destination: "users:001", - Amount: big.NewInt(100), - Asset: "COIN", - }, - { - Source: "users:001", - Destination: "world", - Amount: big.NewInt(100), - Asset: "COIN", - }, - } - - p.Reverse() - require.Equal(t, expected, p) -} - -func TestReverseSingle(t *testing.T) { - p := Postings{ - { - Source: "world", - Destination: "users:001", - Amount: big.NewInt(100), - Asset: "COIN", - }, - } - - expected := Postings{ - { - Source: "users:001", - Destination: "world", - Amount: big.NewInt(100), - Asset: "COIN", - }, - } - - p.Reverse() - require.Equal(t, 
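Postings.Reverse in the posting.go hunk above no longer mutates the receiver: it returns a fresh slice with source and destination swapped and the overall order inverted, so the original postings stay untouched. A self-contained sketch of the new semantics with a trimmed-down Posting type:

package main

import (
	"fmt"
	"math/big"
)

type Posting struct {
	Source, Destination string
	Amount              *big.Int
	Asset               string
}

type Postings []Posting

// Reverse returns a new slice: each posting has source/destination swapped
// and the overall order is inverted, leaving the receiver unchanged.
func (p Postings) Reverse() Postings {
	postings := make(Postings, len(p))
	copy(postings, p)
	for i := range postings {
		postings[i].Source, postings[i].Destination = postings[i].Destination, postings[i].Source
	}
	for i := 0; i < len(postings)/2; i++ {
		postings[i], postings[len(postings)-i-1] = postings[len(postings)-i-1], postings[i]
	}
	return postings
}

func main() {
	p := Postings{
		{Source: "world", Destination: "users:001", Amount: big.NewInt(100), Asset: "COIN"},
		{Source: "users:001", Destination: "payments:001", Amount: big.NewInt(100), Asset: "COIN"},
	}
	r := p.Reverse()
	fmt.Println(r[0].Source, "->", r[0].Destination) // payments:001 -> users:001
	fmt.Println(p[0].Source, "->", p[0].Destination) // world -> users:001 (unchanged)
}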
expected, p) -} diff --git a/internal/script.go b/internal/script.go deleted file mode 100644 index 40322a394..000000000 --- a/internal/script.go +++ /dev/null @@ -1,41 +0,0 @@ -package ledger - -import ( - "fmt" - - "github.com/formancehq/go-libs/time" - - "github.com/formancehq/go-libs/metadata" -) - -type RunScript struct { - Script - Timestamp time.Time `json:"timestamp"` - Metadata metadata.Metadata `json:"metadata"` - Reference string `json:"reference"` -} - -type Script struct { - Plain string `json:"plain"` - Vars map[string]string `json:"vars" swaggertype:"object"` -} - -type ScriptV1 struct { - Script - Vars map[string]any `json:"vars"` -} - -func (s ScriptV1) ToCore() Script { - s.Script.Vars = map[string]string{} - for k, v := range s.Vars { - switch v := v.(type) { - case string: - s.Script.Vars[k] = v - case map[string]any: - s.Script.Vars[k] = fmt.Sprintf("%s %v", v["asset"], v["amount"]) - default: - s.Script.Vars[k] = fmt.Sprint(v) - } - } - return s.Script -} diff --git a/internal/storage/bucket/bucket.go b/internal/storage/bucket/bucket.go new file mode 100644 index 000000000..73777c500 --- /dev/null +++ b/internal/storage/bucket/bucket.go @@ -0,0 +1,196 @@ +package bucket + +import ( + "bytes" + "context" + _ "embed" + "errors" + "fmt" + "github.com/formancehq/go-libs/v2/migrations" + ledger "github.com/formancehq/ledger/internal" + "github.com/uptrace/bun" + "go.opentelemetry.io/otel/trace" + "text/template" +) + +type Bucket struct { + name string + db bun.IDB +} + +func (b *Bucket) Migrate(ctx context.Context, tracer trace.Tracer) error { + return Migrate(ctx, tracer, b.db, b.name) +} + +func (b *Bucket) IsUpToDate(ctx context.Context) (bool, error) { + ret, err := GetMigrator(b.name).IsUpToDate(ctx, b.db) + if err != nil && errors.Is(err, migrations.ErrMissingVersionTable) { + return false, nil + } + return ret, err +} + +func (b *Bucket) GetMigrationsInfo(ctx context.Context) ([]migrations.Info, error) { + return GetMigrator(b.name).GetMigrations(ctx, b.db) +} + +func (b *Bucket) AddLedger(ctx context.Context, l ledger.Ledger, db bun.IDB) error { + + tpl := template.Must(template.New("sql").Parse(addLedgerTpl)) + buf := bytes.NewBuffer(nil) + if err := tpl.Execute(buf, l); err != nil { + return fmt.Errorf("executing template: %w", err) + } + + _, err := db.ExecContext(ctx, buf.String()) + if err != nil { + return fmt.Errorf("executing sql: %w", err) + } + + return nil +} + +func New(db bun.IDB, name string) *Bucket { + return &Bucket{ + db: db, + name: name, + } +} + +const addLedgerTpl = ` +-- create a sequence for transactions by ledger instead of a sequence of the table as we want to have contiguous ids +-- notes: we can still have "holes" on ids since a sql transaction can be reverted after a usage of the sequence +create sequence "{{.Bucket}}"."transaction_id_{{.ID}}" owned by "{{.Bucket}}".transactions.id; +select setval('"{{.Bucket}}"."transaction_id_{{.ID}}"', coalesce(( + select max(id) + 1 + from "{{.Bucket}}".transactions + where ledger = '{{ .Name }}' +), 1)::bigint, false); + +-- create a sequence for logs by ledger instead of a sequence of the table as we want to have contiguous ids +-- notes: we can still have "holes" on id since a sql transaction can be reverted after a usage of the sequence +create sequence "{{.Bucket}}"."log_id_{{.ID}}" owned by "{{.Bucket}}".logs.id; +select setval('"{{.Bucket}}"."log_id_{{.ID}}"', coalesce(( + select max(id) + 1 + from "{{.Bucket}}".logs + where ledger = '{{ .Name }}' +), 1)::bigint, false); + +-- enable post 
commit effective volumes synchronously + +{{ if .HasFeature "MOVES_HISTORY_POST_COMMIT_EFFECTIVE_VOLUMES" "SYNC" }} +create index "pcev_{{.ID}}" on "{{.Bucket}}".moves (accounts_address, asset, effective_date desc) where ledger = '{{.Name}}'; + +create trigger "set_effective_volumes_{{.ID}}" +before insert +on "{{.Bucket}}"."moves" +for each row +when ( + new.ledger = '{{.Name}}' +) +execute procedure "{{.Bucket}}".set_effective_volumes(); + +create trigger "update_effective_volumes_{{.ID}}" +after insert +on "{{.Bucket}}"."moves" +for each row +when ( + new.ledger = '{{.Name}}' +) +execute procedure "{{.Bucket}}".update_effective_volumes(); +{{ end }} + +-- logs hash + +{{ if .HasFeature "HASH_LOGS" "SYNC" }} +create trigger "set_log_hash_{{.ID}}" +before insert +on "{{.Bucket}}"."logs" +for each row +when ( + new.ledger = '{{.Name}}' +) +execute procedure "{{.Bucket}}".set_log_hash(); +{{ end }} + +{{ if .HasFeature "ACCOUNT_METADATA_HISTORY" "SYNC" }} +create trigger "update_account_metadata_history_{{.ID}}" +after update +on "{{.Bucket}}"."accounts" +for each row +when ( + new.ledger = '{{.Name}}' +) +execute procedure "{{.Bucket}}".update_account_metadata_history(); + +create trigger "insert_account_metadata_history_{{.ID}}" +after insert +on "{{.Bucket}}"."accounts" +for each row +when ( + new.ledger = '{{.Name}}' +) +execute procedure "{{.Bucket}}".insert_account_metadata_history(); +{{ end }} + +{{ if .HasFeature "TRANSACTION_METADATA_HISTORY" "SYNC" }} +create trigger "update_transaction_metadata_history_{{.ID}}" +after update +on "{{.Bucket}}"."transactions" +for each row +when ( + new.ledger = '{{.Name}}' +) +execute procedure "{{.Bucket}}".update_transaction_metadata_history(); + +create trigger "insert_transaction_metadata_history_{{.ID}}" +after insert +on "{{.Bucket}}"."transactions" +for each row +when ( + new.ledger = '{{.Name}}' +) +execute procedure "{{.Bucket}}".insert_transaction_metadata_history(); +{{ end }} + +{{ if .HasFeature "INDEX_TRANSACTION_ACCOUNTS" "ON" }} +create index "transactions_sources_{{.ID}}" on "{{.Bucket}}".transactions using gin (sources jsonb_path_ops) where ledger = '{{.Name}}'; +create index "transactions_destinations_{{.ID}}" on "{{.Bucket}}".transactions using gin (destinations jsonb_path_ops) where ledger = '{{.Name}}'; +create trigger "transaction_set_addresses_{{.ID}}" + before insert + on "{{.Bucket}}"."transactions" + for each row + when ( + new.ledger = '{{.Name}}' + ) +execute procedure "{{.Bucket}}".set_transaction_addresses(); +{{ end }} + +{{ if .HasFeature "INDEX_ADDRESS_SEGMENTS" "ON" }} +create index "accounts_address_array_{{.ID}}" on "{{.Bucket}}".accounts using gin (address_array jsonb_ops) where ledger = '{{.Name}}'; +create index "accounts_address_array_length_{{.ID}}" on "{{.Bucket}}".accounts (jsonb_array_length(address_array)) where ledger = '{{.Name}}'; + +create trigger "accounts_set_address_array_{{.ID}}" + before insert + on "{{.Bucket}}"."accounts" + for each row + when ( + new.ledger = '{{.Name}}' + ) +execute procedure "{{.Bucket}}".set_address_array_for_account(); + +{{ if .HasFeature "INDEX_TRANSACTION_ACCOUNTS" "ON" }} +create index "transactions_sources_arrays_{{.ID}}" on "{{.Bucket}}".transactions using gin (sources_arrays jsonb_path_ops) where ledger = '{{.Name}}'; +create index "transactions_destinations_arrays_{{.ID}}" on "{{.Bucket}}".transactions using gin (destinations_arrays jsonb_path_ops) where ledger = '{{.Name}}'; + +create trigger "transaction_set_addresses_segments_{{.ID}}" + before insert + on 
"{{.Bucket}}"."transactions" + for each row + when ( + new.ledger = '{{.Name}}' + ) +execute procedure "{{.Bucket}}".set_transaction_addresses_segments(); +{{ end }} +{{ end }} +` diff --git a/internal/storage/bucket/bucket_test.go b/internal/storage/bucket/bucket_test.go new file mode 100644 index 000000000..9720cc6e1 --- /dev/null +++ b/internal/storage/bucket/bucket_test.go @@ -0,0 +1,30 @@ +//go:build it + +package bucket_test + +import ( + "github.com/formancehq/ledger/internal/storage/bucket" + "github.com/formancehq/ledger/internal/storage/driver" + "go.opentelemetry.io/otel/trace/noop" + "testing" + + "github.com/formancehq/go-libs/v2/bun/bunconnect" + + "github.com/formancehq/go-libs/v2/logging" + "github.com/google/uuid" + "github.com/stretchr/testify/require" +) + +func TestBuckets(t *testing.T) { + ctx := logging.TestingContext() + name := uuid.NewString()[:8] + + pgDatabase := srv.NewDatabase(t) + db, err := bunconnect.OpenSQLDB(ctx, pgDatabase.ConnectionOptions()) + require.NoError(t, err) + + require.NoError(t, driver.Migrate(ctx, db)) + + b := bucket.New(db, name) + require.NoError(t, b.Migrate(ctx, noop.Tracer{})) +} diff --git a/internal/storage/bucket/main_test.go b/internal/storage/bucket/main_test.go new file mode 100644 index 000000000..6984c32c5 --- /dev/null +++ b/internal/storage/bucket/main_test.go @@ -0,0 +1,24 @@ +//go:build it + +package bucket_test + +import ( + . "github.com/formancehq/go-libs/v2/testing/utils" + "testing" + + "github.com/formancehq/go-libs/v2/logging" + "github.com/formancehq/go-libs/v2/testing/docker" + "github.com/formancehq/go-libs/v2/testing/platform/pgtesting" +) + +var ( + srv *pgtesting.PostgresServer +) + +func TestMain(m *testing.M) { + WithTestMain(func(t *TestingTForMain) int { + srv = pgtesting.CreatePostgresServer(t, docker.NewPool(t, logging.Testing())) + + return m.Run() + }) +} diff --git a/internal/storage/bucket/migrations.go b/internal/storage/bucket/migrations.go new file mode 100644 index 000000000..dadd6e8e7 --- /dev/null +++ b/internal/storage/bucket/migrations.go @@ -0,0 +1,26 @@ +package bucket + +import ( + "context" + "embed" + "github.com/formancehq/go-libs/v2/migrations" + "github.com/uptrace/bun" + "go.opentelemetry.io/otel/trace" +) + +//go:embed migrations +var migrationsDir embed.FS + +func GetMigrator(name string) *migrations.Migrator { + migrator := migrations.NewMigrator(migrations.WithSchema(name, true)) + migrator.RegisterMigrationsFromFileSystem(migrationsDir, "migrations") + + return migrator +} + +func Migrate(ctx context.Context, tracer trace.Tracer, db bun.IDB, name string) error { + ctx, span := tracer.Start(ctx, "Migrate bucket") + defer span.End() + + return GetMigrator(name).Up(ctx, db) +} diff --git a/internal/storage/ledgerstore/migrations/0-init-schema.sql b/internal/storage/bucket/migrations/0-init-schema.sql similarity index 95% rename from internal/storage/ledgerstore/migrations/0-init-schema.sql rename to internal/storage/bucket/migrations/0-init-schema.sql index 68666b052..51d30bfa1 100644 --- a/internal/storage/ledgerstore/migrations/0-init-schema.sql +++ b/internal/storage/bucket/migrations/0-init-schema.sql @@ -210,7 +210,7 @@ where (_before is null or t.timestamp <= _before) and ledger = _ledger order by id desc limit 1; -$$; +$$ set search_path from current; -- a simple 'select distinct asset from moves' would be more simple -- but Postgres is extremely inefficient with distinct @@ -237,7 +237,7 @@ where asset is not null union all select null where exists(select 1 from moves where 
asset is null and ledger = _ledger) -$$; +$$ set search_path from current; create function get_latest_move_for_account_and_asset(_ledger varchar, _account_address varchar, _asset varchar, _before timestamp default null) @@ -254,7 +254,7 @@ where (_before is null or s.effective_date <= _before) and ledger = _ledger order by effective_date desc, seq desc limit 1; -$$; +$$ set search_path from current; create function upsert_account(_ledger varchar, _address varchar, _metadata jsonb, _date timestamp) returns void @@ -269,7 +269,7 @@ begin updated_at = _date where not accounts.metadata @> coalesce(_metadata, '{}'::jsonb); end; -$$; +$$ set search_path from current; create function delete_account_metadata(_ledger varchar, _address varchar, _key varchar, _date timestamp) returns void @@ -283,7 +283,7 @@ begin where address = _address and ledger = _ledger; end -$$; +$$ set search_path from current; create function update_transaction_metadata(_ledger varchar, _id numeric, _metadata jsonb, _date timestamp) returns void @@ -295,9 +295,9 @@ begin set metadata = metadata || _metadata, updated_at = _date where id = _id - and ledger = _ledger; -- todo: add fill factor on transactions table ? + and ledger = _ledger; end; -$$; +$$ set search_path from current; create function delete_transaction_metadata(_ledger varchar, _id numeric, _key varchar, _date timestamp) returns void @@ -311,7 +311,7 @@ begin where id = _id and ledger = _ledger; end; -$$; +$$ set search_path from current; create function revert_transaction(_ledger varchar, _id numeric, _date timestamp) returns void @@ -322,7 +322,7 @@ update transactions set reverted_at = _date where id = _id and ledger = _ledger; -$$; +$$ set search_path from current; create or replace function insert_move( @@ -345,13 +345,6 @@ declare _seq bigint; _account_seq bigint; begin - - -- todo: lock if we enable parallelism - -- perform * - -- from accounts - -- where address = _account_address - -- for update; - select seq from accounts where ledger = _ledger and address = _account_address into _account_seq; if _account_exists then @@ -432,7 +425,7 @@ begin and seq > _seq; end if; end; -$$; +$$ set search_path from current; create function insert_posting(_transaction_seq bigint, _ledger varchar, _insertion_date timestamp without time zone, _effective_date timestamp without time zone, posting jsonb, _account_metadata jsonb) @@ -452,7 +445,6 @@ begin perform upsert_account(_ledger, posting ->> 'destination', _account_metadata -> (posting ->> 'destination'), _insertion_date); - -- todo: sometimes the balance is known at commit time (for sources != world), we need to forward the value to populate the pre_commit_aggregated_input and output perform insert_move(_transaction_seq, _ledger, _insertion_date, _effective_date, posting ->> 'source', posting ->> 'asset', (posting ->> 'amount')::numeric, true, _source_exists); @@ -460,9 +452,8 @@ begin posting ->> 'destination', posting ->> 'asset', (posting ->> 'amount')::numeric, false, _destination_exists); end; -$$; +$$ set search_path from current; --- todo: maybe we could avoid plpgsql functions create function insert_transaction(_ledger varchar, data jsonb, _date timestamp without time zone, _account_metadata jsonb) returns void @@ -494,7 +485,6 @@ begin for posting in (select jsonb_array_elements(data -> 'postings')) loop - -- todo: sometimes the balance is known at commit time (for sources != world), we need to forward the value to populate the pre_commit_aggregated_input and output perform insert_posting(_seq, _ledger, 
_date, (data ->> 'timestamp')::timestamp without time zone, posting, _account_metadata); end loop; @@ -508,7 +498,7 @@ begin coalesce(data -> 'metadata', '{}'::jsonb)); end if; end -$$; +$$ set search_path from current; create function handle_log() returns trigger security definer @@ -552,7 +542,7 @@ begin return new; end; -$$; +$$ set search_path from current; create function update_account_metadata_history() returns trigger security definer @@ -561,15 +551,17 @@ as $$ begin insert into accounts_metadata (ledger, accounts_seq, revision, date, metadata) - values (new.ledger, new.seq, (select revision + 1 - from accounts_metadata - where accounts_metadata.accounts_seq = new.seq - order by revision desc - limit 1), new.updated_at, new.metadata); + values (new.ledger, new.seq, ( + select revision + 1 + from accounts_metadata + where accounts_metadata.accounts_seq = new.seq + order by revision desc + limit 1 + ), new.updated_at, new.metadata); return new; end; -$$; +$$ set search_path from current; create function insert_account_metadata_history() returns trigger security definer @@ -582,7 +574,7 @@ begin return new; end; -$$; +$$ set search_path from current; create function update_transaction_metadata_history() returns trigger security definer @@ -599,7 +591,7 @@ begin return new; end; -$$; +$$ set search_path from current; create function insert_transaction_metadata_history() returns trigger security definer @@ -612,7 +604,7 @@ begin return new; end; -$$; +$$ set search_path from current; create or replace function get_all_account_effective_volumes(_ledger varchar, _account varchar, _before timestamp default null) returns setof volumes_with_asset @@ -636,7 +628,7 @@ with all_assets as (select v.v as asset ) m on true) select moves.asset, moves.post_commit_effective_volumes from moves -$$; +$$ set search_path from current; create or replace function get_all_account_volumes(_ledger varchar, _account varchar, _before timestamp default null) returns setof volumes_with_asset @@ -660,7 +652,7 @@ with all_assets as (select v.v as asset ) m on true) select moves.asset, moves.post_commit_volumes from moves -$$; +$$ set search_path from current; create function volumes_to_jsonb(v volumes_with_asset) returns jsonb @@ -669,7 +661,7 @@ create function volumes_to_jsonb(v volumes_with_asset) as $$ select ('{"' || v.asset || '": {"input": ' || (v.volumes).inputs || ', "output": ' || (v.volumes).outputs || '}}')::jsonb -$$; +$$ set search_path from current; create function get_account_aggregated_effective_volumes(_ledger varchar, _account_address varchar, _before timestamp default null) @@ -680,7 +672,7 @@ as $$ select aggregate_objects(volumes_to_jsonb(volumes_with_asset)) from get_all_account_effective_volumes(_ledger, _account_address, _before := _before) volumes_with_asset -$$; +$$ set search_path from current; create function get_account_aggregated_volumes(_ledger varchar, _account_address varchar, _before timestamp default null) @@ -692,7 +684,7 @@ as $$ select aggregate_objects(volumes_to_jsonb(volumes_with_asset)) from get_all_account_volumes(_ledger, _account_address, _before := _before) volumes_with_asset -$$; +$$ set search_path from current; create function get_account_balance(_ledger varchar, _account varchar, _asset varchar, _before timestamp default null) returns numeric @@ -708,7 +700,7 @@ where (_before is null or s.effective_date <= _before) and s.ledger = _ledger order by seq desc limit 1 -$$; +$$ set search_path from current; create function aggregate_ledger_volumes( _ledger varchar, @@ 
-732,7 +724,7 @@ select v.asset, (sum((v.post_commit_effective_volumes).inputs), sum((v.post_commit_effective_volumes).outputs)) from moves v group by v.asset -$$; +$$ set search_path from current; create function get_aggregated_effective_volumes_for_transaction(_ledger varchar, tx numeric) returns jsonb stable @@ -746,7 +738,7 @@ from (select distinct on (move.account_address, move.asset) move.account_address where move.transactions_seq = tx and ledger = _ledger group by move.account_address, move.asset) data -$$; +$$ set search_path from current; create function get_aggregated_volumes_for_transaction(_ledger varchar, tx numeric) returns jsonb stable @@ -760,34 +752,34 @@ from (select distinct on (move.account_address, move.asset) move.account_address where move.transactions_seq = tx and ledger = _ledger group by move.account_address, move.asset) data -$$; +$$ set search_path from current; create trigger "insert_log" - after insert - on "logs" - for each row +after insert +on "logs" +for each row execute procedure handle_log(); create trigger "update_account" - after update - on "accounts" - for each row +after update +on "accounts" +for each row execute procedure update_account_metadata_history(); create trigger "insert_account" - after insert - on "accounts" - for each row +after insert +on "accounts" +for each row execute procedure insert_account_metadata_history(); create trigger "update_transaction" - after update - on "transactions" - for each row +after update +on "transactions" +for each row execute procedure update_transaction_metadata_history(); create trigger "insert_transaction" - after insert - on "transactions" - for each row +after insert +on "transactions" +for each row execute procedure insert_transaction_metadata_history(); \ No newline at end of file diff --git a/internal/storage/ledgerstore/migrations/1-fix-trigger.sql b/internal/storage/bucket/migrations/1-fix-trigger.sql similarity index 89% rename from internal/storage/ledgerstore/migrations/1-fix-trigger.sql rename to internal/storage/bucket/migrations/1-fix-trigger.sql index 2ee0f3767..af127449c 100644 --- a/internal/storage/ledgerstore/migrations/1-fix-trigger.sql +++ b/internal/storage/bucket/migrations/1-fix-trigger.sql @@ -20,7 +20,6 @@ begin perform upsert_account(_ledger, posting ->> 'destination', _account_metadata -> (posting ->> 'destination'), _insertion_date); - -- todo: sometimes the balance is known at commit time (for sources != world), we need to forward the value to populate the pre_commit_aggregated_input and output perform insert_move(_transaction_seq, _ledger, _insertion_date, _effective_date, posting ->> 'source', posting ->> 'asset', (posting ->> 'amount')::numeric, true, _source_exists); @@ -28,4 +27,4 @@ begin posting ->> 'destination', posting ->> 'asset', (posting ->> 'amount')::numeric, false, _destination_exists); end; -$$; \ No newline at end of file +$$ set search_path from current; \ No newline at end of file diff --git a/internal/storage/ledgerstore/migrations/10-fillfactor-on-moves.sql b/internal/storage/bucket/migrations/10-fillfactor-on-moves.sql similarity index 100% rename from internal/storage/ledgerstore/migrations/10-fillfactor-on-moves.sql rename to internal/storage/bucket/migrations/10-fillfactor-on-moves.sql diff --git a/internal/storage/bucket/migrations/11-make-stateless.sql b/internal/storage/bucket/migrations/11-make-stateless.sql new file mode 100644 index 000000000..d928c0c71 --- /dev/null +++ b/internal/storage/bucket/migrations/11-make-stateless.sql @@ -0,0 +1,514 
@@ +drop trigger insert_account on accounts; +drop trigger update_account on accounts; +drop trigger insert_transaction on transactions; +drop trigger update_transaction on transactions; +drop trigger insert_log on logs; + +alter table moves +add column transactions_id bigint, +alter column post_commit_volumes drop not null, +alter column post_commit_effective_volumes drop not null, +alter column insertion_date set default (now() at time zone 'utc'), +alter column effective_date set default (now() at time zone 'utc'), +alter column account_address_array drop not null; + +alter table moves +rename column account_address to accounts_address; + +alter table moves +rename column account_address_array to accounts_address_array; + +-- since the column `account_address` has been renamed to `accounts_address`, we need to update the function +create or replace function get_aggregated_volumes_for_transaction(_ledger varchar, tx numeric) returns jsonb + stable + language sql +as +$$ +select aggregate_objects(jsonb_build_object(data.accounts_address, data.aggregated)) +from ( + select distinct on (move.accounts_address, move.asset) + move.accounts_address, + volumes_to_jsonb((move.asset, first(move.post_commit_volumes))) as aggregated + from (select * from moves order by seq desc) move + where move.transactions_seq = tx and + ledger = _ledger + group by move.accounts_address, move.asset +) data +$$ set search_path from current; + +create or replace function get_aggregated_effective_volumes_for_transaction(_ledger varchar, tx numeric) returns jsonb + stable + language sql +as +$$ +select aggregate_objects(jsonb_build_object(data.accounts_address, data.aggregated)) +from ( + select distinct on (move.accounts_address, move.asset) + move.accounts_address, + volumes_to_jsonb((move.asset, first(move.post_commit_effective_volumes))) as aggregated + from (select * from moves order by seq desc) move + where move.transactions_seq = tx + and ledger = _ledger + group by move.accounts_address, move.asset +) data +$$ set search_path from current; + +create or replace function get_all_account_effective_volumes(_ledger varchar, _account varchar, _before timestamp default null) + returns setof volumes_with_asset + language sql + stable +as +$$ +with all_assets as (select v.v as asset + from get_all_assets(_ledger) v), + moves as (select m.* + from all_assets assets + join lateral ( + select * + from moves s + where (_before is null or s.effective_date <= _before) + and s.accounts_address = _account + and s.asset = assets.asset + and s.ledger = _ledger + order by effective_date desc, seq desc + limit 1 + ) m on true) +select moves.asset, moves.post_commit_effective_volumes +from moves +$$ set search_path from current; + +create or replace function get_all_account_volumes(_ledger varchar, _account varchar, _before timestamp default null) + returns setof volumes_with_asset + language sql + stable +as +$$ +with all_assets as (select v.v as asset + from get_all_assets(_ledger) v), + moves as (select m.* + from all_assets assets + join lateral ( + select * + from moves s + where (_before is null or s.insertion_date <= _before) + and s.accounts_address = _account + and s.asset = assets.asset + and s.ledger = _ledger + order by seq desc + limit 1 + ) m on true) +select moves.asset, moves.post_commit_volumes +from moves +$$ set search_path from current; + +-- notes(gfyrag): temporary trigger to be able to handle writes on the old schema (the code does not specify this anymore) +create or replace function set_compat_on_move() + 
returns trigger + security definer + language plpgsql +as +$$ +begin + new.transactions_seq = ( + select seq + from transactions + where id = new.transactions_id and ledger = new.ledger + ); + new.accounts_seq = ( + select seq + from accounts + where address = new.accounts_address and ledger = new.ledger + ); + new.accounts_address_array = to_json(string_to_array(new.accounts_address, ':')); + + return new; +end; +$$ set search_path from current; + +create trigger set_compat_on_move +before insert on moves +for each row +execute procedure set_compat_on_move(); + +create or replace function set_compat_on_accounts_metadata() + returns trigger + security definer + language plpgsql +as +$$ +begin + new.accounts_seq = ( + select seq + from accounts + where address = new.accounts_address and ledger = new.ledger + ); + + return new; +end; +$$ set search_path from current; + +create trigger set_compat_on_accounts_metadata +before insert on accounts_metadata +for each row +execute procedure set_compat_on_accounts_metadata(); + +create or replace function set_compat_on_transactions_metadata() + returns trigger + security definer + language plpgsql +as +$$ +begin + new.transactions_seq = ( + select seq + from transactions + where id = new.transactions_id and ledger = new.ledger + ); + + return new; +end; +$$ set search_path from current; + +create trigger set_compat_on_transactions_metadata +before insert on transactions_metadata +for each row +execute procedure set_compat_on_transactions_metadata(); + +alter table transactions +add column post_commit_volumes jsonb, +add column inserted_at timestamp without time zone default (now() at time zone 'utc'), +alter column timestamp set default (now() at time zone 'utc'), +alter column id type bigint; + +drop index transactions_reference; +create unique index transactions_reference on transactions (ledger, reference); +create index transactions_sequences on transactions (id, seq); + +alter table logs +add column memento bytea, +add column idempotency_hash bytea, +alter column hash drop not null, +alter column date set default (now() at time zone 'utc'); + +alter table accounts +alter column address_array drop not null, +alter column first_usage set default (now() at time zone 'utc'), +alter column insertion_date set default (now() at time zone 'utc'), +alter column updated_at set default (now() at time zone 'utc') +; + +create table accounts_volumes ( + ledger varchar not null, + accounts_address varchar not null, + asset varchar not null, + input numeric not null, + output numeric not null, + + primary key (ledger, accounts_address, asset) +); + +create index accounts_sequences on accounts (address, seq); + +alter table transactions_metadata +add column transactions_id bigint; + +alter table accounts_metadata +add column accounts_address varchar; + +create function set_effective_volumes() + returns trigger + security definer + language plpgsql +as +$$ +begin + new.post_commit_effective_volumes = coalesce(( + select ( + (post_commit_effective_volumes).inputs + case when new.is_source then 0 else new.amount end, + (post_commit_effective_volumes).outputs + case when new.is_source then new.amount else 0 end + ) + from moves + where accounts_address = new.accounts_address + and asset = new.asset + and ledger = new.ledger + and (effective_date < new.effective_date or (effective_date = new.effective_date and seq < new.seq)) + order by effective_date desc, seq desc + limit 1 + ), ( + case when new.is_source then 0 else new.amount end, + case when new.is_source 
then new.amount else 0 end + )); + + return new; +end; +$$ set search_path from current; + +create function update_effective_volumes() + returns trigger + security definer + language plpgsql +as +$$ +begin + update moves + set post_commit_effective_volumes = ( + (post_commit_effective_volumes).inputs + case when new.is_source then 0 else new.amount end, + (post_commit_effective_volumes).outputs + case when new.is_source then new.amount else 0 end + ) + where accounts_address = new.accounts_address + and asset = new.asset + and effective_date > new.effective_date + and ledger = new.ledger; + + return new; +end; +$$ set search_path from current; + +create or replace function update_transaction_metadata_history() returns trigger + security definer + language plpgsql +as +$$ +begin + insert into transactions_metadata (ledger, transactions_id, revision, date, metadata) + values (new.ledger, new.id, ( + select revision + 1 + from transactions_metadata + where transactions_metadata.transactions_id = new.id and transactions_metadata.ledger = new.ledger + order by revision desc + limit 1 + ), new.updated_at, new.metadata); + + return new; +end; +$$ set search_path from current; + +create or replace function insert_transaction_metadata_history() returns trigger + security definer + language plpgsql +as +$$ +begin + insert into transactions_metadata (ledger, transactions_id, revision, date, metadata) + values (new.ledger, new.id, 1, new.timestamp, new.metadata); + + return new; +end; +$$ set search_path from current; + +create or replace function update_account_metadata_history() returns trigger + security definer + language plpgsql +as +$$ +begin + insert into accounts_metadata (ledger, accounts_address, revision, date, metadata) + values (new.ledger, new.address, ( + select revision + 1 + from accounts_metadata + where accounts_metadata.accounts_address = new.address + order by revision desc + limit 1 + ), new.updated_at, new.metadata); + + return new; +end; +$$ set search_path from current; + +create or replace function insert_account_metadata_history() returns trigger + security definer + language plpgsql +as +$$ +begin + insert into accounts_metadata (ledger, accounts_address, revision, date, metadata) + values (new.ledger, new.address, 1, new.insertion_date, new.metadata); + + return new; +end; +$$ set search_path from current; + +create or replace function explode_address(_address varchar) + returns jsonb + language sql + immutable +as +$$ +select public.aggregate_objects(jsonb_build_object(data.number - 1, data.value)) +from (select row_number() over () as number, v.value + from (select unnest(string_to_array(_address, ':')) as value + union all + select null) v) data +$$ set search_path from current; + +create or replace function set_transaction_addresses() returns trigger + security definer + language plpgsql +as +$$ +begin + + new.sources = ( + select to_jsonb(array_agg(v->>'source')) as value + from jsonb_array_elements(new.postings::jsonb) v + ); + new.destinations = ( + select to_jsonb(array_agg(v->>'destination')) as value + from jsonb_array_elements(new.postings::jsonb) v + ); + + return new; +end +$$ set search_path from current; + +create or replace function set_transaction_addresses_segments() returns trigger + security definer + language plpgsql +as +$$ +begin + new.sources_arrays = ( + select to_jsonb(array_agg(explode_address(v ->> 'source'))) as value + from jsonb_array_elements(new.postings::jsonb) v + ); + new.destinations_arrays = ( + select 
to_jsonb(array_agg(explode_address(v ->> 'destination'))) as value + from jsonb_array_elements(new.postings::jsonb) v + ); + + return new; +end +$$ set search_path from current; + +create or replace function set_address_array_for_account() returns trigger + security definer + language plpgsql +as +$$ +begin + new.address_array = to_json(string_to_array(new.address, ':')); + + return new; +end +$$ set search_path from current; + +create function set_log_hash() + returns trigger + security definer + language plpgsql +as +$$ +declare + previousHash bytea; + marshalledAsJSON varchar; +begin + select hash into previousHash + from logs + where ledger = new.ledger + order by seq desc + limit 1; + + -- select only fields participating in the hash on the backend and format json representation the same way + select '{' || + '"type":"' || new.type || '",' || + '"data":' || encode(new.memento, 'escape') || ',' || + '"date":"' || (to_json(new.date::timestamp)#>>'{}') || 'Z",' || + '"idempotencyKey":"' || coalesce(new.idempotency_key, '') || '",' || + '"id":0,' || + '"hash":null' || + '}' into marshalledAsJSON; + + new.hash = ( + select public.digest( + case + when previousHash is null + then marshalledAsJSON::bytea + else '"' || encode(previousHash::bytea, 'base64')::bytea || E'"\n' || convert_to(marshalledAsJSON, 'LATIN1')::bytea + end || E'\n', 'sha256'::text + ) + ); + + return new; +end; +$$ set search_path from current; + +DO +$do$ + declare + ledger record; + vsql text; + BEGIN + for ledger in select * from _system.ledgers where bucket = current_schema loop + -- create a sequence for transactions by ledger instead of a sequence of the table as we want to have contiguous ids + -- notes: we can still have "holes" on ids since a sql transaction can be reverted after a usage of the sequence + + vsql = 'create sequence "transaction_id_' || ledger.id || '" owned by transactions.id'; + execute vsql; + + vsql = 'select setval(''"transaction_id_' || ledger.id || '"'', coalesce((select max(id) + 1 from transactions where ledger = ''' || ledger.name || '''), 1)::bigint, false)'; + raise info '%', vsql; + execute vsql; + + -- create a sequence for logs by ledger instead of a sequence of the table as we want to have contiguous ids + -- notes: we can still have "holes" on id since a sql transaction can be reverted after a usage of the sequence + vsql = 'create sequence "log_id_' || ledger.id || '" owned by logs.id'; + execute vsql; + + vsql = 'select setval(''"log_id_' || ledger.id || '"'', coalesce((select max(id) + 1 from logs where ledger = ''' || ledger.name || '''), 1)::bigint, false)'; + execute vsql; + + -- enable post commit effective volumes synchronously + vsql = 'create index "pcev_' || ledger.id || '" on moves (accounts_address, asset, effective_date desc) where ledger = ''' || ledger.name || ''''; + execute vsql; + + vsql = 'create trigger "set_effective_volumes_' || ledger.id || '" before insert on moves for each row when (new.ledger = ''' || ledger.name || ''') execute procedure set_effective_volumes()'; + execute vsql; + + vsql = 'create trigger "update_effective_volumes_' || ledger.id || '" after insert on moves for each row when (new.ledger = ''' || ledger.name || ''') execute procedure update_effective_volumes()'; + execute vsql; + + -- logs hash + vsql = 'create trigger "set_log_hash_' || ledger.id || '" before insert on logs for each row when (new.ledger = ''' || ledger.name || ''') execute procedure set_log_hash()'; + execute vsql; + + vsql = 'create trigger 
"update_account_metadata_history_' || ledger.id || '" after update on "accounts" for each row when (new.ledger = ''' || ledger.name || ''') execute procedure update_account_metadata_history()'; + execute vsql; + + vsql = 'create trigger "insert_account_metadata_history_' || ledger.id || '" after insert on "accounts" for each row when (new.ledger = ''' || ledger.name || ''') execute procedure insert_account_metadata_history()'; + execute vsql; + + vsql = 'create trigger "update_transaction_metadata_history_' || ledger.id || '" after update on "transactions" for each row when (new.ledger = ''' || ledger.name || ''') execute procedure update_transaction_metadata_history()'; + execute vsql; + + vsql = 'create trigger "insert_transaction_metadata_history_' || ledger.id || '" after insert on "transactions" for each row when (new.ledger = ''' || ledger.name || ''') execute procedure insert_transaction_metadata_history()'; + execute vsql; + + vsql = 'create index "transactions_sources_' || ledger.id || '" on transactions using gin (sources jsonb_path_ops) where ledger = ''' || ledger.name || ''''; + execute vsql; + + vsql = 'create index "transactions_destinations_' || ledger.id || '" on transactions using gin (destinations jsonb_path_ops) where ledger = ''' || ledger.name || ''''; + execute vsql; + + vsql = 'create trigger "transaction_set_addresses_' || ledger.id || '" before insert on transactions for each row when (new.ledger = ''' || ledger.name || ''') execute procedure set_transaction_addresses()'; + execute vsql; + + vsql = 'create index "accounts_address_array_' || ledger.id || '" on accounts using gin (address_array jsonb_ops) where ledger = ''' || ledger.name || ''''; + execute vsql; + + vsql = 'create index "accounts_address_array_length_' || ledger.id || '" on accounts (jsonb_array_length(address_array)) where ledger = ''' || ledger.name || ''''; + execute vsql; + + vsql = 'create trigger "accounts_set_address_array_' || ledger.id || '" before insert on accounts for each row when (new.ledger = ''' || ledger.name || ''') execute procedure set_address_array_for_account()'; + execute vsql; + + vsql = 'create index "transactions_sources_arrays_' || ledger.id || '" on transactions using gin (sources_arrays jsonb_path_ops) where ledger = ''' || ledger.name || ''''; + execute vsql; + + vsql = 'create index "transactions_destinations_arrays_' || ledger.id || '" on transactions using gin (destinations_arrays jsonb_path_ops) where ledger = ''' || ledger.name || ''''; + execute vsql; + + vsql = 'create trigger "transaction_set_addresses_segments_' || ledger.id || '" before insert on "transactions" for each row when (new.ledger = ''' || ledger.name || ''') execute procedure set_transaction_addresses_segments()'; + execute vsql; + end loop; + END +$do$; \ No newline at end of file diff --git a/internal/storage/ledgerstore/migrations/2-fix-volumes-aggregation.sql b/internal/storage/bucket/migrations/2-fix-volumes-aggregation.sql similarity index 96% rename from internal/storage/ledgerstore/migrations/2-fix-volumes-aggregation.sql rename to internal/storage/bucket/migrations/2-fix-volumes-aggregation.sql index f13754519..334ee11b7 100644 --- a/internal/storage/ledgerstore/migrations/2-fix-volumes-aggregation.sql +++ b/internal/storage/bucket/migrations/2-fix-volumes-aggregation.sql @@ -20,4 +20,4 @@ with all_assets as (select v.v as asset ) m on true) select moves.asset, moves.post_commit_volumes from moves -$$; \ No newline at end of file +$$ set search_path from current; \ No newline at end of 
file diff --git a/internal/storage/ledgerstore/migrations/3-fix-trigger-inserting-backdated-transactions.sql b/internal/storage/bucket/migrations/3-fix-trigger-inserting-backdated-transactions.sql similarity index 99% rename from internal/storage/ledgerstore/migrations/3-fix-trigger-inserting-backdated-transactions.sql rename to internal/storage/bucket/migrations/3-fix-trigger-inserting-backdated-transactions.sql index cbc196fab..668771559 100644 --- a/internal/storage/ledgerstore/migrations/3-fix-trigger-inserting-backdated-transactions.sql +++ b/internal/storage/bucket/migrations/3-fix-trigger-inserting-backdated-transactions.sql @@ -102,4 +102,4 @@ begin and seq < _seq; end if; end; -$$; \ No newline at end of file +$$ set search_path from current; \ No newline at end of file diff --git a/internal/storage/ledgerstore/migrations/4-add-account-first-usage-column.sql b/internal/storage/bucket/migrations/4-add-account-first-usage-column.sql similarity index 97% rename from internal/storage/ledgerstore/migrations/4-add-account-first-usage-column.sql rename to internal/storage/bucket/migrations/4-add-account-first-usage-column.sql index 873cdcc45..34cc49c21 100644 --- a/internal/storage/ledgerstore/migrations/4-add-account-first-usage-column.sql +++ b/internal/storage/bucket/migrations/4-add-account-first-usage-column.sql @@ -96,7 +96,7 @@ begin and effective_date > _effective_date; end if; end; -$$; +$$ set search_path from current; create or replace function upsert_account(_ledger varchar, _address varchar, _metadata jsonb, _date timestamp, _first_usage timestamp) returns void @@ -112,7 +112,7 @@ begin first_usage = case when accounts.first_usage < _first_usage then accounts.first_usage else _first_usage end where not accounts.metadata @> coalesce(_metadata, '{}'::jsonb) or accounts.first_usage > _first_usage; end; -$$; +$$ set search_path from current; create or replace function insert_posting(_transaction_seq bigint, _ledger varchar, _insertion_date timestamp without time zone, _effective_date timestamp without time zone, posting jsonb, _account_metadata jsonb) @@ -131,7 +131,6 @@ begin select true from accounts where ledger = _ledger and address = posting ->> 'destination' into _destination_exists; perform upsert_account(_ledger, posting ->> 'destination', _account_metadata -> (posting ->> 'destination'), _insertion_date, _effective_date); - -- todo: sometimes the balance is known at commit time (for sources != world), we need to forward the value to populate the pre_commit_aggregated_input and output perform insert_move(_transaction_seq, _ledger, _insertion_date, _effective_date, posting ->> 'source', posting ->> 'asset', (posting ->> 'amount')::numeric, true, _source_exists); @@ -139,7 +138,7 @@ begin posting ->> 'destination', posting ->> 'asset', (posting ->> 'amount')::numeric, false, _destination_exists); end; -$$; +$$ set search_path from current; create or replace function handle_log() returns trigger security definer @@ -184,7 +183,7 @@ begin return new; end; -$$; +$$ set search_path from current; create or replace function get_all_account_volumes(_ledger varchar, _account varchar, _before timestamp default null) returns setof volumes_with_asset @@ -208,7 +207,7 @@ with all_assets as (select v.v as asset ) m on true) select moves.asset, moves.post_commit_volumes from moves -$$; +$$ set search_path from current; drop function upsert_account(_ledger varchar, _address varchar, _metadata jsonb, _date timestamp); diff --git 
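A minimal sketch of how the relocated bucket migrations above get applied at runtime (this mirrors Driver.UpgradeBucket introduced later in this diff; db, ctx and tracer are assumed to already exist, and "_default" is the default bucket name used elsewhere in the patch):

    // Apply all pending bucket migrations for the "_default" bucket.
    // bucket.New and Migrate are the helpers added by this patch; the error wrapping is illustrative.
    if err := bucket.New(db, "_default").Migrate(ctx, tracer); err != nil {
        return fmt.Errorf("migrating bucket: %w", err)
    }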
a/internal/storage/ledgerstore/migrations/5-add-idempotency-key-index.sql b/internal/storage/bucket/migrations/5-add-idempotency-key-index.sql similarity index 100% rename from internal/storage/ledgerstore/migrations/5-add-idempotency-key-index.sql rename to internal/storage/bucket/migrations/5-add-idempotency-key-index.sql diff --git a/internal/storage/ledgerstore/migrations/6-add-reference-index.sql b/internal/storage/bucket/migrations/6-add-reference-index.sql similarity index 100% rename from internal/storage/ledgerstore/migrations/6-add-reference-index.sql rename to internal/storage/bucket/migrations/6-add-reference-index.sql diff --git a/internal/storage/ledgerstore/migrations/7-add-ik-unique-index.sql b/internal/storage/bucket/migrations/7-add-ik-unique-index.sql similarity index 100% rename from internal/storage/ledgerstore/migrations/7-add-ik-unique-index.sql rename to internal/storage/bucket/migrations/7-add-ik-unique-index.sql diff --git a/internal/storage/ledgerstore/migrations/8-ik-ledger-unique-index.sql b/internal/storage/bucket/migrations/8-ik-ledger-unique-index.sql similarity index 100% rename from internal/storage/ledgerstore/migrations/8-ik-ledger-unique-index.sql rename to internal/storage/bucket/migrations/8-ik-ledger-unique-index.sql diff --git a/internal/storage/ledgerstore/migrations/9-fix-incorrect-volumes-aggregation.sql b/internal/storage/bucket/migrations/9-fix-incorrect-volumes-aggregation.sql similarity index 94% rename from internal/storage/ledgerstore/migrations/9-fix-incorrect-volumes-aggregation.sql rename to internal/storage/bucket/migrations/9-fix-incorrect-volumes-aggregation.sql index 6b7af53cb..0731ecaef 100644 --- a/internal/storage/ledgerstore/migrations/9-fix-incorrect-volumes-aggregation.sql +++ b/internal/storage/bucket/migrations/9-fix-incorrect-volumes-aggregation.sql @@ -13,7 +13,7 @@ from ( ledger = _ledger group by move.account_address, move.asset ) data -$$; +$$ set search_path from current; create or replace function get_aggregated_effective_volumes_for_transaction(_ledger varchar, tx numeric) returns jsonb stable @@ -30,4 +30,4 @@ from ( and ledger = _ledger group by move.account_address, move.asset ) data -$$; \ No newline at end of file +$$ set search_path from current; \ No newline at end of file diff --git a/internal/storage/driver/adapters.go b/internal/storage/driver/adapters.go new file mode 100644 index 000000000..7116a4b11 --- /dev/null +++ b/internal/storage/driver/adapters.go @@ -0,0 +1,34 @@ +package driver + +import ( + "context" + ledgerstore "github.com/formancehq/ledger/internal/storage/ledger/legacy" + + ledger "github.com/formancehq/ledger/internal" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + systemcontroller "github.com/formancehq/ledger/internal/controller/system" +) + +type DefaultStorageDriverAdapter struct { + *Driver +} + +func (d *DefaultStorageDriverAdapter) OpenLedger(ctx context.Context, name string) (ledgercontroller.Store, *ledger.Ledger, error) { + store, l, err := d.Driver.OpenLedger(ctx, name) + if err != nil { + return nil, nil, err + } + + return ledgerstore.NewDefaultStoreAdapter(store), l, nil +} + +func (d *DefaultStorageDriverAdapter) CreateLedger(ctx context.Context, l *ledger.Ledger) error { + _, err := d.Driver.CreateLedger(ctx, l) + return err +} + +func NewControllerStorageDriverAdapter(d *Driver) *DefaultStorageDriverAdapter { + return &DefaultStorageDriverAdapter{Driver: d} +} + +var _ systemcontroller.Store = (*DefaultStorageDriverAdapter)(nil) diff --git 
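A minimal sketch of how the adapter above is meant to be consumed, assuming db is an already opened *bun.DB and ctx a context; the constructors are the ones defined in adapters.go, and the ledger name is illustrative:

    // Wrap the concrete driver so callers depend on the systemcontroller.Store interface,
    // as wired with fx.Annotate in module.go further down in this diff.
    adapter := driver.NewControllerStorageDriverAdapter(driver.New(db))
    var _ systemcontroller.Store = adapter // same compile-time assertion as in adapters.go

    // OpenLedger returns the legacy store adapter plus the ledger definition.
    store, l, err := adapter.OpenLedger(ctx, "some-ledger")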
a/internal/storage/driver/driver.go b/internal/storage/driver/driver.go index 577871c8f..41242f328 100644 --- a/internal/storage/driver/driver.go +++ b/internal/storage/driver/driver.go @@ -3,192 +3,196 @@ package driver import ( "context" "database/sql" - "sync" - - "github.com/formancehq/go-libs/bun/bundebug" - "github.com/formancehq/go-libs/bun/bunpaginate" - "github.com/formancehq/go-libs/metadata" - - "github.com/formancehq/go-libs/bun/bunconnect" - - "github.com/formancehq/go-libs/collectionutils" - "github.com/pkg/errors" + "fmt" + "github.com/formancehq/go-libs/v2/collectionutils" + "github.com/formancehq/go-libs/v2/metadata" + "github.com/formancehq/go-libs/v2/platform/postgres" + systemcontroller "github.com/formancehq/ledger/internal/controller/system" + "go.opentelemetry.io/otel/metric" + noopmetrics "go.opentelemetry.io/otel/metric/noop" + "go.opentelemetry.io/otel/trace" + nooptracer "go.opentelemetry.io/otel/trace/noop" + + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + + "github.com/formancehq/go-libs/v2/bun/bunpaginate" + ledger "github.com/formancehq/ledger/internal" + "github.com/formancehq/ledger/internal/storage/bucket" + ledgerstore "github.com/formancehq/ledger/internal/storage/ledger" "github.com/uptrace/bun" - "github.com/formancehq/go-libs/time" - "github.com/formancehq/ledger/internal/storage/ledgerstore" - - "github.com/formancehq/ledger/internal/storage/sqlutils" - - "github.com/formancehq/go-libs/logging" - "github.com/formancehq/ledger/internal/storage/systemstore" + "github.com/formancehq/go-libs/v2/logging" ) -const defaultBucket = "_default" - -var ( - ErrNeedUpgradeBucket = errors.New("need to upgrade bucket before add a new ledger on it") - ErrLedgerAlreadyExists = errors.New("ledger already exists") +const ( + SchemaSystem = "_system" ) -type LedgerConfiguration struct { - Bucket string `json:"bucket"` - Metadata metadata.Metadata `json:"metadata"` -} - -type LedgerState struct { - LedgerConfiguration - State string `json:"state"` -} - type Driver struct { - systemStore *systemstore.Store - lock sync.Mutex - connectionOptions bunconnect.ConnectionOptions - buckets map[string]*ledgerstore.Bucket - db *bun.DB - debug bool -} - -func (d *Driver) GetSystemStore() *systemstore.Store { - return d.systemStore + db *bun.DB + tracer trace.Tracer + meter metric.Meter } -func (d *Driver) OpenBucket(ctx context.Context, name string) (*ledgerstore.Bucket, error) { +func (d *Driver) createLedgerStore(ctx context.Context, db bun.IDB, l *ledger.Ledger) (*ledgerstore.Store, error) { - bucket, ok := d.buckets[name] - if ok { - return bucket, nil + tx, err := db.BeginTx(ctx, &sql.TxOptions{}) + if err != nil { + return nil, fmt.Errorf("begin transaction: %w", err) } - hooks := make([]bun.QueryHook, 0) - if d.debug { - hooks = append(hooks, bundebug.NewQueryHook()) + b := bucket.New(tx, l.Bucket) + if err := b.Migrate(ctx, d.tracer); err != nil { + return nil, fmt.Errorf("migrating bucket: %w", err) } - b, err := ledgerstore.ConnectToBucket(ctx, d.connectionOptions, name, hooks...) + ret, err := db.NewInsert(). + Model(l). + Ignore(). + Returning("id, added_at"). 
+ Exec(ctx) if err != nil { - return nil, err + return nil, postgres.ResolveError(err) } - d.buckets[name] = b - return b, nil -} + affected, err := ret.RowsAffected() + if err != nil { + return nil, fmt.Errorf("creating ledger: %w", err) + } + if affected == 0 { + return nil, systemcontroller.ErrLedgerAlreadyExists + } -func (d *Driver) GetLedgerStore(ctx context.Context, name string, configuration LedgerState) (*ledgerstore.Store, error) { - d.lock.Lock() - defer d.lock.Unlock() + if err := b.AddLedger(ctx, *l, tx); err != nil { + return nil, fmt.Errorf("adding ledger to bucket: %w", err) + } - bucket, err := d.OpenBucket(ctx, configuration.Bucket) - if err != nil { - return nil, err + if err := tx.Commit(); err != nil { + return nil, fmt.Errorf("committing sql transaction to create ledger and schemas: %w", err) } - return bucket.GetLedgerStore(name) + return ledgerstore.New( + d.db, + b, + *l, + ledgerstore.WithMeter(d.meter), + ledgerstore.WithTracer(d.tracer), + ), nil } -func (f *Driver) CreateLedgerStore(ctx context.Context, name string, configuration LedgerConfiguration) (*ledgerstore.Store, error) { +func (d *Driver) CreateLedger(ctx context.Context, l *ledger.Ledger) (*ledgerstore.Store, error) { - tx, err := f.db.BeginTx(ctx, &sql.TxOptions{}) + // start a transaction because we will need to create the schema and apply ledger migrations + tx, err := d.db.BeginTx(ctx, &sql.TxOptions{}) if err != nil { - return nil, err + return nil, fmt.Errorf("begin transaction: %w", err) } defer func() { _ = tx.Rollback() }() - if _, err := f.systemStore.GetLedger(ctx, name); err == nil { - return nil, ErrLedgerAlreadyExists - } else if !sqlutils.IsNotFoundError(err) { - return nil, err - } - - bucketName := defaultBucket - if configuration.Bucket != "" { - bucketName = configuration.Bucket + if l.Metadata == nil { + l.Metadata = metadata.Metadata{} } - bucket, err := f.OpenBucket(ctx, bucketName) + store, err := d.createLedgerStore(ctx, tx, l) if err != nil { - return nil, errors.Wrap(err, "opening bucket") + return nil, err } - isInitialized, err := bucket.IsInitialized(ctx) - if err != nil { - return nil, errors.Wrap(err, "checking if bucket is initialized") + if err := tx.Commit(); err != nil { + return nil, fmt.Errorf("committing sql transaction to create ledger schema: %w", err) } - if isInitialized { - isUpToDate, err := bucket.IsUpToDate(ctx) - if err != nil { - return nil, errors.Wrap(err, "checking if bucket is up to date") - } - if !isUpToDate { - return nil, ErrNeedUpgradeBucket - } - } else { - if err := ledgerstore.MigrateBucket(ctx, tx, bucketName); err != nil { - return nil, errors.Wrap(err, "migrating bucket") - } - } + return store, nil +} - store, err := bucket.GetLedgerStore(name) - if err != nil { - return nil, errors.Wrap(err, "getting ledger store") - } +func (d *Driver) OpenLedger(ctx context.Context, name string) (*ledgerstore.Store, *ledger.Ledger, error) { + ret := &ledger.Ledger{} + if err := d.db.NewSelect(). + Model(ret). + Column("*"). + Where("name = ?", name). 
+ Scan(ctx); err != nil { + return nil, nil, postgres.ResolveError(err) + } + + return ledgerstore.New( + d.db, + bucket.New(d.db, ret.Bucket), + *ret, + ledgerstore.WithMeter(d.meter), + ledgerstore.WithTracer(d.tracer), + ), ret, nil +} - _, err = systemstore.RegisterLedger(ctx, tx, &systemstore.Ledger{ - Name: name, - AddedAt: time.Now(), - Bucket: bucketName, - Metadata: configuration.Metadata, - State: systemstore.StateInitializing, - }) +func (d *Driver) Initialize(ctx context.Context) error { + logging.FromContext(ctx).Debugf("Initialize driver") + err := Migrate(ctx, d.db) if err != nil { - return nil, errors.Wrap(err, "registring ledger on system store") + return fmt.Errorf("migrating system store: %w", err) } - - return store, errors.Wrap(tx.Commit(), "committing sql transaction") + return nil } -func (d *Driver) Initialize(ctx context.Context) error { - logging.FromContext(ctx).Debugf("Initialize driver") +func (d *Driver) UpdateLedgerMetadata(ctx context.Context, name string, m metadata.Metadata) error { + _, err := d.db.NewUpdate(). + Model(&ledger.Ledger{}). + Set("metadata = metadata || ?", m). + Where("name = ?", name). + Exec(ctx) + return err +} - hooks := make([]bun.QueryHook, 0) - if d.debug { - hooks = append(hooks, bundebug.NewQueryHook()) - } +func (d *Driver) DeleteLedgerMetadata(ctx context.Context, name string, key string) error { + _, err := d.db.NewUpdate(). + Model(&ledger.Ledger{}). + Set("metadata = metadata - ?", key). + Where("name = ?", name). + Exec(ctx) + return err +} - var err error - d.db, err = bunconnect.OpenSQLDB(ctx, d.connectionOptions, hooks...) - if err != nil { - return errors.Wrap(err, "connecting to database") - } +func (d *Driver) ListLedgers(ctx context.Context, q ledgercontroller.ListLedgersQuery) (*bunpaginate.Cursor[ledger.Ledger], error) { + query := d.db.NewSelect(). + Model(&ledger.Ledger{}). + Column("*"). + Order("added_at asc") + + return bunpaginate.UsingOffset[ledgercontroller.PaginatedQueryOptions[struct{}], ledger.Ledger]( + ctx, + query, + bunpaginate.OffsetPaginatedQuery[ledgercontroller.PaginatedQueryOptions[struct{}]](q), + ) +} - if err := systemstore.Migrate(ctx, d.db); err != nil { - return errors.Wrap(err, "migrating data") +func (d *Driver) GetLedger(ctx context.Context, name string) (*ledger.Ledger, error) { + ret := &ledger.Ledger{} + if err := d.db.NewSelect(). + Model(ret). + Column("*"). + Where("name = ?", name). + Scan(ctx); err != nil { + return nil, postgres.ResolveError(err) } - d.systemStore, err = systemstore.Connect(ctx, d.connectionOptions, hooks...) 
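As a usage sketch of the metadata helpers above (it mirrors TestLedgerUpdateMetadata further down in this diff; d, ctx and the ledger name are assumptions):

    // Create a ledger, attach metadata, then read it back through the driver.
    l := ledger.MustNewWithDefault("orders")
    if _, err := d.CreateLedger(ctx, &l); err != nil {
        return err
    }
    if err := d.UpdateLedgerMetadata(ctx, l.Name, metadata.Metadata{"team": "payments"}); err != nil {
        return err
    }
    fromDB, err := d.GetLedger(ctx, l.Name)
    // fromDB.Metadata now includes {"team": "payments"}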
- if err != nil { - return errors.Wrap(err, "connecting to system store") - } + return ret, nil +} - return nil +func (d *Driver) UpgradeBucket(ctx context.Context, name string) error { + return bucket.New(d.db, name).Migrate(ctx, d.tracer) } func (d *Driver) UpgradeAllBuckets(ctx context.Context) error { - systemStore := d.GetSystemStore() - buckets := collectionutils.Set[string]{} - err := bunpaginate.Iterate(ctx, systemstore.NewListLedgersQuery(10), - func(ctx context.Context, q systemstore.ListLedgersQuery) (*bunpaginate.Cursor[systemstore.Ledger], error) { - return systemStore.ListLedgers(ctx, q) + err := bunpaginate.Iterate(ctx, ledgercontroller.NewListLedgersQuery(10), + func(ctx context.Context, q ledgercontroller.ListLedgersQuery) (*bunpaginate.Cursor[ledger.Ledger], error) { + return d.ListLedgers(ctx, q) }, - func(cursor *bunpaginate.Cursor[systemstore.Ledger]) error { - for _, name := range cursor.Data { - buckets.Put(name.Bucket) + func(cursor *bunpaginate.Cursor[ledger.Ledger]) error { + for _, l := range cursor.Data { + buckets.Put(l.Bucket) } return nil }) @@ -196,14 +200,11 @@ func (d *Driver) UpgradeAllBuckets(ctx context.Context) error { return err } - for _, bucket := range collectionutils.Keys(buckets) { - bucket, err := d.OpenBucket(ctx, bucket) - if err != nil { - return err - } + for _, bucketName := range collectionutils.Keys(buckets) { + b := bucket.New(d.db, bucketName) - logging.FromContext(ctx).Infof("Upgrading bucket '%s'", bucket.Name()) - if err := bucket.Migrate(ctx); err != nil { + logging.FromContext(ctx).Infof("Upgrading bucket '%s'", bucketName) + if err := b.Migrate(ctx, d.tracer); err != nil { return err } } @@ -211,24 +212,31 @@ func (d *Driver) UpgradeAllBuckets(ctx context.Context) error { return nil } -func (d *Driver) Close() error { - if err := d.systemStore.Close(); err != nil { - return err +func New(db *bun.DB, opts ...Option) *Driver { + ret := &Driver{ + db: db, } - for _, b := range d.buckets { - if err := b.Close(); err != nil { - return err - } + for _, opt := range append(defaultOptions, opts...) 
{ + opt(ret) } - if err := d.db.Close(); err != nil { - return err + return ret +} + +type Option func(d *Driver) + +func WithMeter(m metric.Meter) Option { + return func(d *Driver) { + d.meter = m } - return nil } -func New(connectionOptions bunconnect.ConnectionOptions) *Driver { - return &Driver{ - connectionOptions: connectionOptions, - buckets: make(map[string]*ledgerstore.Bucket), +func WithTracer(tracer trace.Tracer) Option { + return func(d *Driver) { + d.tracer = tracer } } + +var defaultOptions = []Option{ + WithMeter(noopmetrics.Meter{}), + WithTracer(nooptracer.Tracer{}), +} diff --git a/internal/storage/driver/driver_test.go b/internal/storage/driver/driver_test.go index e845dc985..62ec6b70c 100644 --- a/internal/storage/driver/driver_test.go +++ b/internal/storage/driver/driver_test.go @@ -4,102 +4,142 @@ package driver_test import ( "fmt" - "testing" - + "github.com/formancehq/go-libs/v2/bun/bunpaginate" + "github.com/formancehq/go-libs/v2/metadata" + "github.com/formancehq/go-libs/v2/pointer" + ledger "github.com/formancehq/ledger/internal" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" "github.com/formancehq/ledger/internal/storage/driver" - - "github.com/formancehq/ledger/internal/storage/sqlutils" - - "github.com/formancehq/go-libs/logging" "github.com/google/uuid" + "os" + "testing" + + "github.com/formancehq/go-libs/v2/bun/bunconnect" + "github.com/formancehq/go-libs/v2/bun/bundebug" + "github.com/formancehq/go-libs/v2/testing/docker" + "github.com/uptrace/bun" - "github.com/formancehq/ledger/internal/storage/storagetesting" + "github.com/formancehq/go-libs/v2/logging" "github.com/stretchr/testify/require" ) -func TestConfiguration(t *testing.T) { +func TestUpgradeAllLedgers(t *testing.T) { t.Parallel() - d := storagetesting.StorageDriver(t) + d := newStorageDriver(t) ctx := logging.TestingContext() - require.NoError(t, d.GetSystemStore().InsertConfiguration(ctx, "foo", "bar")) - bar, err := d.GetSystemStore().GetConfiguration(ctx, "foo") - require.NoError(t, err) - require.Equal(t, "bar", bar) -} + count := 30 -func TestConfigurationError(t *testing.T) { - t.Parallel() + for i := 0; i < count; i++ { + name := fmt.Sprintf("ledger%d", i) + _, err := d.CreateLedger(ctx, pointer.For(ledger.MustNewWithDefault(name))) + require.NoError(t, err) + } + + require.NoError(t, d.UpgradeAllBuckets(ctx)) +} - d := storagetesting.StorageDriver(t) +func TestLedgersCreate(t *testing.T) { ctx := logging.TestingContext() + driver := newStorageDriver(t) - _, err := d.GetSystemStore().GetConfiguration(ctx, "not_existing") - require.Error(t, err) - require.True(t, sqlutils.IsNotFoundError(err)) + l := ledger.MustNewWithDefault("foo") + _, err := driver.CreateLedger(ctx, &l) + require.NoError(t, err) + require.Equal(t, 1, l.ID) + require.NotEmpty(t, l.AddedAt) } -func TestErrorOnOutdatedBucket(t *testing.T) { - t.Parallel() - +func TestLedgersList(t *testing.T) { ctx := logging.TestingContext() - d := storagetesting.StorageDriver(t) + driver := newStorageDriver(t) + + ledgers := make([]ledger.Ledger, 0) + pageSize := uint64(2) + count := uint64(10) + for i := uint64(0); i < count; i++ { + m := metadata.Metadata{} + if i%2 == 0 { + m["foo"] = "bar" + } + l := ledger.MustNewWithDefault(fmt.Sprintf("ledger%d", i)).WithMetadata(m) + _, err := driver.CreateLedger(ctx, &l) + require.NoError(t, err) - name := uuid.NewString() + ledgers = append(ledgers, l) + } - b, err := d.OpenBucket(ctx, name) + cursor, err := driver.ListLedgers(ctx, 
ledgercontroller.NewListLedgersQuery(pageSize)) require.NoError(t, err) - t.Cleanup(func() { - _ = b.Close() - }) + require.Len(t, cursor.Data, int(pageSize)) + require.Equal(t, ledgers[:pageSize], cursor.Data) - upToDate, err := b.IsUpToDate(ctx) - require.NoError(t, err) - require.False(t, upToDate) -} + for i := pageSize; i < count; i += pageSize { + query := ledgercontroller.ListLedgersQuery{} + require.NoError(t, bunpaginate.UnmarshalCursor(cursor.Next, &query)) -func TestGetLedgerFromDefaultBucket(t *testing.T) { - t.Parallel() + cursor, err = driver.ListLedgers(ctx, query) + require.NoError(t, err) + require.Len(t, cursor.Data, 2) + require.Equal(t, ledgers[i:i+pageSize], cursor.Data) + } +} - d := storagetesting.StorageDriver(t) +func TestLedgerUpdateMetadata(t *testing.T) { ctx := logging.TestingContext() + storageDriver := newStorageDriver(t) - name := uuid.NewString() - _, err := d.CreateLedgerStore(ctx, name, driver.LedgerConfiguration{}) + l := ledger.MustNewWithDefault(uuid.NewString()) + _, err := storageDriver.CreateLedger(ctx, &l) require.NoError(t, err) -} -func TestGetLedgerFromAlternateBucket(t *testing.T) { - t.Parallel() + addedMetadata := metadata.Metadata{ + "foo": "bar", + } + err = storageDriver.UpdateLedgerMetadata(ctx, l.Name, addedMetadata) + require.NoError(t, err) - d := storagetesting.StorageDriver(t) - ctx := logging.TestingContext() + ledgerFromDB, err := storageDriver.GetLedger(ctx, l.Name) + require.NoError(t, err) + require.Equal(t, addedMetadata, ledgerFromDB.Metadata) +} - ledgerName := "ledger0" - bucketName := "bucket0" +func TestLedgerDeleteMetadata(t *testing.T) { + ctx := logging.TestingContext() + driver := newStorageDriver(t) - _, err := d.CreateLedgerStore(ctx, ledgerName, driver.LedgerConfiguration{ - Bucket: bucketName, + l := ledger.MustNewWithDefault(uuid.NewString()).WithMetadata(metadata.Metadata{ + "foo": "bar", }) + + _, err := driver.CreateLedger(ctx, &l) + require.NoError(t, err) + + err = driver.DeleteLedgerMetadata(ctx, l.Name, "foo") + require.NoError(t, err) + + ledgerFromDB, err := driver.GetLedger(ctx, l.Name) require.NoError(t, err) + require.Equal(t, metadata.Metadata{}, ledgerFromDB.Metadata) } -func TestUpgradeAllBuckets(t *testing.T) { - t.Parallel() +func newStorageDriver(t docker.T) *driver.Driver { + t.Helper() - d := storagetesting.StorageDriver(t) ctx := logging.TestingContext() + pgDatabase := srv.NewDatabase(t) - count := 30 - - for i := 0; i < count; i++ { - name := fmt.Sprintf("ledger%d", i) - _, err := d.CreateLedgerStore(ctx, name, driver.LedgerConfiguration{ - Bucket: name, - }) - require.NoError(t, err) + hooks := make([]bun.QueryHook, 0) + if os.Getenv("DEBUG") == "true" { + hooks = append(hooks, bundebug.NewQueryHook()) } + db, err := bunconnect.OpenSQLDB(ctx, pgDatabase.ConnectionOptions(), hooks...) 
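For paging through every ledger outside the tests, a sketch that reuses the same bunpaginate.Iterate pattern as UpgradeAllBuckets above (d and ctx assumed):

    if err := bunpaginate.Iterate(ctx, ledgercontroller.NewListLedgersQuery(10),
        func(ctx context.Context, q ledgercontroller.ListLedgersQuery) (*bunpaginate.Cursor[ledger.Ledger], error) {
            return d.ListLedgers(ctx, q)
        },
        func(cursor *bunpaginate.Cursor[ledger.Ledger]) error {
            for _, l := range cursor.Data {
                _ = l // each page of ledger.Ledger values is handled here
            }
            return nil
        },
    ); err != nil {
        return err
    }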
+ require.NoError(t, err) - require.NoError(t, d.UpgradeAllBuckets(ctx)) + d := driver.New(db) + + require.NoError(t, d.Initialize(logging.TestingContext())) + + return d } diff --git a/internal/storage/driver/main_test.go b/internal/storage/driver/main_test.go index 3bac1ed9f..017d038d0 100644 --- a/internal/storage/driver/main_test.go +++ b/internal/storage/driver/main_test.go @@ -1,20 +1,22 @@ //go:build it -package driver +package driver_test import ( "testing" - "github.com/formancehq/go-libs/testing/docker" - "github.com/formancehq/go-libs/testing/utils" + "github.com/formancehq/go-libs/v2/testing/docker" + "github.com/formancehq/go-libs/v2/testing/utils" - "github.com/formancehq/go-libs/logging" - "github.com/formancehq/go-libs/testing/platform/pgtesting" + "github.com/formancehq/go-libs/v2/logging" + "github.com/formancehq/go-libs/v2/testing/platform/pgtesting" ) +var srv *pgtesting.PostgresServer + func TestMain(m *testing.M) { utils.WithTestMain(func(t *utils.TestingTForMain) int { - pgtesting.CreatePostgresServer(t, docker.NewPool(t, logging.Testing())) + srv = pgtesting.CreatePostgresServer(t, docker.NewPool(t, logging.Testing())) return m.Run() }) diff --git a/internal/storage/driver/migrations.go b/internal/storage/driver/migrations.go new file mode 100644 index 000000000..340ab4a85 --- /dev/null +++ b/internal/storage/driver/migrations.go @@ -0,0 +1,199 @@ +package driver + +import ( + "context" + "github.com/formancehq/go-libs/v2/time" + + "github.com/formancehq/go-libs/v2/platform/postgres" + + "github.com/formancehq/go-libs/v2/migrations" + "github.com/uptrace/bun" +) + +func GetMigrator() *migrations.Migrator { + + // configuration table has been removed, we keep the model to keep migrations consistent but the table is now removed + type configuration struct { + bun.BaseModel `bun:"_system.configuration,alias:configuration"` + + Key string `bun:"key,type:varchar(255),pk"` + Value string `bun:"value,type:text"` + AddedAt time.Time `bun:"addedAt,type:timestamp"` + } + + migrator := migrations.NewMigrator(migrations.WithSchema(SchemaSystem, true)) + migrator.RegisterMigrations( + migrations.Migration{ + Name: "Init schema", + UpWithContext: func(ctx context.Context, tx bun.Tx) error { + _, err := tx.ExecContext(ctx, ` + create table ledgers ( + ledger varchar primary key, + addedat timestamp, + bucket varchar(255) + ) + `) + if err != nil { + return err + } + + _, err = tx.NewCreateTable(). + Model((*configuration)(nil)). 
+ Exec(ctx) + return postgres.ResolveError(err) + }, + }, + migrations.Migration{ + Name: "Add ledger, bucket naming constraints 63 chars", + UpWithContext: func(ctx context.Context, tx bun.Tx) error { + _, err := tx.ExecContext(ctx, ` + alter table ledgers + alter column ledger type varchar(63), + alter column bucket type varchar(63); + `) + return err + }, + }, + migrations.Migration{ + Name: "Add ledger metadata", + UpWithContext: func(ctx context.Context, tx bun.Tx) error { + _, err := tx.ExecContext(ctx, ` + alter table ledgers + add column if not exists metadata jsonb; + `) + return err + }, + }, + migrations.Migration{ + Name: "Fix empty ledger metadata", + UpWithContext: func(ctx context.Context, tx bun.Tx) error { + _, err := tx.ExecContext(ctx, ` + update ledgers + set metadata = '{}'::jsonb + where metadata is null; + `) + return err + }, + }, + migrations.Migration{ + Name: "Add ledger state", + UpWithContext: func(ctx context.Context, tx bun.Tx) error { + _, err := tx.ExecContext(ctx, ` + alter table ledgers + add column if not exists state varchar(255) default 'initializing'; + + update ledgers + set state = 'in-use' + where state = ''; + `) + return err + }, + }, + migrations.Migration{ + Name: "Add features column", + UpWithContext: func(ctx context.Context, tx bun.Tx) error { + _, err := tx.ExecContext(ctx, ` + alter table ledgers + add column if not exists features jsonb; + `) + return err + }, + }, + migrations.Migration{ + Name: "Rename ledger column to name", + UpWithContext: func(ctx context.Context, tx bun.Tx) error { + _, err := tx.ExecContext(ctx, ` + alter table ledgers + rename column ledger to name; + `) + return err + }, + }, + migrations.Migration{ + Name: "Add sequential id on ledgers", + UpWithContext: func(ctx context.Context, tx bun.Tx) error { + _, err := tx.ExecContext(ctx, ` + create sequence ledger_sequence; + + alter table ledgers + add column id bigint default nextval('ledger_sequence'); + `) + return err + }, + }, + migrations.Migration{ + Name: "Add aggregate_objects pg aggregator", + UpWithContext: func(ctx context.Context, tx bun.Tx) error { + _, err := tx.ExecContext(ctx, aggregateObjects) + return err + }, + }, + migrations.Migration{ + Name: "Remove ledger state column", + UpWithContext: func(ctx context.Context, tx bun.Tx) error { + _, err := tx.ExecContext(ctx, ` + alter table _system.ledgers + drop column state; + `) + return err + }, + }, + migrations.Migration{ + Name: "Remove configuration table", + UpWithContext: func(ctx context.Context, tx bun.Tx) error { + _, err := tx.ExecContext(ctx, ` + drop table _system.configuration; + `) + return err + }, + }, + migrations.Migration{ + Name: "Generate addedat of table ledgers", + UpWithContext: func(ctx context.Context, tx bun.Tx) error { + _, err := tx.ExecContext(ctx, ` + alter table _system.ledgers + alter column addedat type timestamp without time zone; + + alter table _system.ledgers + alter column addedat set default (now() at time zone 'utc'); + + alter table _system.ledgers + rename column addedat to added_at; + `) + return err + }, + }, + migrations.Migration{ + Name: "add pgcrypto", + UpWithContext: func(ctx context.Context, tx bun.Tx) error { + _, err := tx.ExecContext(ctx, ` + create extension if not exists pgcrypto + with schema public; + `) + return err + }, + }, + ) + + return migrator +} + +func Migrate(ctx context.Context, db bun.IDB) error { + return GetMigrator().Up(ctx, db) +} + +const aggregateObjects = ` +create or replace function public.jsonb_concat(a jsonb, b jsonb) 
returns jsonb + as 'select $1 || $2' + language sql + immutable + parallel safe +; + +create or replace aggregate public.aggregate_objects(jsonb) +( + sfunc = public.jsonb_concat, + stype = jsonb, + initcond = '{}' +); +` diff --git a/internal/storage/driver/migrations_test.go b/internal/storage/driver/migrations_test.go new file mode 100644 index 000000000..72b733f8b --- /dev/null +++ b/internal/storage/driver/migrations_test.go @@ -0,0 +1,83 @@ +//go:build it + +package driver_test + +import ( + "context" + "fmt" + "github.com/formancehq/go-libs/v2/testing/migrations" + "github.com/formancehq/ledger/internal/storage/driver" + "os" + "testing" + + "github.com/formancehq/go-libs/v2/bun/bunconnect" + "github.com/formancehq/go-libs/v2/bun/bundebug" + "github.com/formancehq/go-libs/v2/logging" + "github.com/formancehq/go-libs/v2/time" + ledger "github.com/formancehq/ledger/internal" + "github.com/stretchr/testify/require" + "github.com/uptrace/bun" +) + +func TestMigrations(t *testing.T) { + t.Parallel() + ctx := logging.TestingContext() + + pgServer := srv.NewDatabase(t) + + hooks := make([]bun.QueryHook, 0) + if os.Getenv("DEBUG") == "true" { + hooks = append(hooks, bundebug.NewQueryHook()) + } + + db, err := bunconnect.OpenSQLDB(ctx, pgServer.ConnectionOptions(), hooks...) + require.NoError(t, err) + t.Cleanup(func() { + require.NoError(t, db.Close()) + }) + + test := migrations.NewMigrationTest(t, driver.GetMigrator(), db) + test.Append(8, addIdOnLedgerTable) + test.Run() +} + +var addIdOnLedgerTable = migrations.Hook{ + Before: func(ctx context.Context, t *testing.T, db bun.IDB) { + for i := 0; i < 3; i++ { + _, err := db.NewInsert(). + Model(&map[string]any{ + "name": fmt.Sprintf("ledger%d", i), + "addedat": time.Now().Format(time.RFC3339Nano), + "bucket": ledger.DefaultBucket, + }). + TableExpr("_system.ledgers"). + Exec(ctx) + require.NoError(t, err) + } + }, + After: func(ctx context.Context, t *testing.T, db bun.IDB) { + + for i := 0; i < 3; i++ { + model := make(map[string]any) + err := db.NewSelect(). + Model(&model). + ModelTableExpr("_system.ledgers"). + Where("id = ?", fmt.Sprint(i+1)). + Scan(ctx) + require.NoError(t, err) + } + + newLedger := map[string]any{ + "name": "ledger3", + "addedat": time.Now().Format(time.RFC3339Nano), + "bucket": ledger.DefaultBucket, + } + _, err := db.NewInsert(). + Model(&newLedger). + TableExpr("_system.ledgers"). + Returning("*"). 
+ Exec(ctx) + require.NoError(t, err) + require.Equal(t, int64(4), newLedger["id"]) + }, +} diff --git a/internal/storage/driver/module.go b/internal/storage/driver/module.go index 2a07515da..b8352c34e 100644 --- a/internal/storage/driver/module.go +++ b/internal/storage/driver/module.go @@ -2,11 +2,14 @@ package driver import ( "context" + "go.opentelemetry.io/otel/metric" + "go.opentelemetry.io/otel/trace" - "github.com/formancehq/go-libs/bun/bunconnect" - "github.com/spf13/cobra" + systemcontroller "github.com/formancehq/ledger/internal/controller/system" - "github.com/formancehq/go-libs/logging" + "github.com/uptrace/bun" + + "github.com/formancehq/go-libs/v2/logging" "go.uber.org/fx" ) @@ -14,28 +17,37 @@ type PostgresConfig struct { ConnString string } -func FXModuleFromFlags(cmd *cobra.Command) fx.Option { - - options := make([]fx.Option, 0) - options = append(options, fx.Provide(func() (*bunconnect.ConnectionOptions, error) { - return bunconnect.ConnectionOptionsFromFlags(cmd) - })) - options = append(options, fx.Provide(func(connectionOptions *bunconnect.ConnectionOptions) (*Driver, error) { - return New(*connectionOptions), nil - })) +type ModuleConfiguration struct { +} - options = append(options, fx.Invoke(func(driver *Driver, lifecycle fx.Lifecycle, logger logging.Logger) error { - lifecycle.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - logger.Infof("Initializing database...") - return driver.Initialize(ctx) - }, - OnStop: func(ctx context.Context) error { - logger.Infof("Closing driver...") - return driver.Close() - }, - }) - return nil - })) - return fx.Options(options...) +func NewFXModule(autoUpgrade bool) fx.Option { + return fx.Options( + fx.Provide(func( + db *bun.DB, + tracerProvider trace.TracerProvider, + meterProvider metric.MeterProvider, + ) (*Driver, error) { + return New(db, + WithMeter(meterProvider.Meter("store")), + WithTracer(tracerProvider.Tracer("store")), + ), nil + }), + fx.Provide(fx.Annotate(NewControllerStorageDriverAdapter, fx.As(new(systemcontroller.Store)))), + fx.Invoke(func(driver *Driver, lifecycle fx.Lifecycle, logger logging.Logger) error { + lifecycle.Append(fx.Hook{ + OnStart: func(ctx context.Context) error { + logger.Infof("Initializing database...") + return driver.Initialize(ctx) + }, + }) + return nil + }), + fx.Invoke(func(lc fx.Lifecycle, driver *Driver) { + if autoUpgrade { + lc.Append(fx.Hook{ + OnStart: driver.UpgradeAllBuckets, + }) + } + }), + ) } diff --git a/internal/storage/inmemory.go b/internal/storage/inmemory.go deleted file mode 100644 index 8705fdcf6..000000000 --- a/internal/storage/inmemory.go +++ /dev/null @@ -1,139 +0,0 @@ -package storage - -import ( - "context" - "math/big" - - "github.com/formancehq/ledger/internal/storage/sqlutils" - - "github.com/formancehq/go-libs/collectionutils" - "github.com/formancehq/go-libs/metadata" - ledger "github.com/formancehq/ledger/internal" -) - -type InMemoryStore struct { - logs []*ledger.ChainedLog - transactions []*ledger.ExpandedTransaction - accounts []*ledger.Account -} - -func (m *InMemoryStore) GetTransactionByReference(ctx context.Context, ref string) (*ledger.ExpandedTransaction, error) { - filtered := collectionutils.Filter(m.transactions, func(transaction *ledger.ExpandedTransaction) bool { - return transaction.Reference == ref - }) - if len(filtered) == 0 { - return nil, sqlutils.ErrNotFound - } - return filtered[0], nil -} - -func (m *InMemoryStore) GetTransaction(ctx context.Context, txID *big.Int) (*ledger.Transaction, error) { - filtered := 
collectionutils.Filter(m.transactions, func(transaction *ledger.ExpandedTransaction) bool { - return transaction.ID.Cmp(txID) == 0 - }) - if len(filtered) == 0 { - return nil, sqlutils.ErrNotFound - } - return &filtered[0].Transaction, nil -} - -func (m *InMemoryStore) GetLastLog(ctx context.Context) (*ledger.ChainedLog, error) { - if len(m.logs) == 0 { - return nil, nil - } - return m.logs[len(m.logs)-1], nil -} - -func (m *InMemoryStore) GetBalance(ctx context.Context, address, asset string) (*big.Int, error) { - balance := new(big.Int) - - var processPostings = func(postings ledger.Postings) { - for _, posting := range postings { - if posting.Asset != asset { - continue - } - if posting.Source == address { - balance = balance.Sub(balance, posting.Amount) - } - if posting.Destination == address { - balance = balance.Add(balance, posting.Amount) - } - } - } - - for _, log := range m.logs { - switch payload := log.Data.(type) { - case ledger.NewTransactionLogPayload: - processPostings(payload.Transaction.Postings) - case ledger.RevertedTransactionLogPayload: - processPostings(payload.RevertTransaction.Postings) - } - } - return balance, nil -} - -func (m *InMemoryStore) GetAccount(ctx context.Context, address string) (*ledger.Account, error) { - account := collectionutils.Filter(m.accounts, func(account *ledger.Account) bool { - return account.Address == address - }) - if len(account) == 0 { - return &ledger.Account{ - Address: address, - Metadata: metadata.Metadata{}, - }, nil - } - return account[0], nil -} - -func (m *InMemoryStore) ReadLogWithIdempotencyKey(ctx context.Context, key string) (*ledger.ChainedLog, error) { - first := collectionutils.First(m.logs, func(log *ledger.ChainedLog) bool { - return log.IdempotencyKey == key - }) - if first == nil { - return nil, sqlutils.ErrNotFound - } - return first, nil -} - -func (m *InMemoryStore) InsertLogs(ctx context.Context, logs ...*ledger.ChainedLog) error { - - m.logs = append(m.logs, logs...) - for _, log := range logs { - switch payload := log.Data.(type) { - case ledger.NewTransactionLogPayload: - m.transactions = append(m.transactions, &ledger.ExpandedTransaction{ - Transaction: *payload.Transaction, - // TODO - PreCommitVolumes: nil, - PostCommitVolumes: nil, - }) - case ledger.RevertedTransactionLogPayload: - tx := collectionutils.Filter(m.transactions, func(transaction *ledger.ExpandedTransaction) bool { - return transaction.ID.Cmp(payload.RevertedTransactionID) == 0 - })[0] - tx.Reverted = true - m.transactions = append(m.transactions, &ledger.ExpandedTransaction{ - Transaction: *payload.RevertTransaction, - // TODO - PreCommitVolumes: nil, - PostCommitVolumes: nil, - }) - case ledger.SetMetadataLogPayload: - } - } - - return nil -} - -func (m *InMemoryStore) GetLastTransaction(ctx context.Context) (*ledger.ExpandedTransaction, error) { - if len(m.transactions) == 0 { - return nil, sqlutils.ErrNotFound - } - return m.transactions[len(m.transactions)-1], nil -} - -func NewInMemoryStore() *InMemoryStore { - return &InMemoryStore{ - logs: []*ledger.ChainedLog{}, - } -} diff --git a/internal/storage/ledger/accounts.go b/internal/storage/ledger/accounts.go new file mode 100644 index 000000000..16f30ab66 --- /dev/null +++ b/internal/storage/ledger/accounts.go @@ -0,0 +1,374 @@ +package ledger + +import ( + "context" + "database/sql" + "fmt" + . 
"github.com/formancehq/go-libs/v2/bun/bunpaginate" + "regexp" + + "github.com/formancehq/ledger/internal/tracing" + + "github.com/formancehq/go-libs/v2/metadata" + "github.com/formancehq/go-libs/v2/platform/postgres" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/trace" + + "github.com/formancehq/go-libs/v2/time" + + "github.com/formancehq/go-libs/v2/query" + ledger "github.com/formancehq/ledger/internal" + "github.com/uptrace/bun" +) + +var ( + balanceRegex = regexp.MustCompile(`balance\[(.*)]`) +) + +func convertOperatorToSQL(operator string) string { + switch operator { + case "$match": + return "=" + case "$lt": + return "<" + case "$gt": + return ">" + case "$lte": + return "<=" + case "$gte": + return ">=" + } + panic("unreachable") +} + +func (s *Store) selectBalance(date *time.Time) *bun.SelectQuery { + + if date != nil && !date.IsZero() { + sortedMoves := s.SelectDistinctMovesBySeq(date). + ColumnExpr("(post_commit_volumes).inputs - (post_commit_volumes).outputs as balance") + + return s.db.NewSelect(). + ModelTableExpr("(?) moves", sortedMoves). + ColumnExpr("accounts_address, asset, balance") + } + + return s.db.NewSelect(). + ModelTableExpr(s.GetPrefixedRelationName("accounts_volumes")). + ColumnExpr("input - output as balance") +} + +func (s *Store) selectDistinctAccountMetadataHistories(date *time.Time) *bun.SelectQuery { + ret := s.db.NewSelect(). + DistinctOn("accounts_address"). + ModelTableExpr(s.GetPrefixedRelationName("accounts_metadata")). + Where("ledger = ?", s.ledger.Name). + Column("accounts_address", "metadata"). + Order("accounts_address", "revision desc") + + if date != nil && !date.IsZero() { + ret = ret.Where("date <= ?", date) + } + + return ret +} + +func (s *Store) selectAccounts(date *time.Time, expandVolumes, expandEffectiveVolumes bool, qb query.Builder) *bun.SelectQuery { + + ret := s.db.NewSelect() + + needVolumes := expandVolumes + if qb != nil { + // Analyze filters to check for errors and find potentially additional table to load + if err := qb.Walk(func(operator, key string, value any) error { + switch { + // Balances requires pvc, force load in this case + case balanceRegex.MatchString(key): + needVolumes = true + case key == "address": + return s.validateAddressFilter(operator, value) + case key == "metadata": + if operator != "$exists" { + return ledgercontroller.NewErrInvalidQuery("'metadata' key filter can only be used with $exists") + } + case metadataRegex.MatchString(key): + if operator != "$match" { + return ledgercontroller.NewErrInvalidQuery("'metadata' key filter can only be used with $match") + } + case key == "first_usage" || key == "balance": + default: + return ledgercontroller.NewErrInvalidQuery("unknown key '%s' when building query", key) + } + + return nil + }); err != nil { + return ret.Err(fmt.Errorf("failed to check filters: %w", err)) + } + } + + // Build the query + ret = ret. + ModelTableExpr(s.GetPrefixedRelationName("accounts")). + Column("accounts.address", "accounts.first_usage"). + Where("ledger = ?", s.ledger.Name). + Order("accounts.address") + + if date != nil && !date.IsZero() { + ret = ret.Where("accounts.first_usage <= ?", date) + } + + if s.ledger.HasFeature(ledger.FeatureAccountMetadataHistory, "SYNC") && date != nil && !date.IsZero() { + ret = ret. + Join( + `left join (?) accounts_metadata on accounts_metadata.accounts_address = accounts.address`, + s.selectDistinctAccountMetadataHistories(date), + ). 
+ ColumnExpr("coalesce(accounts_metadata.metadata, '{}'::jsonb) as metadata") + } else { + ret = ret.ColumnExpr("accounts.metadata") + } + + if s.ledger.HasFeature(ledger.FeatureMovesHistory, "ON") && needVolumes { + ret = ret.Join( + `left join (?) volumes on volumes.accounts_address = accounts.address`, + s.selectAccountWithAggregatedVolumes(date, true, "volumes"), + ).Column("volumes.*") + } + + if s.ledger.HasFeature(ledger.FeatureMovesHistoryPostCommitEffectiveVolumes, "SYNC") && expandEffectiveVolumes { + ret = ret.Join( + `left join (?) effective_volumes on effective_volumes.accounts_address = accounts.address`, + s.selectAccountWithAggregatedVolumes(date, false, "effective_volumes"), + ).Column("effective_volumes.*") + } + + if qb != nil { + // Convert filters to where clause + where, args, err := qb.Build(query.ContextFn(func(key, operator string, value any) (string, []any, error) { + switch { + case key == "address": + return filterAccountAddress(value.(string), "accounts.address"), nil, nil + case key == "first_usage": + return fmt.Sprintf("first_usage %s ?", convertOperatorToSQL(operator)), []any{value}, nil + case balanceRegex.Match([]byte(key)): + match := balanceRegex.FindAllStringSubmatch(key, 2) + asset := match[0][1] + + return s.db.NewSelect(). + TableExpr( + "(?) balance", + s.selectBalance(date). + Where("asset = ? and accounts_address = accounts.address", asset), + ). + ColumnExpr(fmt.Sprintf("balance %s ?", convertOperatorToSQL(operator)), value). + String(), nil, nil + + case key == "balance": + return s.db.NewSelect(). + TableExpr( + "(?) balance", + s.selectBalance(date). + Where("accounts_address = accounts.address"), + ). + ColumnExpr(fmt.Sprintf("balance %s ?", convertOperatorToSQL(operator)), value). + String(), nil, nil + + case key == "metadata": + if s.ledger.HasFeature(ledger.FeatureAccountMetadataHistory, "SYNC") && date != nil && !date.IsZero() { + key = "accounts_metadata.metadata" + } + + return key + " -> ? is not null", []any{value}, nil + + case metadataRegex.Match([]byte(key)): + match := metadataRegex.FindAllStringSubmatch(key, 3) + if s.ledger.HasFeature(ledger.FeatureAccountMetadataHistory, "SYNC") && date != nil && !date.IsZero() { + key = "accounts_metadata.metadata" + } else { + key = "metadata" + } + + return key + " @> ?", []any{map[string]any{ + match[0][1]: value, + }}, nil + } + + panic("unreachable") + })) + if err != nil { + return ret.Err(fmt.Errorf("evaluating filters: %w", err)) + } + if len(args) > 0 { + ret = ret.Where(where, args...) 
+ } else { + ret = ret.Where(where) + } + } + + return ret +} + +func (s *Store) ListAccounts(ctx context.Context, q ledgercontroller.ListAccountsQuery) (*Cursor[ledger.Account], error) { + return tracing.TraceWithMetric( + ctx, + "ListAccounts", + s.tracer, + s.listAccountsHistogram, + func(ctx context.Context) (*Cursor[ledger.Account], error) { + ret, err := UsingOffset[ledgercontroller.PaginatedQueryOptions[ledgercontroller.PITFilterWithVolumes], ledger.Account]( + ctx, + s.selectAccounts( + q.Options.Options.PIT, + q.Options.Options.ExpandVolumes, + q.Options.Options.ExpandEffectiveVolumes, + q.Options.QueryBuilder, + ), + OffsetPaginatedQuery[ledgercontroller.PaginatedQueryOptions[ledgercontroller.PITFilterWithVolumes]](q), + ) + + if err != nil { + return nil, err + } + + return ret, nil + }, + ) +} + +func (s *Store) GetAccount(ctx context.Context, q ledgercontroller.GetAccountQuery) (*ledger.Account, error) { + return tracing.TraceWithMetric( + ctx, + "GetAccount", + s.tracer, + s.getAccountHistogram, + func(ctx context.Context) (*ledger.Account, error) { + ret := &ledger.Account{} + if err := s.selectAccounts(q.PIT, q.ExpandVolumes, q.ExpandEffectiveVolumes, nil). + Model(ret). + Where("accounts.address = ?", q.Addr). + Limit(1). + Scan(ctx); err != nil { + return nil, postgres.ResolveError(err) + } + + return ret, nil + }, + ) +} + +func (s *Store) CountAccounts(ctx context.Context, q ledgercontroller.ListAccountsQuery) (int, error) { + return tracing.TraceWithMetric( + ctx, + "CountAccounts", + s.tracer, + s.countAccountsHistogram, + func(ctx context.Context) (int, error) { + return s.db.NewSelect(). + TableExpr("(?) data", s.selectAccounts( + q.Options.Options.PIT, + q.Options.Options.ExpandVolumes, + q.Options.Options.ExpandEffectiveVolumes, + q.Options.QueryBuilder, + )). + Count(ctx) + }, + ) +} + +func (s *Store) UpdateAccountsMetadata(ctx context.Context, m map[string]metadata.Metadata) error { + _, err := tracing.TraceWithMetric( + ctx, + "UpdateAccountsMetadata", + s.tracer, + s.updateAccountsMetadataHistogram, + tracing.NoResult(func(ctx context.Context) error { + type AccountWithLedger struct { + ledger.Account `bun:",extend"` + Ledger string `bun:"ledger,type:varchar"` + } + + accounts := make([]AccountWithLedger, 0) + for account, accountMetadata := range m { + accounts = append(accounts, AccountWithLedger{ + Ledger: s.ledger.Name, + Account: ledger.Account{ + Address: account, + Metadata: accountMetadata, + }, + }) + } + + _, err := s.db.NewInsert(). + Model(&accounts). + ModelTableExpr(s.GetPrefixedRelationName("accounts")). + On("CONFLICT (ledger, address) DO UPDATE"). + Set("metadata = excluded.metadata || accounts.metadata"). + Where("not accounts.metadata @> excluded.metadata"). + Exec(ctx) + return postgres.ResolveError(err) + }), + ) + return err +} + +func (s *Store) DeleteAccountMetadata(ctx context.Context, account, key string) error { + _, err := tracing.TraceWithMetric( + ctx, + "DeleteAccountMetadata", + s.tracer, + s.deleteAccountMetadataHistogram, + tracing.NoResult(func(ctx context.Context) error { + _, err := s.db.NewUpdate(). + ModelTableExpr(s.GetPrefixedRelationName("accounts")). + Set("metadata = metadata - ?", key). + Where("address = ?", account). + Where("ledger = ?", s.ledger.Name). 
+ Exec(ctx) + return postgres.ResolveError(err) + }), + ) + return err +} + +// todo: since we update first balances of an accounts in the transaction process, we can avoid nested sql txs +// while upserting account and upsert them all in one shot +func (s *Store) UpsertAccount(ctx context.Context, account *ledger.Account) (bool, error) { + return tracing.TraceWithMetric( + ctx, + "UpsertAccount", + s.tracer, + s.upsertAccountHistogram, + func(ctx context.Context) (bool, error) { + upserted := false + err := s.db.RunInTx(ctx, &sql.TxOptions{}, func(ctx context.Context, tx bun.Tx) error { + ret, err := tx.NewInsert(). + Model(account). + ModelTableExpr(s.GetPrefixedRelationName("accounts")). + On("conflict (ledger, address) do update"). + Set("first_usage = case when ? < excluded.first_usage then ? else excluded.first_usage end", account.FirstUsage, account.FirstUsage). + Set("metadata = accounts.metadata || excluded.metadata"). + Set("updated_at = excluded.updated_at"). + Value("ledger", "?", s.ledger.Name). + Returning("*"). + Where("(? < accounts.first_usage) or not accounts.metadata @> excluded.metadata", account.FirstUsage). + Exec(ctx) + if err != nil { + return err + } + rowsModified, err := ret.RowsAffected() + if err != nil { + return err + } + upserted = rowsModified > 0 + return nil + }) + return upserted, postgres.ResolveError(err) + }, + func(ctx context.Context, upserted bool) { + trace.SpanFromContext(ctx).SetAttributes( + attribute.String("address", account.Address), + attribute.Bool("upserted", upserted), + ) + }, + ) +} diff --git a/internal/storage/ledger/accounts_test.go b/internal/storage/ledger/accounts_test.go new file mode 100644 index 000000000..5277f5fba --- /dev/null +++ b/internal/storage/ledger/accounts_test.go @@ -0,0 +1,433 @@ +//go:build it + +package ledger_test + +import ( + "context" + "math/big" + "testing" + + "errors" + "github.com/formancehq/go-libs/v2/pointer" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + + "github.com/formancehq/go-libs/v2/time" + + "github.com/formancehq/go-libs/v2/logging" + + "github.com/formancehq/go-libs/v2/metadata" + "github.com/formancehq/go-libs/v2/query" + ledger "github.com/formancehq/ledger/internal" + "github.com/stretchr/testify/require" +) + +func TestAccountsList(t *testing.T) { + t.Parallel() + store := newLedgerStore(t) + now := time.Now() + ctx := logging.TestingContext() + + err := store.CommitTransaction(ctx, pointer.For(ledger.NewTransaction(). + WithPostings(ledger.NewPosting("world", "account:1", "USD", big.NewInt(100))). + WithTimestamp(now). + WithInsertedAt(now))) + require.NoError(t, err) + + require.NoError(t, store.UpdateAccountsMetadata(ctx, map[string]metadata.Metadata{ + "account:1": { + "category": "1", + }, + "account:2": { + "category": "2", + }, + "account:3": { + "category": "3", + }, + "orders:1": { + "foo": "bar", + }, + "orders:2": { + "foo": "bar", + }, + })) + + err = store.CommitTransaction(ctx, pointer.For(ledger.NewTransaction(). + WithPostings(ledger.NewPosting("world", "account:1", "USD", big.NewInt(100))). + WithTimestamp(now.Add(4*time.Minute)). + WithInsertedAt(now.Add(100*time.Millisecond)))) + require.NoError(t, err) + + err = store.CommitTransaction(ctx, pointer.For(ledger.NewTransaction(). + WithPostings(ledger.NewPosting("account:1", "bank", "USD", big.NewInt(50))). + WithTimestamp(now.Add(3*time.Minute)). 
+ WithInsertedAt(now.Add(200*time.Millisecond)))) + require.NoError(t, err) + + err = store.CommitTransaction(ctx, pointer.For(ledger.NewTransaction(). + WithPostings(ledger.NewPosting("world", "account:1", "USD", big.NewInt(0))). + WithTimestamp(now.Add(-time.Minute)). + WithInsertedAt(now.Add(200*time.Millisecond)))) + require.NoError(t, err) + + t.Run("list all", func(t *testing.T) { + t.Parallel() + accounts, err := store.ListAccounts(ctx, ledgercontroller.NewListAccountsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}))) + require.NoError(t, err) + require.Len(t, accounts.Data, 7) + }) + + t.Run("list using metadata", func(t *testing.T) { + t.Parallel() + accounts, err := store.ListAccounts(ctx, ledgercontroller.NewListAccountsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("metadata[category]", "1")), + )) + require.NoError(t, err) + require.Len(t, accounts.Data, 1) + }) + + t.Run("list before date", func(t *testing.T) { + t.Parallel() + accounts, err := store.ListAccounts(ctx, ledgercontroller.NewListAccountsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &now, + }, + }))) + require.NoError(t, err) + require.Len(t, accounts.Data, 2) + }) + + t.Run("list with volumes", func(t *testing.T) { + t.Parallel() + + accounts, err := store.ListAccounts(ctx, ledgercontroller.NewListAccountsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + ExpandVolumes: true, + }).WithQueryBuilder(query.Match("address", "account:1")))) + require.NoError(t, err) + require.Len(t, accounts.Data, 1) + require.Equal(t, ledger.VolumesByAssets{ + "USD": ledger.NewVolumesInt64(200, 50), + }, accounts.Data[0].Volumes) + }) + + t.Run("list with volumes using PIT", func(t *testing.T) { + t.Parallel() + + accounts, err := store.ListAccounts(ctx, ledgercontroller.NewListAccountsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &now, + }, + ExpandVolumes: true, + }).WithQueryBuilder(query.Match("address", "account:1")))) + require.NoError(t, err) + require.Len(t, accounts.Data, 1) + require.Equal(t, ledger.VolumesByAssets{ + "USD": ledger.NewVolumesInt64(100, 0), + }, accounts.Data[0].Volumes) + }) + + t.Run("list with effective volumes", func(t *testing.T) { + t.Parallel() + + accounts, err := store.ListAccounts(ctx, ledgercontroller.NewListAccountsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + ExpandEffectiveVolumes: true, + }).WithQueryBuilder(query.Match("address", "account:1")))) + require.NoError(t, err) + require.Len(t, accounts.Data, 1) + require.Equal(t, ledger.VolumesByAssets{ + "USD": ledger.NewVolumesInt64(200, 50), + }, accounts.Data[0].EffectiveVolumes) + }) + + t.Run("list with effective volumes using PIT", func(t *testing.T) { + t.Parallel() + accounts, err := store.ListAccounts(ctx, ledgercontroller.NewListAccountsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &now, + }, + ExpandEffectiveVolumes: true, + }).WithQueryBuilder(query.Match("address", "account:1")))) + require.NoError(t, err) + require.Len(t, accounts.Data, 1) + require.Equal(t, ledger.VolumesByAssets{ + "USD": ledger.NewVolumesInt64(100, 0), + }, accounts.Data[0].EffectiveVolumes) + }) + + 
t.Run("list using filter on address", func(t *testing.T) { + t.Parallel() + accounts, err := store.ListAccounts(ctx, ledgercontroller.NewListAccountsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("address", "account:")), + )) + require.NoError(t, err) + require.Len(t, accounts.Data, 3) + }) + t.Run("list using filter on multiple address", func(t *testing.T) { + t.Parallel() + accounts, err := store.ListAccounts(ctx, ledgercontroller.NewListAccountsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithQueryBuilder( + query.Or( + query.Match("address", "account:1"), + query.Match("address", "orders:"), + ), + ), + )) + require.NoError(t, err) + require.Len(t, accounts.Data, 3) + }) + t.Run("list using filter on balances", func(t *testing.T) { + t.Parallel() + accounts, err := store.ListAccounts(ctx, ledgercontroller.NewListAccountsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithQueryBuilder(query.Lt("balance[USD]", 0)), + )) + require.NoError(t, err) + require.Len(t, accounts.Data, 1) // world + + accounts, err = store.ListAccounts(ctx, ledgercontroller.NewListAccountsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithQueryBuilder(query.Gt("balance[USD]", 0)), + )) + require.NoError(t, err) + require.Len(t, accounts.Data, 2) + require.Equal(t, "account:1", accounts.Data[0].Address) + require.Equal(t, "bank", accounts.Data[1].Address) + }) + t.Run("list using filter on balances[USD] and PIT", func(t *testing.T) { + t.Parallel() + accounts, err := store.ListAccounts(ctx, ledgercontroller.NewListAccountsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &now, + }, + }). + WithQueryBuilder(query.Lt("balance[USD]", 0)), + )) + require.NoError(t, err) + require.Len(t, accounts.Data, 1) // world + }) + t.Run("list using filter on balances and PIT", func(t *testing.T) { + t.Parallel() + accounts, err := store.ListAccounts(ctx, ledgercontroller.NewListAccountsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &now, + }, + }). + WithQueryBuilder(query.Lt("balance", 0)), + )) + require.NoError(t, err) + require.Len(t, accounts.Data, 1) // world + }) + + t.Run("list using filter on exists metadata", func(t *testing.T) { + t.Parallel() + accounts, err := store.ListAccounts(ctx, ledgercontroller.NewListAccountsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithQueryBuilder(query.Exists("metadata", "foo")), + )) + require.NoError(t, err) + require.Len(t, accounts.Data, 2) + + accounts, err = store.ListAccounts(ctx, ledgercontroller.NewListAccountsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithQueryBuilder(query.Exists("metadata", "category")), + )) + require.NoError(t, err) + require.Len(t, accounts.Data, 3) + }) + + t.Run("list using filter invalid field", func(t *testing.T) { + t.Parallel() + _, err := store.ListAccounts(ctx, ledgercontroller.NewListAccountsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). 
+ WithQueryBuilder(query.Lt("invalid", 0)), + )) + require.Error(t, err) + require.True(t, errors.Is(err, ledgercontroller.ErrInvalidQuery{})) + }) + + t.Run("filter on first_usage", func(t *testing.T) { + t.Parallel() + + ret, err := store.ListAccounts(ctx, ledgercontroller.NewListAccountsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithQueryBuilder(query.Lt("first_usage", now)), + )) + require.NoError(t, err) + require.Len(t, ret.Data, 2) + }) +} + +func TestAccountsUpdateMetadata(t *testing.T) { + t.Parallel() + store := newLedgerStore(t) + + m := metadata.Metadata{ + "foo": "bar", + } + ctx := logging.TestingContext() + + require.NoError(t, store.UpdateAccountsMetadata(ctx, map[string]metadata.Metadata{ + "bank": m, + })) + + account, err := store.GetAccount(context.Background(), ledgercontroller.NewGetAccountQuery("bank")) + require.NoError(t, err, "account retrieval should not fail") + + require.Equal(t, "bank", account.Address, "account address should match") + require.Equal(t, m, account.Metadata, "account metadata should match") +} + +func TestAccountsGet(t *testing.T) { + t.Parallel() + + store := newLedgerStore(t) + now := time.Now() + ctx := logging.TestingContext() + + err := store.CommitTransaction(ctx, pointer.For(ledger.NewTransaction().WithPostings( + ledger.NewPosting("world", "multi", "USD/2", big.NewInt(100)), + ).WithTimestamp(now))) + require.NoError(t, err) + + require.NoError(t, store.UpdateAccountsMetadata(ctx, map[string]metadata.Metadata{ + "multi": { + "category": "gold", + }, + })) + + err = store.CommitTransaction(ctx, pointer.For(ledger.NewTransaction().WithPostings( + ledger.NewPosting("world", "multi", "USD/2", big.NewInt(0)), + ).WithTimestamp(now.Add(-time.Minute)))) + require.NoError(t, err) + + t.Run("find account", func(t *testing.T) { + t.Parallel() + account, err := store.GetAccount(ctx, ledgercontroller.NewGetAccountQuery("multi")) + require.NoError(t, err) + require.Equal(t, ledger.Account{ + Address: "multi", + Metadata: metadata.Metadata{ + "category": "gold", + }, + FirstUsage: now.Add(-time.Minute), + }, *account) + + account, err = store.GetAccount(ctx, ledgercontroller.NewGetAccountQuery("world")) + require.NoError(t, err) + require.Equal(t, ledger.Account{ + Address: "world", + Metadata: metadata.Metadata{}, + FirstUsage: now.Add(-time.Minute), + }, *account) + }) + + t.Run("find account in past", func(t *testing.T) { + t.Parallel() + + account, err := store.GetAccount(ctx, ledgercontroller.NewGetAccountQuery("multi").WithPIT(now.Add(-30*time.Second))) + require.NoError(t, err) + require.Equal(t, ledger.Account{ + Address: "multi", + Metadata: metadata.Metadata{}, + FirstUsage: now.Add(-time.Minute), + }, *account) + }) + + t.Run("find account with volumes", func(t *testing.T) { + t.Parallel() + account, err := store.GetAccount(ctx, ledgercontroller.NewGetAccountQuery("multi"). + WithExpandVolumes()) + require.NoError(t, err) + require.Equal(t, ledger.Account{ + Address: "multi", + Metadata: metadata.Metadata{ + "category": "gold", + }, + FirstUsage: now.Add(-time.Minute), + Volumes: ledger.VolumesByAssets{ + "USD/2": ledger.NewVolumesInt64(100, 0), + }, + }, *account) + }) + + t.Run("find account with effective volumes", func(t *testing.T) { + t.Parallel() + account, err := store.GetAccount(ctx, ledgercontroller.NewGetAccountQuery("multi"). 
+ WithExpandEffectiveVolumes()) + require.NoError(t, err) + require.Equal(t, ledger.Account{ + Address: "multi", + Metadata: metadata.Metadata{ + "category": "gold", + }, + FirstUsage: now.Add(-time.Minute), + EffectiveVolumes: ledger.VolumesByAssets{ + "USD/2": ledger.NewVolumesInt64(100, 0), + }, + }, *account) + }) + + t.Run("find account using pit", func(t *testing.T) { + t.Parallel() + + account, err := store.GetAccount(ctx, ledgercontroller.NewGetAccountQuery("multi").WithPIT(now)) + require.NoError(t, err) + require.Equal(t, ledger.Account{ + Address: "multi", + Metadata: metadata.Metadata{}, + FirstUsage: now.Add(-time.Minute), + }, *account) + }) + + t.Run("not existent account", func(t *testing.T) { + t.Parallel() + + _, err := store.GetAccount(ctx, ledgercontroller.NewGetAccountQuery("account_not_existing")) + require.Error(t, err) + }) +} + +func TestAccountsCount(t *testing.T) { + t.Parallel() + + store := newLedgerStore(t) + ctx := logging.TestingContext() + + err := store.CommitTransaction(ctx, pointer.For(ledger.NewTransaction().WithPostings( + ledger.NewPosting("world", "central_bank", "USD/2", big.NewInt(100)), + ))) + require.NoError(t, err) + + countAccounts, err := store.CountAccounts(ctx, ledgercontroller.NewListAccountsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}))) + require.NoError(t, err) + require.EqualValues(t, 2, countAccounts) // world + central_bank +} + +func TestAccountsUpsert(t *testing.T) { + t.Parallel() + + store := newLedgerStore(t) + ctx := logging.TestingContext() + + account := ledger.Account{ + Address: "foo", + } + + // Initial insert + upserted, err := store.UpsertAccount(ctx, &account) + require.NoError(t, err) + require.True(t, upserted) + require.NotEmpty(t, account.FirstUsage) + require.NotEmpty(t, account.InsertionDate) + require.NotEmpty(t, account.UpdatedAt) + + now := time.Now() + + // Reset the account model + account = ledger.Account{ + Address: "foo", + // The account will be upserted on the timeline after its initial usage. 
+ // The upsert should not modify anything, but, it should retrieve and load the account entity + FirstUsage: now.Add(time.Second), + InsertionDate: now.Add(time.Second), + UpdatedAt: now.Add(time.Second), + } + + // Upsert with no modification + upserted, err = store.UpsertAccount(ctx, &account) + require.NoError(t, err) + require.False(t, upserted) +} diff --git a/internal/storage/ledger/balances.go b/internal/storage/ledger/balances.go new file mode 100644 index 000000000..566e45d76 --- /dev/null +++ b/internal/storage/ledger/balances.go @@ -0,0 +1,309 @@ +package ledger + +import ( + "context" + "fmt" + "math/big" + "strings" + + "github.com/formancehq/go-libs/v2/platform/postgres" + + "github.com/formancehq/ledger/internal/tracing" + + "github.com/formancehq/go-libs/v2/query" + "github.com/formancehq/go-libs/v2/time" + ledger "github.com/formancehq/ledger/internal" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + "github.com/uptrace/bun" +) + +func (s *Store) selectAccountWithAssetAndVolumes(date *time.Time, useInsertionDate bool, builder query.Builder) *bun.SelectQuery { + + ret := s.db.NewSelect() + var ( + needMetadata bool + needAddressSegment bool + ) + + if builder != nil { + if err := builder.Walk(func(operator string, key string, value any) error { + switch { + case key == "address": + if err := s.validateAddressFilter(operator, value); err != nil { + return err + } + if !needAddressSegment { + // Cast is safe, the type has been validated by validatedAddressFilter + needAddressSegment = isSegmentedAddress(value.(string)) + } + + case key == "metadata": + needMetadata = true + if operator != "$exists" { + return ledgercontroller.NewErrInvalidQuery("'metadata' key filter can only be used with $exists") + } + case metadataRegex.Match([]byte(key)): + needMetadata = true + if operator != "$match" { + return ledgercontroller.NewErrInvalidQuery("'account' column can only be used with $match") + } + default: + return ledgercontroller.NewErrInvalidQuery("unknown key '%s' when building query", key) + } + return nil + }); err != nil { + return ret.Err(err) + } + } + + if needAddressSegment && !s.ledger.HasFeature(ledger.FeatureIndexAddressSegments, "ON") { + return ret.Err(ledgercontroller.NewErrMissingFeature(ledger.FeatureIndexAddressSegments)) + } + + var selectAccountsWithVolumes *bun.SelectQuery + if date != nil && !date.IsZero() { + if useInsertionDate { + if !s.ledger.HasFeature(ledger.FeatureMovesHistory, "ON") { + return ret.Err(ledgercontroller.NewErrMissingFeature(ledger.FeatureMovesHistory)) + } + selectAccountsWithVolumes = s.db.NewSelect(). + TableExpr("(?) moves", s.SelectDistinctMovesBySeq(date)). + Column("asset", "accounts_address"). + ColumnExpr("post_commit_volumes as volumes") + } else { + if !s.ledger.HasFeature(ledger.FeatureMovesHistoryPostCommitEffectiveVolumes, "SYNC") { + return ret.Err(ledgercontroller.NewErrMissingFeature(ledger.FeatureMovesHistoryPostCommitEffectiveVolumes)) + } + selectAccountsWithVolumes = s.db.NewSelect(). + TableExpr("(?) moves", s.SelectDistinctMovesByEffectiveDate(date)). + Column("asset", "accounts_address"). + ColumnExpr("moves.post_commit_effective_volumes as volumes") + } + } else { + selectAccountsWithVolumes = s.db.NewSelect(). + ModelTableExpr(s.GetPrefixedRelationName("accounts_volumes")). + Column("asset", "accounts_address"). + ColumnExpr("(input, output)::"+s.GetPrefixedRelationName("volumes")+" as volumes"). 
+ Where("ledger = ?", s.ledger.Name) + } + + selectAccountsWithVolumes = s.db.NewSelect(). + ColumnExpr("*"). + TableExpr("(?) accounts_volumes", selectAccountsWithVolumes) + + if needMetadata { + if s.ledger.HasFeature(ledger.FeatureAccountMetadataHistory, "SYNC") && date != nil && !date.IsZero() { + selectAccountsWithVolumes = selectAccountsWithVolumes. + Join( + `left join (?) accounts_metadata on accounts_metadata.accounts_address = accounts_volumes.accounts_address`, + s.selectDistinctAccountMetadataHistories(date), + ) + } else { + selectAccountsWithVolumes = selectAccountsWithVolumes. + Join( + `join (?) accounts on accounts.address = accounts_volumes.accounts_address`, + s.db.NewSelect().ModelTableExpr(s.GetPrefixedRelationName("accounts")), + ) + } + } + + if needAddressSegment { + selectAccountsWithVolumes = s.db.NewSelect(). + TableExpr( + "(?) accounts", + selectAccountsWithVolumes. + Join("join "+s.GetPrefixedRelationName("accounts")+" accounts on accounts.address = accounts_volumes.accounts_address"), + ). + ColumnExpr("address, asset, volumes, metadata"). + ColumnExpr("accounts.address_array as accounts_address_array") + } + + finalQuery := s.db.NewSelect(). + TableExpr("(?) accounts", selectAccountsWithVolumes) + + if builder != nil { + where, args, err := builder.Build(query.ContextFn(func(key, _ string, value any) (string, []any, error) { + switch { + case key == "address": + return filterAccountAddress(value.(string), "accounts_address"), nil, nil + case metadataRegex.Match([]byte(key)): + match := metadataRegex.FindAllStringSubmatch(key, 3) + + return "metadata @> ?", []any{map[string]any{ + match[0][1]: value, + }}, nil + + case key == "metadata": + return "metadata -> ? is not null", []any{value}, nil + default: + return "", nil, ledgercontroller.NewErrInvalidQuery("unknown key '%s' when building query", key) + } + })) + if err != nil { + return ret.Err(fmt.Errorf("building where clause: %w", err)) + } + finalQuery = finalQuery.Where(where, args...) + } + + return finalQuery +} + +func (s *Store) selectAccountWithAggregatedVolumes(date *time.Time, useInsertionDate bool, alias string) *bun.SelectQuery { + selectAccountWithAssetAndVolumes := s.selectAccountWithAssetAndVolumes(date, useInsertionDate, nil) + return s.db.NewSelect(). + TableExpr("(?) values", selectAccountWithAssetAndVolumes). + Group("accounts_address"). + Column("accounts_address"). + ColumnExpr("aggregate_objects(json_build_object(asset, json_build_object('input', (volumes).inputs, 'output', (volumes).outputs))::jsonb) as " + alias) +} + +func (s *Store) SelectAggregatedBalances(date *time.Time, useInsertionDate bool, builder query.Builder) *bun.SelectQuery { + + selectAccountsWithVolumes := s.selectAccountWithAssetAndVolumes(date, useInsertionDate, builder) + sumVolumesForAsset := s.db.NewSelect(). + TableExpr("(?) values", selectAccountsWithVolumes). + Group("asset"). + Column("asset"). + ColumnExpr("json_build_object('input', sum(((volumes).inputs)::numeric), 'output', sum(((volumes).outputs)::numeric)) as volumes") + + return s.db.NewSelect(). + TableExpr("(?) values", sumVolumesForAsset). + ColumnExpr("aggregate_objects(json_build_object(asset, volumes)::jsonb) as aggregated") +} + +func (s *Store) GetAggregatedBalances(ctx context.Context, q ledgercontroller.GetAggregatedBalanceQuery) (ledger.BalancesByAssets, error) { + type AggregatedVolumes struct { + Aggregated ledger.VolumesByAssets `bun:"aggregated,type:jsonb"` + } + + aggregatedVolumes := AggregatedVolumes{} + if err := s.db.NewSelect(). 
+ ModelTableExpr("(?) aggregated_volumes", s.SelectAggregatedBalances(q.PIT, q.UseInsertionDate, q.QueryBuilder)). + Model(&aggregatedVolumes). + Scan(ctx); err != nil { + return nil, err + } + + return aggregatedVolumes.Aggregated.Balances(), nil +} + +func (s *Store) GetBalances(ctx context.Context, query ledgercontroller.BalanceQuery) (ledgercontroller.Balances, error) { + return tracing.TraceWithMetric( + ctx, + "GetBalances", + s.tracer, + s.getBalancesHistogram, + func(ctx context.Context) (ledgercontroller.Balances, error) { + conditions := make([]string, 0) + args := make([]any, 0) + for account, assets := range query { + for _, asset := range assets { + conditions = append(conditions, "accounts_address = ? and asset = ?") + args = append(args, account, asset) + } + } + + type AccountsVolumesWithLedger struct { + ledger.AccountsVolumes `bun:",extend"` + Ledger string `bun:"ledger,type:varchar"` + } + + accountsVolumes := make([]AccountsVolumesWithLedger, 0) + for account, assets := range query { + for _, asset := range assets { + accountsVolumes = append(accountsVolumes, AccountsVolumesWithLedger{ + Ledger: s.ledger.Name, + AccountsVolumes: ledger.AccountsVolumes{ + Account: account, + Asset: asset, + Input: new(big.Int), + Output: new(big.Int), + }, + }) + } + } + + // Try to insert volumes using last move (to keep compat with previous version) or 0 values. + // This way, if the account has a 0 balance at this point, it will be locked as any other accounts. + // If the complete sql transaction fails, the account volumes will not be inserted. + selectMoves := s.db.NewSelect(). + ModelTableExpr(s.GetPrefixedRelationName("moves")). + DistinctOn("accounts_address, asset"). + Column("accounts_address", "asset"). + ColumnExpr("first_value(post_commit_volumes) over (partition by accounts_address, asset order by seq desc) as post_commit_volumes"). + ColumnExpr("first_value(ledger) over (partition by accounts_address, asset order by seq desc) as ledger"). + Where("("+strings.Join(conditions, ") OR (")+")", args...) + + zeroValuesAndMoves := s.db.NewSelect(). + TableExpr("(?) data", selectMoves). + Column("ledger", "accounts_address", "asset"). + ColumnExpr("(post_commit_volumes).inputs as input"). + ColumnExpr("(post_commit_volumes).outputs as output"). + UnionAll( + s.db.NewSelect(). + TableExpr( + "(?) data", + s.db.NewSelect().NewValues(&accountsVolumes), + ). + Column("*"), + ) + + zeroValueOrMoves := s.db.NewSelect(). + TableExpr("(?) data", zeroValuesAndMoves). + Column("ledger", "accounts_address", "asset", "input", "output"). + DistinctOn("ledger, accounts_address, asset") + + insertDefaultValue := s.db.NewInsert(). + TableExpr(s.GetPrefixedRelationName("accounts_volumes")). + TableExpr("(" + zeroValueOrMoves.String() + ") data"). + On("conflict (ledger, accounts_address, asset) do nothing"). + Returning("ledger, accounts_address, asset, input, output") + + selectExistingValues := s.db.NewSelect(). + ModelTableExpr(s.GetPrefixedRelationName("accounts_volumes")). + Column("ledger", "accounts_address", "asset", "input", "output"). + Where("("+strings.Join(conditions, ") OR (")+")", args...). + For("update"). + // notes(gfyrag): Keep order, it ensures consistent locking order and limit deadlocks + Order("accounts_address", "asset") + + finalQuery := s.db.NewSelect(). + With("inserted", insertDefaultValue). + With("existing", selectExistingValues). + ModelTableExpr( + "(?) accounts_volumes", + s.db.NewSelect(). + ModelTableExpr("inserted"). 
+ UnionAll(s.db.NewSelect().ModelTableExpr("existing")), + ). + Model(&accountsVolumes) + + err := finalQuery.Scan(ctx) + if err != nil { + return nil, postgres.ResolveError(err) + } + + ret := ledgercontroller.Balances{} + for _, volumes := range accountsVolumes { + if _, ok := ret[volumes.Account]; !ok { + ret[volumes.Account] = map[string]*big.Int{} + } + ret[volumes.Account][volumes.Asset] = new(big.Int).Sub(volumes.Input, volumes.Output) + } + + // Fill empty balances with 0 value + for account, assets := range query { + if _, ok := ret[account]; !ok { + ret[account] = map[string]*big.Int{} + } + for _, asset := range assets { + if _, ok := ret[account][asset]; !ok { + ret[account][asset] = big.NewInt(0) + } + } + } + + return ret, nil + }, + ) +} diff --git a/internal/storage/ledger/balances_test.go b/internal/storage/ledger/balances_test.go new file mode 100644 index 000000000..3096e8952 --- /dev/null +++ b/internal/storage/ledger/balances_test.go @@ -0,0 +1,326 @@ +//go:build it + +package ledger_test + +import ( + "database/sql" + "github.com/formancehq/go-libs/v2/bun/bunpaginate" + "math/big" + "testing" + + "github.com/formancehq/go-libs/v2/metadata" + "github.com/formancehq/go-libs/v2/time" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + + "github.com/formancehq/go-libs/v2/logging" + "github.com/formancehq/go-libs/v2/pointer" + + libtime "time" + + "github.com/formancehq/go-libs/v2/query" + ledger "github.com/formancehq/ledger/internal" + "github.com/stretchr/testify/require" +) + +func TestBalancesGet(t *testing.T) { + t.Parallel() + store := newLedgerStore(t) + ctx := logging.TestingContext() + + world := &ledger.Account{ + Address: "world", + InsertionDate: time.Now(), + UpdatedAt: time.Now(), + FirstUsage: time.Now(), + } + _, err := store.UpsertAccount(ctx, world) + require.NoError(t, err) + + _, err = store.UpdateVolumes(ctx, ledger.AccountsVolumes{ + Account: "world", + Asset: "USD", + Input: new(big.Int), + Output: big.NewInt(100), + }) + require.NoError(t, err) + + t.Run("check concurrent access on same balance", func(t *testing.T) { + t.Parallel() + + tx1, err := store.GetDB().BeginTx(ctx, &sql.TxOptions{}) + require.NoError(t, err) + t.Cleanup(func() { + _ = tx1.Rollback() + }) + store1 := store.WithDB(tx1) + + tx2, err := store.GetDB().BeginTx(ctx, &sql.TxOptions{}) + require.NoError(t, err) + t.Cleanup(func() { + _ = tx2.Rollback() + }) + store2 := store.WithDB(tx2) + + bq := ledgercontroller.BalanceQuery{ + "world": []string{"USD"}, + } + + balances, err := store1.GetBalances(ctx, bq) + require.NoError(t, err) + require.Len(t, balances, 1) + + getBalancesAccepted := make(chan struct{}) + go func() { + _, err := store2.GetBalances(ctx, bq) + require.NoError(t, err) + close(getBalancesAccepted) + }() + + select { + case <-libtime.After(500 * time.Millisecond): + // notes(gfyrag): Wait for 500ms to ensure the parallel tx does not have the ability to update balances + // of the already taken accounts. + // 500ms seems ok. I need to find another way to not relying on time, it's brittle. 
+ case <-getBalancesAccepted: + t.Fatalf("parallel tx should not have been blocked") + } + + require.NoError(t, tx1.Commit()) + + select { + case <-libtime.After(100 * time.Millisecond): + t.Fatalf("parallel tx should have been unlocked") + case <-getBalancesAccepted: + } + }) + + t.Run("balance query with empty balance", func(t *testing.T) { + + tx, err := store.GetDB().BeginTx(ctx, &sql.TxOptions{}) + require.NoError(t, err) + t.Cleanup(func() { + require.NoError(t, tx.Rollback()) + }) + + store := store.WithDB(tx) + + count, err := store.GetDB().NewSelect(). + ModelTableExpr(store.GetPrefixedRelationName("accounts_volumes")). + Count(ctx) + require.NoError(t, err) + require.Equal(t, 1, count) + + balances, err := store.GetBalances(ctx, ledgercontroller.BalanceQuery{ + "world": {"USD"}, + "not-existing": {"USD"}, + }) + require.NoError(t, err) + require.Len(t, balances, 2) + require.NotNil(t, balances["world"]) + require.NotNil(t, balances["not-existing"]) + + require.Equal(t, big.NewInt(-100), balances["world"]["USD"]) + require.Equal(t, big.NewInt(0), balances["not-existing"]["USD"]) + + count, err = store.GetDB().NewSelect(). + ModelTableExpr(store.GetPrefixedRelationName("accounts_volumes")). + Count(ctx) + require.NoError(t, err) + require.Equal(t, 2, count) + }) + + t.Run("with balance from move", func(t *testing.T) { + t.Parallel() + + tx := ledger.NewTransaction().WithPostings( + ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), + ) + err := store.InsertTransaction(ctx, &tx) + require.NoError(t, err) + + bankAccount := ledger.Account{ + Address: "bank", + FirstUsage: tx.InsertedAt, + InsertionDate: tx.InsertedAt, + UpdatedAt: tx.InsertedAt, + } + _, err = store.UpsertAccount(ctx, &bankAccount) + require.NoError(t, err) + + err = store.InsertMoves(ctx, &ledger.Move{ + TransactionID: tx.ID, + IsSource: false, + Account: "bank", + Amount: (*bunpaginate.BigInt)(big.NewInt(100)), + Asset: "USD", + InsertionDate: tx.InsertedAt, + EffectiveDate: tx.InsertedAt, + PostCommitVolumes: pointer.For(ledger.NewVolumesInt64(100, 0)), + }) + require.NoError(t, err) + + balances, err := store.GetBalances(ctx, ledgercontroller.BalanceQuery{ + "bank": {"USD"}, + }) + require.NoError(t, err) + + require.NotNil(t, balances["bank"]) + RequireEqual(t, big.NewInt(100), balances["bank"]["USD"]) + + // Check a new line has been inserted into accounts_volumes table + volumes := &ledger.AccountsVolumes{} + err = store.GetDB().NewSelect(). + ModelTableExpr(store.GetPrefixedRelationName("accounts_volumes")). + Where("accounts_address = ?", "bank"). + Scan(ctx, volumes) + require.NoError(t, err) + + RequireEqual(t, big.NewInt(100), volumes.Input) + RequireEqual(t, big.NewInt(0), volumes.Output) + }) +} + +func TestBalancesAggregates(t *testing.T) { + t.Parallel() + + store := newLedgerStore(t) + now := time.Now() + ctx := logging.TestingContext() + + bigInt, _ := big.NewInt(0).SetString("1000", 10) + smallInt := big.NewInt(100) + + tx1 := ledger.NewTransaction(). + WithPostings( + ledger.NewPosting("world", "users:1", "USD", bigInt), + ledger.NewPosting("world", "users:2", "USD", smallInt), + ). + WithTimestamp(now). + WithInsertedAt(now) + err := store.CommitTransaction(ctx, &tx1) + require.NoError(t, err) + + tx2 := ledger.NewTransaction(). + WithPostings( + ledger.NewPosting("world", "users:1", "USD", bigInt), + ledger.NewPosting("world", "users:2", "USD", smallInt), + ledger.NewPosting("world", "xxx", "EUR", smallInt), + ). + WithTimestamp(now.Add(-time.Minute)). 
+ WithInsertedAt(now.Add(time.Minute)) + err = store.CommitTransaction(ctx, &tx2) + require.NoError(t, err) + + require.NoError(t, store.UpdateAccountsMetadata(ctx, map[string]metadata.Metadata{ + "users:1": { + "category": "premium", + }, + "users:2": { + "category": "premium", + }, + })) + + require.NoError(t, store.DeleteAccountMetadata(ctx, "users:2", "category")) + + require.NoError(t, store.UpdateAccountsMetadata(ctx, map[string]metadata.Metadata{ + "users:1": { + "category": "premium", + }, + "users:2": { + "category": "2", + }, + "world": { + "world": "bar", + }, + })) + + t.Run("aggregate on all", func(t *testing.T) { + t.Parallel() + + cursor, err := store.GetAggregatedBalances(ctx, ledgercontroller.NewGetAggregatedBalancesQuery(ledgercontroller.PITFilter{}, nil, false)) + require.NoError(t, err) + RequireEqual(t, ledger.BalancesByAssets{ + "USD": big.NewInt(0), + "EUR": big.NewInt(0), + }, cursor) + }) + t.Run("filter on address", func(t *testing.T) { + t.Parallel() + ret, err := store.GetAggregatedBalances(ctx, ledgercontroller.NewGetAggregatedBalancesQuery(ledgercontroller.PITFilter{}, + query.Match("address", "users:"), false)) + require.NoError(t, err) + require.Equal(t, ledger.BalancesByAssets{ + "USD": big.NewInt(0).Add( + big.NewInt(0).Mul(bigInt, big.NewInt(2)), + big.NewInt(0).Mul(smallInt, big.NewInt(2)), + ), + }, ret) + }) + t.Run("using pit on effective date", func(t *testing.T) { + t.Parallel() + ret, err := store.GetAggregatedBalances(ctx, ledgercontroller.NewGetAggregatedBalancesQuery(ledgercontroller.PITFilter{ + PIT: pointer.For(now.Add(-time.Second)), + }, query.Match("address", "users:"), false)) + require.NoError(t, err) + require.Equal(t, ledger.BalancesByAssets{ + "USD": big.NewInt(0).Add( + bigInt, + smallInt, + ), + }, ret) + }) + t.Run("using pit on insertion date", func(t *testing.T) { + t.Parallel() + ret, err := store.GetAggregatedBalances(ctx, ledgercontroller.NewGetAggregatedBalancesQuery(ledgercontroller.PITFilter{ + PIT: pointer.For(now), + }, query.Match("address", "users:"), true)) + require.NoError(t, err) + require.Equal(t, ledger.BalancesByAssets{ + "USD": big.NewInt(0).Add( + bigInt, + smallInt, + ), + }, ret) + }) + t.Run("using a metadata and pit", func(t *testing.T) { + t.Parallel() + ret, err := store.GetAggregatedBalances(ctx, ledgercontroller.NewGetAggregatedBalancesQuery(ledgercontroller.PITFilter{ + PIT: pointer.For(now.Add(time.Minute)), + }, query.Match("metadata[category]", "premium"), false)) + require.NoError(t, err) + require.Equal(t, ledger.BalancesByAssets{ + "USD": big.NewInt(0).Add( + big.NewInt(0).Mul(bigInt, big.NewInt(2)), + big.NewInt(0), + ), + }, ret) + }) + t.Run("using a metadata without pit", func(t *testing.T) { + t.Parallel() + ret, err := store.GetAggregatedBalances(ctx, ledgercontroller.NewGetAggregatedBalancesQuery(ledgercontroller.PITFilter{}, + query.Match("metadata[category]", "premium"), false)) + require.NoError(t, err) + require.Equal(t, ledger.BalancesByAssets{ + "USD": big.NewInt(0).Mul(bigInt, big.NewInt(2)), + }, ret) + }) + t.Run("when no matching", func(t *testing.T) { + t.Parallel() + ret, err := store.GetAggregatedBalances(ctx, ledgercontroller.NewGetAggregatedBalancesQuery(ledgercontroller.PITFilter{}, + query.Match("metadata[category]", "guest"), false)) + require.NoError(t, err) + require.Equal(t, ledger.BalancesByAssets{}, ret) + }) + + t.Run("using a filter exist on metadata", func(t *testing.T) { + t.Parallel() + ret, err := store.GetAggregatedBalances(ctx, 
ledgercontroller.NewGetAggregatedBalancesQuery(ledgercontroller.PITFilter{}, query.Exists("metadata", "category"), false)) + require.NoError(t, err) + require.Equal(t, ledger.BalancesByAssets{ + "USD": big.NewInt(0).Add( + big.NewInt(0).Mul(bigInt, big.NewInt(2)), + big.NewInt(0).Mul(smallInt, big.NewInt(2)), + ), + }, ret) + }) +} diff --git a/internal/storage/ledger/debug.go b/internal/storage/ledger/debug.go new file mode 100644 index 000000000..4cb9c4689 --- /dev/null +++ b/internal/storage/ledger/debug.go @@ -0,0 +1,42 @@ +package ledger + +import ( + "context" + "database/sql" + "fmt" + "github.com/shomali11/xsql" + "github.com/uptrace/bun" +) + +//nolint:unused +func (s *Store) DumpTables(ctx context.Context, tables ...string) { + for _, table := range tables { + s.DumpQuery( + ctx, + s.db.NewSelect(). + ModelTableExpr(s.GetPrefixedRelationName(table)), + ) + } +} + +//nolint:unused +func (s *Store) DumpQuery(ctx context.Context, query *bun.SelectQuery) { + fmt.Println(query) + rows, err := query.Rows(ctx) + if err != nil { + panic(err) + } + s.DumpRows(rows) +} + +//nolint:unused +func (s *Store) DumpRows(rows *sql.Rows) { + data, err := xsql.Pretty(rows) + if err != nil { + panic(err) + } + fmt.Println(data) + if err := rows.Close(); err != nil { + panic(err) + } +} diff --git a/internal/storage/ledger/errors.go b/internal/storage/ledger/errors.go new file mode 100644 index 000000000..e8a53ec7f --- /dev/null +++ b/internal/storage/ledger/errors.go @@ -0,0 +1,11 @@ +package ledger + +import ( + "errors" +) + +var ( + ErrBucketAlreadyExists = errors.New("bucket already exists") + ErrStoreAlreadyExists = errors.New("store already exists") + ErrStoreNotFound = errors.New("store not found") +) diff --git a/internal/storage/ledgerstore/accounts.go b/internal/storage/ledger/legacy/accounts.go similarity index 53% rename from internal/storage/ledgerstore/accounts.go rename to internal/storage/ledger/legacy/accounts.go index c45ced519..aa9004697 100644 --- a/internal/storage/ledgerstore/accounts.go +++ b/internal/storage/ledger/legacy/accounts.go @@ -1,30 +1,27 @@ -package ledgerstore +package legacy import ( "context" "errors" "fmt" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" "regexp" - "github.com/formancehq/go-libs/time" + "github.com/formancehq/go-libs/v2/bun/bunpaginate" - "github.com/formancehq/go-libs/bun/bunpaginate" - - storageerrors "github.com/formancehq/ledger/internal/storage/sqlutils" - - "github.com/formancehq/go-libs/pointer" - "github.com/formancehq/go-libs/query" + "github.com/formancehq/go-libs/v2/query" ledger "github.com/formancehq/ledger/internal" "github.com/uptrace/bun" ) -func (store *Store) buildAccountQuery(q PITFilterWithVolumes, query *bun.SelectQuery) *bun.SelectQuery { +func (store *Store) buildAccountQuery(q ledgercontroller.PITFilterWithVolumes, query *bun.SelectQuery) *bun.SelectQuery { query = query. Column("accounts.address", "accounts.first_usage"). Where("accounts.ledger = ?", store.name). Apply(filterPIT(q.PIT, "first_usage")). - Order("accounts.address") + Order("accounts.address"). + ModelTableExpr(store.GetPrefixedRelationName("accounts")) if q.PIT != nil && !q.PIT.IsZero() { query = query. @@ -33,34 +30,34 @@ func (store *Store) buildAccountQuery(q PITFilterWithVolumes, query *bun.SelectQ Join(` left join lateral ( select metadata, accounts_seq - from accounts_metadata + from `+store.GetPrefixedRelationName("accounts_metadata")+` where accounts_metadata.accounts_seq = accounts.seq and accounts_metadata.date < ? 
order by revision desc limit 1 ) accounts_metadata on true `, q.PIT) } else { - query = query.Column("metadata") + query = query.ColumnExpr("accounts.metadata") } if q.ExpandVolumes { query = query. ColumnExpr("volumes.*"). - Join("join get_account_aggregated_volumes(?, accounts.address, ?) volumes on true", store.name, q.PIT) + Join(`join `+store.GetPrefixedRelationName("get_account_aggregated_volumes")+`(?, accounts.address, ?) volumes on true`, store.name, q.PIT) } if q.ExpandEffectiveVolumes { query = query. ColumnExpr("effective_volumes.*"). - Join("join get_account_aggregated_effective_volumes(?, accounts.address, ?) effective_volumes on true", store.name, q.PIT) + Join(`join `+store.GetPrefixedRelationName("get_account_aggregated_effective_volumes")+`(?, accounts.address, ?) effective_volumes on true`, store.name, q.PIT) } return query } -func (store *Store) accountQueryContext(qb query.Builder, q GetAccountsQuery) (string, []any, error) { - metadataRegex := regexp.MustCompile("metadata\\[(.+)\\]") - balanceRegex := regexp.MustCompile("balance\\[(.*)\\]") +func (store *Store) accountQueryContext(qb query.Builder, q ledgercontroller.ListAccountsQuery) (string, []any, error) { + metadataRegex := regexp.MustCompile(`metadata\[(.+)]`) + balanceRegex := regexp.MustCompile(`balance\[(.*)]`) return qb.Build(query.ContextFn(func(key, operator string, value any) (string, []any, error) { convertOperatorToSQL := func() string { @@ -80,7 +77,6 @@ func (store *Store) accountQueryContext(qb query.Builder, q GetAccountsQuery) (s } switch { case key == "address": - // TODO: Should allow comparison operator only if segments not used if operator != "$match" { return "", nil, errors.New("'address' column can only be used with $match") } @@ -108,17 +104,17 @@ func (store *Store) accountQueryContext(qb query.Builder, q GetAccountsQuery) (s match := balanceRegex.FindAllStringSubmatch(key, 2) return fmt.Sprintf(`( - select balance_from_volumes(post_commit_volumes) - from moves - where asset = ? and account_address = accounts.address and ledger = ? + select `+store.GetPrefixedRelationName("balance_from_volumes")+`(post_commit_volumes) + from `+store.GetPrefixedRelationName("moves")+` + where asset = ? and accounts_address = accounts.address and ledger = ? order by seq desc limit 1 ) %s ?`, convertOperatorToSQL()), []any{match[0][1], store.name, value}, nil case key == "balance": return fmt.Sprintf(`( - select balance_from_volumes(post_commit_volumes) - from moves - where account_address = accounts.address and ledger = ? + select `+store.GetPrefixedRelationName("balance_from_volumes")+`(post_commit_volumes) + from `+store.GetPrefixedRelationName("moves")+` + where accounts_address = accounts.address and ledger = ? 
order by seq desc limit 1 ) %s ?`, convertOperatorToSQL()), []any{store.name, value}, nil @@ -138,7 +134,7 @@ func (store *Store) accountQueryContext(qb query.Builder, q GetAccountsQuery) (s })) } -func (store *Store) buildAccountListQuery(selectQuery *bun.SelectQuery, q GetAccountsQuery, where string, args []any) *bun.SelectQuery { +func (store *Store) buildAccountListQuery(selectQuery *bun.SelectQuery, q ledgercontroller.ListAccountsQuery, where string, args []any) *bun.SelectQuery { selectQuery = store.buildAccountQuery(q.Options.Options, selectQuery) if where != "" { @@ -148,7 +144,7 @@ func (store *Store) buildAccountListQuery(selectQuery *bun.SelectQuery, q GetAcc return selectQuery } -func (store *Store) GetAccountsWithVolumes(ctx context.Context, q GetAccountsQuery) (*bunpaginate.Cursor[ledger.ExpandedAccount], error) { +func (store *Store) GetAccountsWithVolumes(ctx context.Context, q ledgercontroller.ListAccountsQuery) (*bunpaginate.Cursor[ledger.Account], error) { var ( where string args []any @@ -161,38 +157,16 @@ func (store *Store) GetAccountsWithVolumes(ctx context.Context, q GetAccountsQue } } - return paginateWithOffset[PaginatedQueryOptions[PITFilterWithVolumes], ledger.ExpandedAccount](store, ctx, - (*bunpaginate.OffsetPaginatedQuery[PaginatedQueryOptions[PITFilterWithVolumes]])(&q), + return paginateWithOffset[ledgercontroller.PaginatedQueryOptions[ledgercontroller.PITFilterWithVolumes], ledger.Account](store, ctx, + (*bunpaginate.OffsetPaginatedQuery[ledgercontroller.PaginatedQueryOptions[ledgercontroller.PITFilterWithVolumes]])(&q), func(query *bun.SelectQuery) *bun.SelectQuery { return store.buildAccountListQuery(query, q, where, args) }, ) } -func (store *Store) GetAccount(ctx context.Context, address string) (*ledger.Account, error) { - account, err := fetch[*ledger.Account](store, false, ctx, func(query *bun.SelectQuery) *bun.SelectQuery { - return query. - ColumnExpr("accounts.address"). - ColumnExpr("coalesce(accounts_metadata.metadata, '{}'::jsonb) as metadata"). - ColumnExpr("accounts.first_usage"). - Table("accounts"). - Join("left join accounts_metadata on accounts_metadata.accounts_seq = accounts.seq"). - Where("accounts.address = ?", address). - Where("accounts.ledger = ?", store.name). - Order("revision desc"). - Limit(1) - }) - if err != nil { - if storageerrors.IsNotFoundError(err) { - return pointer.For(ledger.NewAccount(address)), nil - } - return nil, err - } - return account, nil -} - -func (store *Store) GetAccountWithVolumes(ctx context.Context, q GetAccountQuery) (*ledger.ExpandedAccount, error) { - account, err := fetch[*ledger.ExpandedAccount](store, true, ctx, func(query *bun.SelectQuery) *bun.SelectQuery { +func (store *Store) GetAccountWithVolumes(ctx context.Context, q ledgercontroller.GetAccountQuery) (*ledger.Account, error) { + account, err := fetch[*ledger.Account](store, true, ctx, func(query *bun.SelectQuery) *bun.SelectQuery { query = store.buildAccountQuery(q.PITFilterWithVolumes, query). Where("accounts.address = ?", q.Addr). 
Limit(1) @@ -205,7 +179,7 @@ func (store *Store) GetAccountWithVolumes(ctx context.Context, q GetAccountQuery return account, nil } -func (store *Store) CountAccounts(ctx context.Context, q GetAccountsQuery) (int, error) { +func (store *Store) CountAccounts(ctx context.Context, q ledgercontroller.ListAccountsQuery) (int, error) { var ( where string args []any @@ -222,54 +196,3 @@ func (store *Store) CountAccounts(ctx context.Context, q GetAccountsQuery) (int, return store.buildAccountListQuery(query, q, where, args) }) } - -type GetAccountQuery struct { - PITFilterWithVolumes - Addr string -} - -func (q GetAccountQuery) WithPIT(pit time.Time) GetAccountQuery { - q.PIT = &pit - - return q -} - -func (q GetAccountQuery) WithExpandVolumes() GetAccountQuery { - q.ExpandVolumes = true - - return q -} - -func (q GetAccountQuery) WithExpandEffectiveVolumes() GetAccountQuery { - q.ExpandEffectiveVolumes = true - - return q -} - -func NewGetAccountQuery(addr string) GetAccountQuery { - return GetAccountQuery{ - Addr: addr, - } -} - -type GetAccountsQuery bunpaginate.OffsetPaginatedQuery[PaginatedQueryOptions[PITFilterWithVolumes]] - -func (q GetAccountsQuery) WithExpandVolumes() GetAccountsQuery { - q.Options.Options.ExpandVolumes = true - - return q -} - -func (q GetAccountsQuery) WithExpandEffectiveVolumes() GetAccountsQuery { - q.Options.Options.ExpandEffectiveVolumes = true - - return q -} - -func NewGetAccountsQuery(opts PaginatedQueryOptions[PITFilterWithVolumes]) GetAccountsQuery { - return GetAccountsQuery{ - PageSize: opts.PageSize, - Order: bunpaginate.OrderAsc, - Options: opts, - } -} diff --git a/internal/storage/ledger/legacy/accounts_test.go b/internal/storage/ledger/legacy/accounts_test.go new file mode 100644 index 000000000..653bcdf7d --- /dev/null +++ b/internal/storage/ledger/legacy/accounts_test.go @@ -0,0 +1,378 @@ +//go:build it + +package legacy_test + +import ( + "github.com/formancehq/go-libs/v2/pointer" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + "github.com/formancehq/ledger/internal/storage/ledger/legacy" + "math/big" + "testing" + + "github.com/formancehq/go-libs/v2/time" + + "github.com/formancehq/go-libs/v2/logging" + + "github.com/formancehq/go-libs/v2/metadata" + "github.com/formancehq/go-libs/v2/query" + ledger "github.com/formancehq/ledger/internal" + "github.com/stretchr/testify/require" +) + +func TestGetAccounts(t *testing.T) { + t.Parallel() + store := newLedgerStore(t) + now := time.Now() + ctx := logging.TestingContext() + + err := store.newStore.CommitTransaction(ctx, pointer.For(ledger.NewTransaction(). + WithPostings(ledger.NewPosting("world", "account:1", "USD", big.NewInt(100))). + WithTimestamp(now). + WithInsertedAt(now))) + require.NoError(t, err) + + require.NoError(t, store.newStore.UpdateAccountsMetadata(ctx, map[string]metadata.Metadata{ + "account:1": { + "category": "1", + }, + "account:2": { + "category": "2", + }, + "account:3": { + "category": "3", + }, + "orders:1": { + "foo": "bar", + }, + "orders:2": { + "foo": "bar", + }, + })) + + err = store.newStore.CommitTransaction(ctx, pointer.For(ledger.NewTransaction(). + WithPostings(ledger.NewPosting("world", "account:1", "USD", big.NewInt(100))). + WithTimestamp(now.Add(4*time.Minute)). + WithInsertedAt(now.Add(100*time.Millisecond)))) + require.NoError(t, err) + + err = store.newStore.CommitTransaction(ctx, pointer.For(ledger.NewTransaction(). + WithPostings(ledger.NewPosting("account:1", "bank", "USD", big.NewInt(50))). 
+ WithTimestamp(now.Add(3*time.Minute)). + WithInsertedAt(now.Add(200*time.Millisecond)))) + require.NoError(t, err) + + err = store.newStore.CommitTransaction(ctx, pointer.For(ledger.NewTransaction(). + WithPostings(ledger.NewPosting("world", "account:1", "USD", big.NewInt(0))). + WithTimestamp(now.Add(-time.Minute)). + WithInsertedAt(now.Add(200*time.Millisecond)))) + require.NoError(t, err) + + //require.NoError(t, store.InsertLogs(ctx, + // ledger.ChainLogs( + // ledger.NewTransactionLog( + // ledger.NewTransaction(). + // WithPostings(ledger.NewPosting("world", "account:1", "USD", big.NewInt(100))). + // WithDate(now), + // map[string]metadata.Metadata{ + // "account:1": { + // "category": "4", + // }, + // }, + // ).WithDate(now), + // ledger.NewSetMetadataOnAccountLog(time.Now(), "account:1", metadata.Metadata{"category": "1"}).WithDate(now.Add(time.Minute)), + // ledger.NewSetMetadataOnAccountLog(time.Now(), "account:2", metadata.Metadata{"category": "2"}).WithDate(now.Add(2*time.Minute)), + // ledger.NewSetMetadataOnAccountLog(time.Now(), "account:3", metadata.Metadata{"category": "3"}).WithDate(now.Add(3*time.Minute)), + // ledger.NewSetMetadataOnAccountLog(time.Now(), "orders:1", metadata.Metadata{"foo": "bar"}).WithDate(now.Add(3*time.Minute)), + // ledger.NewSetMetadataOnAccountLog(time.Now(), "orders:2", metadata.Metadata{"foo": "bar"}).WithDate(now.Add(3*time.Minute)), + // ledger.NewTransactionLog( + // ledger.NewTransaction(). + // WithPostings(ledger.NewPosting("world", "account:1", "USD", big.NewInt(100))). + // WithIDUint64(1). + // WithDate(now.Add(4*time.Minute)), + // map[string]metadata.Metadata{}, + // ).WithDate(now.Add(100*time.Millisecond)), + // ledger.NewTransactionLog( + // ledger.NewTransaction(). + // WithPostings(ledger.NewPosting("account:1", "bank", "USD", big.NewInt(50))). + // WithDate(now.Add(3*time.Minute)). + // WithIDUint64(2), + // map[string]metadata.Metadata{}, + // ).WithDate(now.Add(200*time.Millisecond)), + // ledger.NewTransactionLog( + // ledger.NewTransaction(). + // WithPostings(ledger.NewPosting("world", "account:1", "USD", big.NewInt(0))). + // WithDate(now.Add(-time.Minute)). + // WithIDUint64(3), + // map[string]metadata.Metadata{}, + // ).WithDate(now.Add(200*time.Millisecond)), + // )..., + //)) + + t.Run("list all", func(t *testing.T) { + t.Parallel() + accounts, err := store.GetAccountsWithVolumes(ctx, ledgercontroller.NewListAccountsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}))) + require.NoError(t, err) + require.Len(t, accounts.Data, 7) + }) + + t.Run("list using metadata", func(t *testing.T) { + t.Parallel() + accounts, err := store.GetAccountsWithVolumes(ctx, ledgercontroller.NewListAccountsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). 
+ WithQueryBuilder(query.Match("metadata[category]", "1")), + )) + require.NoError(t, err) + require.Len(t, accounts.Data, 1) + }) + + t.Run("list before date", func(t *testing.T) { + t.Parallel() + accounts, err := store.GetAccountsWithVolumes(ctx, ledgercontroller.NewListAccountsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &now, + }, + }))) + require.NoError(t, err) + require.Len(t, accounts.Data, 2) + }) + + t.Run("list with volumes", func(t *testing.T) { + t.Parallel() + + accounts, err := store.GetAccountsWithVolumes(ctx, ledgercontroller.NewListAccountsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + ExpandVolumes: true, + }).WithQueryBuilder(query.Match("address", "account:1")))) + require.NoError(t, err) + require.Len(t, accounts.Data, 1) + require.Equal(t, ledger.VolumesByAssets{ + "USD": ledger.NewVolumesInt64(200, 50), + }, accounts.Data[0].Volumes) + }) + + t.Run("list with volumes using PIT", func(t *testing.T) { + t.Parallel() + + accounts, err := store.GetAccountsWithVolumes(ctx, ledgercontroller.NewListAccountsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &now, + }, + ExpandVolumes: true, + }).WithQueryBuilder(query.Match("address", "account:1")))) + require.NoError(t, err) + require.Len(t, accounts.Data, 1) + require.Equal(t, ledger.VolumesByAssets{ + "USD": ledger.NewVolumesInt64(100, 0), + }, accounts.Data[0].Volumes) + }) + + t.Run("list with effective volumes", func(t *testing.T) { + t.Parallel() + accounts, err := store.GetAccountsWithVolumes(ctx, ledgercontroller.NewListAccountsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + ExpandEffectiveVolumes: true, + }).WithQueryBuilder(query.Match("address", "account:1")))) + require.NoError(t, err) + require.Len(t, accounts.Data, 1) + require.Equal(t, ledger.VolumesByAssets{ + "USD": ledger.NewVolumesInt64(200, 50), + }, accounts.Data[0].EffectiveVolumes) + }) + + t.Run("list with effective volumes using PIT", func(t *testing.T) { + t.Parallel() + accounts, err := store.GetAccountsWithVolumes(ctx, ledgercontroller.NewListAccountsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &now, + }, + ExpandEffectiveVolumes: true, + }).WithQueryBuilder(query.Match("address", "account:1")))) + require.NoError(t, err) + require.Len(t, accounts.Data, 1) + require.Equal(t, ledger.VolumesByAssets{ + "USD": ledger.NewVolumesInt64(100, 0), + }, accounts.Data[0].EffectiveVolumes) + }) + + t.Run("list using filter on address", func(t *testing.T) { + t.Parallel() + accounts, err := store.GetAccountsWithVolumes(ctx, ledgercontroller.NewListAccountsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("address", "account:")), + )) + require.NoError(t, err) + require.Len(t, accounts.Data, 3) + }) + t.Run("list using filter on multiple address", func(t *testing.T) { + t.Parallel() + accounts, err := store.GetAccountsWithVolumes(ctx, ledgercontroller.NewListAccountsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). 
+ WithQueryBuilder( + query.Or( + query.Match("address", "account:1"), + query.Match("address", "orders:"), + ), + ), + )) + require.NoError(t, err) + require.Len(t, accounts.Data, 3) + }) + t.Run("list using filter on balances", func(t *testing.T) { + t.Parallel() + accounts, err := store.GetAccountsWithVolumes(ctx, ledgercontroller.NewListAccountsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithQueryBuilder(query.Lt("balance[USD]", 0)), + )) + require.NoError(t, err) + require.Len(t, accounts.Data, 1) // world + + accounts, err = store.GetAccountsWithVolumes(ctx, ledgercontroller.NewListAccountsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithQueryBuilder(query.Gt("balance[USD]", 0)), + )) + require.NoError(t, err) + require.Len(t, accounts.Data, 2) + require.Equal(t, "account:1", accounts.Data[0].Address) + require.Equal(t, "bank", accounts.Data[1].Address) + }) + + t.Run("list using filter on exists metadata", func(t *testing.T) { + t.Parallel() + accounts, err := store.GetAccountsWithVolumes(ctx, ledgercontroller.NewListAccountsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithQueryBuilder(query.Exists("metadata", "foo")), + )) + require.NoError(t, err) + require.Len(t, accounts.Data, 2) + + accounts, err = store.GetAccountsWithVolumes(ctx, ledgercontroller.NewListAccountsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithQueryBuilder(query.Exists("metadata", "category")), + )) + require.NoError(t, err) + require.Len(t, accounts.Data, 3) + }) + + t.Run("list using filter invalid field", func(t *testing.T) { + t.Parallel() + _, err := store.GetAccountsWithVolumes(ctx, ledgercontroller.NewListAccountsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). 
+ WithQueryBuilder(query.Lt("invalid", 0)), + )) + require.Error(t, err) + require.True(t, legacy.IsErrInvalidQuery(err)) + }) +} + +func TestGetAccount(t *testing.T) { + t.Parallel() + store := newLedgerStore(t) + now := time.Now() + ctx := logging.TestingContext() + + err := store.newStore.CommitTransaction(ctx, pointer.For(ledger.NewTransaction().WithPostings( + ledger.NewPosting("world", "multi", "USD/2", big.NewInt(100)), + ).WithTimestamp(now))) + require.NoError(t, err) + + require.NoError(t, store.newStore.UpdateAccountsMetadata(ctx, map[string]metadata.Metadata{ + "multi": { + "category": "gold", + }, + })) + + err = store.newStore.CommitTransaction(ctx, pointer.For(ledger.NewTransaction().WithPostings( + ledger.NewPosting("world", "multi", "USD/2", big.NewInt(0)), + ).WithTimestamp(now.Add(-time.Minute)))) + require.NoError(t, err) + + t.Run("find account", func(t *testing.T) { + t.Parallel() + account, err := store.GetAccountWithVolumes(ctx, ledgercontroller.NewGetAccountQuery("multi")) + require.NoError(t, err) + require.Equal(t, ledger.Account{ + Address: "multi", + Metadata: metadata.Metadata{ + "category": "gold", + }, + FirstUsage: now.Add(-time.Minute), + }, *account) + + account, err = store.GetAccountWithVolumes(ctx, ledgercontroller.NewGetAccountQuery("world")) + require.NoError(t, err) + require.Equal(t, ledger.Account{ + Address: "world", + Metadata: metadata.Metadata{}, + FirstUsage: now.Add(-time.Minute), + }, *account) + }) + + t.Run("find account in past", func(t *testing.T) { + t.Parallel() + account, err := store.GetAccountWithVolumes(ctx, ledgercontroller.NewGetAccountQuery("multi").WithPIT(now.Add(-30*time.Second))) + require.NoError(t, err) + require.Equal(t, ledger.Account{ + Address: "multi", + Metadata: metadata.Metadata{}, + FirstUsage: now.Add(-time.Minute), + }, *account) + }) + + t.Run("find account with volumes", func(t *testing.T) { + t.Parallel() + account, err := store.GetAccountWithVolumes(ctx, ledgercontroller.NewGetAccountQuery("multi"). + WithExpandVolumes()) + require.NoError(t, err) + require.Equal(t, ledger.Account{ + Address: "multi", + Metadata: metadata.Metadata{ + "category": "gold", + }, + FirstUsage: now.Add(-time.Minute), + Volumes: ledger.VolumesByAssets{ + "USD/2": ledger.NewVolumesInt64(100, 0), + }, + }, *account) + }) + + t.Run("find account with effective volumes", func(t *testing.T) { + t.Parallel() + account, err := store.GetAccountWithVolumes(ctx, ledgercontroller.NewGetAccountQuery("multi"). 
+ WithExpandEffectiveVolumes()) + require.NoError(t, err) + require.Equal(t, ledger.Account{ + Address: "multi", + Metadata: metadata.Metadata{ + "category": "gold", + }, + FirstUsage: now.Add(-time.Minute), + + EffectiveVolumes: ledger.VolumesByAssets{ + "USD/2": ledger.NewVolumesInt64(100, 0), + }, + }, *account) + }) + + t.Run("find account using pit", func(t *testing.T) { + t.Parallel() + account, err := store.GetAccountWithVolumes(ctx, ledgercontroller.NewGetAccountQuery("multi").WithPIT(now)) + require.NoError(t, err) + require.Equal(t, ledger.Account{ + Address: "multi", + Metadata: metadata.Metadata{}, + FirstUsage: now.Add(-time.Minute), + }, *account) + }) + + t.Run("not existent account", func(t *testing.T) { + t.Parallel() + _, err := store.GetAccountWithVolumes(ctx, ledgercontroller.NewGetAccountQuery("account_not_existing")) + require.Error(t, err) + }) + +} + +func TestCountAccounts(t *testing.T) { + t.Parallel() + store := newLedgerStore(t) + ctx := logging.TestingContext() + + err := store.newStore.CommitTransaction(ctx, pointer.For(ledger.NewTransaction().WithPostings( + ledger.NewPosting("world", "central_bank", "USD/2", big.NewInt(100)), + ))) + require.NoError(t, err) + + countAccounts, err := store.CountAccounts(ctx, ledgercontroller.NewListAccountsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}))) + require.NoError(t, err) + require.EqualValues(t, 2, countAccounts) // world + central_bank +} diff --git a/internal/storage/ledger/legacy/adapters.go b/internal/storage/ledger/legacy/adapters.go new file mode 100644 index 000000000..3d94a53f9 --- /dev/null +++ b/internal/storage/ledger/legacy/adapters.go @@ -0,0 +1,159 @@ +package legacy + +import ( + "context" + "database/sql" + "github.com/formancehq/go-libs/v2/bun/bunpaginate" + "github.com/formancehq/go-libs/v2/metadata" + "github.com/formancehq/go-libs/v2/migrations" + ledger "github.com/formancehq/ledger/internal" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + ledgerstore "github.com/formancehq/ledger/internal/storage/ledger" + "github.com/uptrace/bun" +) + +type TX struct { + newStore *ledgerstore.Store + legacyStore *Store + sqlTX bun.Tx +} + +func (tx TX) GetAccount(ctx context.Context, query ledgercontroller.GetAccountQuery) (*ledger.Account, error) { + return tx.legacyStore.GetAccountWithVolumes(ctx, query) +} + +func (tx TX) GetBalances(ctx context.Context, query ledgercontroller.BalanceQuery) (ledgercontroller.Balances, error) { + return tx.newStore.GetBalances(ctx, query) +} + +func (tx TX) CommitTransaction(ctx context.Context, transaction *ledger.Transaction) error { + return tx.newStore.CommitTransaction(ctx, transaction) +} + +func (tx TX) RevertTransaction(ctx context.Context, id int) (*ledger.Transaction, bool, error) { + return tx.newStore.RevertTransaction(ctx, id) +} + +func (tx TX) UpdateTransactionMetadata(ctx context.Context, transactionID int, m metadata.Metadata) (*ledger.Transaction, bool, error) { + return tx.newStore.UpdateTransactionMetadata(ctx, transactionID, m) +} + +func (tx TX) DeleteTransactionMetadata(ctx context.Context, transactionID int, key string) (*ledger.Transaction, bool, error) { + return tx.newStore.DeleteTransactionMetadata(ctx, transactionID, key) +} + +func (tx TX) UpdateAccountsMetadata(ctx context.Context, m map[string]metadata.Metadata) error { + return tx.newStore.UpdateAccountsMetadata(ctx, m) +} + +func (tx TX) UpsertAccount(ctx context.Context, account *ledger.Account) (bool, error) { + return 
tx.newStore.UpsertAccount(ctx, account) +} + +func (tx TX) DeleteAccountMetadata(ctx context.Context, address, key string) error { + return tx.newStore.DeleteAccountMetadata(ctx, address, key) +} + +func (tx TX) InsertLog(ctx context.Context, log *ledger.Log) error { + return tx.newStore.InsertLog(ctx, log) +} + +func (tx TX) LockLedger(ctx context.Context) error { + return tx.newStore.LockLedger(ctx) +} + +func (tx TX) ListLogs(ctx context.Context, q ledgercontroller.GetLogsQuery) (*bunpaginate.Cursor[ledger.Log], error) { + return tx.legacyStore.GetLogs(ctx, q) +} + +type DefaultStoreAdapter struct { + newStore *ledgerstore.Store + legacyStore *Store +} + +func (d *DefaultStoreAdapter) GetDB() bun.IDB { + return d.newStore.GetDB() +} + +func (d *DefaultStoreAdapter) ListLogs(ctx context.Context, q ledgercontroller.GetLogsQuery) (*bunpaginate.Cursor[ledger.Log], error) { + return d.legacyStore.GetLogs(ctx, q) +} + +func (d *DefaultStoreAdapter) ReadLogWithIdempotencyKey(ctx context.Context, ik string) (*ledger.Log, error) { + return d.newStore.ReadLogWithIdempotencyKey(ctx, ik) +} + +func (d *DefaultStoreAdapter) ListTransactions(ctx context.Context, q ledgercontroller.ListTransactionsQuery) (*bunpaginate.Cursor[ledger.Transaction], error) { + return d.legacyStore.GetTransactions(ctx, q) +} + +func (d *DefaultStoreAdapter) CountTransactions(ctx context.Context, q ledgercontroller.ListTransactionsQuery) (int, error) { + return d.legacyStore.CountTransactions(ctx, q) +} + +func (d *DefaultStoreAdapter) GetTransaction(ctx context.Context, query ledgercontroller.GetTransactionQuery) (*ledger.Transaction, error) { + return d.legacyStore.GetTransactionWithVolumes(ctx, query) +} + +func (d *DefaultStoreAdapter) CountAccounts(ctx context.Context, q ledgercontroller.ListAccountsQuery) (int, error) { + return d.legacyStore.CountAccounts(ctx, q) +} + +func (d *DefaultStoreAdapter) ListAccounts(ctx context.Context, q ledgercontroller.ListAccountsQuery) (*bunpaginate.Cursor[ledger.Account], error) { + return d.legacyStore.GetAccountsWithVolumes(ctx, q) +} + +func (d *DefaultStoreAdapter) GetAccount(ctx context.Context, q ledgercontroller.GetAccountQuery) (*ledger.Account, error) { + return d.legacyStore.GetAccountWithVolumes(ctx, q) +} + +func (d *DefaultStoreAdapter) GetAggregatedBalances(ctx context.Context, q ledgercontroller.GetAggregatedBalanceQuery) (ledger.BalancesByAssets, error) { + return d.legacyStore.GetAggregatedBalances(ctx, q) +} + +func (d *DefaultStoreAdapter) GetVolumesWithBalances(ctx context.Context, q ledgercontroller.GetVolumesWithBalancesQuery) (*bunpaginate.Cursor[ledger.VolumesWithBalanceByAssetByAccount], error) { + return d.legacyStore.GetVolumesWithBalances(ctx, q) +} + +func (d *DefaultStoreAdapter) IsUpToDate(ctx context.Context) (bool, error) { + return d.newStore.IsUpToDate(ctx) +} + +func (d *DefaultStoreAdapter) GetMigrationsInfo(ctx context.Context) ([]migrations.Info, error) { + return d.newStore.GetMigrationsInfo(ctx) +} + +func (d *DefaultStoreAdapter) WithTX(ctx context.Context, opts *sql.TxOptions, f func(ledgercontroller.TX) (bool, error)) error { + if opts == nil { + opts = &sql.TxOptions{} + } + + tx, err := d.newStore.GetDB().BeginTx(ctx, opts) + if err != nil { + return err + } + defer func() { + _ = tx.Rollback() + }() + + if commit, err := f(&TX{ + newStore: d.newStore.WithDB(tx), + legacyStore: d.legacyStore.WithDB(tx), + sqlTX: tx, + }); err != nil { + return err + } else if commit { + return tx.Commit() + } + + return nil +} + +func 
NewDefaultStoreAdapter(store *ledgerstore.Store) *DefaultStoreAdapter { + return &DefaultStoreAdapter{ + newStore: store, + legacyStore: New(store.GetDB(), store.GetLedger().Bucket, store.GetLedger().Name), + } +} + +var _ ledgercontroller.Store = (*DefaultStoreAdapter)(nil) diff --git a/internal/storage/ledgerstore/balances.go b/internal/storage/ledger/legacy/balances.go similarity index 60% rename from internal/storage/ledgerstore/balances.go rename to internal/storage/ledger/legacy/balances.go index e7df72cf7..5266fdb6d 100644 --- a/internal/storage/ledgerstore/balances.go +++ b/internal/storage/ledger/legacy/balances.go @@ -1,19 +1,17 @@ -package ledgerstore +package legacy import ( "context" "errors" "fmt" - "math/big" - - "github.com/formancehq/ledger/internal/storage/sqlutils" - - "github.com/formancehq/go-libs/query" + "github.com/formancehq/go-libs/v2/platform/postgres" + "github.com/formancehq/go-libs/v2/query" ledger "github.com/formancehq/ledger/internal" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" "github.com/uptrace/bun" ) -func (store *Store) GetAggregatedBalances(ctx context.Context, q GetAggregatedBalanceQuery) (ledger.BalancesByAssets, error) { +func (store *Store) GetAggregatedBalances(ctx context.Context, q ledgercontroller.GetAggregatedBalanceQuery) (ledger.BalancesByAssets, error) { var ( needMetadata bool @@ -25,14 +23,13 @@ func (store *Store) GetAggregatedBalances(ctx context.Context, q GetAggregatedBa subQuery, args, err = q.QueryBuilder.Build(query.ContextFn(func(key, operator string, value any) (string, []any, error) { switch { case key == "address": - // TODO: Should allow comparison operator only if segments not used if operator != "$match" { return "", nil, newErrInvalidQuery("'address' column can only be used with $match") } switch address := value.(type) { case string: - return filterAccountAddress(address, "account_address"), nil, nil + return filterAccountAddress(address, "accounts_address"), nil, nil default: return "", nil, newErrInvalidQuery("unexpected type %T for column 'address'", address) } @@ -80,26 +77,30 @@ func (store *Store) GetAggregatedBalances(ctx context.Context, q GetAggregatedBa if q.UseInsertionDate { pitColumn = "insertion_date" } - moves := store.bucket.db. + moves := store.db. NewSelect(). - Table(MovesTableName). - ColumnExpr("distinct on (moves.account_address, moves.asset) moves.*"). - Order("account_address", "asset"). + ModelTableExpr(store.GetPrefixedRelationName("moves")). + DistinctOn("moves.accounts_address, moves.asset"). Where("moves.ledger = ?", store.name). Apply(filterPIT(q.PIT, pitColumn)) if q.UseInsertionDate { - moves = moves.Order("moves.insertion_date desc") + moves = moves. + ColumnExpr("accounts_address"). + ColumnExpr("asset"). + ColumnExpr("first_value(moves.post_commit_volumes) over (partition by moves.accounts_address, moves.asset order by seq desc) as post_commit_volumes") } else { - moves = moves.Order("moves.effective_date desc") + moves = moves. + ColumnExpr("accounts_address"). + ColumnExpr("asset"). + ColumnExpr("first_value(moves.post_commit_effective_volumes) over (partition by moves.accounts_address, moves.asset order by effective_date desc, seq desc) as post_commit_effective_volumes") } - moves = moves.Order("seq desc") if needMetadata { if q.PIT != nil { moves = moves.Join(`join lateral ( select metadata - from accounts_metadata am + from `+store.GetPrefixedRelationName("accounts_metadata")+` am where am.accounts_seq = moves.accounts_seq and (? 
is null or date <= ?) order by revision desc limit 1 @@ -107,7 +108,7 @@ func (store *Store) GetAggregatedBalances(ctx context.Context, q GetAggregatedBa } else { moves = moves.Join(`join lateral ( select metadata - from accounts a + from ` + store.GetPrefixedRelationName("accounts") + ` a where a.seq = moves.accounts_seq ) accounts on true`) } @@ -121,51 +122,26 @@ func (store *Store) GetAggregatedBalances(ctx context.Context, q GetAggregatedBa volumesColumn = "post_commit_volumes" } - asJsonb := selectQuery.NewSelect(). - TableExpr("moves"). - ColumnExpr(fmt.Sprintf("volumes_to_jsonb((moves.asset, (sum((moves.%s).inputs), sum((moves.%s).outputs))::volumes)) as aggregated", volumesColumn, volumesColumn)). - Group("moves.asset") - - return selectQuery. + finalQuery := selectQuery. With("moves", moves). - With("data", asJsonb). + With( + "data", + selectQuery.NewSelect(). + TableExpr("moves"). + ColumnExpr(fmt.Sprintf(store.GetPrefixedRelationName("volumes_to_jsonb")+`((moves.asset, (sum((moves.%s).inputs), sum((moves.%s).outputs))::%s)) as aggregated`, volumesColumn, volumesColumn, store.GetPrefixedRelationName("volumes"))). + Group("moves.asset"), + ). TableExpr("data"). ColumnExpr("aggregate_objects(data.aggregated) as aggregated") + + return finalQuery }) - if err != nil && !errors.Is(err, sqlutils.ErrNotFound) { + if err != nil && !errors.Is(err, postgres.ErrNotFound) { return nil, err } - if errors.Is(err, sqlutils.ErrNotFound) { + if errors.Is(err, postgres.ErrNotFound) { return ledger.BalancesByAssets{}, nil } return ret.Aggregated.Balances(), nil } - -func (store *Store) GetBalance(ctx context.Context, address, asset string) (*big.Int, error) { - type Temp struct { - Balance *big.Int `bun:"balance,type:numeric"` - } - v, err := fetch[*Temp](store, false, ctx, func(query *bun.SelectQuery) *bun.SelectQuery { - return query.TableExpr("get_account_balance(?, ?, ?) as balance", store.name, address, asset) - }) - if err != nil { - return nil, err - } - - return v.Balance, nil -} - -type GetAggregatedBalanceQuery struct { - PITFilter - QueryBuilder query.Builder - UseInsertionDate bool -} - -func NewGetAggregatedBalancesQuery(filter PITFilter, qb query.Builder, useInsertionDate bool) GetAggregatedBalanceQuery { - return GetAggregatedBalanceQuery{ - PITFilter: filter, - QueryBuilder: qb, - UseInsertionDate: useInsertionDate, - } -} diff --git a/internal/storage/ledger/legacy/balances_test.go b/internal/storage/ledger/legacy/balances_test.go new file mode 100644 index 000000000..d6c185946 --- /dev/null +++ b/internal/storage/ledger/legacy/balances_test.go @@ -0,0 +1,175 @@ +//go:build it + +package legacy_test + +import ( + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + "github.com/google/go-cmp/cmp" + "math/big" + "testing" + + "github.com/formancehq/go-libs/v2/time" + + "github.com/formancehq/go-libs/v2/logging" + "github.com/formancehq/go-libs/v2/pointer" + + "github.com/formancehq/go-libs/v2/metadata" + "github.com/formancehq/go-libs/v2/query" + ledger "github.com/formancehq/ledger/internal" + "github.com/stretchr/testify/require" +) + +func TestGetBalancesAggregated(t *testing.T) { + t.Parallel() + store := newLedgerStore(t) + now := time.Now() + ctx := logging.TestingContext() + + bigInt, _ := big.NewInt(0).SetString("1000", 10) + smallInt := big.NewInt(100) + + tx1 := ledger.NewTransaction(). + WithPostings( + ledger.NewPosting("world", "users:1", "USD", bigInt), + ledger.NewPosting("world", "users:2", "USD", smallInt), + ). + WithTimestamp(now). 
+ WithInsertedAt(now) + err := store.newStore.CommitTransaction(ctx, &tx1) + require.NoError(t, err) + + tx2 := ledger.NewTransaction(). + WithPostings( + ledger.NewPosting("world", "users:1", "USD", bigInt), + ledger.NewPosting("world", "users:2", "USD", smallInt), + ledger.NewPosting("world", "xxx", "EUR", smallInt), + ). + WithTimestamp(now.Add(-time.Minute)). + WithInsertedAt(now.Add(time.Minute)) + err = store.newStore.CommitTransaction(ctx, &tx2) + require.NoError(t, err) + + require.NoError(t, store.newStore.UpdateAccountsMetadata(ctx, map[string]metadata.Metadata{ + "users:1": { + "category": "premium", + }, + "users:2": { + "category": "premium", + }, + })) + + require.NoError(t, store.newStore.DeleteAccountMetadata(ctx, "users:2", "category")) + + require.NoError(t, store.newStore.UpdateAccountsMetadata(ctx, map[string]metadata.Metadata{ + "users:1": { + "category": "premium", + }, + "users:2": { + "category": "2", + }, + "world": { + "world": "bar", + }, + })) + + t.Run("aggregate on all", func(t *testing.T) { + t.Parallel() + cursor, err := store.GetAggregatedBalances(ctx, ledgercontroller.NewGetAggregatedBalancesQuery(ledgercontroller.PITFilter{}, nil, false)) + require.NoError(t, err) + RequireEqual(t, ledger.BalancesByAssets{ + "USD": big.NewInt(0), + "EUR": big.NewInt(0), + }, cursor) + }) + t.Run("filter on address", func(t *testing.T) { + t.Parallel() + ret, err := store.GetAggregatedBalances(ctx, ledgercontroller.NewGetAggregatedBalancesQuery(ledgercontroller.PITFilter{}, + query.Match("address", "users:"), false)) + require.NoError(t, err) + require.Equal(t, ledger.BalancesByAssets{ + "USD": big.NewInt(0).Add( + big.NewInt(0).Mul(bigInt, big.NewInt(2)), + big.NewInt(0).Mul(smallInt, big.NewInt(2)), + ), + }, ret) + }) + t.Run("using pit on effective date", func(t *testing.T) { + t.Parallel() + ret, err := store.GetAggregatedBalances(ctx, ledgercontroller.NewGetAggregatedBalancesQuery(ledgercontroller.PITFilter{ + PIT: pointer.For(now.Add(-time.Second)), + }, query.Match("address", "users:"), false)) + require.NoError(t, err) + require.Equal(t, ledger.BalancesByAssets{ + "USD": big.NewInt(0).Add( + bigInt, + smallInt, + ), + }, ret) + }) + t.Run("using pit on insertion date", func(t *testing.T) { + t.Parallel() + ret, err := store.GetAggregatedBalances(ctx, ledgercontroller.NewGetAggregatedBalancesQuery(ledgercontroller.PITFilter{ + PIT: pointer.For(now), + }, query.Match("address", "users:"), true)) + require.NoError(t, err) + require.Equal(t, ledger.BalancesByAssets{ + "USD": big.NewInt(0).Add( + bigInt, + smallInt, + ), + }, ret) + }) + t.Run("using a metadata and pit", func(t *testing.T) { + t.Parallel() + + ret, err := store.GetAggregatedBalances(ctx, ledgercontroller.NewGetAggregatedBalancesQuery(ledgercontroller.PITFilter{ + PIT: pointer.For(now.Add(time.Minute)), + }, query.Match("metadata[category]", "premium"), false)) + require.NoError(t, err) + require.Equal(t, ledger.BalancesByAssets{ + "USD": big.NewInt(0).Add( + big.NewInt(0).Mul(bigInt, big.NewInt(2)), + big.NewInt(0), + ), + }, ret) + }) + t.Run("using a metadata without pit", func(t *testing.T) { + t.Parallel() + ret, err := store.GetAggregatedBalances(ctx, ledgercontroller.NewGetAggregatedBalancesQuery(ledgercontroller.PITFilter{}, + query.Match("metadata[category]", "premium"), false)) + require.NoError(t, err) + require.Equal(t, ledger.BalancesByAssets{ + "USD": big.NewInt(0).Mul(bigInt, big.NewInt(2)), + }, ret) + }) + t.Run("when no matching", func(t *testing.T) { + t.Parallel() + ret, err := 
store.GetAggregatedBalances(ctx, ledgercontroller.NewGetAggregatedBalancesQuery(ledgercontroller.PITFilter{}, + query.Match("metadata[category]", "guest"), false)) + require.NoError(t, err) + require.Equal(t, ledger.BalancesByAssets{}, ret) + }) + + t.Run("using a filter exist on metadata", func(t *testing.T) { + t.Parallel() + ret, err := store.GetAggregatedBalances(ctx, ledgercontroller.NewGetAggregatedBalancesQuery(ledgercontroller.PITFilter{}, query.Exists("metadata", "category"), false)) + require.NoError(t, err) + require.Equal(t, ledger.BalancesByAssets{ + "USD": big.NewInt(0).Add( + big.NewInt(0).Mul(bigInt, big.NewInt(2)), + big.NewInt(0).Mul(smallInt, big.NewInt(2)), + ), + }, ret) + }) +} + +func RequireEqual(t *testing.T, expected, actual any) { + t.Helper() + if diff := cmp.Diff(expected, actual, cmp.Comparer(bigIntComparer)); diff != "" { + require.Failf(t, "Content not matching", diff) + } +} + +func bigIntComparer(v1 *big.Int, v2 *big.Int) bool { + return v1.String() == v2.String() +} diff --git a/internal/storage/ledger/legacy/debug.go b/internal/storage/ledger/legacy/debug.go new file mode 100644 index 000000000..64141226f --- /dev/null +++ b/internal/storage/ledger/legacy/debug.go @@ -0,0 +1,42 @@ +package legacy + +import ( + "context" + "database/sql" + "fmt" + "github.com/shomali11/xsql" + "github.com/uptrace/bun" +) + +//nolint:unused +func (s *Store) DumpTables(ctx context.Context, tables ...string) { + for _, table := range tables { + s.DumpQuery( + ctx, + s.db.NewSelect(). + ModelTableExpr(s.GetPrefixedRelationName(table)), + ) + } +} + +//nolint:unused +func (s *Store) DumpQuery(ctx context.Context, query *bun.SelectQuery) { + fmt.Println(query) + rows, err := query.Rows(ctx) + if err != nil { + panic(err) + } + s.DumpRows(rows) +} + +//nolint:unused +func (s *Store) DumpRows(rows *sql.Rows) { + data, err := xsql.Pretty(rows) + if err != nil { + panic(err) + } + fmt.Println(data) + if err := rows.Close(); err != nil { + panic(err) + } +} diff --git a/internal/storage/ledgerstore/errors.go b/internal/storage/ledger/legacy/errors.go similarity index 95% rename from internal/storage/ledgerstore/errors.go rename to internal/storage/ledger/legacy/errors.go index aa95d49b5..41754951e 100644 --- a/internal/storage/ledgerstore/errors.go +++ b/internal/storage/ledger/legacy/errors.go @@ -1,4 +1,4 @@ -package ledgerstore +package legacy import ( "fmt" diff --git a/internal/storage/ledger/legacy/logs.go b/internal/storage/ledger/legacy/logs.go new file mode 100644 index 000000000..069f4fd94 --- /dev/null +++ b/internal/storage/ledger/legacy/logs.go @@ -0,0 +1,50 @@ +package legacy + +import ( + "context" + "fmt" + "github.com/formancehq/go-libs/v2/bun/bunpaginate" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + ledgerstore "github.com/formancehq/ledger/internal/storage/ledger" + + "github.com/formancehq/go-libs/v2/query" + ledger "github.com/formancehq/ledger/internal" + "github.com/uptrace/bun" +) + +func (store *Store) logsQueryBuilder(q ledgercontroller.PaginatedQueryOptions[any]) func(*bun.SelectQuery) *bun.SelectQuery { + return func(selectQuery *bun.SelectQuery) *bun.SelectQuery { + + selectQuery = selectQuery.Where("ledger = ?", store.name).ModelTableExpr(store.GetPrefixedRelationName("logs")) + if q.QueryBuilder != nil { + subQuery, args, err := q.QueryBuilder.Build(query.ContextFn(func(key, operator string, value any) (string, []any, error) { + switch { + case key == "date": + return fmt.Sprintf("%s %s ?", key, 
query.DefaultComparisonOperatorsMapping[operator]), []any{value}, nil + default: + return "", nil, fmt.Errorf("unknown key '%s' when building query", key) + } + })) + if err != nil { + panic(err) + } + selectQuery = selectQuery.Where(subQuery, args...) + } + + return selectQuery + } +} + +func (store *Store) GetLogs(ctx context.Context, q ledgercontroller.GetLogsQuery) (*bunpaginate.Cursor[ledger.Log], error) { + logs, err := paginateWithColumn[ledgercontroller.PaginatedQueryOptions[any], ledgerstore.Log](store, ctx, + (*bunpaginate.ColumnPaginatedQuery[ledgercontroller.PaginatedQueryOptions[any]])(&q), + store.logsQueryBuilder(q.Options), + ) + if err != nil { + return nil, err + } + + return bunpaginate.MapCursor(logs, func(from ledgerstore.Log) ledger.Log { + return from.ToCore() + }), nil +} diff --git a/internal/storage/ledger/legacy/logs_test.go b/internal/storage/ledger/legacy/logs_test.go new file mode 100644 index 000000000..1f4f297f5 --- /dev/null +++ b/internal/storage/ledger/legacy/logs_test.go @@ -0,0 +1,61 @@ +//go:build it + +package legacy_test + +import ( + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + "testing" + + "github.com/formancehq/go-libs/v2/bun/bunpaginate" + "github.com/formancehq/go-libs/v2/time" + + "github.com/formancehq/go-libs/v2/logging" + + "github.com/formancehq/go-libs/v2/query" + ledger "github.com/formancehq/ledger/internal" + "github.com/stretchr/testify/require" +) + +func TestLogsList(t *testing.T) { + t.Parallel() + store := newLedgerStore(t) + now := time.Now() + ctx := logging.TestingContext() + + for i := 1; i <= 3; i++ { + newLog := ledger.NewLog(ledger.CreatedTransaction{ + Transaction: ledger.NewTransaction(), + AccountMetadata: ledger.AccountMetadata{}, + }) + newLog.Date = now.Add(-time.Duration(i) * time.Hour) + + err := store.newStore.InsertLog(ctx, &newLog) + require.NoError(t, err) + } + + cursor, err := store.GetLogs(ctx, ledgercontroller.NewListLogsQuery(ledgercontroller.NewPaginatedQueryOptions[any](nil))) + require.NoError(t, err) + require.Equal(t, bunpaginate.QueryDefaultPageSize, cursor.PageSize) + + require.Equal(t, 3, len(cursor.Data)) + require.EqualValues(t, 3, cursor.Data[0].ID) + + cursor, err = store.GetLogs(ctx, ledgercontroller.NewListLogsQuery(ledgercontroller.NewPaginatedQueryOptions[any](nil).WithPageSize(1))) + require.NoError(t, err) + // Should get only the first log. + require.Equal(t, 1, cursor.PageSize) + require.EqualValues(t, 3, cursor.Data[0].ID) + + cursor, err = store.GetLogs(ctx, ledgercontroller.NewListLogsQuery(ledgercontroller.NewPaginatedQueryOptions[any](nil). + WithQueryBuilder(query.And( + query.Gte("date", now.Add(-2*time.Hour)), + query.Lt("date", now.Add(-time.Hour)), + )). + WithPageSize(10), + )) + require.NoError(t, err) + require.Equal(t, 10, cursor.PageSize) + // Should get only the second log, as StartTime is inclusive and EndTime exclusive. 
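+	// Note: the $gte/$lt filters above are translated by logsQueryBuilder into
+	// `date >= ?` / `date < ?` clauses, so of the three logs (dated now-1h,
+	// now-2h and now-3h) only the one dated now-2h, i.e. ID 2, falls inside the window.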
+ require.Len(t, cursor.Data, 1) + require.EqualValues(t, 2, cursor.Data[0].ID) +} diff --git a/internal/storage/ledger/legacy/main_test.go b/internal/storage/ledger/legacy/main_test.go new file mode 100644 index 000000000..266753db8 --- /dev/null +++ b/internal/storage/ledger/legacy/main_test.go @@ -0,0 +1,80 @@ +//go:build it + +package legacy_test + +import ( + "github.com/formancehq/go-libs/v2/bun/bundebug" + "github.com/formancehq/go-libs/v2/testing/docker" + "github.com/formancehq/go-libs/v2/testing/utils" + "github.com/formancehq/ledger/internal/storage/bucket" + systemstore "github.com/formancehq/ledger/internal/storage/driver" + ledgerstore "github.com/formancehq/ledger/internal/storage/ledger" + "github.com/formancehq/ledger/internal/storage/ledger/legacy" + "go.opentelemetry.io/otel/trace/noop" + "os" + "testing" + + "github.com/formancehq/go-libs/v2/bun/bunconnect" + + "github.com/uptrace/bun" + + "github.com/formancehq/go-libs/v2/logging" + "github.com/formancehq/go-libs/v2/testing/platform/pgtesting" + ledger "github.com/formancehq/ledger/internal" + "github.com/google/uuid" + "github.com/stretchr/testify/require" +) + +var ( + srv *pgtesting.PostgresServer +) + +func TestMain(m *testing.M) { + utils.WithTestMain(func(t *utils.TestingTForMain) int { + srv = pgtesting.CreatePostgresServer(t, docker.NewPool(t, logging.Testing())) + + return m.Run() + }) +} + +type T interface { + require.TestingT + Helper() + Cleanup(func()) +} + +type testStore struct { + *legacy.Store + newStore *ledgerstore.Store +} + +func newLedgerStore(t T) *testStore { + t.Helper() + + ledgerName := uuid.NewString()[:8] + ctx := logging.TestingContext() + + pgDatabase := srv.NewDatabase(t) + + hooks := make([]bun.QueryHook, 0) + if os.Getenv("DEBUG") == "true" { + hooks = append(hooks, bundebug.NewQueryHook()) + } + + db, err := bunconnect.OpenSQLDB(ctx, pgDatabase.ConnectionOptions(), hooks...) 
+ require.NoError(t, err) + + require.NoError(t, systemstore.Migrate(ctx, db)) + + l := ledger.MustNewWithDefault(ledgerName) + l.Bucket = ledgerName + + b := bucket.New(db, ledgerName) + require.NoError(t, b.Migrate(ctx, noop.Tracer{})) + require.NoError(t, b.AddLedger(ctx, l, db)) + + return &testStore{ + Store: legacy.New(db, l.Name, l.Name), + newStore: ledgerstore.New(db, b, l), + } +} diff --git a/internal/storage/ledger/legacy/store.go b/internal/storage/ledger/legacy/store.go new file mode 100644 index 000000000..657b0d70a --- /dev/null +++ b/internal/storage/ledger/legacy/store.go @@ -0,0 +1,44 @@ +package legacy + +import ( + "fmt" + _ "github.com/jackc/pgx/v5/stdlib" + "github.com/uptrace/bun" +) + +type Store struct { + db bun.IDB + + bucket string + name string +} + +func (s *Store) GetPrefixedRelationName(v string) string { + return fmt.Sprintf(`"%s".%s`, s.bucket, v) +} + +func (store *Store) Name() string { + return store.name +} + +func (store *Store) GetDB() bun.IDB { + return store.db +} + +func (s *Store) WithDB(db bun.IDB) *Store { + ret := *s + ret.db = db + return &ret +} + +func New( + db bun.IDB, + bucket string, + name string, +) *Store { + return &Store{ + db: db, + bucket: bucket, + name: name, + } +} diff --git a/internal/storage/ledger/legacy/transactions.go b/internal/storage/ledger/legacy/transactions.go new file mode 100644 index 000000000..a7dfa99ae --- /dev/null +++ b/internal/storage/ledger/legacy/transactions.go @@ -0,0 +1,206 @@ +package legacy + +import ( + "context" + "errors" + "fmt" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + "regexp" + + "github.com/formancehq/go-libs/v2/time" + + "github.com/formancehq/go-libs/v2/bun/bunpaginate" + + "github.com/formancehq/go-libs/v2/query" + ledger "github.com/formancehq/ledger/internal" + "github.com/uptrace/bun" +) + +var ( + metadataRegex = regexp.MustCompile(`metadata\[(.+)]`) +) + +func (store *Store) buildTransactionQuery(p ledgercontroller.PITFilterWithVolumes, query *bun.SelectQuery) *bun.SelectQuery { + + selectMetadata := query.NewSelect(). + ModelTableExpr(store.GetPrefixedRelationName("transactions_metadata")). + Where("transactions.seq = transactions_metadata.transactions_seq"). + Order("revision desc"). + Limit(1) + + if p.PIT != nil && !p.PIT.IsZero() { + selectMetadata = selectMetadata.Where("date <= ?", p.PIT) + } + + query = query. + ModelTableExpr(store.GetPrefixedRelationName("transactions")). + Where("transactions.ledger = ?", store.name) + + if p.PIT != nil && !p.PIT.IsZero() { + query = query. + Where("timestamp <= ?", p.PIT). + Column("id", "inserted_at", "timestamp", "postings"). + Column("transactions_metadata.metadata"). + Join(fmt.Sprintf(`left join lateral (%s) as transactions_metadata on true`, selectMetadata.String())). 
+ ColumnExpr(fmt.Sprintf("case when reverted_at is not null and reverted_at > '%s' then null else reverted_at end", p.PIT.Format(time.DateFormat))) + } else { + query = query.Column( + "transactions.metadata", + "transactions.id", + "transactions.inserted_at", + "transactions.timestamp", + "transactions.postings", + "transactions.reverted_at", + "transactions.reference", + ) + } + + if p.ExpandEffectiveVolumes { + query = query.ColumnExpr(store.GetPrefixedRelationName("get_aggregated_effective_volumes_for_transaction")+"(?, transactions.seq) as post_commit_effective_volumes", store.name) + } + if p.ExpandVolumes { + query = query.ColumnExpr(store.GetPrefixedRelationName("get_aggregated_volumes_for_transaction")+"(?, transactions.seq) as post_commit_volumes", store.name) + } + return query +} + +func (store *Store) transactionQueryContext(qb query.Builder, q ledgercontroller.ListTransactionsQuery) (string, []any, error) { + + return qb.Build(query.ContextFn(func(key, operator string, value any) (string, []any, error) { + switch { + case key == "reference" || key == "timestamp": + return fmt.Sprintf("%s %s ?", key, query.DefaultComparisonOperatorsMapping[operator]), []any{value}, nil + case key == "reverted": + if operator != "$match" { + return "", nil, newErrInvalidQuery("'reverted' column can only be used with $match") + } + switch value := value.(type) { + case bool: + ret := "reverted_at is" + if value { + ret += " not" + } + return ret + " null", nil, nil + default: + return "", nil, newErrInvalidQuery("'reverted' can only be used with bool value") + } + case key == "account": + if operator != "$match" { + return "", nil, newErrInvalidQuery("'account' column can only be used with $match") + } + switch address := value.(type) { + case string: + return filterAccountAddressOnTransactions(address, true, true), nil, nil + default: + return "", nil, newErrInvalidQuery("unexpected type %T for column 'account'", address) + } + case key == "source": + if operator != "$match" { + return "", nil, errors.New("'source' column can only be used with $match") + } + switch address := value.(type) { + case string: + return filterAccountAddressOnTransactions(address, true, false), nil, nil + default: + return "", nil, newErrInvalidQuery("unexpected type %T for column 'source'", address) + } + case key == "destination": + if operator != "$match" { + return "", nil, errors.New("'destination' column can only be used with $match") + } + switch address := value.(type) { + case string: + return filterAccountAddressOnTransactions(address, false, true), nil, nil + default: + return "", nil, newErrInvalidQuery("unexpected type %T for column 'destination'", address) + } + case metadataRegex.Match([]byte(key)): + if operator != "$match" { + return "", nil, newErrInvalidQuery("'account' column can only be used with $match") + } + match := metadataRegex.FindAllStringSubmatch(key, 3) + + key := "metadata" + if q.Options.Options.PIT != nil && !q.Options.Options.PIT.IsZero() { + key = "transactions_metadata.metadata" + } + + return key + " @> ?", []any{map[string]any{ + match[0][1]: value, + }}, nil + + case key == "metadata": + if operator != "$exists" { + return "", nil, newErrInvalidQuery("'metadata' key filter can only be used with $exists") + } + if q.Options.Options.PIT != nil && !q.Options.Options.PIT.IsZero() { + key = "transactions_metadata.metadata" + } + + return fmt.Sprintf("%s -> ? 
IS NOT NULL", key), []any{value}, nil + default: + return "", nil, newErrInvalidQuery("unknown key '%s' when building query", key) + } + })) +} + +func (store *Store) buildTransactionListQuery(selectQuery *bun.SelectQuery, q ledgercontroller.PaginatedQueryOptions[ledgercontroller.PITFilterWithVolumes], where string, args []any) *bun.SelectQuery { + + selectQuery = store.buildTransactionQuery(q.Options, selectQuery) + if where != "" { + return selectQuery.Where(where, args...) + } + + return selectQuery +} + +func (store *Store) GetTransactions(ctx context.Context, q ledgercontroller.ListTransactionsQuery) (*bunpaginate.Cursor[ledger.Transaction], error) { + + var ( + where string + args []any + err error + ) + if q.Options.QueryBuilder != nil { + where, args, err = store.transactionQueryContext(q.Options.QueryBuilder, q) + if err != nil { + return nil, err + } + } + + return paginateWithColumn[ledgercontroller.PaginatedQueryOptions[ledgercontroller.PITFilterWithVolumes], ledger.Transaction](store, ctx, + (*bunpaginate.ColumnPaginatedQuery[ledgercontroller.PaginatedQueryOptions[ledgercontroller.PITFilterWithVolumes]])(&q), + func(query *bun.SelectQuery) *bun.SelectQuery { + return store.buildTransactionListQuery(query, q.Options, where, args) + }, + ) +} + +func (store *Store) CountTransactions(ctx context.Context, q ledgercontroller.ListTransactionsQuery) (int, error) { + + var ( + where string + args []any + err error + ) + + if q.Options.QueryBuilder != nil { + where, args, err = store.transactionQueryContext(q.Options.QueryBuilder, q) + if err != nil { + return 0, err + } + } + + return count[ledger.Transaction](store, true, ctx, func(query *bun.SelectQuery) *bun.SelectQuery { + return store.buildTransactionListQuery(query, q.Options, where, args) + }) +} + +func (store *Store) GetTransactionWithVolumes(ctx context.Context, filter ledgercontroller.GetTransactionQuery) (*ledger.Transaction, error) { + return fetch[*ledger.Transaction](store, true, ctx, + func(query *bun.SelectQuery) *bun.SelectQuery { + return store.buildTransactionQuery(filter.PITFilterWithVolumes, query). + Where("transactions.id = ?", filter.ID). + Limit(1) + }) +} diff --git a/internal/storage/ledger/legacy/transactions_test.go b/internal/storage/ledger/legacy/transactions_test.go new file mode 100644 index 000000000..f0be6d1a7 --- /dev/null +++ b/internal/storage/ledger/legacy/transactions_test.go @@ -0,0 +1,280 @@ +//go:build it + +package legacy_test + +import ( + "context" + "fmt" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + "math/big" + "testing" + + "github.com/formancehq/go-libs/v2/time" + + "github.com/pkg/errors" + + "github.com/formancehq/go-libs/v2/logging" + "github.com/formancehq/go-libs/v2/pointer" + + "github.com/formancehq/go-libs/v2/metadata" + "github.com/formancehq/go-libs/v2/query" + ledger "github.com/formancehq/ledger/internal" + "github.com/stretchr/testify/require" +) + +func TestGetTransactionWithVolumes(t *testing.T) { + t.Parallel() + store := newLedgerStore(t) + now := time.Now() + ctx := logging.TestingContext() + + tx1 := ledger.NewTransaction(). + WithPostings( + ledger.NewPosting("world", "central_bank", "USD", big.NewInt(100)), + ). + WithReference("tx1"). + WithTimestamp(now.Add(-3 * time.Hour)) + err := store.newStore.CommitTransaction(ctx, &tx1) + require.NoError(t, err) + + tx2 := ledger.NewTransaction(). + WithPostings( + ledger.NewPosting("world", "central_bank", "USD", big.NewInt(100)), + ). + WithReference("tx2"). 
+ WithTimestamp(now.Add(-2 * time.Hour)) + err = store.newStore.CommitTransaction(ctx, &tx2) + require.NoError(t, err) + + tx, err := store.GetTransactionWithVolumes(ctx, ledgercontroller.NewGetTransactionQuery(tx1.ID). + WithExpandVolumes(). + WithExpandEffectiveVolumes()) + require.NoError(t, err) + require.Equal(t, tx1.Postings, tx.Postings) + require.Equal(t, tx1.Reference, tx.Reference) + require.Equal(t, tx1.Timestamp, tx.Timestamp) + RequireEqual(t, ledger.PostCommitVolumes{ + "world": { + "USD": { + Input: big.NewInt(0), + Output: big.NewInt(100), + }, + }, + "central_bank": { + "USD": { + Input: big.NewInt(100), + Output: big.NewInt(0), + }, + }, + }, tx.PostCommitVolumes) + + tx, err = store.GetTransactionWithVolumes(ctx, ledgercontroller.NewGetTransactionQuery(tx2.ID). + WithExpandVolumes(). + WithExpandEffectiveVolumes()) + require.NoError(t, err) + require.Equal(t, tx2.Postings, tx.Postings) + require.Equal(t, tx2.Reference, tx.Reference) + require.Equal(t, tx2.Timestamp, tx.Timestamp) + RequireEqual(t, ledger.PostCommitVolumes{ + "world": { + "USD": { + Input: big.NewInt(0), + Output: big.NewInt(200), + }, + }, + "central_bank": { + "USD": { + Input: big.NewInt(200), + Output: big.NewInt(0), + }, + }, + }, tx.PostCommitVolumes) +} + +func TestCountTransactions(t *testing.T) { + t.Parallel() + store := newLedgerStore(t) + ctx := logging.TestingContext() + + for i := 0; i < 3; i++ { + tx := ledger.NewTransaction().WithPostings( + ledger.NewPosting("world", fmt.Sprintf("account%d", i), "USD", big.NewInt(100)), + ) + err := store.newStore.CommitTransaction(ctx, &tx) + require.NoError(t, err) + } + + count, err := store.CountTransactions(context.Background(), ledgercontroller.NewListTransactionsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}))) + require.NoError(t, err, "counting transactions should not fail") + require.Equal(t, 3, count, "count should be equal") +} + +func TestGetTransactions(t *testing.T) { + t.Parallel() + store := newLedgerStore(t) + now := time.Now() + ctx := logging.TestingContext() + + tx1 := ledger.NewTransaction(). + WithPostings( + ledger.NewPosting("world", "alice", "USD", big.NewInt(100)), + ). + WithMetadata(metadata.Metadata{"category": "1"}). + WithTimestamp(now.Add(-3 * time.Hour)) + err := store.newStore.CommitTransaction(ctx, &tx1) + require.NoError(t, err) + + tx2 := ledger.NewTransaction(). + WithPostings( + ledger.NewPosting("world", "bob", "USD", big.NewInt(100)), + ). + WithMetadata(metadata.Metadata{"category": "2"}). + WithTimestamp(now.Add(-2 * time.Hour)) + err = store.newStore.CommitTransaction(ctx, &tx2) + require.NoError(t, err) + + tx3BeforeRevert := ledger.NewTransaction(). + WithPostings( + ledger.NewPosting("world", "users:marley", "USD", big.NewInt(100)), + ). + WithMetadata(metadata.Metadata{"category": "3"}). 
+ WithTimestamp(now.Add(-time.Hour)) + err = store.newStore.CommitTransaction(ctx, &tx3BeforeRevert) + require.NoError(t, err) + + _, hasBeenReverted, err := store.newStore.RevertTransaction(ctx, tx3BeforeRevert.ID) + require.NoError(t, err) + require.True(t, hasBeenReverted) + + tx4 := tx3BeforeRevert.Reverse().WithTimestamp(now) + err = store.newStore.CommitTransaction(ctx, &tx4) + require.NoError(t, err) + + _, _, err = store.newStore.UpdateTransactionMetadata(ctx, tx3BeforeRevert.ID, metadata.Metadata{ + "additional_metadata": "true", + }) + require.NoError(t, err) + + // refresh tx3 + // we can't take the result of the call on RevertTransaction nor UpdateTransactionMetadata as the result does not contains pc(e)v + tx3 := func() ledger.Transaction { + tx3, err := store.newStore.GetTransaction(ctx, ledgercontroller.NewGetTransactionQuery(tx3BeforeRevert.ID). + WithExpandVolumes(). + WithExpandEffectiveVolumes()) + require.NoError(t, err) + return *tx3 + }() + + tx5 := ledger.NewTransaction(). + WithPostings( + ledger.NewPosting("users:marley", "sellers:amazon", "USD", big.NewInt(100)), + ). + WithTimestamp(now) + err = store.newStore.CommitTransaction(ctx, &tx5) + require.NoError(t, err) + + type testCase struct { + name string + query ledgercontroller.PaginatedQueryOptions[ledgercontroller.PITFilterWithVolumes] + expected []ledger.Transaction + expectError error + } + testCases := []testCase{ + { + name: "nominal", + query: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}), + expected: []ledger.Transaction{tx5, tx4, tx3, tx2, tx1}, + }, + { + name: "address filter", + query: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("account", "bob")), + expected: []ledger.Transaction{tx2}, + }, + { + name: "address filter using segments matching two addresses by individual segments", + query: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("account", "users:amazon")), + expected: []ledger.Transaction{}, + }, + { + name: "address filter using segment", + query: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("account", "users:")), + expected: []ledger.Transaction{tx5, tx4, tx3}, + }, + { + name: "filter using metadata", + query: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("metadata[category]", "2")), + expected: []ledger.Transaction{tx2}, + }, + { + name: "using point in time", + query: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: pointer.For(now.Add(-time.Hour)), + }, + }), + expected: []ledger.Transaction{tx3BeforeRevert, tx2, tx1}, + }, + { + name: "reverted transactions", + query: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("reverted", true)), + expected: []ledger.Transaction{tx3}, + }, + { + name: "filter using exists metadata", + query: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). 
+ WithQueryBuilder(query.Exists("metadata", "category")), + expected: []ledger.Transaction{tx3, tx2, tx1}, + }, + { + name: "filter using metadata and pit", + query: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: pointer.For(tx3.Timestamp), + }, + }). + WithQueryBuilder(query.Match("metadata[category]", "2")), + expected: []ledger.Transaction{tx2}, + }, + { + name: "filter using not exists metadata", + query: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithQueryBuilder(query.Not(query.Exists("metadata", "category"))), + expected: []ledger.Transaction{tx5, tx4}, + }, + { + name: "filter using timestamp", + query: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("timestamp", tx5.Timestamp.Format(time.RFC3339Nano))), + expected: []ledger.Transaction{tx5, tx4}, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + tc.query.Options.ExpandVolumes = true + tc.query.Options.ExpandEffectiveVolumes = true + + cursor, err := store.GetTransactions(ctx, ledgercontroller.NewListTransactionsQuery(tc.query)) + if tc.expectError != nil { + require.True(t, errors.Is(err, tc.expectError)) + } else { + require.NoError(t, err) + require.Len(t, cursor.Data, len(tc.expected)) + RequireEqual(t, tc.expected, cursor.Data) + + count, err := store.CountTransactions(ctx, ledgercontroller.NewListTransactionsQuery(tc.query)) + require.NoError(t, err) + + require.EqualValues(t, len(tc.expected), count) + } + }) + } +} diff --git a/internal/storage/ledgerstore/utils.go b/internal/storage/ledger/legacy/utils.go similarity index 68% rename from internal/storage/ledgerstore/utils.go rename to internal/storage/ledger/legacy/utils.go index 76bb74f2e..97c6b51c6 100644 --- a/internal/storage/ledgerstore/utils.go +++ b/internal/storage/ledger/legacy/utils.go @@ -1,19 +1,17 @@ -package ledgerstore +package legacy import ( "context" "encoding/json" "fmt" + "github.com/formancehq/go-libs/v2/platform/postgres" "reflect" "strings" - "github.com/formancehq/go-libs/time" + "github.com/formancehq/go-libs/v2/time" - "github.com/formancehq/go-libs/bun/bunpaginate" + "github.com/formancehq/go-libs/v2/bun/bunpaginate" - "github.com/formancehq/ledger/internal/storage/sqlutils" - - "github.com/formancehq/go-libs/query" "github.com/uptrace/bun" ) @@ -22,7 +20,7 @@ func fetch[T any](s *Store, addModel bool, ctx context.Context, builders ...func var ret T ret = reflect.New(reflect.TypeOf(ret).Elem()).Interface().(T) - query := s.bucket.db.NewSelect() + query := s.db.NewSelect() if addModel { query = query.Model(ret) @@ -33,7 +31,7 @@ func fetch[T any](s *Store, addModel bool, ctx context.Context, builders ...func } if err := query.Scan(ctx, ret); err != nil { - return ret, sqlutils.PostgresError(err) + return ret, postgres.ResolveError(err) } return ret, nil @@ -42,7 +40,7 @@ func fetch[T any](s *Store, addModel bool, ctx context.Context, builders ...func func paginateWithOffset[FILTERS any, RETURN any](s *Store, ctx context.Context, q *bunpaginate.OffsetPaginatedQuery[FILTERS], builders ...func(query *bun.SelectQuery) *bun.SelectQuery) (*bunpaginate.Cursor[RETURN], error) { - query := s.bucket.db.NewSelect() + query := s.db.NewSelect() for _, builder := range builders { query = query.Apply(builder) } @@ -52,7 +50,7 @@ func paginateWithOffset[FILTERS any, RETURN any](s *Store, ctx context.Context, func 
paginateWithOffsetWithoutModel[FILTERS any, RETURN any](s *Store, ctx context.Context, q *bunpaginate.OffsetPaginatedQuery[FILTERS], builders ...func(query *bun.SelectQuery) *bun.SelectQuery) (*bunpaginate.Cursor[RETURN], error) { - query := s.bucket.db.NewSelect() + query := s.db.NewSelect() for _, builder := range builders { query = query.Apply(builder) } @@ -61,28 +59,28 @@ func paginateWithOffsetWithoutModel[FILTERS any, RETURN any](s *Store, ctx conte } func paginateWithColumn[FILTERS any, RETURN any](s *Store, ctx context.Context, q *bunpaginate.ColumnPaginatedQuery[FILTERS], builders ...func(query *bun.SelectQuery) *bun.SelectQuery) (*bunpaginate.Cursor[RETURN], error) { - query := s.bucket.db.NewSelect() + query := s.db.NewSelect() for _, builder := range builders { query = query.Apply(builder) } ret, err := bunpaginate.UsingColumn[FILTERS, RETURN](ctx, query, *q) if err != nil { - return nil, sqlutils.PostgresError(err) + return nil, postgres.ResolveError(err) } return ret, nil } func count[T any](s *Store, addModel bool, ctx context.Context, builders ...func(query *bun.SelectQuery) *bun.SelectQuery) (int, error) { - query := s.bucket.db.NewSelect() + query := s.db.NewSelect() if addModel { query = query.Model((*T)(nil)) } for _, builder := range builders { query = query.Apply(builder) } - return s.bucket.db.NewSelect(). + return s.db.NewSelect(). TableExpr("(" + query.String() + ") data"). Count(ctx) } @@ -185,72 +183,3 @@ func filterOOT(oot *time.Time, column string) func(query *bun.SelectQuery) *bun. return query.Where(fmt.Sprintf("%s >= ?", column), oot) } } - -type PaginatedQueryOptions[T any] struct { - QueryBuilder query.Builder `json:"qb"` - PageSize uint64 `json:"pageSize"` - Options T `json:"options"` -} - -func (v *PaginatedQueryOptions[T]) UnmarshalJSON(data []byte) error { - type aux struct { - QueryBuilder json.RawMessage `json:"qb"` - PageSize uint64 `json:"pageSize"` - Options T `json:"options"` - } - x := &aux{} - if err := json.Unmarshal(data, x); err != nil { - return err - } - - *v = PaginatedQueryOptions[T]{ - PageSize: x.PageSize, - Options: x.Options, - } - - var err error - if x.QueryBuilder != nil { - v.QueryBuilder, err = query.ParseJSON(string(x.QueryBuilder)) - if err != nil { - return err - } - } - - return nil -} - -func (opts PaginatedQueryOptions[T]) WithQueryBuilder(qb query.Builder) PaginatedQueryOptions[T] { - opts.QueryBuilder = qb - - return opts -} - -func (opts PaginatedQueryOptions[T]) WithPageSize(pageSize uint64) PaginatedQueryOptions[T] { - opts.PageSize = pageSize - - return opts -} - -func NewPaginatedQueryOptions[T any](options T) PaginatedQueryOptions[T] { - return PaginatedQueryOptions[T]{ - Options: options, - PageSize: bunpaginate.QueryDefaultPageSize, - } -} - -type PITFilter struct { - PIT *time.Time `json:"pit"` - OOT *time.Time `json:"oot"` -} - -type PITFilterWithVolumes struct { - PITFilter - ExpandVolumes bool `json:"volumes"` - ExpandEffectiveVolumes bool `json:"effectiveVolumes"` -} - -type FiltersForVolumes struct { - PITFilter - UseInsertionDate bool - GroupLvl uint -} diff --git a/internal/storage/ledgerstore/volumes.go b/internal/storage/ledger/legacy/volumes.go similarity index 69% rename from internal/storage/ledgerstore/volumes.go rename to internal/storage/ledger/legacy/volumes.go index b29db991b..1e079ae0f 100644 --- a/internal/storage/ledgerstore/volumes.go +++ b/internal/storage/ledger/legacy/volumes.go @@ -1,20 +1,21 @@ -package ledgerstore +package legacy import ( "context" "fmt" + ledgercontroller 
"github.com/formancehq/ledger/internal/controller/ledger" "regexp" - "github.com/formancehq/go-libs/bun/bunpaginate" - lquery "github.com/formancehq/go-libs/query" + "github.com/formancehq/go-libs/v2/bun/bunpaginate" + lquery "github.com/formancehq/go-libs/v2/query" ledger "github.com/formancehq/ledger/internal" "github.com/uptrace/bun" ) -func (store *Store) volumesQueryContext(q GetVolumesWithBalancesQuery) (string, []any, bool, error) { +func (store *Store) volumesQueryContext(q ledgercontroller.GetVolumesWithBalancesQuery) (string, []any, bool, error) { - metadataRegex := regexp.MustCompile("metadata\\[(.+)\\]") - balanceRegex := regexp.MustCompile("balance\\[(.*)\\]") + metadataRegex := regexp.MustCompile(`metadata\[(.+)]`) + balanceRegex := regexp.MustCompile(`balance\[(.*)]`) var ( subQuery string args []any @@ -44,14 +45,13 @@ func (store *Store) volumesQueryContext(q GetVolumesWithBalancesQuery) (string, switch { case key == "account" || key == "address": - // TODO: Should allow comparison operator only if segments not used if operator != "$match" { - return "", nil, newErrInvalidQuery(fmt.Sprintf("'%s' column can only be used with $match", key)) + return "", nil, newErrInvalidQuery("'%s' column can only be used with $match", key) } switch address := value.(type) { case string: - return filterAccountAddress(address, "account_address"), nil, nil + return filterAccountAddress(address, "accounts_address"), nil, nil default: return "", nil, newErrInvalidQuery("unexpected type %T for column 'address'", address) } @@ -90,7 +90,7 @@ func (store *Store) volumesQueryContext(q GetVolumesWithBalancesQuery) (string, } -func (store *Store) buildVolumesWithBalancesQuery(query *bun.SelectQuery, q GetVolumesWithBalancesQuery, where string, args []any, useMetadata bool) *bun.SelectQuery { +func (store *Store) buildVolumesWithBalancesQuery(query *bun.SelectQuery, q ledgercontroller.GetVolumesWithBalancesQuery, where string, args []any, useMetadata bool) *bun.SelectQuery { filtersForVolumes := q.Options.Options dateFilterColumn := "effective_date" @@ -100,24 +100,24 @@ func (store *Store) buildVolumesWithBalancesQuery(query *bun.SelectQuery, q GetV } selectAccounts := store.GetDB().NewSelect(). - Column("account_address_array"). - Column("account_address"). + Column("accounts_address_array"). + Column("accounts_address"). Column("accounts_seq"). Column("asset"). Column("ledger"). ColumnExpr("sum(case when not is_source then amount else 0 end) as input"). ColumnExpr("sum(case when is_source then amount else 0 end) as output"). ColumnExpr("sum(case when not is_source then amount else -amount end) as balance"). - Table("moves"). - Group("ledger", "accounts_seq", "account_address", "account_address_array", "asset"). + ModelTableExpr(store.GetPrefixedRelationName("moves")). + Group("ledger", "accounts_seq", "accounts_address", "accounts_address_array", "asset"). Apply(filterPIT(filtersForVolumes.PIT, dateFilterColumn)). Apply(filterOOT(filtersForVolumes.OOT, dateFilterColumn)) query = query. TableExpr("(?) accountsWithVolumes", selectAccounts). Column( - "account_address", - "account_address_array", + "accounts_address", + "accounts_address_array", "accounts_seq", "ledger", "asset", @@ -131,7 +131,7 @@ func (store *Store) buildVolumesWithBalancesQuery(query *bun.SelectQuery, q GetV ColumnExpr("accounts_metadata.metadata as metadata"). 
Join(`join lateral ( select metadata - from accounts a + from ` + store.GetPrefixedRelationName("accounts") + ` a where a.seq = accountsWithVolumes.accounts_seq ) accounts_metadata on true`, ) @@ -151,20 +151,21 @@ func (store *Store) buildVolumesWithBalancesQuery(query *bun.SelectQuery, q GetV if filtersForVolumes.GroupLvl > 0 { globalQuery = globalQuery. - ColumnExpr(fmt.Sprintf(`(array_to_string((string_to_array(account_address, ':'))[1:LEAST(array_length(string_to_array(account_address, ':'),1),%d)],':')) as account`, filtersForVolumes.GroupLvl)). + ColumnExpr(fmt.Sprintf(`(array_to_string((string_to_array(accounts_address, ':'))[1:LEAST(array_length(string_to_array(accounts_address, ':'),1),%d)],':')) as account`, filtersForVolumes.GroupLvl)). ColumnExpr("asset"). ColumnExpr("sum(input) as input"). ColumnExpr("sum(output) as output"). ColumnExpr("sum(balance) as balance"). GroupExpr("account, asset") } else { - globalQuery = globalQuery.ColumnExpr("account_address as account, asset, input, output, balance") + globalQuery = globalQuery.ColumnExpr("accounts_address as account, asset, input, output, balance") } + globalQuery = globalQuery.Order("account", "asset") return globalQuery } -func (store *Store) GetVolumesWithBalances(ctx context.Context, q GetVolumesWithBalancesQuery) (*bunpaginate.Cursor[ledger.VolumesWithBalanceByAssetByAccount], error) { +func (store *Store) GetVolumesWithBalances(ctx context.Context, q ledgercontroller.GetVolumesWithBalancesQuery) (*bunpaginate.Cursor[ledger.VolumesWithBalanceByAssetByAccount], error) { var ( where string args []any @@ -178,20 +179,10 @@ func (store *Store) GetVolumesWithBalances(ctx context.Context, q GetVolumesWith } } - return paginateWithOffsetWithoutModel[PaginatedQueryOptions[FiltersForVolumes], ledger.VolumesWithBalanceByAssetByAccount]( - store, ctx, (*bunpaginate.OffsetPaginatedQuery[PaginatedQueryOptions[FiltersForVolumes]])(&q), + return paginateWithOffsetWithoutModel[ledgercontroller.PaginatedQueryOptions[ledgercontroller.FiltersForVolumes], ledger.VolumesWithBalanceByAssetByAccount]( + store, ctx, (*bunpaginate.OffsetPaginatedQuery[ledgercontroller.PaginatedQueryOptions[ledgercontroller.FiltersForVolumes]])(&q), func(query *bun.SelectQuery) *bun.SelectQuery { return store.buildVolumesWithBalancesQuery(query, q, where, args, useMetadata) }, ) } - -type GetVolumesWithBalancesQuery bunpaginate.OffsetPaginatedQuery[PaginatedQueryOptions[FiltersForVolumes]] - -func NewGetVolumesWithBalancesQuery(opts PaginatedQueryOptions[FiltersForVolumes]) GetVolumesWithBalancesQuery { - return GetVolumesWithBalancesQuery{ - PageSize: opts.PageSize, - Order: bunpaginate.OrderAsc, - Options: opts, - } -} diff --git a/internal/storage/ledger/legacy/volumes_test.go b/internal/storage/ledger/legacy/volumes_test.go new file mode 100644 index 000000000..c5ca7a0a8 --- /dev/null +++ b/internal/storage/ledger/legacy/volumes_test.go @@ -0,0 +1,674 @@ +//go:build it + +package legacy_test + +import ( + "github.com/formancehq/go-libs/v2/pointer" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + "math/big" + "testing" + + "github.com/formancehq/go-libs/v2/time" + + "github.com/formancehq/go-libs/v2/logging" + + "github.com/formancehq/go-libs/v2/metadata" + "github.com/formancehq/go-libs/v2/query" + ledger "github.com/formancehq/ledger/internal" + "github.com/stretchr/testify/require" +) + +func TestVolumesList(t *testing.T) { + t.Parallel() + store := newLedgerStore(t) + now := time.Now() + ctx := logging.TestingContext() + + 
previousPIT := now.Add(-2 * time.Minute) + futurPIT := now.Add(2 * time.Minute) + + previousOOT := now.Add(-2 * time.Minute) + futurOOT := now.Add(2 * time.Minute) + + require.NoError(t, store.newStore.UpdateAccountsMetadata(ctx, map[string]metadata.Metadata{ + "account:1": { + "category": "1", + }, + "account:2": { + "category": "2", + }, + "world": { + "foo": "bar", + }, + })) + + tx1 := ledger.NewTransaction(). + WithPostings(ledger.NewPosting("world", "account:1", "USD", big.NewInt(100))). + WithTimestamp(now.Add(-4 * time.Minute)). + WithInsertedAt(now.Add(4 * time.Minute)) + err := store.newStore.CommitTransaction(ctx, &tx1) + require.NoError(t, err) + + tx2 := ledger.NewTransaction(). + WithPostings(ledger.NewPosting("world", "account:1", "USD", big.NewInt(100))). + WithTimestamp(now.Add(-3 * time.Minute)). + WithInsertedAt(now.Add(3 * time.Minute)) + err = store.newStore.CommitTransaction(ctx, &tx2) + require.NoError(t, err) + + tx3 := ledger.NewTransaction(). + WithPostings(ledger.NewPosting("account:1", "bank", "USD", big.NewInt(50))). + WithTimestamp(now.Add(-2 * time.Minute)). + WithInsertedAt(now.Add(2 * time.Minute)) + err = store.newStore.CommitTransaction(ctx, &tx3) + require.NoError(t, err) + + tx4 := ledger.NewTransaction(). + WithPostings(ledger.NewPosting("world", "account:1", "USD", big.NewInt(0))). + WithTimestamp(now.Add(-time.Minute)). + WithInsertedAt(now.Add(time.Minute)) + err = store.newStore.CommitTransaction(ctx, &tx4) + require.NoError(t, err) + + tx5 := ledger.NewTransaction(). + WithPostings(ledger.NewPosting("world", "account:2", "USD", big.NewInt(50))). + WithTimestamp(now). + WithInsertedAt(now) + err = store.newStore.CommitTransaction(ctx, &tx5) + require.NoError(t, err) + + tx6 := ledger.NewTransaction(). + WithPostings(ledger.NewPosting("world", "account:2", "USD", big.NewInt(50))). + WithTimestamp(now.Add(1 * time.Minute)). + WithInsertedAt(now.Add(-time.Minute)) + err = store.newStore.CommitTransaction(ctx, &tx6) + require.NoError(t, err) + + tx7 := ledger.NewTransaction(). + WithPostings(ledger.NewPosting("account:2", "bank", "USD", big.NewInt(50))). + WithTimestamp(now.Add(2 * time.Minute)). + WithInsertedAt(now.Add(-2 * time.Minute)) + err = store.newStore.CommitTransaction(ctx, &tx7) + require.NoError(t, err) + + tx8 := ledger.NewTransaction(). + WithPostings(ledger.NewPosting("world", "account:2", "USD", big.NewInt(25))). + WithTimestamp(now.Add(3 * time.Minute)). 
+ WithInsertedAt(now.Add(-3 * time.Minute)) + err = store.newStore.CommitTransaction(ctx, &tx8) + require.NoError(t, err) + + t.Run("Get all volumes with balance for insertion date", func(t *testing.T) { + t.Parallel() + volumes, err := store.GetVolumesWithBalances(ctx, ledgercontroller.NewGetVolumesWithBalancesQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.FiltersForVolumes{UseInsertionDate: true}))) + require.NoError(t, err) + + require.Len(t, volumes.Data, 4) + }) + + t.Run("Get all volumes with balance for effective date", func(t *testing.T) { + t.Parallel() + volumes, err := store.GetVolumesWithBalances(ctx, ledgercontroller.NewGetVolumesWithBalancesQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.FiltersForVolumes{UseInsertionDate: false}))) + require.NoError(t, err) + + require.Len(t, volumes.Data, 4) + }) + + t.Run("Get all volumes with balance for insertion date with previous pit", func(t *testing.T) { + t.Parallel() + volumes, err := store.GetVolumesWithBalances(ctx, ledgercontroller.NewGetVolumesWithBalancesQuery(ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{ + PITFilter: ledgercontroller.PITFilter{PIT: &previousPIT, OOT: nil}, + UseInsertionDate: true, + }))) + + require.NoError(t, err) + require.Len(t, volumes.Data, 3) + require.Equal(t, ledger.VolumesWithBalanceByAssetByAccount{ + Account: "account:2", + Asset: "USD", + VolumesWithBalance: ledger.VolumesWithBalance{ + Input: big.NewInt(25), + Output: big.NewInt(50), + Balance: big.NewInt(-25), + }, + }, volumes.Data[0]) + }) + + t.Run("Get all volumes with balance for insertion date with futur pit", func(t *testing.T) { + t.Parallel() + volumes, err := store.GetVolumesWithBalances(ctx, ledgercontroller.NewGetVolumesWithBalancesQuery(ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{ + PITFilter: ledgercontroller.PITFilter{PIT: &futurPIT, OOT: nil}, + UseInsertionDate: true, + }))) + require.NoError(t, err) + + require.Len(t, volumes.Data, 4) + }) + + t.Run("Get all volumes with balance for insertion date with previous oot", func(t *testing.T) { + t.Parallel() + volumes, err := store.GetVolumesWithBalances(ctx, ledgercontroller.NewGetVolumesWithBalancesQuery(ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{ + PITFilter: ledgercontroller.PITFilter{PIT: nil, OOT: &previousOOT}, + UseInsertionDate: true, + }))) + require.NoError(t, err) + + require.Len(t, volumes.Data, 4) + }) + + t.Run("Get all volumes with balance for insertion date with future oot", func(t *testing.T) { + t.Parallel() + volumes, err := store.GetVolumesWithBalances(ctx, ledgercontroller.NewGetVolumesWithBalancesQuery(ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{ + PITFilter: ledgercontroller.PITFilter{PIT: nil, OOT: &futurOOT}, + UseInsertionDate: true, + }))) + + require.NoError(t, err) + require.Len(t, volumes.Data, 3) + require.Equal(t, ledger.VolumesWithBalanceByAssetByAccount{ + Account: "account:1", + Asset: "USD", + VolumesWithBalance: ledger.VolumesWithBalance{ + Input: big.NewInt(200), + Output: big.NewInt(50), + Balance: big.NewInt(150), + }, + }, volumes.Data[0]) + }) + + t.Run("Get all volumes with balance for effective date with previous pit", func(t *testing.T) { + t.Parallel() + volumes, err := store.GetVolumesWithBalances(ctx, ledgercontroller.NewGetVolumesWithBalancesQuery(ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{ + PITFilter: 
ledgercontroller.PITFilter{PIT: &previousPIT, OOT: nil}, + UseInsertionDate: false, + }))) + + require.NoError(t, err) + require.Len(t, volumes.Data, 3) + require.Equal(t, ledger.VolumesWithBalanceByAssetByAccount{ + Account: "account:1", + Asset: "USD", + VolumesWithBalance: ledger.VolumesWithBalance{ + Input: big.NewInt(200), + Output: big.NewInt(50), + Balance: big.NewInt(150), + }, + }, volumes.Data[0]) + }) + + t.Run("Get all volumes with balance for effective date with futur pit", func(t *testing.T) { + t.Parallel() + volumes, err := store.GetVolumesWithBalances(ctx, ledgercontroller.NewGetVolumesWithBalancesQuery(ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{ + PITFilter: ledgercontroller.PITFilter{PIT: &futurPIT, OOT: nil}, + UseInsertionDate: false, + }))) + require.NoError(t, err) + + require.Len(t, volumes.Data, 4) + }) + + t.Run("Get all volumes with balance for effective date with previous oot", func(t *testing.T) { + t.Parallel() + volumes, err := store.GetVolumesWithBalances(ctx, ledgercontroller.NewGetVolumesWithBalancesQuery(ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{ + PITFilter: ledgercontroller.PITFilter{PIT: nil, OOT: &previousOOT}, + UseInsertionDate: false, + }))) + require.NoError(t, err) + + require.Len(t, volumes.Data, 4) + }) + + t.Run("Get all volumes with balance for effective date with futur oot", func(t *testing.T) { + t.Parallel() + volumes, err := store.GetVolumesWithBalances(ctx, ledgercontroller.NewGetVolumesWithBalancesQuery(ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{ + PITFilter: ledgercontroller.PITFilter{PIT: nil, OOT: &futurOOT}, + UseInsertionDate: false, + }))) + + require.NoError(t, err) + require.Len(t, volumes.Data, 3) + require.Equal(t, ledger.VolumesWithBalanceByAssetByAccount{ + Account: "account:2", + Asset: "USD", + VolumesWithBalance: ledger.VolumesWithBalance{ + Input: big.NewInt(25), + Output: big.NewInt(50), + Balance: big.NewInt(-25), + }, + }, volumes.Data[0]) + }) + + t.Run("Get all volumes with balance for insertion date with future PIT and now OOT", func(t *testing.T) { + t.Parallel() + volumes, err := store.GetVolumesWithBalances(ctx, ledgercontroller.NewGetVolumesWithBalancesQuery(ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{ + PITFilter: ledgercontroller.PITFilter{PIT: &futurPIT, OOT: &now}, + UseInsertionDate: true, + }))) + + require.NoError(t, err) + require.Len(t, volumes.Data, 4) + require.Equal(t, ledger.VolumesWithBalanceByAssetByAccount{ + Account: "account:1", + Asset: "USD", + VolumesWithBalance: ledger.VolumesWithBalance{ + Input: big.NewInt(0), + Output: big.NewInt(50), + Balance: big.NewInt(-50), + }, + }, volumes.Data[0]) + + }) + + t.Run("Get all volumes with balance for insertion date with previous OOT and now PIT", func(t *testing.T) { + t.Parallel() + volumes, err := store.GetVolumesWithBalances(ctx, ledgercontroller.NewGetVolumesWithBalancesQuery(ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{ + PITFilter: ledgercontroller.PITFilter{PIT: &now, OOT: &previousOOT}, + UseInsertionDate: true, + }))) + + require.NoError(t, err) + require.Len(t, volumes.Data, 3) + require.Equal(t, ledger.VolumesWithBalanceByAssetByAccount{ + Account: "account:2", + Asset: "USD", + VolumesWithBalance: ledger.VolumesWithBalance{ + Input: big.NewInt(100), + Output: big.NewInt(50), + Balance: big.NewInt(50), + }, + }, volumes.Data[0]) + + }) + + t.Run("Get all 
volumes with balance for effective date with future PIT and now OOT", func(t *testing.T) { + t.Parallel() + volumes, err := store.GetVolumesWithBalances(ctx, ledgercontroller.NewGetVolumesWithBalancesQuery(ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{ + PITFilter: ledgercontroller.PITFilter{PIT: &futurPIT, OOT: &now}, + UseInsertionDate: false, + }))) + + require.NoError(t, err) + require.Len(t, volumes.Data, 3) + require.Equal(t, ledger.VolumesWithBalanceByAssetByAccount{ + Account: "account:2", + Asset: "USD", + VolumesWithBalance: ledger.VolumesWithBalance{ + Input: big.NewInt(100), + Output: big.NewInt(50), + Balance: big.NewInt(50), + }, + }, volumes.Data[0]) + }) + + t.Run("Get all volumes with balance for insertion date with previous OOT and now PIT", func(t *testing.T) { + t.Parallel() + volumes, err := store.GetVolumesWithBalances(ctx, ledgercontroller.NewGetVolumesWithBalancesQuery(ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{ + PITFilter: ledgercontroller.PITFilter{PIT: &now, OOT: &previousOOT}, + UseInsertionDate: false, + }))) + + require.NoError(t, err) + require.Len(t, volumes.Data, 4) + require.Equal(t, ledger.VolumesWithBalanceByAssetByAccount{ + Account: "account:1", + Asset: "USD", + VolumesWithBalance: ledger.VolumesWithBalance{ + Input: big.NewInt(0), + Output: big.NewInt(50), + Balance: big.NewInt(-50), + }, + }, volumes.Data[0]) + + }) + + t.Run("Get account1 volume and Balance for insertion date with previous OOT and now PIT", func(t *testing.T) { + t.Parallel() + + volumes, err := store.GetVolumesWithBalances(ctx, + ledgercontroller.NewGetVolumesWithBalancesQuery( + ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{ + PITFilter: ledgercontroller.PITFilter{PIT: &now, OOT: &previousOOT}, + UseInsertionDate: false, + }).WithQueryBuilder(query.Match("account", "account:1"))), + ) + + require.NoError(t, err) + require.Len(t, volumes.Data, 1) + require.Equal(t, ledger.VolumesWithBalanceByAssetByAccount{ + Account: "account:1", + Asset: "USD", + VolumesWithBalance: ledger.VolumesWithBalance{ + Input: big.NewInt(0), + Output: big.NewInt(50), + Balance: big.NewInt(-50), + }, + }, volumes.Data[0]) + + }) + + t.Run("Using Metadata regex", func(t *testing.T) { + t.Parallel() + + volumes, err := store.GetVolumesWithBalances(ctx, + ledgercontroller.NewGetVolumesWithBalancesQuery( + ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{}).WithQueryBuilder(query.Match("metadata[foo]", "bar"))), + ) + + require.NoError(t, err) + require.Len(t, volumes.Data, 1) + + }) + + t.Run("Using exists metadata filter 1", func(t *testing.T) { + t.Parallel() + + volumes, err := store.GetVolumesWithBalances(ctx, + ledgercontroller.NewGetVolumesWithBalancesQuery( + ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{}).WithQueryBuilder(query.Exists("metadata", "category"))), + ) + + require.NoError(t, err) + require.Len(t, volumes.Data, 2) + }) + + t.Run("Using exists metadata filter 2", func(t *testing.T) { + t.Parallel() + + volumes, err := store.GetVolumesWithBalances(ctx, + ledgercontroller.NewGetVolumesWithBalancesQuery( + ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{}).WithQueryBuilder(query.Exists("metadata", "foo"))), + ) + + require.NoError(t, err) + require.Len(t, volumes.Data, 1) + }) +} + +func TestVolumesAggregate(t *testing.T) { + t.Parallel() + store := newLedgerStore(t) + now := time.Now() + ctx 
:= logging.TestingContext() + + pit := now.Add(2 * time.Minute) + oot := now.Add(-2 * time.Minute) + + tx1 := ledger.NewTransaction(). + WithPostings(ledger.NewPosting("world", "account:1:2", "USD", big.NewInt(100))). + WithTimestamp(now.Add(-4 * time.Minute)). + WithInsertedAt(now) + err := store.newStore.CommitTransaction(ctx, &tx1) + require.NoError(t, err) + + tx2 := ledger.NewTransaction(). + WithPostings(ledger.NewPosting("world", "account:1:1", "EUR", big.NewInt(100))). + WithTimestamp(now.Add(-3 * time.Minute)) + err = store.newStore.CommitTransaction(ctx, &tx2) + require.NoError(t, err) + + tx3 := ledger.NewTransaction(). + WithPostings(ledger.NewPosting("world", "account:1:2", "EUR", big.NewInt(50))). + WithTimestamp(now.Add(-2 * time.Minute)) + err = store.newStore.CommitTransaction(ctx, &tx3) + require.NoError(t, err) + + tx4 := ledger.NewTransaction(). + WithPostings(ledger.NewPosting("world", "account:1:3", "USD", big.NewInt(0))). + WithTimestamp(now.Add(-time.Minute)) + err = store.newStore.CommitTransaction(ctx, &tx4) + require.NoError(t, err) + + tx5 := ledger.NewTransaction(). + WithPostings(ledger.NewPosting("world", "account:2:1", "USD", big.NewInt(50))). + WithTimestamp(now) + err = store.newStore.CommitTransaction(ctx, &tx5) + require.NoError(t, err) + + tx6 := ledger.NewTransaction(). + WithPostings(ledger.NewPosting("world", "account:2:2", "USD", big.NewInt(50))). + WithTimestamp(now.Add(1 * time.Minute)) + err = store.newStore.CommitTransaction(ctx, &tx6) + require.NoError(t, err) + + tx7 := ledger.NewTransaction(). + WithPostings(ledger.NewPosting("world", "account:2:3", "EUR", big.NewInt(25))). + WithTimestamp(now.Add(3 * time.Minute)) + err = store.newStore.CommitTransaction(ctx, &tx7) + require.NoError(t, err) + + require.NoError(t, store.newStore.UpdateAccountsMetadata(ctx, map[string]metadata.Metadata{ + "account:1:1": { + "foo": "bar", + }, + })) + + t.Run("Aggregation Volumes with balance for GroupLvl 0", func(t *testing.T) { + t.Parallel() + + volumes, err := store.GetVolumesWithBalances(ctx, ledgercontroller.NewGetVolumesWithBalancesQuery( + ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{ + UseInsertionDate: true, + GroupLvl: 0, + }).WithQueryBuilder(query.Match("account", "account::")))) + + require.NoError(t, err) + require.Len(t, volumes.Data, 7) + }) + + t.Run("Aggregation Volumes with balance for GroupLvl 1", func(t *testing.T) { + t.Parallel() + volumes, err := store.GetVolumesWithBalances(ctx, ledgercontroller.NewGetVolumesWithBalancesQuery( + ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{ + UseInsertionDate: true, + GroupLvl: 1, + }).WithQueryBuilder(query.Match("account", "account::")))) + + require.NoError(t, err) + require.Len(t, volumes.Data, 2) + }) + + t.Run("Aggregation Volumes with balance for GroupLvl 2", func(t *testing.T) { + t.Parallel() + volumes, err := store.GetVolumesWithBalances(ctx, ledgercontroller.NewGetVolumesWithBalancesQuery( + ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{ + UseInsertionDate: true, + GroupLvl: 2, + }).WithQueryBuilder(query.Match("account", "account::")))) + + require.NoError(t, err) + require.Len(t, volumes.Data, 4) + }) + + t.Run("Aggregation Volumes with balance for GroupLvl 3", func(t *testing.T) { + t.Parallel() + volumes, err := store.GetVolumesWithBalances(ctx, ledgercontroller.NewGetVolumesWithBalancesQuery( + ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{ + 
UseInsertionDate: true, + GroupLvl: 3, + }).WithQueryBuilder(query.Match("account", "account::")))) + + require.NoError(t, err) + require.Len(t, volumes.Data, 7) + }) + + t.Run("Aggregation Volumes with balance for GroupLvl 1 && PIT && OOT && effectiveDate", func(t *testing.T) { + t.Parallel() + + volumes, err := store.GetVolumesWithBalances(ctx, ledgercontroller.NewGetVolumesWithBalancesQuery( + ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &pit, + OOT: &oot, + }, + GroupLvl: 1, + }).WithQueryBuilder(query.Match("account", "account::")))) + + require.NoError(t, err) + require.Len(t, volumes.Data, 2) + require.Equal(t, volumes.Data[0], ledger.VolumesWithBalanceByAssetByAccount{ + Account: "account", + Asset: "EUR", + VolumesWithBalance: ledger.VolumesWithBalance{ + Input: big.NewInt(50), + Output: big.NewInt(0), + Balance: big.NewInt(50), + }, + }) + require.Equal(t, volumes.Data[1], ledger.VolumesWithBalanceByAssetByAccount{ + Account: "account", + Asset: "USD", + VolumesWithBalance: ledger.VolumesWithBalance{ + Input: big.NewInt(100), + Output: big.NewInt(0), + Balance: big.NewInt(100), + }, + }) + }) + + t.Run("Aggregation Volumes with balance for GroupLvl 1 && PIT && OOT && effectiveDate && Balance Filter 1", func(t *testing.T) { + t.Parallel() + volumes, err := store.GetVolumesWithBalances(ctx, ledgercontroller.NewGetVolumesWithBalancesQuery( + ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &pit, + OOT: &oot, + }, + UseInsertionDate: false, + GroupLvl: 1, + }).WithQueryBuilder( + query.And(query.Match("account", "account::"), query.Gte("balance[EUR]", 50))))) + + require.NoError(t, err) + require.Len(t, volumes.Data, 1) + require.Equal(t, volumes.Data[0], ledger.VolumesWithBalanceByAssetByAccount{ + Account: "account", + Asset: "EUR", + VolumesWithBalance: ledger.VolumesWithBalance{ + Input: big.NewInt(50), + Output: big.NewInt(0), + Balance: big.NewInt(50), + }, + }) + }) + + t.Run("Aggregation Volumes with balance for GroupLvl 1 && Balance Filter 2", func(t *testing.T) { + t.Parallel() + volumes, err := store.GetVolumesWithBalances(ctx, ledgercontroller.NewGetVolumesWithBalancesQuery( + ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{ + PITFilter: ledgercontroller.PITFilter{}, + UseInsertionDate: true, + GroupLvl: 2, + }).WithQueryBuilder( + query.Or( + query.Match("account", "account:1:"), + query.Lte("balance[USD]", 0))))) + + require.NoError(t, err) + require.Len(t, volumes.Data, 3) + require.Equal(t, volumes.Data[0], ledger.VolumesWithBalanceByAssetByAccount{ + Account: "account:1", + Asset: "EUR", + VolumesWithBalance: ledger.VolumesWithBalance{ + Input: big.NewInt(150), + Output: big.NewInt(0), + Balance: big.NewInt(150), + }, + }) + require.Equal(t, volumes.Data[1], ledger.VolumesWithBalanceByAssetByAccount{ + Account: "account:1", + Asset: "USD", + VolumesWithBalance: ledger.VolumesWithBalance{ + Input: big.NewInt(100), + Output: big.NewInt(0), + Balance: big.NewInt(100), + }, + }) + require.Equal(t, volumes.Data[2], ledger.VolumesWithBalanceByAssetByAccount{ + Account: "world", + Asset: "USD", + VolumesWithBalance: ledger.VolumesWithBalance{ + Input: big.NewInt(0), + Output: big.NewInt(200), + Balance: big.NewInt(-200), + }, + }) + }) + t.Run("filter using account matching, metadata, and group", func(t *testing.T) { + t.Parallel() + + volumes, err := 
store.GetVolumesWithBalances(ctx, + ledgercontroller.NewGetVolumesWithBalancesQuery( + ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{ + GroupLvl: 1, + }).WithQueryBuilder(query.And( + query.Match("account", "account::"), + query.Match("metadata[foo]", "bar"), + ))), + ) + + require.NoError(t, err) + require.Len(t, volumes.Data, 1) + }) + + t.Run("filter using account matching, metadata, and group and PIT", func(t *testing.T) { + t.Parallel() + + volumes, err := store.GetVolumesWithBalances(ctx, + ledgercontroller.NewGetVolumesWithBalancesQuery( + ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{ + GroupLvl: 1, + PITFilter: ledgercontroller.PITFilter{ + PIT: pointer.For(now.Add(time.Minute)), + }, + }).WithQueryBuilder(query.And( + query.Match("account", "account::"), + query.Match("metadata[foo]", "bar"), + ))), + ) + + require.NoError(t, err) + require.Len(t, volumes.Data, 1) + }) + + t.Run("filter using metadata matching only", func(t *testing.T) { + t.Parallel() + + volumes, err := store.GetVolumesWithBalances(ctx, + ledgercontroller.NewGetVolumesWithBalancesQuery( + ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{ + GroupLvl: 1, + }).WithQueryBuilder(query.And( + query.Match("metadata[foo]", "bar"), + ))), + ) + + require.NoError(t, err) + require.Len(t, volumes.Data, 1) + }) +} diff --git a/internal/storage/ledger/logs.go b/internal/storage/ledger/logs.go new file mode 100644 index 000000000..132ae8bfc --- /dev/null +++ b/internal/storage/ledger/logs.go @@ -0,0 +1,170 @@ +package ledger + +import ( + "context" + "database/sql/driver" + "encoding/json" + "fmt" + "github.com/formancehq/ledger/internal/tracing" + + "errors" + "github.com/formancehq/go-libs/v2/bun/bunpaginate" + "github.com/formancehq/go-libs/v2/platform/postgres" + "github.com/formancehq/go-libs/v2/pointer" + "github.com/formancehq/go-libs/v2/query" + ledger "github.com/formancehq/ledger/internal" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" +) + +// Log overrides ledger.Log to be able to properly read/write the payload, which is jsonb +// in the database and 'any' on the Log structure (data column) +type Log struct { + *ledger.Log `bun:",extend"` + + Ledger string `bun:"ledger,type:varchar"` + Data RawMessage `bun:"data,type:jsonb"` + Memento RawMessage `bun:"memento,type:bytea"` +} + +func (log Log) ToCore() ledger.Log { + payload, err := ledger.HydrateLog(log.Type, log.Data) + if err != nil { + panic(fmt.Errorf("hydrating log data: %w", err)) + } + log.Log.Data = payload + + return *log.Log +} + +type RawMessage json.RawMessage + +func (j RawMessage) Value() (driver.Value, error) { + if j == nil { + return nil, nil + } + return string(j), nil +} + +func (s *Store) InsertLog(ctx context.Context, log *ledger.Log) error { + + _, err := tracing.TraceWithMetric( + ctx, + "InsertLog", + s.tracer, + s.insertLogHistogram, + tracing.NoResult(func(ctx context.Context) error { + + // We lock the logs table because we need the last log to remain unchanged until the transaction commits + if s.ledger.HasFeature(ledger.FeatureHashLogs, "SYNC") { + _, err := s.db.NewRaw(`select pg_advisory_xact_lock(?)`, s.ledger.ID).Exec(ctx) + if err != nil { + return postgres.ResolveError(err) + } + } + + payloadData, err := json.Marshal(log.Data) + if err != nil { + return fmt.Errorf("failed to marshal log data: %w", err) + } + + mementoObject := log.Data.(any) + if memento, ok := mementoObject.(ledger.Memento); ok { + mementoObject = 
memento.GetMemento() + } + + mementoData, err := json.Marshal(mementoObject) + if err != nil { + return err + } + + _, err = s.db. + NewInsert(). + Model(&Log{ + Log: log, + Ledger: s.ledger.Name, + Data: payloadData, + Memento: mementoData, + }). + ModelTableExpr(s.GetPrefixedRelationName("logs")). + Value("id", "nextval(?)", s.GetPrefixedRelationName(fmt.Sprintf(`"log_id_%d"`, s.ledger.ID))). + Returning("*"). + Exec(ctx) + if err != nil { + err := postgres.ResolveError(err) + switch { + case errors.Is(err, postgres.ErrConstraintsFailed{}): + if err.(postgres.ErrConstraintsFailed).GetConstraint() == "logs_idempotency_key" { + return ledgercontroller.NewErrIdempotencyKeyConflict(log.IdempotencyKey) + } + default: + return fmt.Errorf("inserting log: %w", err) + } + } + + return nil + }), + ) + + return err +} + +func (s *Store) ListLogs(ctx context.Context, q ledgercontroller.GetLogsQuery) (*bunpaginate.Cursor[ledger.Log], error) { + return tracing.TraceWithMetric( + ctx, + "ListLogs", + s.tracer, + s.listLogsHistogram, + func(ctx context.Context) (*bunpaginate.Cursor[ledger.Log], error) { + selectQuery := s.db.NewSelect(). + ModelTableExpr(s.GetPrefixedRelationName("logs")). + ColumnExpr("*"). + Where("ledger = ?", s.ledger.Name) + + if q.Options.QueryBuilder != nil { + subQuery, args, err := q.Options.QueryBuilder.Build(query.ContextFn(func(key, operator string, value any) (string, []any, error) { + switch { + case key == "date": + return fmt.Sprintf("%s %s ?", key, query.DefaultComparisonOperatorsMapping[operator]), []any{value}, nil + default: + return "", nil, fmt.Errorf("unknown key '%s' when building query", key) + } + })) + if err != nil { + return nil, err + } + selectQuery = selectQuery.Where(subQuery, args...) + } + + cursor, err := bunpaginate.UsingColumn[ledgercontroller.PaginatedQueryOptions[any], Log](ctx, selectQuery, bunpaginate.ColumnPaginatedQuery[ledgercontroller.PaginatedQueryOptions[any]](q)) + if err != nil { + return nil, err + } + + return bunpaginate.MapCursor(cursor, Log.ToCore), nil + }, + ) +} + +func (s *Store) ReadLogWithIdempotencyKey(ctx context.Context, key string) (*ledger.Log, error) { + return tracing.TraceWithMetric( + ctx, + "ReadLogWithIdempotencyKey", + s.tracer, + s.readLogWithIdempotencyKeyHistogram, + func(ctx context.Context) (*ledger.Log, error) { + ret := &Log{} + if err := s.db.NewSelect(). + Model(ret). + ModelTableExpr(s.GetPrefixedRelationName("logs")). + Column("*"). + Where("idempotency_key = ?", key). + Where("ledger = ?", s.ledger.Name). + Limit(1). 
+ Scan(ctx); err != nil { + return nil, postgres.ResolveError(err) + } + + return pointer.For(ret.ToCore()), nil + }, + ) +} diff --git a/internal/storage/ledger/logs_test.go b/internal/storage/ledger/logs_test.go new file mode 100644 index 000000000..a4b284a6b --- /dev/null +++ b/internal/storage/ledger/logs_test.go @@ -0,0 +1,208 @@ +//go:build it + +package ledger_test + +import ( + "context" + "database/sql" + "golang.org/x/sync/errgroup" + "math/big" + "testing" + + "errors" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + + "github.com/formancehq/go-libs/v2/bun/bunpaginate" + "github.com/formancehq/go-libs/v2/time" + + "github.com/formancehq/go-libs/v2/logging" + + "github.com/formancehq/go-libs/v2/query" + ledger "github.com/formancehq/ledger/internal" + "github.com/stretchr/testify/require" +) + +func TestInsertLog(t *testing.T) { + t.Parallel() + + store := newLedgerStore(t) + ctx := logging.TestingContext() + + t.Run("check hash against core", func(t *testing.T) { + // Insert a first tx (we don't have any previous hash to use at this moment) + log1 := ledger.NewLog(ledger.CreatedTransaction{ + Transaction: ledger.NewTransaction(), + AccountMetadata: ledger.AccountMetadata{}, + }) + log1Copy := log1 + + err := store.InsertLog(ctx, &log1) + require.NoError(t, err) + + require.Equal(t, 1, log1.ID) + require.NotZero(t, log1.Hash) + require.NotEmpty(t, log1.Date) + + // Ensure that the database hashing is the same as the Go hashing + log1Copy.Date = log1.Date + chainedLog1 := log1Copy.ChainLog(nil) + require.Equal(t, chainedLog1.Hash, log1.Hash) + + // Insert a new log to test the hash when a previous hash exists + // We also add an idempotency key to check for conflicts + log2 := ledger.NewLog(ledger.CreatedTransaction{ + Transaction: ledger.NewTransaction(), + AccountMetadata: ledger.AccountMetadata{}, + }) + log2Copy := log2 + err = store.InsertLog(ctx, &log2) + require.NoError(t, err) + require.Equal(t, 2, log2.ID) + require.NotZero(t, log2.Hash) + require.NotZero(t, log2.Date) + + // Ensure that the database hashing is the same as the Go hashing + log2Copy.Date = log2.Date + chainedLog2 := log2Copy.ChainLog(&log1) + require.Equal(t, chainedLog2.Hash, log2.Hash) + }) + + t.Run("duplicate IK", func(t *testing.T) { + // Insert a first tx (we don't have any previous hash to use at this moment) + logTx := ledger.NewLog(ledger.CreatedTransaction{ + Transaction: ledger.NewTransaction(), + AccountMetadata: ledger.AccountMetadata{}, + }). + WithIdempotencyKey("foo") + + err := store.InsertLog(ctx, &logTx) + require.NoError(t, err) + + require.NotZero(t, logTx.ID) + require.NotZero(t, logTx.Hash) + + // Creating a new log with the same IK as the previous one should fail + logTx = ledger.NewLog(ledger.CreatedTransaction{ + Transaction: ledger.NewTransaction(), + AccountMetadata: ledger.AccountMetadata{}, + }). 
+ WithIdempotencyKey("foo") + err = store.InsertLog(ctx, &logTx) + require.Error(t, err) + require.True(t, errors.Is(err, ledgercontroller.ErrIdempotencyKeyConflict{})) + }) + + t.Run("hash consistency over high concurrency", func(t *testing.T) { + errGroup, _ := errgroup.WithContext(ctx) + const countLogs = 50 + for range countLogs { + errGroup.Go(func() error { + tx, err := store.GetDB().BeginTx(ctx, &sql.TxOptions{}) + if err != nil { + return err + } + defer func() { + _ = tx.Rollback() + }() + store := store.WithDB(tx) + + logTx := ledger.NewLog(ledger.CreatedTransaction{ + Transaction: ledger.NewTransaction(), + AccountMetadata: ledger.AccountMetadata{}, + }) + err = store.InsertLog(ctx, &logTx) + if err != nil { + return err + } + return tx.Commit() + }) + } + err := errGroup.Wait() + require.NoError(t, err) + + logs, err := store.ListLogs(ctx, ledgercontroller.NewListLogsQuery(ledgercontroller.PaginatedQueryOptions[any]{ + PageSize: countLogs, + }).WithOrder(bunpaginate.OrderAsc)) + require.NoError(t, err) + + var previous *ledger.Log + for _, log := range logs.Data { + expectedHash := log.Hash + expectedID := log.ID + log.Hash = nil + log.ID = 0 + chainedLog := log.ChainLog(previous) + require.Equal(t, expectedHash, chainedLog.Hash, "checking log hash %d", expectedID) + previous = &chainedLog + } + }) +} + +func TestReadLogWithIdempotencyKey(t *testing.T) { + t.Parallel() + + store := newLedgerStore(t) + ctx := logging.TestingContext() + + logTx := ledger.NewLog( + ledger.CreatedTransaction{ + Transaction: ledger.NewTransaction(). + WithPostings( + ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), + ), + AccountMetadata: ledger.AccountMetadata{}, + }, + ) + log := logTx.WithIdempotencyKey("test") + err := store.InsertLog(ctx, &log) + require.NoError(t, err) + + lastLog, err := store.ReadLogWithIdempotencyKey(context.Background(), "test") + require.NoError(t, err) + require.NotNil(t, lastLog) + require.Equal(t, log, *lastLog) +} + +func TestLogsList(t *testing.T) { + t.Parallel() + store := newLedgerStore(t) + now := time.Now() + ctx := logging.TestingContext() + + for i := 1; i <= 3; i++ { + newLog := ledger.NewLog(ledger.CreatedTransaction{ + Transaction: ledger.NewTransaction(), + AccountMetadata: ledger.AccountMetadata{}, + }) + newLog.Date = now.Add(-time.Duration(i) * time.Hour) + + err := store.InsertLog(ctx, &newLog) + require.NoError(t, err) + } + + cursor, err := store.ListLogs(context.Background(), ledgercontroller.NewListLogsQuery(ledgercontroller.NewPaginatedQueryOptions[any](nil))) + require.NoError(t, err) + require.Equal(t, bunpaginate.QueryDefaultPageSize, cursor.PageSize) + + require.Equal(t, 3, len(cursor.Data)) + require.EqualValues(t, 3, cursor.Data[0].ID) + + cursor, err = store.ListLogs(context.Background(), ledgercontroller.NewListLogsQuery(ledgercontroller.NewPaginatedQueryOptions[any](nil).WithPageSize(1))) + require.NoError(t, err) + // Should get only the first log. + require.Equal(t, 1, cursor.PageSize) + require.EqualValues(t, 3, cursor.Data[0].ID) + + cursor, err = store.ListLogs(context.Background(), ledgercontroller.NewListLogsQuery(ledgercontroller.NewPaginatedQueryOptions[any](nil). + WithQueryBuilder(query.And( + query.Gte("date", now.Add(-2*time.Hour)), + query.Lt("date", now.Add(-time.Hour)), + )). + WithPageSize(10), + )) + require.NoError(t, err) + require.Equal(t, 10, cursor.PageSize) + // Should get only the second log, as StartTime is inclusive and EndTime exclusive. 
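+ // Note that ListLogs' query builder only understands the "date" key (any other key is rejected with an "unknown key" error), so a time window like this one is expressed with two predicates, roughly: query.And(query.Gte("date", from), query.Lt("date", to)), where from/to are illustrative bounds.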
+ require.Len(t, cursor.Data, 1) + require.EqualValues(t, 2, cursor.Data[0].ID) +} diff --git a/internal/storage/ledger/main_test.go b/internal/storage/ledger/main_test.go new file mode 100644 index 000000000..1c63c7f3c --- /dev/null +++ b/internal/storage/ledger/main_test.go @@ -0,0 +1,105 @@ +//go:build it + +package ledger_test + +import ( + "database/sql" + . "github.com/formancehq/go-libs/v2/testing/utils" + systemstore "github.com/formancehq/ledger/internal/storage/driver" + ledgerstore "github.com/formancehq/ledger/internal/storage/ledger" + "go.opentelemetry.io/otel/trace/noop" + "math/big" + "os" + "testing" + + "github.com/formancehq/go-libs/v2/bun/bunconnect" + "github.com/formancehq/go-libs/v2/bun/bundebug" + "github.com/formancehq/go-libs/v2/testing/docker" + ledger "github.com/formancehq/ledger/internal" + "github.com/formancehq/ledger/internal/storage/bucket" + "github.com/google/go-cmp/cmp" + + "github.com/uptrace/bun/dialect/pgdialect" + + "github.com/uptrace/bun" + + "github.com/formancehq/go-libs/v2/logging" + "github.com/formancehq/go-libs/v2/testing/platform/pgtesting" + "github.com/google/uuid" + "github.com/stretchr/testify/require" +) + +var ( + srv = NewDeferred[*pgtesting.PostgresServer]() + bunDB = NewDeferred[*bun.DB]() +) + +func TestMain(m *testing.M) { + WithTestMain(func(t *TestingTForMain) int { + srv.LoadAsync(func() *pgtesting.PostgresServer { + ret := pgtesting.CreatePostgresServer(t, docker.NewPool(t, logging.Testing()), pgtesting.WithExtension("pgcrypto")) + + bunDB.LoadAsync(func() *bun.DB { + db, err := sql.Open("pgx", ret.GetDSN()) + require.NoError(t, err) + + bunDB := bun.NewDB(db, pgdialect.New()) + if os.Getenv("DEBUG") == "true" { + bunDB.AddQueryHook(bundebug.NewQueryHook()) + } + + return bunDB + }) + return ret + }) + + return m.Run() + }) +} + +type T interface { + require.TestingT + Helper() + Cleanup(func()) +} + +func newLedgerStore(t T) *ledgerstore.Store { + t.Helper() + + ledgerName := uuid.NewString()[:8] + ctx := logging.TestingContext() + + Wait(srv, bunDB) + + pgDatabase := srv.GetValue().NewDatabase(t) + + hooks := make([]bun.QueryHook, 0) + if os.Getenv("DEBUG") == "true" { + hooks = append(hooks, bundebug.NewQueryHook()) + } + + db, err := bunconnect.OpenSQLDB(ctx, pgDatabase.ConnectionOptions(), hooks...) 
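+ // Each test gets its own database (srv.GetValue().NewDatabase(t)) and, below, a dedicated bucket named after the ledger, so stores built here can run in parallel without sharing state.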
+ require.NoError(t, err) + + require.NoError(t, systemstore.Migrate(ctx, db)) + + l := ledger.MustNewWithDefault(ledgerName) + l.Bucket = ledgerName + + b := bucket.New(db, ledgerName) + require.NoError(t, b.Migrate(ctx, noop.Tracer{})) + require.NoError(t, b.AddLedger(ctx, l, db)) + + return ledgerstore.New(db, b, l) +} + +func bigIntComparer(v1 *big.Int, v2 *big.Int) bool { + return v1.String() == v2.String() +} + +func RequireEqual(t *testing.T, expected, actual any) { + t.Helper() + if diff := cmp.Diff(expected, actual, cmp.Comparer(bigIntComparer)); diff != "" { + require.Failf(t, "Content not matching", diff) + } +} diff --git a/internal/storage/ledger/moves.go b/internal/storage/ledger/moves.go new file mode 100644 index 000000000..7198cd9b5 --- /dev/null +++ b/internal/storage/ledger/moves.go @@ -0,0 +1,82 @@ +package ledger + +import ( + "context" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + + "github.com/formancehq/go-libs/v2/platform/postgres" + "github.com/formancehq/go-libs/v2/time" + ledger "github.com/formancehq/ledger/internal" + "github.com/formancehq/ledger/internal/tracing" + "github.com/uptrace/bun" +) + +func (s *Store) SortMovesBySeq(date *time.Time) *bun.SelectQuery { + + ret := s.db.NewSelect() + if !s.ledger.HasFeature(ledger.FeatureMovesHistory, "ON") { + return ret.Err(ledgercontroller.NewErrMissingFeature(ledger.FeatureMovesHistory)) + } + + ret = ret. + ModelTableExpr(s.GetPrefixedRelationName("moves")). + Where("ledger = ?", s.ledger.Name). + Order("seq desc") + + if date != nil && !date.IsZero() { + ret = ret.Where("insertion_date <= ?", date) + } + + return ret +} + +func (s *Store) SelectDistinctMovesBySeq(date *time.Time) *bun.SelectQuery { + ret := s.db.NewSelect(). + TableExpr("(?) moves", s.SortMovesBySeq(date)). + DistinctOn("accounts_address, asset"). + Column("accounts_address", "asset"). + ColumnExpr("first_value(post_commit_volumes) over (partition by (accounts_address, asset) order by seq desc) as post_commit_volumes"). + Where("ledger = ?", s.ledger.Name) + + if date != nil && !date.IsZero() { + ret = ret.Where("insertion_date <= ?", date) + } + + return ret +} + +func (s *Store) SelectDistinctMovesByEffectiveDate(date *time.Time) *bun.SelectQuery { + ret := s.db.NewSelect(). + TableExpr(s.GetPrefixedRelationName("moves")). + DistinctOn("accounts_address, asset"). + Column("accounts_address", "asset"). + ColumnExpr("first_value(post_commit_effective_volumes) over (partition by (accounts_address, asset) order by effective_date desc, seq desc) as post_commit_effective_volumes"). + Where("ledger = ?", s.ledger.Name) + + if date != nil && !date.IsZero() { + ret = ret.Where("effective_date <= ?", date) + } + + return ret +} + +func (s *Store) InsertMoves(ctx context.Context, moves ...*ledger.Move) error { + _, err := tracing.TraceWithMetric( + ctx, + "InsertMoves", + s.tracer, + s.insertMovesHistogram, + tracing.NoResult(func(ctx context.Context) error { + _, err := s.db.NewInsert(). + Model(&moves). + Value("ledger", "?", s.ledger.Name). + ModelTableExpr(s.GetPrefixedRelationName("moves")). + Returning("post_commit_volumes, post_commit_effective_volumes"). 
+ Exec(ctx) + + return postgres.ResolveError(err) + }), + ) + + return err +} diff --git a/internal/storage/ledger/moves_test.go b/internal/storage/ledger/moves_test.go new file mode 100644 index 000000000..e667ee8de --- /dev/null +++ b/internal/storage/ledger/moves_test.go @@ -0,0 +1,184 @@ +//go:build it + +package ledger_test + +import ( + "database/sql" + "fmt" + "github.com/formancehq/go-libs/v2/pointer" + "math/big" + "math/rand" + "testing" + + "errors" + "github.com/alitto/pond" + "github.com/formancehq/go-libs/v2/bun/bunpaginate" + "github.com/formancehq/go-libs/v2/logging" + "github.com/formancehq/go-libs/v2/platform/postgres" + "github.com/formancehq/go-libs/v2/time" + ledger "github.com/formancehq/ledger/internal" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + "github.com/stretchr/testify/require" +) + +func TestMovesInsert(t *testing.T) { + t.Parallel() + + t.Run("nominal", func(t *testing.T) { + t.Parallel() + + store := newLedgerStore(t) + ctx := logging.TestingContext() + + tx := ledger.NewTransaction().WithPostings( + ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), + ) + require.NoError(t, store.InsertTransaction(ctx, &tx)) + + account := &ledger.Account{ + Address: "world", + } + _, err := store.UpsertAccount(ctx, account) + require.NoError(t, err) + + now := time.Now() + + // We will insert 5 moves at five different timestamps and check that the post-commit effective volumes evolve correctly + // t0 ---------> t1 ---------> t2 ---------> t3 ----------> t4 + // m1 ---------> m3 ---------> m4 ---------> m2 ----------> m5 + t0 := now + t1 := t0.Add(time.Hour) + t2 := t1.Add(time.Hour) + t3 := t2.Add(time.Hour) + t4 := t3.Add(time.Hour) + + // Insert a first move at t0 + m1 := ledger.Move{ + IsSource: true, + Account: "world", + Amount: (*bunpaginate.BigInt)(big.NewInt(100)), + Asset: "USD", + InsertionDate: t0, + EffectiveDate: t0, + TransactionID: tx.ID, + } + require.NoError(t, store.InsertMoves(ctx, &m1)) + require.NotNil(t, m1.PostCommitEffectiveVolumes) + require.Equal(t, ledger.Volumes{ + Input: big.NewInt(0), + Output: big.NewInt(100), + }, *m1.PostCommitEffectiveVolumes) + + // Add a second move at t3 + m2 := ledger.Move{ + IsSource: false, + Account: "world", + Amount: (*bunpaginate.BigInt)(big.NewInt(50)), + Asset: "USD", + InsertionDate: t3, + EffectiveDate: t3, + TransactionID: tx.ID, + } + require.NoError(t, store.InsertMoves(ctx, &m2)) + require.NotNil(t, m2.PostCommitEffectiveVolumes) + require.Equal(t, ledger.Volumes{ + Input: big.NewInt(50), + Output: big.NewInt(100), + }, *m2.PostCommitEffectiveVolumes) + + // Add a third move at t1 + m3 := ledger.Move{ + IsSource: true, + Account: "world", + Amount: (*bunpaginate.BigInt)(big.NewInt(200)), + Asset: "USD", + InsertionDate: t1, + EffectiveDate: t1, + TransactionID: tx.ID, + } + require.NoError(t, store.InsertMoves(ctx, &m3)) + require.NotNil(t, m3.PostCommitEffectiveVolumes) + require.Equal(t, ledger.Volumes{ + Input: big.NewInt(0), + Output: big.NewInt(300), + }, *m3.PostCommitEffectiveVolumes) + + // Add a fourth move at t2 + m4 := ledger.Move{ + IsSource: false, + Account: "world", + Amount: (*bunpaginate.BigInt)(big.NewInt(50)), + Asset: "USD", + InsertionDate: t2, + EffectiveDate: t2, + TransactionID: tx.ID, + } + require.NoError(t, store.InsertMoves(ctx, &m4)) + require.NotNil(t, m4.PostCommitEffectiveVolumes) + require.Equal(t, ledger.Volumes{ + Input: big.NewInt(50), + Output: big.NewInt(300), + }, *m4.PostCommitEffectiveVolumes) + + // Add a fifth move at t4 + m5 := ledger.Move{ 
IsSource: false, + Account: "world", + Amount: (*bunpaginate.BigInt)(big.NewInt(50)), + Asset: "USD", + InsertionDate: t4, + EffectiveDate: t4, + TransactionID: tx.ID, + } + require.NoError(t, store.InsertMoves(ctx, &m5)) + require.NotNil(t, m5.PostCommitEffectiveVolumes) + require.Equal(t, ledger.Volumes{ + Input: big.NewInt(150), + Output: big.NewInt(300), + }, *m5.PostCommitEffectiveVolumes) + }) + + t.Run("with high concurrency", func(t *testing.T) { + t.Parallel() + + store := newLedgerStore(t) + ctx := logging.TestingContext() + + wp := pond.New(10, 10) + for i := 0; i < 1000; i++ { + wp.Submit(func() { + for { + sqlTx, err := store.GetDB().BeginTx(ctx, &sql.TxOptions{}) + require.NoError(t, err) + storeCP := store.WithDB(sqlTx) + + src := fmt.Sprintf("accounts:%d", rand.Intn(1000000)) + dst := fmt.Sprintf("accounts:%d", rand.Intn(1000000)) + + tx := ledger.NewTransaction().WithPostings( + ledger.NewPosting(src, dst, "USD", big.NewInt(1)), + ) + err = storeCP.CommitTransaction(ctx, &tx) + if errors.Is(err, postgres.ErrDeadlockDetected) { + require.NoError(t, sqlTx.Rollback()) + continue + } + require.NoError(t, err) + require.NoError(t, sqlTx.Commit()) + return + } + }) + } + wp.StopAndWait() + + aggregatedBalances, err := store.GetAggregatedBalances(ctx, ledgercontroller.NewGetAggregatedBalancesQuery(ledgercontroller.PITFilter{ + // By using a PIT, we force the usage of the moves table. + // If it were not specified, the test would not be correct. + PIT: pointer.For(time.Now()), + }, nil, true)) + require.NoError(t, err) + RequireEqual(t, ledger.BalancesByAssets{ + "USD": big.NewInt(0), + }, aggregatedBalances) + }) +} diff --git a/internal/storage/ledger/store.go b/internal/storage/ledger/store.go new file mode 100644 index 000000000..d7611c114 --- /dev/null +++ b/internal/storage/ledger/store.go @@ -0,0 +1,214 @@ +package ledger + +import ( + "context" + "fmt" + "github.com/formancehq/go-libs/v2/migrations" + "github.com/formancehq/go-libs/v2/platform/postgres" + "github.com/formancehq/ledger/internal/storage/bucket" + "go.opentelemetry.io/otel/metric" + noopmetrics "go.opentelemetry.io/otel/metric/noop" + "go.opentelemetry.io/otel/trace" + nooptracer "go.opentelemetry.io/otel/trace/noop" + + "errors" + ledger "github.com/formancehq/ledger/internal" + "github.com/uptrace/bun" +) + +type Store struct { + db bun.IDB + bucket *bucket.Bucket + ledger ledger.Ledger + + tracer trace.Tracer + meter metric.Meter + listAccountsHistogram metric.Int64Histogram + checkBucketSchemaHistogram metric.Int64Histogram + checkLedgerSchemaHistogram metric.Int64Histogram + getAccountHistogram metric.Int64Histogram + countAccountsHistogram metric.Int64Histogram + updateAccountsMetadataHistogram metric.Int64Histogram + deleteAccountMetadataHistogram metric.Int64Histogram + upsertAccountHistogram metric.Int64Histogram + getBalancesHistogram metric.Int64Histogram + insertLogHistogram metric.Int64Histogram + listLogsHistogram metric.Int64Histogram + readLogWithIdempotencyKeyHistogram metric.Int64Histogram + insertMovesHistogram metric.Int64Histogram + countTransactionsHistogram metric.Int64Histogram + getTransactionHistogram metric.Int64Histogram + insertTransactionHistogram metric.Int64Histogram + revertTransactionHistogram metric.Int64Histogram + updateTransactionMetadataHistogram metric.Int64Histogram + deleteTransactionMetadataHistogram metric.Int64Histogram + updateBalancesHistogram metric.Int64Histogram + getVolumesWithBalancesHistogram metric.Int64Histogram + listTransactionsHistogram 
metric.Int64Histogram +} + +func (s *Store) GetLedger() ledger.Ledger { + return s.ledger +} + +func (s *Store) GetDB() bun.IDB { + return s.db +} + +func (s *Store) GetPrefixedRelationName(v string) string { + return fmt.Sprintf(`"%s".%s`, s.ledger.Bucket, v) +} + +func (s *Store) WithDB(db bun.IDB) *Store { + ret := *s + ret.db = db + return &ret +} + +func (s *Store) validateAddressFilter(operator string, value any) error { + if operator != "$match" { + return errors.New("'address' column can only be used with $match") + } + if value, ok := value.(string); !ok { + return fmt.Errorf("invalid 'address' filter") + } else if isSegmentedAddress(value) && !s.ledger.HasFeature(ledger.FeatureIndexAddressSegments, "ON") { + return fmt.Errorf("feature %s must be 'ON' to use segments address", ledger.FeatureIndexAddressSegments) + } + + return nil +} + +func (s *Store) LockLedger(ctx context.Context) error { + _, err := s.db.NewRaw(`lock table ` + s.GetPrefixedRelationName("logs")).Exec(ctx) + return postgres.ResolveError(err) +} + +func New(db bun.IDB, bucket *bucket.Bucket, ledger ledger.Ledger, opts ...Option) *Store { + ret := &Store{ + db: db, + ledger: ledger, + bucket: bucket, + } + for _, opt := range append(defaultOptions, opts...) { + opt(ret) + } + + var err error + ret.listAccountsHistogram, err = ret.meter.Int64Histogram("store.listAccounts") + if err != nil { + panic(err) + } + ret.checkBucketSchemaHistogram, err = ret.meter.Int64Histogram("store.checkBucketSchema") + if err != nil { + panic(err) + } + ret.checkLedgerSchemaHistogram, err = ret.meter.Int64Histogram("store.checkLedgerSchema") + if err != nil { + panic(err) + } + ret.getAccountHistogram, err = ret.meter.Int64Histogram("store.getAccount") + if err != nil { + panic(err) + } + ret.countAccountsHistogram, err = ret.meter.Int64Histogram("store.countAccounts") + if err != nil { + panic(err) + } + ret.updateAccountsMetadataHistogram, err = ret.meter.Int64Histogram("store.updateAccountsMetadata") + if err != nil { + panic(err) + } + ret.deleteAccountMetadataHistogram, err = ret.meter.Int64Histogram("store.deleteAccountMetadata") + if err != nil { + panic(err) + } + ret.upsertAccountHistogram, err = ret.meter.Int64Histogram("store.upsertAccount") + if err != nil { + panic(err) + } + ret.getBalancesHistogram, err = ret.meter.Int64Histogram("store.getBalances") + if err != nil { + panic(err) + } + ret.insertLogHistogram, err = ret.meter.Int64Histogram("store.insertLog") + if err != nil { + panic(err) + } + ret.listLogsHistogram, err = ret.meter.Int64Histogram("store.listLogs") + if err != nil { + panic(err) + } + ret.readLogWithIdempotencyKeyHistogram, err = ret.meter.Int64Histogram("store.readLogWithIdempotencyKey") + if err != nil { + panic(err) + } + ret.insertMovesHistogram, err = ret.meter.Int64Histogram("store.insertMoves") + if err != nil { + panic(err) + } + ret.countTransactionsHistogram, err = ret.meter.Int64Histogram("store.countTransactions") + if err != nil { + panic(err) + } + ret.getTransactionHistogram, err = ret.meter.Int64Histogram("store.getTransaction") + if err != nil { + panic(err) + } + ret.insertTransactionHistogram, err = ret.meter.Int64Histogram("store.insertTransaction") + if err != nil { + panic(err) + } + ret.revertTransactionHistogram, err = ret.meter.Int64Histogram("store.revertTransaction") + if err != nil { + panic(err) + } + ret.updateTransactionMetadataHistogram, err = ret.meter.Int64Histogram("store.updateTransactionMetadata") + if err != nil { + panic(err) + } + 
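// Every store-level operation records its duration in one of these histograms; instrument creation is done eagerly and panics on failure. Telemetry stays optional because the defaults are no-op providers, so a caller wanting real metrics/traces wires them through the options declared at the end of this file, e.g. (sketch): New(db, bucket, ledger, WithMeter(meter), WithTracer(tracer)). +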
ret.deleteTransactionMetadataHistogram, err = ret.meter.Int64Histogram("store.deleteTransactionMetadata") + if err != nil { + panic(err) + } + ret.updateBalancesHistogram, err = ret.meter.Int64Histogram("store.updateBalances") + if err != nil { + panic(err) + } + ret.getVolumesWithBalancesHistogram, err = ret.meter.Int64Histogram("store.getVolumesWithBalances") + if err != nil { + panic(err) + } + ret.listTransactionsHistogram, err = ret.meter.Int64Histogram("store.listTransactions") + if err != nil { + panic(err) + } + + return ret +} + +func (s *Store) IsUpToDate(ctx context.Context) (bool, error) { + return s.bucket.IsUpToDate(ctx) +} + +func (s *Store) GetMigrationsInfo(ctx context.Context) ([]migrations.Info, error) { + return s.bucket.GetMigrationsInfo(ctx) +} + +type Option func(s *Store) + +func WithMeter(meter metric.Meter) Option { + return func(s *Store) { + s.meter = meter + } +} + +func WithTracer(tracer trace.Tracer) Option { + return func(s *Store) { + s.tracer = tracer + } +} + +var defaultOptions = []Option{ + WithMeter(noopmetrics.Meter{}), + WithTracer(nooptracer.Tracer{}), +} diff --git a/internal/storage/ledger/transactions.go b/internal/storage/ledger/transactions.go new file mode 100644 index 000000000..3ad30d173 --- /dev/null +++ b/internal/storage/ledger/transactions.go @@ -0,0 +1,612 @@ +package ledger + +import ( + "context" + "encoding/json" + "fmt" + "math/big" + "regexp" + "slices" + "strings" + + "github.com/formancehq/ledger/internal/tracing" + + "errors" + "github.com/formancehq/go-libs/v2/platform/postgres" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/trace" + + "github.com/formancehq/go-libs/v2/pointer" + + "github.com/formancehq/go-libs/v2/time" + + "github.com/formancehq/go-libs/v2/bun/bunpaginate" + + "github.com/formancehq/go-libs/v2/metadata" + "github.com/formancehq/go-libs/v2/query" + ledger "github.com/formancehq/ledger/internal" + "github.com/uptrace/bun" +) + +var ( + metadataRegex = regexp.MustCompile(`metadata\[(.+)]`) +) + +func (s *Store) selectDistinctTransactionMetadataHistories(date *time.Time) *bun.SelectQuery { + ret := s.db.NewSelect(). + DistinctOn("transactions_id"). + ModelTableExpr(s.GetPrefixedRelationName("transactions_metadata")). + Where("ledger = ?", s.ledger.Name). + Column("transactions_id", "metadata"). 
+ Order("transactions_id", "revision desc") + + if date != nil && !date.IsZero() { + ret = ret.Where("date <= ?", date) + } + + return ret +} + +func (s *Store) selectTransactions(date *time.Time, expandVolumes, expandEffectiveVolumes bool, q query.Builder) *bun.SelectQuery { + + ret := s.db.NewSelect() + if expandEffectiveVolumes && !s.ledger.HasFeature(ledger.FeatureMovesHistoryPostCommitEffectiveVolumes, "SYNC") { + return ret.Err(ledgercontroller.NewErrMissingFeature(ledger.FeatureMovesHistoryPostCommitEffectiveVolumes)) + } + + if q != nil { + if err := q.Walk(func(operator, key string, value any) error { + switch { + case key == "reverted": + if operator != "$match" { + return ledgercontroller.NewErrInvalidQuery("'reverted' column can only be used with $match") + } + switch value.(type) { + case bool: + return nil + default: + return ledgercontroller.NewErrInvalidQuery("'reverted' can only be used with bool value") + } + case key == "account": + return s.validateAddressFilter(operator, value) + case key == "source": + return s.validateAddressFilter(operator, value) + case key == "destination": + return s.validateAddressFilter(operator, value) + case key == "timestamp": + case metadataRegex.Match([]byte(key)): + if operator != "$match" { + return ledgercontroller.NewErrInvalidQuery("'metadata[xxx]' column can only be used with $match") + } + case key == "metadata": + if operator != "$exists" { + return ledgercontroller.NewErrInvalidQuery("'metadata' key filter can only be used with $exists") + } + default: + return ledgercontroller.NewErrInvalidQuery("unknown key '%s' when building query", key) + } + + return nil + }); err != nil { + return ret.Err(err) + } + } + + ret = ret. + ModelTableExpr(s.GetPrefixedRelationName("transactions")). + Column( + "ledger", + "id", + "timestamp", + "reference", + "inserted_at", + "updated_at", + "postings", + "sources", + "destinations", + "sources_arrays", + "destinations_arrays", + "reverted_at", + "post_commit_volumes", + ). + Where("ledger = ?", s.ledger.Name) + + if date != nil && !date.IsZero() { + ret = ret.Where("timestamp <= ?", date) + } + + if s.ledger.HasFeature(ledger.FeatureAccountMetadataHistory, "SYNC") && date != nil && !date.IsZero() { + ret = ret. + Join( + `left join (?) transactions_metadata on transactions_metadata.transactions_id = transactions.id`, + s.selectDistinctTransactionMetadataHistories(date), + ). + ColumnExpr("coalesce(transactions_metadata.metadata, '{}'::jsonb) as metadata") + } else { + ret = ret.ColumnExpr("metadata") + } + + if s.ledger.HasFeature(ledger.FeatureMovesHistoryPostCommitEffectiveVolumes, "SYNC") && expandEffectiveVolumes { + ret = ret. + Join( + `join (?) pcev on pcev.transactions_id = transactions.id`, + s.db.NewSelect(). + TableExpr( + "(?) data", + s.db.NewSelect(). + TableExpr( + "(?) moves", + s.db.NewSelect(). + DistinctOn("transactions_id, accounts_address, asset"). + ModelTableExpr(s.GetPrefixedRelationName("moves")). + Column("transactions_id", "accounts_address", "asset"). + ColumnExpr(`first_value(moves.post_commit_effective_volumes) over (partition by (transactions_id, accounts_address, asset) order by seq desc) as post_commit_effective_volumes`), + ). + Column("transactions_id"). + ColumnExpr(` + json_build_object( + moves.accounts_address, + json_build_object( + moves.asset, + json_build_object( + 'input', (moves.post_commit_effective_volumes).inputs, + 'output', (moves.post_commit_effective_volumes).outputs + ) + ) + ) as post_commit_effective_volumes + `), + ). 
+ Column("transactions_id"). + ColumnExpr("aggregate_objects(post_commit_effective_volumes::jsonb) as post_commit_effective_volumes"). + Group("transactions_id"), + //s.db.NewSelect(). + // Column("transactions_id"). + // ColumnExpr("aggregate_objects(pcev::jsonb) as post_commit_effective_volumes"). + // TableExpr( + // "(?) data", + // s.db.NewSelect(). + // DistinctOn("transactions_id, accounts_address, asset"). + // ModelTableExpr(s.GetPrefixedRelationName("moves")). + // Column("transactions_id"). + // ColumnExpr(` + // json_build_object( + // moves.accounts_address, + // json_build_object( + // moves.asset, + // first_value(moves.post_commit_effective_volumes) over (partition by (transactions_id, accounts_address, asset) order by seq desc) + // ) + // ) as pcev + // `), + // ). + // Group("transactions_id"), + ). + ColumnExpr("pcev.*") + } + + // Create a parent query which sets reverted_at to null when the revert happened after the date passed as argument + ret = s.db.NewSelect(). + ModelTableExpr("(?) transactions", ret). + Column( + "ledger", + "id", + "timestamp", + "reference", + "inserted_at", + "updated_at", + "postings", + "sources", + "destinations", + "sources_arrays", + "destinations_arrays", + "metadata", + ) + if expandVolumes { + ret = ret.Column("post_commit_volumes") + } + if expandEffectiveVolumes { + ret = ret.Column("post_commit_effective_volumes") + } + if date != nil && !date.IsZero() { + ret = ret.ColumnExpr("(case when transactions.reverted_at <= ? then transactions.reverted_at else null end) as reverted_at", date) + } else { + ret = ret.Column("reverted_at") + } + + if q != nil { + where, args, err := q.Build(query.ContextFn(func(key, operator string, value any) (string, []any, error) { + switch { + case key == "reference" || key == "timestamp": + return fmt.Sprintf("%s %s ?", key, query.DefaultComparisonOperatorsMapping[operator]), []any{value}, nil + case key == "reverted": + ret := "reverted_at is" + if value.(bool) { + ret += " not" + } + return ret + " null", nil, nil + case key == "account": + return filterAccountAddressOnTransactions(value.(string), true, true), nil, nil + case key == "source": + return filterAccountAddressOnTransactions(value.(string), true, false), nil, nil + case key == "destination": + return filterAccountAddressOnTransactions(value.(string), false, true), nil, nil + case metadataRegex.Match([]byte(key)): + match := metadataRegex.FindAllStringSubmatch(key, 3) + + return "metadata @> ?", []any{map[string]any{ + match[0][1]: value, + }}, nil + + case key == "metadata": + return "metadata -> ? is not null", []any{value}, nil + case key == "timestamp": + return fmt.Sprintf("timestamp %s ?", convertOperatorToSQL(operator)), []any{value}, nil + default: + return "", nil, ledgercontroller.NewErrInvalidQuery("unknown key '%s' when building query", key) + } + })) + if err != nil { + return ret.Err(err) + } + + if len(args) > 0 { + ret = ret.Where(where, args...) + } else { + ret = ret.Where(where) + } + } + + return ret +} + +func (s *Store) CommitTransaction(ctx context.Context, tx *ledger.Transaction) error { + postCommitVolumes, err := s.UpdateVolumes(ctx, tx.VolumeUpdates()...) 
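+ // The commit path is: apply the volume updates, insert the transaction, upsert every involved account, and, when FeatureMovesHistory is ON, walk the postings in reverse to attach per-move post-commit volumes. After each move is recorded, that posting's contribution is subtracted again (AddInput/AddOutput with the negated amount), so the previous posting in the loop sees the volumes as they stood before it.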
+ if err != nil { + return fmt.Errorf("failed to update balances: %w", err) + } + tx.PostCommitVolumes = postCommitVolumes.Copy() + + err = s.InsertTransaction(ctx, tx) + if err != nil { + return fmt.Errorf("failed to insert transaction: %w", err) + } + + for _, address := range tx.InvolvedAccounts() { + _, err := s.UpsertAccount(ctx, &ledger.Account{ + Address: address, + FirstUsage: tx.Timestamp, + Metadata: make(metadata.Metadata), + }) + if err != nil { + return fmt.Errorf("upserting account: %w", err) + } + } + + if s.ledger.HasFeature(ledger.FeatureMovesHistory, "ON") { + moves := ledger.Moves{} + postings := tx.Postings + slices.Reverse(postings) + + for _, posting := range postings { + moves = append(moves, &ledger.Move{ + Account: posting.Destination, + Amount: (*bunpaginate.BigInt)(posting.Amount), + Asset: posting.Asset, + InsertionDate: tx.InsertedAt, + EffectiveDate: tx.Timestamp, + PostCommitVolumes: pointer.For(postCommitVolumes[posting.Destination][posting.Asset].Copy()), + TransactionID: tx.ID, + }) + postCommitVolumes.AddInput(posting.Destination, posting.Asset, new(big.Int).Neg(posting.Amount)) + + moves = append(moves, &ledger.Move{ + IsSource: true, + Account: posting.Source, + Amount: (*bunpaginate.BigInt)(posting.Amount), + Asset: posting.Asset, + InsertionDate: tx.InsertedAt, + EffectiveDate: tx.Timestamp, + PostCommitVolumes: pointer.For(postCommitVolumes[posting.Source][posting.Asset].Copy()), + TransactionID: tx.ID, + }) + postCommitVolumes.AddOutput(posting.Source, posting.Asset, new(big.Int).Neg(posting.Amount)) + } + + slices.Reverse(moves) + + if err := s.InsertMoves(ctx, moves...); err != nil { + return fmt.Errorf("failed to insert moves: %w", err) + } + + if s.ledger.HasFeature(ledger.FeatureMovesHistoryPostCommitEffectiveVolumes, "SYNC") { + tx.PostCommitEffectiveVolumes = moves.ComputePostCommitEffectiveVolumes() + } + } + + return nil +} + +func (s *Store) ListTransactions(ctx context.Context, q ledgercontroller.ListTransactionsQuery) (*bunpaginate.Cursor[ledger.Transaction], error) { + return tracing.TraceWithMetric( + ctx, + "ListTransactions", + s.tracer, + s.listTransactionsHistogram, + func(ctx context.Context) (*bunpaginate.Cursor[ledger.Transaction], error) { + cursor, err := bunpaginate.UsingColumn[ledgercontroller.PaginatedQueryOptions[ledgercontroller.PITFilterWithVolumes], ledger.Transaction]( + ctx, + s.selectTransactions( + q.Options.Options.PIT, + q.Options.Options.ExpandVolumes, + q.Options.Options.ExpandEffectiveVolumes, + q.Options.QueryBuilder, + ), + bunpaginate.ColumnPaginatedQuery[ledgercontroller.PaginatedQueryOptions[ledgercontroller.PITFilterWithVolumes]](q), + ) + if err != nil { + return nil, err + } + + return cursor, nil + }, + ) +} + +func (s *Store) CountTransactions(ctx context.Context, q ledgercontroller.ListTransactionsQuery) (int, error) { + return tracing.TraceWithMetric( + ctx, + "CountTransactions", + s.tracer, + s.countTransactionsHistogram, + func(ctx context.Context) (int, error) { + return s.db.NewSelect(). + TableExpr("(?) data", s.selectTransactions( + q.Options.Options.PIT, + q.Options.Options.ExpandVolumes, + q.Options.Options.ExpandEffectiveVolumes, + q.Options.QueryBuilder, + )). 
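+ // (descriptive comment, added for readability) reuse the same filtered select as ListTransactions and only count its rows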
+ Count(ctx) + }, + ) +} + +func (s *Store) GetTransaction(ctx context.Context, filter ledgercontroller.GetTransactionQuery) (*ledger.Transaction, error) { + return tracing.TraceWithMetric( + ctx, + "GetTransaction", + s.tracer, + s.getTransactionHistogram, + func(ctx context.Context) (*ledger.Transaction, error) { + + ret := &ledger.Transaction{} + if err := s.selectTransactions( + filter.PIT, + filter.ExpandVolumes, + filter.ExpandEffectiveVolumes, + nil, + ). + Where("transactions.id = ?", filter.ID). + Limit(1). + Model(ret). + Scan(ctx); err != nil { + return nil, postgres.ResolveError(err) + } + + return ret, nil + }, + ) +} + +func (s *Store) InsertTransaction(ctx context.Context, tx *ledger.Transaction) error { + _, err := tracing.TraceWithMetric( + ctx, + "InsertTransaction", + s.tracer, + s.insertTransactionHistogram, + func(ctx context.Context) (*ledger.Transaction, error) { + _, err := s.db.NewInsert(). + Model(tx). + ModelTableExpr(s.GetPrefixedRelationName("transactions")). + Value("id", "nextval(?)", s.GetPrefixedRelationName(fmt.Sprintf(`"transaction_id_%d"`, s.ledger.ID))). + Value("ledger", "?", s.ledger.Name). + Returning("id, timestamp, inserted_at"). + Exec(ctx) + if err != nil { + err = postgres.ResolveError(err) + switch { + case errors.Is(err, postgres.ErrConstraintsFailed{}): + if err.(postgres.ErrConstraintsFailed).GetConstraint() == "transactions_reference" { + return nil, ledgercontroller.NewErrTransactionReferenceConflict(tx.Reference) + } + default: + return nil, err + } + } + + return tx, nil + }, + func(ctx context.Context, tx *ledger.Transaction) { + trace.SpanFromContext(ctx).SetAttributes( + attribute.Int("id", tx.ID), + attribute.String("timestamp", tx.Timestamp.Format(time.RFC3339Nano)), + ) + }, + ) + + return err +} + +// updateTxWithRetrieve try to apply to provided update query and check (if the update return no rows modified), that the row exists +func (s *Store) updateTxWithRetrieve(ctx context.Context, id int, query *bun.UpdateQuery) (*ledger.Transaction, bool, error) { + type modifiedEntity struct { + ledger.Transaction `bun:",extend"` + Modified bool `bun:"modified"` + } + me := &modifiedEntity{} + + err := s.db.NewSelect(). + With("upd", query). + ModelTableExpr( + "(?) transactions", + s.db.NewSelect(). + ColumnExpr("upd.*, true as modified"). + ModelTableExpr("upd"). + UnionAll( + s.db.NewSelect(). + ModelTableExpr(s.GetPrefixedRelationName("transactions")). + ColumnExpr("*, false as modified"). + Where("id = ? and ledger = ?", id, s.ledger.Name). + Limit(1), + ), + ). + Model(me). + ColumnExpr("*"). + Limit(1). + Scan(ctx) + if err != nil { + return nil, false, postgres.ResolveError(err) + } + + return &me.Transaction, me.Modified, nil +} + +func (s *Store) RevertTransaction(ctx context.Context, id int) (tx *ledger.Transaction, modified bool, err error) { + _, err = tracing.TraceWithMetric( + ctx, + "RevertTransaction", + s.tracer, + s.revertTransactionHistogram, + func(ctx context.Context) (*ledger.Transaction, error) { + tx, modified, err = s.updateTxWithRetrieve( + ctx, + id, + s.db.NewUpdate(). + Model(&ledger.Transaction{}). + ModelTableExpr(s.GetPrefixedRelationName("transactions")). + Where("id = ?", id). + Where("reverted_at is null"). + Where("ledger = ?", s.ledger.Name). + Set("reverted_at = (now() at time zone 'utc')"). + Set("updated_at = (now() at time zone 'utc')"). 
+ Returning("*"), + ) + return nil, err + }, + ) + if err != nil { + return nil, false, err + } + return tx, modified, err +} + +func (s *Store) UpdateTransactionMetadata(ctx context.Context, id int, m metadata.Metadata) (tx *ledger.Transaction, modified bool, err error) { + _, err = tracing.TraceWithMetric( + ctx, + "UpdateTransactionMetadata", + s.tracer, + s.updateTransactionMetadataHistogram, + func(ctx context.Context) (*ledger.Transaction, error) { + tx, modified, err = s.updateTxWithRetrieve( + ctx, + id, + s.db.NewUpdate(). + Model(&ledger.Transaction{}). + ModelTableExpr(s.GetPrefixedRelationName("transactions")). + Where("id = ?", id). + Where("ledger = ?", s.ledger.Name). + Set("metadata = metadata || ?", m). + Set("updated_at = (now() at time zone 'utc')"). + Where("not (metadata @> ?)", m). + Returning("*"), + ) + return nil, err + }, + ) + if err != nil { + return nil, false, err + } + return tx, modified, err +} + +func (s *Store) DeleteTransactionMetadata(ctx context.Context, id int, key string) (tx *ledger.Transaction, modified bool, err error) { + _, err = tracing.TraceWithMetric( + ctx, + "DeleteTransactionMetadata", + s.tracer, + s.deleteTransactionMetadataHistogram, + func(ctx context.Context) (*ledger.Transaction, error) { + tx, modified, err = s.updateTxWithRetrieve( + ctx, + id, + s.db.NewUpdate(). + Model(&ledger.Transaction{}). + ModelTableExpr(s.GetPrefixedRelationName("transactions")). + Set("metadata = metadata - ?", key). + Set("updated_at = (now() at time zone 'utc')"). + Where("id = ?", id). + Where("ledger = ?", s.ledger.Name). + Where("metadata -> ? is not null", key). + Returning("*"), + ) + return nil, err + }, + ) + if err != nil { + return nil, false, err + } + return tx, modified, err +} + +func filterAccountAddressOnTransactions(address string, source, destination bool) string { + src := strings.Split(address, ":") + + needSegmentCheck := false + for _, segment := range src { + needSegmentCheck = segment == "" + if needSegmentCheck { + break + } + } + + if needSegmentCheck { + m := map[string]any{ + fmt.Sprint(len(src)): nil, + } + parts := make([]string, 0) + + for i, segment := range src { + if len(segment) == 0 { + continue + } + m[fmt.Sprint(i)] = segment + } + + data, err := json.Marshal([]any{m}) + if err != nil { + panic(err) + } + + if source { + parts = append(parts, fmt.Sprintf("sources_arrays @> '%s'", string(data))) + } + if destination { + parts = append(parts, fmt.Sprintf("destinations_arrays @> '%s'", string(data))) + } + return strings.Join(parts, " or ") + } + + data, err := json.Marshal([]string{address}) + if err != nil { + panic(err) + } + + parts := make([]string, 0) + if source { + parts = append(parts, fmt.Sprintf("sources @> '%s'", string(data))) + } + if destination { + parts = append(parts, fmt.Sprintf("destinations @> '%s'", string(data))) + } + return strings.Join(parts, " or ") +} diff --git a/internal/storage/ledger/transactions_test.go b/internal/storage/ledger/transactions_test.go new file mode 100644 index 000000000..e41a047bf --- /dev/null +++ b/internal/storage/ledger/transactions_test.go @@ -0,0 +1,745 @@ +//go:build it + +package ledger_test + +import ( + "context" + "database/sql" + "fmt" + "math/big" + "testing" + + "github.com/formancehq/go-libs/v2/platform/postgres" + "github.com/formancehq/go-libs/v2/time" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + + "errors" + + "github.com/formancehq/go-libs/v2/logging" + "github.com/formancehq/go-libs/v2/pointer" + + libtime "time" + + 
"github.com/formancehq/go-libs/v2/metadata" + "github.com/formancehq/go-libs/v2/query" + ledger "github.com/formancehq/ledger/internal" + "github.com/stretchr/testify/require" +) + +func TestTransactionsGetWithVolumes(t *testing.T) { + t.Parallel() + store := newLedgerStore(t) + now := time.Now() + ctx := logging.TestingContext() + + tx1 := ledger.NewTransaction(). + WithPostings( + ledger.NewPosting("world", "central_bank", "USD", big.NewInt(100)), + ). + WithReference("tx1"). + WithTimestamp(now.Add(-3 * time.Hour)) + err := store.CommitTransaction(ctx, &tx1) + require.NoError(t, err) + + tx2 := ledger.NewTransaction(). + WithPostings( + ledger.NewPosting("world", "central_bank", "USD", big.NewInt(100)), + ). + WithReference("tx2"). + WithTimestamp(now.Add(-2 * time.Hour)) + err = store.CommitTransaction(ctx, &tx2) + require.NoError(t, err) + + tx, err := store.GetTransaction(ctx, ledgercontroller.NewGetTransactionQuery(tx1.ID). + WithExpandVolumes(). + WithExpandEffectiveVolumes()) + require.NoError(t, err) + require.Equal(t, tx1.Postings, tx.Postings) + require.Equal(t, tx1.Reference, tx.Reference) + require.Equal(t, tx1.Timestamp, tx.Timestamp) + + RequireEqual(t, ledger.PostCommitVolumes{ + "world": { + "USD": { + Input: big.NewInt(0), + Output: big.NewInt(100), + }, + }, + "central_bank": { + "USD": { + Input: big.NewInt(100), + Output: big.NewInt(0), + }, + }, + }, tx.PostCommitVolumes) + + tx, err = store.GetTransaction(ctx, ledgercontroller.NewGetTransactionQuery(tx2.ID). + WithExpandVolumes(). + WithExpandEffectiveVolumes()) + require.NoError(t, err) + require.Equal(t, tx2.Postings, tx.Postings) + require.Equal(t, tx2.Reference, tx.Reference) + require.Equal(t, tx2.Timestamp, tx.Timestamp) + RequireEqual(t, ledger.PostCommitVolumes{ + "world": { + "USD": { + Input: big.NewInt(0), + Output: big.NewInt(200), + }, + }, + "central_bank": { + "USD": { + Input: big.NewInt(200), + Output: big.NewInt(0), + }, + }, + }, tx.PostCommitVolumes) +} + +func TestTransactionsCount(t *testing.T) { + t.Parallel() + store := newLedgerStore(t) + ctx := logging.TestingContext() + + for i := 0; i < 3; i++ { + tx := ledger.NewTransaction().WithPostings( + ledger.NewPosting("world", fmt.Sprintf("account%d", i), "USD", big.NewInt(100)), + ) + err := store.CommitTransaction(ctx, &tx) + require.NoError(t, err) + } + + count, err := store.CountTransactions(ctx, ledgercontroller.NewListTransactionsQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}))) + require.NoError(t, err, "counting transactions should not fail") + require.Equal(t, 3, count, "count should be equal") +} + +func TestTransactionUpdateMetadata(t *testing.T) { + t.Parallel() + store := newLedgerStore(t) + now := time.Now() + ctx := logging.TestingContext() + + // Create some transactions + tx1 := ledger.NewTransaction(). + WithPostings( + ledger.NewPosting("world", "alice", "USD", big.NewInt(100)), + ). + WithTimestamp(now.Add(-3 * time.Hour)) + err := store.CommitTransaction(ctx, &tx1) + require.NoError(t, err) + + tx2 := ledger.NewTransaction(). + WithPostings( + ledger.NewPosting("world", "polo", "USD", big.NewInt(200)), + ). 
+ WithTimestamp(now.Add(-2 * time.Hour))
+ err = store.CommitTransaction(ctx, &tx2)
+ require.NoError(t, err)
+
+ // Update their metadata
+ _, modified, err := store.UpdateTransactionMetadata(ctx, tx1.ID, metadata.Metadata{"foo1": "bar2"})
+ require.NoError(t, err)
+ require.True(t, modified)
+
+ _, _, err = store.UpdateTransactionMetadata(ctx, tx2.ID, metadata.Metadata{"foo2": "bar2"})
+ require.NoError(t, err)
+
+ // Check that the database returns metadata
+ tx, err := store.GetTransaction(ctx, ledgercontroller.NewGetTransactionQuery(tx1.ID).WithExpandVolumes().WithExpandEffectiveVolumes())
+ require.NoError(t, err, "getting transaction should not fail")
+ require.Equal(t, tx.Metadata, metadata.Metadata{"foo1": "bar2"}, "metadata should be equal")
+
+ tx, err = store.GetTransaction(ctx, ledgercontroller.NewGetTransactionQuery(tx2.ID).WithExpandVolumes().WithExpandEffectiveVolumes())
+ require.NoError(t, err, "getting transaction should not fail")
+ require.Equal(t, tx.Metadata, metadata.Metadata{"foo2": "bar2"}, "metadata should be equal")
+
+ // Update metadata of a transaction that already has this metadata
+ _, modified, err = store.UpdateTransactionMetadata(ctx, tx1.ID, metadata.Metadata{"foo1": "bar2"})
+ require.NoError(t, err)
+ require.False(t, modified)
+
+ // Update metadata of a non-existing transaction
+ _, modified, err = store.UpdateTransactionMetadata(ctx, 10, metadata.Metadata{"foo2": "bar2"})
+ require.Error(t, err)
+ require.True(t, errors.Is(err, postgres.ErrNotFound))
+ require.False(t, modified)
+}
+
+func TestTransactionDeleteMetadata(t *testing.T) {
+ t.Parallel()
+ store := newLedgerStore(t)
+ now := time.Now()
+ ctx := logging.TestingContext()
+
+ // Create a tx with some metadata
+ tx1 := pointer.For(ledger.NewTransaction().
+ WithPostings(
+ ledger.NewPosting("world", "alice", "USD", big.NewInt(100)),
+ ).
+ WithMetadata(metadata.Metadata{"foo1": "bar1", "foo2": "bar2"}).
+ WithTimestamp(now.Add(-3 * time.Hour))) + err := store.CommitTransaction(ctx, tx1) + require.NoError(t, err) + + // Get from database and check metadata presence + tx, err := store.GetTransaction(context.Background(), ledgercontroller.NewGetTransactionQuery(tx1.ID)) + require.NoError(t, err) + require.Equal(t, tx.Metadata, metadata.Metadata{"foo1": "bar1", "foo2": "bar2"}) + + // Delete a metadata + tx1, modified, err := store.DeleteTransactionMetadata(ctx, tx1.ID, "foo1") + require.NoError(t, err) + require.True(t, modified) + + tx, err = store.GetTransaction(context.Background(), ledgercontroller.NewGetTransactionQuery(tx1.ID)) + require.NoError(t, err) + require.Equal(t, metadata.Metadata{"foo2": "bar2"}, tx.Metadata) + + // Delete a not existing metadata + _, modified, err = store.DeleteTransactionMetadata(ctx, tx1.ID, "foo1") + require.NoError(t, err) + require.False(t, modified) + + // Delete metadata of a non existing transaction + _, modified, err = store.DeleteTransactionMetadata(ctx, 10, "foo1") + require.Error(t, err) + require.True(t, errors.Is(err, postgres.ErrNotFound)) + require.False(t, modified) +} + +func TestTransactionsCommit(t *testing.T) { + t.Parallel() + + ctx := logging.TestingContext() + + t.Run("inserting some transactions", func(t *testing.T) { + t.Parallel() + + store := newLedgerStore(t) + + tx1 := ledger.NewTransaction().WithPostings( + ledger.NewPosting("account:1", "account:2", "USD", big.NewInt(100)), + ) + err := store.CommitTransaction(ctx, &tx1) + require.NoError(t, err) + require.Equal(t, 1, tx1.ID) + require.Equal(t, ledger.PostCommitVolumes{ + "account:1": ledger.VolumesByAssets{ + "USD": ledger.Volumes{ + Input: big.NewInt(0), + Output: big.NewInt(100), + }, + }, + "account:2": ledger.VolumesByAssets{ + "USD": ledger.Volumes{ + Input: big.NewInt(100), + Output: big.NewInt(0), + }, + }, + }, tx1.PostCommitVolumes) + require.Equal(t, tx1.PostCommitVolumes, tx1.PostCommitEffectiveVolumes) + + tx2 := ledger.NewTransaction().WithPostings( + ledger.NewPosting("account:2", "account:3", "USD", big.NewInt(100)), + ) + err = store.CommitTransaction(ctx, &tx2) + require.NoError(t, err) + require.Equal(t, 2, tx2.ID) + require.Equal(t, ledger.PostCommitVolumes{ + "account:2": ledger.VolumesByAssets{ + "USD": ledger.Volumes{ + Input: big.NewInt(100), + Output: big.NewInt(100), + }, + }, + "account:3": ledger.VolumesByAssets{ + "USD": ledger.Volumes{ + Input: big.NewInt(100), + Output: big.NewInt(0), + }, + }, + }, tx2.PostCommitVolumes) + require.Equal(t, tx2.PostCommitVolumes, tx2.PostCommitEffectiveVolumes) + }) + + t.Run("auto send", func(t *testing.T) { + store := newLedgerStore(t) + + tx3 := ledger.NewTransaction().WithPostings( + ledger.NewPosting("account:x", "account:x", "USD", big.NewInt(100)), + ) + err := store.CommitTransaction(ctx, &tx3) + require.NoError(t, err) + require.Equal(t, 1, tx3.ID) + require.Equal(t, ledger.PostCommitVolumes{ + "account:x": ledger.VolumesByAssets{ + "USD": ledger.Volumes{ + Input: big.NewInt(100), + Output: big.NewInt(100), + }, + }, + }, tx3.PostCommitVolumes) + require.Equal(t, tx3.PostCommitVolumes, tx3.PostCommitEffectiveVolumes) + }) + + t.Run("triggering a deadlock should return appropriate postgres error", func(t *testing.T) { + t.Parallel() + + store := newLedgerStore(t) + + // Create a new sql transaction to commit a transaction from account:1 to account:2. + // It will block until storeWithBlockingTx is commited or rollbacked. 
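+ // (descriptive comment, added for readability) together with the second transaction created
+ // below, this produces a lock cycle: each goroutine locks the balance of its own source
+ // account (via GetBalances), then needs the balance already held by the other when committing.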
+ txWithAccount1AsSource, err := store.GetDB().BeginTx(ctx, &sql.TxOptions{})
+ require.NoError(t, err)
+ t.Cleanup(func() {
+ _ = txWithAccount1AsSource.Rollback()
+ })
+
+ errorsChan := make(chan error, 2)
+
+ storeWithTxWithAccount1AsSource := store.WithDB(txWithAccount1AsSource)
+ unlockTx1Chan := make(chan chan struct{}, 1)
+ tx1Context, cancel := context.WithCancel(ctx)
+ t.Cleanup(cancel)
+ go func() {
+ // Simulate a transaction with bounded sources by asking for balances before calling CommitTransaction
+ _, err := storeWithTxWithAccount1AsSource.GetBalances(tx1Context, ledgercontroller.BalanceQuery{
+ "account:1": {"USD"},
+ })
+ require.NoError(t, err)
+
+ ch := make(chan struct{})
+ unlockTx1Chan <- ch
+ <-ch
+
+ errorsChan <- storeWithTxWithAccount1AsSource.CommitTransaction(
+ tx1Context,
+ pointer.For(ledger.NewTransaction().WithPostings(
+ ledger.NewPosting("account:1", "account:2", "USD", big.NewInt(100)),
+ )),
+ )
+ }()
+
+ var unlockTx1 chan struct{}
+ select {
+ case unlockTx1 = <-unlockTx1Chan:
+ case <-libtime.After(time.Second):
+ require.Fail(t, "tx should have been started")
+ }
+
+ // Create a new sql transaction to commit a transaction from account:2 to account:1.
+ // It will block until storeWithBlockingTx is committed or rolled back.
+ txWithAccount2AsSource, err := store.GetDB().BeginTx(ctx, &sql.TxOptions{})
+ require.NoError(t, err)
+ t.Cleanup(func() {
+ _ = txWithAccount2AsSource.Rollback()
+ })
+
+ storeWithTxWithAccount2AsSource := store.WithDB(txWithAccount2AsSource)
+ unlockTx2Chan := make(chan chan struct{}, 1)
+ tx2Context, cancel := context.WithCancel(ctx)
+ t.Cleanup(cancel)
+ go func() {
+ // Simulate a transaction with bounded sources by asking for balances before calling CommitTransaction
+ _, err := storeWithTxWithAccount2AsSource.GetBalances(tx2Context, ledgercontroller.BalanceQuery{
+ "account:2": {"USD"},
+ })
+ require.NoError(t, err)
+
+ ch := make(chan struct{})
+ unlockTx2Chan <- ch
+ <-ch
+
+ errorsChan <- storeWithTxWithAccount2AsSource.CommitTransaction(
+ tx2Context,
+ pointer.For(ledger.NewTransaction().WithPostings(
+ ledger.NewPosting("account:2", "account:1", "USD", big.NewInt(100)),
+ )),
+ )
+ }()
+
+ var unlockTx2 chan struct{}
+ select {
+ case unlockTx2 = <-unlockTx2Chan:
+ case <-libtime.After(time.Second):
+ require.Fail(t, "tx should have been started")
+ }
+
+ // At this point, each sql transaction holds a RowExclusiveLock on the balances table for its own account.
+ // Unlocking them should trigger a deadlock.
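+ // (descriptive comment, added for readability) Postgres detects the cycle and aborts one of
+ // the two transactions, so at least one CommitTransaction call below must fail with
+ // postgres.ErrDeadlockDetected.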
+ close(unlockTx1) + close(unlockTx2) + + select { + case err := <-errorsChan: + if err == nil { + select { + case err = <-errorsChan: + if err == nil { + require.Fail(t, "should have a deadlock") + } + case <-libtime.After(2 * time.Second): + require.Fail(t, "transaction should have finished") + } + } + require.True(t, errors.Is(err, postgres.ErrDeadlockDetected)) + case <-libtime.After(2 * time.Second): + require.Fail(t, "transaction should have finished") + } + }) +} + +func TestInsertTransactionInPast(t *testing.T) { + t.Parallel() + + store := newLedgerStore(t) + now := time.Now() + ctx := logging.TestingContext() + + tx1 := ledger.NewTransaction().WithPostings( + ledger.NewPosting("world", "bank", "USD/2", big.NewInt(100)), + ).WithTimestamp(now) + err := store.CommitTransaction(ctx, &tx1) + require.NoError(t, err) + + tx2 := ledger.NewTransaction().WithPostings( + ledger.NewPosting("bank", "user1", "USD/2", big.NewInt(50)), + ).WithTimestamp(now.Add(time.Hour)) + + err = store.CommitTransaction(ctx, &tx2) + require.NoError(t, err) + + // Insert in past must modify pre/post commit volumes of tx2 + tx3 := ledger.NewTransaction().WithPostings( + ledger.NewPosting("bank", "user2", "USD/2", big.NewInt(50)), + ).WithTimestamp(now.Add(30 * time.Minute)) + err = store.CommitTransaction(ctx, &tx3) + require.NoError(t, err) + + // Insert before the oldest tx must update first_usage of involved account + tx4 := ledger.NewTransaction().WithPostings( + ledger.NewPosting("world", "bank", "USD/2", big.NewInt(100)), + ).WithTimestamp(now.Add(-time.Minute)) + err = store.CommitTransaction(ctx, &tx4) + require.NoError(t, err) + + tx2FromDatabase, err := store.GetTransaction(ctx, ledgercontroller.NewGetTransactionQuery(tx2.ID).WithExpandVolumes().WithExpandEffectiveVolumes()) + require.NoError(t, err) + + RequireEqual(t, ledger.PostCommitVolumes{ + "bank": { + "USD/2": ledger.NewVolumesInt64(200, 100), + }, + "user1": { + "USD/2": ledger.NewVolumesInt64(50, 0), + }, + }, tx2FromDatabase.PostCommitEffectiveVolumes) + + account, err := store.GetAccount(ctx, ledgercontroller.NewGetAccountQuery("bank")) + require.NoError(t, err) + require.Equal(t, tx4.Timestamp, account.FirstUsage) +} + +func TestTransactionsRevert(t *testing.T) { + t.Parallel() + + store := newLedgerStore(t) + now := time.Now() + ctx := logging.TestingContext() + + // Create a simple tx + tx1 := ledger.NewTransaction(). + WithPostings( + ledger.NewPosting("world", "alice", "USD", big.NewInt(100)), + ). + WithMetadata(metadata.Metadata{"category": "1"}). 
+ WithTimestamp(now.Add(-3 * time.Hour)) + err := store.CommitTransaction(ctx, &tx1) + require.NoError(t, err) + + // Revert the tx + revertedTx, reverted, err := store.RevertTransaction(ctx, tx1.ID) + require.NoError(t, err) + require.True(t, reverted) + require.NotNil(t, revertedTx) + require.True(t, revertedTx.IsReverted()) + revertedTx.RevertedAt = nil + // As the RevertTransaction method does not return post commit effective volumes, + // we remove them to be able to compare revertedTx with tx1 + tx1.PostCommitEffectiveVolumes = nil + require.Equal(t, tx1, *revertedTx) + + // Try to revert again + _, reverted, err = store.RevertTransaction(ctx, tx1.ID) + require.NoError(t, err) + require.False(t, reverted) + + // Revert a not existing transaction + revertedTx, reverted, err = store.RevertTransaction(ctx, 2) + require.True(t, errors.Is(err, postgres.ErrNotFound)) + require.False(t, reverted) + require.Nil(t, revertedTx) +} + +func TestTransactionsInsert(t *testing.T) { + t.Parallel() + + now := time.Now() + ctx := logging.TestingContext() + + t.Run("check reference conflict", func(t *testing.T) { + t.Parallel() + + store := newLedgerStore(t) + + // Create a simple tx + tx1 := ledger.Transaction{ + TransactionData: ledger.TransactionData{ + Timestamp: now, + Reference: "foo", + Postings: []ledger.Posting{ + ledger.NewPosting("world", "bank", "USD/2", big.NewInt(100)), + }, + }, + } + err := store.InsertTransaction(ctx, &tx1) + require.NoError(t, err) + require.NotZero(t, tx1.ID) + + // Create another tx with the same reference + tx2 := ledger.Transaction{ + TransactionData: ledger.TransactionData{ + Timestamp: now, + Reference: "foo", + Postings: []ledger.Posting{ + ledger.NewPosting("world", "bank", "USD/2", big.NewInt(100)), + }, + }, + } + err = store.InsertTransaction(ctx, &tx2) + require.Error(t, err) + require.True(t, errors.Is(err, ledgercontroller.ErrTransactionReferenceConflict{})) + }) + t.Run("check denormalization", func(t *testing.T) { + t.Parallel() + + store := newLedgerStore(t) + + tx1 := ledger.NewTransaction(). + WithPostings( + ledger.NewPosting("world", "bank", "USD/2", big.NewInt(100)), + ). + WithInsertedAt(now). + WithTimestamp(now) + + err := store.InsertTransaction(ctx, &tx1) + require.NoError(t, err) + + type Model struct { + ledger.Transaction + Sources []string `bun:"sources,type:jsonb"` + Destinations []string `bun:"destinations,type:jsonb"` + SourcesArrays []map[string]any `bun:"sources_arrays,type:jsonb"` + DestinationsArrays []map[string]any `bun:"destinations_arrays,type:jsonb"` + } + + m := Model{} + err = store.GetDB(). + NewSelect(). + Model(&m). + ModelTableExpr(store.GetPrefixedRelationName("transactions") + " as model"). + Scan(ctx) + require.NoError(t, err) + require.Equal(t, Model{ + Transaction: tx1, + Sources: []string{"world"}, + Destinations: []string{"bank"}, + SourcesArrays: []map[string]any{{ + "0": "world", + "1": nil, + }}, + DestinationsArrays: []map[string]any{{ + "0": "bank", + "1": nil, + }}, + }, m) + }) +} + +func TestTransactionsList(t *testing.T) { + t.Parallel() + + store := newLedgerStore(t) + now := time.Now() + ctx := logging.TestingContext() + + tx1 := ledger.NewTransaction(). + WithPostings( + ledger.NewPosting("world", "alice", "USD", big.NewInt(100)), + ). + WithMetadata(metadata.Metadata{"category": "1"}). + WithTimestamp(now.Add(-3 * time.Hour)) + err := store.CommitTransaction(ctx, &tx1) + require.NoError(t, err) + + tx2 := ledger.NewTransaction(). 
+ WithPostings( + ledger.NewPosting("world", "bob", "USD", big.NewInt(100)), + ). + WithMetadata(metadata.Metadata{"category": "2"}). + WithTimestamp(now.Add(-2 * time.Hour)) + err = store.CommitTransaction(ctx, &tx2) + require.NoError(t, err) + + tx3BeforeRevert := ledger.NewTransaction(). + WithPostings( + ledger.NewPosting("world", "users:marley", "USD", big.NewInt(100)), + ). + WithMetadata(metadata.Metadata{"category": "3"}). + WithTimestamp(now.Add(-time.Hour)) + err = store.CommitTransaction(ctx, &tx3BeforeRevert) + require.NoError(t, err) + + _, hasBeenReverted, err := store.RevertTransaction(ctx, tx3BeforeRevert.ID) + require.NoError(t, err) + require.True(t, hasBeenReverted) + + tx4 := tx3BeforeRevert.Reverse().WithTimestamp(now) + err = store.CommitTransaction(ctx, &tx4) + require.NoError(t, err) + + _, _, err = store.UpdateTransactionMetadata(ctx, tx3BeforeRevert.ID, metadata.Metadata{ + "additional_metadata": "true", + }) + require.NoError(t, err) + + // refresh tx3 + // we can't take the result of the call on RevertTransaction nor UpdateTransactionMetadata as the result does not contains pc(e)v + tx3 := func() ledger.Transaction { + tx3, err := store.GetTransaction(ctx, ledgercontroller.NewGetTransactionQuery(tx3BeforeRevert.ID). + WithExpandVolumes(). + WithExpandEffectiveVolumes()) + require.NoError(t, err) + return *tx3 + }() + + tx5 := ledger.NewTransaction(). + WithPostings( + ledger.NewPosting("users:marley", "sellers:amazon", "USD", big.NewInt(100)), + ). + WithTimestamp(now) + err = store.CommitTransaction(ctx, &tx5) + require.NoError(t, err) + + type testCase struct { + name string + query ledgercontroller.PaginatedQueryOptions[ledgercontroller.PITFilterWithVolumes] + expected []ledger.Transaction + expectError error + } + testCases := []testCase{ + { + name: "nominal", + query: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}), + expected: []ledger.Transaction{tx5, tx4, tx3, tx2, tx1}, + }, + { + name: "address filter", + query: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("account", "bob")), + expected: []ledger.Transaction{tx2}, + }, + { + name: "address filter using segments matching two addresses by individual segments", + query: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("account", "users:amazon")), + expected: []ledger.Transaction{}, + }, + { + name: "address filter using segment", + query: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("account", "users:")), + expected: []ledger.Transaction{tx5, tx4, tx3}, + }, + { + name: "filter using metadata", + query: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("metadata[category]", "2")), + expected: []ledger.Transaction{tx2}, + }, + { + name: "using point in time", + query: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: pointer.For(now.Add(-time.Hour)), + }, + }), + expected: []ledger.Transaction{tx3BeforeRevert, tx2, tx1}, + }, + { + name: "filter using invalid key", + query: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). 
+ WithQueryBuilder(query.Match("invalid", "2")), + expectError: ledgercontroller.ErrInvalidQuery{}, + }, + { + name: "reverted transactions", + query: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("reverted", true)), + expected: []ledger.Transaction{tx3}, + }, + { + name: "filter using exists metadata", + query: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithQueryBuilder(query.Exists("metadata", "category")), + expected: []ledger.Transaction{tx3, tx2, tx1}, + }, + { + name: "filter using metadata and pit", + query: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: pointer.For(tx3.Timestamp), + }, + }). + WithQueryBuilder(query.Match("metadata[category]", "2")), + expected: []ledger.Transaction{tx2}, + }, + { + name: "filter using not exists metadata", + query: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithQueryBuilder(query.Not(query.Exists("metadata", "category"))), + expected: []ledger.Transaction{tx5, tx4}, + }, + { + name: "filter using timestamp", + query: ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.PITFilterWithVolumes{}). + WithQueryBuilder(query.Match("timestamp", tx5.Timestamp.Format(time.RFC3339Nano))), + expected: []ledger.Transaction{tx5, tx4}, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + tc.query.Options.ExpandVolumes = true + tc.query.Options.ExpandEffectiveVolumes = true + + cursor, err := store.ListTransactions(ctx, ledgercontroller.NewListTransactionsQuery(tc.query)) + if tc.expectError != nil { + require.True(t, errors.Is(err, tc.expectError)) + } else { + require.NoError(t, err) + require.Len(t, cursor.Data, len(tc.expected)) + RequireEqual(t, tc.expected, cursor.Data) + + count, err := store.CountTransactions(ctx, ledgercontroller.NewListTransactionsQuery(tc.query)) + require.NoError(t, err) + + require.EqualValues(t, len(tc.expected), count) + } + }) + } +} diff --git a/internal/storage/ledger/utils.go b/internal/storage/ledger/utils.go new file mode 100644 index 000000000..11b64f438 --- /dev/null +++ b/internal/storage/ledger/utils.go @@ -0,0 +1,48 @@ +package ledger + +import ( + "fmt" + "strings" +) + +func isSegmentedAddress(address string) bool { + src := strings.Split(address, ":") + + needSegmentCheck := false + for _, segment := range src { + needSegmentCheck = segment == "" + if needSegmentCheck { + break + } + } + + return needSegmentCheck +} + +func filterAccountAddress(address, key string) string { + parts := make([]string, 0) + src := strings.Split(address, ":") + + needSegmentCheck := false + for _, segment := range src { + needSegmentCheck = segment == "" + if needSegmentCheck { + break + } + } + + if needSegmentCheck { + parts = append(parts, fmt.Sprintf("jsonb_array_length(%s_array) = %d", key, len(src))) + + for i, segment := range src { + if len(segment) == 0 { + continue + } + parts = append(parts, fmt.Sprintf("%s_array @@ ('$[%d] == \"%s\"')::jsonpath", key, i, segment)) + } + } else { + parts = append(parts, fmt.Sprintf("%s = '%s'", key, address)) + } + + return strings.Join(parts, " and ") +} diff --git a/internal/storage/ledger/volumes.go b/internal/storage/ledger/volumes.go new file mode 100644 index 000000000..53db4e14d --- /dev/null +++ b/internal/storage/ledger/volumes.go @@ -0,0 +1,242 @@ +package ledger + +import ( + "context" + 
"fmt" + "github.com/formancehq/go-libs/v2/collectionutils" + "github.com/formancehq/go-libs/v2/platform/postgres" + + "github.com/formancehq/ledger/internal/tracing" + + "github.com/formancehq/go-libs/v2/bun/bunpaginate" + lquery "github.com/formancehq/go-libs/v2/query" + "github.com/formancehq/go-libs/v2/time" + ledger "github.com/formancehq/ledger/internal" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + "github.com/uptrace/bun" +) + +func (s *Store) UpdateVolumes(ctx context.Context, accountVolumes ...ledger.AccountsVolumes) (ledger.PostCommitVolumes, error) { + return tracing.TraceWithMetric( + ctx, + "UpdateBalances", + s.tracer, + s.updateBalancesHistogram, + func(ctx context.Context) (ledger.PostCommitVolumes, error) { + + type AccountsVolumesWithLedger struct { + ledger.AccountsVolumes `bun:",extend"` + Ledger string `bun:"ledger,type:varchar"` + } + + accountsVolumesWithLedger := collectionutils.Map(accountVolumes, func(from ledger.AccountsVolumes) AccountsVolumesWithLedger { + return AccountsVolumesWithLedger{ + AccountsVolumes: from, + Ledger: s.ledger.Name, + } + }) + + _, err := s.db.NewInsert(). + Model(&accountsVolumesWithLedger). + ModelTableExpr(s.GetPrefixedRelationName("accounts_volumes")). + On("conflict (ledger, accounts_address, asset) do update"). + Set("input = accounts_volumes.input + excluded.input"). + Set("output = accounts_volumes.output + excluded.output"). + Returning("input, output"). + Exec(ctx) + if err != nil { + return nil, postgres.ResolveError(err) + } + + ret := ledger.PostCommitVolumes{} + for _, volumes := range accountVolumes { + if _, ok := ret[volumes.Account]; !ok { + ret[volumes.Account] = map[string]ledger.Volumes{} + } + ret[volumes.Account][volumes.Asset] = ledger.Volumes{ + Input: volumes.Input, + Output: volumes.Output, + } + } + + return ret, err + }, + ) +} + +func (s *Store) selectVolumes(oot, pit *time.Time, useInsertionDate bool, groupLevel int, q lquery.Builder) *bun.SelectQuery { + ret := s.db.NewSelect() + + if !s.ledger.HasFeature(ledger.FeatureMovesHistory, "ON") { + return ret.Err(ledgercontroller.NewErrMissingFeature(ledger.FeatureMovesHistory)) + } + + var ( + useMetadata bool + needSegmentAddress bool + ) + if q != nil { + err := q.Walk(func(operator, key string, value any) error { + switch { + case key == "account" || key == "address": + if err := s.validateAddressFilter(operator, value); err != nil { + return err + } + if !needSegmentAddress { + needSegmentAddress = isSegmentedAddress(value.(string)) // Safe cast + } + case metadataRegex.Match([]byte(key)): + if operator != "$match" { + return ledgercontroller.NewErrInvalidQuery("'metadata' column can only be used with $match") + } + useMetadata = true + case key == "metadata": + if operator != "$exists" { + return ledgercontroller.NewErrInvalidQuery("'metadata' key filter can only be used with $exists") + } + useMetadata = true + case balanceRegex.Match([]byte(key)): + default: + return ledgercontroller.NewErrInvalidQuery("unknown key '%s' when building query", key) + } + return nil + }) + if err != nil { + return ret.Err(err) + } + } + + selectVolumes := s.db.NewSelect(). + ColumnExpr("accounts_address as address"). + Column("asset"). + ColumnExpr("sum(case when not is_source then amount else 0 end) as input"). + ColumnExpr("sum(case when is_source then amount else 0 end) as output"). + ColumnExpr("sum(case when not is_source then amount else -amount end) as balance"). + ModelTableExpr(s.GetPrefixedRelationName("moves")). 
+ GroupExpr("accounts_address, asset") + + dateFilterColumn := "effective_date" + if useInsertionDate { + dateFilterColumn = "insertion_date" + } + + if pit != nil && !pit.IsZero() { + selectVolumes = selectVolumes.Where(dateFilterColumn+" <= ?", pit) + } + if oot != nil && !oot.IsZero() { + selectVolumes = selectVolumes.Where(dateFilterColumn+" >= ?", oot) + } + + ret = ret. + ModelTableExpr("(?) volumes", selectVolumes). + Column("address", "asset", "input", "output", "balance") + + if needSegmentAddress { + selectAccount := s.db.NewSelect(). + ModelTableExpr(s.GetPrefixedRelationName("accounts")). + Where("ledger = ? and address = volumes.address", s.ledger.Name). + Column("address_array") + if useMetadata && (pit == nil || pit.IsZero()) { + selectAccount = selectAccount.Column("metadata") + } + + ret = ret. + Join("join lateral (?) accounts on true", selectAccount). + Column("accounts.address_array") + if useMetadata && (pit == nil || pit.IsZero()) { + ret = ret.Column("accounts.metadata") + } + } + + if useMetadata { + switch { + case needSegmentAddress && (pit == nil || pit.IsZero()): + // nothing to do, already handled earlier + case !needSegmentAddress && (pit == nil || pit.IsZero()): + selectAccount := s.db.NewSelect(). + ModelTableExpr(s.GetPrefixedRelationName("accounts")). + Where("ledger = ? and address = volumes.address", s.ledger.Name). + Column("metadata") + + ret = ret. + Join("join lateral (?) accounts on true", selectAccount). + Column("accounts.metadata") + case pit != nil && !pit.IsZero(): + selectAccountMetadata := s.db.NewSelect(). + Column("metadata"). + ModelTableExpr(s.GetPrefixedRelationName("accounts_metadata")). + Where("ledger = ? and accounts_address = volumes.address and date <= ?", s.ledger.Name, pit) + + ret = ret. + Join("join lateral (?) accounts_metadata on true", selectAccountMetadata). + Column("accounts_metadata.metadata") + } + } + + if q != nil { + where, args, err := q.Build(lquery.ContextFn(func(key, operator string, value any) (string, []any, error) { + + switch { + case key == "account" || key == "address": + return filterAccountAddress(value.(string), "address"), nil, nil + case metadataRegex.Match([]byte(key)): + match := metadataRegex.FindAllStringSubmatch(key, 3) + return "metadata @> ?", []any{map[string]any{ + match[0][1]: value, + }}, nil + case key == "metadata": + return "metadata -> ? is not null", []any{value}, nil + case balanceRegex.Match([]byte(key)): + match := balanceRegex.FindAllStringSubmatch(key, 2) + return `balance ` + convertOperatorToSQL(operator) + ` ? and asset = ?`, []any{value, match[0][1]}, nil + default: + return "", nil, ledgercontroller.NewErrInvalidQuery("unknown key '%s' when building query", key) + } + })) + if err != nil { + return ret.Err(err) + } + ret = ret.Where(where, args...) + } + + globalQuery := s.db.NewSelect() + globalQuery = globalQuery. + With("query", ret). + ModelTableExpr("query") + + if groupLevel > 0 { + globalQuery = globalQuery. + ColumnExpr(fmt.Sprintf(`(array_to_string((string_to_array(address, ':'))[1:LEAST(array_length(string_to_array(address, ':'),1),%d)],':')) as account`, groupLevel)). + ColumnExpr("asset"). + ColumnExpr("sum(input) as input"). + ColumnExpr("sum(output) as output"). + ColumnExpr("sum(balance) as balance"). 
+ GroupExpr("account, asset") + } else { + globalQuery = globalQuery.ColumnExpr("address as account, asset, input, output, balance") + } + + return globalQuery +} + +func (s *Store) GetVolumesWithBalances(ctx context.Context, q ledgercontroller.GetVolumesWithBalancesQuery) (*bunpaginate.Cursor[ledger.VolumesWithBalanceByAssetByAccount], error) { + return tracing.TraceWithMetric( + ctx, + "GetVolumesWithBalances", + s.tracer, + s.getVolumesWithBalancesHistogram, + func(ctx context.Context) (*bunpaginate.Cursor[ledger.VolumesWithBalanceByAssetByAccount], error) { + return bunpaginate.UsingOffset[ledgercontroller.PaginatedQueryOptions[ledgercontroller.FiltersForVolumes], ledger.VolumesWithBalanceByAssetByAccount]( + ctx, + s.selectVolumes( + q.Options.Options.OOT, + q.Options.Options.PIT, + q.Options.Options.UseInsertionDate, + q.Options.Options.GroupLvl, + q.Options.QueryBuilder, + ), + bunpaginate.OffsetPaginatedQuery[ledgercontroller.PaginatedQueryOptions[ledgercontroller.FiltersForVolumes]](q), + ) + }, + ) +} diff --git a/internal/storage/ledger/volumes_test.go b/internal/storage/ledger/volumes_test.go new file mode 100644 index 000000000..2da872f8b --- /dev/null +++ b/internal/storage/ledger/volumes_test.go @@ -0,0 +1,800 @@ +//go:build it + +package ledger_test + +import ( + "database/sql" + "github.com/formancehq/go-libs/v2/pointer" + "math/big" + "testing" + libtime "time" + + "errors" + "github.com/formancehq/go-libs/v2/platform/postgres" + ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger" + + "github.com/formancehq/go-libs/v2/time" + + "github.com/formancehq/go-libs/v2/logging" + + "github.com/formancehq/go-libs/v2/metadata" + "github.com/formancehq/go-libs/v2/query" + ledger "github.com/formancehq/ledger/internal" + "github.com/stretchr/testify/require" +) + +func TestVolumesList(t *testing.T) { + t.Parallel() + store := newLedgerStore(t) + now := time.Now() + ctx := logging.TestingContext() + + previousPIT := now.Add(-2 * time.Minute) + futurPIT := now.Add(2 * time.Minute) + + previousOOT := now.Add(-2 * time.Minute) + futurOOT := now.Add(2 * time.Minute) + + require.NoError(t, store.UpdateAccountsMetadata(ctx, map[string]metadata.Metadata{ + "account:1": { + "category": "1", + }, + "account:2": { + "category": "2", + }, + "world": { + "foo": "bar", + }, + })) + + tx1 := ledger.NewTransaction(). + WithPostings(ledger.NewPosting("world", "account:1", "USD", big.NewInt(100))). + WithTimestamp(now.Add(-4 * time.Minute)). + WithInsertedAt(now.Add(4 * time.Minute)) + err := store.CommitTransaction(ctx, &tx1) + require.NoError(t, err) + + tx2 := ledger.NewTransaction(). + WithPostings(ledger.NewPosting("world", "account:1", "USD", big.NewInt(100))). + WithTimestamp(now.Add(-3 * time.Minute)). + WithInsertedAt(now.Add(3 * time.Minute)) + err = store.CommitTransaction(ctx, &tx2) + require.NoError(t, err) + + tx3 := ledger.NewTransaction(). + WithPostings(ledger.NewPosting("account:1", "bank", "USD", big.NewInt(50))). + WithTimestamp(now.Add(-2 * time.Minute)). + WithInsertedAt(now.Add(2 * time.Minute)) + err = store.CommitTransaction(ctx, &tx3) + require.NoError(t, err) + + tx4 := ledger.NewTransaction(). + WithPostings(ledger.NewPosting("world", "account:1", "USD", big.NewInt(0))). + WithTimestamp(now.Add(-time.Minute)). + WithInsertedAt(now.Add(time.Minute)) + err = store.CommitTransaction(ctx, &tx4) + require.NoError(t, err) + + tx5 := ledger.NewTransaction(). + WithPostings(ledger.NewPosting("world", "account:2", "USD", big.NewInt(50))). + WithTimestamp(now). 
+ WithInsertedAt(now) + err = store.CommitTransaction(ctx, &tx5) + require.NoError(t, err) + + tx6 := ledger.NewTransaction(). + WithPostings(ledger.NewPosting("world", "account:2", "USD", big.NewInt(50))). + WithTimestamp(now.Add(1 * time.Minute)). + WithInsertedAt(now.Add(-time.Minute)) + err = store.CommitTransaction(ctx, &tx6) + require.NoError(t, err) + + tx7 := ledger.NewTransaction(). + WithPostings(ledger.NewPosting("account:2", "bank", "USD", big.NewInt(50))). + WithTimestamp(now.Add(2 * time.Minute)). + WithInsertedAt(now.Add(-2 * time.Minute)) + err = store.CommitTransaction(ctx, &tx7) + require.NoError(t, err) + + tx8 := ledger.NewTransaction(). + WithPostings(ledger.NewPosting("world", "account:2", "USD", big.NewInt(25))). + WithTimestamp(now.Add(3 * time.Minute)). + WithInsertedAt(now.Add(-3 * time.Minute)) + err = store.CommitTransaction(ctx, &tx8) + require.NoError(t, err) + + t.Run("Get all volumes with balance for insertion date", func(t *testing.T) { + t.Parallel() + volumes, err := store.GetVolumesWithBalances(ctx, ledgercontroller.NewGetVolumesWithBalancesQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.FiltersForVolumes{UseInsertionDate: true}))) + require.NoError(t, err) + + require.Len(t, volumes.Data, 4) + }) + + t.Run("Get all volumes with balance for effective date", func(t *testing.T) { + t.Parallel() + volumes, err := store.GetVolumesWithBalances(ctx, ledgercontroller.NewGetVolumesWithBalancesQuery(ledgercontroller.NewPaginatedQueryOptions(ledgercontroller.FiltersForVolumes{UseInsertionDate: false}))) + require.NoError(t, err) + + require.Len(t, volumes.Data, 4) + }) + + t.Run("Get all volumes with balance for insertion date with previous pit", func(t *testing.T) { + t.Parallel() + volumes, err := store.GetVolumesWithBalances(ctx, ledgercontroller.NewGetVolumesWithBalancesQuery(ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{ + PITFilter: ledgercontroller.PITFilter{PIT: &previousPIT, OOT: nil}, + UseInsertionDate: true, + }))) + + require.NoError(t, err) + require.Len(t, volumes.Data, 3) + require.Equal(t, ledger.VolumesWithBalanceByAssetByAccount{ + Account: "account:2", + Asset: "USD", + VolumesWithBalance: ledger.VolumesWithBalance{ + Input: big.NewInt(25), + Output: big.NewInt(50), + Balance: big.NewInt(-25), + }, + }, volumes.Data[0]) + }) + + t.Run("Get all volumes with balance for insertion date with futur pit", func(t *testing.T) { + t.Parallel() + volumes, err := store.GetVolumesWithBalances(ctx, ledgercontroller.NewGetVolumesWithBalancesQuery(ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{ + PITFilter: ledgercontroller.PITFilter{PIT: &futurPIT, OOT: nil}, + UseInsertionDate: true, + }))) + require.NoError(t, err) + + require.Len(t, volumes.Data, 4) + }) + + t.Run("Get all volumes with balance for insertion date with previous oot", func(t *testing.T) { + t.Parallel() + volumes, err := store.GetVolumesWithBalances(ctx, ledgercontroller.NewGetVolumesWithBalancesQuery(ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{ + PITFilter: ledgercontroller.PITFilter{PIT: nil, OOT: &previousOOT}, + UseInsertionDate: true, + }))) + require.NoError(t, err) + + require.Len(t, volumes.Data, 4) + }) + + t.Run("Get all volumes with balance for insertion date with future oot", func(t *testing.T) { + t.Parallel() + volumes, err := store.GetVolumesWithBalances(ctx, ledgercontroller.NewGetVolumesWithBalancesQuery(ledgercontroller.NewPaginatedQueryOptions( + 
ledgercontroller.FiltersForVolumes{ + PITFilter: ledgercontroller.PITFilter{PIT: nil, OOT: &futurOOT}, + UseInsertionDate: true, + }))) + + require.NoError(t, err) + require.Len(t, volumes.Data, 3) + require.Equal(t, ledger.VolumesWithBalanceByAssetByAccount{ + Account: "account:1", + Asset: "USD", + VolumesWithBalance: ledger.VolumesWithBalance{ + Input: big.NewInt(200), + Output: big.NewInt(50), + Balance: big.NewInt(150), + }, + }, volumes.Data[0]) + }) + + t.Run("Get all volumes with balance for effective date with previous pit", func(t *testing.T) { + t.Parallel() + volumes, err := store.GetVolumesWithBalances(ctx, ledgercontroller.NewGetVolumesWithBalancesQuery(ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{ + PITFilter: ledgercontroller.PITFilter{PIT: &previousPIT, OOT: nil}, + UseInsertionDate: false, + }))) + + require.NoError(t, err) + require.Len(t, volumes.Data, 3) + require.Equal(t, ledger.VolumesWithBalanceByAssetByAccount{ + Account: "account:1", + Asset: "USD", + VolumesWithBalance: ledger.VolumesWithBalance{ + Input: big.NewInt(200), + Output: big.NewInt(50), + Balance: big.NewInt(150), + }, + }, volumes.Data[0]) + }) + + t.Run("Get all volumes with balance for effective date with futur pit", func(t *testing.T) { + t.Parallel() + volumes, err := store.GetVolumesWithBalances(ctx, ledgercontroller.NewGetVolumesWithBalancesQuery(ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{ + PITFilter: ledgercontroller.PITFilter{PIT: &futurPIT, OOT: nil}, + UseInsertionDate: false, + }))) + require.NoError(t, err) + + require.Len(t, volumes.Data, 4) + }) + + t.Run("Get all volumes with balance for effective date with previous oot", func(t *testing.T) { + t.Parallel() + volumes, err := store.GetVolumesWithBalances(ctx, ledgercontroller.NewGetVolumesWithBalancesQuery(ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{ + PITFilter: ledgercontroller.PITFilter{PIT: nil, OOT: &previousOOT}, + UseInsertionDate: false, + }))) + require.NoError(t, err) + + require.Len(t, volumes.Data, 4) + }) + + t.Run("Get all volumes with balance for effective date with futur oot", func(t *testing.T) { + t.Parallel() + volumes, err := store.GetVolumesWithBalances(ctx, ledgercontroller.NewGetVolumesWithBalancesQuery(ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{ + PITFilter: ledgercontroller.PITFilter{PIT: nil, OOT: &futurOOT}, + UseInsertionDate: false, + }))) + + require.NoError(t, err) + require.Len(t, volumes.Data, 3) + require.Equal(t, ledger.VolumesWithBalanceByAssetByAccount{ + Account: "account:2", + Asset: "USD", + VolumesWithBalance: ledger.VolumesWithBalance{ + Input: big.NewInt(25), + Output: big.NewInt(50), + Balance: big.NewInt(-25), + }, + }, volumes.Data[0]) + }) + + t.Run("Get all volumes with balance for insertion date with future PIT and now OOT", func(t *testing.T) { + t.Parallel() + volumes, err := store.GetVolumesWithBalances(ctx, ledgercontroller.NewGetVolumesWithBalancesQuery(ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{ + PITFilter: ledgercontroller.PITFilter{PIT: &futurPIT, OOT: &now}, + UseInsertionDate: true, + }))) + + require.NoError(t, err) + require.Len(t, volumes.Data, 4) + require.Equal(t, ledger.VolumesWithBalanceByAssetByAccount{ + Account: "account:1", + Asset: "USD", + VolumesWithBalance: ledger.VolumesWithBalance{ + Input: big.NewInt(0), + Output: big.NewInt(50), + Balance: big.NewInt(-50), + }, + }, 
volumes.Data[0]) + + }) + + t.Run("Get all volumes with balance for insertion date with previous OOT and now PIT", func(t *testing.T) { + t.Parallel() + volumes, err := store.GetVolumesWithBalances(ctx, ledgercontroller.NewGetVolumesWithBalancesQuery(ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{ + PITFilter: ledgercontroller.PITFilter{PIT: &now, OOT: &previousOOT}, + UseInsertionDate: true, + }))) + + require.NoError(t, err) + require.Len(t, volumes.Data, 3) + require.Equal(t, ledger.VolumesWithBalanceByAssetByAccount{ + Account: "account:2", + Asset: "USD", + VolumesWithBalance: ledger.VolumesWithBalance{ + Input: big.NewInt(100), + Output: big.NewInt(50), + Balance: big.NewInt(50), + }, + }, volumes.Data[0]) + + }) + + t.Run("Get all volumes with balance for effective date with future PIT and now OOT", func(t *testing.T) { + t.Parallel() + volumes, err := store.GetVolumesWithBalances(ctx, ledgercontroller.NewGetVolumesWithBalancesQuery(ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{ + PITFilter: ledgercontroller.PITFilter{PIT: &futurPIT, OOT: &now}, + UseInsertionDate: false, + }))) + + require.NoError(t, err) + require.Len(t, volumes.Data, 3) + require.Equal(t, ledger.VolumesWithBalanceByAssetByAccount{ + Account: "account:2", + Asset: "USD", + VolumesWithBalance: ledger.VolumesWithBalance{ + Input: big.NewInt(100), + Output: big.NewInt(50), + Balance: big.NewInt(50), + }, + }, volumes.Data[0]) + }) + + t.Run("Get all volumes with balance for insertion date with previous OOT and now PIT", func(t *testing.T) { + t.Parallel() + volumes, err := store.GetVolumesWithBalances(ctx, ledgercontroller.NewGetVolumesWithBalancesQuery(ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{ + PITFilter: ledgercontroller.PITFilter{PIT: &now, OOT: &previousOOT}, + UseInsertionDate: false, + }))) + + require.NoError(t, err) + require.Len(t, volumes.Data, 4) + require.Equal(t, ledger.VolumesWithBalanceByAssetByAccount{ + Account: "account:1", + Asset: "USD", + VolumesWithBalance: ledger.VolumesWithBalance{ + Input: big.NewInt(0), + Output: big.NewInt(50), + Balance: big.NewInt(-50), + }, + }, volumes.Data[0]) + + }) + + t.Run("Get account1 volume and Balance for insertion date with previous OOT and now PIT", func(t *testing.T) { + t.Parallel() + + volumes, err := store.GetVolumesWithBalances(ctx, + ledgercontroller.NewGetVolumesWithBalancesQuery( + ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{ + PITFilter: ledgercontroller.PITFilter{PIT: &now, OOT: &previousOOT}, + UseInsertionDate: false, + }).WithQueryBuilder(query.Match("account", "account:1"))), + ) + + require.NoError(t, err) + require.Len(t, volumes.Data, 1) + require.Equal(t, ledger.VolumesWithBalanceByAssetByAccount{ + Account: "account:1", + Asset: "USD", + VolumesWithBalance: ledger.VolumesWithBalance{ + Input: big.NewInt(0), + Output: big.NewInt(50), + Balance: big.NewInt(-50), + }, + }, volumes.Data[0]) + + }) + + t.Run("Using Metadata regex", func(t *testing.T) { + t.Parallel() + + volumes, err := store.GetVolumesWithBalances(ctx, + ledgercontroller.NewGetVolumesWithBalancesQuery( + ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{}).WithQueryBuilder(query.Match("metadata[foo]", "bar"))), + ) + + require.NoError(t, err) + require.Len(t, volumes.Data, 1) + + }) + + t.Run("Using exists metadata filter 1", func(t *testing.T) { + t.Parallel() + + volumes, err := 
store.GetVolumesWithBalances(ctx, + ledgercontroller.NewGetVolumesWithBalancesQuery( + ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{}).WithQueryBuilder(query.Exists("metadata", "category"))), + ) + + require.NoError(t, err) + require.Len(t, volumes.Data, 2) + }) + + t.Run("Using exists metadata filter 2", func(t *testing.T) { + t.Parallel() + + volumes, err := store.GetVolumesWithBalances(ctx, + ledgercontroller.NewGetVolumesWithBalancesQuery( + ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{}).WithQueryBuilder(query.Exists("metadata", "foo"))), + ) + + require.NoError(t, err) + require.Len(t, volumes.Data, 1) + }) +} + +func TestVolumesAggregate(t *testing.T) { + t.Parallel() + store := newLedgerStore(t) + now := time.Now() + ctx := logging.TestingContext() + + pit := now.Add(2 * time.Minute) + oot := now.Add(-2 * time.Minute) + + tx1 := ledger.NewTransaction(). + WithPostings(ledger.NewPosting("world", "account:1:2", "USD", big.NewInt(100))). + WithTimestamp(now.Add(-4 * time.Minute)). + WithInsertedAt(now) + err := store.CommitTransaction(ctx, &tx1) + require.NoError(t, err) + + tx2 := ledger.NewTransaction(). + WithPostings(ledger.NewPosting("world", "account:1:1", "EUR", big.NewInt(100))). + WithTimestamp(now.Add(-3 * time.Minute)) + err = store.CommitTransaction(ctx, &tx2) + require.NoError(t, err) + + tx3 := ledger.NewTransaction(). + WithPostings(ledger.NewPosting("world", "account:1:2", "EUR", big.NewInt(50))). + WithTimestamp(now.Add(-2 * time.Minute)) + err = store.CommitTransaction(ctx, &tx3) + require.NoError(t, err) + + tx4 := ledger.NewTransaction(). + WithPostings(ledger.NewPosting("world", "account:1:3", "USD", big.NewInt(0))). + WithTimestamp(now.Add(-time.Minute)) + err = store.CommitTransaction(ctx, &tx4) + require.NoError(t, err) + + tx5 := ledger.NewTransaction(). + WithPostings(ledger.NewPosting("world", "account:2:1", "USD", big.NewInt(50))). + WithTimestamp(now) + err = store.CommitTransaction(ctx, &tx5) + require.NoError(t, err) + + tx6 := ledger.NewTransaction(). + WithPostings(ledger.NewPosting("world", "account:2:2", "USD", big.NewInt(50))). + WithTimestamp(now.Add(1 * time.Minute)) + err = store.CommitTransaction(ctx, &tx6) + require.NoError(t, err) + + tx7 := ledger.NewTransaction(). + WithPostings(ledger.NewPosting("world", "account:2:3", "EUR", big.NewInt(25))). 
+ WithTimestamp(now.Add(3 * time.Minute)) + err = store.CommitTransaction(ctx, &tx7) + require.NoError(t, err) + + require.NoError(t, store.UpdateAccountsMetadata(ctx, map[string]metadata.Metadata{ + "account:1:1": { + "foo": "bar", + }, + })) + + t.Run("Aggregation Volumes with balance for GroupLvl 0", func(t *testing.T) { + t.Parallel() + volumes, err := store.GetVolumesWithBalances(ctx, ledgercontroller.NewGetVolumesWithBalancesQuery( + ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{ + UseInsertionDate: true, + GroupLvl: 0, + }).WithQueryBuilder(query.Match("account", "account::")))) + + require.NoError(t, err) + require.Len(t, volumes.Data, 7) + }) + + t.Run("Aggregation Volumes with balance for GroupLvl 1", func(t *testing.T) { + t.Parallel() + volumes, err := store.GetVolumesWithBalances(ctx, ledgercontroller.NewGetVolumesWithBalancesQuery( + ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{ + UseInsertionDate: true, + GroupLvl: 1, + }).WithQueryBuilder(query.Match("account", "account::")))) + + require.NoError(t, err) + require.Len(t, volumes.Data, 2) + }) + + t.Run("Aggregation Volumes with balance for GroupLvl 2", func(t *testing.T) { + t.Parallel() + volumes, err := store.GetVolumesWithBalances(ctx, ledgercontroller.NewGetVolumesWithBalancesQuery( + ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{ + UseInsertionDate: true, + GroupLvl: 2, + }).WithQueryBuilder(query.Match("account", "account::")))) + + require.NoError(t, err) + require.Len(t, volumes.Data, 4) + }) + + t.Run("Aggregation Volumes with balance for GroupLvl 3", func(t *testing.T) { + t.Parallel() + volumes, err := store.GetVolumesWithBalances(ctx, ledgercontroller.NewGetVolumesWithBalancesQuery( + ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{ + UseInsertionDate: true, + GroupLvl: 3, + }).WithQueryBuilder(query.Match("account", "account::")))) + + require.NoError(t, err) + require.Len(t, volumes.Data, 7) + }) + + t.Run("Aggregation Volumes with balance for GroupLvl 1 && PIT && OOT && effectiveDate", func(t *testing.T) { + t.Parallel() + + volumes, err := store.GetVolumesWithBalances(ctx, ledgercontroller.NewGetVolumesWithBalancesQuery( + ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &pit, + OOT: &oot, + }, + GroupLvl: 1, + }).WithQueryBuilder(query.Match("account", "account::")))) + + require.NoError(t, err) + require.Len(t, volumes.Data, 2) + require.Equal(t, volumes.Data[0], ledger.VolumesWithBalanceByAssetByAccount{ + Account: "account", + Asset: "EUR", + VolumesWithBalance: ledger.VolumesWithBalance{ + Input: big.NewInt(50), + Output: big.NewInt(0), + Balance: big.NewInt(50), + }, + }) + require.Equal(t, volumes.Data[1], ledger.VolumesWithBalanceByAssetByAccount{ + Account: "account", + Asset: "USD", + VolumesWithBalance: ledger.VolumesWithBalance{ + Input: big.NewInt(100), + Output: big.NewInt(0), + Balance: big.NewInt(100), + }, + }) + }) + + t.Run("Aggregation Volumes with balance for GroupLvl 1 && PIT && OOT && effectiveDate && Balance Filter 1", func(t *testing.T) { + t.Parallel() + volumes, err := store.GetVolumesWithBalances(ctx, ledgercontroller.NewGetVolumesWithBalancesQuery( + ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{ + PITFilter: ledgercontroller.PITFilter{ + PIT: &pit, + OOT: &oot, + }, + UseInsertionDate: false, + GroupLvl: 1, + }).WithQueryBuilder( + 
query.And(query.Match("account", "account::"), query.Gte("balance[EUR]", 50))))) + + require.NoError(t, err) + require.Len(t, volumes.Data, 1) + require.Equal(t, volumes.Data[0], ledger.VolumesWithBalanceByAssetByAccount{ + Account: "account", + Asset: "EUR", + VolumesWithBalance: ledger.VolumesWithBalance{ + Input: big.NewInt(50), + Output: big.NewInt(0), + Balance: big.NewInt(50), + }, + }) + }) + + t.Run("Aggregation Volumes with balance for GroupLvl 1 && Balance Filter 2", func(t *testing.T) { + t.Parallel() + volumes, err := store.GetVolumesWithBalances(ctx, ledgercontroller.NewGetVolumesWithBalancesQuery( + ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{ + PITFilter: ledgercontroller.PITFilter{}, + UseInsertionDate: true, + GroupLvl: 2, + }).WithQueryBuilder( + query.Or( + query.Match("account", "account:1:"), + query.Lte("balance[USD]", 0))))) + + require.NoError(t, err) + require.Len(t, volumes.Data, 3) + require.Equal(t, volumes.Data[0], ledger.VolumesWithBalanceByAssetByAccount{ + Account: "account:1", + Asset: "EUR", + VolumesWithBalance: ledger.VolumesWithBalance{ + Input: big.NewInt(150), + Output: big.NewInt(0), + Balance: big.NewInt(150), + }, + }) + require.Equal(t, volumes.Data[1], ledger.VolumesWithBalanceByAssetByAccount{ + Account: "account:1", + Asset: "USD", + VolumesWithBalance: ledger.VolumesWithBalance{ + Input: big.NewInt(100), + Output: big.NewInt(0), + Balance: big.NewInt(100), + }, + }) + require.Equal(t, volumes.Data[2], ledger.VolumesWithBalanceByAssetByAccount{ + Account: "world", + Asset: "USD", + VolumesWithBalance: ledger.VolumesWithBalance{ + Input: big.NewInt(0), + Output: big.NewInt(200), + Balance: big.NewInt(-200), + }, + }) + }) + t.Run("filter using account matching, metadata, and group", func(t *testing.T) { + t.Parallel() + + volumes, err := store.GetVolumesWithBalances(ctx, + ledgercontroller.NewGetVolumesWithBalancesQuery( + ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{ + GroupLvl: 1, + }).WithQueryBuilder(query.And( + query.Match("account", "account::"), + query.Match("metadata[foo]", "bar"), + ))), + ) + + require.NoError(t, err) + require.Len(t, volumes.Data, 1) + }) + + t.Run("filter using account matching, metadata, and group and PIT", func(t *testing.T) { + t.Parallel() + + volumes, err := store.GetVolumesWithBalances(ctx, + ledgercontroller.NewGetVolumesWithBalancesQuery( + ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{ + GroupLvl: 1, + PITFilter: ledgercontroller.PITFilter{ + PIT: pointer.For(now.Add(time.Minute)), + }, + }).WithQueryBuilder(query.And( + query.Match("account", "account::"), + query.Match("metadata[foo]", "bar"), + ))), + ) + + require.NoError(t, err) + require.Len(t, volumes.Data, 1) + }) + + t.Run("filter using metadata matching only", func(t *testing.T) { + t.Parallel() + + volumes, err := store.GetVolumesWithBalances(ctx, + ledgercontroller.NewGetVolumesWithBalancesQuery( + ledgercontroller.NewPaginatedQueryOptions( + ledgercontroller.FiltersForVolumes{ + GroupLvl: 1, + }).WithQueryBuilder(query.And( + query.Match("metadata[foo]", "bar"), + ))), + ) + + require.NoError(t, err) + require.Len(t, volumes.Data, 1) + }) +} + +func TestUpdateVolumes(t *testing.T) { + t.Parallel() + + t.Run("update volumes of same account sequentially", func(t *testing.T) { + t.Parallel() + + store := newLedgerStore(t) + ctx := logging.TestingContext() + + volumes, err := store.UpdateVolumes(ctx, ledger.AccountsVolumes{ + Account: "world", 
+ Asset: "USD/2", + Input: big.NewInt(0), + Output: big.NewInt(100), + }) + require.NoError(t, err) + require.Equal(t, ledger.PostCommitVolumes{ + "world": { + "USD/2": ledger.NewVolumesInt64(0, 100), + }, + }, volumes) + + volumes, err = store.UpdateVolumes(ctx, ledger.AccountsVolumes{ + Account: "world", + Asset: "USD/2", + Input: big.NewInt(50), + Output: big.NewInt(0), + }) + require.NoError(t, err) + require.Equal(t, ledger.PostCommitVolumes{ + "world": { + "USD/2": ledger.NewVolumesInt64(50, 100), + }, + }, volumes) + + volumes, err = store.UpdateVolumes(ctx, ledger.AccountsVolumes{ + Account: "world", + Asset: "USD/2", + Input: big.NewInt(50), + Output: big.NewInt(50), + }) + require.NoError(t, err) + require.Equal(t, ledger.PostCommitVolumes{ + "world": { + "USD/2": ledger.NewVolumesInt64(100, 150), + }, + }, volumes) + }) + + t.Run("get balance of a non-existing account should take a lock", func(t *testing.T) { + t.Parallel() + + store := newLedgerStore(t) + ctx := logging.TestingContext() + + sqlTx1, err := store.GetDB().BeginTx(ctx, &sql.TxOptions{}) + require.NoError(t, err) + t.Cleanup(func() { + _ = sqlTx1.Rollback() + }) + storeTx1 := store.WithDB(sqlTx1) + + sqlTx2, err := store.GetDB().BeginTx(ctx, &sql.TxOptions{}) + require.NoError(t, err) + t.Cleanup(func() { + _ = sqlTx2.Rollback() + }) + storeTx2 := store.WithDB(sqlTx2) + + // At this stage, the accounts_volumes table is empty. + // Taking the balance of the 'world' account should force a lock. + volumes, err := storeTx1.GetBalances(ctx, ledgercontroller.BalanceQuery{ + "world": {"USD"}, + }) + require.NoError(t, err) + require.Equal(t, ledgercontroller.Balances{ + "world": { + "USD": big.NewInt(0), + }, + }, volumes) + + // Take an advisory lock on tx2 + _, err = storeTx2.GetDB().NewRaw(`select pg_advisory_xact_lock(1)`).Exec(ctx) + require.NoError(t, err) + + errChan := make(chan error, 2) + go func() { + // This call should block as the lock for the row holding 'world' balance is owned by tx1 + _, err := storeTx2.GetBalances(ctx, ledgercontroller.BalanceQuery{ + "world": {"USD"}, + }) + errChan <- err + }() + + go func() { + // Take on tx1 the same advisory lock already held by tx2. + // As tx1 holds a lock on the world balance, and tx2 is waiting for that balance, + // it should trigger a deadlock.
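// Editor's note (not part of the patch): PostgreSQL's deadlock detector resolves the
// resulting cycle -- tx1 waits on advisory lock 1, which tx2 holds, while tx2 waits on
// the balance row locked by tx1 -- by aborting one of the two transactions with
// SQLSTATE 40P01 (deadlock_detected). The select that follows accepts either ordering
// and only checks that the surfaced error maps to postgres.ErrDeadlockDetected via
// postgres.ResolveError.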
+ _, err = storeTx1.GetDB().NewRaw(`select pg_advisory_xact_lock(1)`).Exec(ctx) + errChan <- postgres.ResolveError(err) + }() + + // Either tx1 or tx2 should be cancelled by PG with a deadlock error + select { + case err := <-errChan: + if err == nil { + select { + case err = <-errChan: + if err == nil { + require.Fail(t, "should have a deadlock") + } + case <-libtime.After(2 * time.Second): + require.Fail(t, "transaction should have finished") + } + } + require.True(t, errors.Is(err, postgres.ErrDeadlockDetected)) + case <-libtime.After(2 * time.Second): + require.Fail(t, "transaction should have finished") + } + }) +} diff --git a/internal/storage/ledgerstore/accounts_test.go b/internal/storage/ledgerstore/accounts_test.go deleted file mode 100644 index 8d249a459..000000000 --- a/internal/storage/ledgerstore/accounts_test.go +++ /dev/null @@ -1,413 +0,0 @@ -//go:build it - -package ledgerstore - -import ( - "context" - "math/big" - "testing" - - "github.com/formancehq/go-libs/time" - - "github.com/formancehq/go-libs/logging" - - "github.com/formancehq/go-libs/metadata" - "github.com/formancehq/go-libs/query" - ledger "github.com/formancehq/ledger/internal" - "github.com/stretchr/testify/require" -) - -func TestGetAccounts(t *testing.T) { - t.Parallel() - store := newLedgerStore(t) - now := time.Now() - ctx := logging.TestingContext() - - require.NoError(t, store.InsertLogs(ctx, - ledger.ChainLogs( - ledger.NewTransactionLog( - ledger.NewTransaction(). - WithPostings(ledger.NewPosting("world", "account:1", "USD", big.NewInt(100))). - WithDate(now), - map[string]metadata.Metadata{ - "account:1": { - "category": "4", - }, - }, - ).WithDate(now), - ledger.NewSetMetadataOnAccountLog(time.Now(), "account:1", metadata.Metadata{"category": "1"}).WithDate(now.Add(time.Minute)), - ledger.NewSetMetadataOnAccountLog(time.Now(), "account:2", metadata.Metadata{"category": "2"}).WithDate(now.Add(2*time.Minute)), - ledger.NewSetMetadataOnAccountLog(time.Now(), "account:3", metadata.Metadata{"category": "3"}).WithDate(now.Add(3*time.Minute)), - ledger.NewSetMetadataOnAccountLog(time.Now(), "orders:1", metadata.Metadata{"foo": "bar"}).WithDate(now.Add(3*time.Minute)), - ledger.NewSetMetadataOnAccountLog(time.Now(), "orders:2", metadata.Metadata{"foo": "bar"}).WithDate(now.Add(3*time.Minute)), - ledger.NewTransactionLog( - ledger.NewTransaction(). - WithPostings(ledger.NewPosting("world", "account:1", "USD", big.NewInt(100))). - WithIDUint64(1). - WithDate(now.Add(4*time.Minute)), - map[string]metadata.Metadata{}, - ).WithDate(now.Add(100*time.Millisecond)), - ledger.NewTransactionLog( - ledger.NewTransaction(). - WithPostings(ledger.NewPosting("account:1", "bank", "USD", big.NewInt(50))). - WithDate(now.Add(3*time.Minute)). - WithIDUint64(2), - map[string]metadata.Metadata{}, - ).WithDate(now.Add(200*time.Millisecond)), - ledger.NewTransactionLog( - ledger.NewTransaction(). - WithPostings(ledger.NewPosting("world", "account:1", "USD", big.NewInt(0))). - WithDate(now.Add(-time.Minute)). 
- WithIDUint64(3), - map[string]metadata.Metadata{}, - ).WithDate(now.Add(200*time.Millisecond)), - )..., - )) - - t.Run("list all", func(t *testing.T) { - t.Parallel() - accounts, err := store.GetAccountsWithVolumes(ctx, NewGetAccountsQuery(NewPaginatedQueryOptions(PITFilterWithVolumes{}))) - require.NoError(t, err) - require.Len(t, accounts.Data, 7) - }) - - t.Run("list using metadata", func(t *testing.T) { - t.Parallel() - accounts, err := store.GetAccountsWithVolumes(ctx, NewGetAccountsQuery(NewPaginatedQueryOptions(PITFilterWithVolumes{}). - WithQueryBuilder(query.Match("metadata[category]", "1")), - )) - require.NoError(t, err) - require.Len(t, accounts.Data, 1) - }) - - t.Run("list before date", func(t *testing.T) { - t.Parallel() - accounts, err := store.GetAccountsWithVolumes(ctx, NewGetAccountsQuery(NewPaginatedQueryOptions(PITFilterWithVolumes{ - PITFilter: PITFilter{ - PIT: &now, - }, - }))) - require.NoError(t, err) - require.Len(t, accounts.Data, 2) - }) - - t.Run("list with volumes", func(t *testing.T) { - t.Parallel() - - accounts, err := store.GetAccountsWithVolumes(ctx, NewGetAccountsQuery(NewPaginatedQueryOptions(PITFilterWithVolumes{ - ExpandVolumes: true, - }).WithQueryBuilder(query.Match("address", "account:1")))) - require.NoError(t, err) - require.Len(t, accounts.Data, 1) - require.Equal(t, ledger.VolumesByAssets{ - "USD": ledger.NewVolumesInt64(200, 50), - }, accounts.Data[0].Volumes) - }) - - t.Run("list with volumes using PIT", func(t *testing.T) { - t.Parallel() - - accounts, err := store.GetAccountsWithVolumes(ctx, NewGetAccountsQuery(NewPaginatedQueryOptions(PITFilterWithVolumes{ - PITFilter: PITFilter{ - PIT: &now, - }, - ExpandVolumes: true, - }).WithQueryBuilder(query.Match("address", "account:1")))) - require.NoError(t, err) - require.Len(t, accounts.Data, 1) - require.Equal(t, ledger.VolumesByAssets{ - "USD": ledger.NewVolumesInt64(100, 0), - }, accounts.Data[0].Volumes) - }) - - t.Run("list with effective volumes", func(t *testing.T) { - t.Parallel() - accounts, err := store.GetAccountsWithVolumes(ctx, NewGetAccountsQuery(NewPaginatedQueryOptions(PITFilterWithVolumes{ - ExpandEffectiveVolumes: true, - }).WithQueryBuilder(query.Match("address", "account:1")))) - require.NoError(t, err) - require.Len(t, accounts.Data, 1) - require.Equal(t, ledger.VolumesByAssets{ - "USD": ledger.NewVolumesInt64(200, 50), - }, accounts.Data[0].EffectiveVolumes) - }) - - t.Run("list with effective volumes using PIT", func(t *testing.T) { - t.Parallel() - accounts, err := store.GetAccountsWithVolumes(ctx, NewGetAccountsQuery(NewPaginatedQueryOptions(PITFilterWithVolumes{ - PITFilter: PITFilter{ - PIT: &now, - }, - ExpandEffectiveVolumes: true, - }).WithQueryBuilder(query.Match("address", "account:1")))) - require.NoError(t, err) - require.Len(t, accounts.Data, 1) - require.Equal(t, ledger.VolumesByAssets{ - "USD": ledger.NewVolumesInt64(100, 0), - }, accounts.Data[0].EffectiveVolumes) - }) - - t.Run("list using filter on address", func(t *testing.T) { - t.Parallel() - accounts, err := store.GetAccountsWithVolumes(ctx, NewGetAccountsQuery(NewPaginatedQueryOptions(PITFilterWithVolumes{}). - WithQueryBuilder(query.Match("address", "account:")), - )) - require.NoError(t, err) - require.Len(t, accounts.Data, 3) - }) - t.Run("list using filter on multiple address", func(t *testing.T) { - t.Parallel() - accounts, err := store.GetAccountsWithVolumes(ctx, NewGetAccountsQuery(NewPaginatedQueryOptions(PITFilterWithVolumes{}). 
- WithQueryBuilder( - query.Or( - query.Match("address", "account:1"), - query.Match("address", "orders:"), - ), - ), - )) - require.NoError(t, err) - require.Len(t, accounts.Data, 3) - }) - t.Run("list using filter on balances", func(t *testing.T) { - t.Parallel() - accounts, err := store.GetAccountsWithVolumes(ctx, NewGetAccountsQuery(NewPaginatedQueryOptions(PITFilterWithVolumes{}). - WithQueryBuilder(query.Lt("balance[USD]", 0)), - )) - require.NoError(t, err) - require.Len(t, accounts.Data, 1) // world - - accounts, err = store.GetAccountsWithVolumes(ctx, NewGetAccountsQuery(NewPaginatedQueryOptions(PITFilterWithVolumes{}). - WithQueryBuilder(query.Gt("balance[USD]", 0)), - )) - require.NoError(t, err) - require.Len(t, accounts.Data, 2) - require.Equal(t, "account:1", accounts.Data[0].Account.Address) - require.Equal(t, "bank", accounts.Data[1].Account.Address) - }) - - t.Run("list using filter on exists metadata", func(t *testing.T) { - t.Parallel() - accounts, err := store.GetAccountsWithVolumes(ctx, NewGetAccountsQuery(NewPaginatedQueryOptions(PITFilterWithVolumes{}). - WithQueryBuilder(query.Exists("metadata", "foo")), - )) - require.NoError(t, err) - require.Len(t, accounts.Data, 2) - - accounts, err = store.GetAccountsWithVolumes(ctx, NewGetAccountsQuery(NewPaginatedQueryOptions(PITFilterWithVolumes{}). - WithQueryBuilder(query.Exists("metadata", "category")), - )) - require.NoError(t, err) - require.Len(t, accounts.Data, 3) - }) - - t.Run("list using filter invalid field", func(t *testing.T) { - t.Parallel() - _, err := store.GetAccountsWithVolumes(ctx, NewGetAccountsQuery(NewPaginatedQueryOptions(PITFilterWithVolumes{}). - WithQueryBuilder(query.Lt("invalid", 0)), - )) - require.Error(t, err) - require.True(t, IsErrInvalidQuery(err)) - }) -} - -func TestUpdateAccountsMetadata(t *testing.T) { - t.Parallel() - store := newLedgerStore(t) - - metadata := metadata.Metadata{ - "foo": "bar", - } - - require.NoError(t, store.InsertLogs(context.Background(), - ledger.NewSetMetadataOnAccountLog(time.Now(), "bank", metadata).ChainLog(nil), - ), "account insertion should not fail") - - account, err := store.GetAccountWithVolumes(context.Background(), NewGetAccountQuery("bank")) - require.NoError(t, err, "account retrieval should not fail") - - require.Equal(t, "bank", account.Address, "account address should match") - require.Equal(t, metadata, account.Metadata, "account metadata should match") -} - -func TestGetAccount(t *testing.T) { - t.Parallel() - store := newLedgerStore(t) - now := time.Now() - ctx := logging.TestingContext() - - require.NoError(t, store.InsertLogs(ctx, - ledger.ChainLogs( - ledger.NewTransactionLog(ledger.NewTransaction().WithPostings( - ledger.NewPosting("world", "multi", "USD/2", big.NewInt(100)), - ).WithDate(now), map[string]metadata.Metadata{}), - ledger.NewSetMetadataLog(now.Add(time.Minute), ledger.SetMetadataLogPayload{ - TargetType: ledger.MetaTargetTypeAccount, - TargetID: "multi", - Metadata: metadata.Metadata{ - "category": "gold", - }, - }), - ledger.NewTransactionLog(ledger.NewTransaction().WithPostings( - ledger.NewPosting("world", "multi", "USD/2", big.NewInt(0)), - ).WithID(big.NewInt(1)).WithDate(now.Add(-time.Minute)), map[string]metadata.Metadata{}), - )..., - )) - - t.Run("find account", func(t *testing.T) { - t.Parallel() - account, err := store.GetAccountWithVolumes(ctx, NewGetAccountQuery("multi")) - require.NoError(t, err) - require.Equal(t, ledger.ExpandedAccount{ - Account: ledger.Account{ - Address: "multi", - Metadata: 
metadata.Metadata{ - "category": "gold", - }, - FirstUsage: now.Add(-time.Minute), - }, - }, *account) - - account, err = store.GetAccountWithVolumes(ctx, NewGetAccountQuery("world")) - require.NoError(t, err) - require.Equal(t, ledger.ExpandedAccount{ - Account: ledger.Account{ - Address: "world", - Metadata: metadata.Metadata{}, - FirstUsage: now.Add(-time.Minute), - }, - }, *account) - }) - - t.Run("find account in past", func(t *testing.T) { - t.Parallel() - account, err := store.GetAccountWithVolumes(ctx, NewGetAccountQuery("multi").WithPIT(now.Add(-30*time.Second))) - require.NoError(t, err) - require.Equal(t, ledger.ExpandedAccount{ - Account: ledger.Account{ - Address: "multi", - Metadata: metadata.Metadata{}, - FirstUsage: now.Add(-time.Minute), - }, - }, *account) - }) - - t.Run("find account with volumes", func(t *testing.T) { - t.Parallel() - account, err := store.GetAccountWithVolumes(ctx, NewGetAccountQuery("multi"). - WithExpandVolumes()) - require.NoError(t, err) - require.Equal(t, ledger.ExpandedAccount{ - Account: ledger.Account{ - Address: "multi", - Metadata: metadata.Metadata{ - "category": "gold", - }, - FirstUsage: now.Add(-time.Minute), - }, - Volumes: ledger.VolumesByAssets{ - "USD/2": ledger.NewVolumesInt64(100, 0), - }, - }, *account) - }) - - t.Run("find account with effective volumes", func(t *testing.T) { - t.Parallel() - account, err := store.GetAccountWithVolumes(ctx, NewGetAccountQuery("multi"). - WithExpandEffectiveVolumes()) - require.NoError(t, err) - require.Equal(t, ledger.ExpandedAccount{ - Account: ledger.Account{ - Address: "multi", - Metadata: metadata.Metadata{ - "category": "gold", - }, - FirstUsage: now.Add(-time.Minute), - }, - EffectiveVolumes: ledger.VolumesByAssets{ - "USD/2": ledger.NewVolumesInt64(100, 0), - }, - }, *account) - }) - - t.Run("find account using pit", func(t *testing.T) { - t.Parallel() - account, err := store.GetAccountWithVolumes(ctx, NewGetAccountQuery("multi").WithPIT(now)) - require.NoError(t, err) - require.Equal(t, ledger.ExpandedAccount{ - Account: ledger.Account{ - Address: "multi", - Metadata: metadata.Metadata{}, - FirstUsage: now.Add(-time.Minute), - }, - }, *account) - }) - - t.Run("not existent account", func(t *testing.T) { - t.Parallel() - _, err := store.GetAccountWithVolumes(ctx, NewGetAccountQuery("account_not_existing")) - require.Error(t, err) - }) - -} - -func TestGetAccountWithVolumes(t *testing.T) { - t.Parallel() - store := newLedgerStore(t) - ctx := logging.TestingContext() - - bigInt, _ := big.NewInt(0).SetString("999999999999999999999999999999999999999999999999999999999999999999999999999999999999999", 10) - - require.NoError(t, store.InsertLogs(ctx, - ledger.ChainLogs( - ledger.NewTransactionLog(ledger.NewTransaction().WithPostings( - ledger.NewPosting("world", "multi", "USD/2", bigInt), - ).WithDate(now), map[string]metadata.Metadata{}), - )..., - )) - - accountWithVolumes, err := store.GetAccountWithVolumes(ctx, - NewGetAccountQuery("multi").WithExpandVolumes()) - require.NoError(t, err) - require.Equal(t, &ledger.ExpandedAccount{ - Account: ledger.Account{ - Address: "multi", - Metadata: metadata.Metadata{}, - FirstUsage: now, - }, - Volumes: map[string]*ledger.Volumes{ - "USD/2": ledger.NewEmptyVolumes().WithInput(bigInt), - }, - }, accountWithVolumes) -} - -func TestUpdateAccountMetadata(t *testing.T) { - t.Parallel() - store := newLedgerStore(t) - ctx := logging.TestingContext() - - require.NoError(t, store.InsertLogs(ctx, - ledger.NewSetMetadataOnAccountLog(time.Now(), "central_bank", 
metadata.Metadata{ - "foo": "bar", - }).ChainLog(nil), - )) - - account, err := store.GetAccountWithVolumes(ctx, NewGetAccountQuery("central_bank")) - require.NoError(t, err) - require.EqualValues(t, "bar", account.Metadata["foo"]) -} - -func TestCountAccounts(t *testing.T) { - t.Parallel() - store := newLedgerStore(t) - ctx := logging.TestingContext() - - require.NoError(t, insertTransactions(ctx, store, - *ledger.NewTransaction().WithPostings( - ledger.NewPosting("world", "central_bank", "USD/2", big.NewInt(100)), - ), - )) - - countAccounts, err := store.CountAccounts(ctx, NewGetAccountsQuery(NewPaginatedQueryOptions(PITFilterWithVolumes{}))) - require.NoError(t, err) - require.EqualValues(t, 2, countAccounts) // world + central_bank -} diff --git a/internal/storage/ledgerstore/balances_test.go b/internal/storage/ledgerstore/balances_test.go deleted file mode 100644 index 8b099d7f7..000000000 --- a/internal/storage/ledgerstore/balances_test.go +++ /dev/null @@ -1,158 +0,0 @@ -//go:build it - -package ledgerstore - -import ( - "math/big" - "testing" - - "github.com/formancehq/go-libs/time" - - "github.com/formancehq/go-libs/logging" - "github.com/formancehq/go-libs/pointer" - - "github.com/formancehq/go-libs/metadata" - "github.com/formancehq/go-libs/query" - ledger "github.com/formancehq/ledger/internal" - internaltesting "github.com/formancehq/ledger/internal/testing" - "github.com/stretchr/testify/require" -) - -func TestGetBalancesAggregated(t *testing.T) { - t.Parallel() - store := newLedgerStore(t) - now := time.Now() - ctx := logging.TestingContext() - - bigInt, _ := big.NewInt(0).SetString("999999999999999999999999999999999999999999999999999999999999999999999999999999999", 10) - smallInt := big.NewInt(199) - - tx1 := ledger.NewTransaction().WithPostings( - ledger.NewPosting("world", "users:1", "USD", bigInt), - ledger.NewPosting("world", "users:2", "USD", smallInt), - ).WithDate(now) - - tx2 := ledger.NewTransaction().WithPostings( - ledger.NewPosting("world", "users:1", "USD", bigInt), - ledger.NewPosting("world", "users:2", "USD", smallInt), - ledger.NewPosting("world", "xxx", "EUR", smallInt), - ).WithDate(now.Add(-time.Minute)).WithIDUint64(1) - - logs := []*ledger.Log{ - ledger.NewTransactionLog(tx1, map[string]metadata.Metadata{}).WithDate(now), - ledger.NewTransactionLog(tx2, map[string]metadata.Metadata{}).WithDate(now.Add(time.Minute)), - ledger.NewSetMetadataLog(now.Add(time.Minute), ledger.SetMetadataLogPayload{ - TargetType: ledger.MetaTargetTypeAccount, - TargetID: "users:1", - Metadata: metadata.Metadata{ - "category": "premium", - }, - }), - ledger.NewSetMetadataLog(now.Add(time.Minute), ledger.SetMetadataLogPayload{ - TargetType: ledger.MetaTargetTypeAccount, - TargetID: "users:2", - Metadata: metadata.Metadata{ - "category": "premium", - }, - }), - ledger.NewDeleteMetadataLog(now.Add(2*time.Minute), ledger.DeleteMetadataLogPayload{ - TargetType: ledger.MetaTargetTypeAccount, - TargetID: "users:2", - Key: "category", - }), - ledger.NewSetMetadataOnAccountLog(time.Now(), "users:1", metadata.Metadata{"category": "premium"}).WithDate(now.Add(time.Minute)), - ledger.NewSetMetadataOnAccountLog(time.Now(), "users:2", metadata.Metadata{"category": "2"}).WithDate(now.Add(time.Minute)), - ledger.NewSetMetadataOnAccountLog(time.Now(), "world", metadata.Metadata{"foo": "bar"}).WithDate(now.Add(time.Minute)), - } - - require.NoError(t, store.InsertLogs(ctx, ledger.ChainLogs(logs...)...)) - - t.Run("aggregate on all", func(t *testing.T) { - t.Parallel() - cursor, err := 
store.GetAggregatedBalances(ctx, NewGetAggregatedBalancesQuery(PITFilter{}, nil, false)) - require.NoError(t, err) - internaltesting.RequireEqual(t, ledger.BalancesByAssets{ - "USD": big.NewInt(0), - "EUR": big.NewInt(0), - }, cursor) - }) - t.Run("filter on address", func(t *testing.T) { - t.Parallel() - ret, err := store.GetAggregatedBalances(ctx, NewGetAggregatedBalancesQuery(PITFilter{}, - query.Match("address", "users:"), false)) - require.NoError(t, err) - require.Equal(t, ledger.BalancesByAssets{ - "USD": big.NewInt(0).Add( - big.NewInt(0).Mul(bigInt, big.NewInt(2)), - big.NewInt(0).Mul(smallInt, big.NewInt(2)), - ), - }, ret) - }) - t.Run("using pit on effective date", func(t *testing.T) { - t.Parallel() - ret, err := store.GetAggregatedBalances(ctx, NewGetAggregatedBalancesQuery(PITFilter{ - PIT: pointer.For(now.Add(-time.Second)), - }, query.Match("address", "users:"), false)) - require.NoError(t, err) - require.Equal(t, ledger.BalancesByAssets{ - "USD": big.NewInt(0).Add( - bigInt, - smallInt, - ), - }, ret) - }) - t.Run("using pit on insertion date", func(t *testing.T) { - t.Parallel() - ret, err := store.GetAggregatedBalances(ctx, NewGetAggregatedBalancesQuery(PITFilter{ - PIT: pointer.For(now), - }, query.Match("address", "users:"), true)) - require.NoError(t, err) - require.Equal(t, ledger.BalancesByAssets{ - "USD": big.NewInt(0).Add( - bigInt, - smallInt, - ), - }, ret) - }) - t.Run("using a metadata and pit", func(t *testing.T) { - t.Parallel() - ret, err := store.GetAggregatedBalances(ctx, NewGetAggregatedBalancesQuery(PITFilter{ - PIT: pointer.For(now.Add(time.Minute)), - }, query.Match("metadata[category]", "premium"), false)) - require.NoError(t, err) - require.Equal(t, ledger.BalancesByAssets{ - "USD": big.NewInt(0).Add( - big.NewInt(0).Mul(bigInt, big.NewInt(2)), - big.NewInt(0), - ), - }, ret) - }) - t.Run("using a metadata without pit", func(t *testing.T) { - t.Parallel() - ret, err := store.GetAggregatedBalances(ctx, NewGetAggregatedBalancesQuery(PITFilter{}, - query.Match("metadata[category]", "premium"), false)) - require.NoError(t, err) - require.Equal(t, ledger.BalancesByAssets{ - "USD": big.NewInt(0).Mul(bigInt, big.NewInt(2)), - }, ret) - }) - t.Run("when no matching", func(t *testing.T) { - t.Parallel() - ret, err := store.GetAggregatedBalances(ctx, NewGetAggregatedBalancesQuery(PITFilter{}, - query.Match("metadata[category]", "guest"), false)) - require.NoError(t, err) - require.Equal(t, ledger.BalancesByAssets{}, ret) - }) - - t.Run("using a filter exist on metadata", func(t *testing.T) { - t.Parallel() - ret, err := store.GetAggregatedBalances(ctx, NewGetAggregatedBalancesQuery(PITFilter{}, query.Exists("metadata", "category"), false)) - require.NoError(t, err) - require.Equal(t, ledger.BalancesByAssets{ - "USD": big.NewInt(0).Add( - big.NewInt(0).Mul(bigInt, big.NewInt(2)), - big.NewInt(0).Mul(smallInt, big.NewInt(2)), - ), - }, ret) - }) -} diff --git a/internal/storage/ledgerstore/bucket.go b/internal/storage/ledgerstore/bucket.go deleted file mode 100644 index dc0e217fb..000000000 --- a/internal/storage/ledgerstore/bucket.go +++ /dev/null @@ -1,162 +0,0 @@ -package ledgerstore - -import ( - "context" - "database/sql" - "embed" - "fmt" - - "github.com/formancehq/go-libs/migrations" - - "github.com/formancehq/go-libs/bun/bunconnect" - - "github.com/formancehq/ledger/internal/storage/sqlutils" - "github.com/pkg/errors" - "github.com/uptrace/bun" -) - -//go:embed migrations -var migrationsDir embed.FS - -type Bucket struct { - name string - db *bun.DB -} 
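Editor's note, not part of the patch: for readers skimming the removed bucket layer, the sketch below shows how this Bucket type was typically wired together by callers, based on the deleted ConnectToBucket, (*Bucket).Migrate and (*Bucket).CreateLedgerStore in this file and the bunconnect connection options used by the deleted main_test.go further down. It is written as if it lived inside the repository (the package is internal); the helper name, DSN and ledger names are placeholders, not code from the repository.

package example

import (
	"context"

	"github.com/formancehq/go-libs/bun/bunconnect"
	"github.com/formancehq/ledger/internal/storage/ledgerstore"
)

// openLedgerStore is a hypothetical helper: it opens a schema-scoped connection for a
// bucket, applies the bucket migrations, then returns a store bound to one ledger.
func openLedgerStore(ctx context.Context, dsn, bucket, ledgerName string) (*ledgerstore.Store, error) {
	b, err := ledgerstore.ConnectToBucket(ctx, bunconnect.ConnectionOptions{
		DatabaseSourceName: dsn,
	}, bucket)
	if err != nil {
		return nil, err
	}
	if err := b.Migrate(ctx); err != nil {
		return nil, err
	}
	return b.CreateLedgerStore(ledgerName)
}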
- -func (b *Bucket) Name() string { - return b.name -} - -func (b *Bucket) Migrate(ctx context.Context) error { - return MigrateBucket(ctx, b.db, b.name) -} - -func (b *Bucket) GetMigrationsInfo(ctx context.Context) ([]migrations.Info, error) { - return getBucketMigrator(b.name).GetMigrations(ctx, b.db) -} - -func (b *Bucket) IsUpToDate(ctx context.Context) (bool, error) { - ret, err := getBucketMigrator(b.name).IsUpToDate(ctx, b.db) - if err != nil && errors.Is(err, migrations.ErrMissingVersionTable) { - return false, nil - } - return ret, err -} - -func (b *Bucket) Close() error { - return b.db.Close() -} - -func (b *Bucket) createLedgerStore(name string) (*Store, error) { - return New(b, name) -} - -func (b *Bucket) CreateLedgerStore(name string) (*Store, error) { - return b.createLedgerStore(name) -} - -func (b *Bucket) GetLedgerStore(name string) (*Store, error) { - return New(b, name) -} - -func (b *Bucket) IsInitialized(ctx context.Context) (bool, error) { - row := b.db.QueryRowContext(ctx, ` - select schema_name - from information_schema.schemata - where schema_name = ?; - `, b.name) - if row.Err() != nil { - return false, sqlutils.PostgresError(row.Err()) - } - var t string - if err := row.Scan(&t); err != nil { - if errors.Is(err, sql.ErrNoRows) { - return false, nil - } - } - return true, nil -} - -func registerMigrations(migrator *migrations.Migrator, name string) { - ret, err := migrations.CollectMigrationFiles(migrationsDir, "migrations", func(s string) string { - return s - }) - if err != nil { - panic(err) - } - initSchema := ret[0] - - // notes(gfyrag): override default schema initialization to handle ledger v1 upgrades - ret[0] = migrations.Migration{ - Name: "Init schema", - UpWithContext: func(ctx context.Context, tx bun.Tx) error { - - needV1Upgrade := false - row := tx.QueryRowContext(ctx, `select exists ( - select from pg_tables - where schemaname = ? and tablename = 'log' - )`, name) - if row.Err() != nil { - return row.Err() - } - var ret string - if err := row.Scan(&ret); err != nil { - panic(err) - } - needV1Upgrade = ret != "false" - - oldSchemaRenamed := fmt.Sprintf(name + oldSchemaRenameSuffix) - if needV1Upgrade { - _, err := tx.ExecContext(ctx, fmt.Sprintf(`alter schema "%s" rename to "%s"`, name, oldSchemaRenamed)) - if err != nil { - return errors.Wrap(err, "renaming old schema") - } - _, err = tx.ExecContext(ctx, fmt.Sprintf(`create schema if not exists "%s"`, name)) - if err != nil { - return errors.Wrap(err, "creating new schema") - } - } - - if err := initSchema.UpWithContext(ctx, tx); err != nil { - return errors.Wrap(err, "initializing new schema") - } - - if needV1Upgrade { - if err := migrateLogs(ctx, oldSchemaRenamed, name, tx); err != nil { - return errors.Wrap(err, "migrating logs") - } - - _, err = tx.ExecContext(ctx, fmt.Sprintf(`create table goose_db_version as table "%s".goose_db_version with no data`, oldSchemaRenamed)) - if err != nil { - return err - } - } - - return nil - }, - } - - migrator.RegisterMigrations(ret...) -} - -func ConnectToBucket(ctx context.Context, connectionOptions bunconnect.ConnectionOptions, name string, hooks ...bun.QueryHook) (*Bucket, error) { - db, err := bunconnect.OpenDBWithSchema(ctx, connectionOptions, name, hooks...) 
- if err != nil { - return nil, sqlutils.PostgresError(err) - } - - return &Bucket{ - db: db, - name: name, - }, nil -} - -func getBucketMigrator(name string) *migrations.Migrator { - migrator := migrations.NewMigrator(migrations.WithSchema(name, true)) - registerMigrations(migrator, name) - return migrator -} - -func MigrateBucket(ctx context.Context, db bun.IDB, name string) error { - return getBucketMigrator(name).Up(ctx, db) -} diff --git a/internal/storage/ledgerstore/bucket_test.go b/internal/storage/ledgerstore/bucket_test.go deleted file mode 100644 index 2268b7d4c..000000000 --- a/internal/storage/ledgerstore/bucket_test.go +++ /dev/null @@ -1,73 +0,0 @@ -//go:build it - -package ledgerstore - -import ( - "math/big" - "testing" - - "github.com/formancehq/go-libs/logging" - "github.com/formancehq/go-libs/metadata" - ledger "github.com/formancehq/ledger/internal" - "github.com/google/uuid" - "github.com/stretchr/testify/require" -) - -func TestBuckets(t *testing.T) { - ctx := logging.TestingContext() - bucket := newBucket(t) - var ( - ledger0 = uuid.NewString() - ledger1 = uuid.NewString() - ) - ledger0Store, err := bucket.CreateLedgerStore(ledger0) - require.NoError(t, err) - - ledger1Store, err := bucket.CreateLedgerStore(ledger1) - require.NoError(t, err) - - txLedger0 := ledger.Transaction{ - ID: big.NewInt(0), - TransactionData: ledger.TransactionData{ - Postings: ledger.Postings{ - { - Source: "world", - Destination: "alice", - Amount: big.NewInt(100), - Asset: "USD", - }, - }, - Metadata: metadata.Metadata{}, - }, - } - - txLedger1 := ledger.Transaction{ - ID: big.NewInt(0), - TransactionData: ledger.TransactionData{ - Postings: ledger.Postings{ - { - Source: "world", - Destination: "alice", - Amount: big.NewInt(100), - Asset: "USD", - }, - }, - Metadata: metadata.Metadata{}, - }, - } - - require.NoError(t, ledger0Store.InsertLogs(ctx, - ledger.NewTransactionLog(&txLedger0, map[string]metadata.Metadata{}).ChainLog(nil), - )) - require.NoError(t, ledger1Store.InsertLogs(ctx, - ledger.NewTransactionLog(&txLedger1, map[string]metadata.Metadata{}).ChainLog(nil), - )) - - count, err := ledger0Store.CountTransactions(ctx, NewGetTransactionsQuery(PaginatedQueryOptions[PITFilterWithVolumes]{})) - require.NoError(t, err) - require.Equal(t, count, 1) - - count, err = ledger1Store.CountTransactions(ctx, NewGetTransactionsQuery(PaginatedQueryOptions[PITFilterWithVolumes]{})) - require.NoError(t, err) - require.Equal(t, count, 1) -} diff --git a/internal/storage/ledgerstore/logs.go b/internal/storage/ledgerstore/logs.go deleted file mode 100644 index 50a5362d9..000000000 --- a/internal/storage/ledgerstore/logs.go +++ /dev/null @@ -1,178 +0,0 @@ -package ledgerstore - -import ( - "context" - "database/sql/driver" - "encoding/json" - "fmt" - "math/big" - - "github.com/formancehq/go-libs/collectionutils" - "github.com/formancehq/go-libs/pointer" - - "github.com/formancehq/go-libs/time" - - "github.com/formancehq/go-libs/bun/bunpaginate" - - "github.com/formancehq/go-libs/query" - ledger "github.com/formancehq/ledger/internal" - "github.com/pkg/errors" - "github.com/uptrace/bun" -) - -type Logs struct { - bun.BaseModel `bun:"table:logs,alias:logs"` - - Ledger string `bun:"ledger,type:varchar"` - ID *bunpaginate.BigInt `bun:"id,unique,type:numeric"` - Type string `bun:"type,type:log_type"` - Hash []byte `bun:"hash,type:bytea"` - Date time.Time `bun:"date,type:timestamptz"` - Data RawMessage `bun:"data,type:jsonb"` - IdempotencyKey *string `bun:"idempotency_key,type:varchar(256),unique"` -} - 
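Editor's note, not part of the patch: the Logs struct above is the bun row model that the removed InsertLogs (further down in this file) maps ledger.ChainedLog values onto. The sketch below isolates that pattern, assuming the same package and imports as the deleted logs.go; the function name is hypothetical.

// insertOneLog is a hypothetical, stripped-down variant of InsertLogs: it marshals the
// log payload to JSON and writes a single row of the Logs model with bun.
func insertOneLog(ctx context.Context, db bun.IDB, ledgerName string, log *ledger.ChainedLog) error {
	data, err := json.Marshal(log.Data)
	if err != nil {
		return err
	}
	row := Logs{
		Ledger: ledgerName,
		ID:     (*bunpaginate.BigInt)(log.ID),
		Type:   log.Type.String(),
		Hash:   log.Hash,
		Date:   log.Date,
		Data:   data,
	}
	_, err = db.NewInsert().Model(&row).Exec(ctx)
	return err
}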
-func (log *Logs) ToCore() *ledger.ChainedLog { - - payload, err := ledger.HydrateLog(ledger.LogTypeFromString(log.Type), log.Data) - if err != nil { - panic(errors.Wrap(err, "hydrating log data")) - } - - return &ledger.ChainedLog{ - Log: ledger.Log{ - Type: ledger.LogTypeFromString(log.Type), - Data: payload, - Date: log.Date.UTC(), - IdempotencyKey: func() string { - if log.IdempotencyKey != nil { - return *log.IdempotencyKey - } - return "" - }(), - }, - ID: (*big.Int)(log.ID), - Hash: log.Hash, - } -} - -type RawMessage json.RawMessage - -func (j RawMessage) Value() (driver.Value, error) { - if j == nil { - return nil, nil - } - return string(j), nil -} - -func (store *Store) logsQueryBuilder(q PaginatedQueryOptions[any]) func(*bun.SelectQuery) *bun.SelectQuery { - return func(selectQuery *bun.SelectQuery) *bun.SelectQuery { - - selectQuery = selectQuery.Where("ledger = ?", store.name) - if q.QueryBuilder != nil { - subQuery, args, err := q.QueryBuilder.Build(query.ContextFn(func(key, operator string, value any) (string, []any, error) { - switch { - case key == "date": - return fmt.Sprintf("%s %s ?", key, query.DefaultComparisonOperatorsMapping[operator]), []any{value}, nil - default: - return "", nil, fmt.Errorf("unknown key '%s' when building query", key) - } - })) - if err != nil { - panic(err) - } - selectQuery = selectQuery.Where(subQuery, args...) - } - - return selectQuery - } -} - -func (store *Store) InsertLogs(ctx context.Context, activeLogs ...*ledger.ChainedLog) error { - _, err := store.bucket.db. - NewInsert(). - Model(pointer.For(collectionutils.Map(activeLogs, func(from *ledger.ChainedLog) Logs { - data, err := json.Marshal(from.Data) - if err != nil { - panic(err) - } - - return Logs{ - Ledger: store.name, - ID: (*bunpaginate.BigInt)(from.ID), - Type: from.Type.String(), - Hash: from.Hash, - Date: from.Date, - Data: data, - IdempotencyKey: func() *string { - if from.IdempotencyKey != "" { - return &from.IdempotencyKey - } - return nil - }(), - } - }))). - Exec(ctx) - return err -} - -func (store *Store) GetLastLog(ctx context.Context) (*ledger.ChainedLog, error) { - ret, err := fetch[*Logs](store, true, ctx, - func(query *bun.SelectQuery) *bun.SelectQuery { - return query. - OrderExpr("id desc"). - Where("ledger = ?", store.name). - Limit(1) - }) - if err != nil { - return nil, err - } - - return ret.ToCore(), nil -} - -func (store *Store) GetLogs(ctx context.Context, q GetLogsQuery) (*bunpaginate.Cursor[ledger.ChainedLog], error) { - logs, err := paginateWithColumn[PaginatedQueryOptions[any], Logs](store, ctx, - (*bunpaginate.ColumnPaginatedQuery[PaginatedQueryOptions[any]])(&q), - store.logsQueryBuilder(q.Options), - ) - if err != nil { - return nil, err - } - - return bunpaginate.MapCursor(logs, func(from Logs) ledger.ChainedLog { - return *from.ToCore() - }), nil -} - -func (store *Store) ReadLogWithIdempotencyKey(ctx context.Context, key string) (*ledger.ChainedLog, error) { - ret, err := fetch[*Logs](store, true, ctx, - func(query *bun.SelectQuery) *bun.SelectQuery { - return query. - OrderExpr("id desc"). - Limit(1). - Where("idempotency_key = ?", key). 
- Where("ledger = ?", store.name) - }) - if err != nil { - return nil, err - } - - return ret.ToCore(), nil -} - -type GetLogsQuery bunpaginate.ColumnPaginatedQuery[PaginatedQueryOptions[any]] - -func (q GetLogsQuery) WithOrder(order bunpaginate.Order) GetLogsQuery { - q.Order = order - return q -} - -func NewGetLogsQuery(options PaginatedQueryOptions[any]) GetLogsQuery { - return GetLogsQuery{ - PageSize: options.PageSize, - Column: "id", - Order: bunpaginate.OrderDesc, - Options: options, - } -} diff --git a/internal/storage/ledgerstore/logs_test.go b/internal/storage/ledgerstore/logs_test.go deleted file mode 100644 index 333c2858a..000000000 --- a/internal/storage/ledgerstore/logs_test.go +++ /dev/null @@ -1,379 +0,0 @@ -//go:build it - -package ledgerstore - -import ( - "context" - "fmt" - "math/big" - "testing" - - "github.com/formancehq/go-libs/bun/bunpaginate" - "github.com/formancehq/go-libs/time" - - "github.com/formancehq/go-libs/logging" - - "github.com/formancehq/ledger/internal/storage/sqlutils" - - "github.com/formancehq/go-libs/metadata" - "github.com/formancehq/go-libs/query" - ledger "github.com/formancehq/ledger/internal" - "github.com/stretchr/testify/require" -) - -func TestGetLastLog(t *testing.T) { - t.Parallel() - store := newLedgerStore(t) - now := time.Now() - - lastLog, err := store.GetLastLog(context.Background()) - require.True(t, sqlutils.IsNotFoundError(err)) - require.Nil(t, lastLog) - tx1 := ledger.ExpandedTransaction{ - Transaction: ledger.Transaction{ - ID: big.NewInt(0), - TransactionData: ledger.TransactionData{ - Postings: []ledger.Posting{ - { - Source: "world", - Destination: "central_bank", - Amount: big.NewInt(100), - Asset: "USD", - }, - }, - Reference: "tx1", - Timestamp: now.Add(-3 * time.Hour), - }, - }, - PostCommitVolumes: ledger.AccountsAssetsVolumes{ - "world": { - "USD": { - Input: big.NewInt(0), - Output: big.NewInt(100), - }, - }, - "central_bank": { - "USD": { - Input: big.NewInt(100), - Output: big.NewInt(0), - }, - }, - }, - PreCommitVolumes: ledger.AccountsAssetsVolumes{ - "world": { - "USD": { - Input: big.NewInt(0), - Output: big.NewInt(0), - }, - }, - "central_bank": { - "USD": { - Input: big.NewInt(0), - Output: big.NewInt(0), - }, - }, - }, - } - - logTx := ledger.NewTransactionLog(&tx1.Transaction, map[string]metadata.Metadata{}).ChainLog(nil) - appendLog(t, store, logTx) - - lastLog, err = store.GetLastLog(context.Background()) - require.NoError(t, err) - require.NotNil(t, lastLog) - - require.Equal(t, tx1.Postings, lastLog.Data.(ledger.NewTransactionLogPayload).Transaction.Postings) - require.Equal(t, tx1.Reference, lastLog.Data.(ledger.NewTransactionLogPayload).Transaction.Reference) - require.Equal(t, tx1.Timestamp, lastLog.Data.(ledger.NewTransactionLogPayload).Transaction.Timestamp) -} - -func TestReadLogWithIdempotencyKey(t *testing.T) { - t.Parallel() - store := newLedgerStore(t) - - logTx := ledger.NewTransactionLog( - ledger.NewTransaction(). 
- WithPostings( - ledger.NewPosting("world", "bank", "USD", big.NewInt(100)), - ), - map[string]metadata.Metadata{}, - ) - log := logTx.WithIdempotencyKey("test") - - ret := appendLog(t, store, log.ChainLog(nil)) - - lastLog, err := store.ReadLogWithIdempotencyKey(context.Background(), "test") - require.NoError(t, err) - require.NotNil(t, lastLog) - require.Equal(t, *ret, *lastLog) -} - -func TestGetLogs(t *testing.T) { - t.Parallel() - store := newLedgerStore(t) - now := time.Now() - - tx1 := ledger.ExpandedTransaction{ - Transaction: ledger.Transaction{ - ID: big.NewInt(0), - TransactionData: ledger.TransactionData{ - Postings: []ledger.Posting{ - { - Source: "world", - Destination: "central_bank", - Amount: big.NewInt(100), - Asset: "USD", - }, - }, - Reference: "tx1", - Timestamp: now.Add(-3 * time.Hour), - }, - }, - PostCommitVolumes: ledger.AccountsAssetsVolumes{ - "world": { - "USD": { - Input: big.NewInt(0), - Output: big.NewInt(100), - }, - }, - "central_bank": { - "USD": { - Input: big.NewInt(100), - Output: big.NewInt(0), - }, - }, - }, - PreCommitVolumes: ledger.AccountsAssetsVolumes{ - "world": { - "USD": { - Input: big.NewInt(0), - Output: big.NewInt(0), - }, - }, - "central_bank": { - "USD": { - Input: big.NewInt(0), - Output: big.NewInt(0), - }, - }, - }, - } - tx2 := ledger.ExpandedTransaction{ - Transaction: ledger.Transaction{ - ID: big.NewInt(1), - TransactionData: ledger.TransactionData{ - Postings: []ledger.Posting{ - { - Source: "world", - Destination: "central_bank", - Amount: big.NewInt(100), - Asset: "USD", - }, - }, - Reference: "tx2", - Timestamp: now.Add(-2 * time.Hour), - }, - }, - PostCommitVolumes: ledger.AccountsAssetsVolumes{ - "world": { - "USD": { - Input: big.NewInt(0), - Output: big.NewInt(200), - }, - }, - "central_bank": { - "USD": { - Input: big.NewInt(200), - Output: big.NewInt(0), - }, - }, - }, - PreCommitVolumes: ledger.AccountsAssetsVolumes{ - "world": { - "USD": { - Input: big.NewInt(0), - Output: big.NewInt(100), - }, - }, - "central_bank": { - "USD": { - Input: big.NewInt(100), - Output: big.NewInt(0), - }, - }, - }, - } - tx3 := ledger.ExpandedTransaction{ - Transaction: ledger.Transaction{ - ID: big.NewInt(2), - TransactionData: ledger.TransactionData{ - Postings: []ledger.Posting{ - { - Source: "central_bank", - Destination: "users:1", - Amount: big.NewInt(1), - Asset: "USD", - }, - }, - Reference: "tx3", - Metadata: metadata.Metadata{ - "priority": "high", - }, - Timestamp: now.Add(-1 * time.Hour), - }, - }, - PreCommitVolumes: ledger.AccountsAssetsVolumes{ - "central_bank": { - "USD": { - Input: big.NewInt(200), - Output: big.NewInt(0), - }, - }, - "users:1": { - "USD": { - Input: big.NewInt(0), - Output: big.NewInt(0), - }, - }, - }, - PostCommitVolumes: ledger.AccountsAssetsVolumes{ - "central_bank": { - "USD": { - Input: big.NewInt(200), - Output: big.NewInt(1), - }, - }, - "users:1": { - "USD": { - Input: big.NewInt(1), - Output: big.NewInt(0), - }, - }, - }, - } - - var previousLog *ledger.ChainedLog - for _, tx := range []ledger.ExpandedTransaction{tx1, tx2, tx3} { - newLog := ledger.NewTransactionLog(&tx.Transaction, map[string]metadata.Metadata{}). - WithDate(tx.Timestamp). 
- ChainLog(previousLog) - appendLog(t, store, newLog) - previousLog = newLog - } - - cursor, err := store.GetLogs(context.Background(), NewGetLogsQuery(NewPaginatedQueryOptions[any](nil))) - require.NoError(t, err) - require.Equal(t, bunpaginate.QueryDefaultPageSize, cursor.PageSize) - - require.Equal(t, 3, len(cursor.Data)) - require.Equal(t, big.NewInt(2), cursor.Data[0].ID) - require.Equal(t, tx3.Postings, cursor.Data[0].Data.(ledger.NewTransactionLogPayload).Transaction.Postings) - require.Equal(t, tx3.Reference, cursor.Data[0].Data.(ledger.NewTransactionLogPayload).Transaction.Reference) - require.Equal(t, tx3.Timestamp, cursor.Data[0].Data.(ledger.NewTransactionLogPayload).Transaction.Timestamp) - - cursor, err = store.GetLogs(context.Background(), NewGetLogsQuery(NewPaginatedQueryOptions[any](nil).WithPageSize(1))) - require.NoError(t, err) - // Should get only the first log. - require.Equal(t, 1, cursor.PageSize) - require.Equal(t, big.NewInt(2), cursor.Data[0].ID) - - cursor, err = store.GetLogs(context.Background(), NewGetLogsQuery(NewPaginatedQueryOptions[any](nil). - WithQueryBuilder(query.And( - query.Gte("date", now.Add(-2*time.Hour)), - query.Lt("date", now.Add(-time.Hour)), - )). - WithPageSize(10), - )) - require.NoError(t, err) - require.Equal(t, 10, cursor.PageSize) - // Should get only the second log, as StartTime is inclusive and EndTime exclusive. - require.Len(t, cursor.Data, 1) - require.Equal(t, big.NewInt(1), cursor.Data[0].ID) -} - -func TestGetBalance(t *testing.T) { - t.Parallel() - store := newLedgerStore(t) - - const ( - batchNumber = 100 - batchSize = 10 - input = 100 - output = 10 - ) - - logs := make([]*ledger.ChainedLog, 0) - var previousLog *ledger.ChainedLog - for i := 0; i < batchNumber; i++ { - for j := 0; j < batchSize; j++ { - chainedLog := ledger.NewTransactionLog( - ledger.NewTransaction().WithPostings( - ledger.NewPosting("world", fmt.Sprintf("account:%d", j), "EUR/2", big.NewInt(input)), - ledger.NewPosting(fmt.Sprintf("account:%d", j), "starbucks", "EUR/2", big.NewInt(output)), - ).WithIDUint64(uint64(i*batchSize+j)), - map[string]metadata.Metadata{}, - ).ChainLog(previousLog) - logs = append(logs, chainedLog) - previousLog = chainedLog - } - } - err := store.InsertLogs(context.Background(), logs...) 
- require.NoError(t, err) - - balance, err := store.GetBalance(context.Background(), "account:1", "EUR/2") - require.NoError(t, err) - require.Equal(t, big.NewInt((input-output)*batchNumber), balance) -} - -func BenchmarkLogsInsertion(b *testing.B) { - - ctx := logging.TestingContext() - store := newLedgerStore(b) - - b.ResetTimer() - - var lastLog *ledger.ChainedLog - for i := 0; i < b.N; i++ { - log := ledger.NewTransactionLog( - ledger.NewTransaction().WithPostings(ledger.NewPosting( - "world", fmt.Sprintf("user:%d", i), "USD/2", big.NewInt(1000), - )).WithID(big.NewInt(int64(i))), - map[string]metadata.Metadata{}, - ).ChainLog(lastLog) - lastLog = log - require.NoError(b, store.InsertLogs(ctx, log)) - } - b.StopTimer() -} - -func BenchmarkLogsInsertionReusingAccount(b *testing.B) { - - ctx := logging.TestingContext() - store := newLedgerStore(b) - - b.ResetTimer() - - var lastLog *ledger.ChainedLog - for i := 0; i < b.N; i += 2 { - batch := make([]*ledger.ChainedLog, 0) - appendLog := func(log *ledger.Log) *ledger.ChainedLog { - chainedLog := log.ChainLog(lastLog) - batch = append(batch, chainedLog) - lastLog = chainedLog - return chainedLog - } - require.NoError(b, store.InsertLogs(ctx, appendLog(ledger.NewTransactionLog( - ledger.NewTransaction().WithPostings(ledger.NewPosting( - "world", fmt.Sprintf("user:%d", i), "USD/2", big.NewInt(1000), - )).WithID(big.NewInt(int64(i))), - map[string]metadata.Metadata{}, - )))) - require.NoError(b, store.InsertLogs(ctx, appendLog(ledger.NewTransactionLog( - ledger.NewTransaction().WithPostings(ledger.NewPosting( - fmt.Sprintf("user:%d", i), "another:account", "USD/2", big.NewInt(1000), - )).WithID(big.NewInt(int64(i+1))), - map[string]metadata.Metadata{}, - )))) - } - b.StopTimer() -} diff --git a/internal/storage/ledgerstore/main_test.go b/internal/storage/ledgerstore/main_test.go deleted file mode 100644 index 36b06c8f9..000000000 --- a/internal/storage/ledgerstore/main_test.go +++ /dev/null @@ -1,106 +0,0 @@ -//go:build it - -package ledgerstore - -import ( - "context" - "database/sql" - "fmt" - "os" - "testing" - "time" - - "github.com/formancehq/go-libs/testing/docker" - "github.com/formancehq/go-libs/testing/utils" - - "github.com/formancehq/go-libs/bun/bunconnect" - - "github.com/uptrace/bun/dialect/pgdialect" - - "github.com/uptrace/bun" - - "github.com/formancehq/go-libs/logging" - "github.com/formancehq/go-libs/testing/platform/pgtesting" - ledger "github.com/formancehq/ledger/internal" - "github.com/google/uuid" - "github.com/stretchr/testify/require" -) - -var ( - srv *pgtesting.PostgresServer - bunDB *bun.DB -) - -func TestMain(m *testing.M) { - utils.WithTestMain(func(t *utils.TestingTForMain) int { - srv = pgtesting.CreatePostgresServer(t, docker.NewPool(t, logging.Testing())) - - db, err := sql.Open("postgres", srv.GetDSN()) - if err != nil { - logging.Error(err) - os.Exit(1) - } - - bunDB = bun.NewDB(db, pgdialect.New()) - - return m.Run() - }) -} - -type T interface { - require.TestingT - Helper() - Cleanup(func()) -} - -func newBucket(t T, hooks ...bun.QueryHook) *Bucket { - name := uuid.NewString() - ctx := logging.TestingContext() - - pgDatabase := srv.NewDatabase(t) - - connectionOptions := bunconnect.ConnectionOptions{ - DatabaseSourceName: pgDatabase.ConnString(), - MaxIdleConns: 40, - MaxOpenConns: 40, - ConnMaxIdleTime: time.Minute, - } - - bucket, err := ConnectToBucket(ctx, connectionOptions, name, hooks...) 
- require.NoError(t, err) - t.Cleanup(func() { - _ = bucket.Close() - }) - - require.NoError(t, bucket.Migrate(ctx)) - - return bucket -} - -func newLedgerStore(t T, hooks ...bun.QueryHook) *Store { - t.Helper() - - ledgerName := uuid.NewString() - ctx := logging.TestingContext() - - _, err := bunDB.ExecContext(ctx, fmt.Sprintf(`create schema if not exists "%s"`, ledgerName)) - require.NoError(t, err) - - t.Cleanup(func() { - _, err = bunDB.ExecContext(ctx, fmt.Sprintf(`drop schema "%s" cascade`, ledgerName)) - require.NoError(t, err) - }) - - bucket := newBucket(t, hooks...) - - store, err := bucket.CreateLedgerStore(ledgerName) - require.NoError(t, err) - - return store -} - -func appendLog(t *testing.T, store *Store, log *ledger.ChainedLog) *ledger.ChainedLog { - err := store.InsertLogs(context.Background(), log) - require.NoError(t, err) - return log -} diff --git a/internal/storage/ledgerstore/migrations_v1.go b/internal/storage/ledgerstore/migrations_v1.go deleted file mode 100644 index f44d9d1cb..000000000 --- a/internal/storage/ledgerstore/migrations_v1.go +++ /dev/null @@ -1,203 +0,0 @@ -package ledgerstore - -import ( - "context" - "encoding/json" - "fmt" - "math/big" - - "github.com/formancehq/go-libs/time" - - "github.com/formancehq/go-libs/bun/bunpaginate" - - ledger "github.com/formancehq/ledger/internal" - "github.com/lib/pq" - "github.com/pkg/errors" - "github.com/uptrace/bun" -) - -var ( - batchSize uint64 = 10000 - oldSchemaRenameSuffix = "_save_v2_0_0" -) - -type LogV1 struct { - ID uint64 `bun:"id,unique,type:bigint"` - Type string `bun:"type,type:varchar"` - Hash string `bun:"hash,type:varchar"` - Date time.Time `bun:"date,type:timestamptz"` - Data json.RawMessage `bun:"data,type:jsonb"` -} - -func readLogsRange( - ctx context.Context, - schema string, - sqlTx bun.Tx, - idMin, idMax uint64, -) ([]LogV1, error) { - rawLogs := make([]LogV1, 0) - if err := sqlTx. - NewSelect(). - Table(fmt.Sprintf(`%s.log`, schema)). - Where("id >= ?", idMin). - Where("id < ?", idMax). 
- Scan(ctx, &rawLogs); err != nil { - return nil, err - } - - return rawLogs, nil -} - -func convertMetadata(ret map[string]any) map[string]any { - oldMetadata := ret["metadata"].(map[string]any) - newMetadata := make(map[string]string) - for k, v := range oldMetadata { - switch v := v.(type) { - case map[string]any: - if len(v) == 2 && v["type"] != nil && v["value"] != nil { - switch v["type"] { - case "asset", "string", "account": - newMetadata[k] = v["value"].(string) - case "monetary": - newMetadata[k] = fmt.Sprintf("%s %d", - v["value"].(map[string]any)["asset"].(string), - int(v["value"].(map[string]any)["amount"].(float64)), - ) - case "portion": - newMetadata[k] = v["value"].(map[string]any)["specific"].(string) - case "number": - newMetadata[k] = fmt.Sprint(v["value"]) - } - } else { - newMetadata[k] = fmt.Sprint(v) - } - default: - newMetadata[k] = fmt.Sprint(v) - } - } - ret["metadata"] = newMetadata - - return ret -} - -func convertTransaction(ret map[string]any) map[string]any { - ret = convertMetadata(ret) - ret["id"] = ret["txid"] - delete(ret, "txid") - - return ret -} - -func (l *LogV1) ToLogsV2() (Logs, error) { - logType := ledger.LogTypeFromString(l.Type) - - ret := make(map[string]any) - if err := json.Unmarshal(l.Data, &ret); err != nil { - panic(err) - } - - var data any - switch logType { - case ledger.NewTransactionLogType: - data = map[string]any{ - "transaction": convertTransaction(ret), - "accountMetadata": map[string]any{}, - } - case ledger.SetMetadataLogType: - data = convertMetadata(ret) - case ledger.RevertedTransactionLogType: - data = l.Data - default: - panic("unknown type " + logType.String()) - } - - asJson, err := json.Marshal(data) - if err != nil { - panic(err) - } - - return Logs{ - ID: (*bunpaginate.BigInt)(big.NewInt(int64(l.ID))), - Type: logType.String(), - Hash: []byte(l.Hash), - Date: l.Date, - Data: asJson, - }, nil -} - -func batchLogs( - ctx context.Context, - schema string, - sqlTx bun.Tx, - logs []Logs, -) error { - // Beware: COPY query is not supported by bun if the pgx driver is used. 
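// Editor's note (not part of the patch): pq.CopyInSchema prepares a COPY ... FROM STDIN
// statement. With lib/pq, each ExecContext call carrying row values buffers one row,
// a final ExecContext with no arguments flushes the buffered rows, and Close ends the
// COPY -- which is the sequence batchLogs follows below.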
- stmt, err := sqlTx.PrepareContext(ctx, pq.CopyInSchema( - schema, - "logs", - "ledger", "id", "type", "hash", "date", "data", - )) - if err != nil { - return err - } - - for _, l := range logs { - _, err = stmt.ExecContext(ctx, schema, l.ID, l.Type, l.Hash, l.Date, RawMessage(l.Data)) - if err != nil { - return err - } - } - - _, err = stmt.ExecContext(ctx) - if err != nil { - return err - } - - err = stmt.Close() - if err != nil { - return err - } - - return nil -} - -func migrateLogs( - ctx context.Context, - schemaV1Name string, - schemaV2Name string, - sqlTx bun.Tx, -) error { - - var idMin uint64 - var idMax = idMin + batchSize - for { - logs, err := readLogsRange(ctx, schemaV1Name, sqlTx, idMin, idMax) - if err != nil { - return errors.Wrap(err, "reading logs from old table") - } - - if len(logs) == 0 { - break - } - - logsV2 := make([]Logs, 0, len(logs)) - for _, l := range logs { - logV2, err := l.ToLogsV2() - if err != nil { - return err - } - - logsV2 = append(logsV2, logV2) - } - - err = batchLogs(ctx, schemaV2Name, sqlTx, logsV2) - if err != nil { - return err - } - - idMin = idMax - idMax = idMin + batchSize - } - - return nil -} diff --git a/internal/storage/ledgerstore/store.go b/internal/storage/ledgerstore/store.go deleted file mode 100644 index 3504a4938..000000000 --- a/internal/storage/ledgerstore/store.go +++ /dev/null @@ -1,42 +0,0 @@ -package ledgerstore - -import ( - "context" - - "github.com/formancehq/go-libs/migrations" - - _ "github.com/jackc/pgx/v5/stdlib" - "github.com/uptrace/bun" -) - -type Store struct { - bucket *Bucket - - name string -} - -func (store *Store) Name() string { - return store.name -} - -func (store *Store) GetDB() *bun.DB { - return store.bucket.db -} - -func (store *Store) IsUpToDate(ctx context.Context) (bool, error) { - return store.bucket.IsUpToDate(ctx) -} - -func (store *Store) GetMigrationsInfo(ctx context.Context) ([]migrations.Info, error) { - return store.bucket.GetMigrationsInfo(ctx) -} - -func New( - bucket *Bucket, - name string, -) (*Store, error) { - return &Store{ - bucket: bucket, - name: name, - }, nil -} diff --git a/internal/storage/ledgerstore/store_benchmarks_test.go b/internal/storage/ledgerstore/store_benchmarks_test.go deleted file mode 100644 index 93b4a5a44..000000000 --- a/internal/storage/ledgerstore/store_benchmarks_test.go +++ /dev/null @@ -1,579 +0,0 @@ -//go:build it - -package ledgerstore - -import ( - "context" - "encoding/json" - "flag" - "fmt" - "math/big" - "os" - "testing" - "text/tabwriter" - - "github.com/formancehq/go-libs/time" - - "github.com/formancehq/go-libs/bun/bunexplain" - "github.com/formancehq/go-libs/pointer" - - "github.com/formancehq/go-libs/logging" - "github.com/formancehq/go-libs/metadata" - "github.com/formancehq/go-libs/query" - ledger "github.com/formancehq/ledger/internal" - "github.com/stretchr/testify/require" - "github.com/uptrace/bun" -) - -var nbTransactions = flag.Int("transactions", 10000, "number of transactions to create") -var batch = flag.Int("batch", 1000, "logs batching") -var ledgers = flag.Int("ledgers", 100, "number of ledger for multi ledgers benchmarks") - -type bunContextHook struct{} - -func (b bunContextHook) BeforeQuery(ctx context.Context, event *bun.QueryEvent) context.Context { - hooks := ctx.Value("hooks") - if hooks == nil { - return ctx - } - - for _, hook := range hooks.([]bun.QueryHook) { - ctx = hook.BeforeQuery(ctx, event) - } - - return ctx -} - -func (b bunContextHook) AfterQuery(ctx context.Context, event *bun.QueryEvent) { - hooks := 
ctx.Value("hooks") - if hooks == nil { - return - } - - for _, hook := range hooks.([]bun.QueryHook) { - hook.AfterQuery(ctx, event) - } - - return -} - -var _ bun.QueryHook = &bunContextHook{} - -func contextWithHook(ctx context.Context, hooks ...bun.QueryHook) context.Context { - return context.WithValue(ctx, "hooks", hooks) -} - -type scenarioInfo struct { - nbAccounts int -} - -type scenario struct { - name string - setup func(ctx context.Context, b *testing.B, store *Store) *scenarioInfo -} - -var now = time.Now() - -var scenarios = []scenario{ - { - name: "nominal", - setup: func(ctx context.Context, b *testing.B, store *Store) *scenarioInfo { - var lastLog *ledger.ChainedLog - for i := 0; i < *nbTransactions/(*batch); i++ { - logs := make([]*ledger.ChainedLog, 0) - appendLog := func(log *ledger.Log) { - chainedLog := log.ChainLog(lastLog) - logs = append(logs, chainedLog) - lastLog = chainedLog - } - for j := 0; j < (*batch); j += 2 { - provision := big.NewInt(10000) - itemPrice := provision.Div(provision, big.NewInt(2)) - fees := itemPrice.Div(itemPrice, big.NewInt(100)) // 1% - - appendLog(ledger.NewTransactionLog( - ledger.NewTransaction(). - WithPostings(ledger.NewPosting( - "world", fmt.Sprintf("player:%d", j/2), "USD/2", provision, - )). - WithID(big.NewInt(int64(i*(*batch)+j))). - WithDate(now.Add(time.Minute*time.Duration(i*(*batch)+j))), - map[string]metadata.Metadata{}, - )) - appendLog(ledger.NewTransactionLog( - ledger.NewTransaction(). - WithPostings( - ledger.NewPosting(fmt.Sprintf("player:%d", j/2), "seller", "USD/2", itemPrice), - ledger.NewPosting("seller", "fees", "USD/2", fees), - ). - WithID(big.NewInt(int64(i*(*batch)+j+1))). - WithDate(now.Add(time.Minute*time.Duration(i*(*batch)+j))), - map[string]metadata.Metadata{}, - )) - status := "pending" - if j%8 == 0 { - status = "terminated" - } - appendLog(ledger.NewSetMetadataLog(now.Add(time.Minute*time.Duration(i*(*batch)+j)), ledger.SetMetadataLogPayload{ - TargetType: ledger.MetaTargetTypeTransaction, - TargetID: big.NewInt(int64(i*(*batch) + j + 1)), - Metadata: map[string]string{ - "status": status, - }, - })) - } - require.NoError(b, store.InsertLogs(ctx, logs...)) - } - - nbAccounts := *batch / 2 - - for i := 0; i < nbAccounts; i++ { - lastLog = ledger.NewSetMetadataLog(now, ledger.SetMetadataLogPayload{ - TargetType: ledger.MetaTargetTypeAccount, - TargetID: fmt.Sprintf("player:%d", i), - Metadata: map[string]string{ - "level": fmt.Sprint(i % 4), - }, - }).ChainLog(lastLog) - require.NoError(b, store.InsertLogs(ctx, lastLog)) - } - - return &scenarioInfo{ - nbAccounts: nbAccounts, - } - }, - }, - { - name: "multi-ledger", - setup: func(ctx context.Context, b *testing.B, store *Store) *scenarioInfo { - var lastLog *ledger.ChainedLog - - nbAccounts := *batch / 2 - loadData := func(store *Store) { - for i := 0; i < *nbTransactions/(*batch); i++ { - logs := make([]*ledger.ChainedLog, 0) - appendLog := func(log *ledger.Log) { - chainedLog := log.ChainLog(lastLog) - logs = append(logs, chainedLog) - lastLog = chainedLog - } - for j := 0; j < (*batch); j += 2 { - provision := big.NewInt(10000) - itemPrice := provision.Div(provision, big.NewInt(2)) - fees := itemPrice.Div(itemPrice, big.NewInt(100)) // 1% - - appendLog(ledger.NewTransactionLog( - ledger.NewTransaction(). - WithPostings(ledger.NewPosting( - "world", fmt.Sprintf("player:%d", j/2), "USD/2", provision, - )). - WithID(big.NewInt(int64(i*(*batch)+j))). 
- WithDate(now.Add(time.Minute*time.Duration(i*(*batch)+j))), - map[string]metadata.Metadata{}, - )) - appendLog(ledger.NewTransactionLog( - ledger.NewTransaction(). - WithPostings( - ledger.NewPosting(fmt.Sprintf("player:%d", j/2), "seller", "USD/2", itemPrice), - ledger.NewPosting("seller", "fees", "USD/2", fees), - ). - WithID(big.NewInt(int64(i*(*batch)+j+1))). - WithDate(now.Add(time.Minute*time.Duration(i*(*batch)+j))), - map[string]metadata.Metadata{}, - )) - status := "pending" - if j%8 == 0 { - status = "terminated" - } - appendLog(ledger.NewSetMetadataLog(now.Add(time.Minute*time.Duration(i*(*batch)+j)), ledger.SetMetadataLogPayload{ - TargetType: ledger.MetaTargetTypeTransaction, - TargetID: big.NewInt(int64(i*(*batch) + j + 1)), - Metadata: map[string]string{ - "status": status, - }, - })) - } - require.NoError(b, store.InsertLogs(ctx, logs...)) - } - - for i := 0; i < nbAccounts; i++ { - lastLog = ledger.NewSetMetadataLog(now, ledger.SetMetadataLogPayload{ - TargetType: ledger.MetaTargetTypeAccount, - TargetID: fmt.Sprintf("player:%d", i), - Metadata: map[string]string{ - "level": fmt.Sprint(i % 4), - }, - }).ChainLog(lastLog) - require.NoError(b, store.InsertLogs(ctx, lastLog)) - } - } - - for i := 0; i < *ledgers; i++ { - store := newLedgerStore(b) - loadData(store) - } - loadData(store) - - return &scenarioInfo{ - nbAccounts: nbAccounts, - } - }, - }, -} - -func reportMetrics(ctx context.Context, b *testing.B, store *Store) { - type stat struct { - RelID string `bun:"relid"` - IndexRelID string `bun:"indexrelid"` - RelName string `bun:"relname"` - IndexRelName string `bun:"indexrelname"` - IdxScan int `bun:"idxscan"` - IdxTupRead int `bun:"idx_tup_read"` - IdxTupFetch int `bun:"idx_tup_fetch"` - } - ret := make([]stat, 0) - err := store.GetDB().NewSelect(). - Table("pg_stat_user_indexes"). - Where("schemaname = ?", store.name). - Scan(ctx, &ret) - require.NoError(b, err) - - tabWriter := tabwriter.NewWriter(os.Stderr, 8, 8, 0, '\t', 0) - defer func() { - require.NoError(b, tabWriter.Flush()) - }() - _, err = fmt.Fprintf(tabWriter, "IndexRelName\tIdxScan\tIdxTypRead\tIdxTupFetch\r\n") - require.NoError(b, err) - - _, err = fmt.Fprintf(tabWriter, "---\t---\r\n") - require.NoError(b, err) - - for _, s := range ret { - _, err := fmt.Fprintf(tabWriter, "%s\t%d\t%d\t%d\r\n", s.IndexRelName, s.IdxScan, s.IdxTupRead, s.IdxTupFetch) - require.NoError(b, err) - } -} - -func reportTableSizes(ctx context.Context, b *testing.B, store *Store) { - - tabWriter := tabwriter.NewWriter(os.Stderr, 12, 8, 0, '\t', 0) - defer func() { - require.NoError(b, tabWriter.Flush()) - }() - _, err := fmt.Fprintf(tabWriter, "Table\tTotal size\tTable size\tRelation size\tIndexes size\tMain size\tFSM size\tVM size\tInit size\r\n") - require.NoError(b, err) - - _, err = fmt.Fprintf(tabWriter, "---\t---\t---\t---\t---\t---\t---\t---\r\n") - require.NoError(b, err) - - for _, table := range []string{ - "transactions", "accounts", "moves", "logs", "transactions_metadata", "accounts_metadata", - } { - totalRelationSize := "" - err := store.GetDB().DB.QueryRowContext(ctx, fmt.Sprintf(`select pg_size_pretty(pg_total_relation_size('%s'))`, table)). - Scan(&totalRelationSize) - require.NoError(b, err) - - tableSize := "" - err = store.GetDB().DB.QueryRowContext(ctx, fmt.Sprintf(`select pg_size_pretty(pg_table_size('%s'))`, table)). - Scan(&tableSize) - require.NoError(b, err) - - relationSize := "" - err = store.GetDB().DB.QueryRowContext(ctx, fmt.Sprintf(`select pg_size_pretty(pg_relation_size('%s'))`, table)). 
- Scan(&relationSize) - require.NoError(b, err) - - indexesSize := "" - err = store.GetDB().DB.QueryRowContext(ctx, fmt.Sprintf(`select pg_size_pretty(pg_indexes_size('%s'))`, table)). - Scan(&indexesSize) - require.NoError(b, err) - - mainSize := "" - err = store.GetDB().DB.QueryRowContext(ctx, fmt.Sprintf(`select pg_size_pretty(pg_relation_size('%s', 'main'))`, table)). - Scan(&mainSize) - require.NoError(b, err) - - fsmSize := "" - err = store.GetDB().DB.QueryRowContext(ctx, fmt.Sprintf(`select pg_size_pretty(pg_relation_size('%s', 'fsm'))`, table)). - Scan(&fsmSize) - require.NoError(b, err) - - vmSize := "" - err = store.GetDB().DB.QueryRowContext(ctx, fmt.Sprintf(`select pg_size_pretty(pg_relation_size('%s', 'vm'))`, table)). - Scan(&vmSize) - require.NoError(b, err) - - initSize := "" - err = store.GetDB().DB.QueryRowContext(ctx, fmt.Sprintf(`select pg_size_pretty(pg_relation_size('%s', 'init'))`, table)). - Scan(&initSize) - require.NoError(b, err) - - _, err = fmt.Fprintf(tabWriter, "%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\r\n", - table, totalRelationSize, tableSize, relationSize, indexesSize, mainSize, fsmSize, vmSize, initSize) - require.NoError(b, err) - } -} - -func BenchmarkList(b *testing.B) { - - ctx := logging.TestingContext() - - for _, scenario := range scenarios { - b.Run(scenario.name, func(b *testing.B) { - store := newLedgerStore(b, &bunContextHook{}) - info := scenario.setup(ctx, b, store) - - defer func() { - if testing.Verbose() { - reportMetrics(ctx, b, store) - reportTableSizes(ctx, b, store) - } - }() - - _, err := store.GetDB().Exec("VACUUM FULL ANALYZE") - require.NoError(b, err) - - runAllWithPIT := func(b *testing.B, pit *time.Time) { - b.Run("transactions", func(b *testing.B) { - benchmarksReadTransactions(b, ctx, store, info, pit) - }) - b.Run("accounts", func(b *testing.B) { - benchmarksReadAccounts(b, ctx, store, pit) - }) - b.Run("aggregates", func(b *testing.B) { - benchmarksGetAggregatedBalances(b, ctx, store, pit) - }) - } - runAllWithPIT(b, nil) - b.Run("using pit", func(b *testing.B) { - // Use pit with the more recent, this way we force the storage to use a join - // Doing this allowing to test the worst case - runAllWithPIT(b, pointer.For(now.Add(time.Minute*time.Duration(*nbTransactions)))) - }) - }) - } -} - -func benchmarksReadTransactions(b *testing.B, ctx context.Context, store *Store, info *scenarioInfo, pit *time.Time) { - type testCase struct { - name string - query query.Builder - allowEmptyResponse bool - expandVolumes bool - expandEffectiveVolumes bool - } - - testCases := []testCase{ - { - name: "no query", - }, - { - name: "using an exact address", - query: query.Match("account", fmt.Sprintf("player:%d", info.nbAccounts-1)), // Last inserted account - }, - { - name: "using an address segment", - query: query.Match("account", fmt.Sprintf(":%d", info.nbAccounts-1)), - }, - { - name: "using a metadata metadata", - query: query.Match("metadata[status]", "terminated"), - }, - { - name: "using non existent account by exact address", - query: query.Match("account", fmt.Sprintf("player:%d", info)), - allowEmptyResponse: true, - }, - { - name: "using non existent metadata", - query: query.Match("metadata[foo]", "bar"), - allowEmptyResponse: true, - }, - { - name: "with expand volumes", - expandVolumes: true, - }, - { - name: "with expand effective volumes", - expandEffectiveVolumes: true, - }, - } - - for _, t := range testCases { - t := t - b.Run(t.name, func(b *testing.B) { - var q GetTransactionsQuery - for i := 0; i < b.N; i++ { - q = 
NewGetTransactionsQuery(PaginatedQueryOptions[PITFilterWithVolumes]{ - PageSize: 100, - QueryBuilder: t.query, - Options: PITFilterWithVolumes{ - PITFilter: PITFilter{ - PIT: pit, - }, - }, - }) - if t.expandVolumes { - q = q.WithExpandVolumes() - } - if t.expandEffectiveVolumes { - q = q.WithExpandEffectiveVolumes() - } - ret, err := store.GetTransactions(ctx, q) - require.NoError(b, err) - if !t.allowEmptyResponse && len(ret.Data) == 0 { - require.Fail(b, "response should not be empty") - } - } - - explainRequest(ctx, b, func(ctx context.Context) { - _, err := store.GetTransactions(ctx, q) - require.NoError(b, err) - }) - }) - } -} - -func benchmarksReadAccounts(b *testing.B, ctx context.Context, store *Store, pit *time.Time) { - type testCase struct { - name string - query query.Builder - allowEmptyResponse bool - expandVolumes, expandEffectiveVolumes bool - } - - testCases := []testCase{ - { - name: "with no query", - }, - { - name: "filtering on address segment", - query: query.Match("address", ":0"), - }, - { - name: "filtering on metadata", - query: query.Match("metadata[level]", "2"), - }, - { - name: "with expand volumes", - expandVolumes: true, - }, - { - name: "with expand effective volumes", - expandEffectiveVolumes: true, - }, - } - - for _, t := range testCases { - t := t - b.Run(t.name, func(b *testing.B) { - var q GetAccountsQuery - for i := 0; i < b.N; i++ { - q = NewGetAccountsQuery(PaginatedQueryOptions[PITFilterWithVolumes]{ - PageSize: 100, - QueryBuilder: t.query, - Options: PITFilterWithVolumes{ - PITFilter: PITFilter{ - PIT: pit, - }, - }, - }) - if t.expandVolumes { - q = q.WithExpandVolumes() - } - if t.expandEffectiveVolumes { - q = q.WithExpandEffectiveVolumes() - } - ret, err := store.GetAccountsWithVolumes(ctx, q) - require.NoError(b, err) - if !t.allowEmptyResponse && len(ret.Data) == 0 { - require.Fail(b, "response should not be empty") - } - - } - - explainRequest(ctx, b, func(ctx context.Context) { - _, err := store.GetAccountsWithVolumes(ctx, q) - require.NoError(b, err) - }) - }) - } -} - -func benchmarksGetAggregatedBalances(b *testing.B, ctx context.Context, store *Store, pit *time.Time) { - type testCase struct { - name string - query query.Builder - allowEmptyResponse bool - } - - testCases := []testCase{ - { - name: "with no query", - }, - { - name: "filtering on exact account address", - query: query.Match("address", "player:0"), - }, - { - name: "filtering on account address segment", - query: query.Match("address", ":0"), - }, - { - name: "filtering on metadata", - query: query.Match("metadata[level]", "2"), - }, - } - - for _, t := range testCases { - t := t - b.Run(t.name, func(b *testing.B) { - var q GetAggregatedBalanceQuery - for i := 0; i < b.N; i++ { - q = NewGetAggregatedBalancesQuery(PITFilter{ - PIT: pit, - }, t.query, false) - ret, err := store.GetAggregatedBalances(ctx, q) - require.NoError(b, err) - if !t.allowEmptyResponse && len(ret) == 0 { - require.Fail(b, "response should not be empty") - } - } - - explainRequest(ctx, b, func(ctx context.Context) { - _, err := store.GetAggregatedBalances(ctx, q) - require.NoError(b, err) - }) - }) - } -} - -func explainRequest(ctx context.Context, b *testing.B, f func(ctx context.Context)) { - var ( - explained string - jsonExplained string - ) - additionalHooks := make([]bun.QueryHook, 0) - if testing.Verbose() { - additionalHooks = append(additionalHooks, bunexplain.NewExplainHook(bunexplain.WithListener(func(data string) { - explained = data - }))) - } - additionalHooks = 
append(additionalHooks, bunexplain.NewExplainHook( - bunexplain.WithListener(func(data string) { - jsonExplained = data - }), - bunexplain.WithJSONFormat(), - )) - ctx = contextWithHook(ctx, additionalHooks...) - f(ctx) - - if testing.Verbose() { - fmt.Println(explained) - } - jsonQueryPlan := make([]any, 0) - - require.NoError(b, json.Unmarshal([]byte(jsonExplained), &jsonQueryPlan)) - b.ReportMetric(jsonQueryPlan[0].(map[string]any)["Plan"].(map[string]any)["Total Cost"].(float64), "cost") -} diff --git a/internal/storage/ledgerstore/store_test.go b/internal/storage/ledgerstore/store_test.go deleted file mode 100644 index b3f2cc43f..000000000 --- a/internal/storage/ledgerstore/store_test.go +++ /dev/null @@ -1,21 +0,0 @@ -//go:build it - -package ledgerstore - -import ( - "context" - - "github.com/formancehq/go-libs/collectionutils" - "github.com/formancehq/go-libs/metadata" - ledger "github.com/formancehq/ledger/internal" -) - -// TODO: remove that -func insertTransactions(ctx context.Context, s *Store, txs ...ledger.Transaction) error { - var previous *ledger.ChainedLog - logs := collectionutils.Map(txs, func(from ledger.Transaction) *ledger.ChainedLog { - previous = ledger.NewTransactionLog(&from, map[string]metadata.Metadata{}).ChainLog(previous) - return previous - }) - return s.InsertLogs(ctx, logs...) -} diff --git a/internal/storage/ledgerstore/transactions.go b/internal/storage/ledgerstore/transactions.go deleted file mode 100644 index 6157388fd..000000000 --- a/internal/storage/ledgerstore/transactions.go +++ /dev/null @@ -1,445 +0,0 @@ -package ledgerstore - -import ( - "context" - "database/sql/driver" - "encoding/json" - "errors" - "fmt" - "math/big" - "regexp" - "strings" - - "github.com/formancehq/go-libs/pointer" - - "github.com/formancehq/go-libs/time" - - "github.com/formancehq/go-libs/bun/bunpaginate" - - "github.com/formancehq/go-libs/metadata" - "github.com/formancehq/go-libs/query" - ledger "github.com/formancehq/ledger/internal" - "github.com/uptrace/bun" -) - -const ( - MovesTableName = "moves" -) - -var ( - metadataRegex = regexp.MustCompile("metadata\\[(.+)\\]") -) - -type Transaction struct { - bun.BaseModel `bun:"transactions,alias:transactions"` - - ID *bunpaginate.BigInt `bun:"id,type:numeric"` - Timestamp time.Time `bun:"timestamp,type:timestamp without time zone"` - Reference string `bun:"reference,type:varchar,unique,nullzero"` - Postings []ledger.Posting `bun:"postings,type:jsonb"` - Metadata metadata.Metadata `bun:"metadata,type:jsonb,default:'{}'"` - RevertedAt *time.Time `bun:"reverted_at"` - LastUpdate *time.Time `bun:"last_update"` -} - -func (t *Transaction) toCore() *ledger.Transaction { - return &ledger.Transaction{ - TransactionData: ledger.TransactionData{ - Reference: t.Reference, - Metadata: t.Metadata, - Timestamp: t.Timestamp, - Postings: t.Postings, - }, - ID: (*big.Int)(t.ID), - Reverted: t.RevertedAt != nil && !t.RevertedAt.IsZero(), - } -} - -type ExpandedTransaction struct { - Transaction - bun.BaseModel `bun:"transactions,alias:transactions"` - - ID *bunpaginate.BigInt `bun:"id,type:numeric"` - Timestamp time.Time `bun:"timestamp,type:timestamp without time zone"` - Reference string `bun:"reference,type:varchar,unique,nullzero"` - Postings []ledger.Posting `bun:"postings,type:jsonb"` - Metadata metadata.Metadata `bun:"metadata,type:jsonb,default:'{}'"` - PostCommitEffectiveVolumes ledger.AccountsAssetsVolumes `bun:"post_commit_effective_volumes,type:jsonb"` - PostCommitVolumes ledger.AccountsAssetsVolumes 
`bun:"post_commit_volumes,type:jsonb"` - RevertedAt *time.Time `bun:"reverted_at"` - LastUpdate *time.Time `bun:"last_update"` -} - -func (t *ExpandedTransaction) toCore() *ledger.ExpandedTransaction { - var ( - preCommitEffectiveVolumes ledger.AccountsAssetsVolumes - preCommitVolumes ledger.AccountsAssetsVolumes - ) - if t.PostCommitEffectiveVolumes != nil { - preCommitEffectiveVolumes = t.PostCommitEffectiveVolumes.Copy() - for _, posting := range t.Postings { - preCommitEffectiveVolumes.AddOutput(posting.Source, posting.Asset, big.NewInt(0).Neg(posting.Amount)) - preCommitEffectiveVolumes.AddInput(posting.Destination, posting.Asset, big.NewInt(0).Neg(posting.Amount)) - } - } - if t.PostCommitVolumes != nil { - preCommitVolumes = t.PostCommitVolumes.Copy() - for _, posting := range t.Postings { - preCommitVolumes.AddOutput(posting.Source, posting.Asset, big.NewInt(0).Neg(posting.Amount)) - preCommitVolumes.AddInput(posting.Destination, posting.Asset, big.NewInt(0).Neg(posting.Amount)) - } - } - return &ledger.ExpandedTransaction{ - Transaction: ledger.Transaction{ - TransactionData: ledger.TransactionData{ - Reference: t.Reference, - Metadata: t.Metadata, - Timestamp: t.Timestamp, - Postings: t.Postings, - }, - ID: (*big.Int)(t.ID), - Reverted: t.RevertedAt != nil && !t.RevertedAt.IsZero(), - }, - PreCommitEffectiveVolumes: preCommitEffectiveVolumes, - PostCommitEffectiveVolumes: t.PostCommitEffectiveVolumes, - PreCommitVolumes: preCommitVolumes, - PostCommitVolumes: t.PostCommitVolumes, - } -} - -type account string - -var _ driver.Valuer = account("") - -func (m1 account) Value() (driver.Value, error) { - ret, err := json.Marshal(strings.Split(string(m1), ":")) - if err != nil { - return nil, err - } - return string(ret), nil -} - -// Scan - Implement the database/sql scanner interface -func (m1 *account) Scan(value interface{}) error { - if value == nil { - return nil - } - v, err := driver.String.ConvertValue(value) - if err != nil { - return err - } - - array := make([]string, 0) - switch vv := v.(type) { - case []uint8: - err = json.Unmarshal(vv, &array) - case string: - err = json.Unmarshal([]byte(vv), &array) - default: - panic("not handled type") - } - if err != nil { - return err - } - *m1 = account(strings.Join(array, ":")) - return nil -} - -func (store *Store) buildTransactionQuery(p PITFilterWithVolumes, query *bun.SelectQuery) *bun.SelectQuery { - - selectMetadata := query.NewSelect(). - Table("transactions_metadata"). - Where("transactions.seq = transactions_metadata.transactions_seq"). - Order("revision desc"). - Limit(1) - - if p.PIT != nil && !p.PIT.IsZero() { - selectMetadata = selectMetadata.Where("date <= ?", p.PIT) - } - - query = query. - ModelTableExpr("transactions"). - Where("transactions.ledger = ?", store.name) - - if p.PIT != nil && !p.PIT.IsZero() { - query = query. - Where("timestamp <= ?", p.PIT). - ColumnExpr("transactions.*"). - Column("transactions_metadata.metadata"). - Join(fmt.Sprintf(`left join lateral (%s) as transactions_metadata on true`, selectMetadata.String())). 
- ColumnExpr(fmt.Sprintf("case when reverted_at is not null and reverted_at > '%s' then null else reverted_at end", p.PIT.Format(time.DateFormat))) - } else { - query = query.Column("transactions.metadata", "transactions.*") - } - - if p.ExpandEffectiveVolumes { - query = query.ColumnExpr("get_aggregated_effective_volumes_for_transaction(?, transactions.seq) as post_commit_effective_volumes", store.name) - } - if p.ExpandVolumes { - query = query.ColumnExpr("get_aggregated_volumes_for_transaction(?, transactions.seq) as post_commit_volumes", store.name) - } - return query -} - -func (store *Store) transactionQueryContext(qb query.Builder, q GetTransactionsQuery) (string, []any, error) { - - return qb.Build(query.ContextFn(func(key, operator string, value any) (string, []any, error) { - switch { - case key == "reference" || key == "timestamp": - return fmt.Sprintf("%s %s ?", key, query.DefaultComparisonOperatorsMapping[operator]), []any{value}, nil - case key == "reverted": - if operator != "$match" { - return "", nil, newErrInvalidQuery("'reverted' column can only be used with $match") - } - switch value := value.(type) { - case bool: - ret := "reverted_at is" - if value { - ret += " not" - } - return ret + " null", nil, nil - default: - return "", nil, newErrInvalidQuery("'reverted' can only be used with bool value") - } - case key == "account": - // TODO: Should allow comparison operator only if segments not used - if operator != "$match" { - return "", nil, newErrInvalidQuery("'account' column can only be used with $match") - } - switch address := value.(type) { - case string: - return filterAccountAddressOnTransactions(address, true, true), nil, nil - default: - return "", nil, newErrInvalidQuery("unexpected type %T for column 'account'", address) - } - case key == "source": - // TODO: Should allow comparison operator only if segments not used - if operator != "$match" { - return "", nil, errors.New("'source' column can only be used with $match") - } - switch address := value.(type) { - case string: - return filterAccountAddressOnTransactions(address, true, false), nil, nil - default: - return "", nil, newErrInvalidQuery("unexpected type %T for column 'source'", address) - } - case key == "destination": - // TODO: Should allow comparison operator only if segments not used - if operator != "$match" { - return "", nil, errors.New("'destination' column can only be used with $match") - } - switch address := value.(type) { - case string: - return filterAccountAddressOnTransactions(address, false, true), nil, nil - default: - return "", nil, newErrInvalidQuery("unexpected type %T for column 'destination'", address) - } - case metadataRegex.Match([]byte(key)): - if operator != "$match" { - return "", nil, newErrInvalidQuery("'account' column can only be used with $match") - } - match := metadataRegex.FindAllStringSubmatch(key, 3) - - key := "metadata" - if q.Options.Options.PIT != nil && !q.Options.Options.PIT.IsZero() { - key = "transactions_metadata.metadata" - } - - return key + " @> ?", []any{map[string]any{ - match[0][1]: value, - }}, nil - - case key == "metadata": - if operator != "$exists" { - return "", nil, newErrInvalidQuery("'metadata' key filter can only be used with $exists") - } - if q.Options.Options.PIT != nil && !q.Options.Options.PIT.IsZero() { - key = "transactions_metadata.metadata" - } - - return fmt.Sprintf("%s -> ? 
IS NOT NULL", key), []any{value}, nil - default: - return "", nil, newErrInvalidQuery("unknown key '%s' when building query", key) - } - })) -} - -func (store *Store) buildTransactionListQuery(selectQuery *bun.SelectQuery, q PaginatedQueryOptions[PITFilterWithVolumes], where string, args []any) *bun.SelectQuery { - - selectQuery = store.buildTransactionQuery(q.Options, selectQuery) - if where != "" { - return selectQuery.Where(where, args...) - } - - return selectQuery -} - -func (store *Store) GetTransactions(ctx context.Context, q GetTransactionsQuery) (*bunpaginate.Cursor[ledger.ExpandedTransaction], error) { - - var ( - where string - args []any - err error - ) - if q.Options.QueryBuilder != nil { - where, args, err = store.transactionQueryContext(q.Options.QueryBuilder, q) - if err != nil { - return nil, err - } - } - - transactions, err := paginateWithColumn[PaginatedQueryOptions[PITFilterWithVolumes], ExpandedTransaction](store, ctx, - (*bunpaginate.ColumnPaginatedQuery[PaginatedQueryOptions[PITFilterWithVolumes]])(&q), - func(query *bun.SelectQuery) *bun.SelectQuery { - return store.buildTransactionListQuery(query, q.Options, where, args) - }, - ) - if err != nil { - return nil, err - } - - return bunpaginate.MapCursor(transactions, func(from ExpandedTransaction) ledger.ExpandedTransaction { - return *from.toCore() - }), nil -} - -func (store *Store) CountTransactions(ctx context.Context, q GetTransactionsQuery) (int, error) { - - var ( - where string - args []any - err error - ) - - if q.Options.QueryBuilder != nil { - where, args, err = store.transactionQueryContext(q.Options.QueryBuilder, q) - if err != nil { - return 0, err - } - } - - return count[ExpandedTransaction](store, true, ctx, func(query *bun.SelectQuery) *bun.SelectQuery { - return store.buildTransactionListQuery(query, q.Options, where, args) - }) -} - -func (store *Store) GetTransactionWithVolumes(ctx context.Context, filter GetTransactionQuery) (*ledger.ExpandedTransaction, error) { - ret, err := fetch[*ExpandedTransaction](store, true, ctx, - func(query *bun.SelectQuery) *bun.SelectQuery { - return store.buildTransactionQuery(filter.PITFilterWithVolumes, query). - Where("transactions.id = ?", filter.ID). - Limit(1) - }) - if err != nil { - return nil, err - } - - return ret.toCore(), nil -} - -func (store *Store) GetTransaction(ctx context.Context, txId *big.Int) (*ledger.Transaction, error) { - tx, err := fetch[*Transaction](store, true, ctx, - func(query *bun.SelectQuery) *bun.SelectQuery { - return query. - ColumnExpr(`transactions.id, transactions.reference, transactions.postings, transactions.timestamp, transactions.reverted_at, tm.metadata`). - Join("left join transactions_metadata tm on tm.transactions_seq = transactions.seq"). - Where("transactions.id = ?", (*bunpaginate.BigInt)(txId)). - Where("transactions.ledger = ?", store.name). - Order("tm.revision desc"). - Limit(1) - }) - if err != nil { - return nil, err - } - - return tx.toCore(), nil -} - -func (store *Store) GetTransactionByReference(ctx context.Context, ref string) (*ledger.ExpandedTransaction, error) { - ret, err := fetch[*ExpandedTransaction](store, true, ctx, - func(query *bun.SelectQuery) *bun.SelectQuery { - return query. - ColumnExpr(`transactions.id, transactions.reference, transactions.postings, transactions.timestamp, transactions.reverted_at, tm.metadata`). - Join("left join transactions_metadata tm on tm.transactions_seq = transactions.seq"). - Where("transactions.reference = ?", ref). 
- Where("transactions.ledger = ?", store.name). - Order("tm.revision desc"). - Limit(1) - }) - if err != nil { - return nil, err - } - - return ret.toCore(), nil -} - -func (store *Store) GetLastTransaction(ctx context.Context) (*ledger.ExpandedTransaction, error) { - ret, err := fetch[*ExpandedTransaction](store, true, ctx, - func(query *bun.SelectQuery) *bun.SelectQuery { - return query. - ColumnExpr(`transactions.id, transactions.reference, transactions.postings, transactions.timestamp, transactions.reverted_at, tm.metadata`). - Join("left join transactions_metadata tm on tm.transactions_seq = transactions.seq"). - Order("transactions.seq desc", "tm.revision desc"). - Where("transactions.ledger = ?", store.name). - Limit(1) - }) - if err != nil { - return nil, err - } - - return ret.toCore(), nil -} - -type GetTransactionsQuery bunpaginate.ColumnPaginatedQuery[PaginatedQueryOptions[PITFilterWithVolumes]] - -func (q GetTransactionsQuery) WithExpandVolumes() GetTransactionsQuery { - q.Options.Options.ExpandVolumes = true - - return q -} - -func (q GetTransactionsQuery) WithExpandEffectiveVolumes() GetTransactionsQuery { - q.Options.Options.ExpandEffectiveVolumes = true - - return q -} - -func (q GetTransactionsQuery) WithColumn(column string) GetTransactionsQuery { - ret := pointer.For((bunpaginate.ColumnPaginatedQuery[PaginatedQueryOptions[PITFilterWithVolumes]])(q)) - ret = ret.WithColumn(column) - - return GetTransactionsQuery(*ret) -} - -func NewGetTransactionsQuery(options PaginatedQueryOptions[PITFilterWithVolumes]) GetTransactionsQuery { - return GetTransactionsQuery{ - PageSize: options.PageSize, - Column: "id", - Order: bunpaginate.OrderDesc, - Options: options, - } -} - -type GetTransactionQuery struct { - PITFilterWithVolumes - ID *big.Int -} - -func (q GetTransactionQuery) WithExpandVolumes() GetTransactionQuery { - q.ExpandVolumes = true - - return q -} - -func (q GetTransactionQuery) WithExpandEffectiveVolumes() GetTransactionQuery { - q.ExpandEffectiveVolumes = true - - return q -} - -func NewGetTransactionQuery(id *big.Int) GetTransactionQuery { - return GetTransactionQuery{ - PITFilterWithVolumes: PITFilterWithVolumes{}, - ID: id, - } -} diff --git a/internal/storage/ledgerstore/transactions_test.go b/internal/storage/ledgerstore/transactions_test.go deleted file mode 100644 index 62a2a0142..000000000 --- a/internal/storage/ledgerstore/transactions_test.go +++ /dev/null @@ -1,1169 +0,0 @@ -//go:build it - -package ledgerstore - -import ( - "context" - "math/big" - "testing" - - "github.com/formancehq/go-libs/time" - - "github.com/formancehq/go-libs/bun/bunpaginate" - - "github.com/pkg/errors" - - "github.com/formancehq/go-libs/logging" - "github.com/formancehq/go-libs/pointer" - - "github.com/formancehq/go-libs/metadata" - "github.com/formancehq/go-libs/query" - ledger "github.com/formancehq/ledger/internal" - internaltesting "github.com/formancehq/ledger/internal/testing" - "github.com/stretchr/testify/require" -) - -func expandLogs(logs ...*ledger.Log) []ledger.ExpandedTransaction { - ret := make([]ledger.ExpandedTransaction, 0) - accumulatedVolumes := ledger.AccountsAssetsVolumes{} - - appendTx := func(tx *ledger.Transaction) { - expandedTx := &ledger.ExpandedTransaction{ - Transaction: *tx, - } - for _, posting := range tx.Postings { - expandedTx.PreCommitVolumes.AddInput(posting.Destination, posting.Asset, accumulatedVolumes.GetVolumes(posting.Destination, posting.Asset).Input) - expandedTx.PreCommitVolumes.AddOutput(posting.Destination, posting.Asset, 
accumulatedVolumes.GetVolumes(posting.Destination, posting.Asset).Output) - expandedTx.PreCommitVolumes.AddOutput(posting.Source, posting.Asset, accumulatedVolumes.GetVolumes(posting.Source, posting.Asset).Output) - expandedTx.PreCommitVolumes.AddInput(posting.Source, posting.Asset, accumulatedVolumes.GetVolumes(posting.Source, posting.Asset).Input) - } - for _, posting := range tx.Postings { - accumulatedVolumes.AddOutput(posting.Source, posting.Asset, posting.Amount) - accumulatedVolumes.AddInput(posting.Destination, posting.Asset, posting.Amount) - } - for _, posting := range tx.Postings { - expandedTx.PostCommitVolumes.AddInput(posting.Destination, posting.Asset, accumulatedVolumes.GetVolumes(posting.Destination, posting.Asset).Input) - expandedTx.PostCommitVolumes.AddOutput(posting.Destination, posting.Asset, accumulatedVolumes.GetVolumes(posting.Destination, posting.Asset).Output) - expandedTx.PostCommitVolumes.AddOutput(posting.Source, posting.Asset, accumulatedVolumes.GetVolumes(posting.Source, posting.Asset).Output) - expandedTx.PostCommitVolumes.AddInput(posting.Source, posting.Asset, accumulatedVolumes.GetVolumes(posting.Source, posting.Asset).Input) - } - ret = append(ret, *expandedTx) - } - - for _, log := range logs { - switch payload := log.Data.(type) { - case ledger.NewTransactionLogPayload: - appendTx(payload.Transaction) - case ledger.RevertedTransactionLogPayload: - appendTx(payload.RevertTransaction) - ret[payload.RevertedTransactionID.Uint64()].Reverted = true - case ledger.SetMetadataLogPayload: - ret[payload.TargetID.(*big.Int).Uint64()].Metadata = ret[payload.TargetID.(*big.Int).Uint64()].Metadata.Merge(payload.Metadata) - } - } - - return ret -} - -func Reverse[T any](values ...T) []T { - ret := make([]T, len(values)) - for i := 0; i < len(values)/2; i++ { - ret[i], ret[len(values)-i-1] = values[len(values)-i-1], values[i] - } - if len(values)%2 == 1 { - ret[(len(values)-1)/2] = values[(len(values)-1)/2] - } - return ret -} - -func TestGetTransactionWithVolumes(t *testing.T) { - t.Parallel() - store := newLedgerStore(t) - now := time.Now() - ctx := logging.TestingContext() - - tx1 := ledger.ExpandedTransaction{ - Transaction: ledger.Transaction{ - ID: big.NewInt(0), - TransactionData: ledger.TransactionData{ - Postings: []ledger.Posting{ - { - Source: "world", - Destination: "central_bank", - Amount: big.NewInt(100), - Asset: "USD", - }, - }, - Reference: "tx1", - Timestamp: now.Add(-3 * time.Hour), - }, - }, - PostCommitVolumes: ledger.AccountsAssetsVolumes{ - "world": { - "USD": { - Input: big.NewInt(0), - Output: big.NewInt(100), - }, - }, - "central_bank": { - "USD": { - Input: big.NewInt(100), - Output: big.NewInt(0), - }, - }, - }, - PreCommitVolumes: ledger.AccountsAssetsVolumes{ - "world": { - "USD": { - Input: big.NewInt(0), - Output: big.NewInt(0), - }, - }, - "central_bank": { - "USD": { - Input: big.NewInt(0), - Output: big.NewInt(0), - }, - }, - }, - } - tx2 := ledger.ExpandedTransaction{ - Transaction: ledger.Transaction{ - ID: big.NewInt(1), - TransactionData: ledger.TransactionData{ - Postings: []ledger.Posting{ - { - Source: "world", - Destination: "central_bank", - Amount: big.NewInt(100), - Asset: "USD", - }, - }, - Reference: "tx2", - Timestamp: now.Add(-2 * time.Hour), - }, - }, - PostCommitVolumes: ledger.AccountsAssetsVolumes{ - "world": { - "USD": { - Input: big.NewInt(0), - Output: big.NewInt(200), - }, - }, - "central_bank": { - "USD": { - Input: big.NewInt(200), - Output: big.NewInt(0), - }, - }, - }, - PreCommitVolumes: 
ledger.AccountsAssetsVolumes{ - "world": { - "USD": { - Input: big.NewInt(0), - Output: big.NewInt(100), - }, - }, - "central_bank": { - "USD": { - Input: big.NewInt(100), - Output: big.NewInt(0), - }, - }, - }, - } - - require.NoError(t, insertTransactions(ctx, store, tx1.Transaction, tx2.Transaction)) - - tx, err := store.GetTransactionWithVolumes(ctx, NewGetTransactionQuery(tx1.ID). - WithExpandVolumes(). - WithExpandEffectiveVolumes()) - require.NoError(t, err) - require.Equal(t, tx1.Postings, tx.Postings) - require.Equal(t, tx1.Reference, tx.Reference) - require.Equal(t, tx1.Timestamp, tx.Timestamp) - internaltesting.RequireEqual(t, ledger.AccountsAssetsVolumes{ - "world": { - "USD": { - Input: big.NewInt(0), - Output: big.NewInt(100), - }, - }, - "central_bank": { - "USD": { - Input: big.NewInt(100), - Output: big.NewInt(0), - }, - }, - }, tx.PostCommitVolumes) - internaltesting.RequireEqual(t, ledger.AccountsAssetsVolumes{ - "world": { - "USD": { - Input: big.NewInt(0), - Output: big.NewInt(0), - }, - }, - "central_bank": { - "USD": { - Input: big.NewInt(0), - Output: big.NewInt(0), - }, - }, - }, tx.PreCommitVolumes) - - tx, err = store.GetTransactionWithVolumes(ctx, NewGetTransactionQuery(tx2.ID). - WithExpandVolumes(). - WithExpandEffectiveVolumes()) - require.Equal(t, tx2.Postings, tx.Postings) - require.Equal(t, tx2.Reference, tx.Reference) - require.Equal(t, tx2.Timestamp, tx.Timestamp) - internaltesting.RequireEqual(t, ledger.AccountsAssetsVolumes{ - "world": { - "USD": { - Input: big.NewInt(0), - Output: big.NewInt(200), - }, - }, - "central_bank": { - "USD": { - Input: big.NewInt(200), - Output: big.NewInt(0), - }, - }, - }, tx.PostCommitVolumes) - internaltesting.RequireEqual(t, ledger.AccountsAssetsVolumes{ - "world": { - "USD": { - Input: big.NewInt(0), - Output: big.NewInt(100), - }, - }, - "central_bank": { - "USD": { - Input: big.NewInt(100), - Output: big.NewInt(0), - }, - }, - }, tx.PreCommitVolumes) -} - -func TestGetTransaction(t *testing.T) { - t.Parallel() - store := newLedgerStore(t) - now := time.Now() - - tx1 := ledger.Transaction{ - ID: big.NewInt(0), - TransactionData: ledger.TransactionData{ - Postings: []ledger.Posting{ - { - Source: "world", - Destination: "central_bank", - Amount: big.NewInt(100), - Asset: "USD", - }, - }, - Reference: "tx1", - Timestamp: now.Add(-3 * time.Hour), - }, - } - tx2 := ledger.Transaction{ - ID: big.NewInt(1), - TransactionData: ledger.TransactionData{ - Postings: []ledger.Posting{ - { - Source: "world", - Destination: "central_bank", - Amount: big.NewInt(100), - Asset: "USD", - }, - }, - Reference: "tx2", - Timestamp: now.Add(-2 * time.Hour), - }, - } - - require.NoError(t, insertTransactions(context.Background(), store, tx1, tx2)) - - tx, err := store.GetTransaction(context.Background(), tx1.ID) - require.NoError(t, err) - require.Equal(t, tx1.Postings, tx.Postings) - require.Equal(t, tx1.Reference, tx.Reference) - require.Equal(t, tx1.Timestamp, tx.Timestamp) -} - -func TestGetTransactionByReference(t *testing.T) { - t.Parallel() - store := newLedgerStore(t) - now := time.Now() - - tx1 := ledger.Transaction{ - ID: big.NewInt(0), - TransactionData: ledger.TransactionData{ - Postings: []ledger.Posting{ - { - Source: "world", - Destination: "central_bank", - Amount: big.NewInt(100), - Asset: "USD", - }, - }, - Reference: "tx1", - Timestamp: now.Add(-3 * time.Hour), - }, - } - tx2 := ledger.Transaction{ - ID: big.NewInt(1), - TransactionData: ledger.TransactionData{ - Postings: []ledger.Posting{ - { - Source: "world", - 
Destination: "central_bank", - Amount: big.NewInt(100), - Asset: "USD", - }, - }, - Reference: "tx2", - Timestamp: now.Add(-2 * time.Hour), - }, - } - - require.NoError(t, insertTransactions(context.Background(), store, tx1, tx2)) - - tx, err := store.GetTransactionByReference(context.Background(), "tx1") - require.NoError(t, err) - require.Equal(t, tx1.Postings, tx.Postings) - require.Equal(t, tx1.Reference, tx.Reference) - require.Equal(t, tx1.Timestamp, tx.Timestamp) -} - -func TestInsertTransactions(t *testing.T) { - t.Parallel() - store := newLedgerStore(t) - now := time.Now() - ctx := logging.TestingContext() - - t.Run("success inserting transaction", func(t *testing.T) { - tx1 := ledger.ExpandedTransaction{ - Transaction: ledger.Transaction{ - ID: big.NewInt(0), - TransactionData: ledger.TransactionData{ - Postings: ledger.Postings{ - { - Source: "world", - Destination: "alice", - Amount: big.NewInt(100), - Asset: "USD", - }, - { - Source: "world", - Destination: "bob", - Amount: big.NewInt(10), - Asset: "USD", - }, - }, - Timestamp: now.Add(-3 * time.Hour), - Metadata: metadata.Metadata{}, - }, - }, - PreCommitVolumes: map[string]ledger.VolumesByAssets{ - "world": map[string]*ledger.Volumes{ - "USD": ledger.NewEmptyVolumes(), - }, - "alice": map[string]*ledger.Volumes{ - "USD": ledger.NewEmptyVolumes(), - }, - "bob": map[string]*ledger.Volumes{ - "USD": ledger.NewEmptyVolumes(), - }, - }, - PostCommitVolumes: map[string]ledger.VolumesByAssets{ - "world": map[string]*ledger.Volumes{ - "USD": ledger.NewEmptyVolumes().WithOutputInt64(110), - }, - "alice": map[string]*ledger.Volumes{ - "USD": ledger.NewEmptyVolumes().WithInputInt64(100), - }, - "bob": map[string]*ledger.Volumes{ - "USD": ledger.NewEmptyVolumes().WithInputInt64(10), - }, - }, - } - - err := insertTransactions(ctx, store, tx1.Transaction) - require.NoError(t, err, "inserting transaction should not fail") - - tx, err := store.GetTransactionWithVolumes(ctx, NewGetTransactionQuery(big.NewInt(0)). 
- WithExpandVolumes()) - require.NoError(t, err) - internaltesting.RequireEqual(t, tx1, *tx) - }) - - t.Run("success inserting multiple transactions", func(t *testing.T) { - t.Parallel() - tx2 := ledger.ExpandedTransaction{ - Transaction: ledger.Transaction{ - ID: big.NewInt(1), - TransactionData: ledger.TransactionData{ - Postings: ledger.Postings{ - { - Source: "world", - Destination: "polo", - Amount: big.NewInt(200), - Asset: "USD", - }, - }, - Timestamp: now.Add(-2 * time.Hour), - Metadata: metadata.Metadata{}, - }, - }, - PreCommitVolumes: map[string]ledger.VolumesByAssets{ - "world": map[string]*ledger.Volumes{ - "USD": ledger.NewEmptyVolumes().WithOutputInt64(110), - }, - "polo": map[string]*ledger.Volumes{ - "USD": ledger.NewEmptyVolumes(), - }, - }, - PostCommitVolumes: map[string]ledger.VolumesByAssets{ - "world": map[string]*ledger.Volumes{ - "USD": ledger.NewEmptyVolumes().WithOutputInt64(310), - }, - "polo": map[string]*ledger.Volumes{ - "USD": ledger.NewEmptyVolumes().WithInputInt64(200), - }, - }, - } - - tx3 := ledger.ExpandedTransaction{ - Transaction: ledger.Transaction{ - ID: big.NewInt(2), - TransactionData: ledger.TransactionData{ - Postings: ledger.Postings{ - { - Source: "world", - Destination: "gfyrag", - Amount: big.NewInt(150), - Asset: "USD", - }, - }, - Timestamp: now.Add(-1 * time.Hour), - Metadata: metadata.Metadata{}, - }, - }, - PreCommitVolumes: map[string]ledger.VolumesByAssets{ - "world": map[string]*ledger.Volumes{ - "USD": ledger.NewEmptyVolumes().WithOutputInt64(310), - }, - "gfyrag": map[string]*ledger.Volumes{ - "USD": ledger.NewEmptyVolumes(), - }, - }, - PostCommitVolumes: map[string]ledger.VolumesByAssets{ - "world": map[string]*ledger.Volumes{ - "USD": ledger.NewEmptyVolumes().WithOutputInt64(460), - }, - "gfyrag": map[string]*ledger.Volumes{ - "USD": ledger.NewEmptyVolumes().WithInputInt64(150), - }, - }, - } - - require.NoError(t, store.InsertLogs(context.Background(), - ledger.NewTransactionLog(&tx2.Transaction, map[string]metadata.Metadata{}).ChainLog(nil).WithID(2), - ledger.NewTransactionLog(&tx3.Transaction, map[string]metadata.Metadata{}).ChainLog(nil).WithID(3), - )) - - tx, err := store.GetTransactionWithVolumes(context.Background(), NewGetTransactionQuery(big.NewInt(1)).WithExpandVolumes()) - require.NoError(t, err, "getting transaction should not fail") - internaltesting.RequireEqual(t, tx2, *tx) - - tx, err = store.GetTransactionWithVolumes(context.Background(), NewGetTransactionQuery(big.NewInt(2)).WithExpandVolumes()) - require.NoError(t, err, "getting transaction should not fail") - internaltesting.RequireEqual(t, tx3, *tx) - }) -} - -func TestCountTransactions(t *testing.T) { - t.Parallel() - store := newLedgerStore(t) - now := time.Now() - - tx1 := ledger.ExpandedTransaction{ - Transaction: ledger.Transaction{ - ID: big.NewInt(0), - TransactionData: ledger.TransactionData{ - Postings: ledger.Postings{ - { - Source: "world", - Destination: "alice", - Amount: big.NewInt(100), - Asset: "USD", - }, - }, - Timestamp: now.Add(-3 * time.Hour), - Metadata: metadata.Metadata{}, - }, - }, - PreCommitVolumes: map[string]ledger.VolumesByAssets{ - "world": map[string]*ledger.Volumes{ - "USD": ledger.NewEmptyVolumes(), - }, - "alice": map[string]*ledger.Volumes{ - "USD": ledger.NewEmptyVolumes(), - }, - }, - PostCommitVolumes: map[string]ledger.VolumesByAssets{ - "world": map[string]*ledger.Volumes{ - "USD": ledger.NewEmptyVolumes().WithOutputInt64(100), - }, - "alice": map[string]*ledger.Volumes{ - "USD": 
ledger.NewEmptyVolumes().WithInputInt64(100), - }, - }, - } - tx2 := ledger.ExpandedTransaction{ - Transaction: ledger.Transaction{ - ID: big.NewInt(1), - TransactionData: ledger.TransactionData{ - Postings: ledger.Postings{ - { - Source: "world", - Destination: "polo", - Amount: big.NewInt(200), - Asset: "USD", - }, - }, - Timestamp: now.Add(-2 * time.Hour), - Metadata: metadata.Metadata{}, - }, - }, - PreCommitVolumes: map[string]ledger.VolumesByAssets{ - "world": map[string]*ledger.Volumes{ - "USD": ledger.NewEmptyVolumes().WithOutputInt64(100), - }, - "polo": map[string]*ledger.Volumes{ - "USD": ledger.NewEmptyVolumes(), - }, - }, - PostCommitVolumes: map[string]ledger.VolumesByAssets{ - "world": map[string]*ledger.Volumes{ - "USD": ledger.NewEmptyVolumes().WithOutputInt64(300), - }, - "polo": map[string]*ledger.Volumes{ - "USD": ledger.NewEmptyVolumes().WithInputInt64(200), - }, - }, - } - - tx3 := ledger.ExpandedTransaction{ - Transaction: ledger.Transaction{ - ID: big.NewInt(2), - TransactionData: ledger.TransactionData{ - Postings: ledger.Postings{ - { - Source: "world", - Destination: "gfyrag", - Amount: big.NewInt(150), - Asset: "USD", - }, - }, - Timestamp: now.Add(-1 * time.Hour), - Metadata: metadata.Metadata{}, - }, - }, - PreCommitVolumes: map[string]ledger.VolumesByAssets{ - "world": map[string]*ledger.Volumes{ - "USD": ledger.NewEmptyVolumes().WithOutputInt64(300), - }, - "gfyrag": map[string]*ledger.Volumes{ - "USD": ledger.NewEmptyVolumes(), - }, - }, - PostCommitVolumes: map[string]ledger.VolumesByAssets{ - "world": map[string]*ledger.Volumes{ - "USD": ledger.NewEmptyVolumes().WithOutputInt64(450), - }, - "gfyrag": map[string]*ledger.Volumes{ - "USD": ledger.NewEmptyVolumes().WithInputInt64(150), - }, - }, - } - - err := insertTransactions(context.Background(), store, tx1.Transaction, tx2.Transaction, tx3.Transaction) - require.NoError(t, err, "inserting transaction should not fail") - - count, err := store.CountTransactions(context.Background(), NewGetTransactionsQuery(NewPaginatedQueryOptions(PITFilterWithVolumes{}))) - require.NoError(t, err, "counting transactions should not fail") - require.Equal(t, 3, count, "count should be equal") -} - -func TestUpdateTransactionsMetadata(t *testing.T) { - t.Parallel() - store := newLedgerStore(t) - now := time.Now() - - tx1 := ledger.ExpandedTransaction{ - Transaction: ledger.Transaction{ - ID: big.NewInt(0), - TransactionData: ledger.TransactionData{ - Postings: ledger.Postings{ - { - Source: "world", - Destination: "alice", - Amount: big.NewInt(100), - Asset: "USD", - }, - }, - Timestamp: now.Add(-3 * time.Hour), - Metadata: metadata.Metadata{}, - }, - }, - PreCommitVolumes: map[string]ledger.VolumesByAssets{ - "world": map[string]*ledger.Volumes{ - "USD": ledger.NewEmptyVolumes(), - }, - "alice": map[string]*ledger.Volumes{ - "USD": ledger.NewEmptyVolumes(), - }, - }, - PostCommitVolumes: map[string]ledger.VolumesByAssets{ - "world": map[string]*ledger.Volumes{ - "USD": ledger.NewEmptyVolumes().WithOutputInt64(100), - }, - "alice": map[string]*ledger.Volumes{ - "USD": ledger.NewEmptyVolumes().WithInputInt64(100), - }, - }, - } - tx2 := ledger.ExpandedTransaction{ - Transaction: ledger.Transaction{ - ID: big.NewInt(1), - TransactionData: ledger.TransactionData{ - Postings: ledger.Postings{ - { - Source: "world", - Destination: "polo", - Amount: big.NewInt(200), - Asset: "USD", - }, - }, - Timestamp: now.Add(-2 * time.Hour), - Metadata: metadata.Metadata{}, - }, - }, - PreCommitVolumes: map[string]ledger.VolumesByAssets{ - 
"world": map[string]*ledger.Volumes{ - "USD": ledger.NewEmptyVolumes().WithOutputInt64(100), - }, - "polo": map[string]*ledger.Volumes{ - "USD": ledger.NewEmptyVolumes(), - }, - }, - PostCommitVolumes: map[string]ledger.VolumesByAssets{ - "world": map[string]*ledger.Volumes{ - "USD": ledger.NewEmptyVolumes().WithOutputInt64(300), - }, - "polo": map[string]*ledger.Volumes{ - "USD": ledger.NewEmptyVolumes().WithInputInt64(200), - }, - }, - } - - err := insertTransactions(context.Background(), store, tx1.Transaction, tx2.Transaction) - require.NoError(t, err, "inserting transaction should not fail") - - err = store.InsertLogs(context.Background(), - ledger.NewSetMetadataOnTransactionLog(time.Now(), tx1.ID, metadata.Metadata{"foo1": "bar2"}).ChainLog(nil).WithID(3), - ledger.NewSetMetadataOnTransactionLog(time.Now(), tx2.ID, metadata.Metadata{"foo2": "bar2"}).ChainLog(nil).WithID(4), - ) - require.NoError(t, err, "updating multiple transaction metadata should not fail") - - tx, err := store.GetTransactionWithVolumes(context.Background(), NewGetTransactionQuery(big.NewInt(0)).WithExpandVolumes().WithExpandEffectiveVolumes()) - require.NoError(t, err, "getting transaction should not fail") - require.Equal(t, tx.Metadata, metadata.Metadata{"foo1": "bar2"}, "metadata should be equal") - - tx, err = store.GetTransactionWithVolumes(context.Background(), NewGetTransactionQuery(big.NewInt(1)).WithExpandVolumes().WithExpandEffectiveVolumes()) - require.NoError(t, err, "getting transaction should not fail") - require.Equal(t, tx.Metadata, metadata.Metadata{"foo2": "bar2"}, "metadata should be equal") -} - -func TestDeleteTransactionsMetadata(t *testing.T) { - t.Parallel() - store := newLedgerStore(t) - now := time.Now() - - tx1 := ledger.Transaction{ - ID: big.NewInt(0), - TransactionData: ledger.TransactionData{ - Postings: ledger.Postings{ - { - Source: "world", - Destination: "alice", - Amount: big.NewInt(100), - Asset: "USD", - }, - }, - Timestamp: now.Add(-3 * time.Hour), - Metadata: metadata.Metadata{}, - }, - } - - require.NoError(t, store.InsertLogs(context.Background(), - ledger.NewTransactionLog(&tx1, map[string]metadata.Metadata{}).ChainLog(nil).WithID(1), - ledger.NewSetMetadataOnTransactionLog(time.Now(), tx1.ID, metadata.Metadata{"foo1": "bar1", "foo2": "bar2"}).ChainLog(nil).WithID(2), - )) - - tx, err := store.GetTransaction(context.Background(), tx1.ID) - require.NoError(t, err) - require.Equal(t, tx.Metadata, metadata.Metadata{"foo1": "bar1", "foo2": "bar2"}) - - require.NoError(t, store.InsertLogs(context.Background(), - ledger.NewDeleteMetadataLog(time.Now(), ledger.DeleteMetadataLogPayload{ - TargetType: ledger.MetaTargetTypeTransaction, - TargetID: tx1.ID, - Key: "foo1", - }).ChainLog(nil).WithID(3), - )) - - tx, err = store.GetTransaction(context.Background(), tx1.ID) - require.NoError(t, err) - require.Equal(t, metadata.Metadata{"foo2": "bar2"}, tx.Metadata) -} - -func TestInsertTransactionInPast(t *testing.T) { - t.Parallel() - - store := newLedgerStore(t) - now := time.Now() - ctx := logging.TestingContext() - - tx1 := ledger.NewTransaction().WithPostings( - ledger.NewPosting("world", "bank", "USD/2", big.NewInt(100)), - ).WithDate(now) - - tx2 := ledger.NewTransaction().WithPostings( - ledger.NewPosting("bank", "user1", "USD/2", big.NewInt(50)), - ).WithDate(now.Add(time.Hour)).WithIDUint64(1) - - // Insert in past must modify pre/post commit volumes of tx2 - tx3 := ledger.NewTransaction().WithPostings( - ledger.NewPosting("bank", "user2", "USD/2", big.NewInt(50)), - 
).WithDate(now.Add(30 * time.Minute)).WithIDUint64(2) - - // Insert before the oldest tx must update first_usage of involved accounts - tx4 := ledger.NewTransaction().WithPostings( - ledger.NewPosting("world", "bank", "USD/2", big.NewInt(100)), - ).WithDate(now.Add(-time.Minute)).WithIDUint64(3) - - require.NoError(t, store.InsertLogs(ctx, - ledger.NewTransactionLog(tx1, map[string]metadata.Metadata{}).ChainLog(nil).WithID(1), - ledger.NewTransactionLog(tx2, map[string]metadata.Metadata{}).ChainLog(nil).WithID(2), - ledger.NewTransactionLog(tx3, map[string]metadata.Metadata{}).ChainLog(nil).WithID(3), - ledger.NewTransactionLog(tx4, map[string]metadata.Metadata{}).ChainLog(nil).WithID(4), - )) - - tx2FromDatabase, err := store.GetTransactionWithVolumes(ctx, NewGetTransactionQuery(tx2.ID).WithExpandVolumes().WithExpandEffectiveVolumes()) - require.NoError(t, err) - - internaltesting.RequireEqual(t, ledger.AccountsAssetsVolumes{ - "bank": { - "USD/2": ledger.NewVolumesInt64(200, 50), - }, - "user1": { - "USD/2": ledger.NewVolumesInt64(0, 0), - }, - }, tx2FromDatabase.PreCommitEffectiveVolumes) - internaltesting.RequireEqual(t, ledger.AccountsAssetsVolumes{ - "bank": { - "USD/2": ledger.NewVolumesInt64(200, 100), - }, - "user1": { - "USD/2": ledger.NewVolumesInt64(50, 0), - }, - }, tx2FromDatabase.PostCommitEffectiveVolumes) - - account, err := store.GetAccount(ctx, "bank") - require.NoError(t, err) - require.Equal(t, tx4.Timestamp, account.FirstUsage) -} - -func TestInsertTransactionInPastInOneBatch(t *testing.T) { - t.Parallel() - store := newLedgerStore(t) - now := time.Now() - - tx1 := ledger.NewTransaction().WithPostings( - ledger.NewPosting("world", "bank", "USD/2", big.NewInt(100)), - ).WithDate(now) - - tx2 := ledger.NewTransaction().WithPostings( - ledger.NewPosting("bank", "user1", "USD/2", big.NewInt(50)), - ).WithDate(now.Add(time.Hour)).WithIDUint64(1) - - // Insert in past must modify pre/post commit volumes of tx2 - tx3 := ledger.NewTransaction().WithPostings( - ledger.NewPosting("bank", "user2", "USD/2", big.NewInt(50)), - ).WithDate(now.Add(30 * time.Minute)).WithIDUint64(2) - - require.NoError(t, insertTransactions(context.Background(), store, *tx1, *tx2, *tx3)) - - tx2FromDatabase, err := store.GetTransactionWithVolumes(context.Background(), NewGetTransactionQuery(tx2.ID).WithExpandVolumes().WithExpandEffectiveVolumes()) - require.NoError(t, err) - - internaltesting.RequireEqual(t, ledger.AccountsAssetsVolumes{ - "bank": { - "USD/2": ledger.NewVolumesInt64(100, 50), - }, - "user1": { - "USD/2": ledger.NewVolumesInt64(0, 0), - }, - }, tx2FromDatabase.PreCommitEffectiveVolumes) - internaltesting.RequireEqual(t, ledger.AccountsAssetsVolumes{ - "bank": { - "USD/2": ledger.NewVolumesInt64(100, 100), - }, - "user1": { - "USD/2": ledger.NewVolumesInt64(50, 0), - }, - }, tx2FromDatabase.PostCommitEffectiveVolumes) -} - -func TestInsertTwoTransactionAtSameDateInSameBatch(t *testing.T) { - t.Parallel() - store := newLedgerStore(t) - now := time.Now() - - tx1 := ledger.NewTransaction().WithPostings( - ledger.NewPosting("world", "bank", "USD/2", big.NewInt(100)), - ).WithDate(now.Add(-time.Hour)) - - tx2 := ledger.NewTransaction().WithPostings( - ledger.NewPosting("bank", "user1", "USD/2", big.NewInt(10)), - ).WithDate(now).WithIDUint64(1) - - tx3 := ledger.NewTransaction().WithPostings( - ledger.NewPosting("bank", "user2", "USD/2", big.NewInt(10)), - ).WithDate(now).WithIDUint64(2) - - require.NoError(t, insertTransactions(context.Background(), store, *tx1, *tx2, *tx3)) - - 
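The assertions that follow pin down how the store resolves ties when several transactions share the same timestamp: volumes are accumulated in timestamp order, and transactions with an equal timestamp are applied in the order they were inserted (which here matches their IDs), so tx2 does not see tx3 in its post-commit view while tx3 sees both. A back-of-the-envelope sketch of the bank account's expected USD/2 output for this fixture (illustrative arithmetic only, not part of the store API):

    // Apply postings in (timestamp, id) order: tx1, then tx2, then tx3.
    // Only tx1 credits "bank", so its input stays at 100 throughout.
    out := int64(0)  // bank output before tx2 -> tx2 PreCommitVolumes  {in: 100, out: 0}
    out += 10        // tx2: bank -> user1     -> tx2 PostCommitVolumes {in: 100, out: 10}
    out += 10        // tx3: bank -> user2     -> tx3 PostCommitVolumes {in: 100, out: 20}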
tx2FromDatabase, err := store.GetTransactionWithVolumes(context.Background(), NewGetTransactionQuery(tx2.ID).WithExpandVolumes().WithExpandEffectiveVolumes()) - require.NoError(t, err) - - internaltesting.RequireEqual(t, ledger.AccountsAssetsVolumes{ - "bank": { - "USD/2": ledger.NewVolumesInt64(100, 10), - }, - "user1": { - "USD/2": ledger.NewVolumesInt64(10, 0), - }, - }, tx2FromDatabase.PostCommitVolumes) - internaltesting.RequireEqual(t, ledger.AccountsAssetsVolumes{ - "bank": { - "USD/2": ledger.NewVolumesInt64(100, 0), - }, - "user1": { - "USD/2": ledger.NewVolumesInt64(0, 0), - }, - }, tx2FromDatabase.PreCommitVolumes) - - tx3FromDatabase, err := store.GetTransactionWithVolumes(context.Background(), NewGetTransactionQuery(tx3.ID).WithExpandVolumes().WithExpandEffectiveVolumes()) - require.NoError(t, err) - - internaltesting.RequireEqual(t, ledger.AccountsAssetsVolumes{ - "bank": { - "USD/2": ledger.NewVolumesInt64(100, 10), - }, - "user2": { - "USD/2": ledger.NewVolumesInt64(0, 0), - }, - }, tx3FromDatabase.PreCommitVolumes) - internaltesting.RequireEqual(t, ledger.AccountsAssetsVolumes{ - "bank": { - "USD/2": ledger.NewVolumesInt64(100, 20), - }, - "user2": { - "USD/2": ledger.NewVolumesInt64(10, 0), - }, - }, tx3FromDatabase.PostCommitVolumes) -} - -func TestInsertTwoTransactionAtSameDateInTwoBatch(t *testing.T) { - t.Parallel() - store := newLedgerStore(t) - now := time.Now() - - tx1 := ledger.NewTransaction().WithPostings( - ledger.NewPosting("world", "bank", "USD/2", big.NewInt(100)), - ).WithDate(now.Add(-time.Hour)) - - tx2 := ledger.NewTransaction().WithPostings( - ledger.NewPosting("bank", "user1", "USD/2", big.NewInt(10)), - ).WithDate(now).WithIDUint64(1) - - require.NoError(t, insertTransactions(context.Background(), store, *tx1, *tx2)) - - tx3 := ledger.NewTransaction().WithPostings( - ledger.NewPosting("bank", "user2", "USD/2", big.NewInt(10)), - ).WithDate(now).WithIDUint64(2) - - require.NoError(t, store.InsertLogs(context.Background(), - ledger.NewTransactionLog(tx3, map[string]metadata.Metadata{}).ChainLog(nil).WithID(3), - )) - - tx3FromDatabase, err := store.GetTransactionWithVolumes(context.Background(), NewGetTransactionQuery(tx3.ID).WithExpandVolumes().WithExpandEffectiveVolumes()) - require.NoError(t, err) - - internaltesting.RequireEqual(t, ledger.AccountsAssetsVolumes{ - "bank": { - "USD/2": ledger.NewVolumesInt64(100, 10), - }, - "user2": { - "USD/2": ledger.NewVolumesInt64(0, 0), - }, - }, tx3FromDatabase.PreCommitVolumes) - internaltesting.RequireEqual(t, ledger.AccountsAssetsVolumes{ - "bank": { - "USD/2": ledger.NewVolumesInt64(100, 20), - }, - "user2": { - "USD/2": ledger.NewVolumesInt64(10, 0), - }, - }, tx3FromDatabase.PostCommitVolumes) -} - -func TestGetTransactions(t *testing.T) { - t.Parallel() - store := newLedgerStore(t) - now := time.Now() - ctx := logging.TestingContext() - - tx1 := ledger.NewTransaction(). - WithIDUint64(0). - WithPostings( - ledger.NewPosting("world", "alice", "USD", big.NewInt(100)), - ). - WithMetadata(metadata.Metadata{"category": "1"}). - WithDate(now.Add(-3 * time.Hour)) - tx2 := ledger.NewTransaction(). - WithIDUint64(1). - WithPostings( - ledger.NewPosting("world", "bob", "USD", big.NewInt(100)), - ). - WithMetadata(metadata.Metadata{"category": "2"}). - WithDate(now.Add(-2 * time.Hour)) - tx3 := ledger.NewTransaction(). - WithIDUint64(2). - WithPostings( - ledger.NewPosting("world", "users:marley", "USD", big.NewInt(100)), - ). - WithMetadata(metadata.Metadata{"category": "3"}). 
- WithDate(now.Add(-time.Hour)) - tx4 := ledger.NewTransaction(). - WithIDUint64(3). - WithPostings( - ledger.NewPosting("users:marley", "world", "USD", big.NewInt(100)), - ). - WithDate(now) - tx5 := ledger.NewTransaction(). - WithIDUint64(4). - WithPostings( - ledger.NewPosting("users:marley", "sellers:amazon", "USD", big.NewInt(100)), - ). - WithDate(now) - - logs := []*ledger.Log{ - ledger.NewTransactionLog(tx1, map[string]metadata.Metadata{}), - ledger.NewTransactionLog(tx2, map[string]metadata.Metadata{}), - ledger.NewTransactionLog(tx3, map[string]metadata.Metadata{}), - ledger.NewRevertedTransactionLog(time.Now(), tx3.ID, tx4), - ledger.NewSetMetadataOnTransactionLog(time.Now(), tx3.ID, metadata.Metadata{ - "additional_metadata": "true", - }), - ledger.NewTransactionLog(tx5, map[string]metadata.Metadata{}), - } - - require.NoError(t, store.InsertLogs(ctx, ledger.ChainLogs(logs...)...)) - - type testCase struct { - name string - query PaginatedQueryOptions[PITFilterWithVolumes] - expected *bunpaginate.Cursor[ledger.ExpandedTransaction] - expectError error - } - testCases := []testCase{ - { - name: "nominal", - query: NewPaginatedQueryOptions(PITFilterWithVolumes{}), - expected: &bunpaginate.Cursor[ledger.ExpandedTransaction]{ - PageSize: 15, - HasMore: false, - Data: Reverse(expandLogs(logs...)...), - }, - }, - { - name: "address filter", - query: NewPaginatedQueryOptions(PITFilterWithVolumes{}). - WithQueryBuilder(query.Match("account", "bob")), - expected: &bunpaginate.Cursor[ledger.ExpandedTransaction]{ - PageSize: 15, - HasMore: false, - Data: expandLogs(logs...)[1:2], - }, - }, - { - name: "address filter using segments matching two addresses by individual segments", - query: NewPaginatedQueryOptions(PITFilterWithVolumes{}). - WithQueryBuilder(query.Match("account", "users:amazon")), - expected: &bunpaginate.Cursor[ledger.ExpandedTransaction]{ - PageSize: 15, - HasMore: false, - Data: []ledger.ExpandedTransaction{}, - }, - }, - { - name: "address filter using segment", - query: NewPaginatedQueryOptions(PITFilterWithVolumes{}). - WithQueryBuilder(query.Match("account", "users:")), - expected: &bunpaginate.Cursor[ledger.ExpandedTransaction]{ - PageSize: 15, - HasMore: false, - Data: Reverse(expandLogs(logs...)[2:]...), - }, - }, - { - name: "filter using metadata", - query: NewPaginatedQueryOptions(PITFilterWithVolumes{}). - WithQueryBuilder(query.Match("metadata[category]", "2")), - expected: &bunpaginate.Cursor[ledger.ExpandedTransaction]{ - PageSize: 15, - HasMore: false, - Data: expandLogs(logs...)[1:2], - }, - }, - { - name: "using point in time", - query: NewPaginatedQueryOptions(PITFilterWithVolumes{ - PITFilter: PITFilter{ - PIT: pointer.For(now.Add(-time.Hour)), - }, - }), - expected: &bunpaginate.Cursor[ledger.ExpandedTransaction]{ - PageSize: 15, - HasMore: false, - Data: Reverse(expandLogs(logs[:3]...)...), - }, - }, - { - name: "filter using invalid key", - query: NewPaginatedQueryOptions(PITFilterWithVolumes{}). - WithQueryBuilder(query.Match("invalid", "2")), - expectError: &errInvalidQuery{}, - }, - { - name: "reverted transactions", - query: NewPaginatedQueryOptions(PITFilterWithVolumes{}). - WithQueryBuilder(query.Match("reverted", true)), - expected: &bunpaginate.Cursor[ledger.ExpandedTransaction]{ - PageSize: 15, - HasMore: false, - Data: expandLogs(logs...)[2:3], - }, - }, - { - name: "filter using exists metadata", - query: NewPaginatedQueryOptions(PITFilterWithVolumes{}). 
- WithQueryBuilder(query.Exists("metadata", "category")), - expected: &bunpaginate.Cursor[ledger.ExpandedTransaction]{ - PageSize: 15, - HasMore: false, - Data: Reverse(expandLogs(logs...)[0:3]...), - }, - }, - { - name: "filter using exists metadata2", - query: NewPaginatedQueryOptions(PITFilterWithVolumes{}). - WithQueryBuilder(query.Not(query.Exists("metadata", "category"))), - expected: &bunpaginate.Cursor[ledger.ExpandedTransaction]{ - PageSize: 15, - HasMore: false, - Data: Reverse(expandLogs(logs...)[3:5]...), - }, - }, - } - - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - - tc.query.Options.ExpandVolumes = true - tc.query.Options.ExpandEffectiveVolumes = false - cursor, err := store.GetTransactions(ctx, NewGetTransactionsQuery(tc.query)) - if tc.expectError != nil { - require.True(t, errors.Is(err, tc.expectError)) - } else { - require.NoError(t, err) - require.Len(t, cursor.Data, len(tc.expected.Data)) - internaltesting.RequireEqual(t, *tc.expected, *cursor) - - count, err := store.CountTransactions(ctx, NewGetTransactionsQuery(tc.query)) - require.NoError(t, err) - - require.EqualValues(t, len(tc.expected.Data), count) - } - }) - } -} - -func TestGetLastTransaction(t *testing.T) { - t.Parallel() - store := newLedgerStore(t) - ctx := logging.TestingContext() - - tx1 := ledger.NewTransaction(). - WithIDUint64(0). - WithPostings( - ledger.NewPosting("world", "alice", "USD", big.NewInt(100)), - ) - tx2 := ledger.NewTransaction(). - WithIDUint64(1). - WithPostings( - ledger.NewPosting("world", "bob", "USD", big.NewInt(100)), - ) - tx3 := ledger.NewTransaction(). - WithIDUint64(2). - WithPostings( - ledger.NewPosting("world", "users:marley", "USD", big.NewInt(100)), - ) - - logs := []*ledger.Log{ - ledger.NewTransactionLog(tx1, map[string]metadata.Metadata{}), - ledger.NewTransactionLog(tx2, map[string]metadata.Metadata{}), - ledger.NewTransactionLog(tx3, map[string]metadata.Metadata{}), - } - - require.NoError(t, store.InsertLogs(ctx, ledger.ChainLogs(logs...)...)) - - tx, err := store.GetLastTransaction(ctx) - require.NoError(t, err) - require.Equal(t, *tx3, tx.Transaction) -} - -func TestTransactionFromWorldToWorld(t *testing.T) { - t.Parallel() - store := newLedgerStore(t) - ctx := logging.TestingContext() - - tx := ledger.NewTransaction(). - WithIDUint64(0). 
- WithPostings( - ledger.NewPosting("world", "world", "USD", big.NewInt(100)), - ) - require.NoError(t, store.InsertLogs(ctx, ledger.ChainLogs(ledger.NewTransactionLog(tx, map[string]metadata.Metadata{}))...)) - - account, err := store.GetAccountWithVolumes(ctx, NewGetAccountQuery("world").WithExpandVolumes()) - require.NoError(t, err) - internaltesting.RequireEqual(t, big.NewInt(0), account.Volumes.Balances()["USD"]) -} diff --git a/internal/storage/ledgerstore/volumes_test.go b/internal/storage/ledgerstore/volumes_test.go deleted file mode 100644 index d07975d25..000000000 --- a/internal/storage/ledgerstore/volumes_test.go +++ /dev/null @@ -1,650 +0,0 @@ -//go:build it - -package ledgerstore - -import ( - "math/big" - "testing" - - "github.com/formancehq/go-libs/time" - - "github.com/formancehq/go-libs/logging" - - "github.com/formancehq/go-libs/metadata" - "github.com/formancehq/go-libs/query" - ledger "github.com/formancehq/ledger/internal" - "github.com/stretchr/testify/require" -) - -func TestGetVolumesWithBalances(t *testing.T) { - t.Parallel() - store := newLedgerStore(t) - now := time.Now() - ctx := logging.TestingContext() - - previousPIT := now.Add(-2 * time.Minute) - futurPIT := now.Add(2 * time.Minute) - - previousOOT := now.Add(-2 * time.Minute) - futurOOT := now.Add(2 * time.Minute) - - require.NoError(t, store.InsertLogs(ctx, - ledger.ChainLogs( - ledger.NewSetMetadataOnAccountLog(time.Now(), "account:1", metadata.Metadata{"category": "1"}).WithDate(now), - ledger.NewSetMetadataOnAccountLog(time.Now(), "account:2", metadata.Metadata{"category": "2"}).WithDate(now), - ledger.NewSetMetadataOnAccountLog(time.Now(), "world", metadata.Metadata{"foo": "bar"}).WithDate(now), - ledger.NewTransactionLog( - ledger.NewTransaction(). - WithPostings(ledger.NewPosting("world", "account:1", "USD", big.NewInt(100))). - WithDate(now.Add(-4*time.Minute)), - map[string]metadata.Metadata{}, - ).WithDate(now.Add(4*time.Minute)), - - ledger.NewTransactionLog( - ledger.NewTransaction(). - WithPostings(ledger.NewPosting("world", "account:1", "USD", big.NewInt(100))). - WithIDUint64(1). - WithDate(now.Add(-3*time.Minute)), - map[string]metadata.Metadata{}, - ).WithDate(now.Add(3*time.Minute)), - - ledger.NewTransactionLog( - ledger.NewTransaction(). - WithPostings(ledger.NewPosting("account:1", "bank", "USD", big.NewInt(50))). - WithDate(now.Add(-2*time.Minute)). - WithIDUint64(2), - map[string]metadata.Metadata{}, - ).WithDate(now.Add(2*time.Minute)), - - ledger.NewTransactionLog( - ledger.NewTransaction(). - WithPostings(ledger.NewPosting("world", "account:1", "USD", big.NewInt(0))). - WithDate(now.Add(-time.Minute)). - WithIDUint64(3), - map[string]metadata.Metadata{}, - ).WithDate(now.Add(1*time.Minute)), - - ledger.NewTransactionLog( - ledger.NewTransaction(). - WithPostings(ledger.NewPosting("world", "account:2", "USD", big.NewInt(50))). - WithDate(now).WithIDUint64(4), - map[string]metadata.Metadata{}, - ).WithDate(now), - - ledger.NewTransactionLog( - ledger.NewTransaction(). - WithPostings(ledger.NewPosting("world", "account:2", "USD", big.NewInt(50))). - WithIDUint64(5). - WithDate(now.Add(1*time.Minute)), - map[string]metadata.Metadata{}, - ).WithDate(now.Add(-1*time.Minute)), - - ledger.NewTransactionLog( - ledger.NewTransaction(). - WithPostings(ledger.NewPosting("account:2", "bank", "USD", big.NewInt(50))). - WithDate(now.Add(2*time.Minute)). 
- WithIDUint64(6), - map[string]metadata.Metadata{}, - ).WithDate(now.Add(-2*time.Minute)), - - ledger.NewTransactionLog( - ledger.NewTransaction(). - WithPostings(ledger.NewPosting("world", "account:2", "USD", big.NewInt(25))). - WithDate(now.Add(3*time.Minute)). - WithIDUint64(7), - map[string]metadata.Metadata{}, - ).WithDate(now.Add(-3*time.Minute)), - )..., - )) - - t.Run("Get All Volumes with Balance for Insertion date", func(t *testing.T) { - t.Parallel() - volumes, err := store.GetVolumesWithBalances(ctx, NewGetVolumesWithBalancesQuery(NewPaginatedQueryOptions(FiltersForVolumes{UseInsertionDate: true}))) - require.NoError(t, err) - - require.Len(t, volumes.Data, 4) - }) - - t.Run("Get All Volumes with Balance for Effective date", func(t *testing.T) { - t.Parallel() - volumes, err := store.GetVolumesWithBalances(ctx, NewGetVolumesWithBalancesQuery(NewPaginatedQueryOptions(FiltersForVolumes{UseInsertionDate: false}))) - require.NoError(t, err) - - require.Len(t, volumes.Data, 4) - }) - - t.Run("Get All Volumes with Balance for Insertion date with previous pit", func(t *testing.T) { - t.Parallel() - volumes, err := store.GetVolumesWithBalances(ctx, NewGetVolumesWithBalancesQuery(NewPaginatedQueryOptions( - FiltersForVolumes{ - PITFilter: PITFilter{PIT: &previousPIT, OOT: nil}, - UseInsertionDate: true, - }))) - - require.NoError(t, err) - require.Len(t, volumes.Data, 3) - require.Equal(t, ledger.VolumesWithBalanceByAssetByAccount{ - Account: "account:2", - Asset: "USD", - VolumesWithBalance: ledger.VolumesWithBalance{ - Input: big.NewInt(25), - Output: big.NewInt(50), - Balance: big.NewInt(-25), - }, - }, volumes.Data[0]) - }) - - t.Run("Get All Volumes with Balance for Insertion date with futur pit", func(t *testing.T) { - t.Parallel() - volumes, err := store.GetVolumesWithBalances(ctx, NewGetVolumesWithBalancesQuery(NewPaginatedQueryOptions( - FiltersForVolumes{ - PITFilter: PITFilter{PIT: &futurPIT, OOT: nil}, - UseInsertionDate: true, - }))) - require.NoError(t, err) - - require.Len(t, volumes.Data, 4) - }) - - t.Run("Get All Volumes with Balance for Insertion date with previous oot", func(t *testing.T) { - t.Parallel() - volumes, err := store.GetVolumesWithBalances(ctx, NewGetVolumesWithBalancesQuery(NewPaginatedQueryOptions( - FiltersForVolumes{ - PITFilter: PITFilter{PIT: nil, OOT: &previousOOT}, - UseInsertionDate: true, - }))) - require.NoError(t, err) - - require.Len(t, volumes.Data, 4) - }) - - t.Run("Get All Volumes with Balance for Insertion date with futur oot", func(t *testing.T) { - t.Parallel() - volumes, err := store.GetVolumesWithBalances(ctx, NewGetVolumesWithBalancesQuery(NewPaginatedQueryOptions( - FiltersForVolumes{ - PITFilter: PITFilter{PIT: nil, OOT: &futurOOT}, - UseInsertionDate: true, - }))) - - require.NoError(t, err) - require.Len(t, volumes.Data, 3) - require.Equal(t, ledger.VolumesWithBalanceByAssetByAccount{ - Account: "account:1", - Asset: "USD", - VolumesWithBalance: ledger.VolumesWithBalance{ - Input: big.NewInt(200), - Output: big.NewInt(50), - Balance: big.NewInt(150), - }, - }, volumes.Data[0]) - }) - - t.Run("Get All Volumes with Balance for Effective date with previous pit", func(t *testing.T) { - t.Parallel() - volumes, err := store.GetVolumesWithBalances(ctx, NewGetVolumesWithBalancesQuery(NewPaginatedQueryOptions( - FiltersForVolumes{ - PITFilter: PITFilter{PIT: &previousPIT, OOT: nil}, - UseInsertionDate: false, - }))) - - require.NoError(t, err) - require.Len(t, volumes.Data, 3) - require.Equal(t, 
ledger.VolumesWithBalanceByAssetByAccount{ - Account: "account:1", - Asset: "USD", - VolumesWithBalance: ledger.VolumesWithBalance{ - Input: big.NewInt(200), - Output: big.NewInt(50), - Balance: big.NewInt(150), - }, - }, volumes.Data[0]) - }) - - t.Run("Get All Volumes with Balance for Effective date with futur pit", func(t *testing.T) { - t.Parallel() - volumes, err := store.GetVolumesWithBalances(ctx, NewGetVolumesWithBalancesQuery(NewPaginatedQueryOptions( - FiltersForVolumes{ - PITFilter: PITFilter{PIT: &futurPIT, OOT: nil}, - UseInsertionDate: false, - }))) - require.NoError(t, err) - - require.Len(t, volumes.Data, 4) - }) - - t.Run("Get All Volumes with Balance for Effective date with previous oot", func(t *testing.T) { - t.Parallel() - volumes, err := store.GetVolumesWithBalances(ctx, NewGetVolumesWithBalancesQuery(NewPaginatedQueryOptions( - FiltersForVolumes{ - PITFilter: PITFilter{PIT: nil, OOT: &previousOOT}, - UseInsertionDate: false, - }))) - require.NoError(t, err) - - require.Len(t, volumes.Data, 4) - }) - - t.Run("Get All Volumes with Balance for effective date with futur oot", func(t *testing.T) { - t.Parallel() - volumes, err := store.GetVolumesWithBalances(ctx, NewGetVolumesWithBalancesQuery(NewPaginatedQueryOptions( - FiltersForVolumes{ - PITFilter: PITFilter{PIT: nil, OOT: &futurOOT}, - UseInsertionDate: false, - }))) - - require.NoError(t, err) - require.Len(t, volumes.Data, 3) - require.Equal(t, ledger.VolumesWithBalanceByAssetByAccount{ - Account: "account:2", - Asset: "USD", - VolumesWithBalance: ledger.VolumesWithBalance{ - Input: big.NewInt(25), - Output: big.NewInt(50), - Balance: big.NewInt(-25), - }, - }, volumes.Data[0]) - }) - - t.Run("Get All Volumes with Balance for insertion date with futur PIT and now OOT", func(t *testing.T) { - t.Parallel() - volumes, err := store.GetVolumesWithBalances(ctx, NewGetVolumesWithBalancesQuery(NewPaginatedQueryOptions( - FiltersForVolumes{ - PITFilter: PITFilter{PIT: &futurPIT, OOT: &now}, - UseInsertionDate: true, - }))) - - require.NoError(t, err) - require.Len(t, volumes.Data, 4) - require.Equal(t, ledger.VolumesWithBalanceByAssetByAccount{ - Account: "account:1", - Asset: "USD", - VolumesWithBalance: ledger.VolumesWithBalance{ - Input: big.NewInt(0), - Output: big.NewInt(50), - Balance: big.NewInt(-50), - }, - }, volumes.Data[0]) - - }) - - t.Run("Get All Volumes with Balance for insertion date with previous OOT and now PIT", func(t *testing.T) { - t.Parallel() - volumes, err := store.GetVolumesWithBalances(ctx, NewGetVolumesWithBalancesQuery(NewPaginatedQueryOptions( - FiltersForVolumes{ - PITFilter: PITFilter{PIT: &now, OOT: &previousOOT}, - UseInsertionDate: true, - }))) - - require.NoError(t, err) - require.Len(t, volumes.Data, 3) - require.Equal(t, ledger.VolumesWithBalanceByAssetByAccount{ - Account: "account:2", - Asset: "USD", - VolumesWithBalance: ledger.VolumesWithBalance{ - Input: big.NewInt(100), - Output: big.NewInt(50), - Balance: big.NewInt(50), - }, - }, volumes.Data[0]) - - }) - - t.Run("Get All Volumes with Balance for effective date with futur PIT and now OOT", func(t *testing.T) { - t.Parallel() - volumes, err := store.GetVolumesWithBalances(ctx, NewGetVolumesWithBalancesQuery(NewPaginatedQueryOptions( - FiltersForVolumes{ - PITFilter: PITFilter{PIT: &futurPIT, OOT: &now}, - UseInsertionDate: false, - }))) - - require.NoError(t, err) - require.Len(t, volumes.Data, 3) - require.Equal(t, ledger.VolumesWithBalanceByAssetByAccount{ - Account: "account:2", - Asset: "USD", - VolumesWithBalance: 
ledger.VolumesWithBalance{ - Input: big.NewInt(100), - Output: big.NewInt(50), - Balance: big.NewInt(50), - }, - }, volumes.Data[0]) - }) - - t.Run("Get All Volumes with Balance for insertion date with previous OOT and now PIT", func(t *testing.T) { - t.Parallel() - volumes, err := store.GetVolumesWithBalances(ctx, NewGetVolumesWithBalancesQuery(NewPaginatedQueryOptions( - FiltersForVolumes{ - PITFilter: PITFilter{PIT: &now, OOT: &previousOOT}, - UseInsertionDate: false, - }))) - - require.NoError(t, err) - require.Len(t, volumes.Data, 4) - require.Equal(t, ledger.VolumesWithBalanceByAssetByAccount{ - Account: "account:1", - Asset: "USD", - VolumesWithBalance: ledger.VolumesWithBalance{ - Input: big.NewInt(0), - Output: big.NewInt(50), - Balance: big.NewInt(-50), - }, - }, volumes.Data[0]) - - }) - - t.Run("Get account1 volume and Balance for insertion date with previous OOT and now PIT", func(t *testing.T) { - t.Parallel() - - volumes, err := store.GetVolumesWithBalances(ctx, - NewGetVolumesWithBalancesQuery( - NewPaginatedQueryOptions( - FiltersForVolumes{ - PITFilter: PITFilter{PIT: &now, OOT: &previousOOT}, - UseInsertionDate: false, - }).WithQueryBuilder(query.Match("account", "account:1"))), - ) - - require.NoError(t, err) - require.Len(t, volumes.Data, 1) - require.Equal(t, ledger.VolumesWithBalanceByAssetByAccount{ - Account: "account:1", - Asset: "USD", - VolumesWithBalance: ledger.VolumesWithBalance{ - Input: big.NewInt(0), - Output: big.NewInt(50), - Balance: big.NewInt(-50), - }, - }, volumes.Data[0]) - - }) - - t.Run("Using Metadata regex", func(t *testing.T) { - t.Parallel() - - volumes, err := store.GetVolumesWithBalances(ctx, - NewGetVolumesWithBalancesQuery( - NewPaginatedQueryOptions( - FiltersForVolumes{}).WithQueryBuilder(query.Match("metadata[foo]", "bar"))), - ) - - require.NoError(t, err) - require.Len(t, volumes.Data, 1) - - }) - - t.Run("Using exists metadata filter 1", func(t *testing.T) { - t.Parallel() - - volumes, err := store.GetVolumesWithBalances(ctx, - NewGetVolumesWithBalancesQuery( - NewPaginatedQueryOptions( - FiltersForVolumes{}).WithQueryBuilder(query.Exists("metadata", "category"))), - ) - - require.NoError(t, err) - require.Len(t, volumes.Data, 2) - - }) - - t.Run("Using exists metadata filter 2", func(t *testing.T) { - t.Parallel() - - volumes, err := store.GetVolumesWithBalances(ctx, - NewGetVolumesWithBalancesQuery( - NewPaginatedQueryOptions( - FiltersForVolumes{}).WithQueryBuilder(query.Exists("metadata", "foo"))), - ) - - require.NoError(t, err) - require.Len(t, volumes.Data, 1) - }) -} - -func TestAggGetVolumesWithBalances(t *testing.T) { - t.Parallel() - store := newLedgerStore(t) - now := time.Now() - ctx := logging.TestingContext() - - futurPIT := now.Add(2 * time.Minute) - previousOOT := now.Add(-2 * time.Minute) - - require.NoError(t, store.InsertLogs(ctx, - ledger.ChainLogs( - - ledger.NewTransactionLog( - ledger.NewTransaction(). - WithPostings(ledger.NewPosting("world", "account:1:2", "USD", big.NewInt(100))). - WithDate(now.Add(-4*time.Minute)), - map[string]metadata.Metadata{}, - ).WithDate(now), - - ledger.NewTransactionLog( - ledger.NewTransaction(). - WithPostings(ledger.NewPosting("world", "account:1:1", "EUR", big.NewInt(100))). - WithIDUint64(1). - WithDate(now.Add(-3*time.Minute)), - map[string]metadata.Metadata{}, - ).WithDate(now), - - ledger.NewTransactionLog( - ledger.NewTransaction(). - WithPostings(ledger.NewPosting("world", "account:1:2", "EUR", big.NewInt(50))). - WithDate(now.Add(-2*time.Minute)). 
- WithIDUint64(2), - map[string]metadata.Metadata{}, - ).WithDate(now), - - ledger.NewTransactionLog( - ledger.NewTransaction(). - WithPostings(ledger.NewPosting("world", "account:1:3", "USD", big.NewInt(0))). - WithDate(now.Add(-time.Minute)). - WithIDUint64(3), - map[string]metadata.Metadata{}, - ).WithDate(now), - - ledger.NewTransactionLog( - ledger.NewTransaction(). - WithPostings(ledger.NewPosting("world", "account:2:1", "USD", big.NewInt(50))). - WithDate(now).WithIDUint64(4), - map[string]metadata.Metadata{}, - ).WithDate(now), - - ledger.NewTransactionLog( - ledger.NewTransaction(). - WithPostings(ledger.NewPosting("world", "account:2:2", "USD", big.NewInt(50))). - WithIDUint64(5). - WithDate(now.Add(1*time.Minute)), - map[string]metadata.Metadata{}, - ).WithDate(now), - - ledger.NewTransactionLog( - ledger.NewTransaction(). - WithPostings(ledger.NewPosting("world", "account:2:3", "EUR", big.NewInt(25))). - WithDate(now.Add(3*time.Minute)). - WithIDUint64(7), - map[string]metadata.Metadata{}, - ).WithDate(now), - - ledger.NewSetMetadataOnAccountLog(time.Now(), "account:1:1", metadata.Metadata{ - "foo": "bar", - }), - )..., - )) - - t.Run("Aggregation Volumes with Balance for GroupLvl 0", func(t *testing.T) { - t.Parallel() - volumes, err := store.GetVolumesWithBalances(ctx, NewGetVolumesWithBalancesQuery( - NewPaginatedQueryOptions( - FiltersForVolumes{ - UseInsertionDate: true, - GroupLvl: 0, - }).WithQueryBuilder(query.Match("account", "account::")))) - - require.NoError(t, err) - require.Len(t, volumes.Data, 7) - }) - - t.Run("Aggregation Volumes with Balance for GroupLvl 1", func(t *testing.T) { - t.Parallel() - volumes, err := store.GetVolumesWithBalances(ctx, NewGetVolumesWithBalancesQuery( - NewPaginatedQueryOptions( - FiltersForVolumes{ - UseInsertionDate: true, - GroupLvl: 1, - }).WithQueryBuilder(query.Match("account", "account::")))) - - require.NoError(t, err) - require.Len(t, volumes.Data, 2) - }) - - t.Run("Aggregation Volumes with Balance for GroupLvl 2", func(t *testing.T) { - t.Parallel() - volumes, err := store.GetVolumesWithBalances(ctx, NewGetVolumesWithBalancesQuery( - NewPaginatedQueryOptions( - FiltersForVolumes{ - UseInsertionDate: true, - GroupLvl: 2, - }).WithQueryBuilder(query.Match("account", "account::")))) - - require.NoError(t, err) - require.Len(t, volumes.Data, 4) - }) - - t.Run("Aggregation Volumes with Balance for GroupLvl 3", func(t *testing.T) { - t.Parallel() - volumes, err := store.GetVolumesWithBalances(ctx, NewGetVolumesWithBalancesQuery( - NewPaginatedQueryOptions( - FiltersForVolumes{ - UseInsertionDate: true, - GroupLvl: 3, - }).WithQueryBuilder(query.Match("account", "account::")))) - - require.NoError(t, err) - require.Len(t, volumes.Data, 7) - }) - - t.Run("Aggregation Volumes with Balance for GroupLvl 1 && PIT && OOT && effectiveDate", func(t *testing.T) { - t.Parallel() - volumes, err := store.GetVolumesWithBalances(ctx, NewGetVolumesWithBalancesQuery( - NewPaginatedQueryOptions( - FiltersForVolumes{ - PITFilter: PITFilter{ - PIT: &futurPIT, - OOT: &previousOOT, - }, - UseInsertionDate: false, - GroupLvl: 1, - }).WithQueryBuilder(query.Match("account", "account::")))) - - require.NoError(t, err) - require.Len(t, volumes.Data, 2) - require.Equal(t, volumes.Data[0], ledger.VolumesWithBalanceByAssetByAccount{ - Account: "account", - Asset: "EUR", - VolumesWithBalance: ledger.VolumesWithBalance{ - Input: big.NewInt(50), - Output: big.NewInt(0), - Balance: big.NewInt(50), - }, - }) - require.Equal(t, volumes.Data[1], 
ledger.VolumesWithBalanceByAssetByAccount{ - Account: "account", - Asset: "USD", - VolumesWithBalance: ledger.VolumesWithBalance{ - Input: big.NewInt(100), - Output: big.NewInt(0), - Balance: big.NewInt(100), - }, - }) - }) - - t.Run("Aggregation Volumes with Balance for GroupLvl 1 && PIT && OOT && effectiveDate && Balance Filter 1", func(t *testing.T) { - t.Parallel() - volumes, err := store.GetVolumesWithBalances(ctx, NewGetVolumesWithBalancesQuery( - NewPaginatedQueryOptions( - FiltersForVolumes{ - PITFilter: PITFilter{ - PIT: &futurPIT, - OOT: &previousOOT, - }, - UseInsertionDate: false, - GroupLvl: 1, - }).WithQueryBuilder( - query.And(query.Match("account", "account::"), query.Gte("balance[EUR]", 50))))) - - require.NoError(t, err) - require.Len(t, volumes.Data, 1) - require.Equal(t, volumes.Data[0], ledger.VolumesWithBalanceByAssetByAccount{ - Account: "account", - Asset: "EUR", - VolumesWithBalance: ledger.VolumesWithBalance{ - Input: big.NewInt(50), - Output: big.NewInt(0), - Balance: big.NewInt(50), - }, - }) - }) - - t.Run("Aggregation Volumes with Balance for GroupLvl 1 && Balance Filter 2", func(t *testing.T) { - t.Parallel() - volumes, err := store.GetVolumesWithBalances(ctx, NewGetVolumesWithBalancesQuery( - NewPaginatedQueryOptions( - FiltersForVolumes{ - PITFilter: PITFilter{}, - UseInsertionDate: true, - GroupLvl: 2, - }).WithQueryBuilder( - query.Or( - query.Match("account", "account:1:"), - query.Lte("balance[USD]", 0))))) - - require.NoError(t, err) - require.Len(t, volumes.Data, 3) - require.Equal(t, volumes.Data[0], ledger.VolumesWithBalanceByAssetByAccount{ - Account: "account:1", - Asset: "EUR", - VolumesWithBalance: ledger.VolumesWithBalance{ - Input: big.NewInt(150), - Output: big.NewInt(0), - Balance: big.NewInt(150), - }, - }) - require.Equal(t, volumes.Data[1], ledger.VolumesWithBalanceByAssetByAccount{ - Account: "account:1", - Asset: "USD", - VolumesWithBalance: ledger.VolumesWithBalance{ - Input: big.NewInt(100), - Output: big.NewInt(0), - Balance: big.NewInt(100), - }, - }) - require.Equal(t, volumes.Data[2], ledger.VolumesWithBalanceByAssetByAccount{ - Account: "world", - Asset: "USD", - VolumesWithBalance: ledger.VolumesWithBalance{ - Input: big.NewInt(0), - Output: big.NewInt(200), - Balance: big.NewInt(-200), - }, - }) - }) - - t.Run("filter using account matching, metadata, and group", func(t *testing.T) { - t.Parallel() - - volumes, err := store.GetVolumesWithBalances(ctx, - NewGetVolumesWithBalancesQuery( - NewPaginatedQueryOptions( - FiltersForVolumes{ - GroupLvl: 1, - }).WithQueryBuilder(query.And( - query.Match("account", "account::"), - query.Match("metadata[foo]", "bar"), - ))), - ) - - require.NoError(t, err) - require.Len(t, volumes.Data, 1) - }) -} diff --git a/internal/storage/migrate_ledger_v1_test.go b/internal/storage/migrate_ledger_v1_test.go deleted file mode 100644 index c3c51ffa7..000000000 --- a/internal/storage/migrate_ledger_v1_test.go +++ /dev/null @@ -1,64 +0,0 @@ -//go:build it - -package storage_test - -import ( - "database/sql" - "os" - "path/filepath" - "testing" - - "github.com/formancehq/go-libs/testing/docker" - - "github.com/formancehq/go-libs/bun/bunconnect" - "github.com/formancehq/go-libs/logging" - "github.com/formancehq/go-libs/testing/platform/pgtesting" - "github.com/formancehq/ledger/internal/storage/driver" - "github.com/formancehq/ledger/internal/storage/ledgerstore" - "github.com/formancehq/ledger/internal/storage/systemstore" - "github.com/stretchr/testify/require" -) - -func TestMigrateLedgerV1(t 
*testing.T) { - dockerPool := docker.NewPool(t, logging.Testing()) - srv := pgtesting.CreatePostgresServer(t, dockerPool) - - db, err := sql.Open("postgres", srv.GetDSN()) - require.NoError(t, err) - - data, err := os.ReadFile(filepath.Join("testdata", "v1-dump.sql")) - require.NoError(t, err) - - _, err = db.Exec(string(data)) - require.NoError(t, err) - - ctx := logging.TestingContext() - - d := driver.New(bunconnect.ConnectionOptions{ - DatabaseSourceName: srv.GetDSN(), - }) - require.NoError(t, d.Initialize(ctx)) - - ledgers, err := d.GetSystemStore().ListLedgers(ctx, systemstore.ListLedgersQuery{}) - require.NoError(t, err) - - for _, ledger := range ledgers.Data { - require.NotEmpty(t, ledger.Bucket) - require.Equal(t, ledger.Name, ledger.Bucket) - - bucket, err := d.OpenBucket(ctx, ledger.Bucket) - require.NoError(t, err) - require.NoError(t, bucket.Migrate(ctx)) - - store, err := bucket.GetLedgerStore(ledger.Name) - require.NoError(t, err) - - txs, err := store.GetTransactions(ctx, ledgerstore.NewGetTransactionsQuery(ledgerstore.PaginatedQueryOptions[ledgerstore.PITFilterWithVolumes]{})) - require.NoError(t, err) - require.NotEmpty(t, txs) - - accounts, err := store.GetAccountsWithVolumes(ctx, ledgerstore.NewGetAccountsQuery(ledgerstore.PaginatedQueryOptions[ledgerstore.PITFilterWithVolumes]{})) - require.NoError(t, err) - require.NotEmpty(t, accounts) - } -} diff --git a/internal/storage/module.go b/internal/storage/module.go new file mode 100644 index 000000000..e9c23ae31 --- /dev/null +++ b/internal/storage/module.go @@ -0,0 +1,12 @@ +package storage + +import ( + "github.com/formancehq/ledger/internal/storage/driver" + "go.uber.org/fx" +) + +func NewFXModule(autoUpgrade bool) fx.Option { + return fx.Options( + driver.NewFXModule(autoUpgrade), + ) +} diff --git a/internal/storage/sqlutils/errors.go b/internal/storage/sqlutils/errors.go deleted file mode 100644 index 07cc5b274..000000000 --- a/internal/storage/sqlutils/errors.go +++ /dev/null @@ -1,70 +0,0 @@ -package sqlutils - -import ( - "database/sql" - - "github.com/lib/pq" - "github.com/pkg/errors" -) - -// postgresError is an helper to wrap postgres errors into storage errors -func PostgresError(err error) error { - if err != nil { - if errors.Is(err, sql.ErrNoRows) { - return ErrNotFound - } - - switch pge := err.(type) { - case *pq.Error: - switch pge.Code { - case "23505": - return newErrConstraintsFailed(err) - case "53300": - return newErrTooManyClient(err) - } - } - - return err - } - - return nil -} - -var ( - ErrNotFound = errors.New("not found") - ErrBucketAlreadyExists = errors.New("bucket already exists") - ErrStoreAlreadyExists = errors.New("store already exists") - ErrStoreNotFound = errors.New("store not found") -) - -func IsNotFoundError(err error) bool { - return errors.Is(err, ErrNotFound) -} - -type errConstraintsFailed struct { - err error -} - -func (e errConstraintsFailed) Error() string { - return e.err.Error() -} - -func newErrConstraintsFailed(err error) *errConstraintsFailed { - return &errConstraintsFailed{ - err: err, - } -} - -type errTooManyClient struct { - err error -} - -func (e errTooManyClient) Error() string { - return e.err.Error() -} - -func newErrTooManyClient(err error) *errTooManyClient { - return &errTooManyClient{ - err: err, - } -} diff --git a/internal/storage/storagetesting/storage.go b/internal/storage/storagetesting/storage.go deleted file mode 100644 index 53afdf244..000000000 --- a/internal/storage/storagetesting/storage.go +++ /dev/null @@ -1,34 +0,0 @@ -package 
storagetesting - -import ( - "context" - "time" - - "github.com/formancehq/go-libs/logging" - "github.com/formancehq/go-libs/testing/docker" - - "github.com/formancehq/go-libs/bun/bunconnect" - - "github.com/formancehq/go-libs/testing/platform/pgtesting" - "github.com/formancehq/ledger/internal/storage/driver" - "github.com/stretchr/testify/require" -) - -func StorageDriver(t docker.T) *driver.Driver { - pgServer := pgtesting.CreatePostgresServer(t, docker.NewPool(t, logging.Testing())) - pgDatabase := pgServer.NewDatabase(t) - - d := driver.New(bunconnect.ConnectionOptions{ - DatabaseSourceName: pgDatabase.ConnString(), - MaxIdleConns: 40, - MaxOpenConns: 40, - ConnMaxIdleTime: time.Minute, - }) - - require.NoError(t, d.Initialize(context.Background())) - t.Cleanup(func() { - require.NoError(t, d.Close()) - }) - - return d -} diff --git a/internal/storage/systemstore/configuration.go b/internal/storage/systemstore/configuration.go deleted file mode 100644 index 59c11e9a6..000000000 --- a/internal/storage/systemstore/configuration.go +++ /dev/null @@ -1,53 +0,0 @@ -package systemstore - -import ( - "context" - - "github.com/formancehq/go-libs/time" - - storageerrors "github.com/formancehq/ledger/internal/storage/sqlutils" - - "github.com/uptrace/bun" -) - -type configuration struct { - bun.BaseModel `bun:"_system.configuration,alias:configuration"` - - Key string `bun:"key,type:varchar(255),pk"` // Primary key - Value string `bun:"value,type:text"` - AddedAt time.Time `bun:"addedAt,type:timestamp"` -} - -func (s *Store) GetConfiguration(ctx context.Context, key string) (string, error) { - query := s.db.NewSelect(). - Model((*configuration)(nil)). - Column("value"). - Where("key = ?", key). - Limit(1). - String() - - row := s.db.QueryRowContext(ctx, query) - if row.Err() != nil { - return "", storageerrors.PostgresError(row.Err()) - } - var value string - if err := row.Scan(&value); err != nil { - return "", storageerrors.PostgresError(err) - } - - return value, nil -} - -func (s *Store) InsertConfiguration(ctx context.Context, key, value string) error { - config := &configuration{ - Key: key, - Value: value, - AddedAt: time.Now(), - } - - _, err := s.db.NewInsert(). - Model(config). 
- Exec(ctx) - - return storageerrors.PostgresError(err) -} diff --git a/internal/storage/systemstore/ledgers.go b/internal/storage/systemstore/ledgers.go deleted file mode 100644 index 85e5fb438..000000000 --- a/internal/storage/systemstore/ledgers.go +++ /dev/null @@ -1,127 +0,0 @@ -package systemstore - -import ( - "context" - - "github.com/formancehq/go-libs/metadata" - "github.com/formancehq/go-libs/time" - - "github.com/formancehq/go-libs/bun/bunpaginate" - - "github.com/formancehq/ledger/internal/storage/sqlutils" - "github.com/pkg/errors" - "github.com/uptrace/bun" -) - -const ( - StateInitializing = "initializing" - StateInUse = "in-use" -) - -type Ledger struct { - bun.BaseModel `bun:"_system.ledgers,alias:ledgers"` - - Name string `bun:"ledger,type:varchar(255),pk" json:"name"` // Primary key - AddedAt time.Time `bun:"addedat,type:timestamp" json:"addedAt"` - Bucket string `bun:"bucket,type:varchar(255)" json:"bucket"` - Metadata map[string]string `bun:"metadata,type:jsonb" json:"metadata"` - State string `bun:"state,type:varchar(255)" json:"-"` -} - -type PaginatedQueryOptions struct { - PageSize uint64 `json:"pageSize"` -} - -type ListLedgersQuery bunpaginate.OffsetPaginatedQuery[PaginatedQueryOptions] - -func (query ListLedgersQuery) WithPageSize(pageSize uint64) ListLedgersQuery { - query.PageSize = pageSize - return query -} - -func NewListLedgersQuery(pageSize uint64) ListLedgersQuery { - return ListLedgersQuery{ - PageSize: pageSize, - } -} - -func (s *Store) ListLedgers(ctx context.Context, q ListLedgersQuery) (*bunpaginate.Cursor[Ledger], error) { - query := s.db.NewSelect(). - Column("ledger", "bucket", "addedat", "metadata", "state"). - Order("addedat asc") - - return bunpaginate.UsingOffset[PaginatedQueryOptions, Ledger](ctx, query, bunpaginate.OffsetPaginatedQuery[PaginatedQueryOptions](q)) -} - -func (s *Store) DeleteLedger(ctx context.Context, name string) error { - _, err := s.db.NewDelete(). - Model((*Ledger)(nil)). - Where("ledger = ?", name). - Exec(ctx) - - return errors.Wrap(sqlutils.PostgresError(err), "delete ledger from system store") -} - -func (s *Store) RegisterLedger(ctx context.Context, l *Ledger) (bool, error) { - return RegisterLedger(ctx, s.db, l) -} - -func (s *Store) GetLedger(ctx context.Context, name string) (*Ledger, error) { - ret := &Ledger{} - if err := s.db.NewSelect(). - Model(ret). - Column("ledger", "bucket", "addedat", "metadata", "state"). - Where("ledger = ?", name). - Scan(ctx); err != nil { - return nil, sqlutils.PostgresError(err) - } - - return ret, nil -} - -func (s *Store) UpdateLedgerMetadata(ctx context.Context, name string, m metadata.Metadata) error { - _, err := s.db.NewUpdate(). - Model(&Ledger{}). - Set("metadata = metadata || ?", m). - Where("ledger = ?", name). - Exec(ctx) - return err -} - -func (s *Store) UpdateLedgerState(ctx context.Context, name string, state string) error { - _, err := s.db.NewUpdate(). - Model(&Ledger{}). - Set("state = ?", state). - Where("ledger = ?", name). - Exec(ctx) - return err -} - -func (s *Store) DeleteLedgerMetadata(ctx context.Context, name string, key string) error { - _, err := s.db.NewUpdate(). - Model(&Ledger{}). - Set("metadata = metadata - ?", key). - Where("ledger = ?", name). - Exec(ctx) - return err -} - -func RegisterLedger(ctx context.Context, db bun.IDB, l *Ledger) (bool, error) { - if l.Metadata == nil { - l.Metadata = map[string]string{} - } - ret, err := db.NewInsert(). - Model(l). - Ignore(). 
- Exec(ctx) - if err != nil { - return false, sqlutils.PostgresError(err) - } - - affected, err := ret.RowsAffected() - if err != nil { - return false, sqlutils.PostgresError(err) - } - - return affected > 0, nil -} diff --git a/internal/storage/systemstore/ledgers_test.go b/internal/storage/systemstore/ledgers_test.go deleted file mode 100644 index be6f8781d..000000000 --- a/internal/storage/systemstore/ledgers_test.go +++ /dev/null @@ -1,121 +0,0 @@ -//go:build it - -package systemstore - -import ( - "fmt" - "testing" - - "github.com/google/uuid" - - "github.com/formancehq/go-libs/time" - - "github.com/formancehq/go-libs/bun/bunconnect" - "github.com/formancehq/go-libs/bun/bunpaginate" - - "github.com/formancehq/go-libs/logging" - "github.com/stretchr/testify/require" -) - -func newSystemStore(t *testing.T) *Store { - t.Parallel() - t.Helper() - ctx := logging.TestingContext() - - pgServer := srv.NewDatabase(t) - - store, err := Connect(ctx, bunconnect.ConnectionOptions{ - DatabaseSourceName: pgServer.ConnString(), - }) - require.NoError(t, err) - t.Cleanup(func() { - require.NoError(t, store.Close()) - }) - - require.NoError(t, Migrate(ctx, store.DB())) - - return store -} - -func TestListLedgers(t *testing.T) { - ctx := logging.TestingContext() - store := newSystemStore(t) - - ledgers := make([]Ledger, 0) - pageSize := uint64(2) - count := uint64(10) - now := time.Now() - for i := uint64(0); i < count; i++ { - m := map[string]string{} - if i%2 == 0 { - m["foo"] = "bar" - } - ledger := Ledger{ - Name: fmt.Sprintf("ledger%d", i), - AddedAt: now.Add(time.Duration(i) * time.Second), - Metadata: m, - } - ledgers = append(ledgers, ledger) - _, err := store.RegisterLedger(ctx, &ledger) - require.NoError(t, err) - } - - cursor, err := store.ListLedgers(ctx, NewListLedgersQuery(pageSize)) - require.NoError(t, err) - require.Len(t, cursor.Data, int(pageSize)) - require.Equal(t, ledgers[:pageSize], cursor.Data) - - for i := pageSize; i < count; i += pageSize { - query := ListLedgersQuery{} - require.NoError(t, bunpaginate.UnmarshalCursor(cursor.Next, &query)) - - cursor, err = store.ListLedgers(ctx, query) - require.NoError(t, err) - require.Len(t, cursor.Data, 2) - require.Equal(t, ledgers[i:i+pageSize], cursor.Data) - } -} - -func TestUpdateLedgerMetadata(t *testing.T) { - ctx := logging.TestingContext() - store := newSystemStore(t) - - ledger := &Ledger{ - Name: uuid.NewString(), - AddedAt: time.Now(), - } - _, err := store.RegisterLedger(ctx, ledger) - require.NoError(t, err) - - addedMetadata := map[string]string{ - "foo": "bar", - } - err = store.UpdateLedgerMetadata(ctx, ledger.Name, addedMetadata) - require.NoError(t, err) - - ledgerFromDB, err := store.GetLedger(ctx, ledger.Name) - require.NoError(t, err) - require.Equal(t, addedMetadata, ledgerFromDB.Metadata) -} - -func TestDeleteLedgerMetadata(t *testing.T) { - ctx := logging.TestingContext() - store := newSystemStore(t) - - ledger := &Ledger{ - Name: uuid.NewString(), - AddedAt: time.Now(), - Metadata: map[string]string{ - "foo": "bar", - }, - } - _, err := store.RegisterLedger(ctx, ledger) - require.NoError(t, err) - - err = store.DeleteLedgerMetadata(ctx, ledger.Name, "foo") - require.NoError(t, err) - - ledgerFromDB, err := store.GetLedger(ctx, ledger.Name) - require.NoError(t, err) - require.Equal(t, map[string]string{}, ledgerFromDB.Metadata) -} diff --git a/internal/storage/systemstore/main_test.go b/internal/storage/systemstore/main_test.go deleted file mode 100644 index 0f62887fe..000000000 --- 
a/internal/storage/systemstore/main_test.go +++ /dev/null @@ -1,23 +0,0 @@ -//go:build it - -package systemstore - -import ( - "testing" - - "github.com/formancehq/go-libs/testing/docker" - "github.com/formancehq/go-libs/testing/utils" - - "github.com/formancehq/go-libs/logging" - "github.com/formancehq/go-libs/testing/platform/pgtesting" -) - -var srv *pgtesting.PostgresServer - -func TestMain(m *testing.M) { - utils.WithTestMain(func(t *utils.TestingTForMain) int { - srv = pgtesting.CreatePostgresServer(t, docker.NewPool(t, logging.Testing())) - - return m.Run() - }) -} diff --git a/internal/storage/systemstore/migrations.go b/internal/storage/systemstore/migrations.go deleted file mode 100644 index cac524565..000000000 --- a/internal/storage/systemstore/migrations.go +++ /dev/null @@ -1,127 +0,0 @@ -package systemstore - -import ( - "context" - - "github.com/formancehq/go-libs/logging" - "github.com/pkg/errors" - - "github.com/formancehq/ledger/internal/storage/sqlutils" - - "github.com/formancehq/go-libs/migrations" - "github.com/uptrace/bun" -) - -func Migrate(ctx context.Context, db bun.IDB) error { - migrator := migrations.NewMigrator(migrations.WithSchema(Schema, true)) - migrator.RegisterMigrations( - migrations.Migration{ - Name: "Init schema", - UpWithContext: func(ctx context.Context, tx bun.Tx) error { - - logging.FromContext(ctx).Infof("Checking if ledger v1 upgrade") - exists, err := tx.NewSelect(). - TableExpr("information_schema.columns"). - Where("table_name = 'ledgers'"). - Exists(ctx) - if err != nil { - return err - } - - if exists { - logging.FromContext(ctx).Infof("Detect ledger v1 installation, trigger migration") - _, err := tx.NewAddColumn(). - Table("ledgers"). - ColumnExpr("bucket varchar(255)"). - Exec(ctx) - if err != nil { - return errors.Wrap(err, "adding 'bucket' column") - } - _, err = tx.NewUpdate(). - Table("ledgers"). - Set("bucket = ledger"). - Where("1 = 1"). - Exec(ctx) - return errors.Wrap(err, "setting 'bucket' column") - } - - _, err = tx.NewCreateTable(). - Model((*Ledger)(nil)). - IfNotExists(). - Exec(ctx) - if err != nil { - return sqlutils.PostgresError(err) - } - - _, err = tx.NewCreateTable(). - Model((*configuration)(nil)). - IfNotExists(). 
- Exec(ctx) - return sqlutils.PostgresError(err) - }, - }, - migrations.Migration{ - Name: "Add ledger, bucket naming constraints 63 chars", - UpWithContext: func(ctx context.Context, tx bun.Tx) error { - _, err := tx.ExecContext(ctx, ` - alter table ledgers - add column if not exists ledger varchar(63), - add column if not exists bucket varchar(63); - - alter table ledgers - alter column ledger type varchar(63), - alter column bucket type varchar(63); - `) - if err != nil { - return err - } - return nil - }, - }, - migrations.Migration{ - Name: "Add ledger metadata", - UpWithContext: func(ctx context.Context, tx bun.Tx) error { - _, err := tx.ExecContext(ctx, ` - alter table ledgers - add column if not exists metadata jsonb; - `) - if err != nil { - return err - } - return nil - }, - }, - migrations.Migration{ - Name: "Fix empty ledger metadata", - UpWithContext: func(ctx context.Context, tx bun.Tx) error { - _, err := tx.ExecContext(ctx, ` - update ledgers - set metadata = '{}'::jsonb - where metadata is null; - `) - if err != nil { - return err - } - return nil - }, - }, - migrations.Migration{ - Name: "Add ledger state", - UpWithContext: func(ctx context.Context, tx bun.Tx) error { - _, err := tx.ExecContext(ctx, ` - alter table ledgers - add column if not exists state varchar(255) default 'initializing'; - - update ledgers - set state = 'in-use' - where state = ''; - `) - if err != nil { - return err - } - return nil - }, - }, - ) - return migrator.Up(ctx, db) -} diff --git a/internal/storage/systemstore/store.go b/internal/storage/systemstore/store.go deleted file mode 100644 index f76e8e6e4..000000000 --- a/internal/storage/systemstore/store.go +++ /dev/null @@ -1,40 +0,0 @@ -package systemstore - -import ( - "context" - "fmt" - - "github.com/formancehq/go-libs/bun/bunconnect" - "github.com/formancehq/ledger/internal/storage/sqlutils" - - "github.com/uptrace/bun" -) - -const Schema = "_system" - -type Store struct { - db *bun.DB -} - -func Connect(ctx context.Context, connectionOptions bunconnect.ConnectionOptions, hooks ...bun.QueryHook) (*Store, error) { - - db, err := bunconnect.OpenDBWithSchema(ctx, connectionOptions, Schema, hooks...) 
- if err != nil { - return nil, sqlutils.PostgresError(err) - } - - _, err = db.ExecContext(ctx, fmt.Sprintf(`create schema if not exists "%s"`, Schema)) - if err != nil { - return nil, sqlutils.PostgresError(err) - } - - return &Store{db: db}, nil -} - -func (s *Store) DB() *bun.DB { - return s.db -} - -func (s *Store) Close() error { - return s.db.Close() -} diff --git a/internal/storage/testdata/v1-dump.sql b/internal/storage/testdata/v1-dump.sql deleted file mode 100644 index f8ce01ee4..000000000 --- a/internal/storage/testdata/v1-dump.sql +++ /dev/null @@ -1,959 +0,0 @@ --- --- PostgreSQL database dump --- - --- Dumped from database version 13.8 --- Dumped by pg_dump version 16.1 - -SET statement_timeout = 0; -SET lock_timeout = 0; -SET idle_in_transaction_session_timeout = 0; -SET client_encoding = 'UTF8'; -SET standard_conforming_strings = on; -SELECT pg_catalog.set_config('search_path', '', false); -SET check_function_bodies = false; -SET xmloption = content; -SET client_min_messages = warning; -SET row_security = off; - --- --- Name: _system; Type: SCHEMA; Schema: - --- - -CREATE SCHEMA _system; - --- --- Name: default; Type: SCHEMA; Schema: - --- - -CREATE SCHEMA "default"; - --- --- Name: public; Type: SCHEMA; Schema: - --- - --- *not* creating schema, since initdb creates it - --- --- Name: wallets-002; Type: SCHEMA; Schema: - --- - -CREATE SCHEMA "wallets-002"; - --- --- Name: pg_trgm; Type: EXTENSION; Schema: -; Owner: - --- - -CREATE EXTENSION IF NOT EXISTS pg_trgm WITH SCHEMA public; - - --- --- Name: EXTENSION pg_trgm; Type: COMMENT; Schema: -; Owner: --- - -COMMENT ON EXTENSION pg_trgm IS 'text similarity measurement and index searching based on trigrams'; - - --- --- Name: pgcrypto; Type: EXTENSION; Schema: -; Owner: - --- - -CREATE EXTENSION IF NOT EXISTS pgcrypto WITH SCHEMA public; - - --- --- Name: EXTENSION pgcrypto; Type: COMMENT; Schema: -; Owner: --- - -COMMENT ON EXTENSION pgcrypto IS 'cryptographic functions'; - - --- --- Name: compute_hashes(); Type: FUNCTION; Schema: default --- - -CREATE FUNCTION "default".compute_hashes() RETURNS void - LANGUAGE plpgsql - AS $$ DECLARE r record; BEGIN /* Create JSON object manually as it needs to be in canonical form */ FOR r IN (select id, '{"data":' || "default".normaliz(data::jsonb) || ',"date":"' || to_char (date at time zone 'UTC', 'YYYY-MM-DD"T"HH24:MI:SS"Z"') || '","hash":"","id":' || id || ',"type":"' || type || '"}' as canonical from "default".log) LOOP UPDATE "default".log set hash = (select encode(digest( COALESCE((select '{"data":' || "default".normaliz(data::jsonb) || ',"date":"' || to_char (date at time zone 'UTC', 'YYYY-MM-DD"T"HH24:MI:SS"Z"') || '","hash":"' || hash || '","id":' || id || ',"type":"' || type || '"}' from "default".log where id = r.id - 1), 'null') || r.canonical, 'sha256' ), 'hex')) WHERE id = r.id; END LOOP; END $$; - --- --- Name: compute_volumes(); Type: FUNCTION; Schema: default --- - -CREATE FUNCTION "default".compute_volumes() RETURNS trigger - LANGUAGE plpgsql - AS $$ DECLARE p record; BEGIN FOR p IN ( SELECT t.postings->>'source' as source, t.postings->>'asset' as asset, sum ((t.postings->>'amount')::bigint) as amount FROM ( SELECT jsonb_array_elements(((newtable.data::jsonb)->>'postings')::jsonb) as postings FROM newtable WHERE newtable.type = 'NEW_TRANSACTION' ) t GROUP BY source, asset ) LOOP INSERT INTO "default".accounts (address, metadata) VALUES (p.source, '{}') ON CONFLICT DO NOTHING; INSERT INTO "default".volumes (account, asset, input, output) VALUES (p.source, p.asset, 0, 
p.amount::bigint) ON CONFLICT (account, asset) DO UPDATE SET output = p.amount::bigint + ( SELECT output FROM "default".volumes WHERE account = p.source AND asset = p.asset ); END LOOP; FOR p IN ( SELECT t.postings->>'destination' as destination, t.postings->>'asset' as asset, sum ((t.postings->>'amount')::bigint) as amount FROM ( SELECT jsonb_array_elements(((newtable.data::jsonb)->>'postings')::jsonb) as postings FROM newtable WHERE newtable.type = 'NEW_TRANSACTION' ) t GROUP BY destination, asset ) LOOP INSERT INTO "default".accounts (address, metadata) VALUES (p.destination, '{}') ON CONFLICT DO NOTHING; INSERT INTO "default".volumes (account, asset, input, output) VALUES (p.destination, p.asset, p.amount::bigint, 0) ON CONFLICT (account, asset) DO UPDATE SET input = p.amount::bigint + ( SELECT input FROM "default".volumes WHERE account = p.destination AND asset = p.asset ); END LOOP; RETURN NULL; END $$; - - --- --- Name: handle_log_entry(); Type: FUNCTION; Schema: default --- - -CREATE FUNCTION "default".handle_log_entry() RETURNS trigger - LANGUAGE plpgsql - AS $$ BEGIN if NEW.type = 'NEW_TRANSACTION' THEN INSERT INTO "default".transactions(id, timestamp, reference, postings, metadata, pre_commit_volumes, post_commit_volumes) VALUES ( (NEW.data ->> 'txid')::bigint, (NEW.data ->> 'timestamp')::varchar, CASE WHEN (NEW.data ->> 'reference')::varchar = '' THEN NULL ELSE (NEW.data ->> 'reference')::varchar END, (NEW.data ->> 'postings')::jsonb, CASE WHEN (NEW.data ->> 'metadata')::jsonb IS NULL THEN '{}' ELSE (NEW.data ->> 'metadata')::jsonb END, (NEW.data ->> 'preCommitVolumes')::jsonb, (NEW.data ->> 'postCommitVolumes')::jsonb ); END IF; if NEW.type = 'SET_METADATA' THEN if NEW.data ->> 'targetType' = 'TRANSACTION' THEN UPDATE "default".transactions SET metadata = metadata || (NEW.data ->> 'metadata')::jsonb WHERE id = (NEW.data ->> 'targetId')::bigint; END IF; if NEW.data ->> 'targetType' = 'ACCOUNT' THEN INSERT INTO "default".accounts (address, metadata) VALUES ((NEW.data ->> 'targetId')::varchar, (NEW.data ->> 'metadata')::jsonb) ON CONFLICT (address) DO UPDATE SET metadata = accounts.metadata || (NEW.data ->> 'metadata')::jsonb; END IF; END IF; RETURN NEW; END; $$; - - --- --- Name: is_valid_json(text); Type: FUNCTION; Schema: default --- - -CREATE FUNCTION "default".is_valid_json(p_json text) RETURNS boolean - LANGUAGE plpgsql IMMUTABLE - AS $$ BEGIN RETURN (p_json::jsonb IS NOT NULL); EXCEPTION WHEN others THEN RETURN false; END; $$; - - --- --- Name: meta_compare(jsonb, boolean, text[]); Type: FUNCTION; Schema: default --- - -CREATE FUNCTION "default".meta_compare(metadata jsonb, value boolean, VARIADIC path text[]) RETURNS boolean - LANGUAGE plpgsql IMMUTABLE - AS $$ BEGIN return jsonb_extract_path(metadata, variadic path)::bool = value::bool; EXCEPTION WHEN others THEN RAISE INFO 'Error Name: %', SQLERRM; RAISE INFO 'Error State: %', SQLSTATE; RETURN false; END $$; - --- --- Name: meta_compare(jsonb, numeric, text[]); Type: FUNCTION; Schema: default --- - -CREATE FUNCTION "default".meta_compare(metadata jsonb, value numeric, VARIADIC path text[]) RETURNS boolean - LANGUAGE plpgsql IMMUTABLE - AS $$ BEGIN return jsonb_extract_path(metadata, variadic path)::numeric = value::numeric; EXCEPTION WHEN others THEN RAISE INFO 'Error Name: %', SQLERRM; RAISE INFO 'Error State: %', SQLSTATE; RETURN false; END $$; - --- --- Name: meta_compare(jsonb, character varying, text[]); Type: FUNCTION; Schema: default --- - -CREATE FUNCTION "default".meta_compare(metadata jsonb, value character 
varying, VARIADIC path text[]) RETURNS boolean - LANGUAGE plpgsql IMMUTABLE - AS $$ BEGIN return jsonb_extract_path_text(metadata, variadic path)::varchar = value::varchar; EXCEPTION WHEN others THEN RAISE INFO 'Error Name: %', SQLERRM; RAISE INFO 'Error State: %', SQLSTATE; RETURN false; END $$; - --- --- Name: normaliz(jsonb); Type: FUNCTION; Schema: default --- - -CREATE FUNCTION "default".normaliz(v jsonb) RETURNS text - LANGUAGE plpgsql - AS $$ DECLARE r record; t jsonb; BEGIN if jsonb_typeof(v) = 'object' then return ( SELECT COALESCE('{' || string_agg(keyValue, ',') || '}', '{}') FROM ( SELECT '"' || key || '":' || value as keyValue FROM ( SELECT key, (CASE WHEN "default".is_valid_json((select v ->> key)) THEN (select "default".normaliz((select v ->> key)::jsonb)) ELSE '"' || (select v ->> key) || '"' END) as value FROM ( SELECT jsonb_object_keys(v) as key ) t order by key ) t ) t ); end if; if jsonb_typeof(v) = 'array' then return ( select COALESCE('[' || string_agg(items, ',') || ']', '[]') from ( select "default".normaliz(item) as items from jsonb_array_elements(v) item ) t ); end if; if jsonb_typeof(v) = 'string' then return v::text; end if; if jsonb_typeof(v) = 'number' then return v::bigint; end if; if jsonb_typeof(v) = 'boolean' then return v::boolean; end if; return ''; END $$; - --- --- Name: use_account(jsonb, character varying); Type: FUNCTION; Schema: default --- - -CREATE FUNCTION "default".use_account(postings jsonb, account character varying) RETURNS boolean - LANGUAGE sql - AS $$ SELECT bool_or(v.value) from ( SELECT "default".use_account_as_source(postings, account) AS value UNION SELECT "default".use_account_as_destination(postings, account) AS value ) v $$; - --- --- Name: use_account_as_destination(jsonb, character varying); Type: FUNCTION; Schema: default --- - -CREATE FUNCTION "default".use_account_as_destination(postings jsonb, account character varying) RETURNS boolean - LANGUAGE sql - AS $_$ select bool_or(v.value::bool) from ( select jsonb_extract_path_text(jsonb_array_elements(postings), 'destination') ~ ('^' || account || '$') as value) as v; $_$; - --- --- Name: use_account_as_source(jsonb, character varying); Type: FUNCTION; Schema: default --- - -CREATE FUNCTION "default".use_account_as_source(postings jsonb, account character varying) RETURNS boolean - LANGUAGE sql - AS $_$ select bool_or(v.value::bool) from ( select jsonb_extract_path_text(jsonb_array_elements(postings), 'source') ~ ('^' || account || '$') as value) as v; $_$; - --- --- Name: compute_hashes(); Type: FUNCTION; Schema: wallets-002 --- - -CREATE FUNCTION "wallets-002".compute_hashes() RETURNS void - LANGUAGE plpgsql - AS $$ DECLARE r record; BEGIN /* Create JSON object manually as it needs to be in canonical form */ FOR r IN (select id, '{"data":' || "wallets-002".normaliz(data::jsonb) || ',"date":"' || to_char (date at time zone 'UTC', 'YYYY-MM-DD"T"HH24:MI:SS"Z"') || '","hash":"","id":' || id || ',"type":"' || type || '"}' as canonical from "wallets-002".log) LOOP UPDATE "wallets-002".log set hash = (select encode(digest( COALESCE((select '{"data":' || "wallets-002".normaliz(data::jsonb) || ',"date":"' || to_char (date at time zone 'UTC', 'YYYY-MM-DD"T"HH24:MI:SS"Z"') || '","hash":"' || hash || '","id":' || id || ',"type":"' || type || '"}' from "wallets-002".log where id = r.id - 1), 'null') || r.canonical, 'sha256' ), 'hex')) WHERE id = r.id; END LOOP; END $$; - --- --- Name: compute_volumes(); Type: FUNCTION; Schema: wallets-002 --- - -CREATE FUNCTION 
"wallets-002".compute_volumes() RETURNS trigger - LANGUAGE plpgsql - AS $$ DECLARE p record; BEGIN FOR p IN ( SELECT t.postings->>'source' as source, t.postings->>'asset' as asset, sum ((t.postings->>'amount')::bigint) as amount FROM ( SELECT jsonb_array_elements(((newtable.data::jsonb)->>'postings')::jsonb) as postings FROM newtable WHERE newtable.type = 'NEW_TRANSACTION' ) t GROUP BY source, asset ) LOOP INSERT INTO "wallets-002".accounts (address, metadata) VALUES (p.source, '{}') ON CONFLICT DO NOTHING; INSERT INTO "wallets-002".volumes (account, asset, input, output) VALUES (p.source, p.asset, 0, p.amount::bigint) ON CONFLICT (account, asset) DO UPDATE SET output = p.amount::bigint + ( SELECT output FROM "wallets-002".volumes WHERE account = p.source AND asset = p.asset ); END LOOP; FOR p IN ( SELECT t.postings->>'destination' as destination, t.postings->>'asset' as asset, sum ((t.postings->>'amount')::bigint) as amount FROM ( SELECT jsonb_array_elements(((newtable.data::jsonb)->>'postings')::jsonb) as postings FROM newtable WHERE newtable.type = 'NEW_TRANSACTION' ) t GROUP BY destination, asset ) LOOP INSERT INTO "wallets-002".accounts (address, metadata) VALUES (p.destination, '{}') ON CONFLICT DO NOTHING; INSERT INTO "wallets-002".volumes (account, asset, input, output) VALUES (p.destination, p.asset, p.amount::bigint, 0) ON CONFLICT (account, asset) DO UPDATE SET input = p.amount::bigint + ( SELECT input FROM "wallets-002".volumes WHERE account = p.destination AND asset = p.asset ); END LOOP; RETURN NULL; END $$; - --- --- Name: handle_log_entry(); Type: FUNCTION; Schema: wallets-002 --- - -CREATE FUNCTION "wallets-002".handle_log_entry() RETURNS trigger - LANGUAGE plpgsql - AS $$ BEGIN if NEW.type = 'NEW_TRANSACTION' THEN INSERT INTO "wallets-002".transactions(id, timestamp, reference, postings, metadata, pre_commit_volumes, post_commit_volumes) VALUES ( (NEW.data ->> 'txid')::bigint, (NEW.data ->> 'timestamp')::varchar, CASE WHEN (NEW.data ->> 'reference')::varchar = '' THEN NULL ELSE (NEW.data ->> 'reference')::varchar END, (NEW.data ->> 'postings')::jsonb, CASE WHEN (NEW.data ->> 'metadata')::jsonb IS NULL THEN '{}' ELSE (NEW.data ->> 'metadata')::jsonb END, (NEW.data ->> 'preCommitVolumes')::jsonb, (NEW.data ->> 'postCommitVolumes')::jsonb ); END IF; if NEW.type = 'SET_METADATA' THEN if NEW.data ->> 'targetType' = 'TRANSACTION' THEN UPDATE "wallets-002".transactions SET metadata = metadata || (NEW.data ->> 'metadata')::jsonb WHERE id = (NEW.data ->> 'targetId')::bigint; END IF; if NEW.data ->> 'targetType' = 'ACCOUNT' THEN INSERT INTO "wallets-002".accounts (address, metadata) VALUES ((NEW.data ->> 'targetId')::varchar, (NEW.data ->> 'metadata')::jsonb) ON CONFLICT (address) DO UPDATE SET metadata = accounts.metadata || (NEW.data ->> 'metadata')::jsonb; END IF; END IF; RETURN NEW; END; $$; - --- --- Name: is_valid_json(text); Type: FUNCTION; Schema: wallets-002 --- - -CREATE FUNCTION "wallets-002".is_valid_json(p_json text) RETURNS boolean - LANGUAGE plpgsql IMMUTABLE - AS $$ BEGIN RETURN (p_json::jsonb IS NOT NULL); EXCEPTION WHEN others THEN RETURN false; END; $$; - --- --- Name: meta_compare(jsonb, boolean, text[]); Type: FUNCTION; Schema: wallets-002 --- - -CREATE FUNCTION "wallets-002".meta_compare(metadata jsonb, value boolean, VARIADIC path text[]) RETURNS boolean - LANGUAGE plpgsql IMMUTABLE - AS $$ BEGIN return jsonb_extract_path(metadata, variadic path)::bool = value::bool; EXCEPTION WHEN others THEN RAISE INFO 'Error Name: %', SQLERRM; RAISE INFO 'Error State: %', 
SQLSTATE; RETURN false; END $$; - --- --- Name: meta_compare(jsonb, numeric, text[]); Type: FUNCTION; Schema: wallets-002 --- - -CREATE FUNCTION "wallets-002".meta_compare(metadata jsonb, value numeric, VARIADIC path text[]) RETURNS boolean - LANGUAGE plpgsql IMMUTABLE - AS $$ BEGIN return jsonb_extract_path(metadata, variadic path)::numeric = value::numeric; EXCEPTION WHEN others THEN RAISE INFO 'Error Name: %', SQLERRM; RAISE INFO 'Error State: %', SQLSTATE; RETURN false; END $$; - --- --- Name: meta_compare(jsonb, character varying, text[]); Type: FUNCTION; Schema: wallets-002 --- - -CREATE FUNCTION "wallets-002".meta_compare(metadata jsonb, value character varying, VARIADIC path text[]) RETURNS boolean - LANGUAGE plpgsql IMMUTABLE - AS $$ BEGIN return jsonb_extract_path_text(metadata, variadic path)::varchar = value::varchar; EXCEPTION WHEN others THEN RAISE INFO 'Error Name: %', SQLERRM; RAISE INFO 'Error State: %', SQLSTATE; RETURN false; END $$; - --- --- Name: normaliz(jsonb); Type: FUNCTION; Schema: wallets-002 --- - -CREATE FUNCTION "wallets-002".normaliz(v jsonb) RETURNS text - LANGUAGE plpgsql - AS $$ DECLARE r record; t jsonb; BEGIN if jsonb_typeof(v) = 'object' then return ( SELECT COALESCE('{' || string_agg(keyValue, ',') || '}', '{}') FROM ( SELECT '"' || key || '":' || value as keyValue FROM ( SELECT key, (CASE WHEN "wallets-002".is_valid_json((select v ->> key)) THEN (select "wallets-002".normaliz((select v ->> key)::jsonb)) ELSE '"' || (select v ->> key) || '"' END) as value FROM ( SELECT jsonb_object_keys(v) as key ) t order by key ) t ) t ); end if; if jsonb_typeof(v) = 'array' then return ( select COALESCE('[' || string_agg(items, ',') || ']', '[]') from ( select "wallets-002".normaliz(item) as items from jsonb_array_elements(v) item ) t ); end if; if jsonb_typeof(v) = 'string' then return v::text; end if; if jsonb_typeof(v) = 'number' then return v::bigint; end if; if jsonb_typeof(v) = 'boolean' then return v::boolean; end if; return ''; END $$; - --- --- Name: use_account(jsonb, character varying); Type: FUNCTION; Schema: wallets-002 --- - -CREATE FUNCTION "wallets-002".use_account(postings jsonb, account character varying) RETURNS boolean - LANGUAGE sql - AS $$ SELECT bool_or(v.value) from ( SELECT "wallets-002".use_account_as_source(postings, account) AS value UNION SELECT "wallets-002".use_account_as_destination(postings, account) AS value ) v $$; - --- --- Name: use_account_as_destination(jsonb, character varying); Type: FUNCTION; Schema: wallets-002 --- - -CREATE FUNCTION "wallets-002".use_account_as_destination(postings jsonb, account character varying) RETURNS boolean - LANGUAGE sql - AS $_$ select bool_or(v.value::bool) from ( select jsonb_extract_path_text(jsonb_array_elements(postings), 'destination') ~ ('^' || account || '$') as value) as v; $_$; - --- --- Name: use_account_as_source(jsonb, character varying); Type: FUNCTION; Schema: wallets-002 --- - -CREATE FUNCTION "wallets-002".use_account_as_source(postings jsonb, account character varying) RETURNS boolean - LANGUAGE sql - AS $_$ select bool_or(v.value::bool) from ( select jsonb_extract_path_text(jsonb_array_elements(postings), 'source') ~ ('^' || account || '$') as value) as v; $_$; - -SET default_tablespace = ''; - -SET default_table_access_method = heap; - --- --- Name: configuration; Type: TABLE; Schema: _system --- - -CREATE TABLE _system.configuration ( - key character varying(255) NOT NULL, - value text, - addedat timestamp without time zone -); - --- --- Name: ledgers; Type: TABLE; Schema: 
_system --- - -CREATE TABLE _system.ledgers ( - ledger character varying(255) NOT NULL, - addedat timestamp without time zone -); - --- --- Name: accounts; Type: TABLE; Schema: default --- - -CREATE TABLE "default".accounts ( - address character varying NOT NULL, - metadata jsonb DEFAULT '{}'::jsonb, - address_json jsonb -); - --- --- Name: idempotency; Type: TABLE; Schema: default --- - -CREATE TABLE "default".idempotency ( - key character varying NOT NULL, - date character varying, - status_code integer, - headers character varying, - body character varying, - request_hash character varying -); - --- --- Name: log; Type: TABLE; Schema: default --- - -CREATE TABLE "default".log ( - id bigint, - type character varying, - hash character varying, - date timestamp with time zone, - data jsonb -); - --- --- Name: log_seq; Type: SEQUENCE; Schema: default --- - -CREATE SEQUENCE "default".log_seq - START WITH 0 - INCREMENT BY 1 - MINVALUE 0 - NO MAXVALUE - CACHE 1; - --- --- Name: mapping; Type: TABLE; Schema: default --- - -CREATE TABLE "default".mapping ( - mapping_id character varying, - mapping character varying -); - --- --- Name: migrations; Type: TABLE; Schema: default --- - -CREATE TABLE "default".migrations ( - version character varying, - date character varying -); - --- --- Name: postings; Type: TABLE; Schema: default --- - -CREATE TABLE "default".postings ( - txid bigint, - posting_index integer, - source jsonb, - destination jsonb -); - --- --- Name: transactions; Type: TABLE; Schema: default --- - -CREATE TABLE "default".transactions ( - id bigint, - "timestamp" timestamp with time zone, - reference character varying, - hash character varying, - postings jsonb, - metadata jsonb DEFAULT '{}'::jsonb, - pre_commit_volumes jsonb, - post_commit_volumes jsonb -); - --- --- Name: volumes; Type: TABLE; Schema: default --- - -CREATE TABLE "default".volumes ( - account character varying, - asset character varying, - input numeric, - output numeric, - account_json jsonb -); - --- --- Name: accounts; Type: TABLE; Schema: wallets-002 --- - -CREATE TABLE "wallets-002".accounts ( - address character varying NOT NULL, - metadata jsonb DEFAULT '{}'::jsonb, - address_json jsonb -); - --- --- Name: idempotency; Type: TABLE; Schema: wallets-002 --- - -CREATE TABLE "wallets-002".idempotency ( - key character varying NOT NULL, - date character varying, - status_code integer, - headers character varying, - body character varying, - request_hash character varying -); - --- --- Name: log; Type: TABLE; Schema: wallets-002 --- - -CREATE TABLE "wallets-002".log ( - id bigint, - type character varying, - hash character varying, - date timestamp with time zone, - data jsonb -); - --- --- Name: log_seq; Type: SEQUENCE; Schema: wallets-002 --- - -CREATE SEQUENCE "wallets-002".log_seq - START WITH 0 - INCREMENT BY 1 - MINVALUE 0 - NO MAXVALUE - CACHE 1; - --- --- Name: mapping; Type: TABLE; Schema: wallets-002 --- - -CREATE TABLE "wallets-002".mapping ( - mapping_id character varying, - mapping character varying -); - --- --- Name: migrations; Type: TABLE; Schema: wallets-002 --- - -CREATE TABLE "wallets-002".migrations ( - version character varying, - date character varying -); - --- --- Name: postings; Type: TABLE; Schema: wallets-002 --- - -CREATE TABLE "wallets-002".postings ( - txid bigint, - posting_index integer, - source jsonb, - destination jsonb -); - --- --- Name: transactions; Type: TABLE; Schema: wallets-002 --- - -CREATE TABLE "wallets-002".transactions ( - id bigint, - "timestamp" timestamp with time 
zone, - reference character varying, - hash character varying, - postings jsonb, - metadata jsonb DEFAULT '{}'::jsonb, - pre_commit_volumes jsonb, - post_commit_volumes jsonb -); - --- --- Name: volumes; Type: TABLE; Schema: wallets-002 --- - -CREATE TABLE "wallets-002".volumes ( - account character varying, - asset character varying, - input numeric, - output numeric, - account_json jsonb -); - --- --- Data for Name: configuration; Type: TABLE DATA; Schema: _system --- - -INSERT INTO _system.configuration (key, value, addedat) VALUES ('appId', '7f50ba54-cdb1-4e79-a2f7-3e704ce08d08', '2023-12-13 18:16:31'); - - --- --- Data for Name: ledgers; Type: TABLE DATA; Schema: _system --- - -INSERT INTO _system.ledgers (ledger, addedat) VALUES ('wallets-002', '2023-12-13 18:16:35.943038'); -INSERT INTO _system.ledgers (ledger, addedat) VALUES ('default', '2023-12-13 18:21:05.044237'); - - --- --- Data for Name: accounts; Type: TABLE DATA; Schema: default --- - -INSERT INTO "default".accounts (address, metadata, address_json) VALUES ('world', '{}', '["world"]'); -INSERT INTO "default".accounts (address, metadata, address_json) VALUES ('bank', '{}', '["bank"]'); -INSERT INTO "default".accounts (address, metadata, address_json) VALUES ('bob', '{}', '["bob"]'); -INSERT INTO "default".accounts (address, metadata, address_json) VALUES ('alice', '{"foo": "bar"}', '["alice"]'); - - --- --- Data for Name: idempotency; Type: TABLE DATA; Schema: default --- - - - --- --- Data for Name: log; Type: TABLE DATA; Schema: default --- - -INSERT INTO "default".log (id, type, hash, date, data) VALUES (0, 'NEW_TRANSACTION', '79fc36b46f2668ee1f682a109765af8e849d11715d078bd361e7b4eb61fadc70', '2023-12-13 18:21:05+00', '{"txid": 0, "metadata": {}, "postings": [{"asset": "USD/2", "amount": 10000, "source": "world", "destination": "bank"}], "reference": "", "timestamp": "2023-12-13T18:21:05Z"}'); -INSERT INTO "default".log (id, type, hash, date, data) VALUES (1, 'NEW_TRANSACTION', 'e493bab4fcce0c281193414ea43a7d34b73c89ac1bb103755e9fb1064d00c0e8', '2023-12-13 18:21:40+00', '{"txid": 1, "metadata": {}, "postings": [{"asset": "USD/2", "amount": 10000, "source": "world", "destination": "bob"}], "reference": "", "timestamp": "2023-12-13T18:21:40Z"}'); -INSERT INTO "default".log (id, type, hash, date, data) VALUES (2, 'NEW_TRANSACTION', '19ac0ffff69a271615ba09c6564f3851ab0fe32e7aabe3ab9083b63501f29332', '2023-12-13 18:21:46+00', '{"txid": 2, "metadata": {}, "postings": [{"asset": "USD/2", "amount": 10000, "source": "world", "destination": "alice"}], "reference": "", "timestamp": "2023-12-13T18:21:46Z"}'); -INSERT INTO "default".log (id, type, hash, date, data) VALUES (3, 'SET_METADATA', '839800b3bf685903b37240e8a59e1872d29c2ed9715a79c56b86edb5b5b0976f', '2023-12-14 09:30:31+00', '{"metadata": {"foo": "bar"}, "targetId": "alice", "targetType": "ACCOUNT"}'); - - --- --- Data for Name: mapping; Type: TABLE DATA; Schema: default --- - - - --- --- Data for Name: migrations; Type: TABLE DATA; Schema: default --- - -INSERT INTO "default".migrations (version, date) VALUES ('0', '2023-12-13T18:21:05Z'); -INSERT INTO "default".migrations (version, date) VALUES ('1', '2023-12-13T18:21:05Z'); -INSERT INTO "default".migrations (version, date) VALUES ('2', '2023-12-13T18:21:05Z'); -INSERT INTO "default".migrations (version, date) VALUES ('3', '2023-12-13T18:21:05Z'); -INSERT INTO "default".migrations (version, date) VALUES ('4', '2023-12-13T18:21:05Z'); -INSERT INTO "default".migrations (version, date) VALUES ('5', '2023-12-13T18:21:05Z'); 
-INSERT INTO "default".migrations (version, date) VALUES ('6', '2023-12-13T18:21:05Z'); -INSERT INTO "default".migrations (version, date) VALUES ('7', '2023-12-13T18:21:05Z'); -INSERT INTO "default".migrations (version, date) VALUES ('8', '2023-12-13T18:21:05Z'); -INSERT INTO "default".migrations (version, date) VALUES ('9', '2023-12-13T18:21:05Z'); -INSERT INTO "default".migrations (version, date) VALUES ('10', '2023-12-13T18:21:05Z'); -INSERT INTO "default".migrations (version, date) VALUES ('11', '2023-12-13T18:21:05Z'); -INSERT INTO "default".migrations (version, date) VALUES ('12', '2023-12-13T18:21:05Z'); -INSERT INTO "default".migrations (version, date) VALUES ('13', '2023-12-13T18:21:05Z'); -INSERT INTO "default".migrations (version, date) VALUES ('14', '2023-12-13T18:21:05Z'); -INSERT INTO "default".migrations (version, date) VALUES ('15', '2023-12-13T18:21:05Z'); -INSERT INTO "default".migrations (version, date) VALUES ('16', '2023-12-13T18:21:05Z'); -INSERT INTO "default".migrations (version, date) VALUES ('17', '2023-12-13T18:21:05Z'); -INSERT INTO "default".migrations (version, date) VALUES ('18', '2023-12-13T18:21:05Z'); -INSERT INTO "default".migrations (version, date) VALUES ('19', '2023-12-13T18:21:05Z'); -INSERT INTO "default".migrations (version, date) VALUES ('20', '2023-12-13T18:21:05Z'); - - --- --- Data for Name: postings; Type: TABLE DATA; Schema: default --- - -INSERT INTO "default".postings (txid, posting_index, source, destination) VALUES (0, 0, '["world"]', '["bank"]'); -INSERT INTO "default".postings (txid, posting_index, source, destination) VALUES (1, 0, '["world"]', '["bob"]'); -INSERT INTO "default".postings (txid, posting_index, source, destination) VALUES (2, 0, '["world"]', '["alice"]'); - - --- --- Data for Name: transactions; Type: TABLE DATA; Schema: default --- - -INSERT INTO "default".transactions (id, "timestamp", reference, hash, postings, metadata, pre_commit_volumes, post_commit_volumes) VALUES (0, '2023-12-13 18:21:05+00', NULL, NULL, '[{"asset": "USD/2", "amount": 10000, "source": "world", "destination": "bank"}]', '{}', '{"bank": {"USD/2": {"input": 0, "output": 0, "balance": 0}}, "world": {"USD/2": {"input": 0, "output": 0, "balance": 0}}}', '{"bank": {"USD/2": {"input": 10000, "output": 0, "balance": 10000}}, "world": {"USD/2": {"input": 0, "output": 10000, "balance": -10000}}}'); -INSERT INTO "default".transactions (id, "timestamp", reference, hash, postings, metadata, pre_commit_volumes, post_commit_volumes) VALUES (1, '2023-12-13 18:21:40+00', NULL, NULL, '[{"asset": "USD/2", "amount": 10000, "source": "world", "destination": "bob"}]', '{}', '{"bob": {"USD/2": {"input": 0, "output": 0, "balance": 0}}, "world": {"USD/2": {"input": 0, "output": 10000, "balance": -10000}}}', '{"bob": {"USD/2": {"input": 10000, "output": 0, "balance": 10000}}, "world": {"USD/2": {"input": 0, "output": 20000, "balance": -20000}}}'); -INSERT INTO "default".transactions (id, "timestamp", reference, hash, postings, metadata, pre_commit_volumes, post_commit_volumes) VALUES (2, '2023-12-13 18:21:46+00', NULL, NULL, '[{"asset": "USD/2", "amount": 10000, "source": "world", "destination": "alice"}]', '{}', '{"alice": {"USD/2": {"input": 0, "output": 0, "balance": 0}}, "world": {"USD/2": {"input": 0, "output": 20000, "balance": -20000}}}', '{"alice": {"USD/2": {"input": 10000, "output": 0, "balance": 10000}}, "world": {"USD/2": {"input": 0, "output": 30000, "balance": -30000}}}'); - - --- --- Data for Name: volumes; Type: TABLE DATA; Schema: default --- - -INSERT INTO 
"default".volumes (account, asset, input, output, account_json) VALUES ('bank', 'USD/2', 10000, 0, '["bank"]'); -INSERT INTO "default".volumes (account, asset, input, output, account_json) VALUES ('bob', 'USD/2', 10000, 0, '["bob"]'); -INSERT INTO "default".volumes (account, asset, input, output, account_json) VALUES ('alice', 'USD/2', 10000, 0, '["alice"]'); -INSERT INTO "default".volumes (account, asset, input, output, account_json) VALUES ('world', 'USD/2', 0, 30000, '["world"]'); - - --- --- Data for Name: accounts; Type: TABLE DATA; Schema: wallets-002 --- - -INSERT INTO "wallets-002".accounts (address, metadata, address_json) VALUES ('wallets:15b7a366c6e9473f96276803ef585ae9:main', '{"wallets/id": "15b7a366-c6e9-473f-9627-6803ef585ae9", "wallets/name": "wallet1", "wallets/balances": "true", "wallets/createdAt": "2023-12-14T09:30:48.01540488Z", "wallets/spec/type": "wallets.primary", "wallets/custom_data": {}, "wallets/balances/name": "main"}', '["wallets", "15b7a366c6e9473f96276803ef585ae9", "main"]'); -INSERT INTO "wallets-002".accounts (address, metadata, address_json) VALUES ('world', '{}', '["world"]'); -INSERT INTO "wallets-002".accounts (address, metadata, address_json) VALUES ('wallets:71e6788ad1954139bec5c3e35ee4a2dc:main', '{"wallets/id": "71e6788a-d195-4139-bec5-c3e35ee4a2dc", "wallets/name": "wallet2", "wallets/balances": "true", "wallets/createdAt": "2023-12-14T09:32:38.001913219Z", "wallets/spec/type": "wallets.primary", "wallets/custom_data": {"catgory": "gold"}, "wallets/balances/name": "main"}', '["wallets", "71e6788ad1954139bec5c3e35ee4a2dc", "main"]'); - - --- --- Data for Name: idempotency; Type: TABLE DATA; Schema: wallets-002 --- - - - --- --- Data for Name: log; Type: TABLE DATA; Schema: wallets-002 --- - -INSERT INTO "wallets-002".log (id, type, hash, date, data) VALUES (0, 'SET_METADATA', 'c3d4b844838f4feaf0d35f1f37f8eae496b66328a69fc3d73e46a7cd53b231b6', '2023-12-14 09:30:48+00', '{"metadata": {"wallets/id": "15b7a366-c6e9-473f-9627-6803ef585ae9", "wallets/name": "wallet1", "wallets/balances": "true", "wallets/createdAt": "2023-12-14T09:30:48.01540488Z", "wallets/spec/type": "wallets.primary", "wallets/custom_data": {}, "wallets/balances/name": "main"}, "targetId": "wallets:15b7a366c6e9473f96276803ef585ae9:main", "targetType": "ACCOUNT"}'); -INSERT INTO "wallets-002".log (id, type, hash, date, data) VALUES (1, 'NEW_TRANSACTION', '1f2d8e75e937cee1c91e0a2696f5fbe59947d77ad568cf45c58a01430acb5f0b', '2023-12-14 09:32:04+00', '{"txid": 0, "metadata": {"wallets/custom_data": {}, "wallets/transaction": "true"}, "postings": [{"asset": "USD/2", "amount": 100, "source": "world", "destination": "wallets:15b7a366c6e9473f96276803ef585ae9:main"}], "reference": "", "timestamp": "2023-12-14T09:32:04Z"}'); -INSERT INTO "wallets-002".log (id, type, hash, date, data) VALUES (2, 'SET_METADATA', '3665750bbbe64e79c4631927e9399a8c7f817b55d572ef41cfd9714bd679db7d', '2023-12-14 09:32:38+00', '{"metadata": {"wallets/id": "71e6788a-d195-4139-bec5-c3e35ee4a2dc", "wallets/name": "wallet2", "wallets/balances": "true", "wallets/createdAt": "2023-12-14T09:32:38.001913219Z", "wallets/spec/type": "wallets.primary", "wallets/custom_data": {"catgory": "gold"}, "wallets/balances/name": "main"}, "targetId": "wallets:71e6788ad1954139bec5c3e35ee4a2dc:main", "targetType": "ACCOUNT"}'); - - --- --- Data for Name: mapping; Type: TABLE DATA; Schema: wallets-002 --- - - - --- --- Data for Name: migrations; Type: TABLE DATA; Schema: wallets-002 --- - -INSERT INTO "wallets-002".migrations (version, date) 
VALUES ('0', '2023-12-13T18:16:36Z'); -INSERT INTO "wallets-002".migrations (version, date) VALUES ('1', '2023-12-13T18:16:36Z'); -INSERT INTO "wallets-002".migrations (version, date) VALUES ('2', '2023-12-13T18:16:36Z'); -INSERT INTO "wallets-002".migrations (version, date) VALUES ('3', '2023-12-13T18:16:36Z'); -INSERT INTO "wallets-002".migrations (version, date) VALUES ('4', '2023-12-13T18:16:36Z'); -INSERT INTO "wallets-002".migrations (version, date) VALUES ('5', '2023-12-13T18:16:36Z'); -INSERT INTO "wallets-002".migrations (version, date) VALUES ('6', '2023-12-13T18:16:36Z'); -INSERT INTO "wallets-002".migrations (version, date) VALUES ('7', '2023-12-13T18:16:36Z'); -INSERT INTO "wallets-002".migrations (version, date) VALUES ('8', '2023-12-13T18:16:36Z'); -INSERT INTO "wallets-002".migrations (version, date) VALUES ('9', '2023-12-13T18:16:36Z'); -INSERT INTO "wallets-002".migrations (version, date) VALUES ('10', '2023-12-13T18:16:36Z'); -INSERT INTO "wallets-002".migrations (version, date) VALUES ('11', '2023-12-13T18:16:36Z'); -INSERT INTO "wallets-002".migrations (version, date) VALUES ('12', '2023-12-13T18:16:36Z'); -INSERT INTO "wallets-002".migrations (version, date) VALUES ('13', '2023-12-13T18:16:36Z'); -INSERT INTO "wallets-002".migrations (version, date) VALUES ('14', '2023-12-13T18:16:36Z'); -INSERT INTO "wallets-002".migrations (version, date) VALUES ('15', '2023-12-13T18:16:36Z'); -INSERT INTO "wallets-002".migrations (version, date) VALUES ('16', '2023-12-13T18:16:36Z'); -INSERT INTO "wallets-002".migrations (version, date) VALUES ('17', '2023-12-13T18:16:36Z'); -INSERT INTO "wallets-002".migrations (version, date) VALUES ('18', '2023-12-13T18:16:36Z'); -INSERT INTO "wallets-002".migrations (version, date) VALUES ('19', '2023-12-13T18:16:36Z'); -INSERT INTO "wallets-002".migrations (version, date) VALUES ('20', '2023-12-13T18:16:36Z'); - - --- --- Data for Name: postings; Type: TABLE DATA; Schema: wallets-002 --- - -INSERT INTO "wallets-002".postings (txid, posting_index, source, destination) VALUES (0, 0, '["world"]', '["wallets", "15b7a366c6e9473f96276803ef585ae9", "main"]'); - - --- --- Data for Name: transactions; Type: TABLE DATA; Schema: wallets-002 --- - -INSERT INTO "wallets-002".transactions (id, "timestamp", reference, hash, postings, metadata, pre_commit_volumes, post_commit_volumes) VALUES (0, '2023-12-14 09:32:04+00', NULL, NULL, '[{"asset": "USD/2", "amount": 100, "source": "world", "destination": "wallets:15b7a366c6e9473f96276803ef585ae9:main"}]', '{"wallets/custom_data": {}, "wallets/transaction": "true"}', '{"world": {"USD/2": {"input": 0, "output": 0, "balance": 0}}, "wallets:15b7a366c6e9473f96276803ef585ae9:main": {"USD/2": {"input": 0, "output": 0, "balance": 0}}}', '{"world": {"USD/2": {"input": 0, "output": 100, "balance": -100}}, "wallets:15b7a366c6e9473f96276803ef585ae9:main": {"USD/2": {"input": 100, "output": 0, "balance": 100}}}'); - - --- --- Data for Name: volumes; Type: TABLE DATA; Schema: wallets-002 --- - -INSERT INTO "wallets-002".volumes (account, asset, input, output, account_json) VALUES ('world', 'USD/2', 0, 100, '["world"]'); -INSERT INTO "wallets-002".volumes (account, asset, input, output, account_json) VALUES ('wallets:15b7a366c6e9473f96276803ef585ae9:main', 'USD/2', 100, 0, '["wallets", "15b7a366c6e9473f96276803ef585ae9", "main"]'); - - --- --- Name: log_seq; Type: SEQUENCE SET; Schema: default --- - -SELECT pg_catalog.setval('"default".log_seq', 0, false); - - --- --- Name: log_seq; Type: SEQUENCE SET; Schema: wallets-002 --- 
- -SELECT pg_catalog.setval('"wallets-002".log_seq', 0, false); - - --- --- Name: configuration configuration_pkey; Type: CONSTRAINT; Schema: _system --- - -ALTER TABLE ONLY _system.configuration - ADD CONSTRAINT configuration_pkey PRIMARY KEY (key); - - --- --- Name: ledgers ledgers_pkey; Type: CONSTRAINT; Schema: _system --- - -ALTER TABLE ONLY _system.ledgers - ADD CONSTRAINT ledgers_pkey PRIMARY KEY (ledger); - - --- --- Name: accounts accounts_address_key; Type: CONSTRAINT; Schema: default --- - -ALTER TABLE ONLY "default".accounts - ADD CONSTRAINT accounts_address_key UNIQUE (address); - - --- --- Name: idempotency idempotency_pkey; Type: CONSTRAINT; Schema: default --- - -ALTER TABLE ONLY "default".idempotency - ADD CONSTRAINT idempotency_pkey PRIMARY KEY (key); - - --- --- Name: log log_id_key; Type: CONSTRAINT; Schema: default --- - -ALTER TABLE ONLY "default".log - ADD CONSTRAINT log_id_key UNIQUE (id); - - --- --- Name: mapping mapping_mapping_id_key; Type: CONSTRAINT; Schema: default --- - -ALTER TABLE ONLY "default".mapping - ADD CONSTRAINT mapping_mapping_id_key UNIQUE (mapping_id); - - --- --- Name: migrations migrations_version_key; Type: CONSTRAINT; Schema: default --- - -ALTER TABLE ONLY "default".migrations - ADD CONSTRAINT migrations_version_key UNIQUE (version); - - --- --- Name: transactions transactions_id_key; Type: CONSTRAINT; Schema: default --- - -ALTER TABLE ONLY "default".transactions - ADD CONSTRAINT transactions_id_key UNIQUE (id); - - --- --- Name: transactions transactions_reference_key; Type: CONSTRAINT; Schema: default --- - -ALTER TABLE ONLY "default".transactions - ADD CONSTRAINT transactions_reference_key UNIQUE (reference); - - --- --- Name: volumes volumes_account_asset_key; Type: CONSTRAINT; Schema: default --- - -ALTER TABLE ONLY "default".volumes - ADD CONSTRAINT volumes_account_asset_key UNIQUE (account, asset); - - --- --- Name: accounts accounts_address_key; Type: CONSTRAINT; Schema: wallets-002 --- - -ALTER TABLE ONLY "wallets-002".accounts - ADD CONSTRAINT accounts_address_key UNIQUE (address); - - --- --- Name: idempotency idempotency_pkey; Type: CONSTRAINT; Schema: wallets-002 --- - -ALTER TABLE ONLY "wallets-002".idempotency - ADD CONSTRAINT idempotency_pkey PRIMARY KEY (key); - - --- --- Name: log log_id_key; Type: CONSTRAINT; Schema: wallets-002 --- - -ALTER TABLE ONLY "wallets-002".log - ADD CONSTRAINT log_id_key UNIQUE (id); - - --- --- Name: mapping mapping_mapping_id_key; Type: CONSTRAINT; Schema: wallets-002 --- - -ALTER TABLE ONLY "wallets-002".mapping - ADD CONSTRAINT mapping_mapping_id_key UNIQUE (mapping_id); - - --- --- Name: migrations migrations_version_key; Type: CONSTRAINT; Schema: wallets-002 --- - -ALTER TABLE ONLY "wallets-002".migrations - ADD CONSTRAINT migrations_version_key UNIQUE (version); - - --- --- Name: transactions transactions_id_key; Type: CONSTRAINT; Schema: wallets-002 --- - -ALTER TABLE ONLY "wallets-002".transactions - ADD CONSTRAINT transactions_id_key UNIQUE (id); - - --- --- Name: transactions transactions_reference_key; Type: CONSTRAINT; Schema: wallets-002 --- - -ALTER TABLE ONLY "wallets-002".transactions - ADD CONSTRAINT transactions_reference_key UNIQUE (reference); - - --- --- Name: volumes volumes_account_asset_key; Type: CONSTRAINT; Schema: wallets-002 --- - -ALTER TABLE ONLY "wallets-002".volumes - ADD CONSTRAINT volumes_account_asset_key UNIQUE (account, asset); - - --- --- Name: accounts_address_json; Type: INDEX; Schema: default --- - -CREATE INDEX accounts_address_json ON 
"default".accounts USING gin (address_json); - - --- --- Name: accounts_array_length; Type: INDEX; Schema: default --- - -CREATE INDEX accounts_array_length ON "default".accounts USING btree (jsonb_array_length(address_json)); - - --- --- Name: postings_addresses; Type: INDEX; Schema: default --- - -CREATE INDEX postings_addresses ON "default".transactions USING gin (postings); - - --- --- Name: postings_array_length_dst; Type: INDEX; Schema: default --- - -CREATE INDEX postings_array_length_dst ON "default".postings USING btree (jsonb_array_length(destination)); - - --- --- Name: postings_array_length_src; Type: INDEX; Schema: default --- - -CREATE INDEX postings_array_length_src ON "default".postings USING btree (jsonb_array_length(source)); - - --- --- Name: postings_dest; Type: INDEX; Schema: default --- - -CREATE INDEX postings_dest ON "default".postings USING gin (destination); - - --- --- Name: postings_src; Type: INDEX; Schema: default --- - -CREATE INDEX postings_src ON "default".postings USING gin (source); - - --- --- Name: postings_txid; Type: INDEX; Schema: default --- - -CREATE INDEX postings_txid ON "default".postings USING btree (txid); - - --- --- Name: volumes_account_json; Type: INDEX; Schema: default --- - -CREATE INDEX volumes_account_json ON "default".volumes USING gin (account_json); - - --- --- Name: volumes_array_length; Type: INDEX; Schema: default --- - -CREATE INDEX volumes_array_length ON "default".volumes USING btree (jsonb_array_length(account_json)); - - --- --- Name: accounts_address_json; Type: INDEX; Schema: wallets-002 --- - -CREATE INDEX accounts_address_json ON "wallets-002".accounts USING gin (address_json); - - --- --- Name: accounts_array_length; Type: INDEX; Schema: wallets-002 --- - -CREATE INDEX accounts_array_length ON "wallets-002".accounts USING btree (jsonb_array_length(address_json)); - - --- --- Name: postings_addresses; Type: INDEX; Schema: wallets-002 --- - -CREATE INDEX postings_addresses ON "wallets-002".transactions USING gin (postings); - - --- --- Name: postings_array_length_dst; Type: INDEX; Schema: wallets-002 --- - -CREATE INDEX postings_array_length_dst ON "wallets-002".postings USING btree (jsonb_array_length(destination)); - - --- --- Name: postings_array_length_src; Type: INDEX; Schema: wallets-002 --- - -CREATE INDEX postings_array_length_src ON "wallets-002".postings USING btree (jsonb_array_length(source)); - - --- --- Name: postings_dest; Type: INDEX; Schema: wallets-002 --- - -CREATE INDEX postings_dest ON "wallets-002".postings USING gin (destination); - - --- --- Name: postings_src; Type: INDEX; Schema: wallets-002 --- - -CREATE INDEX postings_src ON "wallets-002".postings USING gin (source); - - --- --- Name: postings_txid; Type: INDEX; Schema: wallets-002 --- - -CREATE INDEX postings_txid ON "wallets-002".postings USING btree (txid); - - --- --- Name: volumes_account_json; Type: INDEX; Schema: wallets-002 --- - -CREATE INDEX volumes_account_json ON "wallets-002".volumes USING gin (account_json); - - --- --- Name: volumes_array_length; Type: INDEX; Schema: wallets-002 --- - -CREATE INDEX volumes_array_length ON "wallets-002".volumes USING btree (jsonb_array_length(account_json)); - - --- --- PostgreSQL database dump complete --- - diff --git a/internal/testing/compare.go b/internal/testing/compare.go deleted file mode 100644 index 0e978c8c2..000000000 --- a/internal/testing/compare.go +++ /dev/null @@ -1,20 +0,0 @@ -package testing - -import ( - "math/big" - "testing" - - "github.com/google/go-cmp/cmp" - 
"github.com/stretchr/testify/require" -) - -func bigIntComparer(v1 *big.Int, v2 *big.Int) bool { - return v1.String() == v2.String() -} - -func RequireEqual(t *testing.T, expected, actual any) { - t.Helper() - if diff := cmp.Diff(expected, actual, cmp.Comparer(bigIntComparer)); diff != "" { - require.Failf(t, "Content not matching", diff) - } -} diff --git a/internal/tracing/tracing.go b/internal/tracing/tracing.go new file mode 100644 index 000000000..635c07800 --- /dev/null +++ b/internal/tracing/tracing.go @@ -0,0 +1,57 @@ +package tracing + +import ( + "context" + "github.com/formancehq/go-libs/v2/time" + "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/metric" + + "go.opentelemetry.io/otel/trace" +) + +func TraceWithMetric[RET any]( + ctx context.Context, + operationName string, + tracer trace.Tracer, + histogram metric.Int64Histogram, + fn func(ctx context.Context) (RET, error), + finalizers ...func(ctx context.Context, ret RET), +) (RET, error) { + var zeroRet RET + + return Trace(ctx, tracer, operationName, func(ctx context.Context) (RET, error) { + now := time.Now() + ret, err := fn(ctx) + if err != nil { + trace.SpanFromContext(ctx).RecordError(err) + return zeroRet, err + } + + latency := time.Since(now) + histogram.Record(ctx, latency.Milliseconds()) + trace.SpanFromContext(ctx).SetAttributes(attribute.String("latency", latency.String())) + + for _, finalizer := range finalizers { + finalizer(ctx, ret) + } + + return ret, nil + }) +} + +func Trace[RET any](ctx context.Context, tracer trace.Tracer, name string, fn func(ctx context.Context) (RET, error)) (RET, error) { + ctx, trace := tracer.Start(ctx, name) + defer trace.End() + + return fn(ctx) +} + +func NoResult(fn func(ctx context.Context) error) func(ctx context.Context) (any, error) { + return func(ctx context.Context) (any, error) { + return nil, fn(ctx) + } +} + +func SkipResult[RET any](_ RET, err error) error { + return err +} diff --git a/internal/transaction.go b/internal/transaction.go index 597cb4565..c577f6b6d 100644 --- a/internal/transaction.go +++ b/internal/transaction.go @@ -1,19 +1,15 @@ package ledger import ( + "encoding/json" + "github.com/formancehq/go-libs/v2/time" + "github.com/invopop/jsonschema" + "github.com/uptrace/bun" "math/big" + "slices" + "sort" - "github.com/formancehq/go-libs/time" - - "github.com/formancehq/go-libs/pointer" - - "github.com/pkg/errors" - - "github.com/formancehq/go-libs/metadata" -) - -var ( - ErrNoPostings = errors.New("invalid payload: should contain either postings or script") + "github.com/formancehq/go-libs/v2/metadata" ) type Transactions struct { @@ -21,15 +17,16 @@ type Transactions struct { } type TransactionData struct { - Postings Postings `json:"postings"` - Metadata metadata.Metadata `json:"metadata"` - Timestamp time.Time `json:"timestamp"` - Reference string `json:"reference,omitempty"` + Postings Postings `json:"postings" bun:"postings,type:jsonb"` + Metadata metadata.Metadata `json:"metadata" bun:"metadata,type:jsonb,default:'{}'"` + Timestamp time.Time `json:"timestamp" bun:"timestamp,type:timestamp without time zone,nullzero"` + Reference string `json:"reference,omitempty" bun:"reference,type:varchar,unique,nullzero"` + InsertedAt time.Time `json:"insertedAt,omitempty" bun:"inserted_at,type:timestamp without time zone,nullzero"` } -func (d TransactionData) WithPostings(postings ...Posting) TransactionData { - d.Postings = append(d.Postings, postings...) 
- return d +func (data TransactionData) WithPostings(postings ...Posting) TransactionData { + data.Postings = append(data.Postings, postings...) + return data } func NewTransactionData() TransactionData { @@ -38,118 +35,201 @@ func NewTransactionData() TransactionData { } } -func (t *TransactionData) Reverse() TransactionData { - postings := make(Postings, len(t.Postings)) - copy(postings, t.Postings) - postings.Reverse() +type Transaction struct { + bun.BaseModel `bun:"table:transactions,alias:transactions"` - return TransactionData{ - Postings: postings, - } + TransactionData + ID int `json:"id" bun:"id,type:numeric"` + RevertedAt *time.Time `json:"revertedAt,omitempty" bun:"reverted_at,type:timestamp without time zone"` + // PostCommitVolumes are the volumes of each account/asset after a transaction has been committed. + // Those volumes will never change as those are computed in flight. + PostCommitVolumes PostCommitVolumes `json:"postCommitVolumes,omitempty" bun:"post_commit_volumes,type:jsonb"` + // PostCommitEffectiveVolumes are the volumes of each account/asset after the transaction TransactionData.Timestamp. + // Those volumes are also computed in flight, but can be updated if a transaction is inserted in the past. + PostCommitEffectiveVolumes PostCommitVolumes `json:"postCommitEffectiveVolumes,omitempty" bun:"post_commit_effective_volumes,type:jsonb,scanonly"` } -func (d TransactionData) WithDate(now time.Time) TransactionData { - d.Timestamp = now - - return d +func (Transaction) JSONSchemaExtend(schema *jsonschema.Schema) { + schema.Properties.Set("reverted", &jsonschema.Schema{ + Type: "boolean", + }) + postCommitVolumesSchema, _ := schema.Properties.Get("postCommitVolumes") + schema.Properties.Set("preCommitVolumes", postCommitVolumesSchema) + schema.Properties.Set("preCommitEffectiveVolumes", postCommitVolumesSchema) } -type Transaction struct { - TransactionData - ID *big.Int `json:"id"` - Reverted bool `json:"reverted"` +func (tx Transaction) Reverse() Transaction { + ret := NewTransaction().WithPostings(tx.Postings.Reverse()...) + return ret } -func (t *Transaction) WithPostings(postings ...Posting) *Transaction { - t.TransactionData = t.TransactionData.WithPostings(postings...) - return t +func (tx Transaction) WithPostings(postings ...Posting) Transaction { + tx.TransactionData = tx.TransactionData.WithPostings(postings...) + return tx } -func (t *Transaction) WithReference(ref string) *Transaction { - t.Reference = ref - return t +func (tx Transaction) WithReference(ref string) Transaction { + tx.Reference = ref + return tx } -func (t *Transaction) WithDate(ts time.Time) *Transaction { - t.Timestamp = ts - return t +func (tx Transaction) WithTimestamp(ts time.Time) Transaction { + tx.Timestamp = ts + return tx } -func (t *Transaction) WithIDUint64(id uint64) *Transaction { - t.ID = big.NewInt(int64(id)) - return t +func (tx Transaction) WithMetadata(m metadata.Metadata) Transaction { + tx.Metadata = m + return tx } -func (t *Transaction) WithID(id *big.Int) *Transaction { - t.ID = id - return t -} +func (tx Transaction) WithInsertedAt(date time.Time) Transaction { + tx.InsertedAt = date -func (t *Transaction) WithMetadata(m metadata.Metadata) *Transaction { - t.Metadata = m - return t + return tx } -func NewTransaction() *Transaction { - return &Transaction{ - ID: big.NewInt(0), - TransactionData: NewTransactionData(). 
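The builder methods above now use value receivers and return a fresh Transaction, so chained calls never mutate shared state. A small sketch, assuming it sits in the same ledger package; the reference "order-1234" and the metadata key/value are illustrative only:

package ledger

import (
	"math/big"

	"github.com/formancehq/go-libs/v2/metadata"
	"github.com/formancehq/go-libs/v2/time"
)

// newExampleTransaction chains the value-receiver builders; each call copies
// the Transaction, so intermediate values stay untouched.
func newExampleTransaction() Transaction {
	return NewTransaction().
		WithPostings(NewPosting("world", "users:001", "USD/2", big.NewInt(100))).
		WithTimestamp(time.Now()).
		WithReference("order-1234").
		WithMetadata(metadata.Metadata{"channel": "web"})
}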
- WithDate(time.Now()), +func (tx Transaction) InvolvedAccountAndAssets() map[string][]string { + ret := make(map[string][]string) + for _, posting := range tx.Postings { + ret[posting.Source] = append(ret[posting.Source], posting.Asset) + ret[posting.Destination] = append(ret[posting.Destination], posting.Asset) } -} -type ExpandedTransaction struct { - Transaction - PreCommitVolumes AccountsAssetsVolumes `json:"preCommitVolumes,omitempty"` - PostCommitVolumes AccountsAssetsVolumes `json:"postCommitVolumes,omitempty"` - PreCommitEffectiveVolumes AccountsAssetsVolumes `json:"preCommitEffectiveVolumes,omitempty"` - PostCommitEffectiveVolumes AccountsAssetsVolumes `json:"postCommitEffectiveVolumes,omitempty"` + for account, assets := range ret { + sort.Strings(assets) + ret[account] = slices.Compact(assets) + } + + return ret } -func (t *ExpandedTransaction) AppendPosting(p Posting) { - t.Postings = append(t.Postings, p) +func (tx Transaction) InvolvedAccounts() []string { + ret := make([]string, 0) + for _, posting := range tx.Postings { + ret = append(ret, posting.Source, posting.Destination) + } + + sort.Strings(ret) + + return slices.Compact(ret) } -func ExpandTransaction(tx *Transaction, preCommitVolumes AccountsAssetsVolumes) ExpandedTransaction { - postCommitVolumes := preCommitVolumes.Copy() +func (tx Transaction) VolumeUpdates() []AccountsVolumes { + aggregatedVolumes := make(map[string]map[string][]Posting) for _, posting := range tx.Postings { - preCommitVolumes.AddInput(posting.Destination, posting.Asset, Zero) - preCommitVolumes.AddOutput(posting.Source, posting.Asset, Zero) - postCommitVolumes.AddOutput(posting.Source, posting.Asset, posting.Amount) - postCommitVolumes.AddInput(posting.Destination, posting.Asset, posting.Amount) + if _, ok := aggregatedVolumes[posting.Source]; !ok { + aggregatedVolumes[posting.Source] = make(map[string][]Posting) + } + aggregatedVolumes[posting.Source][posting.Asset] = append(aggregatedVolumes[posting.Source][posting.Asset], posting) + + if posting.Source == posting.Destination { + continue + } + + if _, ok := aggregatedVolumes[posting.Destination]; !ok { + aggregatedVolumes[posting.Destination] = make(map[string][]Posting) + } + aggregatedVolumes[posting.Destination][posting.Asset] = append(aggregatedVolumes[posting.Destination][posting.Asset], posting) } - return ExpandedTransaction{ - Transaction: *tx, - PreCommitVolumes: preCommitVolumes, - PostCommitVolumes: postCommitVolumes, + + ret := make([]AccountsVolumes, 0) + for account, movesByAsset := range aggregatedVolumes { + for asset, postings := range movesByAsset { + volumes := NewEmptyVolumes() + for _, posting := range postings { + if account == posting.Source { + volumes.Output.Add(volumes.Output, posting.Amount) + } + if account == posting.Destination { + volumes.Input.Add(volumes.Input, posting.Amount) + } + } + + ret = append(ret, AccountsVolumes{ + Account: account, + Asset: asset, + Input: volumes.Input, + Output: volumes.Output, + }) + } } -} -type TransactionRequest struct { - Postings Postings `json:"postings"` - Script ScriptV1 `json:"script"` - Timestamp time.Time `json:"timestamp"` - Reference string `json:"reference"` - Metadata metadata.Metadata `json:"metadata" swaggertype:"object"` + slices.SortStableFunc(ret, func(a, b AccountsVolumes) int { + switch { + case a.Account < b.Account: + return -1 + case a.Account > b.Account: + return 1 + default: + switch { + case a.Asset < b.Asset: + return -1 + case a.Asset > b.Asset: + return 1 + default: + return 0 + } + } + }) + + 
return ret } -func (req *TransactionRequest) ToRunScript() *RunScript { +func (tx Transaction) MarshalJSON() ([]byte, error) { + type Aux Transaction + type Ret struct { + Aux - if len(req.Postings) > 0 { - txData := TransactionData{ - Postings: req.Postings, - Timestamp: req.Timestamp, - Reference: req.Reference, - Metadata: req.Metadata, - } + Reverted bool `json:"reverted"` + PreCommitVolumes PostCommitVolumes `json:"preCommitVolumes,omitempty"` + PreCommitEffectiveVolumes PostCommitVolumes `json:"preCommitEffectiveVolumes,omitempty"` + } - return pointer.For(TxToScriptData(txData, false)) + var ( + preCommitVolumes PostCommitVolumes + preCommitEffectiveVolumes PostCommitVolumes + ) + if len(tx.PostCommitVolumes) > 0 { + preCommitVolumes = tx.PostCommitVolumes.Copy() + for _, posting := range tx.Postings { + preCommitVolumes.AddOutput(posting.Source, posting.Asset, big.NewInt(0).Neg(posting.Amount)) + preCommitVolumes.AddInput(posting.Destination, posting.Asset, big.NewInt(0).Neg(posting.Amount)) + } } + if len(tx.PostCommitEffectiveVolumes) > 0 { + preCommitEffectiveVolumes = tx.PostCommitEffectiveVolumes.Copy() + for _, posting := range tx.Postings { + preCommitEffectiveVolumes.AddOutput(posting.Source, posting.Asset, big.NewInt(0).Neg(posting.Amount)) + preCommitEffectiveVolumes.AddInput(posting.Destination, posting.Asset, big.NewInt(0).Neg(posting.Amount)) + } + } + + return json.Marshal(&Ret{ + Aux: Aux(tx), + Reverted: tx.RevertedAt != nil && !tx.RevertedAt.IsZero(), + PreCommitVolumes: preCommitVolumes, + PreCommitEffectiveVolumes: preCommitEffectiveVolumes, + }) +} + +func (tx Transaction) IsReverted() bool { + return tx.RevertedAt != nil && !tx.RevertedAt.IsZero() +} + +func (tx Transaction) WithRevertedAt(timestamp time.Time) Transaction { + tx.RevertedAt = ×tamp + return tx +} + +func (tx Transaction) WithPostCommitEffectiveVolumes(volumes PostCommitVolumes) Transaction { + tx.PostCommitEffectiveVolumes = volumes + + return tx +} - return &RunScript{ - Script: req.Script.ToCore(), - Timestamp: req.Timestamp, - Reference: req.Reference, - Metadata: req.Metadata, +func NewTransaction() Transaction { + return Transaction{ + TransactionData: NewTransactionData(), } } diff --git a/internal/transaction_test.go b/internal/transaction_test.go index 7a98cb022..2b8966a9d 100644 --- a/internal/transaction_test.go +++ b/internal/transaction_test.go @@ -1,154 +1,168 @@ package ledger import ( + "encoding/base64" + "github.com/formancehq/go-libs/v2/metadata" "math/big" "testing" - "github.com/formancehq/go-libs/metadata" + "github.com/formancehq/go-libs/v2/time" + "github.com/stretchr/testify/require" ) -func TestReverseTransaction(t *testing.T) { - t.Run("1 posting", func(t *testing.T) { - tx := &ExpandedTransaction{ - Transaction: Transaction{ - TransactionData: TransactionData{ - Postings: Postings{ - { - Source: "world", - Destination: "users:001", - Amount: big.NewInt(100), - Asset: "COIN", - }, - }, - Reference: "foo", - }, - }, - } +func TestTransactionsReverse(t *testing.T) { + tx := NewTransaction(). + WithPostings( + NewPosting("world", "users:001", "COIN", big.NewInt(100)), + NewPosting("users:001", "payments:001", "COIN", big.NewInt(100)), + ) - expected := TransactionData{ - Postings: Postings{ - { - Source: "users:001", - Destination: "world", - Amount: big.NewInt(100), - Asset: "COIN", - }, - }, - } - require.Equal(t, expected, tx.Reverse()) - }) + expected := NewTransaction(). 
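MarshalJSON above rebuilds preCommitVolumes and preCommitEffectiveVolumes on the fly by subtracting every posting from the stored post-commit volumes, which keeps the previously expanded JSON shape without persisting extra columns. A worked sketch of that arithmetic, assuming the same ledger package:

package ledger

import "math/big"

// examplePreCommitVolumes undoes a single 100 USD/2 posting from world to
// users:001, mirroring the loop inside Transaction.MarshalJSON.
func examplePreCommitVolumes() PostCommitVolumes {
	post := PostCommitVolumes{
		"world":     {"USD/2": NewVolumesInt64(0, 100)},
		"users:001": {"USD/2": NewVolumesInt64(100, 0)},
	}
	posting := NewPosting("world", "users:001", "USD/2", big.NewInt(100))

	pre := post.Copy()
	pre.AddOutput(posting.Source, posting.Asset, new(big.Int).Neg(posting.Amount))
	pre.AddInput(posting.Destination, posting.Asset, new(big.Int).Neg(posting.Amount))

	// pre is now all zeroes: the state of both accounts before the transaction.
	return pre
}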
+ WithPostings( + NewPosting("payments:001", "users:001", "COIN", big.NewInt(100)), + NewPosting("users:001", "world", "COIN", big.NewInt(100)), + ). + WithTimestamp(tx.Timestamp) - t.Run("2 postings", func(t *testing.T) { - tx := &ExpandedTransaction{ - Transaction: Transaction{ - TransactionData: TransactionData{ - Postings: Postings{ - { - Source: "world", - Destination: "users:001", - Amount: big.NewInt(100), - Asset: "COIN", - }, - { - Source: "users:001", - Destination: "payments:001", - Amount: big.NewInt(100), - Asset: "COIN", - }, - }, - Reference: "foo", - }, - }, - } + reversed := tx.Reverse() + reversed.Timestamp = time.Time{} + expected.Timestamp = time.Time{} + require.Equal(t, expected, reversed) +} - expected := TransactionData{ - Postings: Postings{ - { - Source: "payments:001", - Destination: "users:001", - Amount: big.NewInt(100), - Asset: "COIN", - }, - { - Source: "users:001", - Destination: "world", - Amount: big.NewInt(100), - Asset: "COIN", - }, - }, - } - require.Equal(t, expected, tx.Reverse()) - }) +func TestTransactionsVolumesUpdate(t *testing.T) { + tx := NewTransaction(). + WithPostings( + NewPosting("world", "users:001", "COIN", big.NewInt(100)), + NewPosting("users:001", "payments:001", "COIN", big.NewInt(100)), + NewPosting("payments:001", "world", "COIN", big.NewInt(100)), + ) - t.Run("3 postings", func(t *testing.T) { - tx := &ExpandedTransaction{ - Transaction: Transaction{ - TransactionData: TransactionData{ - Postings: Postings{ - { - Source: "world", - Destination: "users:001", - Amount: big.NewInt(100), - Asset: "COIN", - }, - { - Source: "users:001", - Destination: "payments:001", - Amount: big.NewInt(100), - Asset: "COIN", - }, - { - Source: "payments:001", - Destination: "alice", - Amount: big.NewInt(100), - Asset: "COIN", - }, - }, - Reference: "foo", - }, - }, - } + require.Equal(t, []AccountsVolumes{ + { + Account: "payments:001", + Asset: "COIN", + Input: big.NewInt(100), + Output: big.NewInt(100), + }, + { + Account: "users:001", + Asset: "COIN", + Input: big.NewInt(100), + Output: big.NewInt(100), + }, + { + Account: "world", + Asset: "COIN", + Input: big.NewInt(100), + Output: big.NewInt(100), + }, + }, tx.VolumeUpdates()) +} - expected := TransactionData{ - Postings: Postings{ - { - Source: "alice", - Destination: "payments:001", - Amount: big.NewInt(100), - Asset: "COIN", - }, - { - Source: "payments:001", - Destination: "users:001", - Amount: big.NewInt(100), - Asset: "COIN", +func TestHash(t *testing.T) { + t.Parallel() + + type testCase struct { + name string + log Log + expectedHash string + expectedHashRetry string + } + + refDate := time.Time{} + + for _, tc := range []testCase{ + { + name: "new transaction", + log: NewLog(CreatedTransaction{ + Transaction: NewTransaction(), + AccountMetadata: make(AccountMetadata), + }), + expectedHash: "RjKsuJOuPYeFljGJlXZ5nk4_21apQY_k8daJamyZTVI=", + expectedHashRetry: "klWyIDudjjWU-BNHjRcFzTYHpU2CWi8lEVdVYjizHKo=", + }, + { + name: "new transaction with reference", + log: NewLog(CreatedTransaction{ + Transaction: NewTransaction().WithReference("foo"), + AccountMetadata: make(AccountMetadata), + }), + expectedHash: "SZ7XX-W_odawRCRvAmZkF0U_YnHDKY0Ku9zG_oaRgA4=", + expectedHashRetry: "KWxResFbWNf2xoH5u1gKggQkxbXSG7wdrzrKVVBk6BE=", + }, + { + name: "new transaction with nil account metadata", + log: NewLog(CreatedTransaction{ + Transaction: NewTransaction(), + AccountMetadata: nil, + }), + expectedHash: "I4IOKCBxlOWAeTSwj52ZElJAWc88F1UkA63QtJceshw=", + expectedHashRetry: 
"2cGS1rsuOcbHNqyeiOAx8mMBSvpNSFl_u_dSANI2BIM=", + }, + { + name: "saved metadata on account", + log: NewLog(SavedMetadata{ + TargetType: MetaTargetTypeAccount, + TargetID: "world", + Metadata: metadata.Metadata{ + "foo": "bar", }, - { - Source: "users:001", - Destination: "world", - Amount: big.NewInt(100), - Asset: "COIN", + }), + expectedHash: "6TifTCapZm6xc2EaazWo-PTdruDa7DYtAn1SU6zS4uI=", + expectedHashRetry: "a_pPkeX87fuTPof7SCIovxCbDF3EXvhASqrcXtzqoTs=", + }, + { + name: "saved metadata on transaction", + log: NewLog(SavedMetadata{ + TargetType: MetaTargetTypeTransaction, + TargetID: big.NewInt(1), + Metadata: metadata.Metadata{ + "foo": "bar", }, - }, - } - require.Equal(t, expected, tx.Reverse()) - }) -} + }), + expectedHash: "zH6jHi4kW8HvZnqhnpBxga-R-WPkuFaTCiFn8vgR0is=", + expectedHashRetry: "y-zQAnOwKdfqMetWoi6btTXuix5JgWkMEGP2a0z3YbY=", + }, + { + name: "deleted metadata on account", + log: NewLog(DeletedMetadata{ + TargetType: MetaTargetTypeAccount, + TargetID: "world", + Key: "foo", + }), + expectedHash: "e5Hb2rvqnhr96jCfoek69Fw7iYgoKoCYtl-qstYBvIg=", + expectedHashRetry: "t0SizlUMhLc5RkF9849zQZ34JPSom29WRVnBXlDM-O8=", + }, + { + name: "deleted metadata on transaction", + log: NewLog(DeletedMetadata{ + TargetType: MetaTargetTypeTransaction, + TargetID: big.NewInt(1), + Key: "foo", + }), + expectedHash: "3TAvOvastJtB_KxvccNFpuXp57MEv8kSR3NiUf7zosg=", + expectedHashRetry: "izQj6mfY65ePSC9utaiAftBnsPVwP8PaHPdoi7ruSN4=", + }, + { + name: "reverted transaction", + log: NewLog(RevertedTransaction{ + RevertedTransaction: Transaction{ID: 1}, + RevertTransaction: NewTransaction().WithTimestamp(refDate), + }), + expectedHash: "14SSRP9Nf7zxJWPSH7KOz15favZmBhyWZ59V-WQZx18=", + expectedHashRetry: "Re0FjRP34EBKzJTp4emmnVC1OKwd9f4mxVzbnTrvPd4=", + }, + } { + t.Run(tc.name, func(t *testing.T) { + t.Parallel() -func BenchmarkHash(b *testing.B) { - logs := make([]ChainedLog, b.N) - var previous *ChainedLog - for i := 0; i < b.N; i++ { - newLog := NewTransactionLog(NewTransaction().WithPostings( - NewPosting("world", "bank", "USD", big.NewInt(100)), - ), map[string]metadata.Metadata{}).ChainLog(previous) - previous = newLog - logs = append(logs, *newLog) - } + chainedLog := tc.log.ChainLog(nil) + require.Equal(t, tc.expectedHash, base64.URLEncoding.EncodeToString(chainedLog.Hash)) - b.ResetTimer() - for i := 1; i < b.N; i++ { - logs[i].ComputeHash(&logs[i-1]) + hashedAgain := tc.log.ChainLog(&chainedLog) + require.Equal(t, tc.expectedHashRetry, base64.URLEncoding.EncodeToString(hashedAgain.Hash)) + }) } } diff --git a/internal/volumes.go b/internal/volumes.go index c26de1546..3abcca42b 100644 --- a/internal/volumes.go +++ b/internal/volumes.go @@ -3,7 +3,10 @@ package ledger import ( "database/sql/driver" "encoding/json" + "fmt" + "github.com/invopop/jsonschema" "math/big" + "strings" ) type Volumes struct { @@ -11,54 +14,47 @@ type Volumes struct { Output *big.Int `json:"output"` } -func (v Volumes) CopyWithZerosIfNeeded() *Volumes { - var input *big.Int - if v.Input == nil { - input = &big.Int{} - } else { - input = new(big.Int).Set(v.Input) - } - var output *big.Int - if v.Output == nil { - output = &big.Int{} - } else { - output = new(big.Int).Set(v.Output) - } - return &Volumes{ - Input: input, - Output: output, - } +func (v Volumes) Value() (driver.Value, error) { + return fmt.Sprintf("(%s, %s)", v.Input.String(), v.Output.String()), nil } -func (v Volumes) WithInput(input *big.Int) *Volumes { - v.Input = input - return &v -} +func (v *Volumes) Scan(src interface{}) error { + // stored as 
(input, output) + parts := strings.Split(src.(string)[1:(len(src.(string))-1)], ",") -func (v Volumes) WithInputInt64(value int64) *Volumes { - v.Input = big.NewInt(value) - return &v -} + v.Input = new(big.Int) + _, ok := v.Input.SetString(parts[0], 10) + if !ok { + return fmt.Errorf("unable to parse input '%s' as big int", parts[0]) + } -func (v Volumes) WithOutput(output *big.Int) *Volumes { - v.Output = output - return &v + v.Output = new(big.Int) + _, ok = v.Output.SetString(parts[1], 10) + if !ok { + return fmt.Errorf("unable to parse output '%s' as big int", parts[1]) + } + + return nil } -func (v Volumes) WithOutputInt64(value int64) *Volumes { - v.Output = big.NewInt(value) - return &v +func (Volumes) JSONSchemaExtend(schema *jsonschema.Schema) { + inputProperty, _ := schema.Properties.Get("input") + schema.Properties.Set("balance", inputProperty) } -func NewEmptyVolumes() *Volumes { - return &Volumes{ - Input: new(big.Int), - Output: new(big.Int), +func (v Volumes) Copy() Volumes { + return Volumes{ + Input: new(big.Int).Set(v.Input), + Output: new(big.Int).Set(v.Output), } } -func NewVolumesInt64(input, output int64) *Volumes { - return &Volumes{ +func NewEmptyVolumes() Volumes { + return NewVolumesInt64(0, 0) +} + +func NewVolumesInt64(input, output int64) Volumes { + return Volumes{ Input: big.NewInt(input), Output: big.NewInt(output), } @@ -87,19 +83,11 @@ func (v Volumes) MarshalJSON() ([]byte, error) { } func (v Volumes) Balance() *big.Int { - input := v.Input - if input == nil { - input = Zero - } - output := v.Output - if output == nil { - output = Zero - } - return new(big.Int).Sub(input, output) + return new(big.Int).Sub(v.Input, v.Output) } -func (v Volumes) copy() *Volumes { - return &Volumes{ +func (v Volumes) copy() Volumes { + return Volumes{ Input: new(big.Int).Set(v.Input), Output: new(big.Int).Set(v.Output), } @@ -107,7 +95,7 @@ func (v Volumes) copy() *Volumes { type BalancesByAssets map[string]*big.Int -type VolumesByAssets map[string]*Volumes +type VolumesByAssets map[string]Volumes type BalancesByAssetsByAccounts map[string]BalancesByAssets @@ -127,131 +115,41 @@ func (v VolumesByAssets) copy() VolumesByAssets { return ret } -type AccountsAssetsVolumes map[string]VolumesByAssets +type PostCommitVolumes map[string]VolumesByAssets -func (a AccountsAssetsVolumes) GetVolumes(account, asset string) *Volumes { - if a == nil { - return &Volumes{ - Input: &big.Int{}, - Output: &big.Int{}, - } - } - if assetsVolumes, ok := a[account]; !ok { - return &Volumes{ - Input: &big.Int{}, - Output: &big.Int{}, - } - } else { - return &Volumes{ - Input: assetsVolumes[asset].Input, - Output: assetsVolumes[asset].Output, - } - } +func (a PostCommitVolumes) AddInput(account, asset string, input *big.Int) { + volumes := a[account][asset].Copy() + volumes.Input.Add(volumes.Input, input) + a[account][asset] = volumes } -func (a *AccountsAssetsVolumes) SetVolumes(account, asset string, volumes *Volumes) { - if *a == nil { - *a = AccountsAssetsVolumes{} - } - if assetsVolumes, ok := (*a)[account]; !ok { - (*a)[account] = map[string]*Volumes{ - asset: volumes.CopyWithZerosIfNeeded(), - } - } else { - assetsVolumes[asset] = volumes.CopyWithZerosIfNeeded() - } +func (a PostCommitVolumes) AddOutput(account, asset string, output *big.Int) { + volumes := a[account][asset].Copy() + volumes.Output.Add(volumes.Output, output) + a[account][asset] = volumes } -func (a *AccountsAssetsVolumes) AddInput(account, asset string, input *big.Int) { - if *a == nil { - *a = AccountsAssetsVolumes{} - } - 
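Value and Scan above map a Volumes pair to and from its textual composite form. A small sketch, assuming the database hands the pair back without padding, e.g. "(150,30)":

package ledger

import "fmt"

// exampleScanVolumes parses the composite text form produced by the database.
func exampleScanVolumes() error {
	var v Volumes
	if err := v.Scan("(150,30)"); err != nil {
		return err
	}
	fmt.Println(v.Input, v.Output, v.Balance()) // 150 30 120
	return nil
}

Note that Value renders the pair with a space after the comma, while Scan splits on a bare comma and big.Int.SetString rejects leading spaces, so the round trip assumes the stored composite comes back unpadded.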
if assetsVolumes, ok := (*a)[account]; !ok { - (*a)[account] = map[string]*Volumes{ - asset: { - Input: input, - Output: &big.Int{}, - }, - } - } else { - volumes := assetsVolumes[asset].CopyWithZerosIfNeeded() - volumes.Input.Add(volumes.Input, input) - assetsVolumes[asset] = volumes - } -} - -func (a *AccountsAssetsVolumes) AddOutput(account, asset string, output *big.Int) { - if *a == nil { - *a = AccountsAssetsVolumes{} - } - if assetsVolumes, ok := (*a)[account]; !ok { - (*a)[account] = map[string]*Volumes{ - asset: { - Output: output, - Input: &big.Int{}, - }, - } - } else { - volumes := assetsVolumes[asset].CopyWithZerosIfNeeded() - volumes.Output.Add(volumes.Output, output) - assetsVolumes[asset] = volumes - } -} - -func (a AccountsAssetsVolumes) HasAccount(account string) bool { - if a == nil { - return false - } - _, ok := a[account] - return ok -} - -func (a AccountsAssetsVolumes) HasAccountAndAsset(account, asset string) bool { - if a == nil { - return false - } - volumesByAsset, ok := a[account] - if !ok { - return false - } - _, ok = volumesByAsset[asset] - return ok -} - -// Scan - Implement the database/sql scanner interface -func (a *AccountsAssetsVolumes) Scan(value interface{}) error { - if value == nil { - return nil - } - - val, err := driver.String.ConvertValue(value) - if err != nil { - return err - } - - *a = AccountsAssetsVolumes{} - switch val := val.(type) { - case []uint8: - return json.Unmarshal(val, a) - case string: - return json.Unmarshal([]byte(val), a) - default: - panic("not handled type") - } -} - -func (a AccountsAssetsVolumes) Copy() AccountsAssetsVolumes { - ret := AccountsAssetsVolumes{} +func (a PostCommitVolumes) Copy() PostCommitVolumes { + ret := PostCommitVolumes{} for key, volumes := range a { ret[key] = volumes.copy() } return ret } -func (a AccountsAssetsVolumes) Balances() BalancesByAssetsByAccounts { - ret := BalancesByAssetsByAccounts{} - for account, volumesByAssets := range a { - ret[account] = volumesByAssets.Balances() +func (a PostCommitVolumes) Merge(volumes PostCommitVolumes) PostCommitVolumes { + for account, volumesByAssets := range volumes { + if _, ok := a[account]; !ok { + a[account] = map[string]Volumes{} + } + for asset, volumes := range volumesByAssets { + if _, ok := a[account][asset]; !ok { + a[account][asset] = NewEmptyVolumes() + } + a[account][asset].Input.Add(a[account][asset].Input, volumes.Input) + a[account][asset].Output.Add(a[account][asset].Output, volumes.Output) + } } - return ret + + return a } diff --git a/openapi.yaml b/openapi.yaml index fe21b0048..e0c60b639 100644 --- a/openapi.yaml +++ b/openapi.yaml @@ -2,7 +2,7 @@ openapi: 3.0.3 info: title: Ledger API contact: {} - version: LEDGER_VERSION + version: v1 servers: - url: http://localhost:8080/ paths: @@ -1179,10 +1179,10 @@ paths: security: - Authorization: - ledger:read - /v2/_info: + /_/info: get: tags: - - ledger.v2 + - ledger summary: Show server information operationId: v2GetInfo x-speakeasy-name-override: GetInfo @@ -1902,6 +1902,12 @@ paths: description: Use an idempotency key schema: type: string + - name: force + in: query + description: Disable balance checks when passing postings + schema: + type: boolean + example: true requestBody: required: true description: | @@ -2115,6 +2121,12 @@ paths: required: false schema: type: boolean + - name: dryRun + in: query + description: Set the dryRun mode. dry run mode doesn't add the logs to the database or publish a message to the message broker. 
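Merge above folds one set of post-commit volumes into another, creating empty entries as needed and summing inputs and outputs per account and asset. A worked sketch, assuming the same ledger package:

package ledger

// exampleMergeVolumes combines two PostCommitVolumes snapshots in place.
func exampleMergeVolumes() PostCommitVolumes {
	a := PostCommitVolumes{
		"world": {"USD/2": NewVolumesInt64(0, 100)},
	}
	b := PostCommitVolumes{
		"world":     {"USD/2": NewVolumesInt64(0, 50)},
		"users:001": {"USD/2": NewVolumesInt64(150, 0)},
	}

	a = a.Merge(b)
	// a["world"]["USD/2"]     -> input 0, output 150
	// a["users:001"]["USD/2"] -> input 150, output 0
	return a
}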
+ schema: + type: boolean + example: true responses: "201": description: OK @@ -2949,6 +2961,7 @@ components: - COMPILATION_FAILED - METADATA_OVERRIDE - NOT_FOUND + - TIMEOUT example: INSUFFICIENT_FUND LedgerInfoResponse: properties: @@ -3047,7 +3060,7 @@ components: data: type: array items: - $ref: '#/components/schemas/V2ExpandedTransaction' + $ref: '#/components/schemas/V2Transaction' V2LogsCursorResponse: type: object required: @@ -3216,6 +3229,9 @@ components: V2Transaction: type: object properties: + insertedAt: + type: string + format: date-time timestamp: type: string format: date-time @@ -3234,21 +3250,24 @@ components: minimum: 0 reverted: type: boolean + revertedAt: + type: string + format: date-time + preCommitVolumes: + $ref: '#/components/schemas/V2AggregatedVolumes' + postCommitVolumes: + $ref: '#/components/schemas/V2AggregatedVolumes' + preCommitEffectiveVolumes: + $ref: '#/components/schemas/V2AggregatedVolumes' + postCommitEffectiveVolumes: + $ref: '#/components/schemas/V2AggregatedVolumes' required: - postings - timestamp - id - metadata - reverted - V2ExpandedTransaction: - allOf: - - $ref: '#/components/schemas/V2Transaction' - - type: object - properties: - preCommitVolumes: - $ref: '#/components/schemas/V2AggregatedVolumes' - postCommitVolumes: - $ref: '#/components/schemas/V2AggregatedVolumes' + - insertedAt V2PostTransaction: type: object required: @@ -3343,7 +3362,7 @@ components: V2GetTransactionResponse: properties: data: - $ref: '#/components/schemas/V2ExpandedTransaction' + $ref: '#/components/schemas/V2Transaction' type: object required: - data @@ -3432,6 +3451,8 @@ components: - NO_POSTINGS - LEDGER_NOT_FOUND - IMPORT + - TIMEOUT + - BULK_SIZE_EXCEEDED example: VALIDATION V2LedgerInfoResponse: type: object @@ -3650,6 +3671,10 @@ components: type: string metadata: $ref: '#/components/schemas/V2Metadata' + features: + type: object + additionalProperties: + type: string V2Ledger: type: object properties: diff --git a/openapi/v1.yaml b/openapi/v1.yaml index ff9129b4a..ab33d5bb9 100644 --- a/openapi/v1.yaml +++ b/openapi/v1.yaml @@ -2,7 +2,7 @@ openapi: 3.0.3 info: title: Ledger API contact: {} - version: LEDGER_VERSION + version: v1 paths: /_info: get: @@ -1852,6 +1852,7 @@ components: - COMPILATION_FAILED - METADATA_OVERRIDE - NOT_FOUND + - TIMEOUT example: INSUFFICIENT_FUND LedgerInfoResponse: properties: diff --git a/openapi/v2.yaml b/openapi/v2.yaml index e0c200b15..a06345033 100644 --- a/openapi/v2.yaml +++ b/openapi/v2.yaml @@ -2,14 +2,14 @@ openapi: 3.0.3 info: title: Ledger API contact: {} - version: LEDGER_VERSION + version: v2 servers: - url: http://localhost:8080/ paths: - /v2/_info: + /_/info: get: tags: - - ledger.v2 + - ledger summary: Show server information operationId: v2GetInfo x-speakeasy-name-override: GetInfo @@ -754,6 +754,13 @@ paths: description: Use an idempotency key schema: type: string + - name: force + in: query + description: + Disable balance checks when passing postings + schema: + type: boolean + example: true requestBody: required: true description: | @@ -969,6 +976,14 @@ paths: required: false schema: type: boolean + - name: dryRun + in: query + description: >- + Set the dryRun mode. dry run mode doesn't add the logs to the + database or publish a message to the message broker. 
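dryRun and force are plain query parameters, so they can be exercised with nothing more than the server URL declared at the top of the spec. A hedged sketch using only net/http; the /v2/{ledger}/transactions path and the "quickstart" ledger name are assumptions for illustration, not something this diff defines:

package main

import (
	"fmt"
	"net/http"
	"strings"
)

func main() {
	body := `{"postings":[{"source":"world","destination":"users:001","amount":100,"asset":"USD/2"}]}`

	// dryRun=true: the transaction is evaluated, but no log is written and no
	// message is published to the broker. Adding force=true would also skip
	// balance checks when postings are passed directly.
	resp, err := http.Post(
		"http://localhost:8080/v2/quickstart/transactions?dryRun=true",
		"application/json",
		strings.NewReader(body),
	)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status)
}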
+ schema: + type: boolean + example: true responses: '201': description: OK @@ -1322,7 +1337,7 @@ components: data: type: array items: - $ref: '#/components/schemas/V2ExpandedTransaction' + $ref: '#/components/schemas/V2Transaction' V2LogsCursorResponse: type: object required: @@ -1491,6 +1506,9 @@ components: V2Transaction: type: object properties: + insertedAt: + type: string + format: date-time timestamp: type: string format: date-time @@ -1509,21 +1527,24 @@ components: minimum: 0 reverted: type: boolean + revertedAt: + type: string + format: date-time + preCommitVolumes: + $ref: '#/components/schemas/V2AggregatedVolumes' + postCommitVolumes: + $ref: '#/components/schemas/V2AggregatedVolumes' + preCommitEffectiveVolumes: + $ref: '#/components/schemas/V2AggregatedVolumes' + postCommitEffectiveVolumes: + $ref: '#/components/schemas/V2AggregatedVolumes' required: - postings - timestamp - id - metadata - reverted - V2ExpandedTransaction: - allOf: - - $ref: '#/components/schemas/V2Transaction' - - type: object - properties: - preCommitVolumes: - $ref: '#/components/schemas/V2AggregatedVolumes' - postCommitVolumes: - $ref: '#/components/schemas/V2AggregatedVolumes' + - insertedAt V2PostTransaction: type: object required: @@ -1611,7 +1632,7 @@ components: V2GetTransactionResponse: properties: data: - $ref: '#/components/schemas/V2ExpandedTransaction' + $ref: '#/components/schemas/V2Transaction' type: object required: - data @@ -1701,6 +1722,8 @@ components: - NO_POSTINGS - LEDGER_NOT_FOUND - IMPORT + - TIMEOUT + - BULK_SIZE_EXCEEDED example: VALIDATION V2LedgerInfoResponse: type: object @@ -1919,6 +1942,10 @@ components: type: string metadata: $ref: '#/components/schemas/V2Metadata' + features: + type: object + additionalProperties: + type: string V2Ledger: type: object properties: diff --git a/pkg/core/accounts/account_test.go b/pkg/accounts/account_test.go similarity index 100% rename from pkg/core/accounts/account_test.go rename to pkg/accounts/account_test.go diff --git a/pkg/core/accounts/accounts.go b/pkg/accounts/accounts.go similarity index 100% rename from pkg/core/accounts/accounts.go rename to pkg/accounts/accounts.go diff --git a/pkg/core/assets/asset.go b/pkg/assets/asset.go similarity index 100% rename from pkg/core/assets/asset.go rename to pkg/assets/asset.go diff --git a/pkg/client/.speakeasy/gen.lock b/pkg/client/.speakeasy/gen.lock index 46f840747..7db1bbd33 100644 --- a/pkg/client/.speakeasy/gen.lock +++ b/pkg/client/.speakeasy/gen.lock @@ -1,12 +1,12 @@ lockVersion: 2.0.0 id: a9ac79e1-e429-4ee3-96c4-ec973f19bec3 management: - docChecksum: ca116951d61ec04bcf24308e613bf2d7 - docVersion: LEDGER_VERSION + docChecksum: f87e5e30078da93c83bec4115056a7a9 + docVersion: v1 speakeasyVersion: 1.351.0 generationVersion: 2.384.1 - releaseVersion: 0.3.0 - configChecksum: c8d8f683fb8781c1d3ca28aa7371d52e + releaseVersion: 0.4.15 + configChecksum: 82b92b8a70bc4a520560afe9e887834c features: go: additionalDependencies: 0.1.0 @@ -18,7 +18,6 @@ features: downloadStreams: 0.1.1 envVarSecurityUsage: 0.2.1 errors: 2.81.9 - flattening: 2.81.1 getRequestBodies: 2.81.1 globalSecurity: 2.82.9 globalSecurityCallbacks: 0.1.0 @@ -55,6 +54,7 @@ generatedFiles: - internal/utils/retries.go - internal/utils/security.go - internal/utils/utils.go + - /models/operations/v2getinfo.go - /models/operations/getinfo.go - /models/operations/getledgerinfo.go - /models/operations/countaccounts.go @@ -75,7 +75,6 @@ generatedFiles: - /models/operations/getbalances.go - /models/operations/getbalancesaggregated.go - 
/models/operations/listlogs.go - - /models/operations/v2getinfo.go - /models/operations/v2listledgers.go - /models/operations/v2getledger.go - /models/operations/v2createledger.go @@ -101,11 +100,15 @@ generatedFiles: - /models/operations/v2listlogs.go - /models/operations/v2importlogs.go - /models/operations/v2exportlogs.go + - /models/sdkerrors/v2errorresponse.go + - /models/sdkerrors/errorresponse.go + - /models/components/v2errorsenum.go + - /models/components/v2configinforesponse.go + - /models/components/httpmetadata.go - /models/components/configinforesponse.go - /models/components/configinfo.go - /models/components/config.go - /models/components/ledgerstorage.go - - /models/components/httpmetadata.go - /models/components/errorsenum.go - /models/components/ledgerinforesponse.go - /models/components/ledgerinfo.go @@ -134,8 +137,6 @@ generatedFiles: - /models/components/aggregatebalancesresponse.go - /models/components/logscursorresponse.go - /models/components/log.go - - /models/components/v2errorsenum.go - - /models/components/v2configinforesponse.go - /models/components/v2ledgerlistresponse.go - /models/components/v2ledger.go - /models/components/v2getledgerresponse.go @@ -146,6 +147,7 @@ generatedFiles: - /models/components/v2bulkresponse.go - /models/components/v2bulkelementresult.go - /models/components/v2transaction.go + - /models/components/v2volume.go - /models/components/v2posting.go - /models/components/v2bulkelement.go - /models/components/v2bulkelementcreatetransaction.go @@ -157,12 +159,10 @@ generatedFiles: - /models/components/v2bulkelementdeletemetadata.go - /models/components/v2accountscursorresponse.go - /models/components/v2account.go - - /models/components/v2volume.go - /models/components/v2accountresponse.go - /models/components/v2statsresponse.go - /models/components/v2stats.go - /models/components/v2transactionscursorresponse.go - - /models/components/v2expandedtransaction.go - /models/components/v2createtransactionresponse.go - /models/components/v2gettransactionresponse.go - /models/components/v2reverttransactionresponse.go @@ -172,8 +172,7 @@ generatedFiles: - /models/components/v2logscursorresponse.go - /models/components/v2log.go - /models/components/security.go - - /models/sdkerrors/errorresponse.go - - /models/sdkerrors/v2errorresponse.go + - docs/models/operations/v2getinforesponse.md - docs/models/operations/getinforesponse.md - docs/models/operations/getledgerinforequest.md - docs/models/operations/getledgerinforesponse.md @@ -214,7 +213,6 @@ generatedFiles: - docs/models/operations/getbalancesaggregatedresponse.md - docs/models/operations/listlogsrequest.md - docs/models/operations/listlogsresponse.md - - docs/models/operations/v2getinforesponse.md - docs/models/operations/v2listledgersrequest.md - docs/models/operations/v2listledgersresponse.md - docs/models/operations/v2getledgerrequest.md @@ -266,11 +264,15 @@ generatedFiles: - docs/models/operations/v2importlogsresponse.md - docs/models/operations/v2exportlogsrequest.md - docs/models/operations/v2exportlogsresponse.md + - docs/models/sdkerrors/v2errorresponse.md + - docs/models/sdkerrors/errorresponse.md + - docs/models/components/v2errorsenum.md + - docs/models/components/v2configinforesponse.md + - docs/models/components/httpmetadata.md - docs/models/components/configinforesponse.md - docs/models/components/configinfo.md - docs/models/components/config.md - docs/models/components/ledgerstorage.md - - docs/models/components/httpmetadata.md - docs/models/components/errorsenum.md - 
docs/models/components/ledgerinforesponse.md - docs/models/components/storage.md @@ -308,8 +310,6 @@ generatedFiles: - docs/models/components/logscursorresponse.md - docs/models/components/type.md - docs/models/components/log.md - - docs/models/components/v2errorsenum.md - - docs/models/components/v2configinforesponse.md - docs/models/components/v2ledgerlistresponsecursor.md - docs/models/components/v2ledgerlistresponse.md - docs/models/components/v2ledger.md @@ -328,6 +328,7 @@ generatedFiles: - docs/models/components/v2bulkelementresultcreatetransaction.md - docs/models/components/v2bulkelementresult.md - docs/models/components/v2transaction.md + - docs/models/components/v2volume.md - docs/models/components/v2posting.md - docs/models/components/v2bulkelement.md - docs/models/components/v2bulkelementcreatetransaction.md @@ -344,13 +345,11 @@ generatedFiles: - docs/models/components/v2accountscursorresponsecursor.md - docs/models/components/v2accountscursorresponse.md - docs/models/components/v2account.md - - docs/models/components/v2volume.md - docs/models/components/v2accountresponse.md - docs/models/components/v2statsresponse.md - docs/models/components/v2stats.md - docs/models/components/v2transactionscursorresponsecursor.md - docs/models/components/v2transactionscursorresponse.md - - docs/models/components/v2expandedtransaction.md - docs/models/components/v2createtransactionresponse.md - docs/models/components/v2gettransactionresponse.md - docs/models/components/v2reverttransactionresponse.md @@ -363,11 +362,9 @@ generatedFiles: - docs/models/components/v2logtype.md - docs/models/components/v2log.md - docs/models/components/security.md - - docs/models/sdkerrors/errorresponse.md - - docs/models/sdkerrors/v2errorresponse.md - docs/sdks/formance/README.md - - docs/sdks/ledger/README.md - docs/models/operations/option.md + - docs/sdks/ledger/README.md - docs/sdks/v1/README.md - docs/sdks/v2/README.md - USAGE.md diff --git a/pkg/client/.speakeasy/gen.yaml b/pkg/client/.speakeasy/gen.yaml index f8aaf5189..ad5852e79 100644 --- a/pkg/client/.speakeasy/gen.yaml +++ b/pkg/client/.speakeasy/gen.yaml @@ -15,7 +15,7 @@ generation: auth: oAuth2ClientCredentialsEnabled: true go: - version: 0.3.0 + version: 0.4.15 additionalDependencies: {} allowUnknownFieldsInWeakUnions: false clientServerStatusCodesAsErrors: true @@ -30,7 +30,7 @@ go: shared: models/components webhooks: models/webhooks inputModelSuffix: input - maxMethodParams: 4 + maxMethodParams: 0 methodArguments: require-security-and-request outputModelSuffix: output packageName: github.com/formancehq/stack/ledger/client diff --git a/pkg/client/README.md b/pkg/client/README.md index e1fb468a5..3049357ff 100644 --- a/pkg/client/README.md +++ b/pkg/client/README.md @@ -69,11 +69,11 @@ func main() { ) ctx := context.Background() - res, err := s.Ledger.V1.GetInfo(ctx) + res, err := s.Ledger.GetInfo(ctx) if err != nil { log.Fatal(err) } - if res.ConfigInfoResponse != nil { + if res.V2ConfigInfoResponse != nil { // handle response } } @@ -84,6 +84,10 @@ func main() { ## Available Resources and Operations +### [Ledger](docs/sdks/ledger/README.md) + +* [GetInfo](docs/sdks/ledger/README.md#getinfo) - Show server information + ### [Ledger.V1](docs/sdks/v1/README.md) * [GetInfo](docs/sdks/v1/README.md#getinfo) - Show server information @@ -109,7 +113,6 @@ func main() { ### [Ledger.V2](docs/sdks/v2/README.md) -* [GetInfo](docs/sdks/v2/README.md#getinfo) - Show server information * [ListLedgers](docs/sdks/v2/README.md#listledgers) - List ledgers * 
[GetLedger](docs/sdks/v2/README.md#getledger) - Get a ledger * [CreateLedger](docs/sdks/v2/README.md#createledger) - Create a ledger @@ -164,7 +167,7 @@ func main() { ) ctx := context.Background() - res, err := s.Ledger.V1.GetInfo(ctx, operations.WithRetries( + res, err := s.Ledger.GetInfo(ctx, operations.WithRetries( retry.Config{ Strategy: "backoff", Backoff: &retry.BackoffStrategy{ @@ -178,7 +181,7 @@ func main() { if err != nil { log.Fatal(err) } - if res.ConfigInfoResponse != nil { + if res.V2ConfigInfoResponse != nil { // handle response } } @@ -217,11 +220,11 @@ func main() { ) ctx := context.Background() - res, err := s.Ledger.V1.GetInfo(ctx) + res, err := s.Ledger.GetInfo(ctx) if err != nil { log.Fatal(err) } - if res.ConfigInfoResponse != nil { + if res.V2ConfigInfoResponse != nil { // handle response } } @@ -234,10 +237,10 @@ func main() { Handling errors in this SDK should largely match your expectations. All operations return a response object or an error, they will never return both. When specified by the OpenAPI spec document, the SDK will return the appropriate subclass. -| Error Object | Status Code | Content Type | -| ----------------------- | ----------------------- | ----------------------- | -| sdkerrors.ErrorResponse | default | application/json | -| sdkerrors.SDKError | 4xx-5xx | */* | +| Error Object | Status Code | Content Type | +| ------------------------- | ------------------------- | ------------------------- | +| sdkerrors.V2ErrorResponse | default | application/json | +| sdkerrors.SDKError | 4xx-5xx | */* | ### Example @@ -262,10 +265,10 @@ func main() { ) ctx := context.Background() - res, err := s.Ledger.V1.GetInfo(ctx) + res, err := s.Ledger.GetInfo(ctx) if err != nil { - var e *sdkerrors.ErrorResponse + var e *sdkerrors.V2ErrorResponse if errors.As(err, &e) { // handle error log.Fatal(e.Error()) @@ -315,11 +318,11 @@ func main() { ) ctx := context.Background() - res, err := s.Ledger.V1.GetInfo(ctx) + res, err := s.Ledger.GetInfo(ctx) if err != nil { log.Fatal(err) } - if res.ConfigInfoResponse != nil { + if res.V2ConfigInfoResponse != nil { // handle response } } @@ -350,11 +353,11 @@ func main() { ) ctx := context.Background() - res, err := s.Ledger.V1.GetInfo(ctx) + res, err := s.Ledger.GetInfo(ctx) if err != nil { log.Fatal(err) } - if res.ConfigInfoResponse != nil { + if res.V2ConfigInfoResponse != nil { // handle response } } @@ -429,11 +432,11 @@ func main() { ) ctx := context.Background() - res, err := s.Ledger.V1.GetInfo(ctx) + res, err := s.Ledger.GetInfo(ctx) if err != nil { log.Fatal(err) } - if res.ConfigInfoResponse != nil { + if res.V2ConfigInfoResponse != nil { // handle response } } diff --git a/pkg/client/USAGE.md b/pkg/client/USAGE.md index 4dad1a404..01cb3093b 100644 --- a/pkg/client/USAGE.md +++ b/pkg/client/USAGE.md @@ -18,11 +18,11 @@ func main() { ) ctx := context.Background() - res, err := s.Ledger.V1.GetInfo(ctx) + res, err := s.Ledger.GetInfo(ctx) if err != nil { log.Fatal(err) } - if res.ConfigInfoResponse != nil { + if res.V2ConfigInfoResponse != nil { // handle response } } diff --git a/pkg/client/docs/models/components/errorsenum.md b/pkg/client/docs/models/components/errorsenum.md index f7cdb1cec..f645cefad 100644 --- a/pkg/client/docs/models/components/errorsenum.md +++ b/pkg/client/docs/models/components/errorsenum.md @@ -12,4 +12,5 @@ | `ErrorsEnumNoScript` | NO_SCRIPT | | `ErrorsEnumCompilationFailed` | COMPILATION_FAILED | | `ErrorsEnumMetadataOverride` | METADATA_OVERRIDE | -| `ErrorsEnumNotFound` | NOT_FOUND | \ No 
newline at end of file +| `ErrorsEnumNotFound` | NOT_FOUND | +| `ErrorsEnumTimeout` | TIMEOUT | \ No newline at end of file diff --git a/pkg/client/docs/models/components/v2createledgerrequest.md b/pkg/client/docs/models/components/v2createledgerrequest.md index af6e8fd58..31267cfda 100644 --- a/pkg/client/docs/models/components/v2createledgerrequest.md +++ b/pkg/client/docs/models/components/v2createledgerrequest.md @@ -6,4 +6,5 @@ | Field | Type | Required | Description | Example | | ------------------- | ------------------- | ------------------- | ------------------- | ------------------- | | `Bucket` | **string* | :heavy_minus_sign: | N/A | | -| `Metadata` | map[string]*string* | :heavy_minus_sign: | N/A | {
"admin": "true"
} | \ No newline at end of file +| `Metadata` | map[string]*string* | :heavy_minus_sign: | N/A | {
"admin": "true"
} | +| `Features` | map[string]*string* | :heavy_minus_sign: | N/A | | \ No newline at end of file diff --git a/pkg/client/docs/models/components/v2errorsenum.md b/pkg/client/docs/models/components/v2errorsenum.md index 088a86b2a..a292606a7 100644 --- a/pkg/client/docs/models/components/v2errorsenum.md +++ b/pkg/client/docs/models/components/v2errorsenum.md @@ -16,4 +16,6 @@ | `V2ErrorsEnumAlreadyRevert` | ALREADY_REVERT | | `V2ErrorsEnumNoPostings` | NO_POSTINGS | | `V2ErrorsEnumLedgerNotFound` | LEDGER_NOT_FOUND | -| `V2ErrorsEnumImport` | IMPORT | \ No newline at end of file +| `V2ErrorsEnumImport` | IMPORT | +| `V2ErrorsEnumTimeout` | TIMEOUT | +| `V2ErrorsEnumBulkSizeExceeded` | BULK_SIZE_EXCEEDED | \ No newline at end of file diff --git a/pkg/client/docs/models/components/v2expandedtransaction.md b/pkg/client/docs/models/components/v2expandedtransaction.md deleted file mode 100644 index 6df0fa3a4..000000000 --- a/pkg/client/docs/models/components/v2expandedtransaction.md +++ /dev/null @@ -1,15 +0,0 @@ -# V2ExpandedTransaction - - -## Fields - -| Field | Type | Required | Description | Example | -| ---------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------- | -| `Timestamp` | [time.Time](https://pkg.go.dev/time#Time) | :heavy_check_mark: | N/A | | -| `Postings` | [][components.V2Posting](../../models/components/v2posting.md) | :heavy_check_mark: | N/A | | -| `Reference` | **string* | :heavy_minus_sign: | N/A | ref:001 | -| `Metadata` | map[string]*string* | :heavy_check_mark: | N/A | {
"admin": "true"
} | -| `ID` | [*big.Int](https://pkg.go.dev/math/big#Int) | :heavy_check_mark: | N/A | | -| `Reverted` | *bool* | :heavy_check_mark: | N/A | | -| `PreCommitVolumes` | map[string]map[string][components.V2Volume](../../models/components/v2volume.md) | :heavy_minus_sign: | N/A | {
"orders:1": {
"USD": {
"input": 100,
"output": 10,
"balance": 90
}
},
"orders:2": {
"USD": {
"input": 100,
"output": 10,
"balance": 90
}
}
} | -| `PostCommitVolumes` | map[string]map[string][components.V2Volume](../../models/components/v2volume.md) | :heavy_minus_sign: | N/A | {
"orders:1": {
"USD": {
"input": 100,
"output": 10,
"balance": 90
}
},
"orders:2": {
"USD": {
"input": 100,
"output": 10,
"balance": 90
}
}
} | \ No newline at end of file diff --git a/pkg/client/docs/models/components/v2gettransactionresponse.md b/pkg/client/docs/models/components/v2gettransactionresponse.md index 08f98f748..db50d06c6 100644 --- a/pkg/client/docs/models/components/v2gettransactionresponse.md +++ b/pkg/client/docs/models/components/v2gettransactionresponse.md @@ -3,6 +3,6 @@ ## Fields -| Field | Type | Required | Description | -| ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | -| `Data` | [components.V2ExpandedTransaction](../../models/components/v2expandedtransaction.md) | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | +| `Data` | [components.V2Transaction](../../models/components/v2transaction.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/pkg/client/docs/models/components/v2transaction.md b/pkg/client/docs/models/components/v2transaction.md index 87c7060d1..d09ce0ce0 100644 --- a/pkg/client/docs/models/components/v2transaction.md +++ b/pkg/client/docs/models/components/v2transaction.md @@ -3,11 +3,17 @@ ## Fields -| Field | Type | Required | Description | Example | -| -------------------------------------------------------------- | -------------------------------------------------------------- | -------------------------------------------------------------- | -------------------------------------------------------------- | -------------------------------------------------------------- | -| `Timestamp` | [time.Time](https://pkg.go.dev/time#Time) | :heavy_check_mark: | N/A | | -| `Postings` | [][components.V2Posting](../../models/components/v2posting.md) | :heavy_check_mark: | N/A | | -| `Reference` | **string* | :heavy_minus_sign: | N/A | ref:001 | -| `Metadata` | map[string]*string* | :heavy_check_mark: | N/A | {
"admin": "true"
} | -| `ID` | [*big.Int](https://pkg.go.dev/math/big#Int) | :heavy_check_mark: | N/A | | -| `Reverted` | *bool* | :heavy_check_mark: | N/A | | \ No newline at end of file +| Field | Type | Required | Description | Example | +| ---------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------- | +| `InsertedAt` | [time.Time](https://pkg.go.dev/time#Time) | :heavy_check_mark: | N/A | | +| `Timestamp` | [time.Time](https://pkg.go.dev/time#Time) | :heavy_check_mark: | N/A | | +| `Postings` | [][components.V2Posting](../../models/components/v2posting.md) | :heavy_check_mark: | N/A | | +| `Reference` | **string* | :heavy_minus_sign: | N/A | ref:001 | +| `Metadata` | map[string]*string* | :heavy_check_mark: | N/A | {
"admin": "true"
} | +| `ID` | [*big.Int](https://pkg.go.dev/math/big#Int) | :heavy_check_mark: | N/A | | +| `Reverted` | *bool* | :heavy_check_mark: | N/A | | +| `RevertedAt` | [*time.Time](https://pkg.go.dev/time#Time) | :heavy_minus_sign: | N/A | | +| `PreCommitVolumes` | map[string]map[string][components.V2Volume](../../models/components/v2volume.md) | :heavy_minus_sign: | N/A | {
"orders:1": {
"USD": {
"input": 100,
"output": 10,
"balance": 90
}
},
"orders:2": {
"USD": {
"input": 100,
"output": 10,
"balance": 90
}
}
} | +| `PostCommitVolumes` | map[string]map[string][components.V2Volume](../../models/components/v2volume.md) | :heavy_minus_sign: | N/A | {
"orders:1": {
"USD": {
"input": 100,
"output": 10,
"balance": 90
}
},
"orders:2": {
"USD": {
"input": 100,
"output": 10,
"balance": 90
}
}
} | +| `PreCommitEffectiveVolumes` | map[string]map[string][components.V2Volume](../../models/components/v2volume.md) | :heavy_minus_sign: | N/A | {
"orders:1": {
"USD": {
"input": 100,
"output": 10,
"balance": 90
}
},
"orders:2": {
"USD": {
"input": 100,
"output": 10,
"balance": 90
}
}
} | +| `PostCommitEffectiveVolumes` | map[string]map[string][components.V2Volume](../../models/components/v2volume.md) | :heavy_minus_sign: | N/A | {
"orders:1": {
"USD": {
"input": 100,
"output": 10,
"balance": 90
}
},
"orders:2": {
"USD": {
"input": 100,
"output": 10,
"balance": 90
}
}
} | \ No newline at end of file diff --git a/pkg/client/docs/models/components/v2transactionscursorresponsecursor.md b/pkg/client/docs/models/components/v2transactionscursorresponsecursor.md index bb2df6995..8c3058a5d 100644 --- a/pkg/client/docs/models/components/v2transactionscursorresponsecursor.md +++ b/pkg/client/docs/models/components/v2transactionscursorresponsecursor.md @@ -3,10 +3,10 @@ ## Fields -| Field | Type | Required | Description | Example | -| -------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------- | -| `PageSize` | *int64* | :heavy_check_mark: | N/A | 15 | -| `HasMore` | *bool* | :heavy_check_mark: | N/A | false | -| `Previous` | **string* | :heavy_minus_sign: | N/A | YXVsdCBhbmQgYSBtYXhpbXVtIG1heF9yZXN1bHRzLol= | -| `Next` | **string* | :heavy_minus_sign: | N/A | | -| `Data` | [][components.V2ExpandedTransaction](../../models/components/v2expandedtransaction.md) | :heavy_check_mark: | N/A | | \ No newline at end of file +| Field | Type | Required | Description | Example | +| ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | +| `PageSize` | *int64* | :heavy_check_mark: | N/A | 15 | +| `HasMore` | *bool* | :heavy_check_mark: | N/A | false | +| `Previous` | **string* | :heavy_minus_sign: | N/A | YXVsdCBhbmQgYSBtYXhpbXVtIG1heF9yZXN1bHRzLol= | +| `Next` | **string* | :heavy_minus_sign: | N/A | | +| `Data` | [][components.V2Transaction](../../models/components/v2transaction.md) | :heavy_check_mark: | N/A | | \ No newline at end of file diff --git a/pkg/client/docs/models/operations/v2createtransactionrequest.md b/pkg/client/docs/models/operations/v2createtransactionrequest.md index 73fbead12..a3acbfe48 100644 --- a/pkg/client/docs/models/operations/v2createtransactionrequest.md +++ b/pkg/client/docs/models/operations/v2createtransactionrequest.md @@ -8,4 +8,5 @@ | `Ledger` | *string* | :heavy_check_mark: | Name of the ledger. | ledger001 | | `DryRun` | **bool* | :heavy_minus_sign: | Set the dryRun mode. dry run mode doesn't add the logs to the database or publish a message to the message broker. | true | | `IdempotencyKey` | **string* | :heavy_minus_sign: | Use an idempotency key | | +| `Force` | **bool* | :heavy_minus_sign: | Disable balance checks when passing postings | true | | `V2PostTransaction` | [components.V2PostTransaction](../../models/components/v2posttransaction.md) | :heavy_check_mark: | The request body must contain at least one of the following objects:
- `postings`: suitable for simple transactions
- `script`: enabling more complex transactions with Numscript
| | \ No newline at end of file diff --git a/pkg/client/docs/models/operations/v2reverttransactionrequest.md b/pkg/client/docs/models/operations/v2reverttransactionrequest.md index 58973420d..a076ba2b0 100644 --- a/pkg/client/docs/models/operations/v2reverttransactionrequest.md +++ b/pkg/client/docs/models/operations/v2reverttransactionrequest.md @@ -3,9 +3,10 @@ ## Fields -| Field | Type | Required | Description | Example | -| ------------------------------------------------------- | ------------------------------------------------------- | ------------------------------------------------------- | ------------------------------------------------------- | ------------------------------------------------------- | -| `Ledger` | *string* | :heavy_check_mark: | Name of the ledger. | ledger001 | -| `ID` | [*big.Int](https://pkg.go.dev/math/big#Int) | :heavy_check_mark: | Transaction ID. | 1234 | -| `Force` | **bool* | :heavy_minus_sign: | Force revert | | -| `AtEffectiveDate` | **bool* | :heavy_minus_sign: | Revert transaction at effective date of the original tx | | \ No newline at end of file +| Field | Type | Required | Description | Example | +| ------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------ | +| `Ledger` | *string* | :heavy_check_mark: | Name of the ledger. | ledger001 | +| `ID` | [*big.Int](https://pkg.go.dev/math/big#Int) | :heavy_check_mark: | Transaction ID. | 1234 | +| `Force` | **bool* | :heavy_minus_sign: | Force revert | | +| `AtEffectiveDate` | **bool* | :heavy_minus_sign: | Revert transaction at effective date of the original tx | | +| `DryRun` | **bool* | :heavy_minus_sign: | Set the dryRun mode. dry run mode doesn't add the logs to the database or publish a message to the message broker. 
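The revert operation picks up the same `dryRun` switch; a hedged fragment using the V2RevertTransactionRequest fields from the table whose DryRun row continues just below (the `s.Ledger.V2.RevertTransaction` call shape is assumed from the other request-object examples in this change):

```go
request := operations.V2RevertTransactionRequest{
	Ledger: "ledger001",
	ID:     big.NewInt(1234),
	DryRun: client.Bool(true), // compute the revert without writing logs or publishing events
}
res, err := s.Ledger.V2.RevertTransaction(ctx, request)
```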
| true | \ No newline at end of file diff --git a/pkg/client/docs/sdks/ledger/README.md b/pkg/client/docs/sdks/ledger/README.md index 9609c10c1..a6280ff7e 100644 --- a/pkg/client/docs/sdks/ledger/README.md +++ b/pkg/client/docs/sdks/ledger/README.md @@ -3,3 +3,55 @@ ### Available Operations +* [GetInfo](#getinfo) - Show server information + +## GetInfo + +Show server information + +### Example Usage + +```go +package main + +import( + "github.com/formancehq/stack/ledger/client/models/components" + "github.com/formancehq/stack/ledger/client" + "context" + "log" +) + +func main() { + s := client.New( + client.WithSecurity(components.Security{ + ClientID: "", + ClientSecret: "", + }), + ) + + ctx := context.Background() + res, err := s.Ledger.GetInfo(ctx) + if err != nil { + log.Fatal(err) + } + if res.V2ConfigInfoResponse != nil { + // handle response + } +} +``` + +### Parameters + +| Parameter | Type | Required | Description | +| -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | +| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | +| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | + + +### Response + +**[*operations.V2GetInfoResponse](../../models/operations/v2getinforesponse.md), error** +| Error Object | Status Code | Content Type | +| ------------------------- | ------------------------- | ------------------------- | +| sdkerrors.V2ErrorResponse | default | application/json | +| sdkerrors.SDKError | 4xx-5xx | */* | diff --git a/pkg/client/docs/sdks/v1/README.md b/pkg/client/docs/sdks/v1/README.md index 974b44403..ec70b36c1 100644 --- a/pkg/client/docs/sdks/v1/README.md +++ b/pkg/client/docs/sdks/v1/README.md @@ -87,6 +87,7 @@ package main import( "github.com/formancehq/stack/ledger/client/models/components" "github.com/formancehq/stack/ledger/client" + "github.com/formancehq/stack/ledger/client/models/operations" "context" "log" ) @@ -98,9 +99,11 @@ func main() { ClientSecret: "", }), ) - var ledger string = "ledger001" + request := operations.GetLedgerInfoRequest{ + Ledger: "ledger001", + } ctx := context.Background() - res, err := s.Ledger.V1.GetLedgerInfo(ctx, ledger) + res, err := s.Ledger.V1.GetLedgerInfo(ctx, request) if err != nil { log.Fatal(err) } @@ -112,11 +115,11 @@ func main() { ### Parameters -| Parameter | Type | Required | Description | Example | -| -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | | -| `ledger` | *string* | :heavy_check_mark: | Name of the ledger. | ledger001 | -| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. 
| | +| Parameter | Type | Required | Description | +| ---------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------- | +| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | +| `request` | [operations.GetLedgerInfoRequest](../../models/operations/getledgerinforequest.md) | :heavy_check_mark: | The request object to use for the request. | +| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | ### Response @@ -139,6 +142,7 @@ package main import( "github.com/formancehq/stack/ledger/client/models/components" "github.com/formancehq/stack/ledger/client" + "github.com/formancehq/stack/ledger/client/models/operations" "context" "log" ) @@ -150,64 +154,64 @@ func main() { ClientSecret: "", }), ) - var ledger string = "ledger001" - - var address *string = client.String("users:.+") - - var metadata map[string]any = map[string]any{ - "0": "m", - "1": "e", - "2": "t", - "3": "a", - "4": "d", - "5": "a", - "6": "t", - "7": "a", - "8": "[", - "9": "k", - "10": "e", - "11": "y", - "12": "]", - "13": "=", - "14": "v", - "15": "a", - "16": "l", - "17": "u", - "18": "e", - "19": "1", - "20": "&", - "21": "m", - "22": "e", - "23": "t", - "24": "a", - "25": "d", - "26": "a", - "27": "t", - "28": "a", - "29": "[", - "30": "a", - "31": ".", - "32": "n", - "33": "e", - "34": "s", - "35": "t", - "36": "e", - "37": "d", - "38": ".", - "39": "k", - "40": "e", - "41": "y", - "42": "]", - "43": "=", - "44": "v", - "45": "a", - "46": "l", - "47": "u", - "48": "e", - "49": "2", + request := operations.CountAccountsRequest{ + Ledger: "ledger001", + Address: client.String("users:.+"), + Metadata: map[string]any{ + "0": "m", + "1": "e", + "2": "t", + "3": "a", + "4": "d", + "5": "a", + "6": "t", + "7": "a", + "8": "[", + "9": "k", + "10": "e", + "11": "y", + "12": "]", + "13": "=", + "14": "v", + "15": "a", + "16": "l", + "17": "u", + "18": "e", + "19": "1", + "20": "&", + "21": "m", + "22": "e", + "23": "t", + "24": "a", + "25": "d", + "26": "a", + "27": "t", + "28": "a", + "29": "[", + "30": "a", + "31": ".", + "32": "n", + "33": "e", + "34": "s", + "35": "t", + "36": "e", + "37": "d", + "38": ".", + "39": "k", + "40": "e", + "41": "y", + "42": "]", + "43": "=", + "44": "v", + "45": "a", + "46": "l", + "47": "u", + "48": "e", + "49": "2", + }, } ctx := context.Background() - res, err := s.Ledger.V1.CountAccounts(ctx, ledger, address, metadata) + res, err := s.Ledger.V1.CountAccounts(ctx, request) if err != nil { log.Fatal(err) } @@ -219,13 +223,11 @@ func main() { ### Parameters -| Parameter | Type | Required | Description | Example | -| -------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------- | 
-------------------------------------------------------------------------------------------------------------------------------- | -| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | | -| `ledger` | *string* | :heavy_check_mark: | Name of the ledger. | ledger001 | -| `address` | **string* | :heavy_minus_sign: | Filter accounts by address pattern (regular expression placed between ^ and $). | users:.+ | -| `metadata` | map[string]*any* | :heavy_minus_sign: | Filter accounts by metadata key value pairs. The filter can be used like this metadata[key]=value1&metadata[a.nested.key]=value2 | metadata[key]=value1&metadata[a.nested.key]=value2 | -| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | | +| Parameter | Type | Required | Description | +| ---------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------- | +| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | +| `request` | [operations.CountAccountsRequest](../../models/operations/countaccountsrequest.md) | :heavy_check_mark: | The request object to use for the request. | +| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | ### Response @@ -360,6 +362,7 @@ package main import( "github.com/formancehq/stack/ledger/client/models/components" "github.com/formancehq/stack/ledger/client" + "github.com/formancehq/stack/ledger/client/models/operations" "context" "log" ) @@ -371,11 +374,12 @@ func main() { ClientSecret: "", }), ) - var ledger string = "ledger001" - - var address string = "users:001" + request := operations.GetAccountRequest{ + Ledger: "ledger001", + Address: "users:001", + } ctx := context.Background() - res, err := s.Ledger.V1.GetAccount(ctx, ledger, address) + res, err := s.Ledger.V1.GetAccount(ctx, request) if err != nil { log.Fatal(err) } @@ -387,12 +391,11 @@ func main() { ### Parameters -| Parameter | Type | Required | Description | Example | -| ------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------ | -| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | | -| `ledger` | *string* | :heavy_check_mark: | Name of the ledger. | ledger001 | -| `address` | *string* | :heavy_check_mark: | Exact address of the account. It must match the following regular expressions pattern:
```
^\w+(:\w+)*$
```
| users:001 | -| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | | +| Parameter | Type | Required | Description | +| ---------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | +| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | +| `request` | [operations.GetAccountRequest](../../models/operations/getaccountrequest.md) | :heavy_check_mark: | The request object to use for the request. | +| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | ### Response @@ -415,6 +418,7 @@ package main import( "github.com/formancehq/stack/ledger/client/models/components" "github.com/formancehq/stack/ledger/client" + "github.com/formancehq/stack/ledger/client/models/operations" "context" "log" ) @@ -426,15 +430,15 @@ func main() { ClientSecret: "", }), ) - var ledger string = "ledger001" - - var address string = "users:001" - - var requestBody map[string]any = map[string]any{ - "key": "", + request := operations.AddMetadataToAccountRequest{ + Ledger: "ledger001", + Address: "users:001", + RequestBody: map[string]any{ + "key": "", + }, } ctx := context.Background() - res, err := s.Ledger.V1.AddMetadataToAccount(ctx, ledger, address, requestBody) + res, err := s.Ledger.V1.AddMetadataToAccount(ctx, request) if err != nil { log.Fatal(err) } @@ -446,13 +450,11 @@ func main() { ### Parameters -| Parameter | Type | Required | Description | Example | -| ------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------ | -| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | | -| `ledger` | *string* | :heavy_check_mark: | Name of the ledger. | ledger001 | -| `address` | *string* | :heavy_check_mark: | Exact address of the account. It must match the following regular expressions pattern:
```
^\w+(:\w+)*$
```
| users:001 | -| `requestBody` | map[string]*any* | :heavy_check_mark: | metadata | | -| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | | +| Parameter | Type | Required | Description | +| ------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------ | +| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | +| `request` | [operations.AddMetadataToAccountRequest](../../models/operations/addmetadatatoaccountrequest.md) | :heavy_check_mark: | The request object to use for the request. | +| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | ### Response @@ -475,6 +477,7 @@ package main import( "github.com/formancehq/stack/ledger/client/models/components" "github.com/formancehq/stack/ledger/client" + "github.com/formancehq/stack/ledger/client/models/operations" "context" "log" ) @@ -486,9 +489,11 @@ func main() { ClientSecret: "", }), ) - var ledger string = "ledger001" + request := operations.GetMappingRequest{ + Ledger: "ledger001", + } ctx := context.Background() - res, err := s.Ledger.V1.GetMapping(ctx, ledger) + res, err := s.Ledger.V1.GetMapping(ctx, request) if err != nil { log.Fatal(err) } @@ -500,11 +505,11 @@ func main() { ### Parameters -| Parameter | Type | Required | Description | Example | -| -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | | -| `ledger` | *string* | :heavy_check_mark: | Name of the ledger. | ledger001 | -| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | | +| Parameter | Type | Required | Description | +| ---------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | +| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | +| `request` | [operations.GetMappingRequest](../../models/operations/getmappingrequest.md) | :heavy_check_mark: | The request object to use for the request. | +| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. 
| ### Response @@ -527,6 +532,7 @@ package main import( "github.com/formancehq/stack/ledger/client/models/components" "github.com/formancehq/stack/ledger/client" + "github.com/formancehq/stack/ledger/client/models/operations" "context" "log" ) @@ -538,18 +544,19 @@ func main() { ClientSecret: "", }), ) - var ledger string = "ledger001" - - var mapping *components.Mapping = &components.Mapping{ - Contracts: []components.Contract{ - components.Contract{ - Account: client.String("users:001"), - Expr: components.Expr{}, + request := operations.UpdateMappingRequest{ + Ledger: "ledger001", + Mapping: &components.Mapping{ + Contracts: []components.Contract{ + components.Contract{ + Account: client.String("users:001"), + Expr: components.Expr{}, + }, }, }, } ctx := context.Background() - res, err := s.Ledger.V1.UpdateMapping(ctx, ledger, mapping) + res, err := s.Ledger.V1.UpdateMapping(ctx, request) if err != nil { log.Fatal(err) } @@ -561,12 +568,11 @@ func main() { ### Parameters -| Parameter | Type | Required | Description | Example | -| -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | | -| `ledger` | *string* | :heavy_check_mark: | Name of the ledger. | ledger001 | -| `mapping` | [components.Mapping](../../models/components/mapping.md) | :heavy_check_mark: | N/A | | -| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | | +| Parameter | Type | Required | Description | +| ---------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------- | +| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | +| `request` | [operations.UpdateMappingRequest](../../models/operations/updatemappingrequest.md) | :heavy_check_mark: | The request object to use for the request. | +| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. 
| ### Response @@ -592,6 +598,7 @@ package main import( "github.com/formancehq/stack/ledger/client/models/components" "github.com/formancehq/stack/ledger/client" + "github.com/formancehq/stack/ledger/client/models/operations" "context" "log" ) @@ -603,26 +610,26 @@ func main() { ClientSecret: "", }), ) - var ledger string = "ledger001" - - script := components.Script{ - Plain: "vars { - account $user - } - send [COIN 10] ( - source = @world - destination = $user - ) - ", - Vars: map[string]any{ - "user": "users:042", + request := operations.RunScriptRequest{ + Ledger: "ledger001", + Preview: client.Bool(true), + Script: components.Script{ + Plain: "vars { + account $user + } + send [COIN 10] ( + source = @world + destination = $user + ) + ", + Vars: map[string]any{ + "user": "users:042", + }, + Reference: client.String("order_1234"), }, - Reference: client.String("order_1234"), } - - var preview *bool = client.Bool(true) ctx := context.Background() - res, err := s.Ledger.V1.RunScript(ctx, ledger, script, preview) + res, err := s.Ledger.V1.RunScript(ctx, request) if err != nil { log.Fatal(err) } @@ -634,13 +641,11 @@ func main() { ### Parameters -| Parameter | Type | Required | Description | Example | -| ------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------- | -| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | | -| `ledger` | *string* | :heavy_check_mark: | Name of the ledger. | ledger001 | -| `script` | [components.Script](../../models/components/script.md) | :heavy_check_mark: | N/A | | -| `preview` | **bool* | :heavy_minus_sign: | Set the preview mode. Preview mode doesn't add the logs to the database or publish a message to the message broker. | true | -| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | | +| Parameter | Type | Required | Description | +| -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | +| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | +| `request` | [operations.RunScriptRequest](../../models/operations/runscriptrequest.md) | :heavy_check_mark: | The request object to use for the request. | +| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. 
| ### Response @@ -663,6 +668,7 @@ package main import( "github.com/formancehq/stack/ledger/client/models/components" "github.com/formancehq/stack/ledger/client" + "github.com/formancehq/stack/ledger/client/models/operations" "context" "log" ) @@ -674,9 +680,11 @@ func main() { ClientSecret: "", }), ) - var ledger string = "ledger001" + request := operations.ReadStatsRequest{ + Ledger: "ledger001", + } ctx := context.Background() - res, err := s.Ledger.V1.ReadStats(ctx, ledger) + res, err := s.Ledger.V1.ReadStats(ctx, request) if err != nil { log.Fatal(err) } @@ -688,11 +696,11 @@ func main() { ### Parameters -| Parameter | Type | Required | Description | Example | -| -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | | -| `ledger` | *string* | :heavy_check_mark: | name of the ledger | ledger001 | -| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | | +| Parameter | Type | Required | Description | +| -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | +| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | +| `request` | [operations.ReadStatsRequest](../../models/operations/readstatsrequest.md) | :heavy_check_mark: | The request object to use for the request. | +| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. 
| ### Response @@ -837,6 +845,7 @@ package main import( "github.com/formancehq/stack/ledger/client/models/components" "github.com/formancehq/stack/ledger/client" + "github.com/formancehq/stack/ledger/client/models/operations" "math/big" "context" "log" @@ -849,36 +858,36 @@ func main() { ClientSecret: "", }), ) - var ledger string = "ledger001" - - postTransaction := components.PostTransaction{ - Postings: []components.Posting{ - components.Posting{ - Amount: big.NewInt(100), - Asset: "COIN", - Destination: "users:002", - Source: "users:001", + request := operations.CreateTransactionRequest{ + Ledger: "ledger001", + Preview: client.Bool(true), + PostTransaction: components.PostTransaction{ + Postings: []components.Posting{ + components.Posting{ + Amount: big.NewInt(100), + Asset: "COIN", + Destination: "users:002", + Source: "users:001", + }, }, - }, - Script: &components.PostTransactionScript{ - Plain: "vars { - account $user - } - send [COIN 10] ( - source = @world - destination = $user - ) - ", - Vars: map[string]any{ - "user": "users:042", + Script: &components.PostTransactionScript{ + Plain: "vars { + account $user + } + send [COIN 10] ( + source = @world + destination = $user + ) + ", + Vars: map[string]any{ + "user": "users:042", + }, }, + Reference: client.String("ref:001"), }, - Reference: client.String("ref:001"), } - - var preview *bool = client.Bool(true) ctx := context.Background() - res, err := s.Ledger.V1.CreateTransaction(ctx, ledger, postTransaction, preview) + res, err := s.Ledger.V1.CreateTransaction(ctx, request) if err != nil { log.Fatal(err) } @@ -890,13 +899,11 @@ func main() { ### Parameters -| Parameter | Type | Required | Description | Example | -| -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | | -| `ledger` | *string* | :heavy_check_mark: | Name of the ledger. | ledger001 | -| `postTransaction` | [components.PostTransaction](../../models/components/posttransaction.md) | :heavy_check_mark: | The request body must contain at least one of the following objects:
- `postings`: suitable for simple transactions
- `script`: enabling more complex transactions with Numscript
| | -| `preview` | **bool* | :heavy_minus_sign: | Set the preview mode. Preview mode doesn't add the logs to the database or publish a message to the message broker. | true | -| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | | +| Parameter | Type | Required | Description | +| ------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------ | +| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | +| `request` | [operations.CreateTransactionRequest](../../models/operations/createtransactionrequest.md) | :heavy_check_mark: | The request object to use for the request. | +| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | ### Response @@ -919,6 +926,7 @@ package main import( "github.com/formancehq/stack/ledger/client/models/components" "github.com/formancehq/stack/ledger/client" + "github.com/formancehq/stack/ledger/client/models/operations" "math/big" "context" "log" @@ -931,11 +939,12 @@ func main() { ClientSecret: "", }), ) - var ledger string = "ledger001" - - var txid *big.Int = big.NewInt(1234) + request := operations.GetTransactionRequest{ + Ledger: "ledger001", + Txid: big.NewInt(1234), + } ctx := context.Background() - res, err := s.Ledger.V1.GetTransaction(ctx, ledger, txid) + res, err := s.Ledger.V1.GetTransaction(ctx, request) if err != nil { log.Fatal(err) } @@ -947,12 +956,11 @@ func main() { ### Parameters -| Parameter | Type | Required | Description | Example | -| -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | | -| `ledger` | *string* | :heavy_check_mark: | Name of the ledger. | ledger001 | -| `txid` | [*big.Int](https://pkg.go.dev/math/big#Int) | :heavy_check_mark: | Transaction ID. | 1234 | -| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | | +| Parameter | Type | Required | Description | +| ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | +| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | +| `request` | [operations.GetTransactionRequest](../../models/operations/gettransactionrequest.md) | :heavy_check_mark: | The request object to use for the request. | +| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. 
| ### Response @@ -975,6 +983,7 @@ package main import( "github.com/formancehq/stack/ledger/client/models/components" "github.com/formancehq/stack/ledger/client" + "github.com/formancehq/stack/ledger/client/models/operations" "math/big" "context" "log" @@ -987,11 +996,12 @@ func main() { ClientSecret: "", }), ) - var ledger string = "ledger001" - - var txid *big.Int = big.NewInt(1234) + request := operations.AddMetadataOnTransactionRequest{ + Ledger: "ledger001", + Txid: big.NewInt(1234), + } ctx := context.Background() - res, err := s.Ledger.V1.AddMetadataOnTransaction(ctx, ledger, txid, nil) + res, err := s.Ledger.V1.AddMetadataOnTransaction(ctx, request) if err != nil { log.Fatal(err) } @@ -1003,13 +1013,11 @@ func main() { ### Parameters -| Parameter | Type | Required | Description | Example | -| -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | | -| `ledger` | *string* | :heavy_check_mark: | Name of the ledger. | ledger001 | -| `txid` | [*big.Int](https://pkg.go.dev/math/big#Int) | :heavy_check_mark: | Transaction ID. | 1234 | -| `requestBody` | map[string]*any* | :heavy_minus_sign: | metadata | | -| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | | +| Parameter | Type | Required | Description | +| -------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- | +| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | +| `request` | [operations.AddMetadataOnTransactionRequest](../../models/operations/addmetadataontransactionrequest.md) | :heavy_check_mark: | The request object to use for the request. | +| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. 
| ### Response @@ -1032,6 +1040,7 @@ package main import( "github.com/formancehq/stack/ledger/client/models/components" "github.com/formancehq/stack/ledger/client" + "github.com/formancehq/stack/ledger/client/models/operations" "math/big" "context" "log" @@ -1044,11 +1053,12 @@ func main() { ClientSecret: "", }), ) - var ledger string = "ledger001" - - var txid *big.Int = big.NewInt(1234) + request := operations.RevertTransactionRequest{ + Ledger: "ledger001", + Txid: big.NewInt(1234), + } ctx := context.Background() - res, err := s.Ledger.V1.RevertTransaction(ctx, ledger, txid, nil) + res, err := s.Ledger.V1.RevertTransaction(ctx, request) if err != nil { log.Fatal(err) } @@ -1060,13 +1070,11 @@ func main() { ### Parameters -| Parameter | Type | Required | Description | Example | -| -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | | -| `ledger` | *string* | :heavy_check_mark: | Name of the ledger. | ledger001 | -| `txid` | [*big.Int](https://pkg.go.dev/math/big#Int) | :heavy_check_mark: | Transaction ID. | 1234 | -| `disableChecks` | **bool* | :heavy_minus_sign: | Allow to disable balances checks | | -| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | | +| Parameter | Type | Required | Description | +| ------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------ | +| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | +| `request` | [operations.RevertTransactionRequest](../../models/operations/reverttransactionrequest.md) | :heavy_check_mark: | The request object to use for the request. | +| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. 
| ### Response @@ -1089,6 +1097,7 @@ package main import( "github.com/formancehq/stack/ledger/client/models/components" "github.com/formancehq/stack/ledger/client" + "github.com/formancehq/stack/ledger/client/models/operations" "math/big" "context" "log" @@ -1101,25 +1110,26 @@ func main() { ClientSecret: "", }), ) - var ledger string = "ledger001" - - transactions := components.Transactions{ - Transactions: []components.TransactionData{ - components.TransactionData{ - Postings: []components.Posting{ - components.Posting{ - Amount: big.NewInt(100), - Asset: "COIN", - Destination: "users:002", - Source: "users:001", + request := operations.CreateTransactionsRequest{ + Ledger: "ledger001", + Transactions: components.Transactions{ + Transactions: []components.TransactionData{ + components.TransactionData{ + Postings: []components.Posting{ + components.Posting{ + Amount: big.NewInt(100), + Asset: "COIN", + Destination: "users:002", + Source: "users:001", + }, }, + Reference: client.String("ref:001"), }, - Reference: client.String("ref:001"), }, }, } ctx := context.Background() - res, err := s.Ledger.V1.CreateTransactions(ctx, ledger, transactions) + res, err := s.Ledger.V1.CreateTransactions(ctx, request) if err != nil { log.Fatal(err) } @@ -1131,12 +1141,11 @@ func main() { ### Parameters -| Parameter | Type | Required | Description | Example | -| ------------------------------------------------------------------ | ------------------------------------------------------------------ | ------------------------------------------------------------------ | ------------------------------------------------------------------ | ------------------------------------------------------------------ | -| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | | -| `ledger` | *string* | :heavy_check_mark: | Name of the ledger. | ledger001 | -| `transactions` | [components.Transactions](../../models/components/transactions.md) | :heavy_check_mark: | N/A | | -| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | | +| Parameter | Type | Required | Description | +| -------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------- | +| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | +| `request` | [operations.CreateTransactionsRequest](../../models/operations/createtransactionsrequest.md) | :heavy_check_mark: | The request object to use for the request. | +| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. 
| ### Response @@ -1217,6 +1226,7 @@ package main import( "github.com/formancehq/stack/ledger/client/models/components" "github.com/formancehq/stack/ledger/client" + "github.com/formancehq/stack/ledger/client/models/operations" "context" "log" ) @@ -1228,11 +1238,12 @@ func main() { ClientSecret: "", }), ) - var ledger string = "ledger001" - - var address *string = client.String("users:001") + request := operations.GetBalancesAggregatedRequest{ + Ledger: "ledger001", + Address: client.String("users:001"), + } ctx := context.Background() - res, err := s.Ledger.V1.GetBalancesAggregated(ctx, ledger, address, nil) + res, err := s.Ledger.V1.GetBalancesAggregated(ctx, request) if err != nil { log.Fatal(err) } @@ -1244,13 +1255,11 @@ func main() { ### Parameters -| Parameter | Type | Required | Description | Example | -| ------------------------------------------------------------------------- | ------------------------------------------------------------------------- | ------------------------------------------------------------------------- | ------------------------------------------------------------------------- | ------------------------------------------------------------------------- | -| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | | -| `ledger` | *string* | :heavy_check_mark: | Name of the ledger. | ledger001 | -| `address` | **string* | :heavy_minus_sign: | Filter balances involving given account, either as source or destination. | users:001 | -| `useInsertionDate` | **bool* | :heavy_minus_sign: | Use insertion date instead of effective date | | -| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | | +| Parameter | Type | Required | Description | +| -------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------- | +| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | +| `request` | [operations.GetBalancesAggregatedRequest](../../models/operations/getbalancesaggregatedrequest.md) | :heavy_check_mark: | The request object to use for the request. | +| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. 
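
Across the v1 README hunks above, every operation's positional parameters (`ledger`, `txid`, request bodies, flags) are folded into a single generated request struct passed alongside `ctx`. A condensed before/after sketch for `GetTransaction`, using only the types and values already shown in this diff, could look like:

```go
package main

import (
	"context"
	"log"
	"math/big"

	"github.com/formancehq/stack/ledger/client"
	"github.com/formancehq/stack/ledger/client/models/components"
	"github.com/formancehq/stack/ledger/client/models/operations"
)

func main() {
	s := client.New(
		client.WithSecurity(components.Security{
			ClientID:     "",
			ClientSecret: "",
		}),
	)
	ctx := context.Background()

	// Old calling convention (removed in this diff): positional parameters.
	//   res, err := s.Ledger.V1.GetTransaction(ctx, "ledger001", big.NewInt(1234))

	// New calling convention (added in this diff): one request struct per operation.
	res, err := s.Ledger.V1.GetTransaction(ctx, operations.GetTransactionRequest{
		Ledger: "ledger001",
		Txid:   big.NewInt(1234),
	})
	if err != nil {
		log.Fatal(err)
	}
	_ = res // inspect the transaction response here
}
```

The same pattern applies to the `CreateTransaction`, `AddMetadataOnTransaction`, `RevertTransaction`, `CreateTransactions`, and `GetBalancesAggregated` hunks above.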
| ### Response diff --git a/pkg/client/docs/sdks/v2/README.md b/pkg/client/docs/sdks/v2/README.md index e8cf9c8aa..391a50b8b 100644 --- a/pkg/client/docs/sdks/v2/README.md +++ b/pkg/client/docs/sdks/v2/README.md @@ -3,7 +3,6 @@ ### Available Operations -* [GetInfo](#getinfo) - Show server information * [ListLedgers](#listledgers) - List ledgers * [GetLedger](#getledger) - Get a ledger * [CreateLedger](#createledger) - Create a ledger @@ -30,57 +29,6 @@ * [ImportLogs](#importlogs) * [ExportLogs](#exportlogs) - Export logs -## GetInfo - -Show server information - -### Example Usage - -```go -package main - -import( - "github.com/formancehq/stack/ledger/client/models/components" - "github.com/formancehq/stack/ledger/client" - "context" - "log" -) - -func main() { - s := client.New( - client.WithSecurity(components.Security{ - ClientID: "", - ClientSecret: "", - }), - ) - - ctx := context.Background() - res, err := s.Ledger.V2.GetInfo(ctx) - if err != nil { - log.Fatal(err) - } - if res.V2ConfigInfoResponse != nil { - // handle response - } -} -``` - -### Parameters - -| Parameter | Type | Required | Description | -| -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | -| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | - - -### Response - -**[*operations.V2GetInfoResponse](../../models/operations/v2getinforesponse.md), error** -| Error Object | Status Code | Content Type | -| ------------------------- | ------------------------- | ------------------------- | -| sdkerrors.V2ErrorResponse | default | application/json | -| sdkerrors.SDKError | 4xx-5xx | */* | - ## ListLedgers List ledgers @@ -93,6 +41,7 @@ package main import( "github.com/formancehq/stack/ledger/client/models/components" "github.com/formancehq/stack/ledger/client" + "github.com/formancehq/stack/ledger/client/models/operations" "context" "log" ) @@ -104,11 +53,12 @@ func main() { ClientSecret: "", }), ) - var pageSize *int64 = client.Int64(100) - - var cursor *string = client.String("aHR0cHM6Ly9nLnBhZ2UvTmVrby1SYW1lbj9zaGFyZQ==") + request := operations.V2ListLedgersRequest{ + PageSize: client.Int64(100), + Cursor: client.String("aHR0cHM6Ly9nLnBhZ2UvTmVrby1SYW1lbj9zaGFyZQ=="), + } ctx := context.Background() - res, err := s.Ledger.V2.ListLedgers(ctx, pageSize, cursor) + res, err := s.Ledger.V2.ListLedgers(ctx, request) if err != nil { log.Fatal(err) } @@ -120,12 +70,11 @@ func main() { ### Parameters -| Parameter | Type | Required | Description | Example | -| -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | 
-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | | -| `pageSize` | **int64* | :heavy_minus_sign: | The maximum number of results to return per page.
| 100 | -| `cursor` | **string* | :heavy_minus_sign: | Parameter used in pagination requests. Maximum page size is set to 15.
Set to the value of next for the next page of results.
Set to the value of previous for the previous page of results.
No other parameters can be set when this parameter is set.
| aHR0cHM6Ly9nLnBhZ2UvTmVrby1SYW1lbj9zaGFyZQ== | -| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | | +| Parameter | Type | Required | Description | +| ---------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------- | +| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | +| `request` | [operations.V2ListLedgersRequest](../../models/operations/v2listledgersrequest.md) | :heavy_check_mark: | The request object to use for the request. | +| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | ### Response @@ -148,6 +97,7 @@ package main import( "github.com/formancehq/stack/ledger/client/models/components" "github.com/formancehq/stack/ledger/client" + "github.com/formancehq/stack/ledger/client/models/operations" "context" "log" ) @@ -159,9 +109,11 @@ func main() { ClientSecret: "", }), ) - var ledger string = "ledger001" + request := operations.V2GetLedgerRequest{ + Ledger: "ledger001", + } ctx := context.Background() - res, err := s.Ledger.V2.GetLedger(ctx, ledger) + res, err := s.Ledger.V2.GetLedger(ctx, request) if err != nil { log.Fatal(err) } @@ -173,11 +125,11 @@ func main() { ### Parameters -| Parameter | Type | Required | Description | Example | -| -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | | -| `ledger` | *string* | :heavy_check_mark: | Name of the ledger. | ledger001 | -| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | | +| Parameter | Type | Required | Description | +| ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | +| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | +| `request` | [operations.V2GetLedgerRequest](../../models/operations/v2getledgerrequest.md) | :heavy_check_mark: | The request object to use for the request. | +| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. 
| ### Response @@ -200,6 +152,7 @@ package main import( "github.com/formancehq/stack/ledger/client/models/components" "github.com/formancehq/stack/ledger/client" + "github.com/formancehq/stack/ledger/client/models/operations" "context" "log" ) @@ -211,15 +164,16 @@ func main() { ClientSecret: "", }), ) - var ledger string = "ledger001" - - var v2CreateLedgerRequest *components.V2CreateLedgerRequest = &components.V2CreateLedgerRequest{ - Metadata: map[string]string{ - "admin": "true", + request := operations.V2CreateLedgerRequest{ + Ledger: "ledger001", + V2CreateLedgerRequest: &components.V2CreateLedgerRequest{ + Metadata: map[string]string{ + "admin": "true", + }, }, } ctx := context.Background() - res, err := s.Ledger.V2.CreateLedger(ctx, ledger, v2CreateLedgerRequest) + res, err := s.Ledger.V2.CreateLedger(ctx, request) if err != nil { log.Fatal(err) } @@ -231,12 +185,11 @@ func main() { ### Parameters -| Parameter | Type | Required | Description | Example | -| ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | -| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | | -| `ledger` | *string* | :heavy_check_mark: | Name of the ledger. | ledger001 | -| `v2CreateLedgerRequest` | [*components.V2CreateLedgerRequest](../../models/components/v2createledgerrequest.md) | :heavy_minus_sign: | N/A | | -| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | | +| Parameter | Type | Required | Description | +| ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | +| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | +| `request` | [operations.V2CreateLedgerRequest](../../models/operations/v2createledgerrequest.md) | :heavy_check_mark: | The request object to use for the request. | +| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. 
| ### Response @@ -259,6 +212,7 @@ package main import( "github.com/formancehq/stack/ledger/client/models/components" "github.com/formancehq/stack/ledger/client" + "github.com/formancehq/stack/ledger/client/models/operations" "context" "log" ) @@ -270,13 +224,14 @@ func main() { ClientSecret: "", }), ) - var ledger string = "ledger001" - - var requestBody map[string]string = map[string]string{ - "admin": "true", + request := operations.V2UpdateLedgerMetadataRequest{ + Ledger: "ledger001", + RequestBody: map[string]string{ + "admin": "true", + }, } ctx := context.Background() - res, err := s.Ledger.V2.UpdateLedgerMetadata(ctx, ledger, requestBody) + res, err := s.Ledger.V2.UpdateLedgerMetadata(ctx, request) if err != nil { log.Fatal(err) } @@ -288,12 +243,11 @@ func main() { ### Parameters -| Parameter | Type | Required | Description | Example | -| -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | | -| `ledger` | *string* | :heavy_check_mark: | Name of the ledger. | ledger001 | -| `requestBody` | map[string]*string* | :heavy_minus_sign: | N/A | {
"admin": "true"
} | -| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | | +| Parameter | Type | Required | Description | +| ---------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------- | +| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | +| `request` | [operations.V2UpdateLedgerMetadataRequest](../../models/operations/v2updateledgermetadatarequest.md) | :heavy_check_mark: | The request object to use for the request. | +| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | ### Response @@ -316,6 +270,7 @@ package main import( "github.com/formancehq/stack/ledger/client/models/components" "github.com/formancehq/stack/ledger/client" + "github.com/formancehq/stack/ledger/client/models/operations" "context" "log" ) @@ -327,11 +282,12 @@ func main() { ClientSecret: "", }), ) - var ledger string = "ledger001" - - var key string = "foo" + request := operations.V2DeleteLedgerMetadataRequest{ + Ledger: "ledger001", + Key: "foo", + } ctx := context.Background() - res, err := s.Ledger.V2.DeleteLedgerMetadata(ctx, ledger, key) + res, err := s.Ledger.V2.DeleteLedgerMetadata(ctx, request) if err != nil { log.Fatal(err) } @@ -343,12 +299,11 @@ func main() { ### Parameters -| Parameter | Type | Required | Description | Example | -| -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | | -| `ledger` | *string* | :heavy_check_mark: | Name of the ledger. | ledger001 | -| `key` | *string* | :heavy_check_mark: | Key to remove. | foo | -| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | | +| Parameter | Type | Required | Description | +| ---------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------- | +| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | +| `request` | [operations.V2DeleteLedgerMetadataRequest](../../models/operations/v2deleteledgermetadatarequest.md) | :heavy_check_mark: | The request object to use for the request. | +| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. 
| ### Response @@ -371,6 +326,7 @@ package main import( "github.com/formancehq/stack/ledger/client/models/components" "github.com/formancehq/stack/ledger/client" + "github.com/formancehq/stack/ledger/client/models/operations" "context" "log" ) @@ -382,9 +338,11 @@ func main() { ClientSecret: "", }), ) - var ledger string = "ledger001" + request := operations.V2GetLedgerInfoRequest{ + Ledger: "ledger001", + } ctx := context.Background() - res, err := s.Ledger.V2.GetLedgerInfo(ctx, ledger) + res, err := s.Ledger.V2.GetLedgerInfo(ctx, request) if err != nil { log.Fatal(err) } @@ -396,11 +354,11 @@ func main() { ### Parameters -| Parameter | Type | Required | Description | Example | -| -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | | -| `ledger` | *string* | :heavy_check_mark: | Name of the ledger. | ledger001 | -| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | | +| Parameter | Type | Required | Description | +| -------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------- | +| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | +| `request` | [operations.V2GetLedgerInfoRequest](../../models/operations/v2getledgerinforequest.md) | :heavy_check_mark: | The request object to use for the request. | +| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. 
| ### Response @@ -423,6 +381,7 @@ package main import( "github.com/formancehq/stack/ledger/client/models/components" "github.com/formancehq/stack/ledger/client" + "github.com/formancehq/stack/ledger/client/models/operations" "math/big" "context" "log" @@ -435,44 +394,45 @@ func main() { ClientSecret: "", }), ) - var ledger string = "ledger001" - - var requestBody []components.V2BulkElement = []components.V2BulkElement{ - components.CreateV2BulkElementV2BulkElementCreateTransaction( - components.V2BulkElementCreateTransaction{ - Action: "", - Data: &components.V2PostTransaction{ - Postings: []components.V2Posting{ - components.V2Posting{ - Amount: big.NewInt(100), - Asset: "COIN", - Destination: "users:002", - Source: "users:001", + request := operations.V2CreateBulkRequest{ + Ledger: "ledger001", + RequestBody: []components.V2BulkElement{ + components.CreateV2BulkElementV2BulkElementCreateTransaction( + components.V2BulkElementCreateTransaction{ + Action: "", + Data: &components.V2PostTransaction{ + Postings: []components.V2Posting{ + components.V2Posting{ + Amount: big.NewInt(100), + Asset: "COIN", + Destination: "users:002", + Source: "users:001", + }, }, - }, - Script: &components.V2PostTransactionScript{ - Plain: "vars { - account $user - } - send [COIN 10] ( - source = @world - destination = $user - ) - ", - Vars: map[string]any{ - "user": "users:042", + Script: &components.V2PostTransactionScript{ + Plain: "vars { + account $user + } + send [COIN 10] ( + source = @world + destination = $user + ) + ", + Vars: map[string]any{ + "user": "users:042", + }, + }, + Reference: client.String("ref:001"), + Metadata: map[string]string{ + "admin": "true", }, - }, - Reference: client.String("ref:001"), - Metadata: map[string]string{ - "admin": "true", }, }, - }, - ), + ), + }, } ctx := context.Background() - res, err := s.Ledger.V2.CreateBulk(ctx, ledger, requestBody) + res, err := s.Ledger.V2.CreateBulk(ctx, request) if err != nil { log.Fatal(err) } @@ -484,12 +444,11 @@ func main() { ### Parameters -| Parameter | Type | Required | Description | Example | -| ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | -| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | | -| `ledger` | *string* | :heavy_check_mark: | Name of the ledger. | ledger001 | -| `requestBody` | [][components.V2BulkElement](../../models/components/v2bulkelement.md) | :heavy_minus_sign: | N/A | | -| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | | +| Parameter | Type | Required | Description | +| -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | +| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. 
| +| `request` | [operations.V2CreateBulkRequest](../../models/operations/v2createbulkrequest.md) | :heavy_check_mark: | The request object to use for the request. | +| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | ### Response @@ -512,7 +471,7 @@ package main import( "github.com/formancehq/stack/ledger/client/models/components" "github.com/formancehq/stack/ledger/client" - "time" + "github.com/formancehq/stack/ledger/client/models/operations" "context" "log" ) @@ -524,9 +483,11 @@ func main() { ClientSecret: "", }), ) - var ledger string = "ledger001" + request := operations.V2CountAccountsRequest{ + Ledger: "ledger001", + } ctx := context.Background() - res, err := s.Ledger.V2.CountAccounts(ctx, ledger, nil, nil) + res, err := s.Ledger.V2.CountAccounts(ctx, request) if err != nil { log.Fatal(err) } @@ -538,13 +499,11 @@ func main() { ### Parameters -| Parameter | Type | Required | Description | Example | -| -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | | -| `ledger` | *string* | :heavy_check_mark: | Name of the ledger. | ledger001 | -| `pit` | [*time.Time](https://pkg.go.dev/time#Time) | :heavy_minus_sign: | N/A | | -| `requestBody` | map[string]*any* | :heavy_minus_sign: | N/A | | -| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | | +| Parameter | Type | Required | Description | +| -------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------- | +| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | +| `request` | [operations.V2CountAccountsRequest](../../models/operations/v2countaccountsrequest.md) | :heavy_check_mark: | The request object to use for the request. | +| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. 
| ### Response @@ -624,7 +583,7 @@ package main import( "github.com/formancehq/stack/ledger/client/models/components" "github.com/formancehq/stack/ledger/client" - "time" + "github.com/formancehq/stack/ledger/client/models/operations" "context" "log" ) @@ -636,11 +595,12 @@ func main() { ClientSecret: "", }), ) - var ledger string = "ledger001" - - var address string = "users:001" + request := operations.V2GetAccountRequest{ + Ledger: "ledger001", + Address: "users:001", + } ctx := context.Background() - res, err := s.Ledger.V2.GetAccount(ctx, ledger, address, nil, nil) + res, err := s.Ledger.V2.GetAccount(ctx, request) if err != nil { log.Fatal(err) } @@ -652,14 +612,11 @@ func main() { ### Parameters -| Parameter | Type | Required | Description | Example | -| ------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------ | -| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | | -| `ledger` | *string* | :heavy_check_mark: | Name of the ledger. | ledger001 | -| `address` | *string* | :heavy_check_mark: | Exact address of the account. It must match the following regular expressions pattern:
```
^\w+(:\w+)*$
```
| users:001 | -| `expand` | **string* | :heavy_minus_sign: | N/A | | -| `pit` | [*time.Time](https://pkg.go.dev/time#Time) | :heavy_minus_sign: | N/A | | -| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | | +| Parameter | Type | Required | Description | +| -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | +| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | +| `request` | [operations.V2GetAccountRequest](../../models/operations/v2getaccountrequest.md) | :heavy_check_mark: | The request object to use for the request. | +| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | ### Response @@ -742,6 +699,7 @@ package main import( "github.com/formancehq/stack/ledger/client/models/components" "github.com/formancehq/stack/ledger/client" + "github.com/formancehq/stack/ledger/client/models/operations" "context" "log" ) @@ -753,13 +711,13 @@ func main() { ClientSecret: "", }), ) - var ledger string = "ledger001" - - var address string = "" - - var key string = "foo" + request := operations.V2DeleteAccountMetadataRequest{ + Ledger: "ledger001", + Address: "69266 Krajcik Bypass", + Key: "foo", + } ctx := context.Background() - res, err := s.Ledger.V2.DeleteAccountMetadata(ctx, ledger, address, key) + res, err := s.Ledger.V2.DeleteAccountMetadata(ctx, request) if err != nil { log.Fatal(err) } @@ -771,13 +729,11 @@ func main() { ### Parameters -| Parameter | Type | Required | Description | Example | -| -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | | -| `ledger` | *string* | :heavy_check_mark: | Name of the ledger. | ledger001 | -| `address` | *string* | :heavy_check_mark: | Account address | | -| `key` | *string* | :heavy_check_mark: | The key to remove. | foo | -| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | | +| Parameter | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------ | +| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | +| `request` | [operations.V2DeleteAccountMetadataRequest](../../models/operations/v2deleteaccountmetadatarequest.md) | :heavy_check_mark: | The request object to use for the request. 
| +| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | ### Response @@ -801,6 +757,7 @@ package main import( "github.com/formancehq/stack/ledger/client/models/components" "github.com/formancehq/stack/ledger/client" + "github.com/formancehq/stack/ledger/client/models/operations" "context" "log" ) @@ -812,9 +769,11 @@ func main() { ClientSecret: "", }), ) - var ledger string = "ledger001" + request := operations.V2ReadStatsRequest{ + Ledger: "ledger001", + } ctx := context.Background() - res, err := s.Ledger.V2.ReadStats(ctx, ledger) + res, err := s.Ledger.V2.ReadStats(ctx, request) if err != nil { log.Fatal(err) } @@ -826,11 +785,11 @@ func main() { ### Parameters -| Parameter | Type | Required | Description | Example | -| -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | | -| `ledger` | *string* | :heavy_check_mark: | name of the ledger | ledger001 | -| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | | +| Parameter | Type | Required | Description | +| ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | +| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | +| `request` | [operations.V2ReadStatsRequest](../../models/operations/v2readstatsrequest.md) | :heavy_check_mark: | The request object to use for the request. | +| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | ### Response @@ -853,7 +812,7 @@ package main import( "github.com/formancehq/stack/ledger/client/models/components" "github.com/formancehq/stack/ledger/client" - "time" + "github.com/formancehq/stack/ledger/client/models/operations" "context" "log" ) @@ -865,9 +824,11 @@ func main() { ClientSecret: "", }), ) - var ledger string = "ledger001" + request := operations.V2CountTransactionsRequest{ + Ledger: "ledger001", + } ctx := context.Background() - res, err := s.Ledger.V2.CountTransactions(ctx, ledger, nil, nil) + res, err := s.Ledger.V2.CountTransactions(ctx, request) if err != nil { log.Fatal(err) } @@ -879,13 +840,11 @@ func main() { ### Parameters -| Parameter | Type | Required | Description | Example | -| -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | | -| `ledger` | *string* | :heavy_check_mark: | Name of the ledger. 
| ledger001 | -| `pit` | [*time.Time](https://pkg.go.dev/time#Time) | :heavy_minus_sign: | N/A | | -| `requestBody` | map[string]*any* | :heavy_minus_sign: | N/A | | -| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | | +| Parameter | Type | Required | Description | +| ---------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------- | +| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | +| `request` | [operations.V2CountTransactionsRequest](../../models/operations/v2counttransactionsrequest.md) | :heavy_check_mark: | The request object to use for the request. | +| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | ### Response @@ -965,6 +924,7 @@ package main import( "github.com/formancehq/stack/ledger/client/models/components" "github.com/formancehq/stack/ledger/client" + "github.com/formancehq/stack/ledger/client/models/operations" "math/big" "context" "log" @@ -977,39 +937,40 @@ func main() { ClientSecret: "", }), ) - var ledger string = "ledger001" - - v2PostTransaction := components.V2PostTransaction{ - Postings: []components.V2Posting{ - components.V2Posting{ - Amount: big.NewInt(100), - Asset: "COIN", - Destination: "users:002", - Source: "users:001", + request := operations.V2CreateTransactionRequest{ + Ledger: "ledger001", + DryRun: client.Bool(true), + Force: client.Bool(true), + V2PostTransaction: components.V2PostTransaction{ + Postings: []components.V2Posting{ + components.V2Posting{ + Amount: big.NewInt(100), + Asset: "COIN", + Destination: "users:002", + Source: "users:001", + }, }, - }, - Script: &components.V2PostTransactionScript{ - Plain: "vars { - account $user - } - send [COIN 10] ( - source = @world - destination = $user - ) - ", - Vars: map[string]any{ - "user": "users:042", + Script: &components.V2PostTransactionScript{ + Plain: "vars { + account $user + } + send [COIN 10] ( + source = @world + destination = $user + ) + ", + Vars: map[string]any{ + "user": "users:042", + }, + }, + Reference: client.String("ref:001"), + Metadata: map[string]string{ + "admin": "true", }, - }, - Reference: client.String("ref:001"), - Metadata: map[string]string{ - "admin": "true", }, } - - var dryRun *bool = client.Bool(true) ctx := context.Background() - res, err := s.Ledger.V2.CreateTransaction(ctx, ledger, v2PostTransaction, dryRun, nil) + res, err := s.Ledger.V2.CreateTransaction(ctx, request) if err != nil { log.Fatal(err) } @@ -1021,14 +982,11 @@ func main() { ### Parameters -| Parameter | Type | Required | Description | Example | -| -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | 
-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | | -| `ledger` | *string* | :heavy_check_mark: | Name of the ledger. | ledger001 | -| `v2PostTransaction` | [components.V2PostTransaction](../../models/components/v2posttransaction.md) | :heavy_check_mark: | The request body must contain at least one of the following objects:
- `postings`: suitable for simple transactions
- `script`: enabling more complex transactions with Numscript
| | -| `dryRun` | **bool* | :heavy_minus_sign: | Set the dryRun mode. dry run mode doesn't add the logs to the database or publish a message to the message broker. | true | -| `idempotencyKey` | **string* | :heavy_minus_sign: | Use an idempotency key | | -| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | | +| Parameter | Type | Required | Description | +| ---------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------- | +| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | +| `request` | [operations.V2CreateTransactionRequest](../../models/operations/v2createtransactionrequest.md) | :heavy_check_mark: | The request object to use for the request. | +| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | ### Response @@ -1051,8 +1009,8 @@ package main import( "github.com/formancehq/stack/ledger/client/models/components" "github.com/formancehq/stack/ledger/client" + "github.com/formancehq/stack/ledger/client/models/operations" "math/big" - "time" "context" "log" ) @@ -1064,11 +1022,12 @@ func main() { ClientSecret: "", }), ) - var ledger string = "ledger001" - - var id *big.Int = big.NewInt(1234) + request := operations.V2GetTransactionRequest{ + Ledger: "ledger001", + ID: big.NewInt(1234), + } ctx := context.Background() - res, err := s.Ledger.V2.GetTransaction(ctx, ledger, id, nil, nil) + res, err := s.Ledger.V2.GetTransaction(ctx, request) if err != nil { log.Fatal(err) } @@ -1080,14 +1039,11 @@ func main() { ### Parameters -| Parameter | Type | Required | Description | Example | -| -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | | -| `ledger` | *string* | :heavy_check_mark: | Name of the ledger. | ledger001 | -| `id` | [*big.Int](https://pkg.go.dev/math/big#Int) | :heavy_check_mark: | Transaction ID. | 1234 | -| `expand` | **string* | :heavy_minus_sign: | N/A | | -| `pit` | [*time.Time](https://pkg.go.dev/time#Time) | :heavy_minus_sign: | N/A | | -| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | | +| Parameter | Type | Required | Description | +| ---------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------- | +| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. 
| +| `request` | [operations.V2GetTransactionRequest](../../models/operations/v2gettransactionrequest.md) | :heavy_check_mark: | The request object to use for the request. | +| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | ### Response @@ -1171,6 +1127,7 @@ package main import( "github.com/formancehq/stack/ledger/client/models/components" "github.com/formancehq/stack/ledger/client" + "github.com/formancehq/stack/ledger/client/models/operations" "math/big" "context" "log" @@ -1183,13 +1140,13 @@ func main() { ClientSecret: "", }), ) - var ledger string = "ledger001" - - var id *big.Int = big.NewInt(1234) - - var key string = "foo" + request := operations.V2DeleteTransactionMetadataRequest{ + Ledger: "ledger001", + ID: big.NewInt(1234), + Key: "foo", + } ctx := context.Background() - res, err := s.Ledger.V2.DeleteTransactionMetadata(ctx, ledger, id, key) + res, err := s.Ledger.V2.DeleteTransactionMetadata(ctx, request) if err != nil { log.Fatal(err) } @@ -1201,13 +1158,11 @@ func main() { ### Parameters -| Parameter | Type | Required | Description | Example | -| -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | | -| `ledger` | *string* | :heavy_check_mark: | Name of the ledger. | ledger001 | -| `id` | [*big.Int](https://pkg.go.dev/math/big#Int) | :heavy_check_mark: | Transaction ID. | 1234 | -| `key` | *string* | :heavy_check_mark: | The key to remove. | foo | -| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | | +| Parameter | Type | Required | Description | +| -------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------- | +| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | +| `request` | [operations.V2DeleteTransactionMetadataRequest](../../models/operations/v2deletetransactionmetadatarequest.md) | :heavy_check_mark: | The request object to use for the request. | +| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. 
| ### Response @@ -1230,6 +1185,7 @@ package main import( "github.com/formancehq/stack/ledger/client/models/components" "github.com/formancehq/stack/ledger/client" + "github.com/formancehq/stack/ledger/client/models/operations" "math/big" "context" "log" @@ -1242,11 +1198,13 @@ func main() { ClientSecret: "", }), ) - var ledger string = "ledger001" - - var id *big.Int = big.NewInt(1234) + request := operations.V2RevertTransactionRequest{ + Ledger: "ledger001", + ID: big.NewInt(1234), + DryRun: client.Bool(true), + } ctx := context.Background() - res, err := s.Ledger.V2.RevertTransaction(ctx, ledger, id, nil, nil) + res, err := s.Ledger.V2.RevertTransaction(ctx, request) if err != nil { log.Fatal(err) } @@ -1258,14 +1216,11 @@ func main() { ### Parameters -| Parameter | Type | Required | Description | Example | -| -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | | -| `ledger` | *string* | :heavy_check_mark: | Name of the ledger. | ledger001 | -| `id` | [*big.Int](https://pkg.go.dev/math/big#Int) | :heavy_check_mark: | Transaction ID. | 1234 | -| `force` | **bool* | :heavy_minus_sign: | Force revert | | -| `atEffectiveDate` | **bool* | :heavy_minus_sign: | Revert transaction at effective date of the original tx | | -| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | | +| Parameter | Type | Required | Description | +| ---------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------- | +| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | +| `request` | [operations.V2RevertTransactionRequest](../../models/operations/v2reverttransactionrequest.md) | :heavy_check_mark: | The request object to use for the request. | +| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. 
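
In the v2 hunks above, optional query parameters become pointer fields on the request structs, populated with the `client.Bool`, `client.String`, and `client.Int64` helpers (see `DryRun` in the new `CreateTransaction` and `RevertTransaction` examples). A minimal dry-run sketch, restricted to postings and to names that appear in this diff, might read:

```go
package main

import (
	"context"
	"log"
	"math/big"

	"github.com/formancehq/stack/ledger/client"
	"github.com/formancehq/stack/ledger/client/models/components"
	"github.com/formancehq/stack/ledger/client/models/operations"
)

func main() {
	s := client.New(
		client.WithSecurity(components.Security{
			ClientID:     "",
			ClientSecret: "",
		}),
	)
	ctx := context.Background()

	request := operations.V2CreateTransactionRequest{
		Ledger: "ledger001",
		// Dry-run mode doesn't add the logs to the database or publish a message
		// to the message broker (per the parameter description removed above).
		DryRun: client.Bool(true),
		V2PostTransaction: components.V2PostTransaction{
			Postings: []components.V2Posting{
				{
					Amount:      big.NewInt(100),
					Asset:       "COIN",
					Source:      "users:001",
					Destination: "users:002",
				},
			},
			Reference: client.String("ref:001"),
		},
	}
	res, err := s.Ledger.V2.CreateTransaction(ctx, request)
	if err != nil {
		log.Fatal(err)
	}
	_ = res // inspect the created (but unpersisted) transaction here
}
```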
| ### Response @@ -1288,7 +1243,7 @@ package main import( "github.com/formancehq/stack/ledger/client/models/components" "github.com/formancehq/stack/ledger/client" - "time" + "github.com/formancehq/stack/ledger/client/models/operations" "context" "log" ) @@ -1300,9 +1255,11 @@ func main() { ClientSecret: "", }), ) - var ledger string = "ledger001" + request := operations.V2GetBalancesAggregatedRequest{ + Ledger: "ledger001", + } ctx := context.Background() - res, err := s.Ledger.V2.GetBalancesAggregated(ctx, ledger, nil, nil, nil) + res, err := s.Ledger.V2.GetBalancesAggregated(ctx, request) if err != nil { log.Fatal(err) } @@ -1314,14 +1271,11 @@ func main() { ### Parameters -| Parameter | Type | Required | Description | Example | -| -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | | -| `ledger` | *string* | :heavy_check_mark: | Name of the ledger. | ledger001 | -| `pit` | [*time.Time](https://pkg.go.dev/time#Time) | :heavy_minus_sign: | N/A | | -| `useInsertionDate` | **bool* | :heavy_minus_sign: | Use insertion date instead of effective date | | -| `requestBody` | map[string]*any* | :heavy_minus_sign: | N/A | | -| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | | +| Parameter | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------ | +| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | +| `request` | [operations.V2GetBalancesAggregatedRequest](../../models/operations/v2getbalancesaggregatedrequest.md) | :heavy_check_mark: | The request object to use for the request. | +| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. 
| ### Response @@ -1457,6 +1411,7 @@ package main import( "github.com/formancehq/stack/ledger/client/models/components" "github.com/formancehq/stack/ledger/client" + "github.com/formancehq/stack/ledger/client/models/operations" "context" "log" ) @@ -1468,9 +1423,11 @@ func main() { ClientSecret: "", }), ) - var ledger string = "ledger001" + request := operations.V2ImportLogsRequest{ + Ledger: "ledger001", + } ctx := context.Background() - res, err := s.Ledger.V2.ImportLogs(ctx, ledger, nil) + res, err := s.Ledger.V2.ImportLogs(ctx, request) if err != nil { log.Fatal(err) } @@ -1482,12 +1439,11 @@ func main() { ### Parameters -| Parameter | Type | Required | Description | Example | -| -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | | -| `ledger` | *string* | :heavy_check_mark: | Name of the ledger. | ledger001 | -| `requestBody` | **string* | :heavy_minus_sign: | N/A | | -| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | | +| Parameter | Type | Required | Description | +| -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | +| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | +| `request` | [operations.V2ImportLogsRequest](../../models/operations/v2importlogsrequest.md) | :heavy_check_mark: | The request object to use for the request. | +| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | ### Response @@ -1510,6 +1466,7 @@ package main import( "github.com/formancehq/stack/ledger/client/models/components" "github.com/formancehq/stack/ledger/client" + "github.com/formancehq/stack/ledger/client/models/operations" "context" "log" ) @@ -1521,9 +1478,11 @@ func main() { ClientSecret: "", }), ) - var ledger string = "ledger001" + request := operations.V2ExportLogsRequest{ + Ledger: "ledger001", + } ctx := context.Background() - res, err := s.Ledger.V2.ExportLogs(ctx, ledger) + res, err := s.Ledger.V2.ExportLogs(ctx, request) if err != nil { log.Fatal(err) } @@ -1535,11 +1494,11 @@ func main() { ### Parameters -| Parameter | Type | Required | Description | Example | -| -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | | -| `ledger` | *string* | :heavy_check_mark: | Name of the ledger. | ledger001 | -| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. 
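To make the relationship between the two endpoints concrete, a sketch (not generated code) of exporting a ledger's log stream and replaying it into a second ledger. The target ledger name `ledger002` is illustrative, and the raw-body handling follows the new `pkg/testserver` helpers further down in this diff.

```go
package main

import (
	"context"
	"io"
	"log"

	"github.com/formancehq/stack/ledger/client"
	"github.com/formancehq/stack/ledger/client/models/components"
	"github.com/formancehq/stack/ledger/client/models/operations"
)

func main() {
	s := client.New(
		client.WithSecurity(components.Security{
			ClientID:     "",
			ClientSecret: "",
		}),
	)
	ctx := context.Background()

	// Export the raw log stream of ledger001; the payload is read from the
	// underlying HTTP response body, as the testserver Export helper does.
	exportRes, err := s.Ledger.V2.ExportLogs(ctx, operations.V2ExportLogsRequest{Ledger: "ledger001"})
	if err != nil {
		log.Fatal(err)
	}
	raw, err := io.ReadAll(exportRes.HTTPMeta.Response.Body)
	if err != nil {
		log.Fatal(err)
	}

	// Replay the exported logs into another ledger ("ledger002" is
	// illustrative); RequestBody is the optional *string payload.
	payload := string(raw)
	_, err = s.Ledger.V2.ImportLogs(ctx, operations.V2ImportLogsRequest{
		Ledger:      "ledger002",
		RequestBody: &payload,
	})
	if err != nil {
		log.Fatal(err)
	}
}
```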
| | +| Parameter | Type | Required | Description | +| -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | +| `ctx` | [context.Context](https://pkg.go.dev/context#Context) | :heavy_check_mark: | The context to use for the request. | +| `request` | [operations.V2ExportLogsRequest](../../models/operations/v2exportlogsrequest.md) | :heavy_check_mark: | The request object to use for the request. | +| `opts` | [][operations.Option](../../models/operations/option.md) | :heavy_minus_sign: | The options for this request. | ### Response diff --git a/pkg/client/formance.go b/pkg/client/formance.go index fc75add9a..e1a4b7c0c 100644 --- a/pkg/client/formance.go +++ b/pkg/client/formance.go @@ -142,10 +142,10 @@ func New(opts ...SDKOption) *Formance { sdk := &Formance{ sdkConfiguration: sdkConfiguration{ Language: "go", - OpenAPIDocVersion: "LEDGER_VERSION", - SDKVersion: "0.3.0", + OpenAPIDocVersion: "v1", + SDKVersion: "0.4.15", GenVersion: "2.384.1", - UserAgent: "speakeasy-sdk/go 0.3.0 2.384.1 LEDGER_VERSION github.com/formancehq/stack/ledger/client", + UserAgent: "speakeasy-sdk/go 0.4.15 2.384.1 v1 github.com/formancehq/stack/ledger/client", Hooks: hooks.New(), }, } diff --git a/pkg/client/ledger.go b/pkg/client/ledger.go index 25fcac408..5b04266a7 100644 --- a/pkg/client/ledger.go +++ b/pkg/client/ledger.go @@ -2,6 +2,21 @@ package client +import ( + "bytes" + "context" + "fmt" + "github.com/cenkalti/backoff/v4" + "github.com/formancehq/stack/ledger/client/internal/hooks" + "github.com/formancehq/stack/ledger/client/internal/utils" + "github.com/formancehq/stack/ledger/client/models/components" + "github.com/formancehq/stack/ledger/client/models/operations" + "github.com/formancehq/stack/ledger/client/models/sdkerrors" + "io" + "net/http" + "net/url" +) + type Ledger struct { V1 *V1 V2 *V2 @@ -16,3 +31,194 @@ func newLedger(sdkConfig sdkConfiguration) *Ledger { V2: newV2(sdkConfig), } } + +// GetInfo - Show server information +func (s *Ledger) GetInfo(ctx context.Context, opts ...operations.Option) (*operations.V2GetInfoResponse, error) { + hookCtx := hooks.HookContext{ + Context: ctx, + OperationID: "v2GetInfo", + OAuth2Scopes: []string{"ledger:read", "ledger:read"}, + SecuritySource: s.sdkConfiguration.Security, + } + + o := operations.Options{} + supportedOptions := []string{ + operations.SupportedOptionRetries, + operations.SupportedOptionTimeout, + } + + for _, opt := range opts { + if err := opt(&o, supportedOptions...); err != nil { + return nil, fmt.Errorf("error applying option: %w", err) + } + } + + baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) + opURL, err := url.JoinPath(baseURL, "/_/info") + if err != nil { + return nil, fmt.Errorf("error generating URL: %w", err) + } + + timeout := o.Timeout + if timeout == nil { + timeout = s.sdkConfiguration.Timeout + } + + if timeout != nil { + var cancel context.CancelFunc + ctx, cancel = context.WithTimeout(ctx, *timeout) + defer cancel() + } + + req, err := http.NewRequestWithContext(ctx, "GET", opURL, nil) + if err != nil { + return nil, fmt.Errorf("error creating request: %w", err) + } + req.Header.Set("Accept", "application/json") + req.Header.Set("User-Agent", s.sdkConfiguration.UserAgent) + + if err := utils.PopulateSecurity(ctx, req, 
s.sdkConfiguration.Security); err != nil { + return nil, err + } + + globalRetryConfig := s.sdkConfiguration.RetryConfig + retryConfig := o.Retries + if retryConfig == nil { + if globalRetryConfig != nil { + retryConfig = globalRetryConfig + } + } + + var httpRes *http.Response + if retryConfig != nil { + httpRes, err = utils.Retry(ctx, utils.Retries{ + Config: retryConfig, + StatusCodes: []string{ + "429", + "500", + "502", + "503", + "504", + }, + }, func() (*http.Response, error) { + if req.Body != nil { + copyBody, err := req.GetBody() + if err != nil { + return nil, err + } + req.Body = copyBody + } + + req, err = s.sdkConfiguration.Hooks.BeforeRequest(hooks.BeforeRequestContext{HookContext: hookCtx}, req) + if err != nil { + return nil, backoff.Permanent(err) + } + + httpRes, err := s.sdkConfiguration.Client.Do(req) + if err != nil || httpRes == nil { + if err != nil { + err = fmt.Errorf("error sending request: %w", err) + } else { + err = fmt.Errorf("error sending request: no response") + } + + _, err = s.sdkConfiguration.Hooks.AfterError(hooks.AfterErrorContext{HookContext: hookCtx}, nil, err) + } + return httpRes, err + }) + + if err != nil { + return nil, err + } else { + httpRes, err = s.sdkConfiguration.Hooks.AfterSuccess(hooks.AfterSuccessContext{HookContext: hookCtx}, httpRes) + if err != nil { + return nil, err + } + } + } else { + req, err = s.sdkConfiguration.Hooks.BeforeRequest(hooks.BeforeRequestContext{HookContext: hookCtx}, req) + if err != nil { + return nil, err + } + + httpRes, err = s.sdkConfiguration.Client.Do(req) + if err != nil || httpRes == nil { + if err != nil { + err = fmt.Errorf("error sending request: %w", err) + } else { + err = fmt.Errorf("error sending request: no response") + } + + _, err = s.sdkConfiguration.Hooks.AfterError(hooks.AfterErrorContext{HookContext: hookCtx}, nil, err) + return nil, err + } else if utils.MatchStatusCodes([]string{"default"}, httpRes.StatusCode) { + _httpRes, err := s.sdkConfiguration.Hooks.AfterError(hooks.AfterErrorContext{HookContext: hookCtx}, httpRes, nil) + if err != nil { + return nil, err + } else if _httpRes != nil { + httpRes = _httpRes + } + } else { + httpRes, err = s.sdkConfiguration.Hooks.AfterSuccess(hooks.AfterSuccessContext{HookContext: hookCtx}, httpRes) + if err != nil { + return nil, err + } + } + } + + res := &operations.V2GetInfoResponse{ + HTTPMeta: components.HTTPMetadata{ + Request: req, + Response: httpRes, + }, + } + + rawBody, err := io.ReadAll(httpRes.Body) + if err != nil { + return nil, fmt.Errorf("error reading response body: %w", err) + } + httpRes.Body.Close() + httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody)) + + switch { + case httpRes.StatusCode == 200: + switch { + case utils.MatchContentType(httpRes.Header.Get("Content-Type"), `application/json`): + var out components.V2ConfigInfoResponse + if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil { + return nil, err + } + + res.V2ConfigInfoResponse = &out + default: + return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", httpRes.Header.Get("Content-Type")), httpRes.StatusCode, string(rawBody), httpRes) + } + case httpRes.StatusCode >= 500 && httpRes.StatusCode < 600: + switch { + case utils.MatchContentType(httpRes.Header.Get("Content-Type"), `application/json`): + var out sdkerrors.V2ErrorResponse + if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil { + return nil, err + } + + res.V2ErrorResponse = &out + default: + 
return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", httpRes.Header.Get("Content-Type")), httpRes.StatusCode, string(rawBody), httpRes) + } + default: + switch { + case utils.MatchContentType(httpRes.Header.Get("Content-Type"), `application/json`): + var out sdkerrors.V2ErrorResponse + if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil { + return nil, err + } + + return nil, &out + default: + return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", httpRes.Header.Get("Content-Type")), httpRes.StatusCode, string(rawBody), httpRes) + } + } + + return res, nil + +} diff --git a/pkg/client/models/components/errorsenum.go b/pkg/client/models/components/errorsenum.go index d38e729ed..d7cd7ce1b 100644 --- a/pkg/client/models/components/errorsenum.go +++ b/pkg/client/models/components/errorsenum.go @@ -18,6 +18,7 @@ const ( ErrorsEnumCompilationFailed ErrorsEnum = "COMPILATION_FAILED" ErrorsEnumMetadataOverride ErrorsEnum = "METADATA_OVERRIDE" ErrorsEnumNotFound ErrorsEnum = "NOT_FOUND" + ErrorsEnumTimeout ErrorsEnum = "TIMEOUT" ) func (e ErrorsEnum) ToPointer() *ErrorsEnum { @@ -44,6 +45,8 @@ func (e *ErrorsEnum) UnmarshalJSON(data []byte) error { case "METADATA_OVERRIDE": fallthrough case "NOT_FOUND": + fallthrough + case "TIMEOUT": *e = ErrorsEnum(v) return nil default: diff --git a/pkg/client/models/components/v2createledgerrequest.go b/pkg/client/models/components/v2createledgerrequest.go index 6d2dd9f42..2b7d9c08d 100644 --- a/pkg/client/models/components/v2createledgerrequest.go +++ b/pkg/client/models/components/v2createledgerrequest.go @@ -5,6 +5,7 @@ package components type V2CreateLedgerRequest struct { Bucket *string `json:"bucket,omitempty"` Metadata map[string]string `json:"metadata,omitempty"` + Features map[string]string `json:"features,omitempty"` } func (o *V2CreateLedgerRequest) GetBucket() *string { @@ -20,3 +21,10 @@ func (o *V2CreateLedgerRequest) GetMetadata() map[string]string { } return o.Metadata } + +func (o *V2CreateLedgerRequest) GetFeatures() map[string]string { + if o == nil { + return nil + } + return o.Features +} diff --git a/pkg/client/models/components/v2errorsenum.go b/pkg/client/models/components/v2errorsenum.go index defc9c108..598f3248c 100644 --- a/pkg/client/models/components/v2errorsenum.go +++ b/pkg/client/models/components/v2errorsenum.go @@ -22,6 +22,8 @@ const ( V2ErrorsEnumNoPostings V2ErrorsEnum = "NO_POSTINGS" V2ErrorsEnumLedgerNotFound V2ErrorsEnum = "LEDGER_NOT_FOUND" V2ErrorsEnumImport V2ErrorsEnum = "IMPORT" + V2ErrorsEnumTimeout V2ErrorsEnum = "TIMEOUT" + V2ErrorsEnumBulkSizeExceeded V2ErrorsEnum = "BULK_SIZE_EXCEEDED" ) func (e V2ErrorsEnum) ToPointer() *V2ErrorsEnum { @@ -56,6 +58,10 @@ func (e *V2ErrorsEnum) UnmarshalJSON(data []byte) error { case "LEDGER_NOT_FOUND": fallthrough case "IMPORT": + fallthrough + case "TIMEOUT": + fallthrough + case "BULK_SIZE_EXCEEDED": *e = V2ErrorsEnum(v) return nil default: diff --git a/pkg/client/models/components/v2expandedtransaction.go b/pkg/client/models/components/v2expandedtransaction.go deleted file mode 100644 index 976a7c6b2..000000000 --- a/pkg/client/models/components/v2expandedtransaction.go +++ /dev/null @@ -1,87 +0,0 @@ -// Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT. 
- -package components - -import ( - "github.com/formancehq/stack/ledger/client/internal/utils" - "math/big" - "time" -) - -type V2ExpandedTransaction struct { - Timestamp time.Time `json:"timestamp"` - Postings []V2Posting `json:"postings"` - Reference *string `json:"reference,omitempty"` - Metadata map[string]string `json:"metadata"` - ID *big.Int `json:"id"` - Reverted bool `json:"reverted"` - PreCommitVolumes map[string]map[string]V2Volume `json:"preCommitVolumes,omitempty"` - PostCommitVolumes map[string]map[string]V2Volume `json:"postCommitVolumes,omitempty"` -} - -func (v V2ExpandedTransaction) MarshalJSON() ([]byte, error) { - return utils.MarshalJSON(v, "", false) -} - -func (v *V2ExpandedTransaction) UnmarshalJSON(data []byte) error { - if err := utils.UnmarshalJSON(data, &v, "", false, false); err != nil { - return err - } - return nil -} - -func (o *V2ExpandedTransaction) GetTimestamp() time.Time { - if o == nil { - return time.Time{} - } - return o.Timestamp -} - -func (o *V2ExpandedTransaction) GetPostings() []V2Posting { - if o == nil { - return []V2Posting{} - } - return o.Postings -} - -func (o *V2ExpandedTransaction) GetReference() *string { - if o == nil { - return nil - } - return o.Reference -} - -func (o *V2ExpandedTransaction) GetMetadata() map[string]string { - if o == nil { - return map[string]string{} - } - return o.Metadata -} - -func (o *V2ExpandedTransaction) GetID() *big.Int { - if o == nil { - return big.NewInt(0) - } - return o.ID -} - -func (o *V2ExpandedTransaction) GetReverted() bool { - if o == nil { - return false - } - return o.Reverted -} - -func (o *V2ExpandedTransaction) GetPreCommitVolumes() map[string]map[string]V2Volume { - if o == nil { - return nil - } - return o.PreCommitVolumes -} - -func (o *V2ExpandedTransaction) GetPostCommitVolumes() map[string]map[string]V2Volume { - if o == nil { - return nil - } - return o.PostCommitVolumes -} diff --git a/pkg/client/models/components/v2gettransactionresponse.go b/pkg/client/models/components/v2gettransactionresponse.go index d8772bc9a..bb1cbbe71 100644 --- a/pkg/client/models/components/v2gettransactionresponse.go +++ b/pkg/client/models/components/v2gettransactionresponse.go @@ -3,12 +3,12 @@ package components type V2GetTransactionResponse struct { - Data V2ExpandedTransaction `json:"data"` + Data V2Transaction `json:"data"` } -func (o *V2GetTransactionResponse) GetData() V2ExpandedTransaction { +func (o *V2GetTransactionResponse) GetData() V2Transaction { if o == nil { - return V2ExpandedTransaction{} + return V2Transaction{} } return o.Data } diff --git a/pkg/client/models/components/v2transaction.go b/pkg/client/models/components/v2transaction.go index 747a9738a..5184a4560 100644 --- a/pkg/client/models/components/v2transaction.go +++ b/pkg/client/models/components/v2transaction.go @@ -9,12 +9,18 @@ import ( ) type V2Transaction struct { - Timestamp time.Time `json:"timestamp"` - Postings []V2Posting `json:"postings"` - Reference *string `json:"reference,omitempty"` - Metadata map[string]string `json:"metadata"` - ID *big.Int `json:"id"` - Reverted bool `json:"reverted"` + InsertedAt time.Time `json:"insertedAt"` + Timestamp time.Time `json:"timestamp"` + Postings []V2Posting `json:"postings"` + Reference *string `json:"reference,omitempty"` + Metadata map[string]string `json:"metadata"` + ID *big.Int `json:"id"` + Reverted bool `json:"reverted"` + RevertedAt *time.Time `json:"revertedAt,omitempty"` + PreCommitVolumes map[string]map[string]V2Volume `json:"preCommitVolumes,omitempty"` + 
PostCommitVolumes map[string]map[string]V2Volume `json:"postCommitVolumes,omitempty"` + PreCommitEffectiveVolumes map[string]map[string]V2Volume `json:"preCommitEffectiveVolumes,omitempty"` + PostCommitEffectiveVolumes map[string]map[string]V2Volume `json:"postCommitEffectiveVolumes,omitempty"` } func (v V2Transaction) MarshalJSON() ([]byte, error) { @@ -28,6 +34,13 @@ func (v *V2Transaction) UnmarshalJSON(data []byte) error { return nil } +func (o *V2Transaction) GetInsertedAt() time.Time { + if o == nil { + return time.Time{} + } + return o.InsertedAt +} + func (o *V2Transaction) GetTimestamp() time.Time { if o == nil { return time.Time{} @@ -69,3 +82,38 @@ func (o *V2Transaction) GetReverted() bool { } return o.Reverted } + +func (o *V2Transaction) GetRevertedAt() *time.Time { + if o == nil { + return nil + } + return o.RevertedAt +} + +func (o *V2Transaction) GetPreCommitVolumes() map[string]map[string]V2Volume { + if o == nil { + return nil + } + return o.PreCommitVolumes +} + +func (o *V2Transaction) GetPostCommitVolumes() map[string]map[string]V2Volume { + if o == nil { + return nil + } + return o.PostCommitVolumes +} + +func (o *V2Transaction) GetPreCommitEffectiveVolumes() map[string]map[string]V2Volume { + if o == nil { + return nil + } + return o.PreCommitEffectiveVolumes +} + +func (o *V2Transaction) GetPostCommitEffectiveVolumes() map[string]map[string]V2Volume { + if o == nil { + return nil + } + return o.PostCommitEffectiveVolumes +} diff --git a/pkg/client/models/components/v2transactionscursorresponse.go b/pkg/client/models/components/v2transactionscursorresponse.go index ed42ff90f..bb46022b8 100644 --- a/pkg/client/models/components/v2transactionscursorresponse.go +++ b/pkg/client/models/components/v2transactionscursorresponse.go @@ -3,11 +3,11 @@ package components type V2TransactionsCursorResponseCursor struct { - PageSize int64 `json:"pageSize"` - HasMore bool `json:"hasMore"` - Previous *string `json:"previous,omitempty"` - Next *string `json:"next,omitempty"` - Data []V2ExpandedTransaction `json:"data"` + PageSize int64 `json:"pageSize"` + HasMore bool `json:"hasMore"` + Previous *string `json:"previous,omitempty"` + Next *string `json:"next,omitempty"` + Data []V2Transaction `json:"data"` } func (o *V2TransactionsCursorResponseCursor) GetPageSize() int64 { @@ -38,9 +38,9 @@ func (o *V2TransactionsCursorResponseCursor) GetNext() *string { return o.Next } -func (o *V2TransactionsCursorResponseCursor) GetData() []V2ExpandedTransaction { +func (o *V2TransactionsCursorResponseCursor) GetData() []V2Transaction { if o == nil { - return []V2ExpandedTransaction{} + return []V2Transaction{} } return o.Data } diff --git a/pkg/client/models/operations/v2createtransaction.go b/pkg/client/models/operations/v2createtransaction.go index aeb743cd1..2bc7f63e9 100644 --- a/pkg/client/models/operations/v2createtransaction.go +++ b/pkg/client/models/operations/v2createtransaction.go @@ -13,6 +13,8 @@ type V2CreateTransactionRequest struct { DryRun *bool `queryParam:"style=form,explode=true,name=dryRun"` // Use an idempotency key IdempotencyKey *string `header:"style=simple,explode=false,name=Idempotency-Key"` + // Disable balance checks when passing postings + Force *bool `queryParam:"style=form,explode=true,name=force"` // The request body must contain at least one of the following objects: // - `postings`: suitable for simple transactions // - `script`: enabling more complex transactions with Numscript @@ -41,6 +43,13 @@ func (o *V2CreateTransactionRequest) GetIdempotencyKey() *string 
{ return o.IdempotencyKey } +func (o *V2CreateTransactionRequest) GetForce() *bool { + if o == nil { + return nil + } + return o.Force +} + func (o *V2CreateTransactionRequest) GetV2PostTransaction() components.V2PostTransaction { if o == nil { return components.V2PostTransaction{} diff --git a/pkg/client/models/operations/v2reverttransaction.go b/pkg/client/models/operations/v2reverttransaction.go index 786e3a47d..2f2aa15eb 100644 --- a/pkg/client/models/operations/v2reverttransaction.go +++ b/pkg/client/models/operations/v2reverttransaction.go @@ -17,6 +17,8 @@ type V2RevertTransactionRequest struct { Force *bool `queryParam:"style=form,explode=true,name=force"` // Revert transaction at effective date of the original tx AtEffectiveDate *bool `queryParam:"style=form,explode=true,name=atEffectiveDate"` + // Set the dryRun mode. dry run mode doesn't add the logs to the database or publish a message to the message broker. + DryRun *bool `queryParam:"style=form,explode=true,name=dryRun"` } func (v V2RevertTransactionRequest) MarshalJSON() ([]byte, error) { @@ -58,6 +60,13 @@ func (o *V2RevertTransactionRequest) GetAtEffectiveDate() *bool { return o.AtEffectiveDate } +func (o *V2RevertTransactionRequest) GetDryRun() *bool { + if o == nil { + return nil + } + return o.DryRun +} + type V2RevertTransactionResponse struct { HTTPMeta components.HTTPMetadata `json:"-"` // OK diff --git a/pkg/client/v1.go b/pkg/client/v1.go index 64127606d..646feaa2f 100644 --- a/pkg/client/v1.go +++ b/pkg/client/v1.go @@ -13,7 +13,6 @@ import ( "github.com/formancehq/stack/ledger/client/models/operations" "github.com/formancehq/stack/ledger/client/models/sdkerrors" "io" - "math/big" "net/http" "net/url" ) @@ -208,7 +207,7 @@ func (s *V1) GetInfo(ctx context.Context, opts ...operations.Option) (*operation } // GetLedgerInfo - Get information about a ledger -func (s *V1) GetLedgerInfo(ctx context.Context, ledger string, opts ...operations.Option) (*operations.GetLedgerInfoResponse, error) { +func (s *V1) GetLedgerInfo(ctx context.Context, request operations.GetLedgerInfoRequest, opts ...operations.Option) (*operations.GetLedgerInfoResponse, error) { hookCtx := hooks.HookContext{ Context: ctx, OperationID: "getLedgerInfo", @@ -216,10 +215,6 @@ func (s *V1) GetLedgerInfo(ctx context.Context, ledger string, opts ...operation SecuritySource: s.sdkConfiguration.Security, } - request := operations.GetLedgerInfoRequest{ - Ledger: ledger, - } - o := operations.Options{} supportedOptions := []string{ operations.SupportedOptionRetries, @@ -391,7 +386,7 @@ func (s *V1) GetLedgerInfo(ctx context.Context, ledger string, opts ...operation } // CountAccounts - Count the accounts from a ledger -func (s *V1) CountAccounts(ctx context.Context, ledger string, address *string, metadata map[string]any, opts ...operations.Option) (*operations.CountAccountsResponse, error) { +func (s *V1) CountAccounts(ctx context.Context, request operations.CountAccountsRequest, opts ...operations.Option) (*operations.CountAccountsResponse, error) { hookCtx := hooks.HookContext{ Context: ctx, OperationID: "countAccounts", @@ -399,12 +394,6 @@ func (s *V1) CountAccounts(ctx context.Context, ledger string, address *string, SecuritySource: s.sdkConfiguration.Security, } - request := operations.CountAccountsRequest{ - Ledger: ledger, - Address: address, - Metadata: metadata, - } - o := operations.Options{} supportedOptions := []string{ operations.SupportedOptionRetries, @@ -767,7 +756,7 @@ func (s *V1) ListAccounts(ctx context.Context, request 
operations.ListAccountsRe } // GetAccount - Get account by its address -func (s *V1) GetAccount(ctx context.Context, ledger string, address string, opts ...operations.Option) (*operations.GetAccountResponse, error) { +func (s *V1) GetAccount(ctx context.Context, request operations.GetAccountRequest, opts ...operations.Option) (*operations.GetAccountResponse, error) { hookCtx := hooks.HookContext{ Context: ctx, OperationID: "getAccount", @@ -775,11 +764,6 @@ func (s *V1) GetAccount(ctx context.Context, ledger string, address string, opts SecuritySource: s.sdkConfiguration.Security, } - request := operations.GetAccountRequest{ - Ledger: ledger, - Address: address, - } - o := operations.Options{} supportedOptions := []string{ operations.SupportedOptionRetries, @@ -951,7 +935,7 @@ func (s *V1) GetAccount(ctx context.Context, ledger string, address string, opts } // AddMetadataToAccount - Add metadata to an account -func (s *V1) AddMetadataToAccount(ctx context.Context, ledger string, address string, requestBody map[string]any, opts ...operations.Option) (*operations.AddMetadataToAccountResponse, error) { +func (s *V1) AddMetadataToAccount(ctx context.Context, request operations.AddMetadataToAccountRequest, opts ...operations.Option) (*operations.AddMetadataToAccountResponse, error) { hookCtx := hooks.HookContext{ Context: ctx, OperationID: "addMetadataToAccount", @@ -959,12 +943,6 @@ func (s *V1) AddMetadataToAccount(ctx context.Context, ledger string, address st SecuritySource: s.sdkConfiguration.Security, } - request := operations.AddMetadataToAccountRequest{ - Ledger: ledger, - Address: address, - RequestBody: requestBody, - } - o := operations.Options{} supportedOptions := []string{ operations.SupportedOptionRetries, @@ -1131,7 +1109,7 @@ func (s *V1) AddMetadataToAccount(ctx context.Context, ledger string, address st } // GetMapping - Get the mapping of a ledger -func (s *V1) GetMapping(ctx context.Context, ledger string, opts ...operations.Option) (*operations.GetMappingResponse, error) { +func (s *V1) GetMapping(ctx context.Context, request operations.GetMappingRequest, opts ...operations.Option) (*operations.GetMappingResponse, error) { hookCtx := hooks.HookContext{ Context: ctx, OperationID: "getMapping", @@ -1139,10 +1117,6 @@ func (s *V1) GetMapping(ctx context.Context, ledger string, opts ...operations.O SecuritySource: s.sdkConfiguration.Security, } - request := operations.GetMappingRequest{ - Ledger: ledger, - } - o := operations.Options{} supportedOptions := []string{ operations.SupportedOptionRetries, @@ -1314,7 +1288,7 @@ func (s *V1) GetMapping(ctx context.Context, ledger string, opts ...operations.O } // UpdateMapping - Update the mapping of a ledger -func (s *V1) UpdateMapping(ctx context.Context, ledger string, mapping *components.Mapping, opts ...operations.Option) (*operations.UpdateMappingResponse, error) { +func (s *V1) UpdateMapping(ctx context.Context, request operations.UpdateMappingRequest, opts ...operations.Option) (*operations.UpdateMappingResponse, error) { hookCtx := hooks.HookContext{ Context: ctx, OperationID: "updateMapping", @@ -1322,11 +1296,6 @@ func (s *V1) UpdateMapping(ctx context.Context, ledger string, mapping *componen SecuritySource: s.sdkConfiguration.Security, } - request := operations.UpdateMappingRequest{ - Ledger: ledger, - Mapping: mapping, - } - o := operations.Options{} supportedOptions := []string{ operations.SupportedOptionRetries, @@ -1507,7 +1476,7 @@ func (s *V1) UpdateMapping(ctx context.Context, ledger string, mapping *componen // 
This route is deprecated, and has been merged into `POST /{ledger}/transactions`. // // Deprecated method: This will be removed in a future release, please migrate away from it as soon as possible. -func (s *V1) RunScript(ctx context.Context, ledger string, script components.Script, preview *bool, opts ...operations.Option) (*operations.RunScriptResponse, error) { +func (s *V1) RunScript(ctx context.Context, request operations.RunScriptRequest, opts ...operations.Option) (*operations.RunScriptResponse, error) { hookCtx := hooks.HookContext{ Context: ctx, OperationID: "runScript", @@ -1515,12 +1484,6 @@ func (s *V1) RunScript(ctx context.Context, ledger string, script components.Scr SecuritySource: s.sdkConfiguration.Security, } - request := operations.RunScriptRequest{ - Ledger: ledger, - Preview: preview, - Script: script, - } - o := operations.Options{} supportedOptions := []string{ operations.SupportedOptionRetries, @@ -1693,7 +1656,7 @@ func (s *V1) RunScript(ctx context.Context, ledger string, script components.Scr // ReadStats - Get statistics from a ledger // Get statistics from a ledger. (aggregate metrics on accounts and transactions) -func (s *V1) ReadStats(ctx context.Context, ledger string, opts ...operations.Option) (*operations.ReadStatsResponse, error) { +func (s *V1) ReadStats(ctx context.Context, request operations.ReadStatsRequest, opts ...operations.Option) (*operations.ReadStatsResponse, error) { hookCtx := hooks.HookContext{ Context: ctx, OperationID: "readStats", @@ -1701,10 +1664,6 @@ func (s *V1) ReadStats(ctx context.Context, ledger string, opts ...operations.Op SecuritySource: s.sdkConfiguration.Security, } - request := operations.ReadStatsRequest{ - Ledger: ledger, - } - o := operations.Options{} supportedOptions := []string{ operations.SupportedOptionRetries, @@ -2234,7 +2193,7 @@ func (s *V1) ListTransactions(ctx context.Context, request operations.ListTransa } // CreateTransaction - Create a new transaction to a ledger -func (s *V1) CreateTransaction(ctx context.Context, ledger string, postTransaction components.PostTransaction, preview *bool, opts ...operations.Option) (*operations.CreateTransactionResponse, error) { +func (s *V1) CreateTransaction(ctx context.Context, request operations.CreateTransactionRequest, opts ...operations.Option) (*operations.CreateTransactionResponse, error) { hookCtx := hooks.HookContext{ Context: ctx, OperationID: "createTransaction", @@ -2242,12 +2201,6 @@ func (s *V1) CreateTransaction(ctx context.Context, ledger string, postTransacti SecuritySource: s.sdkConfiguration.Security, } - request := operations.CreateTransactionRequest{ - Ledger: ledger, - Preview: preview, - PostTransaction: postTransaction, - } - o := operations.Options{} supportedOptions := []string{ operations.SupportedOptionRetries, @@ -2429,7 +2382,7 @@ func (s *V1) CreateTransaction(ctx context.Context, ledger string, postTransacti } // GetTransaction - Get transaction from a ledger by its ID -func (s *V1) GetTransaction(ctx context.Context, ledger string, txid *big.Int, opts ...operations.Option) (*operations.GetTransactionResponse, error) { +func (s *V1) GetTransaction(ctx context.Context, request operations.GetTransactionRequest, opts ...operations.Option) (*operations.GetTransactionResponse, error) { hookCtx := hooks.HookContext{ Context: ctx, OperationID: "getTransaction", @@ -2437,11 +2390,6 @@ func (s *V1) GetTransaction(ctx context.Context, ledger string, txid *big.Int, o SecuritySource: s.sdkConfiguration.Security, } - request := 
operations.GetTransactionRequest{ - Ledger: ledger, - Txid: txid, - } - o := operations.Options{} supportedOptions := []string{ operations.SupportedOptionRetries, @@ -2613,7 +2561,7 @@ func (s *V1) GetTransaction(ctx context.Context, ledger string, txid *big.Int, o } // AddMetadataOnTransaction - Set the metadata of a transaction by its ID -func (s *V1) AddMetadataOnTransaction(ctx context.Context, ledger string, txid *big.Int, requestBody map[string]any, opts ...operations.Option) (*operations.AddMetadataOnTransactionResponse, error) { +func (s *V1) AddMetadataOnTransaction(ctx context.Context, request operations.AddMetadataOnTransactionRequest, opts ...operations.Option) (*operations.AddMetadataOnTransactionResponse, error) { hookCtx := hooks.HookContext{ Context: ctx, OperationID: "addMetadataOnTransaction", @@ -2621,12 +2569,6 @@ func (s *V1) AddMetadataOnTransaction(ctx context.Context, ledger string, txid * SecuritySource: s.sdkConfiguration.Security, } - request := operations.AddMetadataOnTransactionRequest{ - Ledger: ledger, - Txid: txid, - RequestBody: requestBody, - } - o := operations.Options{} supportedOptions := []string{ operations.SupportedOptionRetries, @@ -2793,7 +2735,7 @@ func (s *V1) AddMetadataOnTransaction(ctx context.Context, ledger string, txid * } // RevertTransaction - Revert a ledger transaction by its ID -func (s *V1) RevertTransaction(ctx context.Context, ledger string, txid *big.Int, disableChecks *bool, opts ...operations.Option) (*operations.RevertTransactionResponse, error) { +func (s *V1) RevertTransaction(ctx context.Context, request operations.RevertTransactionRequest, opts ...operations.Option) (*operations.RevertTransactionResponse, error) { hookCtx := hooks.HookContext{ Context: ctx, OperationID: "revertTransaction", @@ -2801,12 +2743,6 @@ func (s *V1) RevertTransaction(ctx context.Context, ledger string, txid *big.Int SecuritySource: s.sdkConfiguration.Security, } - request := operations.RevertTransactionRequest{ - Ledger: ledger, - Txid: txid, - DisableChecks: disableChecks, - } - o := operations.Options{} supportedOptions := []string{ operations.SupportedOptionRetries, @@ -2982,7 +2918,7 @@ func (s *V1) RevertTransaction(ctx context.Context, ledger string, txid *big.Int } // CreateTransactions - Create a new batch of transactions to a ledger -func (s *V1) CreateTransactions(ctx context.Context, ledger string, transactions components.Transactions, opts ...operations.Option) (*operations.CreateTransactionsResponse, error) { +func (s *V1) CreateTransactions(ctx context.Context, request operations.CreateTransactionsRequest, opts ...operations.Option) (*operations.CreateTransactionsResponse, error) { hookCtx := hooks.HookContext{ Context: ctx, OperationID: "CreateTransactions", @@ -2990,11 +2926,6 @@ func (s *V1) CreateTransactions(ctx context.Context, ledger string, transactions SecuritySource: s.sdkConfiguration.Security, } - request := operations.CreateTransactionsRequest{ - Ledger: ledger, - Transactions: transactions, - } - o := operations.Options{} supportedOptions := []string{ operations.SupportedOptionRetries, @@ -3355,7 +3286,7 @@ func (s *V1) GetBalances(ctx context.Context, request operations.GetBalancesRequ } // GetBalancesAggregated - Get the aggregated balances from selected accounts -func (s *V1) GetBalancesAggregated(ctx context.Context, ledger string, address *string, useInsertionDate *bool, opts ...operations.Option) (*operations.GetBalancesAggregatedResponse, error) { +func (s *V1) GetBalancesAggregated(ctx context.Context, request 
operations.GetBalancesAggregatedRequest, opts ...operations.Option) (*operations.GetBalancesAggregatedResponse, error) { hookCtx := hooks.HookContext{ Context: ctx, OperationID: "getBalancesAggregated", @@ -3363,12 +3294,6 @@ func (s *V1) GetBalancesAggregated(ctx context.Context, ledger string, address * SecuritySource: s.sdkConfiguration.Security, } - request := operations.GetBalancesAggregatedRequest{ - Ledger: ledger, - Address: address, - UseInsertionDate: useInsertionDate, - } - o := operations.Options{} supportedOptions := []string{ operations.SupportedOptionRetries, diff --git a/pkg/client/v2.go b/pkg/client/v2.go index 1b6fad03d..58a581b02 100644 --- a/pkg/client/v2.go +++ b/pkg/client/v2.go @@ -13,10 +13,8 @@ import ( "github.com/formancehq/stack/ledger/client/models/operations" "github.com/formancehq/stack/ledger/client/models/sdkerrors" "io" - "math/big" "net/http" "net/url" - "time" ) type V2 struct { @@ -29,199 +27,8 @@ func newV2(sdkConfig sdkConfiguration) *V2 { } } -// GetInfo - Show server information -func (s *V2) GetInfo(ctx context.Context, opts ...operations.Option) (*operations.V2GetInfoResponse, error) { - hookCtx := hooks.HookContext{ - Context: ctx, - OperationID: "v2GetInfo", - OAuth2Scopes: []string{"ledger:read", "ledger:read"}, - SecuritySource: s.sdkConfiguration.Security, - } - - o := operations.Options{} - supportedOptions := []string{ - operations.SupportedOptionRetries, - operations.SupportedOptionTimeout, - } - - for _, opt := range opts { - if err := opt(&o, supportedOptions...); err != nil { - return nil, fmt.Errorf("error applying option: %w", err) - } - } - - baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) - opURL, err := url.JoinPath(baseURL, "/v2/_info") - if err != nil { - return nil, fmt.Errorf("error generating URL: %w", err) - } - - timeout := o.Timeout - if timeout == nil { - timeout = s.sdkConfiguration.Timeout - } - - if timeout != nil { - var cancel context.CancelFunc - ctx, cancel = context.WithTimeout(ctx, *timeout) - defer cancel() - } - - req, err := http.NewRequestWithContext(ctx, "GET", opURL, nil) - if err != nil { - return nil, fmt.Errorf("error creating request: %w", err) - } - req.Header.Set("Accept", "application/json") - req.Header.Set("User-Agent", s.sdkConfiguration.UserAgent) - - if err := utils.PopulateSecurity(ctx, req, s.sdkConfiguration.Security); err != nil { - return nil, err - } - - globalRetryConfig := s.sdkConfiguration.RetryConfig - retryConfig := o.Retries - if retryConfig == nil { - if globalRetryConfig != nil { - retryConfig = globalRetryConfig - } - } - - var httpRes *http.Response - if retryConfig != nil { - httpRes, err = utils.Retry(ctx, utils.Retries{ - Config: retryConfig, - StatusCodes: []string{ - "429", - "500", - "502", - "503", - "504", - }, - }, func() (*http.Response, error) { - if req.Body != nil { - copyBody, err := req.GetBody() - if err != nil { - return nil, err - } - req.Body = copyBody - } - - req, err = s.sdkConfiguration.Hooks.BeforeRequest(hooks.BeforeRequestContext{HookContext: hookCtx}, req) - if err != nil { - return nil, backoff.Permanent(err) - } - - httpRes, err := s.sdkConfiguration.Client.Do(req) - if err != nil || httpRes == nil { - if err != nil { - err = fmt.Errorf("error sending request: %w", err) - } else { - err = fmt.Errorf("error sending request: no response") - } - - _, err = s.sdkConfiguration.Hooks.AfterError(hooks.AfterErrorContext{HookContext: hookCtx}, nil, err) - } - return httpRes, err - }) - - if err != nil { - return nil, err - } else { - 
httpRes, err = s.sdkConfiguration.Hooks.AfterSuccess(hooks.AfterSuccessContext{HookContext: hookCtx}, httpRes) - if err != nil { - return nil, err - } - } - } else { - req, err = s.sdkConfiguration.Hooks.BeforeRequest(hooks.BeforeRequestContext{HookContext: hookCtx}, req) - if err != nil { - return nil, err - } - - httpRes, err = s.sdkConfiguration.Client.Do(req) - if err != nil || httpRes == nil { - if err != nil { - err = fmt.Errorf("error sending request: %w", err) - } else { - err = fmt.Errorf("error sending request: no response") - } - - _, err = s.sdkConfiguration.Hooks.AfterError(hooks.AfterErrorContext{HookContext: hookCtx}, nil, err) - return nil, err - } else if utils.MatchStatusCodes([]string{"default"}, httpRes.StatusCode) { - _httpRes, err := s.sdkConfiguration.Hooks.AfterError(hooks.AfterErrorContext{HookContext: hookCtx}, httpRes, nil) - if err != nil { - return nil, err - } else if _httpRes != nil { - httpRes = _httpRes - } - } else { - httpRes, err = s.sdkConfiguration.Hooks.AfterSuccess(hooks.AfterSuccessContext{HookContext: hookCtx}, httpRes) - if err != nil { - return nil, err - } - } - } - - res := &operations.V2GetInfoResponse{ - HTTPMeta: components.HTTPMetadata{ - Request: req, - Response: httpRes, - }, - } - - rawBody, err := io.ReadAll(httpRes.Body) - if err != nil { - return nil, fmt.Errorf("error reading response body: %w", err) - } - httpRes.Body.Close() - httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody)) - - switch { - case httpRes.StatusCode == 200: - switch { - case utils.MatchContentType(httpRes.Header.Get("Content-Type"), `application/json`): - var out components.V2ConfigInfoResponse - if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil { - return nil, err - } - - res.V2ConfigInfoResponse = &out - default: - return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", httpRes.Header.Get("Content-Type")), httpRes.StatusCode, string(rawBody), httpRes) - } - case httpRes.StatusCode >= 500 && httpRes.StatusCode < 600: - switch { - case utils.MatchContentType(httpRes.Header.Get("Content-Type"), `application/json`): - var out sdkerrors.V2ErrorResponse - if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil { - return nil, err - } - - res.V2ErrorResponse = &out - default: - return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", httpRes.Header.Get("Content-Type")), httpRes.StatusCode, string(rawBody), httpRes) - } - default: - switch { - case utils.MatchContentType(httpRes.Header.Get("Content-Type"), `application/json`): - var out sdkerrors.V2ErrorResponse - if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil { - return nil, err - } - - return nil, &out - default: - return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", httpRes.Header.Get("Content-Type")), httpRes.StatusCode, string(rawBody), httpRes) - } - } - - return res, nil - -} - // ListLedgers - List ledgers -func (s *V2) ListLedgers(ctx context.Context, pageSize *int64, cursor *string, opts ...operations.Option) (*operations.V2ListLedgersResponse, error) { +func (s *V2) ListLedgers(ctx context.Context, request operations.V2ListLedgersRequest, opts ...operations.Option) (*operations.V2ListLedgersResponse, error) { hookCtx := hooks.HookContext{ Context: ctx, OperationID: "v2ListLedgers", @@ -229,11 +36,6 @@ func (s *V2) ListLedgers(ctx context.Context, pageSize *int64, cursor *string, o SecuritySource: 
s.sdkConfiguration.Security, } - request := operations.V2ListLedgersRequest{ - PageSize: pageSize, - Cursor: cursor, - } - o := operations.Options{} supportedOptions := []string{ operations.SupportedOptionRetries, @@ -409,7 +211,7 @@ func (s *V2) ListLedgers(ctx context.Context, pageSize *int64, cursor *string, o } // GetLedger - Get a ledger -func (s *V2) GetLedger(ctx context.Context, ledger string, opts ...operations.Option) (*operations.V2GetLedgerResponse, error) { +func (s *V2) GetLedger(ctx context.Context, request operations.V2GetLedgerRequest, opts ...operations.Option) (*operations.V2GetLedgerResponse, error) { hookCtx := hooks.HookContext{ Context: ctx, OperationID: "v2GetLedger", @@ -417,10 +219,6 @@ func (s *V2) GetLedger(ctx context.Context, ledger string, opts ...operations.Op SecuritySource: s.sdkConfiguration.Security, } - request := operations.V2GetLedgerRequest{ - Ledger: ledger, - } - o := operations.Options{} supportedOptions := []string{ operations.SupportedOptionRetries, @@ -592,7 +390,7 @@ func (s *V2) GetLedger(ctx context.Context, ledger string, opts ...operations.Op } // CreateLedger - Create a ledger -func (s *V2) CreateLedger(ctx context.Context, ledger string, v2CreateLedgerRequest *components.V2CreateLedgerRequest, opts ...operations.Option) (*operations.V2CreateLedgerResponse, error) { +func (s *V2) CreateLedger(ctx context.Context, request operations.V2CreateLedgerRequest, opts ...operations.Option) (*operations.V2CreateLedgerResponse, error) { hookCtx := hooks.HookContext{ Context: ctx, OperationID: "v2CreateLedger", @@ -600,11 +398,6 @@ func (s *V2) CreateLedger(ctx context.Context, ledger string, v2CreateLedgerRequ SecuritySource: s.sdkConfiguration.Security, } - request := operations.V2CreateLedgerRequest{ - Ledger: ledger, - V2CreateLedgerRequest: v2CreateLedgerRequest, - } - o := operations.Options{} supportedOptions := []string{ operations.SupportedOptionRetries, @@ -771,7 +564,7 @@ func (s *V2) CreateLedger(ctx context.Context, ledger string, v2CreateLedgerRequ } // UpdateLedgerMetadata - Update ledger metadata -func (s *V2) UpdateLedgerMetadata(ctx context.Context, ledger string, requestBody map[string]string, opts ...operations.Option) (*operations.V2UpdateLedgerMetadataResponse, error) { +func (s *V2) UpdateLedgerMetadata(ctx context.Context, request operations.V2UpdateLedgerMetadataRequest, opts ...operations.Option) (*operations.V2UpdateLedgerMetadataResponse, error) { hookCtx := hooks.HookContext{ Context: ctx, OperationID: "v2UpdateLedgerMetadata", @@ -779,11 +572,6 @@ func (s *V2) UpdateLedgerMetadata(ctx context.Context, ledger string, requestBod SecuritySource: s.sdkConfiguration.Security, } - request := operations.V2UpdateLedgerMetadataRequest{ - Ledger: ledger, - RequestBody: requestBody, - } - o := operations.Options{} supportedOptions := []string{ operations.SupportedOptionRetries, @@ -962,7 +750,7 @@ func (s *V2) UpdateLedgerMetadata(ctx context.Context, ledger string, requestBod } // DeleteLedgerMetadata - Delete ledger metadata by key -func (s *V2) DeleteLedgerMetadata(ctx context.Context, ledger string, key string, opts ...operations.Option) (*operations.V2DeleteLedgerMetadataResponse, error) { +func (s *V2) DeleteLedgerMetadata(ctx context.Context, request operations.V2DeleteLedgerMetadataRequest, opts ...operations.Option) (*operations.V2DeleteLedgerMetadataResponse, error) { hookCtx := hooks.HookContext{ Context: ctx, OperationID: "v2DeleteLedgerMetadata", @@ -970,11 +758,6 @@ func (s *V2) DeleteLedgerMetadata(ctx 
context.Context, ledger string, key string SecuritySource: s.sdkConfiguration.Security, } - request := operations.V2DeleteLedgerMetadataRequest{ - Ledger: ledger, - Key: key, - } - o := operations.Options{} supportedOptions := []string{ operations.SupportedOptionRetries, @@ -1135,7 +918,7 @@ func (s *V2) DeleteLedgerMetadata(ctx context.Context, ledger string, key string } // GetLedgerInfo - Get information about a ledger -func (s *V2) GetLedgerInfo(ctx context.Context, ledger string, opts ...operations.Option) (*operations.V2GetLedgerInfoResponse, error) { +func (s *V2) GetLedgerInfo(ctx context.Context, request operations.V2GetLedgerInfoRequest, opts ...operations.Option) (*operations.V2GetLedgerInfoResponse, error) { hookCtx := hooks.HookContext{ Context: ctx, OperationID: "v2GetLedgerInfo", @@ -1143,10 +926,6 @@ func (s *V2) GetLedgerInfo(ctx context.Context, ledger string, opts ...operation SecuritySource: s.sdkConfiguration.Security, } - request := operations.V2GetLedgerInfoRequest{ - Ledger: ledger, - } - o := operations.Options{} supportedOptions := []string{ operations.SupportedOptionRetries, @@ -1318,7 +1097,7 @@ func (s *V2) GetLedgerInfo(ctx context.Context, ledger string, opts ...operation } // CreateBulk - Bulk request -func (s *V2) CreateBulk(ctx context.Context, ledger string, requestBody []components.V2BulkElement, opts ...operations.Option) (*operations.V2CreateBulkResponse, error) { +func (s *V2) CreateBulk(ctx context.Context, request operations.V2CreateBulkRequest, opts ...operations.Option) (*operations.V2CreateBulkResponse, error) { hookCtx := hooks.HookContext{ Context: ctx, OperationID: "v2CreateBulk", @@ -1326,11 +1105,6 @@ func (s *V2) CreateBulk(ctx context.Context, ledger string, requestBody []compon SecuritySource: s.sdkConfiguration.Security, } - request := operations.V2CreateBulkRequest{ - Ledger: ledger, - RequestBody: requestBody, - } - o := operations.Options{} supportedOptions := []string{ operations.SupportedOptionRetries, @@ -1510,7 +1284,7 @@ func (s *V2) CreateBulk(ctx context.Context, ledger string, requestBody []compon } // CountAccounts - Count the accounts from a ledger -func (s *V2) CountAccounts(ctx context.Context, ledger string, pit *time.Time, requestBody map[string]any, opts ...operations.Option) (*operations.V2CountAccountsResponse, error) { +func (s *V2) CountAccounts(ctx context.Context, request operations.V2CountAccountsRequest, opts ...operations.Option) (*operations.V2CountAccountsResponse, error) { hookCtx := hooks.HookContext{ Context: ctx, OperationID: "v2CountAccounts", @@ -1518,12 +1292,6 @@ func (s *V2) CountAccounts(ctx context.Context, ledger string, pit *time.Time, r SecuritySource: s.sdkConfiguration.Security, } - request := operations.V2CountAccountsRequest{ - Ledger: ledger, - Pit: pit, - RequestBody: requestBody, - } - o := operations.Options{} supportedOptions := []string{ operations.SupportedOptionRetries, @@ -1886,7 +1654,7 @@ func (s *V2) ListAccounts(ctx context.Context, request operations.V2ListAccounts } // GetAccount - Get account by its address -func (s *V2) GetAccount(ctx context.Context, ledger string, address string, expand *string, pit *time.Time, opts ...operations.Option) (*operations.V2GetAccountResponse, error) { +func (s *V2) GetAccount(ctx context.Context, request operations.V2GetAccountRequest, opts ...operations.Option) (*operations.V2GetAccountResponse, error) { hookCtx := hooks.HookContext{ Context: ctx, OperationID: "v2GetAccount", @@ -1894,13 +1662,6 @@ func (s *V2) GetAccount(ctx context.Context, 
ledger string, address string, expa SecuritySource: s.sdkConfiguration.Security, } - request := operations.V2GetAccountRequest{ - Ledger: ledger, - Address: address, - Expand: expand, - Pit: pit, - } - o := operations.Options{} supportedOptions := []string{ operations.SupportedOptionRetries, @@ -2257,7 +2018,7 @@ func (s *V2) AddMetadataToAccount(ctx context.Context, request operations.V2AddM // DeleteAccountMetadata - Delete metadata by key // Delete metadata by key -func (s *V2) DeleteAccountMetadata(ctx context.Context, ledger string, address string, key string, opts ...operations.Option) (*operations.V2DeleteAccountMetadataResponse, error) { +func (s *V2) DeleteAccountMetadata(ctx context.Context, request operations.V2DeleteAccountMetadataRequest, opts ...operations.Option) (*operations.V2DeleteAccountMetadataResponse, error) { hookCtx := hooks.HookContext{ Context: ctx, OperationID: "v2DeleteAccountMetadata", @@ -2265,12 +2026,6 @@ func (s *V2) DeleteAccountMetadata(ctx context.Context, ledger string, address s SecuritySource: s.sdkConfiguration.Security, } - request := operations.V2DeleteAccountMetadataRequest{ - Ledger: ledger, - Address: address, - Key: key, - } - o := operations.Options{} supportedOptions := []string{ operations.SupportedOptionRetries, @@ -2432,7 +2187,7 @@ func (s *V2) DeleteAccountMetadata(ctx context.Context, ledger string, address s // ReadStats - Get statistics from a ledger // Get statistics from a ledger. (aggregate metrics on accounts and transactions) -func (s *V2) ReadStats(ctx context.Context, ledger string, opts ...operations.Option) (*operations.V2ReadStatsResponse, error) { +func (s *V2) ReadStats(ctx context.Context, request operations.V2ReadStatsRequest, opts ...operations.Option) (*operations.V2ReadStatsResponse, error) { hookCtx := hooks.HookContext{ Context: ctx, OperationID: "v2ReadStats", @@ -2440,10 +2195,6 @@ func (s *V2) ReadStats(ctx context.Context, ledger string, opts ...operations.Op SecuritySource: s.sdkConfiguration.Security, } - request := operations.V2ReadStatsRequest{ - Ledger: ledger, - } - o := operations.Options{} supportedOptions := []string{ operations.SupportedOptionRetries, @@ -2615,7 +2366,7 @@ func (s *V2) ReadStats(ctx context.Context, ledger string, opts ...operations.Op } // CountTransactions - Count the transactions from a ledger -func (s *V2) CountTransactions(ctx context.Context, ledger string, pit *time.Time, requestBody map[string]any, opts ...operations.Option) (*operations.V2CountTransactionsResponse, error) { +func (s *V2) CountTransactions(ctx context.Context, request operations.V2CountTransactionsRequest, opts ...operations.Option) (*operations.V2CountTransactionsResponse, error) { hookCtx := hooks.HookContext{ Context: ctx, OperationID: "v2CountTransactions", @@ -2623,12 +2374,6 @@ func (s *V2) CountTransactions(ctx context.Context, ledger string, pit *time.Tim SecuritySource: s.sdkConfiguration.Security, } - request := operations.V2CountTransactionsRequest{ - Ledger: ledger, - Pit: pit, - RequestBody: requestBody, - } - o := operations.Options{} supportedOptions := []string{ operations.SupportedOptionRetries, @@ -2991,7 +2736,7 @@ func (s *V2) ListTransactions(ctx context.Context, request operations.V2ListTran } // CreateTransaction - Create a new transaction to a ledger -func (s *V2) CreateTransaction(ctx context.Context, ledger string, v2PostTransaction components.V2PostTransaction, dryRun *bool, idempotencyKey *string, opts ...operations.Option) (*operations.V2CreateTransactionResponse, error) { +func (s 
*V2) CreateTransaction(ctx context.Context, request operations.V2CreateTransactionRequest, opts ...operations.Option) (*operations.V2CreateTransactionResponse, error) { hookCtx := hooks.HookContext{ Context: ctx, OperationID: "v2CreateTransaction", @@ -2999,13 +2744,6 @@ func (s *V2) CreateTransaction(ctx context.Context, ledger string, v2PostTransac SecuritySource: s.sdkConfiguration.Security, } - request := operations.V2CreateTransactionRequest{ - Ledger: ledger, - DryRun: dryRun, - IdempotencyKey: idempotencyKey, - V2PostTransaction: v2PostTransaction, - } - o := operations.Options{} supportedOptions := []string{ operations.SupportedOptionRetries, @@ -3189,7 +2927,7 @@ func (s *V2) CreateTransaction(ctx context.Context, ledger string, v2PostTransac } // GetTransaction - Get transaction from a ledger by its ID -func (s *V2) GetTransaction(ctx context.Context, ledger string, id *big.Int, expand *string, pit *time.Time, opts ...operations.Option) (*operations.V2GetTransactionResponse, error) { +func (s *V2) GetTransaction(ctx context.Context, request operations.V2GetTransactionRequest, opts ...operations.Option) (*operations.V2GetTransactionResponse, error) { hookCtx := hooks.HookContext{ Context: ctx, OperationID: "v2GetTransaction", @@ -3197,13 +2935,6 @@ func (s *V2) GetTransaction(ctx context.Context, ledger string, id *big.Int, exp SecuritySource: s.sdkConfiguration.Security, } - request := operations.V2GetTransactionRequest{ - Ledger: ledger, - ID: id, - Expand: expand, - Pit: pit, - } - o := operations.Options{} supportedOptions := []string{ operations.SupportedOptionRetries, @@ -3560,7 +3291,7 @@ func (s *V2) AddMetadataOnTransaction(ctx context.Context, request operations.V2 // DeleteTransactionMetadata - Delete metadata by key // Delete metadata by key -func (s *V2) DeleteTransactionMetadata(ctx context.Context, ledger string, id *big.Int, key string, opts ...operations.Option) (*operations.V2DeleteTransactionMetadataResponse, error) { +func (s *V2) DeleteTransactionMetadata(ctx context.Context, request operations.V2DeleteTransactionMetadataRequest, opts ...operations.Option) (*operations.V2DeleteTransactionMetadataResponse, error) { hookCtx := hooks.HookContext{ Context: ctx, OperationID: "v2DeleteTransactionMetadata", @@ -3568,12 +3299,6 @@ func (s *V2) DeleteTransactionMetadata(ctx context.Context, ledger string, id *b SecuritySource: s.sdkConfiguration.Security, } - request := operations.V2DeleteTransactionMetadataRequest{ - Ledger: ledger, - ID: id, - Key: key, - } - o := operations.Options{} supportedOptions := []string{ operations.SupportedOptionRetries, @@ -3734,7 +3459,7 @@ func (s *V2) DeleteTransactionMetadata(ctx context.Context, ledger string, id *b } // RevertTransaction - Revert a ledger transaction by its ID -func (s *V2) RevertTransaction(ctx context.Context, ledger string, id *big.Int, force *bool, atEffectiveDate *bool, opts ...operations.Option) (*operations.V2RevertTransactionResponse, error) { +func (s *V2) RevertTransaction(ctx context.Context, request operations.V2RevertTransactionRequest, opts ...operations.Option) (*operations.V2RevertTransactionResponse, error) { hookCtx := hooks.HookContext{ Context: ctx, OperationID: "v2RevertTransaction", @@ -3742,13 +3467,6 @@ func (s *V2) RevertTransaction(ctx context.Context, ledger string, id *big.Int, SecuritySource: s.sdkConfiguration.Security, } - request := operations.V2RevertTransactionRequest{ - Ledger: ledger, - ID: id, - Force: force, - AtEffectiveDate: atEffectiveDate, - } - o := operations.Options{} 
supportedOptions := []string{ operations.SupportedOptionRetries, @@ -3924,7 +3642,7 @@ func (s *V2) RevertTransaction(ctx context.Context, ledger string, id *big.Int, } // GetBalancesAggregated - Get the aggregated balances from selected accounts -func (s *V2) GetBalancesAggregated(ctx context.Context, ledger string, pit *time.Time, useInsertionDate *bool, requestBody map[string]any, opts ...operations.Option) (*operations.V2GetBalancesAggregatedResponse, error) { +func (s *V2) GetBalancesAggregated(ctx context.Context, request operations.V2GetBalancesAggregatedRequest, opts ...operations.Option) (*operations.V2GetBalancesAggregatedResponse, error) { hookCtx := hooks.HookContext{ Context: ctx, OperationID: "v2GetBalancesAggregated", @@ -3932,13 +3650,6 @@ func (s *V2) GetBalancesAggregated(ctx context.Context, ledger string, pit *time SecuritySource: s.sdkConfiguration.Security, } - request := operations.V2GetBalancesAggregatedRequest{ - Ledger: ledger, - Pit: pit, - UseInsertionDate: useInsertionDate, - RequestBody: requestBody, - } - o := operations.Options{} supportedOptions := []string{ operations.SupportedOptionRetries, @@ -4498,7 +4209,7 @@ func (s *V2) ListLogs(ctx context.Context, request operations.V2ListLogsRequest, } -func (s *V2) ImportLogs(ctx context.Context, ledger string, requestBody *string, opts ...operations.Option) (*operations.V2ImportLogsResponse, error) { +func (s *V2) ImportLogs(ctx context.Context, request operations.V2ImportLogsRequest, opts ...operations.Option) (*operations.V2ImportLogsResponse, error) { hookCtx := hooks.HookContext{ Context: ctx, OperationID: "v2ImportLogs", @@ -4506,11 +4217,6 @@ func (s *V2) ImportLogs(ctx context.Context, ledger string, requestBody *string, SecuritySource: s.sdkConfiguration.Security, } - request := operations.V2ImportLogsRequest{ - Ledger: ledger, - RequestBody: requestBody, - } - o := operations.Options{} supportedOptions := []string{ operations.SupportedOptionRetries, @@ -4677,7 +4383,7 @@ func (s *V2) ImportLogs(ctx context.Context, ledger string, requestBody *string, } // ExportLogs - Export logs -func (s *V2) ExportLogs(ctx context.Context, ledger string, opts ...operations.Option) (*operations.V2ExportLogsResponse, error) { +func (s *V2) ExportLogs(ctx context.Context, request operations.V2ExportLogsRequest, opts ...operations.Option) (*operations.V2ExportLogsResponse, error) { hookCtx := hooks.HookContext{ Context: ctx, OperationID: "v2ExportLogs", @@ -4685,10 +4391,6 @@ func (s *V2) ExportLogs(ctx context.Context, ledger string, opts ...operations.O SecuritySource: s.sdkConfiguration.Security, } - request := operations.V2ExportLogsRequest{ - Ledger: ledger, - } - o := operations.Options{} supportedOptions := []string{ operations.SupportedOptionRetries, diff --git a/pkg/testserver/api.go b/pkg/testserver/api.go new file mode 100644 index 000000000..305949de6 --- /dev/null +++ b/pkg/testserver/api.go @@ -0,0 +1,243 @@ +package testserver + +import ( + "bytes" + "context" + "io" + "math/big" + "strconv" + + "github.com/formancehq/go-libs/v2/api" + "github.com/formancehq/stack/ledger/client/models/components" + "github.com/formancehq/stack/ledger/client/models/operations" + "github.com/formancehq/stack/ledger/client/models/sdkerrors" +) + +func CreateLedger(ctx context.Context, srv *Server, request operations.V2CreateLedgerRequest) error { + _, err := srv.Client().Ledger.V2.CreateLedger(ctx, request) + return mapSDKError(err) +} + +func GetLedger(ctx context.Context, srv *Server, request operations.V2GetLedgerRequest) 
(*components.V2Ledger, error) { + ret, err := srv.Client().Ledger.V2.GetLedger(ctx, request) + if err := mapSDKError(err); err != nil { + return nil, err + } + return &ret.V2GetLedgerResponse.Data, nil +} + +func GetInfo(ctx context.Context, srv *Server) (*operations.V2GetInfoResponse, error) { + return srv.Client().Ledger.GetInfo(ctx) +} + +func CreateTransaction(ctx context.Context, srv *Server, request operations.V2CreateTransactionRequest) (*components.V2Transaction, error) { + response, err := srv.Client().Ledger.V2.CreateTransaction(ctx, request) + + if err != nil { + return nil, mapSDKError(err) + } + + return &response.V2CreateTransactionResponse.Data, nil +} + +func CreateBulk(ctx context.Context, srv *Server, request operations.V2CreateBulkRequest) ([]components.V2BulkElementResult, error) { + response, err := srv.Client().Ledger.V2.CreateBulk(ctx, request) + if err != nil { + return nil, mapSDKError(err) + } + return response.V2BulkResponse.Data, nil +} + +func GetBalancesAggregated(ctx context.Context, srv *Server, request operations.V2GetBalancesAggregatedRequest) (map[string]*big.Int, error) { + response, err := srv.Client().Ledger.V2.GetBalancesAggregated(ctx, request) + if err != nil { + return nil, mapSDKError(err) + } + return response.V2AggregateBalancesResponse.Data, nil +} + +func GetVolumesWithBalances(ctx context.Context, srv *Server, request operations.V2GetVolumesWithBalancesRequest) (*components.V2VolumesWithBalanceCursorResponseCursor, error) { + response, err := srv.Client().Ledger.V2.GetVolumesWithBalances(ctx, request) + if err != nil { + return nil, mapSDKError(err) + } + return &response.V2VolumesWithBalanceCursorResponse.Cursor, nil +} + +func UpdateLedgerMetadata(ctx context.Context, srv *Server, request operations.V2UpdateLedgerMetadataRequest) error { + _, err := srv.Client().Ledger.V2.UpdateLedgerMetadata(ctx, request) + return mapSDKError(err) +} + +func DeleteLedgerMetadata(ctx context.Context, srv *Server, request operations.V2DeleteLedgerMetadataRequest) error { + _, err := srv.Client().Ledger.V2.DeleteLedgerMetadata(ctx, request) + return mapSDKError(err) +} + +func AddMetadataToAccount(ctx context.Context, srv *Server, request operations.V2AddMetadataToAccountRequest) error { + _, err := srv.Client().Ledger.V2.AddMetadataToAccount(ctx, request) + return mapSDKError(err) +} + +func AddMetadataToTransaction(ctx context.Context, srv *Server, request operations.V2AddMetadataOnTransactionRequest) error { + _, err := srv.Client().Ledger.V2.AddMetadataOnTransaction(ctx, request) + return mapSDKError(err) +} + +func DeleteAccountMetadata(ctx context.Context, srv *Server, request operations.V2DeleteAccountMetadataRequest) error { + _, err := srv.Client().Ledger.V2.DeleteAccountMetadata(ctx, request) + return mapSDKError(err) +} + +func DeleteTransactionMetadata(ctx context.Context, srv *Server, request operations.V2DeleteTransactionMetadataRequest) error { + _, err := srv.Client().Ledger.V2.DeleteTransactionMetadata(ctx, request) + return mapSDKError(err) +} + +func RevertTransaction(ctx context.Context, srv *Server, request operations.V2RevertTransactionRequest) (*components.V2Transaction, error) { + response, err := srv.Client().Ledger.V2.RevertTransaction(ctx, request) + + if err != nil { + return nil, mapSDKError(err) + } + + return &response.V2RevertTransactionResponse.Data, nil +} + +func GetTransaction(ctx context.Context, srv *Server, request operations.V2GetTransactionRequest) (*components.V2Transaction, error) { + response, err := 
srv.Client().Ledger.V2.GetTransaction(ctx, request) + + if err != nil { + return nil, mapSDKError(err) + } + + return &response.V2GetTransactionResponse.Data, nil +} + +func GetAccount(ctx context.Context, srv *Server, request operations.V2GetAccountRequest) (*components.V2Account, error) { + response, err := srv.Client().Ledger.V2.GetAccount(ctx, request) + + if err != nil { + return nil, mapSDKError(err) + } + + return &response.V2AccountResponse.Data, nil +} + +func ListTransactions(ctx context.Context, srv *Server, request operations.V2ListTransactionsRequest) (*components.V2TransactionsCursorResponseCursor, error) { + response, err := srv.Client().Ledger.V2.ListTransactions(ctx, request) + + if err != nil { + return nil, mapSDKError(err) + } + + return &response.V2TransactionsCursorResponse.Cursor, nil +} + +func CountTransactions(ctx context.Context, srv *Server, request operations.V2CountTransactionsRequest) (int, error) { + response, err := srv.Client().Ledger.V2.CountTransactions(ctx, request) + + if err != nil { + return 0, mapSDKError(err) + } + + ret, err := strconv.ParseInt(response.Headers["Count"][0], 10, 64) + if err != nil { + return 0, err + } + + return int(ret), nil +} + +func ListAccounts(ctx context.Context, srv *Server, request operations.V2ListAccountsRequest) (*components.V2AccountsCursorResponseCursor, error) { + response, err := srv.Client().Ledger.V2.ListAccounts(ctx, request) + + if err != nil { + return nil, mapSDKError(err) + } + + return &response.V2AccountsCursorResponse.Cursor, nil +} + +func ListLogs(ctx context.Context, srv *Server, request operations.V2ListLogsRequest) (*components.V2LogsCursorResponseCursor, error) { + response, err := srv.Client().Ledger.V2.ListLogs(ctx, request) + + if err != nil { + return nil, mapSDKError(err) + } + + return &response.V2LogsCursorResponse.Cursor, nil +} + +func CountAccounts(ctx context.Context, srv *Server, request operations.V2CountAccountsRequest) (int, error) { + response, err := srv.Client().Ledger.V2.CountAccounts(ctx, request) + + if err != nil { + return 0, mapSDKError(err) + } + + ret, err := strconv.ParseInt(response.Headers["Count"][0], 10, 64) + if err != nil { + return 0, err + } + + return int(ret), nil +} + +func ListLedgers(ctx context.Context, srv *Server, request operations.V2ListLedgersRequest) (*components.V2LedgerListResponseCursor, error) { + response, err := srv.Client().Ledger.V2.ListLedgers(ctx, request) + + if err != nil { + return nil, mapSDKError(err) + } + + return &response.V2LedgerListResponse.Cursor, nil +} + +func GetAggregatedBalances(ctx context.Context, srv *Server, request operations.V2GetBalancesAggregatedRequest) (map[string]*big.Int, error) { + response, err := srv.Client().Ledger.V2.GetBalancesAggregated(ctx, request) + + if err != nil { + return nil, mapSDKError(err) + } + + return response.GetV2AggregateBalancesResponse().Data, nil +} + +func Export(ctx context.Context, srv *Server, request operations.V2ExportLogsRequest) (io.Reader, error) { + response, err := srv.Client().Ledger.V2.ExportLogs(ctx, request) + if err != nil { + return nil, mapSDKError(err) + } + + data, err := io.ReadAll(response.HTTPMeta.Response.Body) + if err != nil { + return nil, err + } + + return bytes.NewBuffer(data), nil +} + +func Import(ctx context.Context, srv *Server, request operations.V2ImportLogsRequest) error { + _, err := srv.Client().Ledger.V2.ImportLogs(ctx, request) + return mapSDKError(err) +} + +func mapSDKError(err error) error { + // notes: *sdkerrors.V2ErrorResponse does not 
implements errors.Is + switch err := err.(type) { + case *sdkerrors.V2ErrorResponse: + return api.ErrorResponse{ + ErrorCode: string(err.ErrorCode), + ErrorMessage: err.ErrorMessage, + Details: func() string { + if err.Details == nil { + return "" + } + return *err.Details + }(), + } + default: + return err + } +} diff --git a/pkg/testserver/helpers.go b/pkg/testserver/helpers.go index 154165d53..1e1aa5167 100644 --- a/pkg/testserver/helpers.go +++ b/pkg/testserver/helpers.go @@ -1,11 +1,15 @@ package testserver import ( - . "github.com/formancehq/go-libs/testing/utils" + "github.com/formancehq/go-libs/v2/collectionutils" + . "github.com/formancehq/go-libs/v2/testing/utils" + "github.com/formancehq/go-libs/v2/time" + "github.com/formancehq/ledger/internal" + "github.com/formancehq/stack/ledger/client/models/components" . "github.com/onsi/ginkgo/v2" ) -func UseNewTestServer(configurationProvider func() Configuration) *Deferred[*Server] { +func NewTestServer(configurationProvider func() Configuration) *Deferred[*Server] { d := NewDeferred[*Server]() BeforeEach(func() { d.Reset() @@ -13,3 +17,49 @@ func UseNewTestServer(configurationProvider func() Configuration) *Deferred[*Ser }) return d } + +func ConvertSDKTxToCoreTX(tx *components.V2Transaction) ledger.Transaction { + return ledger.Transaction{ + TransactionData: ledger.TransactionData{ + Postings: collectionutils.Map(tx.Postings, ConvertSDKPostingToCorePosting), + Timestamp: time.New(tx.Timestamp), + InsertedAt: time.New(tx.InsertedAt), + Metadata: tx.Metadata, + Reference: func() string { + if tx.Reference == nil { + return "" + } + return *tx.Reference + }(), + }, + ID: int(tx.ID.Int64()), + PostCommitVolumes: ConvertSDKPostCommitVolumesToCorePostCommitVolumes(tx.PostCommitVolumes), + PostCommitEffectiveVolumes: ConvertSDKPostCommitVolumesToCorePostCommitVolumes(tx.PostCommitEffectiveVolumes), + } +} + +func ConvertSDKPostCommitVolumesToCorePostCommitVolumes(volumes map[string]map[string]components.V2Volume) ledger.PostCommitVolumes { + ret := ledger.PostCommitVolumes{} + for account, volumesByAsset := range volumes { + for asset, volumes := range volumesByAsset { + ret.Merge(ledger.PostCommitVolumes{ + account: { + asset: ledger.Volumes{ + Input: volumes.Input, + Output: volumes.Output, + }, + }, + }) + } + } + return ret +} + +func ConvertSDKPostingToCorePosting(p components.V2Posting) ledger.Posting { + return ledger.Posting{ + Source: p.Source, + Destination: p.Destination, + Asset: p.Asset, + Amount: p.Amount, + } +} diff --git a/pkg/testserver/matchers.go b/pkg/testserver/matchers.go new file mode 100644 index 000000000..6a04c1c9c --- /dev/null +++ b/pkg/testserver/matchers.go @@ -0,0 +1,191 @@ +package testserver + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "github.com/formancehq/go-libs/v2/pointer" + "github.com/formancehq/go-libs/v2/publish" + "github.com/formancehq/stack/ledger/client/models/operations" + "github.com/google/go-cmp/cmp" + "github.com/invopop/jsonschema" + "github.com/nats-io/nats.go" + . 
"github.com/onsi/gomega" + "github.com/onsi/gomega/types" + "github.com/xeipuuv/gojsonschema" + "math/big" + "reflect" +) + +type HaveCoherentStateMatcher struct{} + +func (h HaveCoherentStateMatcher) Match(actual interface{}) (success bool, err error) { + srv, ok := actual.(*Server) + if !ok { + return false, fmt.Errorf("expect type %T", new(Server)) + } + ctx := context.Background() + + ledgers, err := ListLedgers(ctx, srv, operations.V2ListLedgersRequest{ + PageSize: pointer.For(int64(100)), + }) + if err != nil { + return false, err + } + + for _, ledger := range ledgers.Data { + aggregatedBalances, err := GetAggregatedBalances(ctx, srv, operations.V2GetBalancesAggregatedRequest{ + Ledger: ledger.Name, + UseInsertionDate: pointer.For(true), + }) + Expect(err).To(BeNil()) + if len(aggregatedBalances) == 0 { // it's random, a ledger could not have been targeted + // just in case, check if the ledger has transactions + txs, err := ListTransactions(ctx, srv, operations.V2ListTransactionsRequest{ + Ledger: ledger.Name, + }) + Expect(err).To(BeNil()) + Expect(txs.Data).To(HaveLen(0)) + } else { + Expect(aggregatedBalances).To(HaveLen(1)) + Expect(aggregatedBalances["USD"]).To(Equal(big.NewInt(0))) + } + } + + return true, nil +} + +func (h HaveCoherentStateMatcher) FailureMessage(_ interface{}) (message string) { + return "server should has coherent state" +} + +func (h HaveCoherentStateMatcher) NegatedFailureMessage(_ interface{}) (message string) { + return "server should not has coherent state but has" +} + +var _ types.GomegaMatcher = (*HaveCoherentStateMatcher)(nil) + +func HaveCoherentState() *HaveCoherentStateMatcher { + return &HaveCoherentStateMatcher{} +} + +type PayloadMatcher interface { + Match(actual interface{}) error +} + +type NoOpPayloadMatcher struct{} + +func (n NoOpPayloadMatcher) Match(interface{}) error { + return nil +} + +var _ PayloadMatcher = (*NoOpPayloadMatcher)(nil) + +type StructPayloadMatcher struct { + expected any +} + +func (e StructPayloadMatcher) Match(payload interface{}) error { + rawSchema := jsonschema.Reflect(e.expected) + data, err := json.Marshal(rawSchema) + if err != nil { + return fmt.Errorf("unable to marshal schema: %s", err) + } + + schemaJSONLoader := gojsonschema.NewStringLoader(string(data)) + schema, err := gojsonschema.NewSchema(schemaJSONLoader) + if err != nil { + return fmt.Errorf("unable to load json schema: %s", err) + } + + dataJsonLoader := gojsonschema.NewRawLoader(payload) + + validate, err := schema.Validate(dataJsonLoader) + if err != nil { + return err + } + + if !validate.Valid() { + return fmt.Errorf("%s", validate.Errors()) + } + + marshaledPayload, err := json.Marshal(payload) + if err != nil { + return fmt.Errorf("unable to marshal payload: %s", err) + } + + unmarshalledPayload := reflect.New(reflect.TypeOf(e.expected)).Interface() + if err := json.Unmarshal(marshaledPayload, unmarshalledPayload); err != nil { + return fmt.Errorf("unable to unmarshal payload: %s", err) + } + + // unmarshalledPayload is actually a pointer + // as it is seen as "any" by the code, we use reflection to get the targeted valud + unmarshalledPayload = reflect.ValueOf(unmarshalledPayload).Elem().Interface() + + diff := cmp.Diff(unmarshalledPayload, e.expected, cmp.Comparer(func(v1 *big.Int, v2 *big.Int) bool { + return v1.String() == v2.String() + })) + if diff != "" { + return errors.New(diff) + } + + return nil +} + +func WithPayload(v any) StructPayloadMatcher { + return StructPayloadMatcher{ + expected: v, + } +} + +var _ PayloadMatcher = 
(*StructPayloadMatcher)(nil) + +// todo(libs): move in shared libs +type EventMatcher struct { + eventName string + matchers []PayloadMatcher + err error +} + +func (e *EventMatcher) Match(actual any) (success bool, err error) { + msg, ok := actual.(*nats.Msg) + if !ok { + return false, fmt.Errorf("expected type %t", actual) + } + + ev := publish.EventMessage{} + if err := json.Unmarshal(msg.Data, &ev); err != nil { + return false, fmt.Errorf("unable to unmarshal msg: %s", err) + } + + if ev.Type != e.eventName { + return false, nil + } + + for _, matcher := range e.matchers { + if e.err = matcher.Match(ev.Payload); e.err != nil { + return false, nil + } + } + + return true, nil +} + +func (e *EventMatcher) FailureMessage(_ any) (message string) { + return fmt.Sprintf("event does not match expectations: %s", e.err) +} + +func (e *EventMatcher) NegatedFailureMessage(_ any) (message string) { + return "event should not match" +} + +var _ types.GomegaMatcher = (*EventMatcher)(nil) + +func Event(eventName string, matchers ...PayloadMatcher) types.GomegaMatcher { + return &EventMatcher{ + matchers: matchers, + eventName: eventName, + } +} diff --git a/pkg/testserver/server.go b/pkg/testserver/server.go index 736f88c52..2939de5b1 100644 --- a/pkg/testserver/server.go +++ b/pkg/testserver/server.go @@ -3,17 +3,22 @@ package testserver import ( "context" "fmt" + "github.com/formancehq/go-libs/v2/otlp" + "github.com/formancehq/go-libs/v2/otlp/otlpmetrics" + "github.com/formancehq/go-libs/v2/publish" + "github.com/google/uuid" + "github.com/nats-io/nats.go" + "github.com/uptrace/bun" "io" "net/http" - "os" "strings" "time" - "github.com/formancehq/go-libs/bun/bunconnect" - "github.com/formancehq/go-libs/httpclient" - "github.com/formancehq/go-libs/httpserver" - "github.com/formancehq/go-libs/logging" - "github.com/formancehq/go-libs/service" + "github.com/formancehq/go-libs/v2/bun/bunconnect" + "github.com/formancehq/go-libs/v2/httpclient" + "github.com/formancehq/go-libs/v2/httpserver" + "github.com/formancehq/go-libs/v2/logging" + "github.com/formancehq/go-libs/v2/service" "github.com/formancehq/ledger/cmd" ledgerclient "github.com/formancehq/stack/ledger/client" "github.com/stretchr/testify/require" @@ -21,16 +26,24 @@ import ( type T interface { require.TestingT - TempDir() string Cleanup(func()) Helper() Logf(format string, args ...any) } +type OTLPConfig struct { + BaseConfig otlp.Config + Metrics *otlpmetrics.ModuleConfig +} + type Configuration struct { PostgresConfiguration bunconnect.ConnectionOptions + NatsURL string Output io.Writer Debug bool + OTLPConfig *OTLPConfig + ExperimentalFeatures bool + BulkMaxSize int } type Server struct { @@ -40,25 +53,34 @@ type Server struct { cancel func() ctx context.Context errorChan chan error + id string } func (s *Server) Start() { s.t.Helper() - tmpDir := s.t.TempDir() - require.NoError(s.t, os.MkdirAll(tmpDir, 0700)) - s.t.Cleanup(func() { - _ = os.RemoveAll(tmpDir) - }) - rootCmd := cmd.NewRootCommand() args := []string{ "serve", "--" + cmd.BindFlag, ":0", + "--" + cmd.AutoUpgradeFlag, "--" + bunconnect.PostgresURIFlag, s.configuration.PostgresConfiguration.DatabaseSourceName, "--" + bunconnect.PostgresMaxOpenConnsFlag, fmt.Sprint(s.configuration.PostgresConfiguration.MaxOpenConns), "--" + bunconnect.PostgresConnMaxIdleTimeFlag, fmt.Sprint(s.configuration.PostgresConfiguration.ConnMaxIdleTime), } + if s.configuration.ExperimentalFeatures { + args = append( + args, + "--"+cmd.ExperimentalFeaturesFlag, + ) + } + if s.configuration.BulkMaxSize != 0 
{ + args = append( + args, + "--"+cmd.BulkMaxSizeFlag, + fmt.Sprint(s.configuration.BulkMaxSize), + ) + } if s.configuration.PostgresConfiguration.MaxIdleConns != 0 { args = append( args, @@ -80,6 +102,66 @@ func (s *Server) Start() { fmt.Sprint(s.configuration.PostgresConfiguration.ConnMaxIdleTime), ) } + if s.configuration.NatsURL != "" { + args = append( + args, + "--"+publish.PublisherNatsEnabledFlag, + "--"+publish.PublisherNatsURLFlag, s.configuration.NatsURL, + "--"+publish.PublisherTopicMappingFlag, fmt.Sprintf("*:%s", s.id), + ) + } + if s.configuration.OTLPConfig != nil { + if s.configuration.OTLPConfig.Metrics != nil { + args = append( + args, + "--"+otlpmetrics.OtelMetricsExporterFlag, s.configuration.OTLPConfig.Metrics.Exporter, + ) + if s.configuration.OTLPConfig.Metrics.KeepInMemory { + args = append( + args, + "--"+otlpmetrics.OtelMetricsKeepInMemoryFlag, + ) + } + if s.configuration.OTLPConfig.Metrics.OTLPConfig != nil { + args = append( + args, + "--"+otlpmetrics.OtelMetricsExporterOTLPEndpointFlag, s.configuration.OTLPConfig.Metrics.OTLPConfig.Endpoint, + "--"+otlpmetrics.OtelMetricsExporterOTLPModeFlag, s.configuration.OTLPConfig.Metrics.OTLPConfig.Mode, + ) + if s.configuration.OTLPConfig.Metrics.OTLPConfig.Insecure { + args = append(args, "--"+otlpmetrics.OtelMetricsExporterOTLPInsecureFlag) + } + } + if s.configuration.OTLPConfig.Metrics.RuntimeMetrics { + args = append(args, "--"+otlpmetrics.OtelMetricsRuntimeFlag) + } + if s.configuration.OTLPConfig.Metrics.MinimumReadMemStatsInterval != 0 { + args = append( + args, + "--"+otlpmetrics.OtelMetricsRuntimeMinimumReadMemStatsIntervalFlag, + s.configuration.OTLPConfig.Metrics.MinimumReadMemStatsInterval.String(), + ) + } + if s.configuration.OTLPConfig.Metrics.PushInterval != 0 { + args = append( + args, + "--"+otlpmetrics.OtelMetricsExporterPushIntervalFlag, + s.configuration.OTLPConfig.Metrics.PushInterval.String(), + ) + } + if len(s.configuration.OTLPConfig.Metrics.ResourceAttributes) > 0 { + args = append( + args, + "--"+otlp.OtelResourceAttributesFlag, + strings.Join(s.configuration.OTLPConfig.Metrics.ResourceAttributes, ","), + ) + } + } + if s.configuration.OTLPConfig.BaseConfig.ServiceName != "" { + args = append(args, "--"+otlp.OtelServiceNameFlag, s.configuration.OTLPConfig.BaseConfig.ServiceName) + } + } + if s.configuration.Debug { args = append(args, "--"+service.DebugFlag) } @@ -114,15 +196,24 @@ func (s *Server) Start() { } } + var transport http.RoundTripper = &http.Transport{ + MaxIdleConns: 100, + MaxIdleConnsPerHost: 100, + MaxConnsPerHost: 100, + } + if s.configuration.Debug { + transport = httpclient.NewDebugHTTPTransport(transport) + } + s.httpClient = ledgerclient.New( ledgerclient.WithServerURL(httpserver.URL(s.ctx)), ledgerclient.WithClient(&http.Client{ - Transport: httpclient.NewDebugHTTPTransport(http.DefaultTransport), + Transport: transport, }), ) } -func (s *Server) Stop() { +func (s *Server) Stop(ctx context.Context) { s.t.Helper() if s.cancel == nil { @@ -134,7 +225,7 @@ func (s *Server) Stop() { // Wait app to be marked as stopped select { case <-service.Stopped(s.ctx): - case <-time.After(5 * time.Second): + case <-ctx.Done(): require.Fail(s.t, "service should have been stopped") } @@ -142,7 +233,7 @@ func (s *Server) Stop() { select { case err := <-s.errorChan: require.NoError(s.t, err) - case <-time.After(5 * time.Second): + case <-ctx.Done(): require.Fail(s.t, "service should have been stopped without error") } } @@ -151,23 +242,62 @@ func (s *Server) Client() 
*ledgerclient.Formance { return s.httpClient } -func (s *Server) Restart() { +func (s *Server) Restart(ctx context.Context) { s.t.Helper() - s.Stop() + s.Stop(ctx) s.Start() } +func (s *Server) Database() *bun.DB { + db, err := bunconnect.OpenSQLDB(s.ctx, s.configuration.PostgresConfiguration) + require.NoError(s.t, err) + s.t.Cleanup(func() { + require.NoError(s.t, db.Close()) + }) + + return db +} + +func (s *Server) Subscribe() chan *nats.Msg { + if s.configuration.NatsURL == "" { + require.Fail(s.t, "NATS URL must be set") + } + + ret := make(chan *nats.Msg) + conn, err := nats.Connect(s.configuration.NatsURL) + require.NoError(s.t, err) + + subscription, err := conn.Subscribe(s.id, func(msg *nats.Msg) { + ret <- msg + }) + require.NoError(s.t, err) + s.t.Cleanup(func() { + require.NoError(s.t, subscription.Unsubscribe()) + }) + return ret +} + +func (s *Server) URL() string { + return httpserver.URL(s.ctx) +} + func New(t T, configuration Configuration) *Server { + t.Helper() + srv := &Server{ t: t, configuration: configuration, + id: uuid.NewString()[:8], } t.Logf("Start testing server") srv.Start() t.Cleanup(func() { t.Logf("Stop testing server") - srv.Stop() + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + + srv.Stop(ctx) }) return srv diff --git a/scripts/export-database-schema.sh b/scripts/export-database-schema.sh new file mode 100755 index 000000000..3f4e549a3 --- /dev/null +++ b/scripts/export-database-schema.sh @@ -0,0 +1,16 @@ +#!/bin/bash + +echo "Creating PG server..." +postgresContainerID=$(docker run -d --rm -e POSTGRES_USER=root -e POSTGRES_PASSWORD=root -e POSTGRES_DB=formance --net=host postgres:15-alpine) +wait-for-it -w 127.0.0.1:5432 + +echo "Creating bucket..." +go run main.go buckets upgrade _default --postgres-uri "postgres://root:root@127.0.0.1:5432/formance?sslmode=disable" + +echo "Exporting schemas..." +docker run --rm -u root \ + -v ./docs/database:/output \ + --net=host \ + schemaspy/schemaspy:6.2.4 -u root -db formance -t pgsql11 -host 127.0.0.1 -port 5432 -p root -schemas _system,_default + +docker kill "$postgresContainerID" \ No newline at end of file diff --git a/test/e2e/api_accounts_list.go b/test/e2e/api_accounts_list.go new file mode 100644 index 000000000..d995faefb --- /dev/null +++ b/test/e2e/api_accounts_list.go @@ -0,0 +1,428 @@ +//go:build it + +package test_suite + +import ( + "fmt" + "github.com/formancehq/go-libs/v2/logging" + . "github.com/formancehq/go-libs/v2/testing/api" + . "github.com/formancehq/ledger/pkg/testserver" + "github.com/formancehq/stack/ledger/client/models/components" + "github.com/formancehq/stack/ledger/client/models/operations" + "math/big" + "sort" + "time" + + "github.com/formancehq/go-libs/v2/pointer" + + "github.com/formancehq/go-libs/v2/metadata" + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" +) + +var _ = Context("Ledger accounts list API tests", func() { + var ( + db = UseTemplatedDatabase() + ctx = logging.TestingContext() + ) + + testServer := NewTestServer(func() Configuration { + return Configuration{ + PostgresConfiguration: db.GetValue().ConnectionOptions(), + Output: GinkgoWriter, + Debug: debug, + NatsURL: natsServer.GetValue().ClientURL(), + } + }) + BeforeEach(func() { + err := CreateLedger(ctx, testServer.GetValue(), operations.V2CreateLedgerRequest{ + Ledger: "default", + }) + Expect(err).To(BeNil()) + }) + When("counting and listing accounts", func() { + var ( + metadata1 = map[string]string{ + "clientType": "gold", + } + + metadata2 = map[string]string{ + "clientType": "silver", + } + + timestamp = time.Now().Round(time.Second).UTC() + bigInt, _ = big.NewInt(0).SetString("999999999999999999999999999999999999999999999999999999999999999999999999999999999999999", 10) + ) + BeforeEach(func() { + err := AddMetadataToAccount( + ctx, + testServer.GetValue(), + operations.V2AddMetadataToAccountRequest{ + RequestBody: metadata1, + Address: "foo:foo", + Ledger: "default", + }, + ) + Expect(err).ToNot(HaveOccurred()) + + err = AddMetadataToAccount( + ctx, + testServer.GetValue(), + operations.V2AddMetadataToAccountRequest{ + RequestBody: metadata2, + Address: "foo:bar", + Ledger: "default", + }, + ) + Expect(err).ToNot(HaveOccurred()) + + _, err = CreateTransaction( + ctx, + testServer.GetValue(), + operations.V2CreateTransactionRequest{ + V2PostTransaction: components.V2PostTransaction{ + Metadata: map[string]string{}, + Postings: []components.V2Posting{{ + Amount: bigInt, + Asset: "USD", + Source: "world", + Destination: "foo:foo", + }}, + Timestamp: ×tamp, + }, + Ledger: "default", + }, + ) + Expect(err).ToNot(HaveOccurred()) + }) + It("should return a "+string(components.V2ErrorsEnumValidation)+" on invalid filter", func() { + _, err := ListAccounts( + ctx, + testServer.GetValue(), + operations.V2ListAccountsRequest{ + Ledger: "default", + RequestBody: map[string]interface{}{ + "$match": map[string]any{ + "invalid-key": 0, + }, + }, + }, + ) + Expect(err).To(HaveOccurred()) + Expect(err).To(HaveErrorCode(string(components.V2ErrorsEnumInternal))) + }) + It("should be countable on api", func() { + response, err := CountAccounts( + ctx, + testServer.GetValue(), + operations.V2CountAccountsRequest{ + Ledger: "default", + }, + ) + Expect(err).ToNot(HaveOccurred()) + Expect(response).To(Equal(3)) + }) + It("should be listed on api", func() { + response, err := ListAccounts( + ctx, + testServer.GetValue(), + operations.V2ListAccountsRequest{ + Ledger: "default", + Expand: pointer.For("volumes"), + }, + ) + Expect(err).ToNot(HaveOccurred()) + + accountsCursorResponse := response.Data + Expect(accountsCursorResponse).To(HaveLen(3)) + Expect(accountsCursorResponse[0]).To(Equal(components.V2Account{ + Address: "foo:bar", + Metadata: metadata2, + })) + Expect(accountsCursorResponse[1]).To(Equal(components.V2Account{ + Address: "foo:foo", + Metadata: metadata1, + Volumes: map[string]components.V2Volume{ + "USD": { + Input: bigInt, + Output: big.NewInt(0), + Balance: bigInt, + }, + }, + })) + Expect(accountsCursorResponse[2]).To(Equal(components.V2Account{ + Address: "world", + Metadata: metadata.Metadata{}, + Volumes: map[string]components.V2Volume{ + "USD": { + Output: bigInt, + Input: big.NewInt(0), + Balance: big.NewInt(0).Neg(bigInt), + }, + }, + })) + }) + It("should be listed on api using address filters", func() { + response, err := ListAccounts( + 
ctx, + testServer.GetValue(), + operations.V2ListAccountsRequest{ + Ledger: "default", + RequestBody: map[string]interface{}{ + "$match": map[string]any{ + "address": "foo:", + }, + }, + }, + ) + Expect(err).ToNot(HaveOccurred()) + + accountsCursorResponse := response.Data + Expect(accountsCursorResponse).To(HaveLen(2)) + Expect(accountsCursorResponse[0]).To(Equal(components.V2Account{ + Address: "foo:bar", + Metadata: metadata2, + })) + Expect(accountsCursorResponse[1]).To(Equal(components.V2Account{ + Address: "foo:foo", + Metadata: metadata1, + })) + + response, err = ListAccounts( + ctx, + testServer.GetValue(), + operations.V2ListAccountsRequest{ + Ledger: "default", + RequestBody: map[string]interface{}{ + "$match": map[string]any{ + "address": ":foo", + }, + }, + }, + ) + Expect(err).ToNot(HaveOccurred()) + + accountsCursorResponse = response.Data + Expect(accountsCursorResponse).To(HaveLen(1)) + Expect(accountsCursorResponse[0]).To(Equal(components.V2Account{ + Address: "foo:foo", + Metadata: metadata1, + })) + }) + It("should be listed on api using metadata filters", func() { + response, err := ListAccounts( + ctx, + testServer.GetValue(), + operations.V2ListAccountsRequest{ + Ledger: "default", + RequestBody: map[string]interface{}{ + "$match": map[string]any{ + "metadata[clientType]": "gold", + }, + }, + }, + ) + Expect(err).ToNot(HaveOccurred()) + + accountsCursorResponse := response.Data + Expect(accountsCursorResponse).To(HaveLen(1)) + Expect(accountsCursorResponse[0]).To(Equal(components.V2Account{ + Address: "foo:foo", + Metadata: metadata1, + })) + }) + It("should be listable on api using $not filter", func() { + response, err := ListAccounts( + ctx, + testServer.GetValue(), + operations.V2ListAccountsRequest{ + Ledger: "default", + RequestBody: map[string]interface{}{ + "$not": map[string]any{ + "$match": map[string]any{ + "address": "world", + }, + }, + }, + }, + ) + Expect(err).ToNot(HaveOccurred()) + + accountsCursorResponse := response.Data + Expect(accountsCursorResponse).To(HaveLen(2)) + }) + }) + + When("counting and listing accounts empty", func() { + It("should be countable on api even if empty", func() { + response, err := CountAccounts( + ctx, + testServer.GetValue(), + operations.V2CountAccountsRequest{ + Ledger: "default", + }, + ) + Expect(err).ToNot(HaveOccurred()) + Expect(response).To(Equal(0)) + }) + It("should be listed on api even if empty", func() { + response, err := ListAccounts( + ctx, + testServer.GetValue(), + operations.V2ListAccountsRequest{ + Ledger: "default", + }, + ) + Expect(err).ToNot(HaveOccurred()) + Expect(response.Data).To(HaveLen(0)) + }) + }) + + const ( + pageSize = int64(10) + accountCounts = 2 * pageSize + ) + When("creating accounts", func() { + var ( + accounts []components.V2Account + ) + BeforeEach(func() { + for i := 0; i < int(accountCounts); i++ { + m := map[string]string{ + "id": fmt.Sprintf("%d", i), + } + + err := AddMetadataToAccount( + ctx, + testServer.GetValue(), + operations.V2AddMetadataToAccountRequest{ + RequestBody: m, + Address: fmt.Sprintf("foo:%d", i), + Ledger: "default", + }, + ) + Expect(err).ToNot(HaveOccurred()) + + accounts = append(accounts, components.V2Account{ + Address: fmt.Sprintf("foo:%d", i), + Metadata: m, + }) + + sort.Slice(accounts, func(i, j int) bool { + return accounts[i].Address < accounts[j].Address + }) + } + }) + AfterEach(func() { + accounts = nil + }) + When(fmt.Sprintf("listing accounts using page size of %d", pageSize), func() { + var ( + response 
*components.V2AccountsCursorResponseCursor + err error + ) + BeforeEach(func() { + response, err = ListAccounts( + ctx, + testServer.GetValue(), + operations.V2ListAccountsRequest{ + Ledger: "default", + PageSize: pointer.For(pageSize), + }, + ) + Expect(err).ToNot(HaveOccurred()) + + Expect(response.HasMore).To(BeTrue()) + Expect(response.Previous).To(BeNil()) + Expect(response.Next).NotTo(BeNil()) + }) + It("should return the first page", func() { + Expect(response.PageSize).To(Equal(pageSize)) + Expect(response.Data).To(Equal(accounts[:pageSize])) + }) + When("following next cursor", func() { + BeforeEach(func() { + response, err = ListAccounts( + ctx, + testServer.GetValue(), + operations.V2ListAccountsRequest{ + Cursor: response.Next, + Ledger: "default", + }, + ) + Expect(err).ToNot(HaveOccurred()) + }) + It("should return next page", func() { + Expect(response.PageSize).To(Equal(pageSize)) + Expect(response.Data).To(Equal(accounts[pageSize : 2*pageSize])) + Expect(response.Next).To(BeNil()) + }) + When("following previous cursor", func() { + BeforeEach(func() { + response, err = ListAccounts( + ctx, + testServer.GetValue(), + operations.V2ListAccountsRequest{ + Ledger: "default", + Cursor: response.Previous, + }, + ) + Expect(err).ToNot(HaveOccurred()) + }) + It("should return first page", func() { + Expect(response.PageSize).To(Equal(pageSize)) + Expect(response.Data).To(Equal(accounts[:pageSize])) + Expect(response.Previous).To(BeNil()) + }) + }) + }) + }) + }) + + When("Inserting one transaction in past and one in the future", func() { + now := time.Now() + BeforeEach(func() { + _, err := CreateTransaction(ctx, testServer.GetValue(), operations.V2CreateTransactionRequest{ + V2PostTransaction: components.V2PostTransaction{ + Postings: []components.V2Posting{{ + Amount: big.NewInt(100), + Asset: "USD", + Destination: "foo", + Source: "world", + }}, + Timestamp: pointer.For(now.Add(-12 * time.Hour)), + Metadata: map[string]string{}, + }, + Ledger: "default", + }) + Expect(err).To(Succeed()) + + _, err = CreateTransaction(ctx, testServer.GetValue(), operations.V2CreateTransactionRequest{ + V2PostTransaction: components.V2PostTransaction{ + Postings: []components.V2Posting{{ + Amount: big.NewInt(100), + Asset: "USD", + Destination: "foo", + Source: "world", + }}, + Timestamp: pointer.For(now.Add(12 * time.Hour)), + Metadata: map[string]string{}, + }, + Ledger: "default", + }) + Expect(err).To(Succeed()) + }) + When("getting account in the present", func() { + It("should ignore future transaction on effective volumes", func() { + accountResponse, err := GetAccount(ctx, testServer.GetValue(), operations.V2GetAccountRequest{ + Address: "foo", + Expand: pointer.For("effectiveVolumes"), + Ledger: "default", + Pit: pointer.For(time.Now().Add(time.Minute)), + }) + Expect(err).To(Succeed()) + Expect(accountResponse.EffectiveVolumes["USD"].Balance).To(Equal(big.NewInt(100))) + }) + }) + }) +}) diff --git a/test/e2e/api_accounts_metadata.go b/test/e2e/api_accounts_metadata.go new file mode 100644 index 000000000..a0e618afc --- /dev/null +++ b/test/e2e/api_accounts_metadata.go @@ -0,0 +1,79 @@ +//go:build it + +package test_suite + +import ( + "github.com/formancehq/go-libs/v2/logging" + ledgerevents "github.com/formancehq/ledger/pkg/events" + . "github.com/formancehq/ledger/pkg/testserver" + "github.com/formancehq/stack/ledger/client/models/components" + "github.com/formancehq/stack/ledger/client/models/operations" + "github.com/nats-io/nats.go" + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" +) + +var _ = Context("Ledger accounts list API tests", func() { + var ( + db = UseTemplatedDatabase() + ctx = logging.TestingContext() + ) + + testServer := NewTestServer(func() Configuration { + return Configuration{ + PostgresConfiguration: db.GetValue().ConnectionOptions(), + Output: GinkgoWriter, + Debug: debug, + NatsURL: natsServer.GetValue().ClientURL(), + } + }) + var events chan *nats.Msg + BeforeEach(func() { + events = testServer.GetValue().Subscribe() + }) + + BeforeEach(func() { + err := CreateLedger(ctx, testServer.GetValue(), operations.V2CreateLedgerRequest{ + Ledger: "default", + }) + Expect(err).To(BeNil()) + }) + When("setting metadata on a unknown account", func() { + var ( + metadata = map[string]string{ + "clientType": "gold", + } + ) + BeforeEach(func() { + err := AddMetadataToAccount( + ctx, + testServer.GetValue(), + operations.V2AddMetadataToAccountRequest{ + RequestBody: metadata, + Address: "foo", + Ledger: "default", + }, + ) + Expect(err).ToNot(HaveOccurred()) + }) + It("should be available on api", func() { + response, err := GetAccount( + ctx, + testServer.GetValue(), + operations.V2GetAccountRequest{ + Address: "foo", + Ledger: "default", + }, + ) + Expect(err).ToNot(HaveOccurred()) + + Expect(*response).Should(Equal(components.V2Account{ + Address: "foo", + Metadata: metadata, + })) + }) + It("should trigger a new event", func() { + Eventually(events).Should(Receive(Event(ledgerevents.EventTypeSavedMetadata))) + }) + }) +}) diff --git a/test/e2e/api_balances_aggregated.go b/test/e2e/api_balances_aggregated.go new file mode 100644 index 000000000..b890a145d --- /dev/null +++ b/test/e2e/api_balances_aggregated.go @@ -0,0 +1,181 @@ +//go:build it + +package test_suite + +import ( + "github.com/formancehq/go-libs/v2/logging" + "github.com/formancehq/go-libs/v2/pointer" + . "github.com/formancehq/ledger/pkg/testserver" + "github.com/formancehq/stack/ledger/client/models/components" + "github.com/formancehq/stack/ledger/client/models/operations" + "math/big" + "time" + + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" +) + +var _ = Context("Ledger engine tests", func() { + var ( + db = UseTemplatedDatabase() + ctx = logging.TestingContext() + ) + + testServer := NewTestServer(func() Configuration { + return Configuration{ + PostgresConfiguration: db.GetValue().ConnectionOptions(), + Output: GinkgoWriter, + Debug: debug, + NatsURL: natsServer.GetValue().ClientURL(), + } + }) + now := time.Now().UTC().Round(time.Second) + When("creating two transactions on a ledger with custom metadata", func() { + var firstTransactionsInsertedAt time.Time + BeforeEach(func() { + err := CreateLedger(ctx, testServer.GetValue(), operations.V2CreateLedgerRequest{ + Ledger: "default", + }) + Expect(err).To(BeNil()) + + _, err = CreateBulk(ctx, testServer.GetValue(), operations.V2CreateBulkRequest{ + RequestBody: []components.V2BulkElement{ + components.CreateV2BulkElementCreateTransaction(components.V2BulkElementCreateTransaction{ + Data: &components.V2PostTransaction{ + Metadata: map[string]string{}, + Postings: []components.V2Posting{{ + Amount: big.NewInt(100), + Asset: "USD/2", + Destination: "bank1", + Source: "world", + }}, + Timestamp: pointer.For(now.Add(-time.Minute)), + }, + }), + components.CreateV2BulkElementCreateTransaction(components.V2BulkElementCreateTransaction{ + Data: &components.V2PostTransaction{ + Metadata: map[string]string{}, + Postings: []components.V2Posting{{ + Amount: big.NewInt(100), + Asset: "USD/2", + Destination: "bank2", + Source: "world", + }}, + Timestamp: pointer.For(now.Add(-2 * time.Minute)), + }, + }), + components.CreateV2BulkElementCreateTransaction(components.V2BulkElementCreateTransaction{ + Data: &components.V2PostTransaction{ + Metadata: map[string]string{}, + Postings: []components.V2Posting{{ + Amount: big.NewInt(100), + Asset: "USD/2", + Destination: "bank1", + Source: "world", + }}, + Timestamp: pointer.For(now), + }, + }), + components.CreateV2BulkElementAddMetadata(components.V2BulkElementAddMetadata{ + Data: &components.Data{ + Metadata: map[string]string{ + "category": "premium", + }, + TargetID: components.CreateV2TargetIDStr("bank2"), + TargetType: components.V2TargetTypeAccount, + }, + }), + components.CreateV2BulkElementAddMetadata(components.V2BulkElementAddMetadata{ + Data: &components.Data{ + Metadata: map[string]string{ + "category": "premium", + }, + TargetID: components.CreateV2TargetIDStr("bank1"), + TargetType: components.V2TargetTypeAccount, + }, + }), + }, + Ledger: "default", + }) + Expect(err).To(Succeed()) + + firstTransactionsInsertedAt = time.Now() + + _, err = CreateBulk(ctx, testServer.GetValue(), operations.V2CreateBulkRequest{ + RequestBody: []components.V2BulkElement{ + components.CreateV2BulkElementCreateTransaction(components.V2BulkElementCreateTransaction{ + Data: &components.V2PostTransaction{ + Metadata: map[string]string{}, + Postings: []components.V2Posting{{ + Amount: big.NewInt(100), + Asset: "USD/2", + Destination: "bank1", + Source: "world", + }}, + Timestamp: pointer.For(now), + }, + }), + }, + Ledger: "default", + }) + Expect(err).To(Succeed()) + }) + It("should be ok when aggregating using the metadata", func() { + response, err := GetBalancesAggregated( + ctx, + testServer.GetValue(), + operations.V2GetBalancesAggregatedRequest{ + RequestBody: map[string]any{ + "$match": map[string]any{ + "metadata[category]": "premium", + }, + }, + Ledger: "default", + }, + ) + Expect(err).ToNot(HaveOccurred()) + + Expect(response).To(HaveLen(1)) + Expect(response["USD/2"]).To(Equal(big.NewInt(400))) + }) + It("should be ok 
when aggregating using pit on effective date", func() { + response, err := GetBalancesAggregated( + ctx, + testServer.GetValue(), + operations.V2GetBalancesAggregatedRequest{ + Ledger: "default", + Pit: pointer.For(now.Add(-time.Minute)), + RequestBody: map[string]interface{}{ + "$match": map[string]any{ + "address": "bank1", + }, + }, + }, + ) + Expect(err).ToNot(HaveOccurred()) + + Expect(response).To(HaveLen(1)) + Expect(response["USD/2"]).To(Equal(big.NewInt(100))) + }) + It("should be ok when aggregating using pit on insertion date", func() { + response, err := GetBalancesAggregated( + ctx, + testServer.GetValue(), + operations.V2GetBalancesAggregatedRequest{ + Ledger: "default", + Pit: pointer.For(firstTransactionsInsertedAt), + UseInsertionDate: pointer.For(true), + RequestBody: map[string]interface{}{ + "$match": map[string]any{ + "address": "bank1", + }, + }, + }, + ) + Expect(err).ToNot(HaveOccurred()) + + Expect(response).To(HaveLen(1)) + Expect(response["USD/2"]).To(Equal(big.NewInt(200))) + }) + }) +}) diff --git a/test/e2e/api_bulk.go b/test/e2e/api_bulk.go new file mode 100644 index 000000000..7c6570312 --- /dev/null +++ b/test/e2e/api_bulk.go @@ -0,0 +1,189 @@ +//go:build it + +package test_suite + +import ( + "github.com/formancehq/go-libs/v2/logging" + . "github.com/formancehq/go-libs/v2/testing/api" + . "github.com/formancehq/ledger/pkg/testserver" + "github.com/formancehq/stack/ledger/client/models/components" + "github.com/formancehq/stack/ledger/client/models/operations" + "math/big" + "time" + + "github.com/formancehq/go-libs/v2/metadata" + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" +) + +var _ = Context("Ledger engine tests", func() { + var ( + db = UseTemplatedDatabase() + ctx = logging.TestingContext() + bulkMaxSize = 5 + ) + + testServer := NewTestServer(func() Configuration { + return Configuration{ + PostgresConfiguration: db.GetValue().ConnectionOptions(), + Output: GinkgoWriter, + Debug: debug, + NatsURL: natsServer.GetValue().ClientURL(), + BulkMaxSize: bulkMaxSize, + } + }) + BeforeEach(func() { + err := CreateLedger(ctx, testServer.GetValue(), operations.V2CreateLedgerRequest{ + Ledger: "default", + }) + Expect(err).To(BeNil()) + }) + When("creating a bulk on a ledger", func() { + var ( + now = time.Now().Round(time.Microsecond).UTC() + items []components.V2BulkElement + err error + ) + BeforeEach(func() { + items = []components.V2BulkElement{ + components.CreateV2BulkElementCreateTransaction(components.V2BulkElementCreateTransaction{ + Data: &components.V2PostTransaction{ + Metadata: map[string]string{}, + Postings: []components.V2Posting{{ + Amount: big.NewInt(100), + Asset: "USD/2", + Destination: "bank", + Source: "world", + }}, + Timestamp: &now, + }, + }), + components.CreateV2BulkElementAddMetadata(components.V2BulkElementAddMetadata{ + Data: &components.Data{ + Metadata: metadata.Metadata{ + "foo": "bar", + "role": "admin", + }, + TargetID: components.CreateV2TargetIDBigint(big.NewInt(1)), + TargetType: components.V2TargetTypeTransaction, + }, + }), + components.CreateV2BulkElementDeleteMetadata(components.V2BulkElementDeleteMetadata{ + Data: &components.V2BulkElementDeleteMetadataData{ + Key: "foo", + TargetID: components.CreateV2TargetIDBigint(big.NewInt(1)), + TargetType: components.V2TargetTypeTransaction, + }, + }), + components.CreateV2BulkElementRevertTransaction(components.V2BulkElementRevertTransaction{ + Data: &components.V2BulkElementRevertTransactionData{ + ID: big.NewInt(1), + }, + }), + } + }) + JustBeforeEach(func() { 
+ _, err = CreateBulk(ctx, testServer.GetValue(), operations.V2CreateBulkRequest{ + RequestBody: items, + Ledger: "default", + }) + }) + It("should be ok", func() { + Expect(err).To(Succeed()) + + tx, err := GetTransaction(ctx, testServer.GetValue(), operations.V2GetTransactionRequest{ + ID: big.NewInt(1), + Ledger: "default", + }) + Expect(err).To(Succeed()) + + reversedTx, err := GetTransaction(ctx, testServer.GetValue(), operations.V2GetTransactionRequest{ + ID: big.NewInt(2), + Ledger: "default", + }) + Expect(err).To(Succeed()) + + Expect(*tx).To(Equal(components.V2Transaction{ + ID: big.NewInt(1), + Metadata: metadata.Metadata{ + "role": "admin", + }, + Postings: []components.V2Posting{{ + Amount: big.NewInt(100), + Asset: "USD/2", + Destination: "bank", + Source: "world", + }}, + Reverted: true, + RevertedAt: &reversedTx.Timestamp, + Timestamp: now, + InsertedAt: tx.InsertedAt, + })) + }) + Context("with exceeded batch size", func() { + BeforeEach(func() { + items = make([]components.V2BulkElement, 0) + for i := 0; i < bulkMaxSize+1; i++ { + items = append(items, components.CreateV2BulkElementCreateTransaction(components.V2BulkElementCreateTransaction{ + Data: &components.V2PostTransaction{ + Metadata: map[string]string{}, + Postings: []components.V2Posting{{ + Amount: big.NewInt(100), + Asset: "USD/2", + Destination: "bank", + Source: "world", + }}, + Timestamp: &now, + }, + })) + } + }) + It("should respond with an error", func() { + Expect(err).To(HaveErrorCode(string(components.V2ErrorsEnumBulkSizeExceeded))) + }) + }) + }) + When("creating a bulk with an error on a ledger", func() { + var ( + now = time.Now().Round(time.Microsecond).UTC() + err error + bulkResponse []components.V2BulkElementResult + ) + BeforeEach(func() { + bulkResponse, err = CreateBulk(ctx, testServer.GetValue(), operations.V2CreateBulkRequest{ + RequestBody: []components.V2BulkElement{ + components.CreateV2BulkElementCreateTransaction(components.V2BulkElementCreateTransaction{ + Data: &components.V2PostTransaction{ + Metadata: map[string]string{}, + Postings: []components.V2Posting{{ + Amount: big.NewInt(100), + Asset: "USD/2", + Destination: "bank", + Source: "world", + }}, + Timestamp: &now, + }, + }), + components.CreateV2BulkElementCreateTransaction(components.V2BulkElementCreateTransaction{ + Data: &components.V2PostTransaction{ + Metadata: map[string]string{}, + Postings: []components.V2Posting{{ + Amount: big.NewInt(200), // Insufficient fund + Asset: "USD/2", + Destination: "user", + Source: "bank", + }}, + Timestamp: &now, + }, + }), + }, + Ledger: "default", + }) + Expect(err).To(Succeed()) + }) + It("should respond with an error", func() { + Expect(bulkResponse[1].Type).To(Equal(components.V2BulkElementResultType("ERROR"))) + Expect(bulkResponse[1].V2BulkElementResultError.ErrorCode).To(Equal("INSUFFICIENT_FUND")) + }) + }) +}) diff --git a/test/e2e/api_ledgers_create_test.go b/test/e2e/api_ledgers_create_test.go new file mode 100644 index 000000000..7d7161aa2 --- /dev/null +++ b/test/e2e/api_ledgers_create_test.go @@ -0,0 +1,140 @@ +//go:build it + +package test_suite + +import ( + "github.com/formancehq/go-libs/v2/logging" + "github.com/formancehq/go-libs/v2/pointer" + . "github.com/formancehq/go-libs/v2/testing/api" + ledger "github.com/formancehq/ledger/internal" + . "github.com/formancehq/ledger/pkg/testserver" + "github.com/formancehq/stack/ledger/client/models/components" + "github.com/formancehq/stack/ledger/client/models/operations" + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" + "strings" +) + +var _ = Context("Ledger engine tests", func() { + var ( + db = UseTemplatedDatabase() + ctx = logging.TestingContext() + ) + + testServer := NewTestServer(func() Configuration { + return Configuration{ + PostgresConfiguration: db.GetValue().ConnectionOptions(), + Output: GinkgoWriter, + Debug: debug, + NatsURL: natsServer.GetValue().ClientURL(), + ExperimentalFeatures: true, + } + }) + When("creating a new ledger", func() { + var ( + createLedgerRequest operations.V2CreateLedgerRequest + err error + ) + BeforeEach(func() { + createLedgerRequest = operations.V2CreateLedgerRequest{ + Ledger: "foo", + V2CreateLedgerRequest: &components.V2CreateLedgerRequest{}, + } + }) + JustBeforeEach(func() { + err = CreateLedger(ctx, testServer.GetValue(), createLedgerRequest) + }) + It("should be ok", func() { + Expect(err).To(BeNil()) + }) + Context("with specific features set", func() { + BeforeEach(func() { + createLedgerRequest.V2CreateLedgerRequest.Features = ledger.MinimalFeatureSet. + With(ledger.FeatureMovesHistoryPostCommitEffectiveVolumes, "DISABLED") + }) + It("should be ok", func() { + Expect(err).To(BeNil()) + }) + }) + Context("with invalid feature configuration", func() { + BeforeEach(func() { + createLedgerRequest.V2CreateLedgerRequest.Features = ledger.MinimalFeatureSet. + With(ledger.FeatureMovesHistoryPostCommitEffectiveVolumes, "XXX") + }) + It("should fail", func() { + Expect(err).To(HaveErrorCode(string(components.V2ErrorsEnumValidation))) + }) + }) + Context("with invalid feature name", func() { + BeforeEach(func() { + createLedgerRequest.V2CreateLedgerRequest.Features = ledger.MinimalFeatureSet. + With("foo", "XXX") + }) + It("should fail", func() { + Expect(err).To(HaveErrorCode(string(components.V2ErrorsEnumValidation))) + }) + }) + Context("trying to create another ledger with the same name", func() { + JustBeforeEach(func() { + err := CreateLedger(ctx, testServer.GetValue(), operations.V2CreateLedgerRequest{ + Ledger: createLedgerRequest.Ledger, + }) + Expect(err).NotTo(BeNil()) + Expect(err).To(HaveErrorCode(string(components.V2ErrorsEnumValidation))) + }) + It("should fail", func() {}) + }) + Context("bucket naming convention depends on the database 63 bytes length (pg constraint)", func() { + BeforeEach(func() { + createLedgerRequest.V2CreateLedgerRequest.Bucket = pointer.For(strings.Repeat("a", 64)) + }) + It("should fail with > 63 characters in ledger or bucket name", func() { + Expect(err).To(HaveOccurred()) + }) + }) + Context("With metadata", func() { + BeforeEach(func() { + createLedgerRequest.V2CreateLedgerRequest.Metadata = map[string]string{ + "foo": "bar", + } + }) + It("Should be ok", func() { + ledger, err := GetLedger(ctx, testServer.GetValue(), operations.V2GetLedgerRequest{ + Ledger: createLedgerRequest.Ledger, + }) + Expect(err).To(BeNil()) + Expect(ledger.Metadata).To(Equal(createLedgerRequest.V2CreateLedgerRequest.Metadata)) + }) + }) + Context("with invalid ledger name", func() { + BeforeEach(func() { + createLedgerRequest.Ledger = "invalid\\name\\contains\\some\\backslash" + }) + It("should fail", func() { + Expect(err).NotTo(BeNil()) + Expect(err).To(HaveErrorCode(string(components.V2ErrorsEnumValidation))) + }) + }) + Context("with invalid bucket name", func() { + BeforeEach(func() { + createLedgerRequest.V2CreateLedgerRequest = &components.V2CreateLedgerRequest{ + Bucket: pointer.For("invalid\\name\\contains\\some\\backslash"), + } + }) + It("should fail", func() { + Expect(err).NotTo(BeNil()) + 
Expect(err).To(HaveErrorCode(string(components.V2ErrorsEnumValidation))) + }) + }) + Context("on alternate bucket", func() { + BeforeEach(func() { + createLedgerRequest.V2CreateLedgerRequest = &components.V2CreateLedgerRequest{ + Bucket: pointer.For("bucket0"), + } + }) + It("should be ok", func() { + Expect(err).To(BeNil()) + }) + }) + }) +}) diff --git a/test/e2e/api_ledgers_import_test.go b/test/e2e/api_ledgers_import_test.go new file mode 100644 index 000000000..9576a5127 --- /dev/null +++ b/test/e2e/api_ledgers_import_test.go @@ -0,0 +1,242 @@ +//go:build it + +package test_suite + +import ( + "database/sql" + . "github.com/formancehq/go-libs/v2/testing/api" + "io" + "math/big" + + "github.com/formancehq/go-libs/v2/logging" + "github.com/formancehq/go-libs/v2/pointer" + ledger "github.com/formancehq/ledger/internal" + . "github.com/formancehq/ledger/pkg/testserver" + "github.com/formancehq/stack/ledger/client/models/components" + "github.com/formancehq/stack/ledger/client/models/operations" + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" + "github.com/uptrace/bun" +) + +var _ = Context("Ledger engine tests", func() { + var ( + db = UseTemplatedDatabase() + ctx = logging.TestingContext() + ) + + testServer := NewTestServer(func() Configuration { + return Configuration{ + PostgresConfiguration: db.GetValue().ConnectionOptions(), + Output: GinkgoWriter, + Debug: debug, + NatsURL: natsServer.GetValue().ClientURL(), + ExperimentalFeatures: true, + } + }) + When("creating a new ledger", func() { + var ( + createLedgerRequest operations.V2CreateLedgerRequest + err error + ) + BeforeEach(func() { + createLedgerRequest = operations.V2CreateLedgerRequest{ + Ledger: "foo", + V2CreateLedgerRequest: &components.V2CreateLedgerRequest{ + Features: ledger.MinimalFeatureSet, + }, + } + }) + JustBeforeEach(func() { + err = CreateLedger(ctx, testServer.GetValue(), createLedgerRequest) + }) + Context("with a set of all possible actions", func() { + JustBeforeEach(func() { + Expect(err).To(BeNil()) + tx, err := CreateTransaction(ctx, testServer.GetValue(), operations.V2CreateTransactionRequest{ + Ledger: createLedgerRequest.Ledger, + V2PostTransaction: components.V2PostTransaction{ + Script: &components.V2PostTransactionScript{ + Plain: `send [COIN 100] ( + source = @world + destination = @bob + ) + set_account_meta(@world, "foo", "bar") + `, + }, + }, + }) + Expect(err).To(BeNil()) + + Expect(AddMetadataToTransaction(ctx, testServer.GetValue(), operations.V2AddMetadataOnTransactionRequest{ + Ledger: createLedgerRequest.Ledger, + ID: tx.ID, + RequestBody: map[string]string{ + "foo": "bar", + }, + })).To(BeNil()) + + Expect(AddMetadataToAccount(ctx, testServer.GetValue(), operations.V2AddMetadataToAccountRequest{ + Ledger: createLedgerRequest.Ledger, + Address: "bank", + RequestBody: map[string]string{ + "foo": "bar", + }, + })).To(BeNil()) + + Expect(DeleteTransactionMetadata(ctx, testServer.GetValue(), operations.V2DeleteTransactionMetadataRequest{ + Ledger: createLedgerRequest.Ledger, + ID: tx.ID, + Key: "foo", + })).To(BeNil()) + + Expect(DeleteAccountMetadata(ctx, testServer.GetValue(), operations.V2DeleteAccountMetadataRequest{ + Ledger: createLedgerRequest.Ledger, + Address: "world", + Key: "foo", + })).To(BeNil()) + + _, err = RevertTransaction(ctx, testServer.GetValue(), operations.V2RevertTransactionRequest{ + Ledger: createLedgerRequest.Ledger, + ID: tx.ID, + }) + Expect(err).To(BeNil()) + }) + When("exporting the logs", func() { + var ( + reader io.Reader + err error + ) + 
JustBeforeEach(func() { + reader, err = Export(ctx, testServer.GetValue(), operations.V2ExportLogsRequest{ + Ledger: createLedgerRequest.Ledger, + }) + Expect(err).To(BeNil()) + }) + It("should be ok", func() {}) + When("then create a new ledger", func() { + var ledgerCopyName string + JustBeforeEach(func() { + ledgerCopyName = createLedgerRequest.Ledger + "-copy" + err := CreateLedger(ctx, testServer.GetValue(), operations.V2CreateLedgerRequest{ + Ledger: ledgerCopyName, + V2CreateLedgerRequest: &components.V2CreateLedgerRequest{ + Features: ledger.MinimalFeatureSet, + }, + }) + Expect(err).To(BeNil()) + }) + + importLogs := func() error { + GinkgoHelper() + + data, err := io.ReadAll(reader) + Expect(err).To(BeNil()) + + return Import(ctx, testServer.GetValue(), operations.V2ImportLogsRequest{ + Ledger: ledgerCopyName, + RequestBody: pointer.For(string(data)), + }) + } + + When("importing data", func() { + It("should be ok", func() { + Expect(importLogs()).To(Succeed()) + }) + }) + Context("with state to 'in-use'", func() { + JustBeforeEach(func() { + _, err := CreateTransaction(ctx, testServer.GetValue(), operations.V2CreateTransactionRequest{ + Ledger: ledgerCopyName, + V2PostTransaction: components.V2PostTransaction{ + Postings: []components.V2Posting{{ + Source: "world", + Destination: "dst", + Asset: "USD", + Amount: big.NewInt(100), + }}, + }, + }) + Expect(err).To(BeNil()) + }) + When("importing data", func() { + It("Should fail with IMPORT code", func() { + err := importLogs() + Expect(err).To(HaveErrorCode(string(components.V2ErrorsEnumImport))) + }) + }) + }) + Context("with concurrent transaction creation", func() { + var ( + sqlTx bun.Tx + importErrChan chan error + db *bun.DB + ) + // the import process relies on the ledger state: + // if the ledger already has some logs, it is considered in use and the import must fail. + // as the sdk does not allow controlling the stream passed to the Import function, + // we take a lock on the logs table to force the import to wait + // while we make a concurrent request + JustBeforeEach(func() { + db = testServer.GetValue().Database() + sqlTx, err = db.BeginTx(ctx, &sql.TxOptions{}) + Expect(err).To(BeNil()) + + DeferCleanup(func() { + _ = sqlTx.Rollback() + }) + + _, err := sqlTx.NewRaw("lock table _default.logs").Exec(ctx) + Expect(err).To(BeNil()) + + go func() { + defer GinkgoRecover() + + // should block + _, err := CreateTransaction(ctx, testServer.GetValue(), operations.V2CreateTransactionRequest{ + Ledger: ledgerCopyName, + Force: pointer.For(true), + V2PostTransaction: components.V2PostTransaction{ + Postings: []components.V2Posting{{ + Source: "a", + Destination: "b", + Asset: "USD", + Amount: big.NewInt(100), + }}, + }, + }) + Expect(err).To(BeNil()) + }() + + // check postgres locks + // we wait for two active locks. The first is the one we took in the test. + // the second is the one taken by the call to CreateTransaction. + // Once we have the two locks, we know that CreateTransaction is inside a sql tx. + Eventually(func(g Gomega) int { + count, err := db.NewSelect(). + Table("pg_stat_activity"). + Where("state <> 'idle' and pid <> pg_backend_pid()"). + Where(`query like 'lock table _default.logs%' or query like 'INSERT INTO "_default".logs%'`).
+ Count(ctx) + g.Expect(err).To(BeNil()) + return count + }).Should(Equal(2)) + + importErrChan = make(chan error, 1) + go func() { + defer GinkgoRecover() + + // the call on importLogs() should block too since the logs table is locked + importErrChan <- importLogs() + }() + }) + It("should fail", func() { + Expect(sqlTx.Rollback()).To(Succeed()) + Eventually(importErrChan).Should(Receive(HaveErrorCode(string(components.V2ErrorsEnumImport)))) + }) + }) + }) + }) + }) + }) +}) diff --git a/test/e2e/api_ledgers_list.go b/test/e2e/api_ledgers_list.go new file mode 100644 index 000000000..113b24734 --- /dev/null +++ b/test/e2e/api_ledgers_list.go @@ -0,0 +1,44 @@ +//go:build it + +package test_suite + +import ( + "fmt" + "github.com/formancehq/go-libs/v2/logging" + . "github.com/formancehq/ledger/pkg/testserver" + "github.com/formancehq/stack/ledger/client/models/operations" + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" +) + +var _ = Context("Ledger engine tests", func() { + var ( + db = UseTemplatedDatabase() + ctx = logging.TestingContext() + ) + + testServer := NewTestServer(func() Configuration { + return Configuration{ + PostgresConfiguration: db.GetValue().ConnectionOptions(), + Output: GinkgoWriter, + Debug: debug, + NatsURL: natsServer.GetValue().ClientURL(), + } + }) + + When("creating 10 ledger", func() { + BeforeEach(func() { + for i := range 10 { + err := CreateLedger(ctx, testServer.GetValue(), operations.V2CreateLedgerRequest{ + Ledger: fmt.Sprintf("ledger%d", i), + }) + Expect(err).To(BeNil()) + } + }) + It("should be listable", func() { + ledgers, err := ListLedgers(ctx, testServer.GetValue(), operations.V2ListLedgersRequest{}) + Expect(err).To(BeNil()) + Expect(ledgers.Data).To(HaveLen(10)) + }) + }) +}) diff --git a/test/e2e/api_ledgers_metadata.go b/test/e2e/api_ledgers_metadata.go new file mode 100644 index 000000000..35993fd57 --- /dev/null +++ b/test/e2e/api_ledgers_metadata.go @@ -0,0 +1,70 @@ +//go:build it + +package test_suite + +import ( + "github.com/formancehq/go-libs/v2/logging" + . "github.com/formancehq/ledger/pkg/testserver" + "github.com/formancehq/stack/ledger/client/models/operations" + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" +) + +var _ = Context("Ledger engine tests", func() { + var ( + db = UseTemplatedDatabase() + ctx = logging.TestingContext() + ) + + testServer := NewTestServer(func() Configuration { + return Configuration{ + PostgresConfiguration: db.GetValue().ConnectionOptions(), + Output: GinkgoWriter, + Debug: debug, + NatsURL: natsServer.GetValue().ClientURL(), + } + }) + When("creating a ledger", func() { + BeforeEach(func() { + err := CreateLedger(ctx, testServer.GetValue(), operations.V2CreateLedgerRequest{ + Ledger: "default", + }) + Expect(err).To(BeNil()) + }) + When("updating metadata", func() { + m := map[string]string{ + "foo": "bar", + } + BeforeEach(func() { + err := UpdateLedgerMetadata(ctx, testServer.GetValue(), operations.V2UpdateLedgerMetadataRequest{ + Ledger: "default", + RequestBody: m, + }) + Expect(err).To(BeNil()) + }) + It("should be ok", func() { + ledger, err := GetLedger(ctx, testServer.GetValue(), operations.V2GetLedgerRequest{ + Ledger: "default", + }) + Expect(err).To(BeNil()) + Expect(ledger.Metadata).To(Equal(m)) + }) + When("deleting metadata", func() { + BeforeEach(func() { + err := DeleteLedgerMetadata(ctx, testServer.GetValue(), operations.V2DeleteLedgerMetadataRequest{ + Ledger: "default", + Key: "foo", + }) + Expect(err).To(BeNil()) + }) + It("should be ok", func() { + ledger, err := GetLedger(ctx, testServer.GetValue(), operations.V2GetLedgerRequest{ + Ledger: "default", + }) + Expect(err).To(BeNil()) + Expect(ledger.Metadata).To(BeEmpty()) + }) + }) + }) + }) +}) diff --git a/test/e2e/api_logs_list.go b/test/e2e/api_logs_list.go new file mode 100644 index 000000000..912fe76d3 --- /dev/null +++ b/test/e2e/api_logs_list.go @@ -0,0 +1,328 @@ +//go:build it + +package test_suite + +import ( + "fmt" + "github.com/formancehq/go-libs/v2/logging" + "github.com/formancehq/go-libs/v2/pointer" + . "github.com/formancehq/ledger/pkg/testserver" + "github.com/formancehq/stack/ledger/client/models/components" + "github.com/formancehq/stack/ledger/client/models/operations" + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" + "math/big" + "sort" + "time" +) + +var _ = Context("Ledger logs list API tests", func() { + var ( + db = UseTemplatedDatabase() + ctx = logging.TestingContext() + ) + + testServer := NewTestServer(func() Configuration { + return Configuration{ + PostgresConfiguration: db.GetValue().ConnectionOptions(), + Output: GinkgoWriter, + Debug: debug, + NatsURL: natsServer.GetValue().ClientURL(), + } + }) + BeforeEach(func() { + err := CreateLedger(ctx, testServer.GetValue(), operations.V2CreateLedgerRequest{ + Ledger: "default", + }) + Expect(err).To(BeNil()) + + err = CreateLedger(ctx, testServer.GetValue(), operations.V2CreateLedgerRequest{ + Ledger: "another", + }) + Expect(err).To(BeNil()) + }) + When("listing logs", func() { + var ( + timestamp1 = time.Date(2023, 4, 11, 10, 0, 0, 0, time.UTC) + timestamp2 = time.Date(2023, 4, 12, 10, 0, 0, 0, time.UTC) + + m1 = map[string]string{ + "clientType": "silver", + } + m2 = map[string]string{ + "clientType": "gold", + } + ) + BeforeEach(func() { + _, err := CreateTransaction( + ctx, + testServer.GetValue(), + operations.V2CreateTransactionRequest{ + V2PostTransaction: components.V2PostTransaction{ + Metadata: map[string]string{}, + Postings: []components.V2Posting{{ + Amount: big.NewInt(100), + Asset: "USD", + Source: "world", + Destination: "foo:foo", + }}, + Timestamp: ×tamp1, + }, + Ledger: "default", + }, + ) + Expect(err).ToNot(HaveOccurred()) + + _, err = CreateTransaction( + ctx, + testServer.GetValue(), + operations.V2CreateTransactionRequest{ + V2PostTransaction: components.V2PostTransaction{ + Metadata: map[string]string{}, + Postings: []components.V2Posting{{ + Amount: big.NewInt(100), + Asset: "USD", + Source: "world", + Destination: "foo:foo", + }}, + Timestamp: ×tamp1, + }, + Ledger: "another", + }, + ) + Expect(err).ToNot(HaveOccurred()) + + _, err = CreateTransaction( + ctx, + testServer.GetValue(), + operations.V2CreateTransactionRequest{ + V2PostTransaction: components.V2PostTransaction{ + Metadata: m1, + Postings: []components.V2Posting{{ + Amount: big.NewInt(200), + Asset: "USD", + Source: "world", + Destination: "foo:bar", + }}, + Timestamp: ×tamp2, + }, + Ledger: "default", + }, + ) + Expect(err).ToNot(HaveOccurred()) + + err = AddMetadataToAccount( + ctx, + testServer.GetValue(), + operations.V2AddMetadataToAccountRequest{ + RequestBody: m2, + Address: "foo:baz", + Ledger: "default", + }, + ) + Expect(err).ToNot(HaveOccurred()) + }) + It("should be listed on api with ListLogs", func() { + response, err := ListLogs( + ctx, + testServer.GetValue(), + operations.V2ListLogsRequest{ + Ledger: "default", + }, + ) + Expect(err).ToNot(HaveOccurred()) + + Expect(response.Data).To(HaveLen(3)) + + // Cannot check the date and the hash since they are changing at + // every run + Expect(response.Data[0].ID).To(Equal(big.NewInt(3))) + Expect(response.Data[0].Type).To(Equal(components.V2LogTypeSetMetadata)) + Expect(response.Data[0].Data).To(Equal(map[string]any{ + "targetType": "ACCOUNT", + "metadata": map[string]any{ + "clientType": "gold", + }, + "targetId": "foo:baz", + })) + + Expect(response.Data[1].ID).To(Equal(big.NewInt(2))) + Expect(response.Data[1].Type).To(Equal(components.V2LogTypeNewTransaction)) + // Cannot check date and txid inside Data since they are changing at + // every run + Expect(response.Data[1].Data["accountMetadata"]).To(Equal(map[string]any{})) + Expect(response.Data[1].Data["transaction"]).To(BeAssignableToTypeOf(map[string]any{})) + transaction := 
response.Data[1].Data["transaction"].(map[string]any) + Expect(transaction["metadata"]).To(Equal(map[string]any{ + "clientType": "silver", + })) + Expect(transaction["timestamp"]).To(Equal("2023-04-12T10:00:00Z")) + Expect(transaction["postings"]).To(Equal([]any{ + map[string]any{ + "amount": float64(200), + "asset": "USD", + "source": "world", + "destination": "foo:bar", + }, + })) + + Expect(response.Data[2].ID).To(Equal(big.NewInt(1))) + Expect(response.Data[2].Type).To(Equal(components.V2LogTypeNewTransaction)) + Expect(response.Data[2].Data["accountMetadata"]).To(Equal(map[string]any{})) + Expect(response.Data[2].Data["transaction"]).To(BeAssignableToTypeOf(map[string]any{})) + transaction = response.Data[2].Data["transaction"].(map[string]any) + Expect(transaction["metadata"]).To(Equal(map[string]any{})) + Expect(transaction["timestamp"]).To(Equal("2023-04-11T10:00:00Z")) + Expect(transaction["postings"]).To(Equal([]any{ + map[string]any{ + "amount": float64(100), + "asset": "USD", + "source": "world", + "destination": "foo:foo", + }, + })) + }) + }) + + type expectedLog struct { + id *big.Int + typ components.V2LogType + postings []any + } + + var ( + compareLogs = func(log components.V2Log, expected expectedLog) { + Expect(log.ID).To(Equal(expected.id)) + Expect(log.Type).To(Equal(expected.typ)) + Expect(log.Data["accountMetadata"]).To(Equal(map[string]any{})) + Expect(log.Data["transaction"]).To(BeAssignableToTypeOf(map[string]any{})) + transaction := log.Data["transaction"].(map[string]any) + Expect(transaction["metadata"]).To(Equal(map[string]any{})) + Expect(transaction["postings"]).To(Equal(expected.postings)) + } + ) + + const ( + pageSize = int64(10) + accountCounts = 2 * pageSize + ) + When("creating logs with transactions", func() { + var ( + expectedLogs []expectedLog + ) + BeforeEach(func() { + for i := int64(0); i < accountCounts; i++ { + now := time.Now().Round(time.Millisecond).UTC() + + _, err := CreateTransaction( + ctx, + testServer.GetValue(), + operations.V2CreateTransactionRequest{ + V2PostTransaction: components.V2PostTransaction{ + Metadata: map[string]string{}, + Postings: []components.V2Posting{{ + Amount: big.NewInt(100), + Asset: "USD", + Source: "world", + Destination: fmt.Sprintf("foo:%d", i), + }}, + Timestamp: &now, + }, + Ledger: "default", + }, + ) + Expect(err).ToNot(HaveOccurred()) + + expectedLogs = append(expectedLogs, expectedLog{ + id: big.NewInt(i + 1), + typ: components.V2LogTypeNewTransaction, + postings: []any{ + map[string]any{ + "amount": float64(100), + "asset": "USD", + "source": "world", + "destination": fmt.Sprintf("foo:%d", i), + }, + }, + }) + } + + sort.Slice(expectedLogs, func(i, j int) bool { + return expectedLogs[i].id.Cmp(expectedLogs[j].id) > 0 + }) + }) + AfterEach(func() { + expectedLogs = nil + }) + When(fmt.Sprintf("listing accounts using page size of %d", pageSize), func() { + var ( + rsp *components.V2LogsCursorResponseCursor + err error + ) + BeforeEach(func() { + rsp, err = ListLogs( + ctx, + testServer.GetValue(), + operations.V2ListLogsRequest{ + Ledger: "default", + PageSize: pointer.For(pageSize), + }, + ) + Expect(err).ToNot(HaveOccurred()) + + Expect(rsp.HasMore).To(BeTrue()) + Expect(rsp.Previous).To(BeNil()) + Expect(rsp.Next).NotTo(BeNil()) + }) + It("should return the first page", func() { + Expect(rsp.PageSize).To(Equal(pageSize)) + Expect(len(rsp.Data)).To(Equal(len(expectedLogs[:pageSize]))) + for i := range rsp.Data { + compareLogs(rsp.Data[i], expectedLogs[i]) + } + }) + When("following next cursor", 
func() { + BeforeEach(func() { + rsp, err = ListLogs( + ctx, + testServer.GetValue(), + operations.V2ListLogsRequest{ + Cursor: rsp.Next, + Ledger: "default", + }, + ) + Expect(err).ToNot(HaveOccurred()) + }) + It("should return next page", func() { + Expect(rsp.PageSize).To(Equal(pageSize)) + Expect(len(rsp.Data)).To(Equal(len(expectedLogs[pageSize : 2*pageSize]))) + for i := range rsp.Data { + compareLogs(rsp.Data[i], expectedLogs[int64(i)+pageSize]) + } + Expect(rsp.Next).To(BeNil()) + }) + When("following previous cursor", func() { + BeforeEach(func() { + rsp, err = ListLogs( + ctx, + testServer.GetValue(), + operations.V2ListLogsRequest{ + Cursor: rsp.Previous, + Ledger: "default", + }, + ) + Expect(err).ToNot(HaveOccurred()) + }) + It("should return first page", func() { + Expect(rsp.PageSize).To(Equal(pageSize)) + Expect(len(rsp.Data)).To(Equal(len(expectedLogs[:pageSize]))) + for i := range rsp.Data { + compareLogs(rsp.Data[i], expectedLogs[i]) + } + Expect(rsp.Previous).To(BeNil()) + }) + }) + }) + }) + }) +}) diff --git a/test/e2e/api_transactions_create.go b/test/e2e/api_transactions_create.go new file mode 100644 index 000000000..0da493620 --- /dev/null +++ b/test/e2e/api_transactions_create.go @@ -0,0 +1,487 @@ +//go:build it + +package test_suite + +import ( + "github.com/formancehq/go-libs/v2/logging" + . "github.com/formancehq/go-libs/v2/testing/api" + ledger "github.com/formancehq/ledger/internal" + "github.com/formancehq/ledger/internal/bus" + . "github.com/formancehq/ledger/pkg/testserver" + "github.com/formancehq/stack/ledger/client/models/components" + "github.com/formancehq/stack/ledger/client/models/operations" + "math/big" + "time" + + "github.com/formancehq/go-libs/v2/metadata" + "github.com/formancehq/go-libs/v2/pointer" + ledgerevents "github.com/formancehq/ledger/pkg/events" + "github.com/nats-io/nats.go" + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" +) + +var _ = Context("Ledger accounts list API tests", func() { + var ( + db = UseTemplatedDatabase() + ctx = logging.TestingContext() + ) + + testServer := NewTestServer(func() Configuration { + return Configuration{ + PostgresConfiguration: db.GetValue().ConnectionOptions(), + Output: GinkgoWriter, + Debug: debug, + NatsURL: natsServer.GetValue().ClientURL(), + } + }) + + BeforeEach(func() { + err := CreateLedger(ctx, testServer.GetValue(), operations.V2CreateLedgerRequest{ + Ledger: "default", + }) + Expect(err).To(BeNil()) + + err = CreateLedger(ctx, testServer.GetValue(), operations.V2CreateLedgerRequest{ + Ledger: "test", + }) + Expect(err).To(BeNil()) + }) + + When("creating a transaction", func() { + var ( + events chan *nats.Msg + timestamp = time.Now().Round(time.Second).UTC() + rsp *components.V2Transaction + req operations.V2CreateTransactionRequest + err error + ) + BeforeEach(func() { + events = testServer.GetValue().Subscribe() + req = operations.V2CreateTransactionRequest{ + V2PostTransaction: components.V2PostTransaction{ + Timestamp: ×tamp, + }, + Ledger: "default", + } + }) + JustBeforeEach(func() { + // Create a transaction + rsp, err = CreateTransaction(ctx, testServer.GetValue(), req) + }) + Context("with valid data", func() { + BeforeEach(func() { + req = operations.V2CreateTransactionRequest{ + V2PostTransaction: components.V2PostTransaction{ + Metadata: map[string]string{}, + Postings: []components.V2Posting{ + { + Amount: big.NewInt(100), + Asset: "USD", + Source: "world", + Destination: "alice", + }, + }, + Timestamp: ×tamp, + Reference: pointer.For("foo"), + }, + Ledger: "default", + } + }) + It("should be ok", func() { + response, err := GetTransaction( + ctx, + testServer.GetValue(), + operations.V2GetTransactionRequest{ + Ledger: "default", + ID: rsp.ID, + Expand: pointer.For("volumes"), + }, + ) + Expect(err).ToNot(HaveOccurred()) + + Expect(*response).To(Equal(components.V2Transaction{ + Timestamp: rsp.Timestamp, + InsertedAt: rsp.InsertedAt, + Postings: rsp.Postings, + Reference: rsp.Reference, + Metadata: rsp.Metadata, + ID: rsp.ID, + PreCommitVolumes: map[string]map[string]components.V2Volume{ + "world": { + "USD": { + Input: big.NewInt(0), + Output: big.NewInt(0), + Balance: big.NewInt(0), + }, + }, + "alice": { + "USD": { + Input: big.NewInt(0), + Output: big.NewInt(0), + Balance: big.NewInt(0), + }, + }, + }, + PostCommitVolumes: map[string]map[string]components.V2Volume{ + "world": { + "USD": { + Input: big.NewInt(0), + Output: big.NewInt(100), + Balance: big.NewInt(-100), + }, + }, + "alice": { + "USD": { + Input: big.NewInt(100), + Output: big.NewInt(0), + Balance: big.NewInt(100), + }, + }, + }, + })) + + account, err := GetAccount( + ctx, + testServer.GetValue(), + operations.V2GetAccountRequest{ + Address: "alice", + Ledger: "default", + Expand: pointer.For("volumes"), + }, + ) + Expect(err).ToNot(HaveOccurred()) + + Expect(*account).Should(Equal(components.V2Account{ + Address: "alice", + Metadata: metadata.Metadata{}, + Volumes: map[string]components.V2Volume{ + "USD": { + Input: big.NewInt(100), + Output: big.NewInt(0), + Balance: big.NewInt(100), + }, + }, + })) + By("should trigger a new event", func() { + Eventually(events).Should(Receive(Event(ledgerevents.EventTypeCommittedTransactions, WithPayload(bus.CommittedTransactions{ + Ledger: "default", + Transactions: []ledger.Transaction{ConvertSDKTxToCoreTX(rsp)}, + AccountMetadata: ledger.AccountMetadata{}, + })))) + }) + }) + When("using a reference", func() { + 
BeforeEach(func() { + req.V2PostTransaction.Reference = pointer.For("foo") + }) + It("should be ok", func() { + Expect(err).To(BeNil()) + }) + When("trying to commit a new transaction with the same reference", func() { + JustBeforeEach(func() { + _, err = CreateTransaction(ctx, testServer.GetValue(), req) + Expect(err).To(HaveOccurred()) + Expect(err).To(HaveErrorCode(string(components.V2ErrorsEnumConflict))) + }) + It("Should fail with "+string(components.V2ErrorsEnumConflict)+" error code", func() {}) + }) + }) + }) + When("with insufficient funds", func() { + BeforeEach(func() { + req.V2PostTransaction.Postings = []components.V2Posting{{ + Amount: big.NewInt(100), + Asset: "USD", + Source: "bob", + Destination: "alice", + }} + }) + It("should fail", func() { + Expect(err).To(HaveOccurred()) + Expect(err).To(HaveErrorCode(string(components.V2ErrorsEnumInsufficientFund))) + }) + }) + When("with nil amount", func() { + BeforeEach(func() { + req.V2PostTransaction.Postings = []components.V2Posting{{ + Asset: "USD", + Source: "bob", + Destination: "alice", + }} + }) + It("should fail", func() { + Expect(err).To(HaveOccurred()) + Expect(err).To(HaveErrorCode(string(components.V2ErrorsEnumValidation))) + }) + }) + When("with negative amount", func() { + BeforeEach(func() { + req.V2PostTransaction.Postings = []components.V2Posting{{ + Amount: big.NewInt(-100), + Asset: "USD", + Source: "bob", + Destination: "alice", + }} + }) + It("should fail", func() { + Expect(err).To(HaveOccurred()) + Expect(err).To(HaveErrorCode(string(components.V2ErrorsEnumValidation))) + }) + }) + When("with invalid source address", func() { + BeforeEach(func() { + req.V2PostTransaction.Postings = []components.V2Posting{{ + Amount: big.NewInt(-100), + Asset: "USD", + Source: "bob;test", + Destination: "alice", + }} + }) + It("should fail", func() { + Expect(err).To(HaveOccurred()) + Expect(err).To(HaveErrorCode(string(components.V2ErrorsEnumValidation))) + }) + }) + When("with invalid destination address", func() { + BeforeEach(func() { + req.V2PostTransaction.Postings = []components.V2Posting{{ + Amount: big.NewInt(-100), + Asset: "USD", + Source: "bob", + Destination: "alice;test", + }} + }) + It("should fail", func() { + Expect(err).To(HaveOccurred()) + Expect(err).To(HaveErrorCode(string(components.V2ErrorsEnumValidation))) + }) + }) + When("with invalid asset", func() { + BeforeEach(func() { + req.V2PostTransaction.Postings = []components.V2Posting{{ + Amount: big.NewInt(-100), + Asset: "USD//2", + Source: "bob", + Destination: "alice", + }} + }) + It("should fail", func() { + Expect(err).To(HaveOccurred()) + Expect(err).To(HaveErrorCode(string(components.V2ErrorsEnumValidation))) + }) + }) + When("using an idempotency key and a specific ledger", func() { + BeforeEach(func() { + req = operations.V2CreateTransactionRequest{ + IdempotencyKey: pointer.For("foo"), + V2PostTransaction: components.V2PostTransaction{ + Metadata: map[string]string{}, + Postings: []components.V2Posting{ + { + Amount: big.NewInt(100), + Asset: "USD", + Source: "world", + Destination: "alice", + }, + }, + Timestamp: ×tamp, + Reference: pointer.For("foo"), + }, + Ledger: "default", + } + }) + It("should be ok", func() { + Expect(err).To(Succeed()) + Expect(rsp.ID).To(Equal(big.NewInt(1))) + }) + When("creating a ledger transaction with same ik and different ledger", func() { + JustBeforeEach(func() { + rsp, err = CreateTransaction(ctx, testServer.GetValue(), req) + }) + It("should not have an error", func() { + Expect(err).To(Succeed()) + 
Expect(rsp.ID).To(Equal(big.NewInt(1))) + }) + }) + }) + When("using a negative amount in a script", func() { + BeforeEach(func() { + req = operations.V2CreateTransactionRequest{ + IdempotencyKey: pointer.For("testing"), + V2PostTransaction: components.V2PostTransaction{ + Metadata: map[string]string{}, + Script: &components.V2PostTransactionScript{ + Plain: `send [COIN -100] ( + source = @world + destination = @bob + )`, + Vars: map[string]interface{}{}, + }, + }, + Ledger: "default", + } + }) + It("should fail with "+string(components.V2ErrorsEnumCompilationFailed)+" code", func() { + Expect(err).NotTo(Succeed()) + Expect(err).To(HaveErrorCode(string(components.V2ErrorsEnumCompilationFailed))) + }) + }) + When("using a negative amount in the script with a variable", func() { + BeforeEach(func() { + req = operations.V2CreateTransactionRequest{ + IdempotencyKey: pointer.For("testing"), + V2PostTransaction: components.V2PostTransaction{ + Metadata: map[string]string{}, + Script: &components.V2PostTransactionScript{ + Plain: `vars { + monetary $amount + } + send $amount ( + source = @world + destination = @bob + )`, + Vars: map[string]interface{}{ + "amount": "USD -100", + }, + }, + }, + Ledger: "default", + } + }) + It("should fail with "+string(components.V2ErrorsEnumCompilationFailed)+" code", func() { + Expect(err).NotTo(Succeed()) + Expect(err).To(HaveErrorCode(string(components.V2ErrorsEnumCompilationFailed))) + }) + }) + Context("with error on script", func() { + BeforeEach(func() { + req = operations.V2CreateTransactionRequest{ + IdempotencyKey: pointer.For("testing"), + V2PostTransaction: components.V2PostTransaction{ + Metadata: map[string]string{}, + Script: &components.V2PostTransactionScript{ + Plain: `XXX`, + Vars: map[string]interface{}{}, + }, + }, + Ledger: "default", + } + }) + It("should fail with "+string(components.V2ErrorsEnumCompilationFailed)+" code", func() { + Expect(err).NotTo(Succeed()) + Expect(err).To(HaveErrorCode(string(components.V2ErrorsEnumCompilationFailed))) + }) + }) + Context("with no postings", func() { + BeforeEach(func() { + req = operations.V2CreateTransactionRequest{ + IdempotencyKey: pointer.For("testing"), + V2PostTransaction: components.V2PostTransaction{ + Metadata: map[string]string{}, + Script: &components.V2PostTransactionScript{ + Plain: `vars { + monetary $amount + } + set_tx_meta("foo", "bar") + `, + Vars: map[string]interface{}{ + "amount": "USD 100", + }, + }, + }, + Ledger: "default", + } + }) + It("should fail with "+string(components.V2ErrorsEnumNoPostings)+" code", func() { + Expect(err).NotTo(Succeed()) + Expect(err).To(HaveErrorCode(string(components.V2ErrorsEnumNoPostings))) + }) + }) + When("with metadata override", func() { + BeforeEach(func() { + req = operations.V2CreateTransactionRequest{ + IdempotencyKey: pointer.For("testing"), + V2PostTransaction: components.V2PostTransaction{ + Metadata: map[string]string{ + "foo": "baz", + }, + Script: &components.V2PostTransactionScript{ + Plain: `send [COIN 100] ( + source = @world + destination = @bob + ) + set_tx_meta("foo", "bar")`, + Vars: map[string]interface{}{}, + }, + }, + Ledger: "default", + } + }) + It("should fail with "+string(components.V2ErrorsEnumMetadataOverride)+" code", func() { + Expect(err).NotTo(Succeed()) + Expect(err).To(HaveErrorCode(string(components.V2ErrorsEnumMetadataOverride))) + }) + }) + When("with dry run mode", func() { + BeforeEach(func() { + req = operations.V2CreateTransactionRequest{ + IdempotencyKey: pointer.For("testing"), + V2PostTransaction: 
components.V2PostTransaction{ + Metadata: map[string]string{}, + Script: &components.V2PostTransactionScript{ + Plain: `send [COIN 100] ( + source = @world + destination = @bob + )`, + Vars: map[string]interface{}{}, + }, + }, + DryRun: pointer.For(true), + Ledger: "default", + } + }) + It("should be ok", func() { + Expect(err).To(BeNil()) + }) + }) + }) + + When("creating a transaction on the ledger v1 with old variable format", func() { + var ( + err error + response *operations.CreateTransactionResponse + ) + BeforeEach(func() { + v, _ := big.NewInt(0).SetString("1320000000000000000000000000000000000000000000000001", 10) + response, err = testServer.GetValue().Client().Ledger.V1.CreateTransaction( + ctx, + operations.CreateTransactionRequest{ + PostTransaction: components.PostTransaction{ + Metadata: map[string]any{}, + Script: &components.PostTransactionScript{ + Plain: `vars { + monetary $amount + } + send $amount ( + source = @world + destination = @bob + )`, + Vars: map[string]interface{}{ + "amount": map[string]any{ + "asset": "EUR/12", + "amount": v, + }, + }, + }, + }, + Ledger: "default", + }, + ) + }) + It("should be ok", func() { + Expect(err).To(Succeed()) + Expect(response.TransactionsResponse.Data[0].Txid).To(Equal(big.NewInt(1))) + }) + }) +}) diff --git a/test/e2e/api_transactions_list.go b/test/e2e/api_transactions_list.go new file mode 100644 index 000000000..801d3d320 --- /dev/null +++ b/test/e2e/api_transactions_list.go @@ -0,0 +1,988 @@ +//go:build it + +package test_suite + +import ( + "fmt" + "github.com/formancehq/go-libs/v2/bun/bunpaginate" + "github.com/formancehq/go-libs/v2/logging" + . "github.com/formancehq/go-libs/v2/testing/api" + . "github.com/formancehq/ledger/pkg/testserver" + "github.com/formancehq/stack/ledger/client/models/components" + "github.com/formancehq/stack/ledger/client/models/operations" + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" + "math/big" + "sort" + "time" + + "github.com/formancehq/go-libs/v2/metadata" + "github.com/formancehq/go-libs/v2/pointer" +) + +var _ = Context("Ledger transactions list API tests", func() { + var ( + db = UseTemplatedDatabase() + ctx = logging.TestingContext() + ) + + testServer := NewTestServer(func() Configuration { + return Configuration{ + PostgresConfiguration: db.GetValue().ConnectionOptions(), + Output: GinkgoWriter, + Debug: debug, + NatsURL: natsServer.GetValue().ClientURL(), + } + }) + JustBeforeEach(func() { + err := CreateLedger(ctx, testServer.GetValue(), operations.V2CreateLedgerRequest{ + Ledger: "default", + }) + Expect(err).To(BeNil()) + }) + const ( + pageSize = int64(10) + txCount = 2 * pageSize + ) + When(fmt.Sprintf("creating %d transactions", txCount), func() { + var ( + timestamp = time.Now().Round(time.Second).UTC() + transactions []components.V2Transaction + ) + JustBeforeEach(func() { + for i := 0; i < int(txCount); i++ { + offset := time.Duration(int(txCount)-i) * time.Minute + // 1 transaction of 2 is backdated to test pagination using effective date + if offset%2 == 0 { + offset += 1 + } else { + offset -= 1 + } + txTimestamp := timestamp.Add(-offset) + + response, err := CreateTransaction( + ctx, + testServer.GetValue(), + operations.V2CreateTransactionRequest{ + V2PostTransaction: components.V2PostTransaction{ + Metadata: map[string]string{}, + Postings: []components.V2Posting{ + { + Amount: big.NewInt(100), + Asset: "USD", + Source: "world", + Destination: fmt.Sprintf("account:%d", i), + }, + }, + Timestamp: pointer.For(txTimestamp), + }, + Ledger: "default", + }, + ) + Expect(err).ToNot(HaveOccurred()) + + transactions = append([]components.V2Transaction{ + *response, + }, transactions...) 
+ } + }) + AfterEach(func() { + transactions = nil + }) + When(fmt.Sprintf("listing transactions using page size of %d", pageSize), func() { + var ( + rsp *components.V2TransactionsCursorResponseCursor + req operations.V2ListTransactionsRequest + err error + ) + BeforeEach(func() { + req = operations.V2ListTransactionsRequest{ + Ledger: "default", + PageSize: pointer.For(pageSize), + Expand: pointer.For("volumes,effectiveVolumes"), + Pit: pointer.For(time.Now()), + RequestBody: map[string]any{ + "$and": []map[string]any{ + { + "$match": map[string]any{ + "source": "world", + }, + }, + { + "$not": map[string]any{ + "$exists": map[string]any{ + "metadata": "foo", + }, + }, + }, + }, + }, + } + }) + JustBeforeEach(func() { + rsp, err = ListTransactions(ctx, testServer.GetValue(), req) + Expect(err).ToNot(HaveOccurred()) + + Expect(rsp.HasMore).To(BeTrue()) + Expect(rsp.Previous).To(BeNil()) + Expect(rsp.Next).NotTo(BeNil()) + }) + Context("with effective ordering", func() { + BeforeEach(func() { + req.Order = pointer.For(operations.OrderEffective) + }) + It("Should be ok, and returns transactions ordered by effective timestamp", func() { + Expect(rsp.PageSize).To(Equal(pageSize)) + sorted := transactions[:pageSize] + sort.SliceStable(sorted, func(i, j int) bool { + return sorted[i].Timestamp.After(sorted[j].Timestamp) + }) + Expect(rsp.Data).To(Equal(sorted)) + }) + }) + It("Should be ok", func() { + Expect(rsp.PageSize).To(Equal(pageSize)) + Expect(rsp.Data).To(Equal(transactions[:pageSize])) + }) + When("following next cursor", func() { + JustBeforeEach(func() { + + // Create a new transaction to ensure cursor is stable + _, err := CreateTransaction( + ctx, + testServer.GetValue(), + operations.V2CreateTransactionRequest{ + V2PostTransaction: components.V2PostTransaction{ + Metadata: map[string]string{}, + Postings: []components.V2Posting{ + { + Amount: big.NewInt(100), + Asset: "USD", + Source: "world", + Destination: "account:0", + }, + }, + Timestamp: pointer.For(time.Now()), + }, + Ledger: "default", + }, + ) + Expect(err).ToNot(HaveOccurred()) + + rsp, err = ListTransactions( + ctx, + testServer.GetValue(), + operations.V2ListTransactionsRequest{ + Cursor: rsp.Next, + Ledger: "default", + }, + ) + Expect(err).ToNot(HaveOccurred()) + }) + It("should return next page", func() { + Expect(rsp.PageSize).To(Equal(pageSize)) + Expect(rsp.Data).To(Equal(transactions[pageSize : 2*pageSize])) + Expect(rsp.Next).To(BeNil()) + }) + When("following previous cursor", func() { + JustBeforeEach(func() { + var err error + rsp, err = ListTransactions( + ctx, + testServer.GetValue(), + operations.V2ListTransactionsRequest{ + Cursor: rsp.Previous, + Ledger: "default", + Expand: pointer.For("volumes,effectiveVolumes"), + }, + ) + Expect(err).ToNot(HaveOccurred()) + }) + It("should return first page", func() { + Expect(rsp.PageSize).To(Equal(pageSize)) + Expect(rsp.Data).To(Equal(transactions[:pageSize])) + Expect(rsp.Previous).To(BeNil()) + }) + }) + }) + }) + + When("listing transactions using filter on a single match", func() { + var ( + err error + response *components.V2TransactionsCursorResponseCursor + now = time.Now().Round(time.Second).UTC() + ) + JustBeforeEach(func() { + response, err = ListTransactions( + ctx, + testServer.GetValue(), + operations.V2ListTransactionsRequest{ + RequestBody: map[string]interface{}{ + "$match": map[string]any{ + "source": "world", + }, + }, + Ledger: "default", + PageSize: pointer.For(pageSize), + Pit: &now, + }, + ) + Expect(err).To(BeNil()) + }) + It("Should be 
ok", func() { + Expect(response.Next).NotTo(BeNil()) + cursor := &bunpaginate.ColumnPaginatedQuery[map[string]any]{} + Expect(bunpaginate.UnmarshalCursor(*response.Next, cursor)).To(BeNil()) + Expect(cursor.Options).To(Equal(map[string]any{ + "qb": map[string]any{ + "$match": map[string]any{ + "source": "world", + }, + }, + "pageSize": float64(10), + "options": map[string]any{ + "pit": now.Format(time.RFC3339), + "oot": nil, + "volumes": false, + "effectiveVolumes": false, + }, + })) + }) + }) + When("listing transactions using filter on a single match", func() { + var ( + err error + response *components.V2TransactionsCursorResponseCursor + now = time.Now().Round(time.Second).UTC() + ) + JustBeforeEach(func() { + response, err = ListTransactions( + ctx, + testServer.GetValue(), + operations.V2ListTransactionsRequest{ + RequestBody: map[string]interface{}{ + "$and": []map[string]any{ + { + "$match": map[string]any{ + "source": "world", + }, + }, + { + "$match": map[string]any{ + "destination": "account:", + }, + }, + }, + }, + Ledger: "default", + PageSize: pointer.For(pageSize), + Pit: &now, + }, + ) + Expect(err).To(BeNil()) + }) + It("Should be ok", func() { + Expect(response.Next).NotTo(BeNil()) + cursor := &bunpaginate.ColumnPaginatedQuery[map[string]any]{} + Expect(bunpaginate.UnmarshalCursor(*response.Next, cursor)).To(BeNil()) + Expect(cursor.Options).To(Equal(map[string]any{ + "qb": map[string]any{ + "$and": []any{ + map[string]any{ + "$match": map[string]any{ + "source": "world", + }, + }, + map[string]any{ + "$match": map[string]any{ + "destination": "account:", + }, + }, + }, + }, + "pageSize": float64(10), + "options": map[string]any{ + "pit": now.Format(time.RFC3339), + "oot": nil, + "volumes": false, + "effectiveVolumes": false, + }, + })) + }) + }) + When("listing transactions using invalid filter", func() { + var ( + err error + ) + JustBeforeEach(func() { + _, err = ListTransactions( + ctx, + testServer.GetValue(), + operations.V2ListTransactionsRequest{ + RequestBody: map[string]interface{}{ + "$match": map[string]any{ + "invalid-key": 0, + }, + }, + Ledger: "default", + PageSize: pointer.For(pageSize), + }, + ) + Expect(err).To(HaveOccurred()) + }) + It("Should fail with "+string(components.V2ErrorsEnumInternal)+" error code", func() { + Expect(err).To(HaveErrorCode(string(components.V2ErrorsEnumInternal))) + }) + }) + }) + var ( + timestamp1 = time.Date(2023, 4, 10, 10, 0, 0, 0, time.UTC) + timestamp2 = time.Date(2023, 4, 11, 10, 0, 0, 0, time.UTC) + timestamp3 = time.Date(2023, 4, 12, 10, 0, 0, 0, time.UTC) + + m1 = metadata.Metadata{ + "foo": "bar", + } + ) + + var ( + t1 *components.V2Transaction + t2 *components.V2Transaction + t3 *components.V2Transaction + err error + ) + When("creating transactions", func() { + JustBeforeEach(func() { + t1, err = CreateTransaction( + ctx, + testServer.GetValue(), + operations.V2CreateTransactionRequest{ + V2PostTransaction: components.V2PostTransaction{ + Metadata: m1, + Postings: []components.V2Posting{ + { + Amount: big.NewInt(100), + Asset: "USD", + Source: "world", + Destination: "foo:foo", + }, + }, + Timestamp: ×tamp1, + }, + Ledger: "default", + }, + ) + Expect(err).ToNot(HaveOccurred()) + + t2, err = CreateTransaction( + ctx, + testServer.GetValue(), + operations.V2CreateTransactionRequest{ + V2PostTransaction: components.V2PostTransaction{ + Metadata: m1, + Postings: []components.V2Posting{ + { + Amount: big.NewInt(100), + Asset: "USD", + Source: "world", + Destination: "foo:bar", + }, + }, + Timestamp: ×tamp2, + }, + 
Ledger: "default", + }, + ) + Expect(err).ToNot(HaveOccurred()) + + t3, err = CreateTransaction( + ctx, + testServer.GetValue(), + operations.V2CreateTransactionRequest{ + V2PostTransaction: components.V2PostTransaction{ + Metadata: map[string]string{}, + Postings: []components.V2Posting{ + { + Amount: big.NewInt(100), + Asset: "USD", + Source: "world", + Destination: "foo:baz", + }, + }, + Timestamp: ×tamp3, + }, + Ledger: "default", + }, + ) + Expect(err).ToNot(HaveOccurred()) + }) + It("should be countable on api", func() { + response, err := CountTransactions( + ctx, + testServer.GetValue(), + operations.V2CountTransactionsRequest{ + Ledger: "default", + }, + ) + Expect(err).ToNot(HaveOccurred()) + Expect(response).To(Equal(3)) + + response, err = CountTransactions( + ctx, + testServer.GetValue(), + operations.V2CountTransactionsRequest{ + Ledger: "default", + RequestBody: map[string]interface{}{ + "$match": map[string]any{ + "account": "foo:", + }, + }, + }, + ) + Expect(err).ToNot(HaveOccurred()) + Expect(response).To(Equal(3)) + + response, err = CountTransactions( + ctx, + testServer.GetValue(), + operations.V2CountTransactionsRequest{ + Ledger: "default", + RequestBody: map[string]interface{}{ + "$match": map[string]any{ + "account": "not_existing", + }, + }, + }, + ) + Expect(err).ToNot(HaveOccurred()) + Expect(response).To(Equal(0)) + + response, err = CountTransactions( + ctx, + testServer.GetValue(), + operations.V2CountTransactionsRequest{ + Ledger: "default", + RequestBody: map[string]interface{}{ + "$match": map[string]any{ + "destination": ":baz", + }, + }, + }, + ) + Expect(err).ToNot(HaveOccurred()) + Expect(response).To(Equal(1)) + + response, err = CountTransactions( + ctx, + testServer.GetValue(), + operations.V2CountTransactionsRequest{ + Ledger: "default", + RequestBody: map[string]interface{}{ + "$match": map[string]any{ + "destination": "not_existing", + }, + }, + }, + ) + Expect(err).ToNot(HaveOccurred()) + Expect(response).To(Equal(0)) + + response, err = CountTransactions( + ctx, + testServer.GetValue(), + operations.V2CountTransactionsRequest{ + Ledger: "default", + RequestBody: map[string]interface{}{ + "$match": map[string]any{ + "source": "foo:", + }, + }, + }, + ) + Expect(err).ToNot(HaveOccurred()) + Expect(response).To(Equal(0)) + + response, err = CountTransactions( + ctx, + testServer.GetValue(), + operations.V2CountTransactionsRequest{ + Ledger: "default", + RequestBody: map[string]interface{}{ + "$match": map[string]any{ + "source": "world", + }, + }, + }, + ) + Expect(err).ToNot(HaveOccurred()) + Expect(response).To(Equal(3)) + + response, err = CountTransactions( + ctx, + testServer.GetValue(), + operations.V2CountTransactionsRequest{ + Ledger: "default", + RequestBody: map[string]interface{}{ + "$match": map[string]any{ + "metadata[foo]": "bar", + }, + }, + }, + ) + Expect(err).ToNot(HaveOccurred()) + Expect(response).To(Equal(2)) + + response, err = CountTransactions( + ctx, + testServer.GetValue(), + operations.V2CountTransactionsRequest{ + Ledger: "default", + RequestBody: map[string]interface{}{ + "$match": map[string]any{ + "metadata[foo]": "not_existing", + }, + }, + }, + ) + Expect(err).ToNot(HaveOccurred()) + Expect(response).To(Equal(0)) + + response, err = CountTransactions( + ctx, + testServer.GetValue(), + operations.V2CountTransactionsRequest{ + Ledger: "default", + RequestBody: map[string]interface{}{ + "$gte": map[string]any{ + "timestamp": timestamp2.Format(time.RFC3339), + }, + }, + }, + ) + Expect(err).ToNot(HaveOccurred()) + 
Expect(response).To(Equal(2)) + + response, err = CountTransactions( + ctx, + testServer.GetValue(), + operations.V2CountTransactionsRequest{ + Ledger: "default", + RequestBody: map[string]interface{}{ + "$gte": map[string]any{ + "timestamp": timestamp3.Format(time.RFC3339), + }, + }, + }, + ) + Expect(err).ToNot(HaveOccurred()) + Expect(response).To(Equal(1)) + + response, err = CountTransactions( + ctx, + testServer.GetValue(), + operations.V2CountTransactionsRequest{ + Ledger: "default", + RequestBody: map[string]interface{}{ + "$gte": map[string]any{ + "timestamp": time.Now().UTC().Format(time.RFC3339), + }, + }, + }, + ) + Expect(err).ToNot(HaveOccurred()) + Expect(response).To(Equal(0)) + + response, err = CountTransactions( + ctx, + testServer.GetValue(), + operations.V2CountTransactionsRequest{ + Ledger: "default", + RequestBody: map[string]interface{}{ + "$lt": map[string]any{ + "timestamp": timestamp3.Format(time.RFC3339), + }, + }, + }, + ) + Expect(err).ToNot(HaveOccurred()) + Expect(response).To(Equal(2)) + + response, err = CountTransactions( + ctx, + testServer.GetValue(), + operations.V2CountTransactionsRequest{ + Ledger: "default", + RequestBody: map[string]interface{}{ + "$lt": map[string]any{ + "timestamp": timestamp2.Format(time.RFC3339), + }, + }, + }, + ) + Expect(err).ToNot(HaveOccurred()) + Expect(response).To(Equal(1)) + + response, err = CountTransactions( + ctx, + testServer.GetValue(), + operations.V2CountTransactionsRequest{ + Ledger: "default", + RequestBody: map[string]interface{}{ + "$lt": map[string]any{ + "timestamp": time.Date(2023, 4, 9, 10, 0, 0, 0, time.UTC).Format(time.RFC3339), + }, + }, + }, + ) + Expect(err).ToNot(HaveOccurred()) + Expect(response).To(Equal(0)) + }) + It("should be listed on api", func() { + response, err := ListTransactions( + ctx, + testServer.GetValue(), + operations.V2ListTransactionsRequest{ + Ledger: "default", + Expand: pointer.For("volumes,effectiveVolumes"), + }, + ) + Expect(err).ToNot(HaveOccurred()) + Expect(response.Data).To(HaveLen(3)) + Expect(response.Data[0]).Should(Equal(*t3)) + Expect(response.Data[1]).Should(Equal(*t2)) + Expect(response.Data[2]).Should(Equal(*t1)) + + response, err = ListTransactions( + ctx, + testServer.GetValue(), + operations.V2ListTransactionsRequest{ + Ledger: "default", + Expand: pointer.For("volumes,effectiveVolumes"), + RequestBody: map[string]interface{}{ + "$match": map[string]any{ + "account": "foo:", + }, + }, + }, + ) + Expect(err).ToNot(HaveOccurred()) + Expect(response.Data).To(HaveLen(3)) + Expect(response.Data[0]).Should(Equal(*t3)) + Expect(response.Data[1]).Should(Equal(*t2)) + Expect(response.Data[2]).Should(Equal(*t1)) + + response, err = ListTransactions( + ctx, + testServer.GetValue(), + operations.V2ListTransactionsRequest{ + Ledger: "default", + Expand: pointer.For("volumes,effectiveVolumes"), + RequestBody: map[string]interface{}{ + "$match": map[string]any{ + "account": "not_existing", + }, + }, + }, + ) + Expect(err).ToNot(HaveOccurred()) + Expect(response.Data).To(HaveLen(0)) + + response, err = ListTransactions( + ctx, + testServer.GetValue(), + operations.V2ListTransactionsRequest{ + Ledger: "default", + Expand: pointer.For("volumes,effectiveVolumes"), + RequestBody: map[string]interface{}{ + "$match": map[string]any{ + "destination": "foo:", + }, + }, + }, + ) + Expect(err).ToNot(HaveOccurred()) + Expect(response.Data).To(HaveLen(3)) + Expect(response.Data[0]).Should(Equal(*t3)) + Expect(response.Data[1]).Should(Equal(*t2)) + 
Expect(response.Data[2]).Should(Equal(*t1)) + + response, err = ListTransactions( + ctx, + testServer.GetValue(), + operations.V2ListTransactionsRequest{ + Ledger: "default", + Expand: pointer.For("volumes,effectiveVolumes"), + RequestBody: map[string]interface{}{ + "$match": map[string]any{ + "destination": "not_existing", + }, + }, + }, + ) + Expect(err).ToNot(HaveOccurred()) + Expect(response.Data).To(HaveLen(0)) + + response, err = ListTransactions( + ctx, + testServer.GetValue(), + operations.V2ListTransactionsRequest{ + Ledger: "default", + Expand: pointer.For("volumes,effectiveVolumes"), + RequestBody: map[string]interface{}{ + "$match": map[string]any{ + "source": "foo:", + }, + }, + }, + ) + Expect(err).ToNot(HaveOccurred()) + Expect(response.Data).To(HaveLen(0)) + + response, err = ListTransactions( + ctx, + testServer.GetValue(), + operations.V2ListTransactionsRequest{ + Ledger: "default", + Expand: pointer.For("volumes,effectiveVolumes"), + RequestBody: map[string]interface{}{ + "$match": map[string]any{ + "source": "world", + }, + }, + }, + ) + Expect(err).ToNot(HaveOccurred()) + Expect(response.Data).To(HaveLen(3)) + Expect(response.Data[0]).Should(Equal(*t3)) + Expect(response.Data[1]).Should(Equal(*t2)) + Expect(response.Data[2]).Should(Equal(*t1)) + + response, err = ListTransactions( + ctx, + testServer.GetValue(), + operations.V2ListTransactionsRequest{ + Ledger: "default", + RequestBody: map[string]interface{}{ + "$match": map[string]any{ + "metadata[foo]": "bar", + }, + }, + Expand: pointer.For("volumes,effectiveVolumes"), + }, + ) + Expect(err).ToNot(HaveOccurred()) + Expect(response.Data).Should(HaveLen(2)) + Expect(response.Data[0]).Should(Equal(*t2)) + Expect(response.Data[1]).Should(Equal(*t1)) + + response, err = ListTransactions( + ctx, + testServer.GetValue(), + operations.V2ListTransactionsRequest{ + Ledger: "default", + RequestBody: map[string]interface{}{ + "$match": map[string]any{ + "metadata[foo]": "not_existing", + }, + }, + Expand: pointer.For("volumes,effectiveVolumes"), + }, + ) + Expect(err).ToNot(HaveOccurred()) + Expect(response.Data).Should(HaveLen(0)) + + response, err = ListTransactions( + ctx, + testServer.GetValue(), + operations.V2ListTransactionsRequest{ + Ledger: "default", + Expand: pointer.For("volumes,effectiveVolumes"), + RequestBody: map[string]interface{}{ + "$gte": map[string]any{ + "timestamp": timestamp2.Format(time.RFC3339), + }, + }, + }, + ) + Expect(err).ToNot(HaveOccurred()) + Expect(response.Data).Should(HaveLen(2)) + Expect(response.Data[0]).Should(Equal(*t3)) + Expect(response.Data[1]).Should(Equal(*t2)) + + response, err = ListTransactions( + ctx, + testServer.GetValue(), + operations.V2ListTransactionsRequest{ + Ledger: "default", + Expand: pointer.For("volumes,effectiveVolumes"), + RequestBody: map[string]interface{}{ + "$gte": map[string]any{ + "timestamp": timestamp3.Format(time.RFC3339), + }, + }, + }, + ) + Expect(err).ToNot(HaveOccurred()) + Expect(response.Data).Should(HaveLen(1)) + Expect(response.Data[0]).Should(Equal(*t3)) + + response, err = ListTransactions( + ctx, + testServer.GetValue(), + operations.V2ListTransactionsRequest{ + Ledger: "default", + Expand: pointer.For("volumes,effectiveVolumes"), + RequestBody: map[string]interface{}{ + "$gte": map[string]any{ + "timestamp": time.Now().UTC().Format(time.RFC3339), + }, + }, + }, + ) + Expect(err).ToNot(HaveOccurred()) + Expect(response.Data).Should(HaveLen(0)) + + response, err = ListTransactions( + ctx, + testServer.GetValue(), + 
operations.V2ListTransactionsRequest{ + Ledger: "default", + Expand: pointer.For("volumes,effectiveVolumes"), + RequestBody: map[string]interface{}{ + "$lt": map[string]any{ + "timestamp": timestamp3.Format(time.RFC3339), + }, + }, + }, + ) + Expect(err).ToNot(HaveOccurred()) + Expect(response.Data).Should(HaveLen(2)) + Expect(response.Data[0]).Should(Equal(*t2)) + Expect(response.Data[1]).Should(Equal(*t1)) + + response, err = ListTransactions( + ctx, + testServer.GetValue(), + operations.V2ListTransactionsRequest{ + Ledger: "default", + Expand: pointer.For("volumes,effectiveVolumes"), + RequestBody: map[string]interface{}{ + "$lt": map[string]any{ + "timestamp": timestamp2.Format(time.RFC3339), + }, + }, + }, + ) + Expect(err).ToNot(HaveOccurred()) + Expect(response.Data).Should(HaveLen(1)) + Expect(response.Data[0]).Should(Equal(*t1)) + + response, err = ListTransactions( + ctx, + testServer.GetValue(), + operations.V2ListTransactionsRequest{ + Ledger: "default", + Expand: pointer.For("volumes,effectiveVolumes"), + RequestBody: map[string]interface{}{ + "$lt": map[string]any{ + "timestamp": time.Date(2023, 4, 9, 10, 0, 0, 0, time.UTC).Format(time.RFC3339), + }, + }, + }, + ) + Expect(err).ToNot(HaveOccurred()) + Expect(response.Data).Should(HaveLen(0)) + + By("using $not operator on account 'world'", func() { + response, err = ListTransactions( + ctx, + testServer.GetValue(), + operations.V2ListTransactionsRequest{ + Ledger: "default", + Expand: pointer.For("volumes,effectiveVolumes"), + RequestBody: map[string]interface{}{ + "$not": map[string]any{ + "$match": map[string]any{ + "account": "foo:bar", + }, + }, + }, + }, + ) + Expect(err).ToNot(HaveOccurred()) + Expect(response.Data).Should(HaveLen(2)) + }) + }) + It("should be gettable on api", func() { + _, err := GetTransaction( + ctx, + testServer.GetValue(), + operations.V2GetTransactionRequest{ + Ledger: "default", + ID: t1.ID, + Expand: pointer.For("volumes,effectiveVolumes"), + }, + ) + Expect(err).ToNot(HaveOccurred()) + + _, err = GetTransaction( + ctx, + testServer.GetValue(), + operations.V2GetTransactionRequest{ + Ledger: "default", + ID: t2.ID, + Expand: pointer.For("volumes,effectiveVolumes"), + }, + ) + Expect(err).ToNot(HaveOccurred()) + + _, err = GetTransaction( + ctx, + testServer.GetValue(), + operations.V2GetTransactionRequest{ + Ledger: "default", + ID: t3.ID, + Expand: pointer.For("volumes,effectiveVolumes"), + }, + ) + Expect(err).ToNot(HaveOccurred()) + + _, err = GetTransaction( + ctx, + testServer.GetValue(), + operations.V2GetTransactionRequest{ + Ledger: "default", + ID: big.NewInt(666), + }, + ) + Expect(err).To(HaveOccurred()) + Expect(err).To(HaveErrorCode(string(components.V2ErrorsEnumNotFound))) + }) + }) + + When("counting and listing transactions empty", func() { + It("should be countable on api even if empty", func() { + response, err := CountTransactions( + ctx, + testServer.GetValue(), + operations.V2CountTransactionsRequest{ + Ledger: "default", + }, + ) + Expect(err).ToNot(HaveOccurred()) + Expect(response).To(Equal(0)) + }) + It("should be listed on api even if empty", func() { + response, err := ListTransactions( + ctx, + testServer.GetValue(), + operations.V2ListTransactionsRequest{ + Ledger: "default", + }, + ) + Expect(err).ToNot(HaveOccurred()) + Expect(response.Data).To(HaveLen(0)) + }) + }) +}) diff --git a/test/e2e/api_transactions_metadata.go b/test/e2e/api_transactions_metadata.go new file mode 100644 index 000000000..2cee5b4f6 --- /dev/null +++ b/test/e2e/api_transactions_metadata.go 
@@ -0,0 +1,125 @@ +//go:build it + +package test_suite + +import ( + "github.com/formancehq/go-libs/v2/logging" + . "github.com/formancehq/go-libs/v2/testing/api" + . "github.com/formancehq/ledger/pkg/testserver" + "github.com/formancehq/stack/ledger/client/models/components" + "github.com/formancehq/stack/ledger/client/models/operations" + "math/big" + "time" + + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" +) + +var _ = Context("Ledger accounts list API tests", func() { + var ( + db = UseTemplatedDatabase() + ctx = logging.TestingContext() + ) + + testServer := NewTestServer(func() Configuration { + return Configuration{ + PostgresConfiguration: db.GetValue().ConnectionOptions(), + Output: GinkgoWriter, + Debug: debug, + NatsURL: natsServer.GetValue().ClientURL(), + } + }) + BeforeEach(func() { + err := CreateLedger(ctx, testServer.GetValue(), operations.V2CreateLedgerRequest{ + Ledger: "default", + }) + Expect(err).To(BeNil()) + }) + When("creating a transaction on a ledger", func() { + var ( + timestamp = time.Now().Round(time.Second).UTC() + rsp *components.V2Transaction + err error + ) + BeforeEach(func() { + // Create a transaction + rsp, err = CreateTransaction( + ctx, + testServer.GetValue(), + operations.V2CreateTransactionRequest{ + V2PostTransaction: components.V2PostTransaction{ + Metadata: map[string]string{}, + Postings: []components.V2Posting{ + { + Amount: big.NewInt(100), + Asset: "USD", + Source: "world", + Destination: "alice", + }, + }, + Timestamp: ×tamp, + }, + Ledger: "default", + }, + ) + Expect(err).ToNot(HaveOccurred()) + + // Check existence on api + _, err := GetTransaction( + ctx, + testServer.GetValue(), + operations.V2GetTransactionRequest{ + Ledger: "default", + ID: rsp.ID, + }, + ) + Expect(err).ToNot(HaveOccurred()) + }) + It("should fail if the transaction does not exist", func() { + metadata := map[string]string{ + "foo": "bar", + } + + err := AddMetadataToTransaction( + ctx, + testServer.GetValue(), + operations.V2AddMetadataOnTransactionRequest{ + RequestBody: metadata, + Ledger: "default", + ID: big.NewInt(666), + }, + ) + Expect(err).To(HaveOccurred()) + Expect(err).To(HaveErrorCode(string(components.V2ErrorsEnumNotFound))) + }) + When("adding a metadata", func() { + metadata := map[string]string{ + "foo": "bar", + } + BeforeEach(func() { + err := AddMetadataToTransaction( + ctx, + testServer.GetValue(), + operations.V2AddMetadataOnTransactionRequest{ + RequestBody: metadata, + Ledger: "default", + ID: rsp.ID, + }, + ) + Expect(err).To(Succeed()) + }) + It("should be available on api", func() { + response, err := GetTransaction( + ctx, + testServer.GetValue(), + operations.V2GetTransactionRequest{ + Ledger: "default", + ID: rsp.ID, + }, + ) + Expect(err).ToNot(HaveOccurred()) + Expect(response.Metadata).Should(Equal(metadata)) + }) + }) + }) +}) diff --git a/test/e2e/api_transactions_revert.go b/test/e2e/api_transactions_revert.go new file mode 100644 index 000000000..d8d4e7028 --- /dev/null +++ b/test/e2e/api_transactions_revert.go @@ -0,0 +1,220 @@ +//go:build it + +package test_suite + +import ( + "github.com/formancehq/go-libs/v2/logging" + . "github.com/formancehq/go-libs/v2/testing/api" + . "github.com/formancehq/ledger/pkg/testserver" + "github.com/formancehq/stack/ledger/client/models/components" + "github.com/formancehq/stack/ledger/client/models/operations" + "math/big" + "time" + + "github.com/formancehq/go-libs/v2/pointer" + "github.com/nats-io/nats.go" + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" + + ledgerevents "github.com/formancehq/ledger/pkg/events" +) + +var _ = Context("Ledger accounts list API tests", func() { + var ( + db = UseTemplatedDatabase() + ctx = logging.TestingContext() + ) + + testServer := NewTestServer(func() Configuration { + return Configuration{ + PostgresConfiguration: db.GetValue().ConnectionOptions(), + Output: GinkgoWriter, + Debug: debug, + NatsURL: natsServer.GetValue().ClientURL(), + } + }) + BeforeEach(func() { + err := CreateLedger(ctx, testServer.GetValue(), operations.V2CreateLedgerRequest{ + Ledger: "default", + }) + Expect(err).To(BeNil()) + }) + When("creating a transaction on a ledger", func() { + var ( + timestamp = time.Now().Round(time.Second).UTC() + tx *components.V2Transaction + events chan *nats.Msg + err error + ) + BeforeEach(func() { + events = testServer.GetValue().Subscribe() + tx, err = CreateTransaction( + ctx, + testServer.GetValue(), + operations.V2CreateTransactionRequest{ + V2PostTransaction: components.V2PostTransaction{ + Metadata: map[string]string{}, + Postings: []components.V2Posting{ + { + Amount: big.NewInt(100), + Asset: "USD", + Source: "world", + Destination: "alice", + }, + }, + Timestamp: ×tamp, + }, + Ledger: "default", + }, + ) + Expect(err).ToNot(HaveOccurred()) + }) + When("transferring funds from destination to another account", func() { + BeforeEach(func() { + _, err := CreateTransaction( + ctx, + testServer.GetValue(), + operations.V2CreateTransactionRequest{ + V2PostTransaction: components.V2PostTransaction{ + Metadata: map[string]string{}, + Postings: []components.V2Posting{ + { + Amount: big.NewInt(100), + Asset: "USD", + Source: "alice", + Destination: "foo", + }, + }, + Timestamp: ×tamp, + }, + Ledger: "default", + }, + ) + Expect(err).ToNot(HaveOccurred()) + }) + When("trying to revert the original transaction", func() { + var ( + force bool + err error + ) + revertTx := func() { + _, err = RevertTransaction( + ctx, + testServer.GetValue(), + operations.V2RevertTransactionRequest{ + Force: pointer.For(force), + ID: tx.ID, + Ledger: "default", + }, + ) + } + JustBeforeEach(revertTx) + It("Should fail", func() { + Expect(err).To(HaveOccurred()) + Expect(err).To(HaveErrorCode(string(components.V2ErrorsEnumInsufficientFund))) + }) + Context("With forcing", func() { + BeforeEach(func() { + force = true + }) + It("Should be ok", func() { + Expect(err).ToNot(HaveOccurred()) + }) + }) + }) + }) + When("reverting it", func() { + BeforeEach(func() { + _, err := RevertTransaction( + ctx, + testServer.GetValue(), + operations.V2RevertTransactionRequest{ + Ledger: "default", + ID: tx.ID, + }, + ) + Expect(err).To(Succeed()) + }) + It("should trigger a new event", func() { + Eventually(events).Should(Receive(Event(ledgerevents.EventTypeRevertedTransaction))) + }) + It("should revert the original transaction", func() { + response, err := GetTransaction( + ctx, + testServer.GetValue(), + operations.V2GetTransactionRequest{ + Ledger: "default", + ID: tx.ID, + }, + ) + Expect(err).NotTo(HaveOccurred()) + + Expect(response.Reverted).To(BeTrue()) + }) + When("trying to revert again", func() { + It("should be rejected", func() { + _, err := RevertTransaction( + ctx, + testServer.GetValue(), + operations.V2RevertTransactionRequest{ + Ledger: "default", + ID: tx.ID, + }, + ) + Expect(err).NotTo(BeNil()) + Expect(err).To(HaveErrorCode(string(components.V2ErrorsEnumAlreadyRevert))) + }) + }) + }) + When("reverting it at effective date", func() { + BeforeEach(func() { + _, err := RevertTransaction( + 
ctx, + testServer.GetValue(), + operations.V2RevertTransactionRequest{ + Ledger: "default", + ID: tx.ID, + AtEffectiveDate: pointer.For(true), + }, + ) + Expect(err).To(Succeed()) + }) + It("should revert the original transaction at date of the original tx", func() { + response, err := GetTransaction( + ctx, + testServer.GetValue(), + operations.V2GetTransactionRequest{ + Ledger: "default", + ID: tx.ID, + }, + ) + Expect(err).NotTo(HaveOccurred()) + + Expect(response.Reverted).To(BeTrue()) + Expect(response.Timestamp).To(Equal(tx.Timestamp)) + }) + }) + When("reverting with dryRun", func() { + BeforeEach(func() { + _, err := RevertTransaction( + ctx, + testServer.GetValue(), + operations.V2RevertTransactionRequest{ + Ledger: "default", + ID: tx.ID, + DryRun: pointer.For(true), + }, + ) + Expect(err).To(Succeed()) + }) + It("should not revert the transaction", func() { + tx, err := GetTransaction(ctx, testServer.GetValue(), operations.V2GetTransactionRequest{ + Ledger: "default", + ID: tx.ID, + }) + Expect(err).To(BeNil()) + Expect(tx.Reverted).To(BeFalse()) + }) + }) + }) +}) diff --git a/test/e2e/api_volumes.go b/test/e2e/api_volumes.go new file mode 100644 index 000000000..bf2ef5138 --- /dev/null +++ b/test/e2e/api_volumes.go @@ -0,0 +1,259 @@ +//go:build it + +package test_suite + +import ( + "github.com/formancehq/go-libs/v2/logging" + "github.com/formancehq/go-libs/v2/pointer" + . "github.com/formancehq/ledger/pkg/testserver" + "github.com/formancehq/stack/ledger/client/models/components" + "github.com/formancehq/stack/ledger/client/models/operations" + "math/big" + "time" + + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" +) + +type Transaction struct { + Amount int64 + Asset string + Source string + Destination string + EffectiveDate time.Time +} + +var now = time.Now() + +var _ = Context("Ledger accounts list API tests", func() { + var ( + db = UseTemplatedDatabase() + ctx = logging.TestingContext() + ) + + testServer := NewTestServer(func() Configuration { + return Configuration{ + PostgresConfiguration: db.GetValue().ConnectionOptions(), + Output: GinkgoWriter, + Debug: debug, + NatsURL: natsServer.GetValue().ClientURL(), + } + }) + + transactions := []Transaction{ + {Amount: 100, Asset: "USD", Source: "world", Destination: "account:user1", EffectiveDate: now.Add(-4 * time.Hour)}, //user1:100, world:-100 + {Amount: 125, Asset: "USD", Source: "world", Destination: "account:user2", EffectiveDate: now.Add(-3 * time.Hour)}, //user1:100, user2:125, world:-225 + {Amount: 75, Asset: "USD", Source: "account:user1", Destination: "account:user2", EffectiveDate: now.Add(-2 * time.Hour)}, //user1:25, user2:200, world:-200 + {Amount: 175, Asset: "USD", Source: "world", Destination: "account:user1", EffectiveDate: now.Add(-1 * time.Hour)}, //user1:200, user2:200, world:-400 + {Amount: 50, Asset: "USD", Source: "account:user2", Destination: "bank", EffectiveDate: now}, //user1:200, user2:150, world:-400, bank:50 + {Amount: 100, Asset: "USD", Source: "account:user2", Destination: "account:user1", EffectiveDate: now.Add(1 * time.Hour)}, //user1:300, user2:50, world:-400, bank:50 + {Amount: 150, Asset: "USD", Source: "account:user1", Destination: "bank", EffectiveDate: now.Add(2 * time.Hour)}, //user1:150, user2:50, world:-400, bank:200 + } + + BeforeEach(func() { + err := CreateLedger(ctx, testServer.GetValue(), operations.V2CreateLedgerRequest{ + Ledger: "default", + }) + Expect(err).To(BeNil()) + + for _, transaction := range transactions { + _, err := CreateTransaction( + ctx, + 
testServer.GetValue(), + operations.V2CreateTransactionRequest{ + V2PostTransaction: components.V2PostTransaction{ + Metadata: map[string]string{}, + Postings: []components.V2Posting{ + { + Amount: big.NewInt(transaction.Amount), + Asset: transaction.Asset, + Source: transaction.Source, + Destination: transaction.Destination, + }, + }, + Timestamp: &transaction.EffectiveDate, + }, + Ledger: "default", + }, + ) + Expect(err).ToNot(HaveOccurred()) + } + }) + + When("Get current Volumes and Balances From origin of time till now (insertion-date)", func() { + It("should be ok", func() { + response, err := GetVolumesWithBalances( + ctx, + testServer.GetValue(), + operations.V2GetVolumesWithBalancesRequest{ + InsertionDate: pointer.For(true), + Ledger: "default", + }, + ) + Expect(err).ToNot(HaveOccurred()) + + Expect(len(response.Data)).To(Equal(4)) + for _, volume := range response.Data { + if volume.Account == "account:user1" { + Expect(volume.Balance).To(Equal(big.NewInt(150))) + } + if volume.Account == "account:user2" { + Expect(volume.Balance).To(Equal(big.NewInt(50))) + } + if volume.Account == "bank" { + Expect(volume.Balance).To(Equal(big.NewInt(200))) + } + if volume.Account == "world" { + Expect(volume.Balance).To(Equal(big.NewInt(-400))) + } + } + }) + }) + + When("Get Volumes and Balances From oot til oot+2 hours (effectiveDate) ", func() { + It("should be ok", func() { + + response, err := GetVolumesWithBalances( + ctx, + testServer.GetValue(), + operations.V2GetVolumesWithBalancesRequest{ + StartTime: pointer.For(now.Add(-4 * time.Hour)), + EndTime: pointer.For(now.Add(-2 * time.Hour)), + Ledger: "default", + }, + ) + Expect(err).ToNot(HaveOccurred()) + + Expect(len(response.Data)).To(Equal(3)) + for _, volume := range response.Data { + if volume.Account == "account:user1" { + Expect(volume.Balance).To(Equal(big.NewInt(25))) + } + if volume.Account == "account:user2" { + Expect(volume.Balance).To(Equal(big.NewInt(200))) + } + if volume.Account == "world" { + Expect(volume.Balance).To(Equal(big.NewInt(-225))) + } + } + }) + }) + + When("Get Volumes and Balances Filter by address account", func() { + It("should be ok", func() { + response, err := GetVolumesWithBalances( + ctx, + testServer.GetValue(), + operations.V2GetVolumesWithBalancesRequest{ + InsertionDate: pointer.For(true), + RequestBody: map[string]interface{}{ + "$match": map[string]any{ + "account": "account:", + }, + }, + Ledger: "default", + }, + ) + Expect(err).ToNot(HaveOccurred()) + Expect(response.Data).To(HaveLen(2)) + for _, volume := range response.Data { + if volume.Account == "account:user1" { + Expect(volume.Balance).To(Equal(big.NewInt(150))) + } + if volume.Account == "account:user2" { + Expect(volume.Balance).To(Equal(big.NewInt(50))) + } + } + }) + }) + + When("Get Volumes and Balances Filter by address account a,d and end-time now effective", func() { + It("should be ok", func() { + response, err := GetVolumesWithBalances( + ctx, + testServer.GetValue(), + operations.V2GetVolumesWithBalancesRequest{ + RequestBody: map[string]interface{}{ + "$match": map[string]any{ + "account": "account:", + }, + }, + EndTime: pointer.For(now), + Ledger: "default", + }, + ) + Expect(err).ToNot(HaveOccurred()) + Expect(response.Data).To(HaveLen(2)) + + for _, volume := range response.Data { + if volume.Account == "account:user1" { + Expect(volume.Balance).To(Equal(big.NewInt(200))) + } + if volume.Account == "account:user2" { + Expect(volume.Balance).To(Equal(big.NewInt(150))) + } + } + }) + }) + + When("Get Volumes and 
Balances Filter by address account which doesn't exist", func() { + It("should be ok", func() { + response, err := GetVolumesWithBalances( + ctx, + testServer.GetValue(), + operations.V2GetVolumesWithBalancesRequest{ + RequestBody: map[string]interface{}{ + "$match": map[string]any{ + "account": "foo:", + }, + }, + Ledger: "default", + }, + ) + Expect(err).ToNot(HaveOccurred()) + Expect(response.Data).To(HaveLen(0)) + }) + }) + + When("Get Volumes and Balances Filter With futures dates empty", func() { + It("should be ok", func() { + response, err := GetVolumesWithBalances( + ctx, + testServer.GetValue(), + operations.V2GetVolumesWithBalancesRequest{ + StartTime: pointer.For(time.Now().Add(8 * time.Hour)), + EndTime: pointer.For(time.Now().Add(12 * time.Hour)), + Ledger: "default", + }, + ) + Expect(err).ToNot(HaveOccurred()) + Expect(len(response.Data)).To(Equal(0)) + }) + }) + + When("Get Volumes and Balances Filter by address account aggregation by level 1", func() { + It("should be ok", func() { + response, err := GetVolumesWithBalances( + ctx, + testServer.GetValue(), + operations.V2GetVolumesWithBalancesRequest{ + InsertionDate: pointer.For(true), + RequestBody: map[string]interface{}{ + "$match": map[string]any{ + "account": "account:", + }, + }, + GroupBy: pointer.For(int64(1)), + Ledger: "default", + }, + ) + Expect(err).ToNot(HaveOccurred()) + Expect(len(response.Data)).To(Equal(1)) + for _, volume := range response.Data { + if volume.Account == "account" { + Expect(volume.Balance).To(Equal(big.NewInt(200))) + } + } + }) + }) +}) diff --git a/test/e2e/lifecycle_test.go b/test/e2e/lifecycle_test.go new file mode 100644 index 000000000..81908d9f4 --- /dev/null +++ b/test/e2e/lifecycle_test.go @@ -0,0 +1,164 @@ +//go:build it + +package test_suite + +import ( + "context" + "database/sql" + "github.com/formancehq/go-libs/v2/logging" + "github.com/formancehq/go-libs/v2/pointer" + "github.com/formancehq/go-libs/v2/time" + ledgerevents "github.com/formancehq/ledger/pkg/events" + . "github.com/formancehq/ledger/pkg/testserver" + "github.com/formancehq/stack/ledger/client/models/components" + "github.com/formancehq/stack/ledger/client/models/operations" + "github.com/nats-io/nats.go" + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" + "github.com/uptrace/bun" + "math/big" +) + +var _ = Context("Ledger application lifecycle tests", func() { + var ( + db = UseTemplatedDatabase() + ctx = logging.TestingContext() + ) + + testServer := NewTestServer(func() Configuration { + return Configuration{ + PostgresConfiguration: db.GetValue().ConnectionOptions(), + Output: GinkgoWriter, + Debug: debug, + NatsURL: natsServer.GetValue().ClientURL(), + } + }) + var events chan *nats.Msg + BeforeEach(func() { + events = testServer.GetValue().Subscribe() + }) + + When("starting the service", func() { + It("should be ok", func() { + info, err := testServer.GetValue().Client().Ledger.GetInfo(ctx) + Expect(err).NotTo(HaveOccurred()) + Expect(info.V2ConfigInfoResponse.Version).To(Equal("develop")) + }) + }) + When("restarting the service", func() { + BeforeEach(func(ctx context.Context) { + testServer.GetValue().Restart(ctx) + }) + It("should be ok", func() {}) + }) + When("having some in flight transactions on a ledger", func() { + var ( + sqlTx bun.Tx + countTransactions = 80 + serverRestartTimeout = 10 * time.Second + ) + BeforeEach(func() { + err := CreateLedger(ctx, testServer.GetValue(), operations.V2CreateLedgerRequest{ + Ledger: "foo", + }) + Expect(err).ToNot(HaveOccurred()) + + // lock logs table to block transactions creation requests + // the first tx will block on the log insertion + // the next transaction will block earlier on advisory lock acquirement for accounts + db := testServer.GetValue().Database() + sqlTx, err = db.BeginTx(ctx, &sql.TxOptions{}) + Expect(err).To(BeNil()) + DeferCleanup(func() { + _ = sqlTx.Rollback() + }) + + _, err = sqlTx.NewRaw("lock table _default.logs").Exec(ctx) + Expect(err).To(BeNil()) + + // Create transactions in go routines + for i := 0; i < countTransactions; i++ { + go func() { + defer GinkgoRecover() + + _, err := CreateTransaction(ctx, testServer.GetValue(), operations.V2CreateTransactionRequest{ + Ledger: "foo", + V2PostTransaction: components.V2PostTransaction{ + Postings: []components.V2Posting{{ + Amount: big.NewInt(100), + Asset: "USD", + Destination: "bank", + Source: "world", + }}, + }, + }) + Expect(err).To(BeNil()) + }() + } + + // check postgres locks + Eventually(func(g Gomega) int { + count, err := db.NewSelect(). + Table("pg_stat_activity"). + Where("state <> 'idle' and pid <> pg_backend_pid()"). + Where(`query like 'INSERT INTO "_default".accounts%'`). + Count(ctx) + g.Expect(err).To(BeNil()) + return count + }). + WithTimeout(10 * time.Second). + // Once all the transactions are in pending state, we should have one lock + // for the first tx, trying to write a new log. 
+ // And, we should also have countTransactions-1 pending lock for the 'bank' account + Should(BeNumerically("==", countTransactions-1)) // -1 for the first one + }) + When("restarting the service", func() { + BeforeEach(func() { + // We will restart the server in a separate gorouting + // the server should not restart until all pending transactions creation requests are fully completed + restarted := make(chan struct{}) + go func() { + defer GinkgoRecover() + defer func() { + close(restarted) + }() + By("restart server", func() { + ctx, cancel := context.WithTimeout(ctx, serverRestartTimeout) + DeferCleanup(cancel) + + testServer.GetValue().Restart(ctx) + }) + }() + + // Once the server is restarting, it should not accept any new connection + Eventually(func() error { + _, err := GetInfo(ctx, testServer.GetValue()) + return err + }).ShouldNot(BeNil()) + + // by rollback sql transactions, we allow the blocked routines (which create transactions) to resume. + By("rollback tx", func() { + _ = sqlTx.Rollback() + }) + + Eventually(restarted). + WithTimeout(serverRestartTimeout). + Should(BeClosed()) + }) + It("in flight transactions should be correctly terminated before", func() { + transactions, err := ListTransactions(ctx, testServer.GetValue(), operations.V2ListTransactionsRequest{ + Ledger: "foo", + PageSize: pointer.For(int64(countTransactions)), + }) + Expect(err).To(BeNil()) + Expect(transactions.Data).To(HaveLen(countTransactions)) + + By("all events should have been properly sent", func() { + for range countTransactions { + Eventually(events).Should(Receive(Event(ledgerevents.EventTypeCommittedTransactions))) + } + }) + }) + }) + }) +}) diff --git a/test/e2e/suite_test.go b/test/e2e/suite_test.go new file mode 100644 index 000000000..558df1bd1 --- /dev/null +++ b/test/e2e/suite_test.go @@ -0,0 +1,110 @@ +//go:build it + +package test_suite + +import ( + "context" + "encoding/json" + "github.com/formancehq/go-libs/v2/bun/bunconnect" + "github.com/formancehq/go-libs/v2/testing/platform/natstesting" + ledger "github.com/formancehq/ledger/internal" + "github.com/formancehq/ledger/internal/storage/bucket" + "github.com/formancehq/ledger/internal/storage/driver" + "github.com/stretchr/testify/require" + "go.opentelemetry.io/otel/trace/noop" + "os" + "testing" + + "github.com/formancehq/go-libs/v2/logging" + "github.com/formancehq/go-libs/v2/testing/docker" + . "github.com/formancehq/go-libs/v2/testing/platform/pgtesting" + . "github.com/formancehq/go-libs/v2/testing/utils" + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" +) + +func Test(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "Test Suite") +} + +var ( + dockerPool = NewDeferred[*docker.Pool]() + pgServer = NewDeferred[*PostgresServer]() + natsServer = NewDeferred[*natstesting.NatsServer]() + debug = os.Getenv("DEBUG") == "true" + logger = logging.NewDefaultLogger(GinkgoWriter, debug, false) + + DBTemplate = "template1" +) + +type ParallelExecutionContext struct { + PostgresServer *PostgresServer + NatsServer *natstesting.NatsServer +} + +var _ = SynchronizedBeforeSuite(func() []byte { + By("Initializing docker pool") + dockerPool.SetValue(docker.NewPool(GinkgoT(), logger)) + + pgServer.LoadAsync(func() *PostgresServer { + By("Initializing postgres server") + ret := CreatePostgresServer( + GinkgoT(), + dockerPool.GetValue(), + WithPGStatsExtension(), + WithPGCrypto(), + ) + By("Postgres address: " + ret.GetDSN()) + + db, err := bunconnect.OpenSQLDB(context.Background(), bunconnect.ConnectionOptions{ + DatabaseSourceName: ret.GetDatabaseDSN(DBTemplate), + }) + require.NoError(GinkgoT(), err) + + err = driver.Migrate(context.Background(), db) + require.NoError(GinkgoT(), err) + + // Initialize the _default bucket on the default database + // This way, we will be able to clone this database to speed up the tests + err = bucket.Migrate(context.Background(), noop.Tracer{}, db, ledger.DefaultBucket) + require.NoError(GinkgoT(), err) + + return ret + }) + natsServer.LoadAsync(func() *natstesting.NatsServer { + By("Initializing nats server") + ret := natstesting.CreateServer(GinkgoT(), debug, logger) + By("Nats address: " + ret.ClientURL()) + return ret + }) + + By("Waiting services alive") + Wait(pgServer, natsServer) + By("All services ready.") + + data, err := json.Marshal(ParallelExecutionContext{ + PostgresServer: pgServer.GetValue(), + NatsServer: natsServer.GetValue(), + }) + Expect(err).To(BeNil()) + + return data +}, func(data []byte) { + select { + case <-pgServer.Done(): + // Process #1, setup is terminated + return + default: + } + pec := ParallelExecutionContext{} + err := json.Unmarshal(data, &pec) + Expect(err).To(BeNil()) + + pgServer.SetValue(pec.PostgresServer) + natsServer.SetValue(pec.NatsServer) +}) + +func UseTemplatedDatabase() *Deferred[*Database] { + return UsePostgresDatabase(pgServer, CreateWithTemplate(DBTemplate)) +} diff --git a/test/e2e/v1_api_balances.go b/test/e2e/v1_api_balances.go new file mode 100644 index 000000000..5919a08fc --- /dev/null +++ b/test/e2e/v1_api_balances.go @@ -0,0 +1,95 @@ +//go:build it + +package test_suite + +import ( + "fmt" + "github.com/formancehq/go-libs/v2/logging" + "github.com/formancehq/go-libs/v2/pointer" + . "github.com/formancehq/ledger/pkg/testserver" + "github.com/formancehq/stack/ledger/client/models/components" + "github.com/formancehq/stack/ledger/client/models/operations" + "math/big" + "time" + + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" +) + +var _ = Context("Ledger accounts list API tests", func() { + var ( + db = UseTemplatedDatabase() + ctx = logging.TestingContext() + ) + + testServer := NewTestServer(func() Configuration { + return Configuration{ + PostgresConfiguration: db.GetValue().ConnectionOptions(), + Output: GinkgoWriter, + Debug: debug, + NatsURL: natsServer.GetValue().ClientURL(), + } + }) + BeforeEach(func() { + err := CreateLedger(ctx, testServer.GetValue(), operations.V2CreateLedgerRequest{ + Ledger: "default", + }) + Expect(err).To(BeNil()) + }) + const ( + pageSize = int64(10) + txCount = 2 * pageSize + ) + When(fmt.Sprintf("creating %d transactions", txCount), func() { + var ( + timestamp = time.Now().Round(time.Second).UTC() + ) + BeforeEach(func() { + for i := 0; i < int(txCount); i++ { + _, err := CreateTransaction( + ctx, + testServer.GetValue(), + operations.V2CreateTransactionRequest{ + V2PostTransaction: components.V2PostTransaction{ + Metadata: map[string]string{}, + Postings: []components.V2Posting{ + { + Amount: big.NewInt(100), + Asset: "USD", + Source: "world", + Destination: fmt.Sprintf("account:%d", i), + }, + }, + Timestamp: ×tamp, + }, + Ledger: "default", + }, + ) + Expect(err).ToNot(HaveOccurred()) + } + }) + When("Listing balances using v1 endpoint", func() { + var ( + rsp *operations.GetBalancesResponse + err error + ) + BeforeEach(func() { + rsp, err = testServer.GetValue().Client().Ledger.V1.GetBalances( + ctx, + operations.GetBalancesRequest{ + Ledger: "default", + Address: pointer.For("world"), + }, + ) + Expect(err).ToNot(HaveOccurred()) + }) + It("Should be return non empty balances", func() { + Expect(rsp.BalancesCursorResponse.Cursor.Data).To(HaveLen(1)) + balances := rsp.BalancesCursorResponse.Cursor.Data[0] + Expect(balances).To(HaveKey("world")) + Expect(balances["world"]).To(HaveKey("USD")) + Expect(balances["world"]["USD"]).To(Equal(int64(-2000))) + }) + }) + }) +}) diff --git a/test/integration/scenario_test.go b/test/integration/scenario_test.go deleted file mode 100644 index f76ca5c21..000000000 --- a/test/integration/scenario_test.go +++ /dev/null @@ -1,33 +0,0 @@ -//go:build it - -package test_suite - -import ( - "github.com/formancehq/go-libs/logging" - . "github.com/formancehq/go-libs/testing/platform/pgtesting" - "github.com/formancehq/ledger/pkg/testserver" - . "github.com/onsi/ginkgo/v2" - . "github.com/onsi/gomega" -) - -var _ = Context("Ledger integration tests", func() { - var ( - db = UsePostgresDatabase(pgServer) - ctx = logging.TestingContext() - ) - - testServer := testserver.UseNewTestServer(func() testserver.Configuration { - return testserver.Configuration{ - PostgresConfiguration: db.GetValue().ConnectionOptions(), - Output: GinkgoWriter, - Debug: debug, - } - }) - When("Starting the ledger", func() { - It("Should be ok", func() { - info, err := testServer.GetValue().Client().Ledger.V2.GetInfo(ctx) - Expect(err).NotTo(HaveOccurred()) - Expect(info.V2ConfigInfoResponse.Version).To(Equal("develop")) - }) - }) -}) diff --git a/test/integration/suite_test.go b/test/integration/suite_test.go deleted file mode 100644 index d9f0dbfc8..000000000 --- a/test/integration/suite_test.go +++ /dev/null @@ -1,15 +0,0 @@ -//go:build it - -package test_suite - -import ( - "testing" - - . "github.com/onsi/ginkgo/v2" - . 
"github.com/onsi/gomega" -) - -func Test(t *testing.T) { - RegisterFailHandler(Fail) - RunSpecs(t, "Test Suite") -} diff --git a/test/migrations/README.md b/test/migrations/README.md new file mode 100644 index 000000000..95c4a621f --- /dev/null +++ b/test/migrations/README.md @@ -0,0 +1,20 @@ +# Migrations test + +This package allow to test the migration of an existing database regarding current code. + +The test can be run using the following command : +```shell +go test . \ + -databases.source +``` + +The test will start a new postgres server, copy the database inside, then apply migrations. + +Additionally, you can add the flag : +```shell +go test . \ + -databases.source \ + -databases.destination +``` + +In this case, the destination database will be used and no local postgres server will be started. diff --git a/test/migrations/upgrade_test.go b/test/migrations/upgrade_test.go new file mode 100644 index 000000000..2abbad014 --- /dev/null +++ b/test/migrations/upgrade_test.go @@ -0,0 +1,139 @@ +package migrations + +import ( + "flag" + "fmt" + "github.com/formancehq/go-libs/v2/bun/bunconnect" + "github.com/formancehq/go-libs/v2/logging" + "github.com/formancehq/go-libs/v2/testing/docker" + "github.com/formancehq/go-libs/v2/testing/platform/pgtesting" + "github.com/formancehq/ledger/internal/storage/driver" + "github.com/ory/dockertest/v3" + dockerlib "github.com/ory/dockertest/v3/docker" + "github.com/stretchr/testify/require" + "github.com/xo/dburl" + "os" + "strings" + "testing" +) + +var ( + sourceDatabase string + destinationDatabase string +) + +func TestMain(m *testing.M) { + flag.StringVar(&sourceDatabase, "databases.source", "", "Source database") + flag.StringVar(&destinationDatabase, "databases.destination", "", "Destination database") + flag.Parse() + + os.Exit(m.Run()) +} + +func TestMigrations(t *testing.T) { + if sourceDatabase == "" { + t.Skip() + } + + ctx := logging.TestingContext() + dockerPool := docker.NewPool(t, logging.Testing()) + + if destinationDatabase == "" { + pgServer := pgtesting.CreatePostgresServer(t, dockerPool) + destinationDatabase = pgServer.GetDSN() + } + + copyDatabase(t, dockerPool, sourceDatabase, destinationDatabase) + + db, err := bunconnect.OpenSQLDB(ctx, bunconnect.ConnectionOptions{ + DatabaseSourceName: destinationDatabase, + }) + require.NoError(t, err) + + // Migrate database + driver := driver.New(db) + require.NoError(t, driver.Initialize(ctx)) + require.NoError(t, driver.UpgradeAllBuckets(ctx)) +} + +func copyDatabase(t *testing.T, dockerPool *docker.Pool, source, destination string) { + resource := dockerPool.Run(docker.Configuration{ + RunOptions: &dockertest.RunOptions{ + Repository: "postgres", + Tag: "15-alpine", + Entrypoint: []string{"sleep", "infinity"}, + }, + HostConfigOptions: []func(config *dockerlib.HostConfig){ + func(config *dockerlib.HostConfig) { + config.NetworkMode = "host" + }, + }, + }) + + execArgs := []string{"sh", "-c", fmt.Sprintf(` + %s | %s + `, + preparePGDumpCommand(t, source), + preparePSQLCommand(t, destination), + )} + + fmt.Printf("Exec command: %s\n", execArgs) + + _, err := resource.Exec(execArgs, dockertest.ExecOptions{ + StdOut: os.Stdout, + StdErr: os.Stdout, + }) + + require.NoError(t, err) +} + +func preparePGDumpCommand(t *testing.T, dsn string) string { + parsedSource, err := dburl.Parse(dsn) + require.NoError(t, err) + + args := make([]string, 0) + + password, ok := parsedSource.User.Password() + if ok { + args = append(args, "PGPASSWORD="+password) + } + + args = append(args, + "pg_dump", + 
"--no-owner", // skip roles + "-x", // Skip privileges + "-h", parsedSource.Hostname(), + "-p", parsedSource.Port(), + ) + + if username := parsedSource.User.Username(); username != "" { + args = append(args, "-U", username) + } + + return strings.Join(append(args, parsedSource.Path[1:]), " ") +} + +func preparePSQLCommand(t *testing.T, dsn string) string { + parsedSource, err := dburl.Parse(dsn) + require.NoError(t, err) + + args := make([]string, 0) + + password, ok := parsedSource.User.Password() + if ok { + args = append(args, "PGPASSWORD="+password) + } + + args = append(args, + "psql", + "-h", parsedSource.Hostname(), + "-p", parsedSource.Port(), + parsedSource.Path[1:], + ) + + if username := parsedSource.User.Username(); username != "" { + args = append(args, "-U", username) + } + + return strings.Join(args, " ") +} diff --git a/test/performance/.gitignore b/test/performance/.gitignore new file mode 100644 index 000000000..05c319264 --- /dev/null +++ b/test/performance/.gitignore @@ -0,0 +1 @@ +report diff --git a/test/performance/Earthfile b/test/performance/Earthfile new file mode 100644 index 000000000..685823a70 --- /dev/null +++ b/test/performance/Earthfile @@ -0,0 +1,19 @@ +VERSION 0.8 + +IMPORT github.com/formancehq/earthly:tags/v0.16.2 AS core + +run: + LOCALLY + ARG args="" + RUN go test -bench="Write" -run ^$ -tags it -report.file ./report/report.json -timeout 60m $args + +generate-graphs: + FROM core+base-image + RUN apk update && apk add nodejs npm + COPY charts /src + COPY ./report/report.json /report/report.json + WORKDIR /src + RUN npm install + RUN npm run build + RUN node index.js + SAVE ARTIFACT *.png AS LOCAL ./report/ diff --git a/test/performance/README.md b/test/performance/README.md new file mode 100644 index 000000000..614e48f96 --- /dev/null +++ b/test/performance/README.md @@ -0,0 +1,46 @@ +# Performance test + +Each feature is tested against a test script involving a transaction from a source to a destination. +The benchmarks also test the minimal set of features and the full set of features. + +Refer to [features](../../CONTRIBUTING.md/#features) for more information about features. + +Three types of script are actually tested: +* world->bank : A transaction from `@world` to `@bank` +* world->any : A transaction from `@world` to `@dst:` +* any(unbounded)->any : A transaction from `@src:` to `@dst:` + +## Run locally + +```shell +earthly +run +``` + +You can pass additional arguments (the underlying command is a standard `go test -bench=.`) using the flag `--args`. +For example: +```shell +earthly +run --args="-benchtime 10s" +``` + +## Run on a remote stack + +```shell +earthly +run --args="--stack.url=XXX --client.id=XXX --client.secret=XXX" +``` + +## Run on a remote ledger + +```shell +earthly +run --args="--ledger.url=XXX --auth.url=XXX --client.id=XXX --client.secret=XXX" +``` + +## Results + +TPS is included as a benchmark metrics. + +You can generate some graphs using the command: +``` +earthly +generate-graphs +``` + +See generated files in `report` directory. \ No newline at end of file diff --git a/test/performance/benchmark_test.go b/test/performance/benchmark_test.go new file mode 100644 index 000000000..3cddeca40 --- /dev/null +++ b/test/performance/benchmark_test.go @@ -0,0 +1,183 @@ +//go:build it + +package performance_test + +import ( + "context" + "encoding/json" + "fmt" + . 
"github.com/formancehq/go-libs/v2/collectionutils" + ledgerclient "github.com/formancehq/stack/ledger/client" + "github.com/formancehq/stack/ledger/client/models/components" + "github.com/formancehq/stack/ledger/client/models/operations" + "net/http" + "sort" + "sync/atomic" + "testing" + + "github.com/formancehq/go-libs/v2/time" + ledger "github.com/formancehq/ledger/internal" + "github.com/google/uuid" + "github.com/stretchr/testify/require" +) + +type TransactionProvider interface { + Get(iteration int) (string, map[string]string) +} +type TransactionProviderFn func(iteration int) (string, map[string]string) + +func (fn TransactionProviderFn) Get(iteration int) (string, map[string]string) { + return fn(iteration) +} + +type Benchmark struct { + EnvFactory EnvFactory + Scenarios map[string]TransactionProvider + + reports map[string]map[string]*report + b *testing.B +} + +func (benchmark *Benchmark) Run(ctx context.Context) map[string][]Result { + results := make(map[string][]Result, 0) + scenarios := Keys(benchmark.Scenarios) + sort.Strings(scenarios) + + for _, scenario := range scenarios { + for _, configuration := range buildAllPossibleConfigurations() { + + testName := fmt.Sprintf("%s/%s", scenario, configuration) + + ledgerConfiguration := ledger.Configuration{ + Features: configuration.FeatureSet, + Bucket: uuid.NewString()[:8], + } + ledgerConfiguration.SetDefaults() + report := newReport(configuration, scenario) + var result Result + + benchmark.b.Run(testName, func(b *testing.B) { + report.reset() + l := ledger.Ledger{ + Configuration: ledgerConfiguration, + Name: uuid.NewString()[:8], + } + + cpt := atomic.Int64{} + + env := envFactory.Create(ctx, b, l) + b.Logf("ledger: %s/%s", l.Bucket, l.Name) + + b.SetParallelism(int(parallelism)) + b.ResetTimer() + b.RunParallel(func(pb *testing.PB) { + for pb.Next() { + iteration := int(cpt.Add(1)) + + script, vars := benchmark.Scenarios[scenario].Get(iteration) + now := time.Now() + + _, err := benchmark.createTransaction(ctx, env.Client(), l, script, vars) + require.NoError(b, err) + + report.registerTransactionLatency(time.Since(now)) + } + }) + b.StopTimer() + report.End = time.Now() + + // Fetch otel metrics + rsp, err := http.Get(env.URL() + "/_/metrics") + require.NoError(b, err) + if rsp.StatusCode == http.StatusOK { + ret := make(map[string]any) + require.NoError(b, json.NewDecoder(rsp.Body).Decode(&ret)) + report.InternalMetrics = ret + } else { + b.Logf("Unable to fetch ledger metrics, got status code %d", rsp.StatusCode) + } + + // Compute final results + result = report.GetResult() + + b.ReportMetric(report.TPS(), "t/s") + b.ReportMetric(float64(result.Metrics.Time.Avg.Milliseconds()), "ms/transaction") + + stopContext, cancel := context.WithTimeout(ctx, 10*time.Second) + b.Cleanup(cancel) + + require.NoError(benchmark.b, env.Stop(stopContext)) + }) + + if report.Tachymeter.Count > 0 { + results[scenario] = append(results[scenario], result) + } + } + } + + return results +} + +func (benchmark *Benchmark) createTransaction( + ctx context.Context, + client *ledgerclient.Formance, + l ledger.Ledger, + script string, + vars map[string]string, +) (*ledger.Transaction, error) { + varsAsMapAny := make(map[string]any) + for k, v := range vars { + varsAsMapAny[k] = v + } + response, err := client.Ledger.V2.CreateTransaction(ctx, operations.V2CreateTransactionRequest{ + Ledger: l.Name, + V2PostTransaction: components.V2PostTransaction{ + Script: &components.V2PostTransactionScript{ + Plain: script, + Vars: varsAsMapAny, + }, + }, + }) 
+ if err != nil { + return nil, fmt.Errorf("creating transaction: %w", err) + } + + return &ledger.Transaction{ + TransactionData: ledger.TransactionData{ + Postings: Map(response.V2CreateTransactionResponse.Data.Postings, func(from components.V2Posting) ledger.Posting { + return ledger.Posting{ + Source: from.Source, + Destination: from.Destination, + Amount: from.Amount, + Asset: from.Asset, + } + }), + Metadata: response.V2CreateTransactionResponse.Data.Metadata, + Timestamp: time.Time{ + Time: response.V2CreateTransactionResponse.Data.Timestamp, + }, + Reference: func() string { + if response.V2CreateTransactionResponse.Data.Reference == nil { + return "" + } + return *response.V2CreateTransactionResponse.Data.Reference + }(), + }, + ID: int(response.V2CreateTransactionResponse.Data.ID.Int64()), + RevertedAt: func() *time.Time { + if response.V2CreateTransactionResponse.Data.RevertedAt == nil { + return nil + } + return &time.Time{Time: *response.V2CreateTransactionResponse.Data.RevertedAt} + }(), + }, nil +} + +func New(b *testing.B, envFactory EnvFactory, scenarios map[string]TransactionProvider) *Benchmark { + return &Benchmark{ + b: b, + EnvFactory: envFactory, + Scenarios: scenarios, + reports: make(map[string]map[string]*report), + } +} diff --git a/test/performance/charts/index.js b/test/performance/charts/index.js new file mode 100644 index 000000000..cf68fabac --- /dev/null +++ b/test/performance/charts/index.js @@ -0,0 +1,51 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const fs = __importStar(require("fs")); +require("chartjs-to-image"); +const graphs_1 = require("./src/graphs"); +const main = () => __awaiter(void 0, void 0, void 0, function* () { + let buffer = fs.readFileSync('../report/report.json', 'utf-8'); + let reports = JSON.parse(buffer); + yield (0, graphs_1.exportTPSGraph)({ + output: 'tps.png', + }, reports); + yield (0, graphs_1.exportLatencyGraph)({ + output: 'p99.png' + }, 'P99', reports); + yield (0, graphs_1.exportLatencyGraph)({ + output: 'p95.png' + }, 'P95', reports); +}); +main(); diff --git a/test/performance/charts/index.ts b/test/performance/charts/index.ts new file mode 100644 index 000000000..e5ac02f89 --- /dev/null +++ b/test/performance/charts/index.ts @@ -0,0 +1,23 @@ +import * as fs from 'fs'; +import 'chartjs-to-image'; +import {exportDatabaseStats, exportLatencyGraph, exportTPSGraph} from "./src/graphs"; + +const main = async () => { + let buffer = fs.readFileSync('../report/report.json', 'utf-8'); + let reports = JSON.parse(buffer); + await exportTPSGraph({ + output: 'tps.png', + }, reports); + + await exportDatabaseStats('database_connections.png', reports); + + const ps: (keyof MetricsTime)[] = ['P99', 'P95', 'P75', 'Avg'] + for (let p of ps) { + await exportLatencyGraph({ + output: p.toLowerCase() + '.png' + }, p, reports); + } +} + +main(); + diff --git a/test/performance/charts/package-lock.json b/test/performance/charts/package-lock.json new file mode 100644 index 000000000..d5fae3dd2 --- /dev/null +++ b/test/performance/charts/package-lock.json @@ -0,0 +1,3135 @@ +{ + "name": "charts", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "charts", + "version": "1.0.0", + "license": "ISC", + "dependencies": { + "@types/node": "^22.7.4", + "@types/promise-fs": "^2.1.5", + "chart.js": "^4.4.4", + "chart.js-image": "^6.1.3", + "chartjs-plugin-annotation": "^3.0.1", + "chartjs-to-image": "^1.2.2", + "install": "^0.13.0", + "npm": "^10.9.0" + }, + "devDependencies": { + "typescript": "^5.6.2" + } + }, + "node_modules/@kurkle/color": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/@kurkle/color/-/color-0.3.2.tgz", + "integrity": "sha512-fuscdXJ9G1qb7W8VdHi+IwRqij3lBkosAm4ydQtEmbY58OzHXqQhvlxqEkoz0yssNVn38bcpRWgA9PP+OGoisw==", + "license": "MIT" + }, + "node_modules/@types/node": { + "version": "22.7.4", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.7.4.tgz", + "integrity": "sha512-y+NPi1rFzDs1NdQHHToqeiX2TIS79SWEAw9GYhkkx8bD0ChpfqC+n2j5OXOCpzfojBEBt6DnEnnG9MY0zk1XLg==", + "license": "MIT", + "dependencies": { + "undici-types": "~6.19.2" + } + }, + "node_modules/@types/promise-fs": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@types/promise-fs/-/promise-fs-2.1.5.tgz", + "integrity": "sha512-C8bi4Xh6HlixkGspIDaX/zFA+r1+UHUuXYJCCqi6oBlVjxF870kPv38aJlSRo0u1j+2YBi0PLP7PGMZIBbCQ6Q==", + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", + "license": "MIT" + }, + "node_modules/axios": { + "version": "1.7.7", + "resolved": 
"https://registry.npmjs.org/axios/-/axios-1.7.7.tgz", + "integrity": "sha512-S4kL7XrjgBmvdGut0sN3yJxqYzrDOnivkBiN0OFs6hLiUam3UPvswUo0kqGyhqUZGEOytHyumEdXsAkgCOUf3Q==", + "license": "MIT", + "dependencies": { + "follow-redirects": "^1.15.6", + "form-data": "^4.0.0", + "proxy-from-env": "^1.1.0" + } + }, + "node_modules/chart.js": { + "version": "4.4.4", + "resolved": "https://registry.npmjs.org/chart.js/-/chart.js-4.4.4.tgz", + "integrity": "sha512-emICKGBABnxhMjUjlYRR12PmOXhJ2eJjEHL2/dZlWjxRAZT1D8xplLFq5M0tMQK8ja+wBS/tuVEJB5C6r7VxJA==", + "license": "MIT", + "dependencies": { + "@kurkle/color": "^0.3.0" + }, + "engines": { + "pnpm": ">=8" + } + }, + "node_modules/chart.js-image": { + "version": "6.1.3", + "resolved": "https://registry.npmjs.org/chart.js-image/-/chart.js-image-6.1.3.tgz", + "integrity": "sha512-K+h0dc/Wf/Dk5CWKrV7xxS7ozONCiQ73XL+QStBxUpgABzSblJwQe/R6X1RWxY2Z/G8OhKsPFB0HC7bwiCmB2w==", + "license": "MIT", + "dependencies": { + "javascript-stringify": "2.0.1", + "node-fetch": "2.6.0" + }, + "engines": { + "node": ">12" + } + }, + "node_modules/chart.js-image/node_modules/javascript-stringify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/javascript-stringify/-/javascript-stringify-2.0.1.tgz", + "integrity": "sha512-yV+gqbd5vaOYjqlbk16EG89xB5udgjqQF3C5FAORDg4f/IS1Yc5ERCv5e/57yBcfJYw05V5JyIXabhwb75Xxow==", + "license": "MIT" + }, + "node_modules/chartjs-plugin-annotation": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/chartjs-plugin-annotation/-/chartjs-plugin-annotation-3.0.1.tgz", + "integrity": "sha512-hlIrXXKqSDgb+ZjVYHefmlZUXK8KbkCPiynSVrTb/HjTMkT62cOInaT1NTQCKtxKKOm9oHp958DY3RTAFKtkHg==", + "license": "MIT", + "peerDependencies": { + "chart.js": ">=4.0.0" + } + }, + "node_modules/chartjs-to-image": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/chartjs-to-image/-/chartjs-to-image-1.2.2.tgz", + "integrity": "sha512-qnYedDlNSPsrISQyRhJk4gWciKMtK8mlx2VWbFMJIPLVokSHJBEUuoxE6LLDFGnOhdvLd3K5E6lmGap7/phWFQ==", + "license": "MIT", + "dependencies": { + "axios": "^1.6.0", + "javascript-stringify": "^2.1.0" + } + }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "license": "MIT", + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "license": "MIT", + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/follow-redirects": { + "version": "1.15.9", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.9.tgz", + "integrity": "sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ==", + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/RubenVerborgh" + } + ], + "license": "MIT", + "engines": { + "node": ">=4.0" + }, + "peerDependenciesMeta": { + "debug": { + "optional": true + } + } + }, + "node_modules/form-data": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", + "integrity": 
"sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "license": "MIT", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/install": { + "version": "0.13.0", + "resolved": "https://registry.npmjs.org/install/-/install-0.13.0.tgz", + "integrity": "sha512-zDml/jzr2PKU9I8J/xyZBQn8rPCAY//UOYNmR01XwNwyfhEWObo2SWfSl1+0tm1u6PhxLwDnfsT/6jB7OUxqFA==", + "license": "MIT", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/javascript-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/javascript-stringify/-/javascript-stringify-2.1.0.tgz", + "integrity": "sha512-JVAfqNPTvNq3sB/VHQJAFxN/sPgKnsKrCwyRt15zwNCdrMMJDdcEOdubuy+DuJYYdm0ox1J4uzEuYKkN+9yhVg==", + "license": "MIT" + }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "license": "MIT", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/node-fetch": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.0.tgz", + "integrity": "sha512-8dG4H5ujfvFiqDmVu9fQ5bOHUC15JMjMY/Zumv26oOvvVJjM67KF8koCWIabKQ1GJIa9r2mMZscBq/TbdOcmNA==", + "license": "MIT", + "engines": { + "node": "4.x || >=6.0.0" + } + }, + "node_modules/npm": { + "version": "10.9.0", + "resolved": "https://registry.npmjs.org/npm/-/npm-10.9.0.tgz", + "integrity": "sha512-ZanDioFylI9helNhl2LNd+ErmVD+H5I53ry41ixlLyCBgkuYb+58CvbAp99hW+zr5L9W4X7CchSoeqKdngOLSw==", + "bundleDependencies": [ + "@isaacs/string-locale-compare", + "@npmcli/arborist", + "@npmcli/config", + "@npmcli/fs", + "@npmcli/map-workspaces", + "@npmcli/package-json", + "@npmcli/promise-spawn", + "@npmcli/redact", + "@npmcli/run-script", + "@sigstore/tuf", + "abbrev", + "archy", + "cacache", + "chalk", + "ci-info", + "cli-columns", + "fastest-levenshtein", + "fs-minipass", + "glob", + "graceful-fs", + "hosted-git-info", + "ini", + "init-package-json", + "is-cidr", + "json-parse-even-better-errors", + "libnpmaccess", + "libnpmdiff", + "libnpmexec", + "libnpmfund", + "libnpmhook", + "libnpmorg", + "libnpmpack", + "libnpmpublish", + "libnpmsearch", + "libnpmteam", + "libnpmversion", + "make-fetch-happen", + "minimatch", + "minipass", + "minipass-pipeline", + "ms", + "node-gyp", + "nopt", + "normalize-package-data", + "npm-audit-report", + "npm-install-checks", + "npm-package-arg", + "npm-pick-manifest", + "npm-profile", + "npm-registry-fetch", + "npm-user-validate", + "p-map", + "pacote", + "parse-conflict-json", + "proc-log", + "qrcode-terminal", + "read", + "semver", + "spdx-expression-parse", + "ssri", + "supports-color", + "tar", + "text-table", + "tiny-relative-date", + "treeverse", + "validate-npm-package-name", + "which", + "write-file-atomic" + ], + "license": "Artistic-2.0", + "workspaces": [ + "docs", + "smoke-tests", + "mock-globals", + "mock-registry", + "workspaces/*" + ], + "dependencies": { + "@isaacs/string-locale-compare": "^1.1.0", + 
"@npmcli/arborist": "^8.0.0", + "@npmcli/config": "^9.0.0", + "@npmcli/fs": "^4.0.0", + "@npmcli/map-workspaces": "^4.0.1", + "@npmcli/package-json": "^6.0.1", + "@npmcli/promise-spawn": "^8.0.1", + "@npmcli/redact": "^3.0.0", + "@npmcli/run-script": "^9.0.1", + "@sigstore/tuf": "^2.3.4", + "abbrev": "^3.0.0", + "archy": "~1.0.0", + "cacache": "^19.0.1", + "chalk": "^5.3.0", + "ci-info": "^4.0.0", + "cli-columns": "^4.0.0", + "fastest-levenshtein": "^1.0.16", + "fs-minipass": "^3.0.3", + "glob": "^10.4.5", + "graceful-fs": "^4.2.11", + "hosted-git-info": "^8.0.0", + "ini": "^5.0.0", + "init-package-json": "^7.0.1", + "is-cidr": "^5.1.0", + "json-parse-even-better-errors": "^4.0.0", + "libnpmaccess": "^9.0.0", + "libnpmdiff": "^7.0.0", + "libnpmexec": "^9.0.0", + "libnpmfund": "^6.0.0", + "libnpmhook": "^11.0.0", + "libnpmorg": "^7.0.0", + "libnpmpack": "^8.0.0", + "libnpmpublish": "^10.0.0", + "libnpmsearch": "^8.0.0", + "libnpmteam": "^7.0.0", + "libnpmversion": "^7.0.0", + "make-fetch-happen": "^14.0.1", + "minimatch": "^9.0.5", + "minipass": "^7.1.1", + "minipass-pipeline": "^1.2.4", + "ms": "^2.1.2", + "node-gyp": "^10.2.0", + "nopt": "^8.0.0", + "normalize-package-data": "^7.0.0", + "npm-audit-report": "^6.0.0", + "npm-install-checks": "^7.1.0", + "npm-package-arg": "^12.0.0", + "npm-pick-manifest": "^10.0.0", + "npm-profile": "^11.0.1", + "npm-registry-fetch": "^18.0.1", + "npm-user-validate": "^3.0.0", + "p-map": "^4.0.0", + "pacote": "^19.0.0", + "parse-conflict-json": "^4.0.0", + "proc-log": "^5.0.0", + "qrcode-terminal": "^0.12.0", + "read": "^4.0.0", + "semver": "^7.6.3", + "spdx-expression-parse": "^4.0.0", + "ssri": "^12.0.0", + "supports-color": "^9.4.0", + "tar": "^6.2.1", + "text-table": "~0.2.0", + "tiny-relative-date": "^1.3.0", + "treeverse": "^3.0.0", + "validate-npm-package-name": "^6.0.0", + "which": "^5.0.0", + "write-file-atomic": "^6.0.0" + }, + "bin": { + "npm": "bin/npm-cli.js", + "npx": "bin/npx-cli.js" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/@isaacs/cliui": { + "version": "8.0.2", + "inBundle": true, + "license": "ISC", + "dependencies": { + "string-width": "^5.1.2", + "string-width-cjs": "npm:string-width@^4.2.0", + "strip-ansi": "^7.0.1", + "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", + "wrap-ansi": "^8.1.0", + "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/npm/node_modules/@isaacs/cliui/node_modules/ansi-regex": { + "version": "6.0.1", + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/npm/node_modules/@isaacs/cliui/node_modules/emoji-regex": { + "version": "9.2.2", + "inBundle": true, + "license": "MIT" + }, + "node_modules/npm/node_modules/@isaacs/cliui/node_modules/string-width": { + "version": "5.1.2", + "inBundle": true, + "license": "MIT", + "dependencies": { + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/npm/node_modules/@isaacs/cliui/node_modules/strip-ansi": { + "version": "7.1.0", + "inBundle": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/npm/node_modules/@isaacs/fs-minipass": { + "version": 
"4.0.1", + "inBundle": true, + "license": "ISC", + "dependencies": { + "minipass": "^7.0.4" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/npm/node_modules/@isaacs/string-locale-compare": { + "version": "1.1.0", + "inBundle": true, + "license": "ISC" + }, + "node_modules/npm/node_modules/@npmcli/agent": { + "version": "3.0.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "agent-base": "^7.1.0", + "http-proxy-agent": "^7.0.0", + "https-proxy-agent": "^7.0.1", + "lru-cache": "^10.0.1", + "socks-proxy-agent": "^8.0.3" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/@npmcli/arborist": { + "version": "8.0.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "@isaacs/string-locale-compare": "^1.1.0", + "@npmcli/fs": "^4.0.0", + "@npmcli/installed-package-contents": "^3.0.0", + "@npmcli/map-workspaces": "^4.0.1", + "@npmcli/metavuln-calculator": "^8.0.0", + "@npmcli/name-from-folder": "^3.0.0", + "@npmcli/node-gyp": "^4.0.0", + "@npmcli/package-json": "^6.0.1", + "@npmcli/query": "^4.0.0", + "@npmcli/redact": "^3.0.0", + "@npmcli/run-script": "^9.0.1", + "bin-links": "^5.0.0", + "cacache": "^19.0.1", + "common-ancestor-path": "^1.0.1", + "hosted-git-info": "^8.0.0", + "json-parse-even-better-errors": "^4.0.0", + "json-stringify-nice": "^1.1.4", + "lru-cache": "^10.2.2", + "minimatch": "^9.0.4", + "nopt": "^8.0.0", + "npm-install-checks": "^7.1.0", + "npm-package-arg": "^12.0.0", + "npm-pick-manifest": "^10.0.0", + "npm-registry-fetch": "^18.0.1", + "pacote": "^19.0.0", + "parse-conflict-json": "^4.0.0", + "proc-log": "^5.0.0", + "proggy": "^3.0.0", + "promise-all-reject-late": "^1.0.0", + "promise-call-limit": "^3.0.1", + "read-package-json-fast": "^4.0.0", + "semver": "^7.3.7", + "ssri": "^12.0.0", + "treeverse": "^3.0.0", + "walk-up-path": "^3.0.1" + }, + "bin": { + "arborist": "bin/index.js" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/@npmcli/config": { + "version": "9.0.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "@npmcli/map-workspaces": "^4.0.1", + "@npmcli/package-json": "^6.0.1", + "ci-info": "^4.0.0", + "ini": "^5.0.0", + "nopt": "^8.0.0", + "proc-log": "^5.0.0", + "semver": "^7.3.5", + "walk-up-path": "^3.0.1" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/@npmcli/fs": { + "version": "4.0.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "semver": "^7.3.5" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/@npmcli/git": { + "version": "6.0.1", + "inBundle": true, + "license": "ISC", + "dependencies": { + "@npmcli/promise-spawn": "^8.0.0", + "ini": "^5.0.0", + "lru-cache": "^10.0.1", + "npm-pick-manifest": "^10.0.0", + "proc-log": "^5.0.0", + "promise-inflight": "^1.0.1", + "promise-retry": "^2.0.1", + "semver": "^7.3.5", + "which": "^5.0.0" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/@npmcli/installed-package-contents": { + "version": "3.0.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "npm-bundled": "^4.0.0", + "npm-normalize-package-bin": "^4.0.0" + }, + "bin": { + "installed-package-contents": "bin/index.js" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/@npmcli/map-workspaces": { + "version": "4.0.1", + "inBundle": true, + "license": "ISC", + "dependencies": { + "@npmcli/name-from-folder": 
"^3.0.0", + "@npmcli/package-json": "^6.0.0", + "glob": "^10.2.2", + "minimatch": "^9.0.0" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/@npmcli/metavuln-calculator": { + "version": "8.0.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "cacache": "^19.0.0", + "json-parse-even-better-errors": "^4.0.0", + "pacote": "^19.0.0", + "proc-log": "^5.0.0", + "semver": "^7.3.5" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/@npmcli/name-from-folder": { + "version": "3.0.0", + "inBundle": true, + "license": "ISC", + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/@npmcli/node-gyp": { + "version": "4.0.0", + "inBundle": true, + "license": "ISC", + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/@npmcli/package-json": { + "version": "6.0.1", + "inBundle": true, + "license": "ISC", + "dependencies": { + "@npmcli/git": "^6.0.0", + "glob": "^10.2.2", + "hosted-git-info": "^8.0.0", + "json-parse-even-better-errors": "^4.0.0", + "normalize-package-data": "^7.0.0", + "proc-log": "^5.0.0", + "semver": "^7.5.3" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/@npmcli/promise-spawn": { + "version": "8.0.1", + "inBundle": true, + "license": "ISC", + "dependencies": { + "which": "^5.0.0" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/@npmcli/query": { + "version": "4.0.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "postcss-selector-parser": "^6.1.2" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/@npmcli/redact": { + "version": "3.0.0", + "inBundle": true, + "license": "ISC", + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/@npmcli/run-script": { + "version": "9.0.1", + "inBundle": true, + "license": "ISC", + "dependencies": { + "@npmcli/node-gyp": "^4.0.0", + "@npmcli/package-json": "^6.0.0", + "@npmcli/promise-spawn": "^8.0.0", + "node-gyp": "^10.0.0", + "proc-log": "^5.0.0", + "which": "^5.0.0" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/@pkgjs/parseargs": { + "version": "0.11.0", + "inBundle": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">=14" + } + }, + "node_modules/npm/node_modules/@sigstore/bundle": { + "version": "2.3.2", + "inBundle": true, + "license": "Apache-2.0", + "dependencies": { + "@sigstore/protobuf-specs": "^0.3.2" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/@sigstore/core": { + "version": "1.1.0", + "inBundle": true, + "license": "Apache-2.0", + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/@sigstore/protobuf-specs": { + "version": "0.3.2", + "inBundle": true, + "license": "Apache-2.0", + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/@sigstore/sign": { + "version": "2.3.2", + "inBundle": true, + "license": "Apache-2.0", + "dependencies": { + "@sigstore/bundle": "^2.3.2", + "@sigstore/core": "^1.0.0", + "@sigstore/protobuf-specs": "^0.3.2", + "make-fetch-happen": "^13.0.1", + "proc-log": "^4.2.0", + "promise-retry": "^2.0.1" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent": { + "version": "2.2.2", + 
"inBundle": true, + "license": "ISC", + "dependencies": { + "agent-base": "^7.1.0", + "http-proxy-agent": "^7.0.0", + "https-proxy-agent": "^7.0.1", + "lru-cache": "^10.0.1", + "socks-proxy-agent": "^8.0.3" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs": { + "version": "3.1.1", + "inBundle": true, + "license": "ISC", + "dependencies": { + "semver": "^7.3.5" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/@sigstore/sign/node_modules/cacache": { + "version": "18.0.4", + "inBundle": true, + "license": "ISC", + "dependencies": { + "@npmcli/fs": "^3.1.0", + "fs-minipass": "^3.0.0", + "glob": "^10.2.2", + "lru-cache": "^10.0.1", + "minipass": "^7.0.3", + "minipass-collect": "^2.0.1", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "p-map": "^4.0.0", + "ssri": "^10.0.0", + "tar": "^6.1.11", + "unique-filename": "^3.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen": { + "version": "13.0.1", + "inBundle": true, + "license": "ISC", + "dependencies": { + "@npmcli/agent": "^2.0.0", + "cacache": "^18.0.0", + "http-cache-semantics": "^4.1.1", + "is-lambda": "^1.0.1", + "minipass": "^7.0.2", + "minipass-fetch": "^3.0.0", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "negotiator": "^0.6.3", + "proc-log": "^4.2.0", + "promise-retry": "^2.0.1", + "ssri": "^10.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch": { + "version": "3.0.5", + "inBundle": true, + "license": "MIT", + "dependencies": { + "minipass": "^7.0.3", + "minipass-sized": "^1.0.3", + "minizlib": "^2.1.2" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "optionalDependencies": { + "encoding": "^0.1.13" + } + }, + "node_modules/npm/node_modules/@sigstore/sign/node_modules/proc-log": { + "version": "4.2.0", + "inBundle": true, + "license": "ISC", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/@sigstore/sign/node_modules/ssri": { + "version": "10.0.6", + "inBundle": true, + "license": "ISC", + "dependencies": { + "minipass": "^7.0.3" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/@sigstore/sign/node_modules/unique-filename": { + "version": "3.0.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "unique-slug": "^4.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/@sigstore/sign/node_modules/unique-slug": { + "version": "4.0.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "imurmurhash": "^0.1.4" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/@sigstore/tuf": { + "version": "2.3.4", + "inBundle": true, + "license": "Apache-2.0", + "dependencies": { + "@sigstore/protobuf-specs": "^0.3.2", + "tuf-js": "^2.2.1" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/@sigstore/verify": { + "version": "1.2.1", + "inBundle": true, + "license": "Apache-2.0", + "dependencies": { + "@sigstore/bundle": "^2.3.2", + "@sigstore/core": "^1.1.0", + "@sigstore/protobuf-specs": "^0.3.2" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + 
"node_modules/npm/node_modules/@tufjs/canonical-json": { + "version": "2.0.0", + "inBundle": true, + "license": "MIT", + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/@tufjs/models": { + "version": "2.0.1", + "inBundle": true, + "license": "MIT", + "dependencies": { + "@tufjs/canonical-json": "2.0.0", + "minimatch": "^9.0.4" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/abbrev": { + "version": "3.0.0", + "inBundle": true, + "license": "ISC", + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/agent-base": { + "version": "7.1.1", + "inBundle": true, + "license": "MIT", + "dependencies": { + "debug": "^4.3.4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/npm/node_modules/aggregate-error": { + "version": "3.1.0", + "inBundle": true, + "license": "MIT", + "dependencies": { + "clean-stack": "^2.0.0", + "indent-string": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/ansi-regex": { + "version": "5.0.1", + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/ansi-styles": { + "version": "6.2.1", + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/npm/node_modules/aproba": { + "version": "2.0.0", + "inBundle": true, + "license": "ISC" + }, + "node_modules/npm/node_modules/archy": { + "version": "1.0.0", + "inBundle": true, + "license": "MIT" + }, + "node_modules/npm/node_modules/balanced-match": { + "version": "1.0.2", + "inBundle": true, + "license": "MIT" + }, + "node_modules/npm/node_modules/bin-links": { + "version": "5.0.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "cmd-shim": "^7.0.0", + "npm-normalize-package-bin": "^4.0.0", + "proc-log": "^5.0.0", + "read-cmd-shim": "^5.0.0", + "write-file-atomic": "^6.0.0" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/binary-extensions": { + "version": "2.3.0", + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/npm/node_modules/brace-expansion": { + "version": "2.0.1", + "inBundle": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/npm/node_modules/cacache": { + "version": "19.0.1", + "inBundle": true, + "license": "ISC", + "dependencies": { + "@npmcli/fs": "^4.0.0", + "fs-minipass": "^3.0.0", + "glob": "^10.2.2", + "lru-cache": "^10.0.1", + "minipass": "^7.0.3", + "minipass-collect": "^2.0.1", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "p-map": "^7.0.2", + "ssri": "^12.0.0", + "tar": "^7.4.3", + "unique-filename": "^4.0.0" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/cacache/node_modules/chownr": { + "version": "3.0.0", + "inBundle": true, + "license": "BlueOak-1.0.0", + "engines": { + "node": ">=18" + } + }, + "node_modules/npm/node_modules/cacache/node_modules/minizlib": { + "version": "3.0.1", + "inBundle": true, + "license": "MIT", + "dependencies": { + "minipass": "^7.0.4", + "rimraf": "^5.0.5" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/npm/node_modules/cacache/node_modules/mkdirp": { + "version": "3.0.1", + "inBundle": true, + "license": "MIT", + "bin": { + "mkdirp": 
"dist/cjs/src/bin.js" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/npm/node_modules/cacache/node_modules/p-map": { + "version": "7.0.2", + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/npm/node_modules/cacache/node_modules/tar": { + "version": "7.4.3", + "inBundle": true, + "license": "ISC", + "dependencies": { + "@isaacs/fs-minipass": "^4.0.0", + "chownr": "^3.0.0", + "minipass": "^7.1.2", + "minizlib": "^3.0.1", + "mkdirp": "^3.0.1", + "yallist": "^5.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/npm/node_modules/cacache/node_modules/yallist": { + "version": "5.0.0", + "inBundle": true, + "license": "BlueOak-1.0.0", + "engines": { + "node": ">=18" + } + }, + "node_modules/npm/node_modules/chalk": { + "version": "5.3.0", + "inBundle": true, + "license": "MIT", + "engines": { + "node": "^12.17.0 || ^14.13 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/npm/node_modules/chownr": { + "version": "2.0.0", + "inBundle": true, + "license": "ISC", + "engines": { + "node": ">=10" + } + }, + "node_modules/npm/node_modules/ci-info": { + "version": "4.0.0", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/cidr-regex": { + "version": "4.1.1", + "inBundle": true, + "license": "BSD-2-Clause", + "dependencies": { + "ip-regex": "^5.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/npm/node_modules/clean-stack": { + "version": "2.2.0", + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/npm/node_modules/cli-columns": { + "version": "4.0.0", + "inBundle": true, + "license": "MIT", + "dependencies": { + "string-width": "^4.2.3", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">= 10" + } + }, + "node_modules/npm/node_modules/cmd-shim": { + "version": "7.0.0", + "inBundle": true, + "license": "ISC", + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/color-convert": { + "version": "2.0.1", + "inBundle": true, + "license": "MIT", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/npm/node_modules/color-name": { + "version": "1.1.4", + "inBundle": true, + "license": "MIT" + }, + "node_modules/npm/node_modules/common-ancestor-path": { + "version": "1.0.1", + "inBundle": true, + "license": "ISC" + }, + "node_modules/npm/node_modules/cross-spawn": { + "version": "7.0.3", + "inBundle": true, + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/npm/node_modules/cross-spawn/node_modules/which": { + "version": "2.0.2", + "inBundle": true, + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/npm/node_modules/cssesc": { + "version": "3.0.0", + "inBundle": true, + "license": "MIT", + "bin": { + "cssesc": "bin/cssesc" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/npm/node_modules/debug": { + "version": "4.3.6", + "inBundle": true, + "license": "MIT", + "dependencies": { 
+ "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/npm/node_modules/debug/node_modules/ms": { + "version": "2.1.2", + "inBundle": true, + "license": "MIT" + }, + "node_modules/npm/node_modules/diff": { + "version": "5.2.0", + "inBundle": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/npm/node_modules/eastasianwidth": { + "version": "0.2.0", + "inBundle": true, + "license": "MIT" + }, + "node_modules/npm/node_modules/emoji-regex": { + "version": "8.0.0", + "inBundle": true, + "license": "MIT" + }, + "node_modules/npm/node_modules/encoding": { + "version": "0.1.13", + "inBundle": true, + "license": "MIT", + "optional": true, + "dependencies": { + "iconv-lite": "^0.6.2" + } + }, + "node_modules/npm/node_modules/env-paths": { + "version": "2.2.1", + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/npm/node_modules/err-code": { + "version": "2.0.3", + "inBundle": true, + "license": "MIT" + }, + "node_modules/npm/node_modules/exponential-backoff": { + "version": "3.1.1", + "inBundle": true, + "license": "Apache-2.0" + }, + "node_modules/npm/node_modules/fastest-levenshtein": { + "version": "1.0.16", + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">= 4.9.1" + } + }, + "node_modules/npm/node_modules/foreground-child": { + "version": "3.3.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "cross-spawn": "^7.0.0", + "signal-exit": "^4.0.1" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/npm/node_modules/fs-minipass": { + "version": "3.0.3", + "inBundle": true, + "license": "ISC", + "dependencies": { + "minipass": "^7.0.3" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/glob": { + "version": "10.4.5", + "inBundle": true, + "license": "ISC", + "dependencies": { + "foreground-child": "^3.1.0", + "jackspeak": "^3.1.2", + "minimatch": "^9.0.4", + "minipass": "^7.1.2", + "package-json-from-dist": "^1.0.0", + "path-scurry": "^1.11.1" + }, + "bin": { + "glob": "dist/esm/bin.mjs" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/npm/node_modules/graceful-fs": { + "version": "4.2.11", + "inBundle": true, + "license": "ISC" + }, + "node_modules/npm/node_modules/hosted-git-info": { + "version": "8.0.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "lru-cache": "^10.0.1" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/http-cache-semantics": { + "version": "4.1.1", + "inBundle": true, + "license": "BSD-2-Clause" + }, + "node_modules/npm/node_modules/http-proxy-agent": { + "version": "7.0.2", + "inBundle": true, + "license": "MIT", + "dependencies": { + "agent-base": "^7.1.0", + "debug": "^4.3.4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/npm/node_modules/https-proxy-agent": { + "version": "7.0.5", + "inBundle": true, + "license": "MIT", + "dependencies": { + "agent-base": "^7.0.2", + "debug": "4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/npm/node_modules/iconv-lite": { + "version": "0.6.3", + "inBundle": true, + "license": "MIT", + "optional": true, + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/npm/node_modules/ignore-walk": { + 
"version": "7.0.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "minimatch": "^9.0.0" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/imurmurhash": { + "version": "0.1.4", + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/npm/node_modules/indent-string": { + "version": "4.0.0", + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/ini": { + "version": "5.0.0", + "inBundle": true, + "license": "ISC", + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/init-package-json": { + "version": "7.0.1", + "inBundle": true, + "license": "ISC", + "dependencies": { + "@npmcli/package-json": "^6.0.0", + "npm-package-arg": "^12.0.0", + "promzard": "^2.0.0", + "read": "^4.0.0", + "semver": "^7.3.5", + "validate-npm-package-license": "^3.0.4", + "validate-npm-package-name": "^6.0.0" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/ip-address": { + "version": "9.0.5", + "inBundle": true, + "license": "MIT", + "dependencies": { + "jsbn": "1.1.0", + "sprintf-js": "^1.1.3" + }, + "engines": { + "node": ">= 12" + } + }, + "node_modules/npm/node_modules/ip-regex": { + "version": "5.0.0", + "inBundle": true, + "license": "MIT", + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/npm/node_modules/is-cidr": { + "version": "5.1.0", + "inBundle": true, + "license": "BSD-2-Clause", + "dependencies": { + "cidr-regex": "^4.1.1" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/npm/node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/is-lambda": { + "version": "1.0.1", + "inBundle": true, + "license": "MIT" + }, + "node_modules/npm/node_modules/isexe": { + "version": "2.0.0", + "inBundle": true, + "license": "ISC" + }, + "node_modules/npm/node_modules/jackspeak": { + "version": "3.4.3", + "inBundle": true, + "license": "BlueOak-1.0.0", + "dependencies": { + "@isaacs/cliui": "^8.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "optionalDependencies": { + "@pkgjs/parseargs": "^0.11.0" + } + }, + "node_modules/npm/node_modules/jsbn": { + "version": "1.1.0", + "inBundle": true, + "license": "MIT" + }, + "node_modules/npm/node_modules/json-parse-even-better-errors": { + "version": "4.0.0", + "inBundle": true, + "license": "MIT", + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/json-stringify-nice": { + "version": "1.1.4", + "inBundle": true, + "license": "ISC", + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/npm/node_modules/jsonparse": { + "version": "1.3.1", + "engines": [ + "node >= 0.2.0" + ], + "inBundle": true, + "license": "MIT" + }, + "node_modules/npm/node_modules/just-diff": { + "version": "6.0.2", + "inBundle": true, + "license": "MIT" + }, + "node_modules/npm/node_modules/just-diff-apply": { + "version": "5.5.0", + "inBundle": true, + "license": "MIT" + }, + "node_modules/npm/node_modules/libnpmaccess": { + "version": "9.0.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "npm-package-arg": "^12.0.0", + "npm-registry-fetch": "^18.0.1" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + 
"node_modules/npm/node_modules/libnpmdiff": { + "version": "7.0.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "@npmcli/arborist": "^8.0.0", + "@npmcli/installed-package-contents": "^3.0.0", + "binary-extensions": "^2.3.0", + "diff": "^5.1.0", + "minimatch": "^9.0.4", + "npm-package-arg": "^12.0.0", + "pacote": "^19.0.0", + "tar": "^6.2.1" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/libnpmexec": { + "version": "9.0.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "@npmcli/arborist": "^8.0.0", + "@npmcli/run-script": "^9.0.1", + "ci-info": "^4.0.0", + "npm-package-arg": "^12.0.0", + "pacote": "^19.0.0", + "proc-log": "^5.0.0", + "read": "^4.0.0", + "read-package-json-fast": "^4.0.0", + "semver": "^7.3.7", + "walk-up-path": "^3.0.1" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/libnpmfund": { + "version": "6.0.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "@npmcli/arborist": "^8.0.0" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/libnpmhook": { + "version": "11.0.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "aproba": "^2.0.0", + "npm-registry-fetch": "^18.0.1" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/libnpmorg": { + "version": "7.0.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "aproba": "^2.0.0", + "npm-registry-fetch": "^18.0.1" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/libnpmpack": { + "version": "8.0.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "@npmcli/arborist": "^8.0.0", + "@npmcli/run-script": "^9.0.1", + "npm-package-arg": "^12.0.0", + "pacote": "^19.0.0" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/libnpmpublish": { + "version": "10.0.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "ci-info": "^4.0.0", + "normalize-package-data": "^7.0.0", + "npm-package-arg": "^12.0.0", + "npm-registry-fetch": "^18.0.1", + "proc-log": "^5.0.0", + "semver": "^7.3.7", + "sigstore": "^2.2.0", + "ssri": "^12.0.0" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/libnpmsearch": { + "version": "8.0.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "npm-registry-fetch": "^18.0.1" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/libnpmteam": { + "version": "7.0.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "aproba": "^2.0.0", + "npm-registry-fetch": "^18.0.1" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/libnpmversion": { + "version": "7.0.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "@npmcli/git": "^6.0.1", + "@npmcli/run-script": "^9.0.1", + "json-parse-even-better-errors": "^4.0.0", + "proc-log": "^5.0.0", + "semver": "^7.3.7" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/lru-cache": { + "version": "10.4.3", + "inBundle": true, + "license": "ISC" + }, + "node_modules/npm/node_modules/make-fetch-happen": { + "version": "14.0.1", + "inBundle": true, + "license": "ISC", + "dependencies": { + "@npmcli/agent": "^3.0.0", + "cacache": "^19.0.1", + "http-cache-semantics": "^4.1.1", + "minipass": "^7.0.2", + "minipass-fetch": "^4.0.0", + 
"minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "negotiator": "^0.6.3", + "proc-log": "^5.0.0", + "promise-retry": "^2.0.1", + "ssri": "^12.0.0" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/minimatch": { + "version": "9.0.5", + "inBundle": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/npm/node_modules/minipass": { + "version": "7.1.2", + "inBundle": true, + "license": "ISC", + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/npm/node_modules/minipass-collect": { + "version": "2.0.1", + "inBundle": true, + "license": "ISC", + "dependencies": { + "minipass": "^7.0.3" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/npm/node_modules/minipass-fetch": { + "version": "4.0.0", + "inBundle": true, + "license": "MIT", + "dependencies": { + "minipass": "^7.0.3", + "minipass-sized": "^1.0.3", + "minizlib": "^3.0.1" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + }, + "optionalDependencies": { + "encoding": "^0.1.13" + } + }, + "node_modules/npm/node_modules/minipass-fetch/node_modules/minizlib": { + "version": "3.0.1", + "inBundle": true, + "license": "MIT", + "dependencies": { + "minipass": "^7.0.4", + "rimraf": "^5.0.5" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/npm/node_modules/minipass-flush": { + "version": "1.0.5", + "inBundle": true, + "license": "ISC", + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/npm/node_modules/minipass-flush/node_modules/minipass": { + "version": "3.3.6", + "inBundle": true, + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/minipass-pipeline": { + "version": "1.2.4", + "inBundle": true, + "license": "ISC", + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/minipass-pipeline/node_modules/minipass": { + "version": "3.3.6", + "inBundle": true, + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/minipass-sized": { + "version": "1.0.3", + "inBundle": true, + "license": "ISC", + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/minipass-sized/node_modules/minipass": { + "version": "3.3.6", + "inBundle": true, + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/minizlib": { + "version": "2.1.2", + "inBundle": true, + "license": "MIT", + "dependencies": { + "minipass": "^3.0.0", + "yallist": "^4.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/npm/node_modules/minizlib/node_modules/minipass": { + "version": "3.3.6", + "inBundle": true, + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/mkdirp": { + "version": "1.0.4", + "inBundle": true, + "license": "MIT", + "bin": { + "mkdirp": "bin/cmd.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/npm/node_modules/ms": { + "version": "2.1.3", + "inBundle": true, + "license": "MIT" + }, + "node_modules/npm/node_modules/mute-stream": { + "version": "2.0.0", + 
"inBundle": true, + "license": "ISC", + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/negotiator": { + "version": "0.6.3", + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/npm/node_modules/node-gyp": { + "version": "10.2.0", + "inBundle": true, + "license": "MIT", + "dependencies": { + "env-paths": "^2.2.0", + "exponential-backoff": "^3.1.1", + "glob": "^10.3.10", + "graceful-fs": "^4.2.6", + "make-fetch-happen": "^13.0.0", + "nopt": "^7.0.0", + "proc-log": "^4.1.0", + "semver": "^7.3.5", + "tar": "^6.2.1", + "which": "^4.0.0" + }, + "bin": { + "node-gyp": "bin/node-gyp.js" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/agent": { + "version": "2.2.2", + "inBundle": true, + "license": "ISC", + "dependencies": { + "agent-base": "^7.1.0", + "http-proxy-agent": "^7.0.0", + "https-proxy-agent": "^7.0.1", + "lru-cache": "^10.0.1", + "socks-proxy-agent": "^8.0.3" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs": { + "version": "3.1.1", + "inBundle": true, + "license": "ISC", + "dependencies": { + "semver": "^7.3.5" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/node-gyp/node_modules/abbrev": { + "version": "2.0.0", + "inBundle": true, + "license": "ISC", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/node-gyp/node_modules/cacache": { + "version": "18.0.4", + "inBundle": true, + "license": "ISC", + "dependencies": { + "@npmcli/fs": "^3.1.0", + "fs-minipass": "^3.0.0", + "glob": "^10.2.2", + "lru-cache": "^10.0.1", + "minipass": "^7.0.3", + "minipass-collect": "^2.0.1", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "p-map": "^4.0.0", + "ssri": "^10.0.0", + "tar": "^6.1.11", + "unique-filename": "^3.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/node-gyp/node_modules/isexe": { + "version": "3.1.1", + "inBundle": true, + "license": "ISC", + "engines": { + "node": ">=16" + } + }, + "node_modules/npm/node_modules/node-gyp/node_modules/make-fetch-happen": { + "version": "13.0.1", + "inBundle": true, + "license": "ISC", + "dependencies": { + "@npmcli/agent": "^2.0.0", + "cacache": "^18.0.0", + "http-cache-semantics": "^4.1.1", + "is-lambda": "^1.0.1", + "minipass": "^7.0.2", + "minipass-fetch": "^3.0.0", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "negotiator": "^0.6.3", + "proc-log": "^4.2.0", + "promise-retry": "^2.0.1", + "ssri": "^10.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/node-gyp/node_modules/minipass-fetch": { + "version": "3.0.5", + "inBundle": true, + "license": "MIT", + "dependencies": { + "minipass": "^7.0.3", + "minipass-sized": "^1.0.3", + "minizlib": "^2.1.2" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "optionalDependencies": { + "encoding": "^0.1.13" + } + }, + "node_modules/npm/node_modules/node-gyp/node_modules/nopt": { + "version": "7.2.1", + "inBundle": true, + "license": "ISC", + "dependencies": { + "abbrev": "^2.0.0" + }, + "bin": { + "nopt": "bin/nopt.js" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/node-gyp/node_modules/proc-log": { + "version": "4.2.0", + "inBundle": true, + 
"license": "ISC", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/node-gyp/node_modules/ssri": { + "version": "10.0.6", + "inBundle": true, + "license": "ISC", + "dependencies": { + "minipass": "^7.0.3" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/node-gyp/node_modules/unique-filename": { + "version": "3.0.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "unique-slug": "^4.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/node-gyp/node_modules/unique-slug": { + "version": "4.0.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "imurmurhash": "^0.1.4" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/node-gyp/node_modules/which": { + "version": "4.0.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "isexe": "^3.1.1" + }, + "bin": { + "node-which": "bin/which.js" + }, + "engines": { + "node": "^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/nopt": { + "version": "8.0.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "abbrev": "^2.0.0" + }, + "bin": { + "nopt": "bin/nopt.js" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/nopt/node_modules/abbrev": { + "version": "2.0.0", + "inBundle": true, + "license": "ISC", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/normalize-package-data": { + "version": "7.0.0", + "inBundle": true, + "license": "BSD-2-Clause", + "dependencies": { + "hosted-git-info": "^8.0.0", + "semver": "^7.3.5", + "validate-npm-package-license": "^3.0.4" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/npm-audit-report": { + "version": "6.0.0", + "inBundle": true, + "license": "ISC", + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/npm-bundled": { + "version": "4.0.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "npm-normalize-package-bin": "^4.0.0" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/npm-install-checks": { + "version": "7.1.0", + "inBundle": true, + "license": "BSD-2-Clause", + "dependencies": { + "semver": "^7.1.1" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/npm-normalize-package-bin": { + "version": "4.0.0", + "inBundle": true, + "license": "ISC", + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/npm-package-arg": { + "version": "12.0.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "hosted-git-info": "^8.0.0", + "proc-log": "^5.0.0", + "semver": "^7.3.5", + "validate-npm-package-name": "^6.0.0" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/npm-packlist": { + "version": "9.0.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "ignore-walk": "^7.0.0" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/npm-pick-manifest": { + "version": "10.0.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "npm-install-checks": "^7.1.0", + "npm-normalize-package-bin": "^4.0.0", + "npm-package-arg": "^12.0.0", + "semver": "^7.3.5" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + 
"node_modules/npm/node_modules/npm-profile": { + "version": "11.0.1", + "inBundle": true, + "license": "ISC", + "dependencies": { + "npm-registry-fetch": "^18.0.0", + "proc-log": "^5.0.0" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/npm-registry-fetch": { + "version": "18.0.1", + "inBundle": true, + "license": "ISC", + "dependencies": { + "@npmcli/redact": "^3.0.0", + "jsonparse": "^1.3.1", + "make-fetch-happen": "^14.0.0", + "minipass": "^7.0.2", + "minipass-fetch": "^4.0.0", + "minizlib": "^3.0.1", + "npm-package-arg": "^12.0.0", + "proc-log": "^5.0.0" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/npm-registry-fetch/node_modules/minizlib": { + "version": "3.0.1", + "inBundle": true, + "license": "MIT", + "dependencies": { + "minipass": "^7.0.4", + "rimraf": "^5.0.5" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/npm/node_modules/npm-user-validate": { + "version": "3.0.0", + "inBundle": true, + "license": "BSD-2-Clause", + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/p-map": { + "version": "4.0.0", + "inBundle": true, + "license": "MIT", + "dependencies": { + "aggregate-error": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/npm/node_modules/package-json-from-dist": { + "version": "1.0.0", + "inBundle": true, + "license": "BlueOak-1.0.0" + }, + "node_modules/npm/node_modules/pacote": { + "version": "19.0.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "@npmcli/git": "^6.0.0", + "@npmcli/installed-package-contents": "^3.0.0", + "@npmcli/package-json": "^6.0.0", + "@npmcli/promise-spawn": "^8.0.0", + "@npmcli/run-script": "^9.0.0", + "cacache": "^19.0.0", + "fs-minipass": "^3.0.0", + "minipass": "^7.0.2", + "npm-package-arg": "^12.0.0", + "npm-packlist": "^9.0.0", + "npm-pick-manifest": "^10.0.0", + "npm-registry-fetch": "^18.0.0", + "proc-log": "^5.0.0", + "promise-retry": "^2.0.1", + "sigstore": "^2.2.0", + "ssri": "^12.0.0", + "tar": "^6.1.11" + }, + "bin": { + "pacote": "bin/index.js" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/parse-conflict-json": { + "version": "4.0.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "json-parse-even-better-errors": "^4.0.0", + "just-diff": "^6.0.0", + "just-diff-apply": "^5.2.0" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/path-key": { + "version": "3.1.1", + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/path-scurry": { + "version": "1.11.1", + "inBundle": true, + "license": "BlueOak-1.0.0", + "dependencies": { + "lru-cache": "^10.2.0", + "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" + }, + "engines": { + "node": ">=16 || 14 >=14.18" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/npm/node_modules/postcss-selector-parser": { + "version": "6.1.2", + "inBundle": true, + "license": "MIT", + "dependencies": { + "cssesc": "^3.0.0", + "util-deprecate": "^1.0.2" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/npm/node_modules/proc-log": { + "version": "5.0.0", + "inBundle": true, + "license": "ISC", + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/proggy": { + "version": "3.0.0", + "inBundle": true, + "license": "ISC", + 
"engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/promise-all-reject-late": { + "version": "1.0.1", + "inBundle": true, + "license": "ISC", + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/npm/node_modules/promise-call-limit": { + "version": "3.0.1", + "inBundle": true, + "license": "ISC", + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/npm/node_modules/promise-inflight": { + "version": "1.0.1", + "inBundle": true, + "license": "ISC" + }, + "node_modules/npm/node_modules/promise-retry": { + "version": "2.0.1", + "inBundle": true, + "license": "MIT", + "dependencies": { + "err-code": "^2.0.2", + "retry": "^0.12.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/npm/node_modules/promzard": { + "version": "2.0.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "read": "^4.0.0" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/qrcode-terminal": { + "version": "0.12.0", + "inBundle": true, + "bin": { + "qrcode-terminal": "bin/qrcode-terminal.js" + } + }, + "node_modules/npm/node_modules/read": { + "version": "4.0.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "mute-stream": "^2.0.0" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/read-cmd-shim": { + "version": "5.0.0", + "inBundle": true, + "license": "ISC", + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/read-package-json-fast": { + "version": "4.0.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "json-parse-even-better-errors": "^4.0.0", + "npm-normalize-package-bin": "^4.0.0" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/retry": { + "version": "0.12.0", + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/npm/node_modules/rimraf": { + "version": "5.0.10", + "inBundle": true, + "license": "ISC", + "dependencies": { + "glob": "^10.3.7" + }, + "bin": { + "rimraf": "dist/esm/bin.mjs" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/npm/node_modules/safer-buffer": { + "version": "2.1.2", + "inBundle": true, + "license": "MIT", + "optional": true + }, + "node_modules/npm/node_modules/semver": { + "version": "7.6.3", + "inBundle": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/npm/node_modules/shebang-command": { + "version": "2.0.0", + "inBundle": true, + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/shebang-regex": { + "version": "3.0.0", + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/signal-exit": { + "version": "4.1.0", + "inBundle": true, + "license": "ISC", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/npm/node_modules/sigstore": { + "version": "2.3.1", + "inBundle": true, + "license": "Apache-2.0", + "dependencies": { + "@sigstore/bundle": "^2.3.2", + "@sigstore/core": "^1.0.0", + "@sigstore/protobuf-specs": "^0.3.2", + "@sigstore/sign": "^2.3.2", + "@sigstore/tuf": "^2.3.4", + "@sigstore/verify": "^1.2.1" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + 
"node_modules/npm/node_modules/smart-buffer": { + "version": "4.2.0", + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">= 6.0.0", + "npm": ">= 3.0.0" + } + }, + "node_modules/npm/node_modules/socks": { + "version": "2.8.3", + "inBundle": true, + "license": "MIT", + "dependencies": { + "ip-address": "^9.0.5", + "smart-buffer": "^4.2.0" + }, + "engines": { + "node": ">= 10.0.0", + "npm": ">= 3.0.0" + } + }, + "node_modules/npm/node_modules/socks-proxy-agent": { + "version": "8.0.4", + "inBundle": true, + "license": "MIT", + "dependencies": { + "agent-base": "^7.1.1", + "debug": "^4.3.4", + "socks": "^2.8.3" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/npm/node_modules/spdx-correct": { + "version": "3.2.0", + "inBundle": true, + "license": "Apache-2.0", + "dependencies": { + "spdx-expression-parse": "^3.0.0", + "spdx-license-ids": "^3.0.0" + } + }, + "node_modules/npm/node_modules/spdx-correct/node_modules/spdx-expression-parse": { + "version": "3.0.1", + "inBundle": true, + "license": "MIT", + "dependencies": { + "spdx-exceptions": "^2.1.0", + "spdx-license-ids": "^3.0.0" + } + }, + "node_modules/npm/node_modules/spdx-exceptions": { + "version": "2.5.0", + "inBundle": true, + "license": "CC-BY-3.0" + }, + "node_modules/npm/node_modules/spdx-expression-parse": { + "version": "4.0.0", + "inBundle": true, + "license": "MIT", + "dependencies": { + "spdx-exceptions": "^2.1.0", + "spdx-license-ids": "^3.0.0" + } + }, + "node_modules/npm/node_modules/spdx-license-ids": { + "version": "3.0.18", + "inBundle": true, + "license": "CC0-1.0" + }, + "node_modules/npm/node_modules/sprintf-js": { + "version": "1.1.3", + "inBundle": true, + "license": "BSD-3-Clause" + }, + "node_modules/npm/node_modules/ssri": { + "version": "12.0.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "minipass": "^7.0.3" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/string-width": { + "version": "4.2.3", + "inBundle": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/string-width-cjs": { + "name": "string-width", + "version": "4.2.3", + "inBundle": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/strip-ansi": { + "version": "6.0.1", + "inBundle": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/strip-ansi-cjs": { + "name": "strip-ansi", + "version": "6.0.1", + "inBundle": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/supports-color": { + "version": "9.4.0", + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/npm/node_modules/tar": { + "version": "6.2.1", + "inBundle": true, + "license": "ISC", + "dependencies": { + "chownr": "^2.0.0", + "fs-minipass": "^2.0.0", + "minipass": "^5.0.0", + "minizlib": "^2.1.1", + "mkdirp": "^1.0.3", + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/npm/node_modules/tar/node_modules/fs-minipass": { + "version": "2.1.0", 
+ "inBundle": true, + "license": "ISC", + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/npm/node_modules/tar/node_modules/fs-minipass/node_modules/minipass": { + "version": "3.3.6", + "inBundle": true, + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/tar/node_modules/minipass": { + "version": "5.0.0", + "inBundle": true, + "license": "ISC", + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/text-table": { + "version": "0.2.0", + "inBundle": true, + "license": "MIT" + }, + "node_modules/npm/node_modules/tiny-relative-date": { + "version": "1.3.0", + "inBundle": true, + "license": "MIT" + }, + "node_modules/npm/node_modules/treeverse": { + "version": "3.0.0", + "inBundle": true, + "license": "ISC", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/tuf-js": { + "version": "2.2.1", + "inBundle": true, + "license": "MIT", + "dependencies": { + "@tufjs/models": "2.0.1", + "debug": "^4.3.4", + "make-fetch-happen": "^13.0.1" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/tuf-js/node_modules/@npmcli/agent": { + "version": "2.2.2", + "inBundle": true, + "license": "ISC", + "dependencies": { + "agent-base": "^7.1.0", + "http-proxy-agent": "^7.0.0", + "https-proxy-agent": "^7.0.1", + "lru-cache": "^10.0.1", + "socks-proxy-agent": "^8.0.3" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/tuf-js/node_modules/@npmcli/fs": { + "version": "3.1.1", + "inBundle": true, + "license": "ISC", + "dependencies": { + "semver": "^7.3.5" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/tuf-js/node_modules/cacache": { + "version": "18.0.4", + "inBundle": true, + "license": "ISC", + "dependencies": { + "@npmcli/fs": "^3.1.0", + "fs-minipass": "^3.0.0", + "glob": "^10.2.2", + "lru-cache": "^10.0.1", + "minipass": "^7.0.3", + "minipass-collect": "^2.0.1", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "p-map": "^4.0.0", + "ssri": "^10.0.0", + "tar": "^6.1.11", + "unique-filename": "^3.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/tuf-js/node_modules/make-fetch-happen": { + "version": "13.0.1", + "inBundle": true, + "license": "ISC", + "dependencies": { + "@npmcli/agent": "^2.0.0", + "cacache": "^18.0.0", + "http-cache-semantics": "^4.1.1", + "is-lambda": "^1.0.1", + "minipass": "^7.0.2", + "minipass-fetch": "^3.0.0", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "negotiator": "^0.6.3", + "proc-log": "^4.2.0", + "promise-retry": "^2.0.1", + "ssri": "^10.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/tuf-js/node_modules/minipass-fetch": { + "version": "3.0.5", + "inBundle": true, + "license": "MIT", + "dependencies": { + "minipass": "^7.0.3", + "minipass-sized": "^1.0.3", + "minizlib": "^2.1.2" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "optionalDependencies": { + "encoding": "^0.1.13" + } + }, + "node_modules/npm/node_modules/tuf-js/node_modules/proc-log": { + "version": "4.2.0", + "inBundle": true, + "license": "ISC", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/tuf-js/node_modules/ssri": { + "version": "10.0.6", + "inBundle": 
true, + "license": "ISC", + "dependencies": { + "minipass": "^7.0.3" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/tuf-js/node_modules/unique-filename": { + "version": "3.0.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "unique-slug": "^4.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/tuf-js/node_modules/unique-slug": { + "version": "4.0.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "imurmurhash": "^0.1.4" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/unique-filename": { + "version": "4.0.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "unique-slug": "^5.0.0" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/unique-slug": { + "version": "5.0.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "imurmurhash": "^0.1.4" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/util-deprecate": { + "version": "1.0.2", + "inBundle": true, + "license": "MIT" + }, + "node_modules/npm/node_modules/validate-npm-package-license": { + "version": "3.0.4", + "inBundle": true, + "license": "Apache-2.0", + "dependencies": { + "spdx-correct": "^3.0.0", + "spdx-expression-parse": "^3.0.0" + } + }, + "node_modules/npm/node_modules/validate-npm-package-license/node_modules/spdx-expression-parse": { + "version": "3.0.1", + "inBundle": true, + "license": "MIT", + "dependencies": { + "spdx-exceptions": "^2.1.0", + "spdx-license-ids": "^3.0.0" + } + }, + "node_modules/npm/node_modules/validate-npm-package-name": { + "version": "6.0.0", + "inBundle": true, + "license": "ISC", + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/walk-up-path": { + "version": "3.0.1", + "inBundle": true, + "license": "ISC" + }, + "node_modules/npm/node_modules/which": { + "version": "5.0.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "isexe": "^3.1.1" + }, + "bin": { + "node-which": "bin/which.js" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/which/node_modules/isexe": { + "version": "3.1.1", + "inBundle": true, + "license": "ISC", + "engines": { + "node": ">=16" + } + }, + "node_modules/npm/node_modules/wrap-ansi": { + "version": "8.1.0", + "inBundle": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^6.1.0", + "string-width": "^5.0.1", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/npm/node_modules/wrap-ansi-cjs": { + "name": "wrap-ansi", + "version": "7.0.0", + "inBundle": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/npm/node_modules/wrap-ansi-cjs/node_modules/ansi-styles": { + "version": "4.3.0", + "inBundle": true, + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/npm/node_modules/wrap-ansi/node_modules/ansi-regex": { + "version": "6.0.1", + "inBundle": true, + "license": "MIT", + "engines": { 
+ "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/npm/node_modules/wrap-ansi/node_modules/emoji-regex": { + "version": "9.2.2", + "inBundle": true, + "license": "MIT" + }, + "node_modules/npm/node_modules/wrap-ansi/node_modules/string-width": { + "version": "5.1.2", + "inBundle": true, + "license": "MIT", + "dependencies": { + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/npm/node_modules/wrap-ansi/node_modules/strip-ansi": { + "version": "7.1.0", + "inBundle": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/npm/node_modules/write-file-atomic": { + "version": "6.0.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "imurmurhash": "^0.1.4", + "signal-exit": "^4.0.1" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/yallist": { + "version": "4.0.0", + "inBundle": true, + "license": "ISC" + }, + "node_modules/proxy-from-env": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", + "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", + "license": "MIT" + }, + "node_modules/typescript": { + "version": "5.6.2", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.6.2.tgz", + "integrity": "sha512-NW8ByodCSNCwZeghjN3o+JX5OFH0Ojg6sadjEKY4huZ52TqbJTJnDo5+Tw98lSy63NZvi4n+ez5m2u5d4PkZyw==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/undici-types": { + "version": "6.19.8", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz", + "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==", + "license": "MIT" + } + } +} diff --git a/test/performance/charts/package.json b/test/performance/charts/package.json new file mode 100644 index 000000000..78a986709 --- /dev/null +++ b/test/performance/charts/package.json @@ -0,0 +1,25 @@ +{ + "name": "charts", + "version": "1.0.0", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1", + "build": "npx tsc" + }, + "author": "", + "license": "ISC", + "description": "", + "dependencies": { + "@types/node": "^22.7.4", + "@types/promise-fs": "^2.1.5", + "chart.js": "^4.4.4", + "chart.js-image": "^6.1.3", + "chartjs-plugin-annotation": "^3.0.1", + "chartjs-to-image": "^1.2.2", + "install": "^0.13.0", + "npm": "^10.9.0" + }, + "devDependencies": { + "typescript": "^5.6.2" + } +} diff --git a/test/performance/charts/src/colors.js b/test/performance/charts/src/colors.js new file mode 100644 index 000000000..a66518a80 --- /dev/null +++ b/test/performance/charts/src/colors.js @@ -0,0 +1,21 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.NAMED_COLORS = exports.CHART_COLORS = void 0; +exports.CHART_COLORS = { + red: 'rgb(255, 99, 132)', + orange: 'rgb(255, 159, 64)', + yellow: 'rgb(255, 205, 86)', + green: 'rgb(75, 192, 192)', + blue: 'rgb(54, 162, 235)', + purple: 'rgb(153, 102, 255)', + grey: 'rgb(201, 203, 207)' +}; 
+exports.NAMED_COLORS = [ + exports.CHART_COLORS.red, + exports.CHART_COLORS.orange, + exports.CHART_COLORS.yellow, + exports.CHART_COLORS.green, + exports.CHART_COLORS.blue, + exports.CHART_COLORS.purple, + exports.CHART_COLORS.grey, +]; diff --git a/test/performance/charts/src/colors.ts b/test/performance/charts/src/colors.ts new file mode 100644 index 000000000..e66d42ec2 --- /dev/null +++ b/test/performance/charts/src/colors.ts @@ -0,0 +1,19 @@ +export const CHART_COLORS = { + red: 'rgb(255, 99, 132)', + orange: 'rgb(255, 159, 64)', + yellow: 'rgb(255, 205, 86)', + green: 'rgb(75, 192, 192)', + blue: 'rgb(54, 162, 235)', + purple: 'rgb(153, 102, 255)', + grey: 'rgb(201, 203, 207)' +}; + +export const NAMED_COLORS = [ + CHART_COLORS.red, + CHART_COLORS.orange, + CHART_COLORS.yellow, + CHART_COLORS.green, + CHART_COLORS.blue, + CHART_COLORS.purple, + CHART_COLORS.grey, +]; \ No newline at end of file diff --git a/test/performance/charts/src/graphs.js b/test/performance/charts/src/graphs.js new file mode 100644 index 000000000..a38670561 --- /dev/null +++ b/test/performance/charts/src/graphs.js @@ -0,0 +1,114 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.exportLatencyGraph = exports.exportTPSGraph = void 0; +const colors_1 = require("./colors"); +const chartjs_to_image_1 = __importDefault(require("chartjs-to-image")); +const exportTPSGraph = (configuration, result) => __awaiter(void 0, void 0, void 0, function* () { + const scripts = []; + for (let script in result) { + scripts.push(script); + } + const reportsForAnyScript = result[scripts[0]]; + if (!reportsForAnyScript) { + throw new Error("no data"); + } + const datasets = scripts.map(((script, index) => { + return { + label: script, + data: result[script].map(r => r.tps), + backgroundColor: colors_1.NAMED_COLORS[index % scripts.length], + }; + })); + const config = { + type: 'bar', + data: { + labels: reportsForAnyScript + .map(r => r.configuration.name), + datasets: datasets + }, + options: { + plugins: { + title: { + display: true, + text: 'TPS' + }, + }, + responsive: true, + interaction: { + intersect: false, + }, + scales: { + x: { + stacked: true, + }, + y: { + stacked: true + } + } + } + }; + const chart = new chartjs_to_image_1.default(); + chart.setConfig(config); + yield chart.toFile(configuration.output); +}); +exports.exportTPSGraph = exportTPSGraph; +const exportLatencyGraph = (configuration, key, result) => __awaiter(void 0, void 0, void 0, function* () { + const scripts = []; + for (let script in result) { + scripts.push(script); + } + const reportsForAnyScript = result[scripts[0]]; + if (!reportsForAnyScript) { + throw new Error("no data"); + } + const datasets = scripts.map(((script, index) => { + return { + label: script, + data: result[script].map(r => r.metrics.Time[key].substring(0, r.metrics.Time[key].length - 2)), + backgroundColor: colors_1.NAMED_COLORS[index % scripts.length], + }; + })); + const config = { + type: 'bar', + data: { + labels: reportsForAnyScript + .map(r => r.configuration.name), + datasets: datasets + }, + options: { + plugins: { + title: { + display: true, + text: 'TPS' + }, + }, + interaction: { + intersect: false, + }, + scales: { + x: { + stacked: true, + }, + y: { + stacked: true + } + } + } + }; + const chart = new chartjs_to_image_1.default(); + chart.setConfig(config); + yield chart.toFile(configuration.output); +}); +exports.exportLatencyGraph = exportLatencyGraph; diff --git a/test/performance/charts/src/graphs.ts b/test/performance/charts/src/graphs.ts new file mode 100644 index 000000000..76a450463 --- /dev/null +++ b/test/performance/charts/src/graphs.ts @@ -0,0 +1,196 @@ +import {NAMED_COLORS} from "./colors"; +import ChartJsImage from "chartjs-to-image"; +import {ChartConfiguration, ChartDataset, Chart} from "chart.js"; +import annotationPlugin from 'chartjs-plugin-annotation'; + +Chart.register(annotationPlugin); + +export const exportTPSGraph = async (configuration: {output: string}, result: BenchmarkResult) => { + + const scripts = []; + for (let script in result) { + scripts.push(script); + } + + const reportsForAnyScript = result[scripts[0]]; + if (!reportsForAnyScript) { + throw new Error("no data"); + } + + const datasets = scripts.map(((script, index): ChartDataset => { + return { + label: script, + data: result[script].map(r => r.TPS), + backgroundColor: NAMED_COLORS[index % scripts.length], + } + })); + + const config: ChartConfiguration = { + type: 'bar', + data: { + labels: reportsForAnyScript + .map(r => r.Configuration.Name), + datasets: datasets + }, + options: { + plugins: { + title: { + display: true, + text: 
'TPS' + }, + }, + responsive: true, + interaction: { + intersect: false, + }, + scales: { + x: { + stacked: true, + }, + y: { + stacked: true + } + } + } + }; + + const chart = new ChartJsImage(); + chart.setConfig(config); + await chart.toFile(configuration.output); +} + +export const exportLatencyGraph = async (configuration: {output: string}, key: keyof MetricsTime, result: BenchmarkResult) => { + const scripts = []; + for (let script in result) { + scripts.push(script); + } + + const reportsForAnyScript = result[scripts[0]]; + if (!reportsForAnyScript) { + throw new Error("no data"); + } + + const datasets = scripts.map(((script, index): ChartDataset => { + return { + label: script, + data: result[script].map(r => parseFloat(r.Metrics.Time[key].substring(0, r.Metrics.Time[key].length-2))), + backgroundColor: NAMED_COLORS[index % scripts.length], + } + })); + + const config: ChartConfiguration = { + type: 'bar', + data: { + labels: reportsForAnyScript + .map(r => r.Configuration.Name), + datasets: datasets + }, + options: { + plugins: { + title: { + display: true, + text: `Latency (${key})` + }, + }, + interaction: { + intersect: false, + }, + scales: { + x: { + stacked: true, + }, + y: { + stacked: true + } + } + } + }; + + const chart = new ChartJsImage(); + chart.setConfig(config); + await chart.toFile(configuration.output); +} + +export const exportDatabaseStats = async ( + output: string, + result: BenchmarkResult, +) => { + + const scope = 'github.com/uptrace/opentelemetry-go-extra/otelsql'; + + const scripts = []; + for (let script in result) { + scripts.push(script); + } + + const reportsForAnyScript = result[scripts[0]]; + if (!reportsForAnyScript) { + throw new Error("no data"); + } + + const datasets = scripts.map(((script, index): ChartDataset => { + return { + label: script, + data: result[script].map(r => r.InternalMetrics.ScopeMetrics + .find(scopeMetric => scopeMetric.Scope.Name == scope)! + .Metrics + .find(metric => metric.Name == 'go.sql.connections_open')! + .Data + .DataPoints[0] + .Value + ), + backgroundColor: NAMED_COLORS[index % scripts.length], + } + })); + + const maxConnection = reportsForAnyScript[0].InternalMetrics.ScopeMetrics + .find(scopeMetric => scopeMetric.Scope.Name == scope)! + .Metrics + .find(metric => metric.Name == 'go.sql.connections_max_open')!
+ .Data + .DataPoints[0] + .Value + + const config: ChartConfiguration = { + type: 'bar', + data: { + labels: reportsForAnyScript.map(r => r.Configuration.Name), + datasets: datasets + }, + options: { + plugins: { + title: { + display: true, + text: 'Database connections' + }, + annotation: { + annotations: { + line1: { + type: 'line', + yMin: maxConnection, + yMax: maxConnection, + borderColor: 'rgb(255, 99, 132)', + borderWidth: 2, + } + } + } + }, + interaction: { + intersect: false, + }, + scales: { + x: { + stacked: false, + }, + y: { + stacked: false + } + } + } + }; + + const chart = new ChartJsImage(); + chart.setConfig(config); + chart.setChartJsVersion('4') + await chart.toFile(output); +} \ No newline at end of file diff --git a/test/performance/charts/src/report.js b/test/performance/charts/src/report.js new file mode 100644 index 000000000..3918c74e4 --- /dev/null +++ b/test/performance/charts/src/report.js @@ -0,0 +1 @@ +"use strict"; diff --git a/test/performance/charts/src/report.ts b/test/performance/charts/src/report.ts new file mode 100644 index 000000000..13a56d890 --- /dev/null +++ b/test/performance/charts/src/report.ts @@ -0,0 +1,86 @@ +interface MetricsTime { + Cumulative: string // Cumulative time of all sampled events. + HMean: string // Event duration harmonic mean. + Avg: string // Event duration average. + P50: string // Event duration nth percentiles .. + P75: string + P95: string + P99: string + P999: string + Long5p: string // Average of the longest 5% event durations. + Short5p: string // Average of the shortest 5% event durations. + Max: string // Highest event duration. + Min: string // Lowest event duration. + StdDev: string // Standard deviation. + Range: string // Event duration range (Max-Min). +} + +interface MetricsRate { + Second: number +} + +interface Metrics { + Time: MetricsTime, + Rate: MetricsRate, + Histogram: Map[] // Frequency distribution of event durations in len(Histogram) bins of HistogramBinSize. + HistogramBinSize: string // The width of a histogram bin in time. + Samples: number // Number of events included in the sample set. + Count: number // Total number of events observed. +} + +interface Configuration { + Name: string, + FeatureSet: Map +} + +interface DataPoint { + Attributes: string[] + Bounds: number[] + BucketCounts: number[] + Count: number + Max: number + Min: number + StartTime: string + Sum: number + Time: string + Value: number +} + +interface OtelMetric { + Data: { + DataPoints: DataPoint[] + Temporality: string + }, + Description: string + Name: string + Unit: string +} + +interface Scope { + Name: string + SchemaURL: string + Version: string +} + +interface ScopeMetric { + Metrics: OtelMetric[] + Scope: Scope +} + +interface InternalMetrics { + ScopeMetrics: ScopeMetric[] +} + +interface Report { + Start: string, + End: string, + Metrics: Metrics, + Scenario: string, + Configuration: Configuration, + TPS: number + InternalMetrics: InternalMetrics +} + +interface BenchmarkResult { + [key: string]: Report[]; +} \ No newline at end of file diff --git a/test/performance/charts/tsconfig.json b/test/performance/charts/tsconfig.json new file mode 100644 index 000000000..1ae7f5bf2 --- /dev/null +++ b/test/performance/charts/tsconfig.json @@ -0,0 +1,112 @@ +{ + "compilerOptions": { + /* Visit https://aka.ms/tsconfig to read more about this file */ + "types": [ + "node" + ], + /* Projects */ + // "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. 
*/ + // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */ + // "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */ + // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */ + // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */ + // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ + + /* Language and Environment */ + "target": "es2016", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */ + // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */ + // "jsx": "preserve", /* Specify what JSX code is generated. */ + // "experimentalDecorators": true, /* Enable experimental support for legacy experimental decorators. */ + // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */ + // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */ + // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */ + // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */ + // "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */ + // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */ + // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */ + // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */ + + /* Modules */ + "module": "commonjs", /* Specify what module code is generated. */ + // "rootDir": "./", /* Specify the root folder within your source files. */ + // "moduleResolution": "node10", /* Specify how TypeScript looks up a file from a given module specifier. */ + // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */ + // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */ + // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */ + // "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */ + // "types": [], /* Specify type package names to be included without being referenced in a source file. */ + // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ + // "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */ + // "allowImportingTsExtensions": true, /* Allow imports to include TypeScript file extensions. Requires '--moduleResolution bundler' and either '--noEmit' or '--emitDeclarationOnly' to be set. */ + // "resolvePackageJsonExports": true, /* Use the package.json 'exports' field when resolving package imports. */ + // "resolvePackageJsonImports": true, /* Use the package.json 'imports' field when resolving imports. */ + // "customConditions": [], /* Conditions to set in addition to the resolver-specific defaults when resolving imports. 
*/ + // "noUncheckedSideEffectImports": true, /* Check side effect imports. */ + // "resolveJsonModule": true, /* Enable importing .json files. */ + // "allowArbitraryExtensions": true, /* Enable importing files with any extension, provided a declaration file is present. */ + // "noResolve": true, /* Disallow 'import's, 'require's or ''s from expanding the number of files TypeScript should add to a project. */ + + /* JavaScript Support */ + // "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */ + // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */ + // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */ + + /* Emit */ + // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */ + // "declarationMap": true, /* Create sourcemaps for d.ts files. */ + // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */ + // "sourceMap": true, /* Create source map files for emitted JavaScript files. */ + // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */ + // "noEmit": true, /* Disable emitting files from a compilation. */ + // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */ + // "outDir": "./", /* Specify an output folder for all emitted files. */ + // "removeComments": true, /* Disable emitting comments. */ + // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */ + // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */ + // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */ + // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ + // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */ + // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */ + // "newLine": "crlf", /* Set the newline character for emitting files. */ + // "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */ + // "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */ + // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */ + // "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */ + // "declarationDir": "./", /* Specify the output directory for generated declaration files. */ + + /* Interop Constraints */ + // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */ + // "verbatimModuleSyntax": true, /* Do not transform or elide any imports or exports not marked as type-only, ensuring they are written in the output file's format based on the 'module' setting. */ + // "isolatedDeclarations": true, /* Require sufficient annotation on exports so other tools can trivially generate declaration files. */ + // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. 
*/ + "esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */ + // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */ + "forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */ + + /* Type Checking */ + "strict": true, /* Enable all strict type-checking options. */ + // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */ + // "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */ + // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */ + // "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */ + // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */ + // "strictBuiltinIteratorReturn": true, /* Built-in iterators are instantiated with a 'TReturn' type of 'undefined' instead of 'any'. */ + // "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */ + // "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */ + // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */ + // "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */ + // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */ + // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */ + // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */ + // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */ + // "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */ + // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */ + // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */ + // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */ + // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */ + + /* Completeness */ + // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */ + "skipLibCheck": true /* Skip type checking all .d.ts files. 
*/ + } +} diff --git a/test/performance/env_remote_ledger_test.go b/test/performance/env_remote_ledger_test.go new file mode 100644 index 000000000..bc2b4be5e --- /dev/null +++ b/test/performance/env_remote_ledger_test.go @@ -0,0 +1,77 @@ +//go:build it + +package performance_test + +import ( + "context" + "net/http" + "testing" + + ledger "github.com/formancehq/ledger/internal" + ledgerclient "github.com/formancehq/stack/ledger/client" + "github.com/formancehq/stack/ledger/client/models/components" + "github.com/formancehq/stack/ledger/client/models/operations" + "github.com/stretchr/testify/require" +) + +type RemoteLedgerEnvFactory struct { + httpClient *http.Client + ledgerURL string +} + +func (r *RemoteLedgerEnvFactory) Create(ctx context.Context, b *testing.B, ledger ledger.Ledger) Env { + + client := ledgerclient.New( + ledgerclient.WithClient(r.httpClient), + ledgerclient.WithServerURL(r.ledgerURL), + ) + + _, err := client.Ledger.V2.CreateLedger(ctx, operations.V2CreateLedgerRequest{ + Ledger: ledger.Name, + V2CreateLedgerRequest: &components.V2CreateLedgerRequest{ + Bucket: &ledger.Bucket, + Metadata: ledger.Metadata, + Features: ledger.Features, + }, + }) + require.NoError(b, err) + + return NewRemoteLedgerEnv(client, ledgerURL, ledger) +} + +var _ EnvFactory = (*RemoteLedgerEnvFactory)(nil) + +func NewRemoteLedgerEnvFactory(httpClient *http.Client, ledgerURL string) *RemoteLedgerEnvFactory { + return &RemoteLedgerEnvFactory{ + httpClient: httpClient, + ledgerURL: ledgerURL, + } +} + +type RemoteLedgerEnv struct { + ledger ledger.Ledger + client *ledgerclient.Formance + ledgerURL string +} + +func (r *RemoteLedgerEnv) URL() string { + return r.ledgerURL +} + +func (r *RemoteLedgerEnv) Client() *ledgerclient.Formance { + return r.client +} + +func (r *RemoteLedgerEnv) Stop(_ context.Context) error { + return nil +} + +func NewRemoteLedgerEnv(client *ledgerclient.Formance, metricsURL string, ledger ledger.Ledger) *RemoteLedgerEnv { + return &RemoteLedgerEnv{ + client: client, + ledger: ledger, + ledgerURL: metricsURL, + } +} + +var _ Env = (*RemoteLedgerEnv)(nil) diff --git a/test/performance/env_test.go b/test/performance/env_test.go new file mode 100644 index 000000000..265690b99 --- /dev/null +++ b/test/performance/env_test.go @@ -0,0 +1,30 @@ +//go:build it + +package performance_test + +import ( + "context" + ledgerclient "github.com/formancehq/stack/ledger/client" + "testing" + + ledger "github.com/formancehq/ledger/internal" +) + +type TransactionExecutor interface { + ExecuteScript(context.Context, string, map[string]string) (*ledger.Transaction, error) +} +type TransactionExecutorFn func(context.Context, string, map[string]string) (*ledger.Transaction, error) + +func (fn TransactionExecutorFn) ExecuteScript(ctx context.Context, script string, vars map[string]string) (*ledger.Transaction, error) { + return fn(ctx, script, vars) +} + +type Env interface { + Client() *ledgerclient.Formance + URL() string + Stop(ctx context.Context) error +} + +type EnvFactory interface { + Create(ctx context.Context, b *testing.B, ledger ledger.Ledger) Env +} diff --git a/test/performance/env_testserver_test.go b/test/performance/env_testserver_test.go new file mode 100644 index 000000000..5f05ef75c --- /dev/null +++ b/test/performance/env_testserver_test.go @@ -0,0 +1,97 @@ +//go:build it + +package performance_test + +import ( + "context" + "github.com/formancehq/go-libs/v2/otlp/otlpmetrics" + ledgerclient "github.com/formancehq/stack/ledger/client" + "io" + "os" + "testing" + + 
"github.com/formancehq/go-libs/v2/pointer" + "github.com/formancehq/go-libs/v2/testing/platform/pgtesting" + "github.com/formancehq/go-libs/v2/time" + ledger "github.com/formancehq/ledger/internal" + "github.com/formancehq/ledger/pkg/testserver" + "github.com/formancehq/stack/ledger/client/models/components" + "github.com/formancehq/stack/ledger/client/models/operations" + "github.com/stretchr/testify/require" +) + +type TestServerEnv struct { + testServer *testserver.Server + ledger ledger.Ledger +} + +func (e *TestServerEnv) Client() *ledgerclient.Formance { + return e.testServer.Client() +} + +func (e *TestServerEnv) URL() string { + return e.testServer.URL() +} + +func (e *TestServerEnv) Stop(ctx context.Context) error { + e.testServer.Stop(ctx) + return nil +} + +var _ Env = (*TestServerEnv)(nil) + +type TestServerEnvFactory struct { + pgServer *pgtesting.PostgresServer +} + +func (f *TestServerEnvFactory) Create(ctx context.Context, b *testing.B, ledger ledger.Ledger) Env { + + db := f.pgServer.NewDatabase(b) + b.Logf("database: %s", db.Name()) + connectionOptions := db.ConnectionOptions() + connectionOptions.MaxOpenConns = 100 + connectionOptions.MaxIdleConns = 100 + connectionOptions.ConnMaxIdleTime = time.Minute + + var output io.Writer = os.Stdout + if os.Getenv("DEBUG") != "true" { + output = io.Discard + } + + testServer := testserver.New(b, testserver.Configuration{ + PostgresConfiguration: connectionOptions, + Debug: os.Getenv("DEBUG") == "true", + Output: output, + OTLPConfig: &testserver.OTLPConfig{ + Metrics: &otlpmetrics.ModuleConfig{ + KeepInMemory: true, + RuntimeMetrics: true, + }, + }, + ExperimentalFeatures: true, + }) + + _, err := testServer.Client().Ledger.V2. + CreateLedger(ctx, operations.V2CreateLedgerRequest{ + Ledger: ledger.Name, + V2CreateLedgerRequest: &components.V2CreateLedgerRequest{ + Bucket: pointer.For(ledger.Bucket), + Metadata: ledger.Metadata, + Features: ledger.Features, + }, + }) + require.NoError(b, err) + + return &TestServerEnv{ + testServer: testServer, + ledger: ledger, + } +} + +var _ EnvFactory = (*TestServerEnvFactory)(nil) + +func NewTestServerEnvFactory(pgServer *pgtesting.PostgresServer) *TestServerEnvFactory { + return &TestServerEnvFactory{ + pgServer: pgServer, + } +} diff --git a/test/performance/features_test.go b/test/performance/features_test.go new file mode 100644 index 000000000..a3afc7c02 --- /dev/null +++ b/test/performance/features_test.go @@ -0,0 +1,44 @@ +//go:build it + +package performance_test + +import ( + . 
"github.com/formancehq/go-libs/v2/collectionutils" + ledger "github.com/formancehq/ledger/internal" + "sort" +) + +func buildAllPossibleConfigurations() []configuration { + possibleConfigurations := make([]configuration, 0) + possibleConfigurations = append(possibleConfigurations, configuration{ + Name: "MINIMAL", + FeatureSet: ledger.MinimalFeatureSet, + }) + + fullConfiguration := ledger.MinimalFeatureSet + features := Keys(ledger.FeatureConfigurations) + sort.Strings(features) + + for _, feature := range features { + possibleConfigurations = append(possibleConfigurations, configuration{ + Name: feature, + FeatureSet: ledger.MinimalFeatureSet.With(feature, ledger.FeatureConfigurations[feature][0]), + }) + fullConfiguration = fullConfiguration.With(feature, ledger.FeatureConfigurations[feature][0]) + } + possibleConfigurations = append(possibleConfigurations, configuration{ + Name: "FULL", + FeatureSet: fullConfiguration, + }) + + return possibleConfigurations +} + +type configuration struct { + Name string + FeatureSet ledger.FeatureSet +} + +func (c configuration) String() string { + return c.Name +} diff --git a/test/performance/main_test.go b/test/performance/main_test.go new file mode 100644 index 000000000..2b049c8b5 --- /dev/null +++ b/test/performance/main_test.go @@ -0,0 +1,115 @@ +//go:build it + +package performance_test + +import ( + "context" + "crypto/tls" + "flag" + "github.com/formancehq/go-libs/v2/logging" + "github.com/formancehq/go-libs/v2/testing/docker" + "github.com/formancehq/go-libs/v2/testing/platform/pgtesting" + . "github.com/formancehq/go-libs/v2/testing/utils" + "golang.org/x/oauth2" + "golang.org/x/oauth2/clientcredentials" + "net/http" + "testing" +) + +var ( + dockerPool *docker.Pool + pgServer *Deferred[*pgtesting.PostgresServer] + + authClientID string + authClientSecret string + + // targeting a stack + stackURL string + + // targeting a ledger + authIssuerURL string + ledgerURL string + + parallelism int64 + reportFile string + + envFactory EnvFactory +) + +func init() { + flag.StringVar(&stackURL, "stack.url", "", "Stack URL") + flag.StringVar(&authClientID, "client.id", "", "Client ID") + flag.StringVar(&authClientSecret, "client.secret", "", "Client secret") + flag.StringVar(&ledgerURL, "ledger.url", "", "Ledger url") + flag.StringVar(&authIssuerURL, "auth.url", "", "Auth url (ignored if --stack.url is specified)") + flag.StringVar(&reportFile, "report.file", "", "Location to write report file") + flag.Int64Var(¶llelism, "parallelism", 1, "Parallelism (default 1). Values is multiplied by GOMAXPROCS") +} + +func TestMain(m *testing.M) { + flag.Parse() + + WithTestMain(func(t *TestingTForMain) int { + selectedEnv := 0 + if stackURL != "" { + selectedEnv++ + } + if ledgerURL != "" { + selectedEnv++ + } + if selectedEnv > 1 { + t.Errorf("Cannot specify both --stack.url and --ledger.url") + t.FailNow() + } + + switch { + case stackURL != "": + envFactory = NewRemoteLedgerEnvFactory(getHttpClient(stackURL+"/api/auth"), stackURL+"/api/ledger") + case ledgerURL != "": + envFactory = NewRemoteLedgerEnvFactory(getHttpClient(authIssuerURL), ledgerURL) + default: + // Configure the environment to run benchmarks locally. + // Start a docker connection and create a new postgres server. 
+ dockerPool = docker.NewPool(t, logging.Testing()) + + pgServer = NewDeferred[*pgtesting.PostgresServer]() + pgServer.LoadAsync(func() *pgtesting.PostgresServer { + return pgtesting.CreatePostgresServer( + t, + dockerPool, + pgtesting.WithPGCrypto(), + ) + }) + + Wait(pgServer) + + envFactory = NewTestServerEnvFactory(pgServer.GetValue()) + } + + return m.Run() + }) +} + +func getHttpClient(authUrl string) *http.Client { + httpClient := &http.Client{ + Transport: &http.Transport{ + MaxIdleConns: 100, + MaxConnsPerHost: 100, + MaxIdleConnsPerHost: 100, + TLSClientConfig: &tls.Config{ + InsecureSkipVerify: true, + }, + }, + } + if authClientID != "" { + httpClient = (&clientcredentials.Config{ + ClientID: authClientID, + ClientSecret: authClientSecret, + TokenURL: authUrl + "/oauth/token", + Scopes: []string{"ledger:read", "ledger:write"}, + }). + Client(context.WithValue(context.Background(), oauth2.HTTPClient, httpClient)) + } + + return httpClient +} diff --git a/test/performance/performance_test.go b/test/performance/performance_test.go deleted file mode 100644 index b35030e01..000000000 --- a/test/performance/performance_test.go +++ /dev/null @@ -1,117 +0,0 @@ -//go:build it - -package benchmarks - -import ( - "bytes" - "fmt" - "math/big" - "runtime" - "sync" - "sync/atomic" - "testing" - - "github.com/formancehq/go-libs/logging" - "github.com/formancehq/go-libs/pointer" - "github.com/formancehq/go-libs/testing/docker" - "github.com/formancehq/go-libs/testing/platform/pgtesting" - "github.com/formancehq/go-libs/testing/utils" - "github.com/formancehq/go-libs/time" - "github.com/formancehq/ledger/pkg/testserver" - "github.com/formancehq/stack/ledger/client/models/components" - "github.com/google/uuid" - "github.com/stretchr/testify/require" -) - -var ( - dockerPool *docker.Pool - srv *pgtesting.PostgresServer -) - -func TestMain(m *testing.M) { - utils.WithTestMain(func(t *utils.TestingTForMain) int { - dockerPool = docker.NewPool(t, logging.Testing()) - srv = pgtesting.CreatePostgresServer(t, dockerPool) - - return m.Run() - }) -} - -func BenchmarkWorstCase(b *testing.B) { - - db := srv.NewDatabase(b) - - ctx := logging.TestingContext() - - ledgerName := uuid.NewString() - testServer := testserver.New(b, testserver.Configuration{ - PostgresConfiguration: db.ConnectionOptions(), - Debug: testing.Verbose(), - }) - testServer.Start() - defer testServer.Stop() - - _, err := testServer.Client().Ledger.V2.CreateLedger(ctx, ledgerName, &components.V2CreateLedgerRequest{}) - require.NoError(b, err) - - totalDuration := atomic.Int64{} - b.SetParallelism(1000) - runtime.GC() - b.ResetTimer() - startOfBench := time.Now() - counter := atomic.Int64{} - longestTxLock := sync.Mutex{} - longestTransactionID := big.NewInt(0) - longestTransactionDuration := time.Duration(0) - b.RunParallel(func(pb *testing.PB) { - buf := bytes.NewBufferString("") - for pb.Next() { - buf.Reset() - id := counter.Add(1) - now := time.Now() - - // todo: check why the generated sdk does not have the same signature as the global sdk - transactionResponse, err := testServer.Client().Ledger.V2.CreateTransaction(ctx, ledgerName, components.V2PostTransaction{ - Timestamp: nil, - Postings: nil, - Script: &components.V2PostTransactionScript{ - Plain: `vars { - account $account -} - -send [USD/2 100] ( - source = @world - destination = $account -)`, - Vars: map[string]any{ - "account": fmt.Sprintf("accounts:%d", id), - }, - }, - - Reference: nil, - Metadata: nil, - }, pointer.For(false), nil) - if err != nil { - return - } - 
require.NoError(b, err) - - latency := time.Since(now).Milliseconds() - totalDuration.Add(latency) - - longestTxLock.Lock() - if time.Millisecond*time.Duration(latency) > longestTransactionDuration { - longestTransactionID = transactionResponse.V2CreateTransactionResponse.Data.ID - longestTransactionDuration = time.Duration(latency) * time.Millisecond - } - longestTxLock.Unlock() - } - }) - - b.StopTimer() - b.Logf("Longest transaction: %d (%s)", longestTransactionID, longestTransactionDuration.String()) - b.ReportMetric((float64(time.Duration(b.N))/float64(time.Since(startOfBench)))*float64(time.Second), "t/s") - b.ReportMetric(float64(totalDuration.Load()/int64(b.N)), "ms/transaction") - - runtime.GC() -} diff --git a/test/performance/report_test.go b/test/performance/report_test.go new file mode 100644 index 000000000..ad0b2b4fe --- /dev/null +++ b/test/performance/report_test.go @@ -0,0 +1,75 @@ +//go:build it + +package performance_test + +import ( + "github.com/formancehq/go-libs/v2/time" + "github.com/jamiealquiza/tachymeter" + "sync" +) + +type Result struct { + Start time.Time + End time.Time + + Metrics *tachymeter.Metrics + + Name string + Configuration configuration + TPS float64 + InternalMetrics map[string]any +} + +type report struct { + mu *sync.Mutex + + Start time.Time + End time.Time + + Tachymeter *tachymeter.Tachymeter + + Scenario string + Configuration configuration + InternalMetrics map[string]any +} + +func (r *report) GetResult() Result { + return Result{ + Start: r.Start, + End: r.End, + Metrics: r.Tachymeter.Calc(), + InternalMetrics: r.InternalMetrics, + Name: r.Scenario, + Configuration: r.Configuration, + TPS: r.TPS(), + } +} + +func (r *report) TPS() float64 { + return (float64(time.Duration(r.Tachymeter.Count)) / float64(r.End.Sub(r.Start))) * float64(time.Second) +} + +func (r *report) registerTransactionLatency(latency time.Duration) { + r.mu.Lock() + defer r.mu.Unlock() + + r.Tachymeter.AddTime(latency) +} + +func (r *report) reset() { + r.Start = time.Now() + r.Tachymeter.Reset() +} + +func newReport(configuration configuration, scenario string) report { + ret := report{ + Scenario: scenario, + Configuration: configuration, + mu: &sync.Mutex{}, + Tachymeter: tachymeter.New(&tachymeter.Config{ + Size: 10000, + }), + } + ret.reset() + return ret +} diff --git a/test/performance/write_test.go b/test/performance/write_test.go new file mode 100644 index 000000000..d660cb51c --- /dev/null +++ b/test/performance/write_test.go @@ -0,0 +1,88 @@ +//go:build it + +package performance_test + +import ( + "encoding/json" + "fmt" + "github.com/formancehq/go-libs/v2/logging" + "github.com/stretchr/testify/require" + "os" + "path/filepath" + "testing" +) + +var scripts = map[string]TransactionProvider{ + "world->bank": TransactionProviderFn(worldToBank), + "world->any": TransactionProviderFn(worldToAny), + "any(unbounded)->any": TransactionProviderFn(anyUnboundedToAny), + "any(bounded)->any": TransactionProviderFn(anyBoundedToAny), +} + +func worldToBank(_ int) (string, map[string]string) { + return ` +send [USD/2 100] ( + source = @world + destination = @bank +)`, nil +} + +func worldToAny(id int) (string, map[string]string) { + return ` +vars { + account $destination +} +send [USD/2 100] ( + source = @world + destination = $destination +)`, map[string]string{ + "destination": fmt.Sprintf("dst:%d", id), + } +} + +func anyUnboundedToAny(id int) (string, map[string]string) { + return ` +vars { + account $source + account $destination +} +send [USD/2 100] ( + source = 
$source allowing unbounded overdraft + destination = $destination +)`, map[string]string{ + "source": fmt.Sprintf("src:%d", id), + "destination": fmt.Sprintf("dst:%d", id), + } +} + +func anyBoundedToAny(id int) (string, map[string]string) { + return fmt.Sprintf(` +vars { + account $source + account $destination +} +send [USD/2 100] ( + source = $source allowing overdraft up to [USD/2 %d] + destination = $destination +)`, (id+1)*100), map[string]string{ + "source": fmt.Sprintf("src:%d", id), + "destination": fmt.Sprintf("dst:%d", id), + } +} + +func BenchmarkWrite(b *testing.B) { + + // Execute benchmarks + reports := New(b, envFactory, scripts).Run(logging.TestingContext()) + + // Write report + if reportFile != "" { + require.NoError(b, os.MkdirAll(filepath.Dir(reportFile), 0755)) + + f, err := os.Create(reportFile) + require.NoError(b, err) + enc := json.NewEncoder(f) + enc.SetIndent("", " ") + require.NoError(b, enc.Encode(reports)) + } +} diff --git a/test/stress/stress_test.go b/test/stress/stress_test.go new file mode 100644 index 000000000..f5bc5ec73 --- /dev/null +++ b/test/stress/stress_test.go @@ -0,0 +1,155 @@ +//go:build it + +package test_suite + +import ( + "fmt" + "math/big" + "math/rand" + "sync" + "sync/atomic" + + "github.com/alitto/pond" + "github.com/formancehq/go-libs/v2/logging" + "github.com/formancehq/go-libs/v2/pointer" + "github.com/formancehq/go-libs/v2/testing/platform/pgtesting" + ledger "github.com/formancehq/ledger/internal" + . "github.com/formancehq/ledger/pkg/testserver" + "github.com/formancehq/stack/ledger/client/models/components" + "github.com/formancehq/stack/ledger/client/models/operations" + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" +) + +var _ = Context("Ledger stress tests", func() { + var ( + db = pgtesting.UsePostgresDatabase(pgServer) + ctx = logging.TestingContext() + ) + + testServer := NewTestServer(func() Configuration { + return Configuration{ + PostgresConfiguration: db.GetValue().ConnectionOptions(), + Output: GinkgoWriter, + Debug: debug, + ExperimentalFeatures: true, + } + }) + + const ( + countLedgers = 6 + countBuckets = 3 + countTransactions = 500 + countAccounts = 80 + ) + + When(fmt.Sprintf("creating %d ledgers dispatched on %d buckets", countLedgers, countLedgers/countBuckets), func() { + BeforeEach(func() { + for i := range countLedgers { + bucketName := fmt.Sprintf("bucket%d", i/countBuckets) + ledgerName := fmt.Sprintf("ledger%d", i) + err := CreateLedger(ctx, testServer.GetValue(), operations.V2CreateLedgerRequest{ + Ledger: ledgerName, + V2CreateLedgerRequest: &components.V2CreateLedgerRequest{ + Bucket: &bucketName, + Features: ledger.MinimalFeatureSet.With(ledger.FeatureMovesHistory, "ON"), + }, + }) + Expect(err).ShouldNot(HaveOccurred()) + } + }) + When(fmt.Sprintf("creating %d transactions across the same account pool", countTransactions), func() { + var ( + createdTransactions map[string][]*big.Int + mu sync.Mutex + ) + BeforeEach(func() { + createdTransactions = map[string][]*big.Int{} + wp := pond.New(20, 20) + for range countTransactions { + wp.Submit(func() { + defer GinkgoRecover() + + ledger := fmt.Sprintf("ledger%d", rand.Intn(countLedgers)) + createdTx, err := CreateTransaction(ctx, testServer.GetValue(), operations.V2CreateTransactionRequest{ + Ledger: ledger, + V2PostTransaction: components.V2PostTransaction{ + Postings: []components.V2Posting{ + { + Source: fmt.Sprintf("accounts:%d", rand.Intn(countAccounts)), + Destination: fmt.Sprintf("accounts:%d", rand.Intn(countAccounts)), + Asset: "USD", + 
Amount: big.NewInt(100), + }, + { + Source: fmt.Sprintf("accounts:%d", rand.Intn(countAccounts)), + Destination: fmt.Sprintf("accounts:%d", rand.Intn(countAccounts)), + Asset: "USD", + Amount: big.NewInt(100), + }, + }, + }, + Force: pointer.For(true), + }) + Expect(err).ShouldNot(HaveOccurred()) + mu.Lock() + if createdTransactions[ledger] == nil { + createdTransactions[ledger] = []*big.Int{} + } + createdTransactions[ledger] = append(createdTransactions[ledger], createdTx.ID) + mu.Unlock() + }) + } + wp.StopAndWait() + }) + When("getting aggregated volumes with no parameters", func() { + It("should be zero", func() { + Expect(testServer.GetValue()).To(HaveCoherentState()) + }) + }) + When("trying to concurrently revert all transactions", func() { + It("should be handled correctly", func() { + const ( + // Each transaction will have its revert attempted (duplicates + 1) times concurrently. + // At the end we will check that the correct number of reverts has + // succeeded and the correct number has failed. + duplicates = 1 + ) + var ( + success atomic.Int64 + failures atomic.Int64 + ) + wp := pond.New(20, 20) + for ledger, ids := range createdTransactions { + for _, id := range ids { + for range duplicates + 1 { + wp.Submit(func() { + defer GinkgoRecover() + + _, err := RevertTransaction(ctx, testServer.GetValue(), operations.V2RevertTransactionRequest{ + Ledger: ledger, + ID: id, + Force: pointer.For(true), + }) + if err == nil { + success.Add(1) + } else { + failures.Add(1) + } + }) + } + } + } + wp.StopAndWait() + By("we should have the correct number of successes and failures", func() { + Expect(success.Load()).To(Equal(int64(countTransactions))) + Expect(failures.Load()).To(Equal(int64(duplicates * countTransactions))) + }) + By("we should still have the aggregated balances at 0", func() { + Expect(testServer.GetValue()).To(HaveCoherentState()) + }) + }) + }) + }) + }) +}) diff --git a/test/integration/environment_test.go b/test/stress/suite_test.go similarity index 69% rename from test/integration/environment_test.go rename to test/stress/suite_test.go index 70841afe8..a85ef7764 100644 --- a/test/integration/environment_test.go +++ b/test/stress/suite_test.go @@ -4,16 +4,22 @@ package test_suite import ( "encoding/json" + . "github.com/formancehq/go-libs/v2/testing/platform/pgtesting" + . "github.com/formancehq/go-libs/v2/testing/utils" "os" + "testing" - "github.com/formancehq/go-libs/logging" - "github.com/formancehq/go-libs/testing/docker" - . "github.com/formancehq/go-libs/testing/platform/pgtesting" - . "github.com/formancehq/go-libs/testing/utils" + "github.com/formancehq/go-libs/v2/logging" + "github.com/formancehq/go-libs/v2/testing/docker" . "github.com/onsi/ginkgo/v2" . 
"github.com/onsi/gomega" ) +func Test(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "Test Suite") +} + var ( dockerPool = NewDeferred[*docker.Pool]() pgServer = NewDeferred[*PostgresServer]() @@ -31,7 +37,14 @@ var _ = SynchronizedBeforeSuite(func() []byte { pgServer.LoadAsync(func() *PostgresServer { By("Initializing postgres server") - return CreatePostgresServer(GinkgoT(), dockerPool.GetValue()) + ret := CreatePostgresServer( + GinkgoT(), + dockerPool.GetValue(), + WithPGStatsExtension(), + WithPGCrypto(), + ) + By("Postgres address: " + ret.GetDSN()) + return ret }) By("Waiting services alive") diff --git a/tools/docs/events/main.go b/tools/docs/events/main.go new file mode 100644 index 000000000..ffd200162 --- /dev/null +++ b/tools/docs/events/main.go @@ -0,0 +1,60 @@ +package main + +import ( + "encoding/json" + "fmt" + "github.com/formancehq/ledger/internal/bus" + "github.com/invopop/jsonschema" + "github.com/spf13/cobra" + "os" + "path/filepath" + "reflect" +) + +func newDocEventsCommand() *cobra.Command { + const ( + writeDirFlag = "write-dir" + ) + cmd := &cobra.Command{ + RunE: func(cmd *cobra.Command, _ []string) error { + + writeDir, err := cmd.Flags().GetString(writeDirFlag) + if err != nil { + return fmt.Errorf("failed to get write-dir flag: %w", err) + } + + err = os.MkdirAll(writeDir, 0755) + if err != nil { + return fmt.Errorf("failed to create write-dir: %w", err) + } + + for _, o := range []any{ + bus.CommittedTransactions{}, + bus.DeletedMetadata{}, + bus.SavedMetadata{}, + bus.RevertedTransaction{}, + } { + schema := jsonschema.Reflect(o) + data, err := json.MarshalIndent(schema, "", " ") + if err != nil { + return fmt.Errorf("failed to marshal schema: %w", err) + } + err = os.WriteFile(filepath.Join(writeDir, reflect.TypeOf(o).Name()+".json"), data, 0600) + if err != nil { + return fmt.Errorf("failed to write schema: %w", err) + } + } + + return nil + }, + } + cmd.Flags().String(writeDirFlag, "", "directory to write events to") + + return cmd +} + +func main() { + if err := newDocEventsCommand().Execute(); err != nil { + os.Exit(1) + } +} diff --git a/tools/docs/flags/main.go b/tools/docs/flags/main.go new file mode 100644 index 000000000..48834fd06 --- /dev/null +++ b/tools/docs/flags/main.go @@ -0,0 +1,59 @@ +package main + +import ( + "fmt" + ledgercmd "github.com/formancehq/ledger/cmd" + "os" + "sort" + "strings" + "text/tabwriter" + + "github.com/spf13/cobra" + "github.com/spf13/pflag" +) + +func newDocFlagsCommand() *cobra.Command { + return &cobra.Command{ + Use: "flags", + RunE: func(cmd *cobra.Command, _ []string) error { + + w := tabwriter.NewWriter(cmd.OutOrStdout(), 0, 0, 1, ' ', tabwriter.Debug) + + allKeys := make([]string, 0) + + serveCommand := ledgercmd.NewServeCommand() + serveCommand.Flags().VisitAll(func(f *pflag.Flag) { + allKeys = append(allKeys, f.Name) + }) + sort.Strings(allKeys) + + if _, err := fmt.Fprintf(w, + "\tFlag\tEnv var\tDefault value\tDescription\t\r\n"); err != nil { + return err + } + if _, err := fmt.Fprintf(w, + "\t-\t-\t-\t-\t\r\n"); err != nil { + return err + } + for _, key := range allKeys { + asEnvVar := strings.ToUpper(strings.Replace(key, "-", "_", -1)) + flag := serveCommand.Flags().Lookup(key) + if flag == nil { + continue + } + if _, err := fmt.Fprintf(w, + "\t --%s\t %s\t %s\t %s\t\r\n", key, asEnvVar, flag.DefValue, flag.Usage); err != nil { + panic(err) + } + } + + return w.Flush() + }, + } +} + +func main() { + if err := newDocFlagsCommand().Execute(); err != nil { + os.Exit(1) + } +}