fix: Backport v1.7.6 to v1.8.1 & main (#370)
* first

* chore: add migration 17 test

* fix: remove verbose

* fix: review gfyrag

* fix: 'account' filter on GET /transactions using old method
Antoine Gelloz authored and flemzord committed Nov 17, 2022
1 parent eccbf83 commit 664781d
Showing 4 changed files with 203 additions and 148 deletions.

One of the changed files was deleted; its contents are not shown here.

@@ -1,4 +1,4 @@
-package _16_denormalize_addresses
+package _17_optimized_segments
 
 import (
     "context"
@@ -12,7 +12,11 @@ import (
     "github.com/stretchr/testify/require"
 )
 
-func TestMigrate16(t *testing.T) {
+func TestMigrate17(t *testing.T) {
+    if ledgertesting.StorageDriverName() != "postgres" {
+        t.Skip()
+    }
+
     driver, closeFunc, err := ledgertesting.StorageDriver()
     require.NoError(t, err)
     defer closeFunc()
@@ -26,7 +30,7 @@ func TestMigrate16(t *testing.T) {
     migrations, err := sqlstorage.CollectMigrationFiles(sqlstorage.MigrationsFS)
     require.NoError(t, err)
 
-    modified, err := sqlstorage.Migrate(context.Background(), schema, migrations[0:16]...)
+    modified, err := sqlstorage.Migrate(context.Background(), schema, migrations[0:17]...)
     require.NoError(t, err)
     require.True(t, modified)
 
@@ -37,27 +41,31 @@ func TestMigrate16(t *testing.T) {
         InsertInto(schema.Table("transactions")).
         Cols("id", "timestamp", "postings", "metadata").
         Values(0, now.Format(time.RFC3339), `[
-            {"source": "world", "destination": "bank", "asset": "USD", "amount": 100},
-            {"source": "bank", "destination": "user", "asset": "USD", "amount": 100}
+            {"source": "world", "destination": "users:001", "asset": "USD", "amount": 100}
         ]`, "{}").
         BuildWithFlavor(schema.Flavor())
     _, err = schema.ExecContext(context.Background(), sqlq, args...)
     require.NoError(t, err)
 
-    modified, err = sqlstorage.Migrate(context.Background(), schema, migrations[16])
+    modified, err = sqlstorage.Migrate(context.Background(), schema, migrations[17])
     require.NoError(t, err)
     require.True(t, modified)
 
     sqlq, args = sqlbuilder.
-        Select("sources", "destinations").
-        From(schema.Table("transactions")).
-        Where("id = 0").
+        Select("txid", "posting_index", "source", "destination").
+        From(schema.Table("postings")).
+        Where("txid = 0").
         BuildWithFlavor(schema.Flavor())
 
     row := store.Schema().QueryRowContext(context.Background(), sqlq, args...)
     require.NoError(t, row.Err())
-    var sources, destinations string
-    require.NoError(t, err, row.Scan(&sources, &destinations))
-    require.Equal(t, "world;bank", sources)
-    require.Equal(t, "bank;user", destinations)
+
+    var txid uint64
+    var postingIndex int
+    var source, destination string
+    require.NoError(t, err, row.Scan(&txid, &postingIndex, &source, &destination))
+    require.Equal(t, uint64(0), txid)
+    require.Equal(t, 0, postingIndex)
+    require.Equal(t, `["world"]`, source)
+    require.Equal(t, `["users", "001"]`, destination)
 }
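
For context on the new assertions: migration 17 splits each posting's account address on ':' and stores the segments as jsonb arrays in the new postings table, which is why the test inserts a posting to "users:001" and expects ["users", "001"] back. An illustrative query (not part of the commit) showing the same transformation the migration below applies:

-- illustrative only: the split-and-encode step used by the migration
select array_to_json(string_to_array('users:001', ':'))::jsonb as destination;
-- jsonb text output: ["users", "001"]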
pkg/storage/sqlstorage/migrates/17-optimized-segments/postgres.sql (new file: 36 additions, 0 deletions)
@@ -0,0 +1,36 @@
--statement
drop trigger if exists log_entry on "VAR_LEDGER_NAME".log;
--statement
drop trigger if exists volumes_changed on "VAR_LEDGER_NAME".log;

--statement
alter table "VAR_LEDGER_NAME".transactions drop column if exists sources;
--statement
alter table "VAR_LEDGER_NAME".transactions drop column if exists destinations;
--statement
drop index if exists transactions_sources;
--statement
drop index if exists transactions_destinations;

--statement
create table if not exists "VAR_LEDGER_NAME".postings (
    txid bigint,
    posting_index integer,
    source jsonb,
    destination jsonb
);

--statement
create index postings_src on "VAR_LEDGER_NAME".postings using GIN(source);
--statement
create index postings_dest on "VAR_LEDGER_NAME".postings using GIN(destination);
--statement
create index postings_txid on "VAR_LEDGER_NAME".postings (txid asc);

--statement
insert into "VAR_LEDGER_NAME".postings(txid, posting_index, source, destination)
select txs.id as txid, i - 1 as posting_index,
array_to_json(string_to_array(t.posting->>'source', ':'))::jsonb as source,
array_to_json(string_to_array(t.posting->>'destination', ':'))::jsonb as destination
from "VAR_LEDGER_NAME".transactions txs left join lateral jsonb_array_elements(txs.postings)
with ordinality as t(posting, i) on true;
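
The postings table with GIN indexes on source and destination is presumably what makes segment-based filtering of transactions by account address cheap (the commit message mentions the 'account' filter on GET /transactions). A sketch of a query that could use these indexes; the ledger schema name "quickstart" and the filter value are illustrative assumptions, not taken from the commit:

-- hypothetical example: transactions whose source or destination address
-- contains the "users" segment; the @> containment operator can use the
-- GIN indexes created above
select distinct txid
from "quickstart".postings
where source @> '["users"]'::jsonb
   or destination @> '["users"]'::jsonb
order by txid desc;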