Skip to content

Commit

Permalink
fix: from review
Browse files Browse the repository at this point in the history
  • Loading branch information
gfyrag committed Dec 20, 2024
1 parent 348a006 commit c048862
Show file tree
Hide file tree
Showing 6 changed files with 17 additions and 58 deletions.
5 changes: 5 additions & 0 deletions internal/storage/ledger/paginator_offset.go
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ import (
"github.com/formancehq/go-libs/v2/bun/bunpaginate"
ledgercontroller "github.com/formancehq/ledger/internal/controller/ledger"
"github.com/uptrace/bun"
"math"
)

type offsetPaginator[ResourceType, OptionsType any] struct {
Expand Down Expand Up @@ -54,6 +55,10 @@ func (o offsetPaginator[ResourceType, OptionsType]) buildCursor(ret []ResourceTy
// Page with transactions after
if query.PageSize != 0 && len(ret) > int(query.PageSize) {
cp := query
// Check for potential overflow
if query.Offset > math.MaxUint64-query.PageSize {
return nil, fmt.Errorf("offset overflow")
}
cp.Offset = query.Offset + query.PageSize
next = &cp
ret = ret[:len(ret)-1]
Expand Down
2 changes: 1 addition & 1 deletion internal/storage/ledger/resource.go
Original file line number Diff line number Diff line change
Expand Up @@ -114,7 +114,7 @@ func (r *resourceRepository[ResourceType, OptionsType]) validateFilters(builder
options := append([]string{property.name}, property.aliases...)
for _, option := range options {
if found, err = regexp.MatchString("^"+option+"$", key); err != nil {
panic(err)
return fmt.Errorf("failed to match regex for key '%s': %w", key, err)
} else if found {
break
}
Expand Down
4 changes: 2 additions & 2 deletions internal/storage/ledger/resource_accounts.go
Original file line number Diff line number Diff line change
Expand Up @@ -127,9 +127,9 @@ func (h accountsResourceHandler) resolveFilter(store *Store, opts ledgercontroll
return "metadata @> ?", []any{map[string]any{
match[0][1]: value,
}}, nil
default:
return "", nil, ledgercontroller.NewErrInvalidQuery("invalid filter property %s", property)
}

panic("unreachable")
}

func (h accountsResourceHandler) project(store *Store, query ledgercontroller.ResourceQuery[any], selectQuery *bun.SelectQuery) (*bun.SelectQuery, error) {
Expand Down
39 changes: 2 additions & 37 deletions internal/storage/ledger/resource_transactions.go
Original file line number Diff line number Diff line change
Expand Up @@ -146,11 +146,9 @@ func (h transactionsResourceHandler) resolveFilter(store *Store, opts ledgercont

case property == "metadata":
return "metadata -> ? is not null", []any{value}, nil
case property == "timestamp":
return fmt.Sprintf("timestamp %s ?", convertOperatorToSQL(operator)), []any{value}, nil
default:
return "", nil, fmt.Errorf("unsupported filter: %s", property)
}

panic("unreachable")
}

func (h transactionsResourceHandler) project(store *Store, query ledgercontroller.ResourceQuery[any], selectQuery *bun.SelectQuery) (*bun.SelectQuery, error) {
Expand All @@ -162,39 +160,6 @@ func (h transactionsResourceHandler) expand(store *Store, opts ledgercontroller.
return nil, nil, nil
}

/**
SELECT "transactions_id", public.aggregate_objects(post_commit_effective_volumes::JSONB) AS post_commit_effective_volumes
FROM (
SELECT "transactions_id", json_build_object(moves.accounts_address, json_build_object(moves.asset, json_build_object('input', (moves.post_commit_effective_volumes).inputs, 'output', (moves.post_commit_effective_volumes).outputs))) AS post_commit_effective_volumes
FROM (
SELECT DISTINCT ON (transactions_id, accounts_address, asset)
"transactions_id",
"accounts_address",
"asset",
first_value(moves.post_commit_effective_volumes)
OVER (PARTITION BY (transactions_id, accounts_address, asset) ORDER BY seq DESC) AS post_commit_effective_volumes
FROM "_default".moves
) moves
) DATA
GROUP BY "transactions_id";
SELECT "transactions_id", public.aggregate_objects(json_build_object(accounts_address, post_commit_effective_volumes)::jsonb) AS post_commit_effective_volumes
FROM (
SELECT "transactions_id", "accounts_address", public.aggregate_objects(json_build_object(moves.asset, json_build_object('input', (moves.post_commit_effective_volumes).inputs, 'output', (moves.post_commit_effective_volumes).outputs))::jsonb) AS post_commit_effective_volumes
FROM (
SELECT DISTINCT ON (transactions_id, accounts_address, asset)
"transactions_id",
"accounts_address",
"asset",
first_value(moves.post_commit_effective_volumes)
OVER (PARTITION BY (transactions_id, accounts_address, asset) ORDER BY seq DESC) AS post_commit_effective_volumes
FROM "_default".moves
) moves
GROUP BY "transactions_id", "accounts_address"
) data
GROUP BY "transactions_id";
*/

ret := store.db.NewSelect().
TableExpr(
"(?) data",
Expand Down
4 changes: 2 additions & 2 deletions internal/storage/ledger/resource_volumes.go
Original file line number Diff line number Diff line change
Expand Up @@ -179,9 +179,9 @@ func (h volumesResourceHandler) resolveFilter(
match[0][1]: value,
}}, nil
}
default:
return "", nil, fmt.Errorf("unsupported filter %s", property)
}

panic("unreachable")
}

func (h volumesResourceHandler) project(
Expand Down
21 changes: 5 additions & 16 deletions internal/storage/ledger/utils.go
Original file line number Diff line number Diff line change
Expand Up @@ -8,15 +8,13 @@ import (
// isSegmentedAddress reports whether the colon-separated account address
// contains at least one empty segment (e.g. "a::b", ":a", or the empty
// string, which splits to a single empty segment). Such addresses are
// treated as segment patterns rather than literal addresses.
//
// NOTE(review): the original span fused the pre- and post-change diff
// lines (a removed needSegmentCheck accumulator and the added early
// return) into invalid Go; this is the post-change implementation with
// an early return instead of a mutated flag.
func isSegmentedAddress(address string) bool {
	for _, segment := range strings.Split(address, ":") {
		if segment == "" {
			return true
		}
	}

	return false
}

func filterAccountAddress(address, key string) string {
Expand All @@ -39,15 +37,6 @@ func filterAccountAddress(address, key string) string {
return strings.Join(parts, " and ")
}

func isPartialAddress[V any](address V) bool {
switch address := any(address).(type) {
case string:
for _, segment := range strings.Split(address, ":") {
if segment == "" {
return true
}
}
}

return false
func isPartialAddress(address any) bool {
return isSegmentedAddress(address.(string))
}

0 comments on commit c048862

Please sign in to comment.