Merge pull request #483 from psteinroe/feat/wildcard
psteinroe authored Jul 14, 2024
2 parents e120a3d + 40ba34d commit 2cfb11a
Showing 25 changed files with 989 additions and 238 deletions.
7 changes: 7 additions & 0 deletions .changeset/real-donuts-count.md
@@ -0,0 +1,7 @@
---
"@supabase-cache-helpers/postgrest-react-query": minor
"@supabase-cache-helpers/postgrest-core": minor
"@supabase-cache-helpers/postgrest-swr": minor
---

feat: support for wildcard
15 changes: 7 additions & 8 deletions docs/pages/postgrest/queries.mdx
@@ -2,14 +2,6 @@ import { Callout, Tabs, Tab } from "nextra-theme-docs";

# Queries

<Callout emoji="⚠️ ">
Unfortunately, we require you to be explicit in the select statement fields,
and not use a wildcard ('*'), since mutations need to know the columns to
derive whether the query cache should be updated with new values. Note that
the wildcard selector is not allowed on any level, so you can't use it for
relations either.
</Callout>

The cache helpers query hooks wrap the data fetching hooks of the cache libraries and pass them both the cache key and the fetcher function derived from the PostgREST query. This is enabled primarily by a parser that turns any Supabase PostgREST query into a definite cache key. For example,

```ts
@@ -32,6 +24,13 @@ is parsed into
</Tab>
</Tabs>

<Callout emoji="⚠️ ">
Although you can use wildcards (`*`) in your query, they are only recommended
for `head: true` and `count: true` queries. For any other query, be explicit
about the columns you want to select; only then can cache helpers apply
granular cache updates without refetching.
</Callout>
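
For instance, a minimal sketch of the distinction, assuming an existing supabase-js `client` and an illustrative `contact` table:

```ts
// Wildcard is fine when only the count is needed: no rows end up in the cache,
// so there is nothing for a mutation to update granularly.
const { count } = await client
  .from('contact')
  .select('*', { count: 'exact', head: true });

// For data queries, list the columns explicitly so cache helpers can apply
// granular cache updates after insert, update, upsert and delete mutations.
const { data } = await client
  .from('contact')
  .select('id,username,ticket_number', { count: 'exact' });
```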

## `useQuery`

Wrapper around the default data fetching hook that returns the query result, including the count, without any modification of the data. The config parameter of the respective library can be passed as the second argument.
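
A minimal usage sketch, assuming the SWR flavor of the package, an existing supabase-js `client` passed in as a prop, and an illustrative `contact` table; the options in the second argument are plain SWR options:

```tsx
import type { SupabaseClient } from '@supabase/supabase-js';
import { useQuery } from '@supabase-cache-helpers/postgrest-swr';

function ContactList({ client }: { client: SupabaseClient }) {
  const { data, count, isLoading, error } = useQuery(
    client
      .from('contact')
      .select('id,username,ticket_number', { count: 'exact' }),
    // forwarded to the underlying cache library, here SWR
    { revalidateOnFocus: false },
  );

  if (error) return <span>something went wrong</span>;
  if (isLoading) return <span>loading...</span>;
  return <span>{`${count} contacts, first: ${data?.[0]?.username}`}</span>;
}
```
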
10 changes: 2 additions & 8 deletions packages/postgrest-core/package.json
@@ -13,17 +13,11 @@
"./package.json": "./package.json"
},
"types": "./dist/index.d.ts",
"files": [
"dist/**"
],
"files": ["dist/**"],
"publishConfig": {
"access": "public"
},
"keywords": [
"Supabase",
"PostgREST",
"Cache"
],
"keywords": ["Supabase", "PostgREST", "Cache"],
"repository": {
"type": "git",
"url": "git+https://github.com/psteinroe/supabase-cache-helpers.git",
98 changes: 51 additions & 47 deletions packages/postgrest-core/src/delete-item.ts
@@ -78,57 +78,61 @@ export const deleteItem = async <KeyType, Type extends Record<string, unknown>>(
const filter = getPostgrestFilter(key.queryKey);
// parse input into expected target format
if (key.schema === schema && key.table === table) {
const transformedInput = filter.denormalize(op.input);
if (
// For delete, the input has to have a value for all primary keys
op.primaryKeys.every(
(pk) => typeof transformedInput[pk as string] !== 'undefined',
)
) {
const limit = key.limit ?? 1000;
mutations.push(
mutate(k, (currentData) => {
// Return early if undefined or null
if (!currentData) return currentData;
if (key.isHead === true) {
mutations.push(revalidate(k));
} else {
const transformedInput = filter.denormalize(op.input);
if (
// For delete, the input has to have a value for all primary keys
op.primaryKeys.every(
(pk) => typeof transformedInput[pk as string] !== 'undefined',
)
) {
const limit = key.limit ?? 1000;
mutations.push(
mutate(k, (currentData) => {
// Return early if undefined or null
if (!currentData) return currentData;

if (isPostgrestHasMorePaginationCacheData<Type>(currentData)) {
return toHasMorePaginationCacheData(
filterByPks<Type>(
transformedInput,
currentData.flatMap((p) => p.data),
op.primaryKeys,
),
currentData,
limit,
);
} else if (isPostgrestPaginationCacheData<Type>(currentData)) {
return toPaginationCacheData(
filterByPks<Type>(
if (isPostgrestHasMorePaginationCacheData<Type>(currentData)) {
return toHasMorePaginationCacheData(
filterByPks<Type>(
transformedInput,
currentData.flatMap((p) => p.data),
op.primaryKeys,
),
currentData,
limit,
);
} else if (isPostgrestPaginationCacheData<Type>(currentData)) {
return toPaginationCacheData(
filterByPks<Type>(
transformedInput,
currentData.flat(),
op.primaryKeys,
),
limit,
);
} else if (isAnyPostgrestResponse<Type>(currentData)) {
const { data } = currentData;
if (!Array.isArray(data)) {
return { data: null };
}

const newData = filterByPks(
transformedInput,
currentData.flat(),
data,
op.primaryKeys,
),
limit,
);
} else if (isAnyPostgrestResponse<Type>(currentData)) {
const { data } = currentData;
if (!Array.isArray(data)) {
return { data: null };
}

const newData = filterByPks(
transformedInput,
data,
op.primaryKeys,
);
);

return {
data: newData,
count: newData.length,
};
}
}),
);
return {
data: newData,
count: newData.length,
};
}
}),
);
}
}
}

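
A short note on the new `isHead` branch above, with an assumed, simplified sketch of the cached shapes: a `head: true` (count-only) entry carries no rows, so there is nothing to filter by primary key and the stale count can only be corrected by revalidating the key.

```ts
// Assumed, simplified cache shapes, not the library's exact types.
const headEntry = { data: null, count: 42 };          // no rows cached -> revalidate(k)
const listEntry = { data: [{ id: '1' }], count: 1 };  // rows cached -> filter by primary keys
```
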
@@ -44,17 +44,51 @@ export const normalizeResponse = <R>(
groups: (Path | NestedPath)[],
obj: R,
): R => {
if (groups.some((p) => p.path === '*')) {
// if wildcard, add every non nested value
// for every nested value, check if groups contains a nested path for it. if not, also add it.
// reason is that the wildcard does not select relations

Object.entries(obj as Record<string, unknown>).forEach(([k, v]) => {
if (typeof v === 'object' || Array.isArray(v)) {
if (!groups.some((g) => isNestedPath(g) && g.path === k)) {
groups.push({
path: k,
declaration: k,
});
}
} else if (!groups.some((g) => g.path === k)) {
groups.push({
path: k,
declaration: k,
});
}
});
}

return groups.reduce<R>((prev, curr) => {
// prefer alias over path because of dedupe alias
const value = get(obj, curr.alias || curr.path);

if (typeof value === 'undefined') return prev;
if (value === null || !isNestedPath(curr)) {
if (value === null) {
return {
...prev,
[curr.path]: value,
};
}
if (!isNestedPath(curr)) {
return {
...prev,
...flatten({
[curr.path]:
value !== null &&
(typeof value === 'object' || Array.isArray(value))
? flatten(value)
: value,
}),
};
}
if (Array.isArray(value)) {
return {
...prev,
@@ -87,7 +121,52 @@ const buildUserQueryData = <R>(
pathGroups: (Path | NestedPath)[],
obj: R,
): R => {
if (pathGroups.some((p) => p.path === '*')) {
// if wildcard, add every non nested value
// for every nested value, check if pathGroups contains a nested path for it. if not, also add it.
// reason is that the wildcard does not select relations

Object.entries(obj as Record<string, unknown>).forEach(([k, v]) => {
if (typeof v === 'object' || Array.isArray(v)) {
if (!pathGroups.some((g) => isNestedPath(g) && g.path === k)) {
pathGroups.push({
path: k,
declaration: k,
});
}
} else if (!pathGroups.some((g) => g.path === k)) {
pathGroups.push({
path: k,
declaration: k,
});
}
});
}

if (userQueryGroups.some((p) => p.path === '*')) {
// if wildcard, add every non nested value
// for every nested value, check if pathGroups contains a nested path for it. if not, also add it.
// reason is that the wildcard does not select relations

Object.entries(obj as Record<string, unknown>).forEach(([k, v]) => {
if (typeof v === 'object' || Array.isArray(v)) {
if (!pathGroups.some((g) => isNestedPath(g) && g.path === k)) {
userQueryGroups.push({
path: k,
declaration: k,
});
}
} else if (!userQueryGroups.some((g) => g.path === k)) {
userQueryGroups.push({
path: k,
declaration: k,
});
}
});
}

return userQueryGroups.reduce<R>((prev, curr) => {
if (curr.path === '*') return prev;
// paths is reflecting the obj
const inputPath = pathGroups.find(
(p) => p.path === curr.path && isNestedPath(p) === isNestedPath(curr),
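
To make the wildcard handling in `normalizeResponse` concrete, a hypothetical before/after for a `*,organisation(name)` select; the shapes and names are illustrative, not taken from the library or its tests:

```ts
// PostgREST response for `select=*,organisation(name)`
const response = {
  id: '1',
  username: 'psteinroe',
  organisation: { name: 'Supabase' },
};

// The wildcard expands into the non-nested keys (`id`, `username`), while the
// explicitly selected relation keeps its flattened nested path:
const normalized = {
  id: '1',
  username: 'psteinroe',
  'organisation.name': 'Supabase',
};
```
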
28 changes: 27 additions & 1 deletion packages/postgrest-core/src/filter/denormalize.ts
Expand Up @@ -8,7 +8,7 @@ import type { Path } from '../lib/query-types';

/**
* Denormalize a normalized response object using the paths of the target query
* **/
**/
export const denormalize = <R extends Record<string, unknown>>(
// the paths into which we need to transform
paths: Path[],
@@ -17,7 +17,33 @@ export const denormalize = <R extends Record<string, unknown>>(
): R => {
const groups = groupPathsRecursive(paths);

if (groups.some((g) => g.path === '*')) {
// if a wildcard path is present, we expand the groups with all values from the object that are not part of a nested path from `paths`.
// This will also include unwanted values, e.g. from a join on another relation, because it's impossible for us to distinguish between json columns and joins.
Object.keys(obj).forEach((k) => {
const keyParts = k.split('.');
if (
keyParts.length > 1 &&
groups.some((g) => isNestedPath(g) && g.path === keyParts[0])
) {
// skip if key is actually part of a nested path from the groups
return;
}
if (groups.some((g) => g.path === keyParts[0])) {
// skip if key is already part of the groups
return;
}

groups.push({
declaration: keyParts[0],
path: keyParts[0],
});
});
}

return groups.reduce<R>((prev, curr) => {
// skip the wildcard since we already handled it above
if (curr.path === '*') return prev;
let value = obj[curr.path];

if (!isNestedPath(curr)) {
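
To make the caveat in the comment above concrete, a hypothetical sketch of the wildcard expansion during denormalization; the key names are assumptions for illustration:

```ts
// Keys of a normalized cache object: `organisation(name)` was selected explicitly,
// while `settings` is a json column that was only captured via the wildcard.
const normalizedKeys = ['id', 'username', 'organisation.name', 'settings.theme'];

// Denormalizing towards `*,organisation(name)`: the wildcard adds a group for the
// first segment of every key not covered by a nested path, i.e. `id`, `username`
// and `settings`, while `organisation.name` stays with its explicit nested path.
// `settings` could just as well be a joined relation; the parser cannot tell
// json columns and joins apart, which is exactly the unwanted-values caveat.
```
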
6 changes: 2 additions & 4 deletions packages/postgrest-core/src/lib/parse-select-param.ts
@@ -48,7 +48,8 @@ export const parseSelectParam = (s: string, currentPath?: Path): Path[] => {
.map(([table, selectedColumns]) =>
`${table}(${selectedColumns})`
.replace(/\(/g, '\\(')
.replace(/\)/g, '\\)'),
.replace(/\)/g, '\\)')
.replace(/\*/g, '\\*'),
)
.join('|')}`,
'g',
@@ -75,9 +76,6 @@ export const parseSelectParam = (s: string, currentPath?: Path): Path[] => {
};
});

if (columns.find((c) => c.path.includes('*')))
throw new Error('Wildcard selector is not supported');

return [
...columns,
...Object.entries(foreignTables).flatMap(
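
The extra escape matters because the foreign-table declarations are interpolated into a `RegExp`: without it, a wildcard inside a nested select is read as a quantifier instead of a literal star. A small self-contained sketch, with an illustrative declaration string:

```ts
const declaration = 'organisation(*)';

// Escaping only the parentheses, as before this change:
const withoutStarEscape = declaration
  .replace(/\(/g, '\\(')
  .replace(/\)/g, '\\)');
new RegExp(withoutStarEscape).test('organisation(*)'); // false: `*` quantifies `\(`

// Escaping the star as well, as introduced here:
const withStarEscape = withoutStarEscape.replace(/\*/g, '\\*');
new RegExp(withStarEscape).test('organisation(*)'); // true: matches the literal `*`
```
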