Skip to content

Commit

Permalink
fix: impl
Browse files Browse the repository at this point in the history
  • Loading branch information
luckasRanarison committed Dec 2, 2024
1 parent 83b1d82 commit ceee646
Show file tree
Hide file tree
Showing 4 changed files with 368 additions and 67 deletions.
15 changes: 9 additions & 6 deletions src/metagen/src/client_ts/static/mod.ts
Original file line number Diff line number Diff line change
Expand Up @@ -216,15 +216,17 @@ class FileExtractor {
#currentPath: ValuePath = [];
#files: Map<string, File> = new Map();

static extractFrom(object: unknown, paths: TypePath[]) {
static extractFrom(key: string, object: unknown, paths: TypePath[]) {
const extractor = new FileExtractor();
if (!object || typeof object !== "object") {
throw new Error("expected object");
}
for (const path of paths) {
extractor.#currentPath = [];
extractor.#path = path;
extractor.#extractFromValue(object);
if (path[0] && path[0].startsWith("." + key)) {
extractor.#currentPath = [];
extractor.#path = path;
extractor.#extractFromValue(object);
}
}
return extractor.#files;
}
Expand Down Expand Up @@ -451,7 +453,7 @@ function convertQueryNodeGql(
const obj = { [key]: val.value };

if (node.files && node.files.length > 0) {
const extractedFiles = FileExtractor.extractFrom(obj, node.files);
const extractedFiles = FileExtractor.extractFrom(key, obj, node.files);

for (const [path, file] of extractedFiles) {
const pathInVariables = path.replace(/^\.[^\.\[]+/, `.${name}`);
Expand All @@ -464,7 +466,7 @@ function convertQueryNodeGql(
argsRow.push(`${key}: $${name}`);
}

out = argsRow.join(", ");
out = `${out} (${argsRow.join(", ")})`;
}

const subNodes = node.subNodes;
Expand Down Expand Up @@ -572,6 +574,7 @@ async function fetchGql(
method: "POST",
headers: {
accept: "application/json",
...additionalHeaders,
...(options.headers ?? {}),
},
body,
Expand Down
207 changes: 178 additions & 29 deletions tests/metagen/typegraphs/sample/ts/client.ts
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ function _selectionToNodeSet(
continue;
}

const { argumentTypes, subNodes, variants } = metaFn();
const { argumentTypes, subNodes, variants, inputFiles } = metaFn();

const nodeInstances =
nodeSelection instanceof Alias
Expand All @@ -39,7 +39,7 @@ function _selectionToNodeSet(
`nested Alias discovered at ${parentPath}.${instanceName}`,
);
}
const node: SelectNode = { instanceName, nodeName };
const node: SelectNode = { instanceName, nodeName, files: inputFiles };

if (argumentTypes) {
// make sure the arg is of the expected form
Expand Down Expand Up @@ -176,6 +176,7 @@ type SelectNode<_Out = unknown> = {
instanceName: string;
args?: NodeArgs;
subNodes?: SubNodes;
files?: TypePath[];
};

export class QueryNode<Out> {
Expand Down Expand Up @@ -210,12 +211,107 @@ type QueryDocOut<T> =
}
: never;

//TypePath = typing.List[typing.Union[typing.Literal["?"], typing.Literal["[]"], str]]
//ValuePath = typing.List[typing.Union[typing.Literal[""], str]]

type TypePath = ("?" | "[]" | `.${string}`)[];
type ValuePath = ("" | `[${number}]` | `.${string}`)[];

/**
 * Collects `File` instances out of a structured argument object.
 *
 * Each `TypePath` describes where files may live inside the object:
 * `.name` descends into an object property, `[]` fans out over every
 * array element, and `?` marks an optional (nullable) value. Every
 * `File` found at the end of a path is recorded under its concrete
 * value path (e.g. `.input.docs.0`) and replaced with `null` in the
 * original object so the remaining value stays serializable.
 */
class FileExtractor {
  // Type path currently being walked.
  #path: TypePath = [];
  // Concrete segments traversed so far (mirrors #path, with indices resolved).
  #currentPath: ValuePath = [];
  // Extracted files, keyed by their formatted value path.
  #files: Map<string, File> = new Map();

  /**
   * Extracts files from `object` along every path rooted at `key`.
   * Paths whose first segment does not start with `.key` are ignored.
   * Mutates `object` (found files are nulled out) and returns the
   * collected files keyed by value path.
   */
  static extractFrom(key: string, object: unknown, paths: TypePath[]) {
    if (!object || typeof object !== "object") {
      throw new Error("expected object");
    }
    const extractor = new FileExtractor();
    const prefix = "." + key;
    for (const path of paths) {
      const head = path[0];
      // NOTE(review): prefix match, not exact-key match — presumably fine
      // for generator-produced paths; verify against the path generator.
      if (!head || !head.startsWith(prefix)) continue;
      extractor.#currentPath = [];
      extractor.#path = path;
      extractor.#extractFromValue(object);
    }
    return extractor.#files;
  }

  // Dispatches on the next path segment for `value`.
  #extractFromValue(value: unknown) {
    const segment = this.#path[this.#currentPath.length];

    if (segment === "?") {
      // Optional value: absent means nothing to extract on this branch.
      if (value == null) return;
      this.#currentPath.push("");
      this.#extractFromValue(value);
      this.#currentPath.pop();
    } else if (segment === "[]") {
      if (!Array.isArray(value)) {
        throw new Error(`Expected array at ${this.#formatPath()}`);
      }
      for (const idx of value.keys()) {
        this.#currentPath.push(`[${idx}]`);
        this.#extractFromArray(value, idx);
        this.#currentPath.pop();
      }
    } else if (segment.startsWith(".")) {
      if (typeof value !== "object" || value === null) {
        throw new Error(`Expected non-null object at ${this.#formatPath()}`);
      }
      this.#currentPath.push(segment);
      this.#extractFromObject(
        value as Record<string, unknown>,
        segment.slice(1),
      );
      this.#currentPath.pop();
    }
  }

  // Consumes a `.key` segment: records a leaf file or keeps descending.
  #extractFromObject(parent: Record<string, unknown>, key: string) {
    const value = parent[key];
    if (this.#currentPath.length !== this.#path.length) {
      this.#extractFromValue(value);
      return;
    }
    if (!(value instanceof File)) {
      throw new Error(`Expected File at ${this.#formatPath()}`);
    }
    this.#files.set(this.#formatPath(), value);
    parent[key] = null; // detach so the remaining value serializes cleanly
  }

  // Consumes an `[]` element: records a leaf file or keeps descending.
  #extractFromArray(parent: unknown[], idx: number) {
    const value = parent[idx];
    if (this.#currentPath.length !== this.#path.length) {
      this.#extractFromValue(value);
      return;
    }
    if (!(value instanceof File)) {
      throw new Error(`Expected File at ${this.#formatPath()}`);
    }
    this.#files.set(this.#formatPath(), value);
    parent[idx] = null; // detach so the remaining value serializes cleanly
  }

  // Renders #currentPath as a dotted path, turning `[i]` segments into `.i`.
  #formatPath() {
    let out = "";
    for (const segment of this.#currentPath) {
      out += segment.startsWith("[") ? `.${segment.slice(1, -1)}` : segment;
    }
    return out;
  }
}

type NodeMeta = {
subNodes?: [string, () => NodeMeta][];
variants?: [string, () => NodeMeta][];
Expand Down Expand Up @@ -344,6 +440,7 @@ function convertQueryNodeGql(
typeToGqlTypeMap: Record<string, string>,
node: SelectNode,
variables: Map<string, NodeArgValue>,
files: Map<string, File>,
) {
let out =
node.nodeName == node.instanceName
Expand All @@ -352,20 +449,36 @@ function convertQueryNodeGql(

const args = node.args;
if (args && Object.keys(args).length > 0) {
out = `${out} (${Object.entries(args)
.map(([key, val]) => {
const name = `in${variables.size}`;
variables.set(name, val);
return `${key}: $${name}`;
})
.join(", ")})`;
const argsRow = [];

for (const [key, val] of Object.entries(args)) {
const name = `in${variables.size}`;
const obj = { [key]: val.value };

if (node.files && node.files.length > 0) {
const extractedFiles = FileExtractor.extractFrom(key, obj, node.files);

for (const [path, file] of extractedFiles) {
const pathInVariables = path.replace(/^\.[^\.\[]+/, `.${name}`);
files.set(pathInVariables, file);
}
}

val.value = obj[key];
variables.set(name, val);
argsRow.push(`${key}: $${name}`);
}

out = `${out} (${argsRow.join(", ")})`;
}

const subNodes = node.subNodes;
if (subNodes) {
if (Array.isArray(subNodes)) {
out = `${out} { ${subNodes
.map((node) => convertQueryNodeGql(typeToGqlTypeMap, node, variables))
.map((node) =>
convertQueryNodeGql(typeToGqlTypeMap, node, variables, files),
)
.join(" ")} }`;
} else {
out = `${out} { ${Object.entries(subNodes)
Expand All @@ -380,7 +493,7 @@ function convertQueryNodeGql(
return `... on ${gqlTy} {${subNodes
.map((node) =>
convertQueryNodeGql(typeToGqlTypeMap, node, variables),
convertQueryNodeGql(typeToGqlTypeMap, node, variables, files),
)
.join(" ")}}`;
})
Expand All @@ -398,11 +511,12 @@ function buildGql(
name: string = "",
) {
const variables = new Map<string, NodeArgValue>();
const files = new Map<string, File>();

const rootNodes = Object.entries(query)
.map(([key, node]) => {
const fixedNode = { ...node, instanceName: key };
return convertQueryNodeGql(typeToGqlTypeMap, fixedNode, variables);
return convertQueryNodeGql(typeToGqlTypeMap, fixedNode, variables, files);
})
.join("\n ");

Expand All @@ -423,6 +537,7 @@ function buildGql(
variables: Object.fromEntries(
[...variables.entries()].map(([key, val]) => [key, val.value]),
),
files,
};
}

Expand All @@ -431,22 +546,43 @@ async function fetchGql(
doc: string,
variables: Record<string, unknown>,
options: GraphQlTransportOptions,
files?: Map<string, File>,
) {
// console.log(doc, variables);
let body: FormData | string = JSON.stringify({
query: doc,
variables,
});

const additionalHeaders: HeadersInit = {};

if (files && files.size > 0) {
const data = new FormData();
data.set("operations", body);
const map: Record<string, string[]> = {};
for (const [i, [path, file]] of [...(files?.entries() ?? [])].entries()) {
const key = `${i}`;
// TODO single file on multiple paths
map[key] = ["variables" + path];
data.set(key, file);
}
data.set("map", JSON.stringify(map));
body = data;
} else {
additionalHeaders["content-type"] = "application/json";
}

const fetchImpl = options.fetch ?? fetch;
const res = await fetchImpl(addr, {
...options,
method: "POST",
headers: {
accept: "application/json",
"content-type": "application/json",
...additionalHeaders,
...(options.headers ?? {}),
},
body: JSON.stringify({
query: doc,
variables,
}),
body,
});

if (!res.ok) {
const body = await res.text().catch((err) => `error reading body: ${err} `);
throw new (Error as ErrorPolyfill)(
Expand Down Expand Up @@ -483,12 +619,16 @@ export class GraphQLTransport {
async #request(
doc: string,
variables: Record<string, unknown>,
options?: GraphQlTransportOptions,
options: GraphQlTransportOptions,
files?: Map<string, File>,
) {
const res = await fetchGql(this.address, doc, variables, {
...this.options,
...options,
});
const res = await fetchGql(
this.address,
doc,
variables,
{ ...this.options, ...options },
files,
);
if ("errors" in res) {
throw new (Error as ErrorPolyfill)("graphql errors on response", {
cause: res.errors,
Expand Down Expand Up @@ -521,7 +661,11 @@ export class GraphQLTransport {
"query",
name,
);
return (await this.#request(doc, variables, options)) as QueryDocOut<Doc>;
return (await this.#request(
doc,
variables,
options ?? {},
)) as QueryDocOut<Doc>;
}

/**
Expand All @@ -537,7 +681,7 @@ export class GraphQLTransport {
name?: string;
} = {},
): Promise<QueryDocOut<Doc>> {
const { variables, doc } = buildGql(
const { variables, doc, files } = buildGql(
this.typeToGqlTypeMap,
Object.fromEntries(
Object.entries(query).map(([key, val]) => [
Expand All @@ -548,7 +692,12 @@ export class GraphQLTransport {
"mutation",
name,
);
return (await this.#request(doc, variables, options)) as QueryDocOut<Doc>;
return (await this.#request(
doc,
variables,
options ?? {},
files,
)) as QueryDocOut<Doc>;
}

/**
Expand Down
Loading

0 comments on commit ceee646

Please sign in to comment.