From 4037040f3b2ebdb1eeb3d78c46196b435522e9fd Mon Sep 17 00:00:00 2001
From: Luckas
Date: Fri, 6 Dec 2024 05:01:25 +0300
Subject: [PATCH] feat(metagen): typescript client file upload (#934)

- Closes
  [MET-768](https://linear.app/metatypedev/issue/MET-768/add-file-upload-support-for-typescript-metagen-client).

#### Migration notes

---

- [x] The change comes with new or modified tests
- [ ] Hard-to-understand functions have explanatory comments
- [ ] End-user documentation is updated to reflect the change
---
 deno.lock                                     |   1 +
 src/metagen/src/client_py/node_metas.rs       |  20 +-
 src/metagen/src/client_ts/mod.rs              |   1 +
 src/metagen/src/client_ts/node_metas.rs       |  25 +-
 src/metagen/src/client_ts/static/mod.ts       | 394 +++++---
 src/metagen/src/shared/files.rs               |  13 +
 tests/metagen/metagen_test.ts                 |   8 +
 tests/metagen/typegraphs/sample/metatype.yml  |   3 +
 tests/metagen/typegraphs/sample/ts/client.ts  | 394 +++++---
 .../typegraphs/sample/ts_upload/client.ts     | 938 ++++++++++++++++++
 .../typegraphs/sample/ts_upload/main.ts       |  27 +
 11 files changed, 1567 insertions(+), 257 deletions(-)
 create mode 100644 tests/metagen/typegraphs/sample/ts_upload/client.ts
 create mode 100644 tests/metagen/typegraphs/sample/ts_upload/main.ts

diff --git a/deno.lock b/deno.lock
index 979dcd749..9e243f308 100644
--- a/deno.lock
+++ b/deno.lock
@@ -36,6 +36,7 @@
     "jsr:@std/semver@^1.0.1": "jsr:@std/semver@1.0.3",
     "jsr:@std/streams@0.221.0": "jsr:@std/streams@0.221.0",
     "jsr:@std/streams@1": "jsr:@std/streams@1.0.4",
+    "jsr:@std/streams@^1.0.2": "jsr:@std/streams@1.0.4",
     "jsr:@std/testing@^1.0.1": "jsr:@std/testing@1.0.2",
     "jsr:@std/uuid@^1.0.1": "jsr:@std/uuid@1.0.3",
     "jsr:@std/yaml@^1.0.4": "jsr:@std/yaml@1.0.5",
diff --git a/src/metagen/src/client_py/node_metas.rs b/src/metagen/src/client_py/node_metas.rs
index 1c5d29e40..ef788764f 100644
--- a/src/metagen/src/client_py/node_metas.rs
+++ b/src/metagen/src/client_py/node_metas.rs
@@ -1,14 +1,17 @@
 // Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0.
// SPDX-License-Identifier: MPL-2.0 -use std::{collections::HashMap, fmt::Write, ops::Not}; +use std::{collections::HashMap, fmt::Write}; use common::typegraph::*; use super::utils::normalize_type_title; use crate::{ interlude::*, - shared::{files::TypePath, types::*}, + shared::{ + files::{serialize_typepaths_json, TypePath}, + types::*, + }, }; pub struct PyNodeMetasRenderer { @@ -187,18 +190,7 @@ impl RenderType for PyNodeMetasRenderer { let input_files = self .input_files .get(&cursor.id) - .map(|files| { - files - .iter() - .map(|path| path.to_vec_str()) - .collect::>() - }) - .and_then(|files| { - files - .is_empty() - .not() - .then_some(serde_json::to_string(&files).unwrap()) - }); + .and_then(|files| serialize_typepaths_json(files)); self.render_for_func(renderer, &ty_name, &return_ty_name, props, input_files)?; ty_name } diff --git a/src/metagen/src/client_ts/mod.rs b/src/metagen/src/client_ts/mod.rs index d859976dc..eccc05182 100644 --- a/src/metagen/src/client_ts/mod.rs +++ b/src/metagen/src/client_ts/mod.rs @@ -286,6 +286,7 @@ fn render_node_metas( Rc::new(node_metas::TsNodeMetasRenderer { name_mapper, named_types: named_types.clone(), + input_files: manifest.input_files.clone(), }), ); for &id in &manifest.node_metas { diff --git a/src/metagen/src/client_ts/node_metas.rs b/src/metagen/src/client_ts/node_metas.rs index f9db07b2e..8605a5e98 100644 --- a/src/metagen/src/client_ts/node_metas.rs +++ b/src/metagen/src/client_ts/node_metas.rs @@ -1,16 +1,23 @@ // Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. // SPDX-License-Identifier: MPL-2.0 -use std::fmt::Write; +use std::{collections::HashMap, fmt::Write}; use common::typegraph::*; use super::utils::normalize_type_title; -use crate::{interlude::*, shared::types::*}; +use crate::{ + interlude::*, + shared::{ + files::{serialize_typepaths_json, TypePath}, + types::*, + }, +}; pub struct TsNodeMetasRenderer { pub name_mapper: Rc, pub named_types: Rc>>, + pub input_files: Rc>>, } impl TsNodeMetasRenderer { @@ -51,6 +58,7 @@ impl TsNodeMetasRenderer { ty_name: &str, return_node: &str, argument_fields: Option>>, + input_files: Option, ) -> std::fmt::Result { write!( dest, @@ -80,6 +88,13 @@ impl TsNodeMetasRenderer { }},"# )?; } + if let Some(input_files) = input_files { + write!( + dest, + r#" + inputFiles: {input_files},"# + )?; + } write!( dest, r#" @@ -167,7 +182,11 @@ impl RenderType for TsNodeMetasRenderer { }; let node_name = &base.title; let ty_name = normalize_type_title(node_name).to_pascal_case(); - self.render_for_func(renderer, &ty_name, &return_ty_name, props)?; + let input_files = self + .input_files + .get(&cursor.id) + .and_then(|files| serialize_typepaths_json(files)); + self.render_for_func(renderer, &ty_name, &return_ty_name, props, input_files)?; ty_name } TypeNode::Object { data, base } => { diff --git a/src/metagen/src/client_ts/static/mod.ts b/src/metagen/src/client_ts/static/mod.ts index cc9485f6f..c1911f318 100644 --- a/src/metagen/src/client_ts/static/mod.ts +++ b/src/metagen/src/client_ts/static/mod.ts @@ -9,9 +9,7 @@ function _selectionToNodeSet( // check at the end const foundNodes = new Set(Object.keys(selection)); - for ( - const [nodeName, metaFn] of metas - ) { + for (const [nodeName, metaFn] of metas) { foundNodes.delete(nodeName); const nodeSelection = selection[nodeName]; @@ -20,15 +18,16 @@ function _selectionToNodeSet( continue; } - const { argumentTypes, subNodes, variants } = metaFn(); + const { argumentTypes, subNodes, variants, inputFiles } = metaFn(); - const 
nodeInstances = nodeSelection instanceof Alias - ? nodeSelection.aliases() - : { [nodeName]: nodeSelection }; + const nodeInstances = + nodeSelection instanceof Alias + ? nodeSelection.aliases() + : { [nodeName]: nodeSelection }; - for ( - const [instanceName, instanceSelection] of Object.entries(nodeInstances) - ) { + for (const [instanceName, instanceSelection] of Object.entries( + nodeInstances, + )) { if (!instanceSelection && !selectAll) { continue; } @@ -37,7 +36,7 @@ function _selectionToNodeSet( `nested Alias discovered at ${parentPath}.${instanceName}`, ); } - const node: SelectNode = { instanceName, nodeName }; + const node: SelectNode = { instanceName, nodeName, files: inputFiles }; if (argumentTypes) { // make sure the arg is of the expected form @@ -129,10 +128,10 @@ function _selectionToNodeSet( const variant_select = subSelections[variantTy]; const nodes = variant_select ? _selectionToNodeSet( - variant_select as Selection, - variant_meta.subNodes, - `${parentPath}.${instanceName}.variant(${variantTy})`, - ) + variant_select as Selection, + variant_meta.subNodes, + `${parentPath}.${instanceName}.variant(${variantTy})`, + ) : []; nodes.push({ nodeName: "__typename", @@ -174,13 +173,12 @@ type SelectNode<_Out = unknown> = { instanceName: string; args?: NodeArgs; subNodes?: SubNodes; + files?: TypePath[]; }; export class QueryNode { #inner: SelectNode; - constructor( - inner: SelectNode, - ) { + constructor(inner: SelectNode) { this.#inner = inner; } @@ -191,9 +189,7 @@ export class QueryNode { export class MutationNode { #inner: SelectNode; - constructor( - inner: SelectNode, - ) { + constructor(inner: SelectNode) { this.#inner = inner; } @@ -202,19 +198,122 @@ export class MutationNode { } } -type SelectNodeOut = T extends (QueryNode | MutationNode) +type SelectNodeOut = T extends QueryNode | MutationNode ? O : never; -type QueryDocOut = T extends - Record | MutationNode> ? { - [K in keyof T]: SelectNodeOut; +type QueryDocOut = + T extends Record | MutationNode> + ? { + [K in keyof T]: SelectNodeOut; + } + : never; + +type TypePath = ("?" | "[]" | `.${string}`)[]; +type ValuePath = ("" | `[${number}]` | `.${string}`)[]; + +class FileExtractor { + #path: TypePath = []; + #currentPath: ValuePath = []; + #files: Map = new Map(); + + static extractFrom(key: string, object: unknown, paths: TypePath[]) { + const extractor = new FileExtractor(); + if (!object || typeof object !== "object") { + throw new Error("expected object"); + } + for (const path of paths) { + if (path[0] && path[0].startsWith("." 
+ key)) { + extractor.#currentPath = []; + extractor.#path = path; + extractor.#extractFromValue(object); + } + } + return extractor.#files; } - : never; + + #extractFromValue(value: unknown) { + const nextSegment = this.#path[this.#currentPath.length]; + if (nextSegment === "?") { + if (value === null || value === undefined) { + return; + } + this.#currentPath.push(""); + this.#extractFromValue(value); + this.#currentPath.pop(); + return; + } + + if (nextSegment === "[]") { + if (!Array.isArray(value)) { + throw new Error(`Expected array at ${this.#formatPath()}`); + } + for (let i = 0; i < value.length; i++) { + this.#currentPath.push(`[${i}]`); + this.#extractFromArray(value, i); + this.#currentPath.pop(); + } + return; + } + + if (nextSegment.startsWith(".")) { + if (typeof value !== "object" || value === null) { + throw new Error(`Expected non-null object at ${this.#formatPath()}`); + } + this.#currentPath.push(nextSegment); + this.#extractFromObject( + value as Record, + nextSegment.slice(1), + ); + this.#currentPath.pop(); + return; + } + } + + #extractFromObject(parent: Record, key: string) { + const value = parent[key]; + if (this.#currentPath.length == this.#path.length) { + if (value instanceof File) { + this.#files.set(this.#formatPath(), value); + parent[key] = null; + return; + } + throw new Error(`Expected File at ${this.#formatPath()}`); + } + + this.#extractFromValue(value); + } + + #extractFromArray(parent: unknown[], idx: number) { + const value = parent[idx]; + if (this.#currentPath.length == this.#path.length) { + if (value instanceof File) { + this.#files.set(this.#formatPath(), value); + parent[idx] = null; + return; + } + throw new Error(`Expected File at ${this.#formatPath()}`); + } + + this.#extractFromValue(value); + } + + #formatPath() { + return this.#currentPath + .map((seg) => { + if (seg.startsWith("[")) { + return `.${seg.slice(1, -1)}`; + } + return seg; + }) + .join(""); + } +} type NodeMeta = { subNodes?: [string, () => NodeMeta][]; variants?: [string, () => NodeMeta][]; argumentTypes?: { [name: string]: string }; + inputFiles?: TypePath[]; }; /* Selection types section */ @@ -232,11 +331,7 @@ type Selection = { | Selection; }; -type ScalarSelectNoArgs = - | boolean - | Alias - | null - | undefined; +type ScalarSelectNoArgs = boolean | Alias | null | undefined; type ScalarSelectArgs> = | ArgT @@ -267,9 +362,7 @@ type CompositeSelectArgs, SelectionT> = */ export class Alias { #aliases: Record; - constructor( - aliases: Record, - ) { + constructor(aliases: Record) { this.#aliases = aliases; } aliases() { @@ -344,35 +437,49 @@ function convertQueryNodeGql( typeToGqlTypeMap: Record, node: SelectNode, variables: Map, + files: Map, ) { - let out = node.nodeName == node.instanceName - ? node.nodeName - : `${node.instanceName}: ${node.nodeName}`; + let out = + node.nodeName == node.instanceName + ? 
node.nodeName + : `${node.instanceName}: ${node.nodeName}`; const args = node.args; if (args && Object.keys(args).length > 0) { - out = `${out} (${ - Object.entries(args) - .map(([key, val]) => { - const name = `in${variables.size}`; - variables.set(name, val); - return `${key}: $${name}`; - }) - .join(", ") - })`; + const argsRow = []; + + for (const [key, val] of Object.entries(args)) { + const name = `in${variables.size}`; + const obj = { [key]: val.value }; + + if (node.files && node.files.length > 0) { + const extractedFiles = FileExtractor.extractFrom(key, obj, node.files); + + for (const [path, file] of extractedFiles) { + const pathInVariables = path.replace(/^\.[^\.\[]+/, `.${name}`); + files.set(pathInVariables, file); + } + } + + val.value = obj[key]; + variables.set(name, val); + argsRow.push(`${key}: $${name}`); + } + + out = `${out} (${argsRow.join(", ")})`; } const subNodes = node.subNodes; if (subNodes) { if (Array.isArray(subNodes)) { - out = `${out} { ${ - subNodes.map((node) => - convertQueryNodeGql(typeToGqlTypeMap, node, variables) - ).join(" ") - } }`; + out = `${out} { ${subNodes + .map((node) => + convertQueryNodeGql(typeToGqlTypeMap, node, variables, files), + ) + .join(" ")} }`; } else { - out = `${out} { ${ - Object.entries(subNodes).map(([variantTy, subNodes]) => { + out = `${out} { ${Object.entries(subNodes) + .map(([variantTy, subNodes]) => { let gqlTy = typeToGqlTypeMap[variantTy]; if (!gqlTy) { throw new Error( @@ -381,13 +488,13 @@ function convertQueryNodeGql( } gqlTy = gqlTy.replace(/[!]+$/, ""); - return `... on ${gqlTy} {${ - subNodes.map((node) => - convertQueryNodeGql(typeToGqlTypeMap, node, variables) - ).join(" ") - }}`; - }).join(" ") - } }`; + return `... on ${gqlTy} {${subNodes + .map((node) => + convertQueryNodeGql(typeToGqlTypeMap, node, variables, files), + ) + .join(" ")}}`; + }) + .join(" ")} }`; } } return out; @@ -401,12 +508,12 @@ function buildGql( name: string = "", ) { const variables = new Map(); + const files = new Map(); - const rootNodes = Object - .entries(query) + const rootNodes = Object.entries(query) .map(([key, node]) => { const fixedNode = { ...node, instanceName: key }; - return convertQueryNodeGql(typeToGqlTypeMap, fixedNode, variables); + return convertQueryNodeGql(typeToGqlTypeMap, fixedNode, variables, files); }) .join("\n "); @@ -425,9 +532,9 @@ function buildGql( return { doc, variables: Object.fromEntries( - [...variables.entries()] - .map(([key, val]) => [key, val.value]), + [...variables.entries()].map(([key, val]) => [key, val.value]), ), + files, }; } @@ -436,22 +543,58 @@ async function fetchGql( doc: string, variables: Record, options: GraphQlTransportOptions, + files?: Map, ) { - // console.log(doc, variables); + let body: FormData | string = JSON.stringify({ + query: doc, + variables, + }); + + const additionalHeaders: HeadersInit = {}; + + if (files && files.size > 0) { + const data = new FormData(); + const fileMap = new Map(); + const map: Record = {}; + + for (const [path, file] of files) { + const array = fileMap.get(file); + const variable = "variables" + path; + if (array) { + array.push(variable); + } else { + fileMap.set(file, [variable]); + } + } + + let index = 0; + for (const [file, variables] of fileMap) { + const key = index.toString(); + map[key] = variables; + data.set(key, file); + index += 1; + } + + data.set("operations", body); + data.set("map", JSON.stringify(map)); + + body = data; + } else { + additionalHeaders["content-type"] = "application/json"; + } + const fetchImpl = options.fetch 
?? fetch; const res = await fetchImpl(addr, { ...options, method: "POST", headers: { accept: "application/json", - "content-type": "application/json", - ...options.headers ?? {}, + ...additionalHeaders, + ...(options.headers ?? {}), }, - body: JSON.stringify({ - query: doc, - variables, - }), + body, }); + if (!res.ok) { const body = await res.text().catch((err) => `error reading body: ${err} `); throw new (Error as ErrorPolyfill)( @@ -465,17 +608,14 @@ async function fetchGql( ); } if (res.headers.get("content-type") != "application/json") { - throw new (Error as ErrorPolyfill)( - "unexpected content type in response", - { - cause: { - response: res, - body: await res.text().catch((err) => `error reading body: ${err} `), - }, + throw new (Error as ErrorPolyfill)("unexpected content type in response", { + cause: { + response: res, + body: await res.text().catch((err) => `error reading body: ${err} `), }, - ); + }); } - return await res.json() as { data: unknown; errors?: object[] }; + return (await res.json()) as { data: unknown; errors?: object[] }; } /** @@ -486,18 +626,21 @@ export class GraphQLTransport { public address: URL, public options: GraphQlTransportOptions, private typeToGqlTypeMap: Record, - ) { - } + ) {} async #request( doc: string, variables: Record, - options?: GraphQlTransportOptions, + options: GraphQlTransportOptions, + files?: Map, ) { - const res = await fetchGql(this.address, doc, variables, { - ...this.options, - ...options, - }); + const res = await fetchGql( + this.address, + doc, + variables, + { ...this.options, ...options }, + files, + ); if ("errors" in res) { throw new (Error as ErrorPolyfill)("graphql errors on response", { cause: res.errors, @@ -511,7 +654,10 @@ export class GraphQLTransport { */ async query>>( query: Doc, - { options, name = "" }: { + { + options, + name = "", + }: { options?: GraphQlTransportOptions; name?: string; } = {}, @@ -519,14 +665,19 @@ export class GraphQLTransport { const { variables, doc } = buildGql( this.typeToGqlTypeMap, Object.fromEntries( - Object.entries(query).map(( - [key, val], - ) => [key, (val as QueryNode).inner()]), + Object.entries(query).map(([key, val]) => [ + key, + (val as QueryNode).inner(), + ]), ), "query", name, ); - return await this.#request(doc, variables, options) as QueryDocOut; + return (await this.#request( + doc, + variables, + options ?? {}, + )) as QueryDocOut; } /** @@ -534,22 +685,31 @@ export class GraphQLTransport { */ async mutation>>( query: Doc, - { options, name = "" }: { + { + options, + name = "", + }: { options?: GraphQlTransportOptions; name?: string; } = {}, ): Promise> { - const { variables, doc } = buildGql( + const { variables, doc, files } = buildGql( this.typeToGqlTypeMap, Object.fromEntries( - Object.entries(query).map(( - [key, val], - ) => [key, (val as MutationNode).inner()]), + Object.entries(query).map(([key, val]) => [ + key, + (val as MutationNode).inner(), + ]), ), "mutation", name, ); - return await this.#request(doc, variables, options) as QueryDocOut; + return (await this.#request( + doc, + variables, + options ?? 
{}, + files, + )) as QueryDocOut; } /** @@ -618,9 +778,10 @@ export class PreparedRequest< const { doc, variables } = buildGql( typeToGqlTypeMap, Object.fromEntries( - Object.entries(dryRunNode).map(( - [key, val], - ) => [key, (val as MutationNode).inner()]), + Object.entries(dryRunNode).map(([key, val]) => [ + key, + (val as MutationNode).inner(), + ]), ), ty, name, @@ -629,10 +790,7 @@ export class PreparedRequest< this.#mappings = variables; } - resolveVariables( - args: T, - mappings: Record, - ) { + resolveVariables(args: T, mappings: Record) { const resolvedVariables = {} as Record; for (const [key, val] of Object.entries(mappings)) { if (val instanceof PlaceholderValue) { @@ -649,25 +807,21 @@ export class PreparedRequest< /** * Execute the prepared request. */ - async perform(args: T, opts?: GraphQlTransportOptions): Promise< - { - [K in keyof Doc]: SelectNodeOut; - } - > { + async perform( + args: T, + opts?: GraphQlTransportOptions, + ): Promise<{ + [K in keyof Doc]: SelectNodeOut; + }> { const resolvedVariables = this.resolveVariables(args, this.#mappings); // console.log(this.doc, { // resolvedVariables, // mapping: this.#mappings, // }); - const res = await fetchGql( - this.address, - this.doc, - resolvedVariables, - { - ...this.options, - ...opts, - }, - ); + const res = await fetchGql(this.address, this.doc, resolvedVariables, { + ...this.options, + ...opts, + }); if ("errors" in res) { throw new (Error as ErrorPolyfill)("graphql errors on response", { cause: res.errors, diff --git a/src/metagen/src/shared/files.rs b/src/metagen/src/shared/files.rs index 096e5a66c..2de61113b 100644 --- a/src/metagen/src/shared/files.rs +++ b/src/metagen/src/shared/files.rs @@ -81,6 +81,19 @@ impl TypePath { } } +pub fn serialize_typepaths_json(typepaths: &[TypePath]) -> Option { + let paths = typepaths + .iter() + .map(|path| path.to_vec_str()) + .collect::>(); + + if paths.is_empty() { + None + } else { + Some(serde_json::to_string(&paths).unwrap()) + } +} + pub fn get_path_to_files(tg: &Typegraph, root: u32) -> Result>> { visitor2::traverse_types( tg, diff --git a/tests/metagen/metagen_test.ts b/tests/metagen/metagen_test.ts index 26e6750c2..9f6751f6e 100644 --- a/tests/metagen/metagen_test.ts +++ b/tests/metagen/metagen_test.ts @@ -685,6 +685,14 @@ Meta.test( ), expected: zod.tuple([expectedSchemaU1, expectedSchemaUn]), }, + { + name: "client_ts_upload", + skip: false, + command: $`bash -c "deno run -A main.ts"`.cwd( + join(scriptsPath, "ts_upload"), + ), + expected: zod.tuple([expectedSchemaU1, expectedSchemaUn]), + }, ]; await using _engine2 = await t.engine( diff --git a/tests/metagen/typegraphs/sample/metatype.yml b/tests/metagen/typegraphs/sample/metatype.yml index 7d3a3c386..baf4405a2 100644 --- a/tests/metagen/typegraphs/sample/metatype.yml +++ b/tests/metagen/typegraphs/sample/metatype.yml @@ -26,3 +26,6 @@ metagen: - generator: client_py path: ./py_upload/ typegraph_path: ../file_upload_sample.ts + - generator: client_ts + path: ./ts_upload/ + typegraph_path: ../file_upload_sample.ts diff --git a/tests/metagen/typegraphs/sample/ts/client.ts b/tests/metagen/typegraphs/sample/ts/client.ts index 0c2619e47..b96737447 100644 --- a/tests/metagen/typegraphs/sample/ts/client.ts +++ b/tests/metagen/typegraphs/sample/ts/client.ts @@ -12,9 +12,7 @@ function _selectionToNodeSet( // check at the end const foundNodes = new Set(Object.keys(selection)); - for ( - const [nodeName, metaFn] of metas - ) { + for (const [nodeName, metaFn] of metas) { foundNodes.delete(nodeName); const nodeSelection 
= selection[nodeName]; @@ -23,15 +21,16 @@ function _selectionToNodeSet( continue; } - const { argumentTypes, subNodes, variants } = metaFn(); + const { argumentTypes, subNodes, variants, inputFiles } = metaFn(); - const nodeInstances = nodeSelection instanceof Alias - ? nodeSelection.aliases() - : { [nodeName]: nodeSelection }; + const nodeInstances = + nodeSelection instanceof Alias + ? nodeSelection.aliases() + : { [nodeName]: nodeSelection }; - for ( - const [instanceName, instanceSelection] of Object.entries(nodeInstances) - ) { + for (const [instanceName, instanceSelection] of Object.entries( + nodeInstances, + )) { if (!instanceSelection && !selectAll) { continue; } @@ -40,7 +39,7 @@ function _selectionToNodeSet( `nested Alias discovered at ${parentPath}.${instanceName}`, ); } - const node: SelectNode = { instanceName, nodeName }; + const node: SelectNode = { instanceName, nodeName, files: inputFiles }; if (argumentTypes) { // make sure the arg is of the expected form @@ -132,10 +131,10 @@ function _selectionToNodeSet( const variant_select = subSelections[variantTy]; const nodes = variant_select ? _selectionToNodeSet( - variant_select as Selection, - variant_meta.subNodes, - `${parentPath}.${instanceName}.variant(${variantTy})`, - ) + variant_select as Selection, + variant_meta.subNodes, + `${parentPath}.${instanceName}.variant(${variantTy})`, + ) : []; nodes.push({ nodeName: "__typename", @@ -177,13 +176,12 @@ type SelectNode<_Out = unknown> = { instanceName: string; args?: NodeArgs; subNodes?: SubNodes; + files?: TypePath[]; }; export class QueryNode { #inner: SelectNode; - constructor( - inner: SelectNode, - ) { + constructor(inner: SelectNode) { this.#inner = inner; } @@ -194,9 +192,7 @@ export class QueryNode { export class MutationNode { #inner: SelectNode; - constructor( - inner: SelectNode, - ) { + constructor(inner: SelectNode) { this.#inner = inner; } @@ -205,19 +201,122 @@ export class MutationNode { } } -type SelectNodeOut = T extends (QueryNode | MutationNode) +type SelectNodeOut = T extends QueryNode | MutationNode ? O : never; -type QueryDocOut = T extends - Record | MutationNode> ? { - [K in keyof T]: SelectNodeOut; +type QueryDocOut = + T extends Record | MutationNode> + ? { + [K in keyof T]: SelectNodeOut; + } + : never; + +type TypePath = ("?" | "[]" | `.${string}`)[]; +type ValuePath = ("" | `[${number}]` | `.${string}`)[]; + +class FileExtractor { + #path: TypePath = []; + #currentPath: ValuePath = []; + #files: Map = new Map(); + + static extractFrom(key: string, object: unknown, paths: TypePath[]) { + const extractor = new FileExtractor(); + if (!object || typeof object !== "object") { + throw new Error("expected object"); + } + for (const path of paths) { + if (path[0] && path[0].startsWith("." 
+ key)) { + extractor.#currentPath = []; + extractor.#path = path; + extractor.#extractFromValue(object); + } + } + return extractor.#files; } - : never; + + #extractFromValue(value: unknown) { + const nextSegment = this.#path[this.#currentPath.length]; + if (nextSegment === "?") { + if (value === null || value === undefined) { + return; + } + this.#currentPath.push(""); + this.#extractFromValue(value); + this.#currentPath.pop(); + return; + } + + if (nextSegment === "[]") { + if (!Array.isArray(value)) { + throw new Error(`Expected array at ${this.#formatPath()}`); + } + for (let i = 0; i < value.length; i++) { + this.#currentPath.push(`[${i}]`); + this.#extractFromArray(value, i); + this.#currentPath.pop(); + } + return; + } + + if (nextSegment.startsWith(".")) { + if (typeof value !== "object" || value === null) { + throw new Error(`Expected non-null object at ${this.#formatPath()}`); + } + this.#currentPath.push(nextSegment); + this.#extractFromObject( + value as Record, + nextSegment.slice(1), + ); + this.#currentPath.pop(); + return; + } + } + + #extractFromObject(parent: Record, key: string) { + const value = parent[key]; + if (this.#currentPath.length == this.#path.length) { + if (value instanceof File) { + this.#files.set(this.#formatPath(), value); + parent[key] = null; + return; + } + throw new Error(`Expected File at ${this.#formatPath()}`); + } + + this.#extractFromValue(value); + } + + #extractFromArray(parent: unknown[], idx: number) { + const value = parent[idx]; + if (this.#currentPath.length == this.#path.length) { + if (value instanceof File) { + this.#files.set(this.#formatPath(), value); + parent[idx] = null; + return; + } + throw new Error(`Expected File at ${this.#formatPath()}`); + } + + this.#extractFromValue(value); + } + + #formatPath() { + return this.#currentPath + .map((seg) => { + if (seg.startsWith("[")) { + return `.${seg.slice(1, -1)}`; + } + return seg; + }) + .join(""); + } +} type NodeMeta = { subNodes?: [string, () => NodeMeta][]; variants?: [string, () => NodeMeta][]; argumentTypes?: { [name: string]: string }; + inputFiles?: TypePath[]; }; /* Selection types section */ @@ -235,11 +334,7 @@ type Selection = { | Selection; }; -type ScalarSelectNoArgs = - | boolean - | Alias - | null - | undefined; +type ScalarSelectNoArgs = boolean | Alias | null | undefined; type ScalarSelectArgs> = | ArgT @@ -270,9 +365,7 @@ type CompositeSelectArgs, SelectionT> = */ export class Alias { #aliases: Record; - constructor( - aliases: Record, - ) { + constructor(aliases: Record) { this.#aliases = aliases; } aliases() { @@ -347,35 +440,49 @@ function convertQueryNodeGql( typeToGqlTypeMap: Record, node: SelectNode, variables: Map, + files: Map, ) { - let out = node.nodeName == node.instanceName - ? node.nodeName - : `${node.instanceName}: ${node.nodeName}`; + let out = + node.nodeName == node.instanceName + ? 
node.nodeName + : `${node.instanceName}: ${node.nodeName}`; const args = node.args; if (args && Object.keys(args).length > 0) { - out = `${out} (${ - Object.entries(args) - .map(([key, val]) => { - const name = `in${variables.size}`; - variables.set(name, val); - return `${key}: $${name}`; - }) - .join(", ") - })`; + const argsRow = []; + + for (const [key, val] of Object.entries(args)) { + const name = `in${variables.size}`; + const obj = { [key]: val.value }; + + if (node.files && node.files.length > 0) { + const extractedFiles = FileExtractor.extractFrom(key, obj, node.files); + + for (const [path, file] of extractedFiles) { + const pathInVariables = path.replace(/^\.[^\.\[]+/, `.${name}`); + files.set(pathInVariables, file); + } + } + + val.value = obj[key]; + variables.set(name, val); + argsRow.push(`${key}: $${name}`); + } + + out = `${out} (${argsRow.join(", ")})`; } const subNodes = node.subNodes; if (subNodes) { if (Array.isArray(subNodes)) { - out = `${out} { ${ - subNodes.map((node) => - convertQueryNodeGql(typeToGqlTypeMap, node, variables) - ).join(" ") - } }`; + out = `${out} { ${subNodes + .map((node) => + convertQueryNodeGql(typeToGqlTypeMap, node, variables, files), + ) + .join(" ")} }`; } else { - out = `${out} { ${ - Object.entries(subNodes).map(([variantTy, subNodes]) => { + out = `${out} { ${Object.entries(subNodes) + .map(([variantTy, subNodes]) => { let gqlTy = typeToGqlTypeMap[variantTy]; if (!gqlTy) { throw new Error( @@ -384,13 +491,13 @@ function convertQueryNodeGql( } gqlTy = gqlTy.replace(/[!]+$/, ""); - return `... on ${gqlTy} {${ - subNodes.map((node) => - convertQueryNodeGql(typeToGqlTypeMap, node, variables) - ).join(" ") - }}`; - }).join(" ") - } }`; + return `... on ${gqlTy} {${subNodes + .map((node) => + convertQueryNodeGql(typeToGqlTypeMap, node, variables, files), + ) + .join(" ")}}`; + }) + .join(" ")} }`; } } return out; @@ -404,12 +511,12 @@ function buildGql( name: string = "", ) { const variables = new Map(); + const files = new Map(); - const rootNodes = Object - .entries(query) + const rootNodes = Object.entries(query) .map(([key, node]) => { const fixedNode = { ...node, instanceName: key }; - return convertQueryNodeGql(typeToGqlTypeMap, fixedNode, variables); + return convertQueryNodeGql(typeToGqlTypeMap, fixedNode, variables, files); }) .join("\n "); @@ -428,9 +535,9 @@ function buildGql( return { doc, variables: Object.fromEntries( - [...variables.entries()] - .map(([key, val]) => [key, val.value]), + [...variables.entries()].map(([key, val]) => [key, val.value]), ), + files, }; } @@ -439,22 +546,58 @@ async function fetchGql( doc: string, variables: Record, options: GraphQlTransportOptions, + files?: Map, ) { - // console.log(doc, variables); + let body: FormData | string = JSON.stringify({ + query: doc, + variables, + }); + + const additionalHeaders: HeadersInit = {}; + + if (files && files.size > 0) { + const data = new FormData(); + const fileMap = new Map(); + const map: Record = {}; + + for (const [path, file] of files) { + const array = fileMap.get(file); + const variable = "variables" + path; + if (array) { + array.push(variable); + } else { + fileMap.set(file, [variable]); + } + } + + let index = 0; + for (const [file, variables] of fileMap) { + const key = index.toString(); + map[key] = variables; + data.set(key, file); + index += 1; + } + + data.set("operations", body); + data.set("map", JSON.stringify(map)); + + body = data; + } else { + additionalHeaders["content-type"] = "application/json"; + } + const fetchImpl = options.fetch 
?? fetch; const res = await fetchImpl(addr, { ...options, method: "POST", headers: { accept: "application/json", - "content-type": "application/json", - ...options.headers ?? {}, + ...additionalHeaders, + ...(options.headers ?? {}), }, - body: JSON.stringify({ - query: doc, - variables, - }), + body, }); + if (!res.ok) { const body = await res.text().catch((err) => `error reading body: ${err} `); throw new (Error as ErrorPolyfill)( @@ -468,17 +611,14 @@ async function fetchGql( ); } if (res.headers.get("content-type") != "application/json") { - throw new (Error as ErrorPolyfill)( - "unexpected content type in response", - { - cause: { - response: res, - body: await res.text().catch((err) => `error reading body: ${err} `), - }, + throw new (Error as ErrorPolyfill)("unexpected content type in response", { + cause: { + response: res, + body: await res.text().catch((err) => `error reading body: ${err} `), }, - ); + }); } - return await res.json() as { data: unknown; errors?: object[] }; + return (await res.json()) as { data: unknown; errors?: object[] }; } /** @@ -489,18 +629,21 @@ export class GraphQLTransport { public address: URL, public options: GraphQlTransportOptions, private typeToGqlTypeMap: Record, - ) { - } + ) {} async #request( doc: string, variables: Record, - options?: GraphQlTransportOptions, + options: GraphQlTransportOptions, + files?: Map, ) { - const res = await fetchGql(this.address, doc, variables, { - ...this.options, - ...options, - }); + const res = await fetchGql( + this.address, + doc, + variables, + { ...this.options, ...options }, + files, + ); if ("errors" in res) { throw new (Error as ErrorPolyfill)("graphql errors on response", { cause: res.errors, @@ -514,7 +657,10 @@ export class GraphQLTransport { */ async query>>( query: Doc, - { options, name = "" }: { + { + options, + name = "", + }: { options?: GraphQlTransportOptions; name?: string; } = {}, @@ -522,14 +668,19 @@ export class GraphQLTransport { const { variables, doc } = buildGql( this.typeToGqlTypeMap, Object.fromEntries( - Object.entries(query).map(( - [key, val], - ) => [key, (val as QueryNode).inner()]), + Object.entries(query).map(([key, val]) => [ + key, + (val as QueryNode).inner(), + ]), ), "query", name, ); - return await this.#request(doc, variables, options) as QueryDocOut; + return (await this.#request( + doc, + variables, + options ?? {}, + )) as QueryDocOut; } /** @@ -537,22 +688,31 @@ export class GraphQLTransport { */ async mutation>>( query: Doc, - { options, name = "" }: { + { + options, + name = "", + }: { options?: GraphQlTransportOptions; name?: string; } = {}, ): Promise> { - const { variables, doc } = buildGql( + const { variables, doc, files } = buildGql( this.typeToGqlTypeMap, Object.fromEntries( - Object.entries(query).map(( - [key, val], - ) => [key, (val as MutationNode).inner()]), + Object.entries(query).map(([key, val]) => [ + key, + (val as MutationNode).inner(), + ]), ), "mutation", name, ); - return await this.#request(doc, variables, options) as QueryDocOut; + return (await this.#request( + doc, + variables, + options ?? 
{}, + files, + )) as QueryDocOut; } /** @@ -621,9 +781,10 @@ export class PreparedRequest< const { doc, variables } = buildGql( typeToGqlTypeMap, Object.fromEntries( - Object.entries(dryRunNode).map(( - [key, val], - ) => [key, (val as MutationNode).inner()]), + Object.entries(dryRunNode).map(([key, val]) => [ + key, + (val as MutationNode).inner(), + ]), ), ty, name, @@ -632,10 +793,7 @@ export class PreparedRequest< this.#mappings = variables; } - resolveVariables( - args: T, - mappings: Record, - ) { + resolveVariables(args: T, mappings: Record) { const resolvedVariables = {} as Record; for (const [key, val] of Object.entries(mappings)) { if (val instanceof PlaceholderValue) { @@ -652,25 +810,21 @@ export class PreparedRequest< /** * Execute the prepared request. */ - async perform(args: T, opts?: GraphQlTransportOptions): Promise< - { - [K in keyof Doc]: SelectNodeOut; - } - > { + async perform( + args: T, + opts?: GraphQlTransportOptions, + ): Promise<{ + [K in keyof Doc]: SelectNodeOut; + }> { const resolvedVariables = this.resolveVariables(args, this.#mappings); // console.log(this.doc, { // resolvedVariables, // mapping: this.#mappings, // }); - const res = await fetchGql( - this.address, - this.doc, - resolvedVariables, - { - ...this.options, - ...opts, - }, - ); + const res = await fetchGql(this.address, this.doc, resolvedVariables, { + ...this.options, + ...opts, + }); if ("errors" in res) { throw new (Error as ErrorPolyfill)("graphql errors on response", { cause: res.errors, diff --git a/tests/metagen/typegraphs/sample/ts_upload/client.ts b/tests/metagen/typegraphs/sample/ts_upload/client.ts new file mode 100644 index 000000000..206f12044 --- /dev/null +++ b/tests/metagen/typegraphs/sample/ts_upload/client.ts @@ -0,0 +1,938 @@ +// This file was @generated by metagen and is intended +// to be generated again on subsequent metagen runs. + +function _selectionToNodeSet( + selection: Selection, + metas: [string, () => NodeMeta][], + parentPath: string, +): SelectNode[] { + const out = [] as SelectNode[]; + const selectAll = selection._ == "selectAll"; + // set of the user specified nodes to do sanity + // check at the end + const foundNodes = new Set(Object.keys(selection)); + + for (const [nodeName, metaFn] of metas) { + foundNodes.delete(nodeName); + + const nodeSelection = selection[nodeName]; + if (!nodeSelection && !selectAll) { + // this node was not selected + continue; + } + + const { argumentTypes, subNodes, variants, inputFiles } = metaFn(); + + const nodeInstances = + nodeSelection instanceof Alias + ? 
nodeSelection.aliases() + : { [nodeName]: nodeSelection }; + + for (const [instanceName, instanceSelection] of Object.entries( + nodeInstances, + )) { + if (!instanceSelection && !selectAll) { + continue; + } + if (instanceSelection instanceof Alias) { + throw new Error( + `nested Alias discovered at ${parentPath}.${instanceName}`, + ); + } + const node: SelectNode = { instanceName, nodeName, files: inputFiles }; + + if (argumentTypes) { + // make sure the arg is of the expected form + let arg = instanceSelection; + if (Array.isArray(arg)) { + arg = arg[0]; + } + // TODO: consider bringing in Zod (after hoisting impl into common lib) + if (typeof arg != "object" || arg === null) { + throw new Error( + `node at ${parentPath}.${instanceName} is a node ` + + `that requires arguments object but detected argument ` + + `is typeof ${typeof arg}`, + ); + } + + const expectedArguments = new Map(Object.entries(argumentTypes)); + node.args = {}; + for (const [key, value] of Object.entries(arg)) { + const typeName = expectedArguments.get(key); + // TODO: consider logging a warning if `_` is detected incase user passes + // Selection as arg + if (!typeName) { + throw new Error( + `unexpected argument ${key} at ${parentPath}.${instanceName}`, + ); + } + expectedArguments.delete(key); + node.args[key] = { typeName, value }; + } + } + + if (subNodes || variants) { + // sanity check selection object + let subSelections = instanceSelection; + if (argumentTypes) { + if (!Array.isArray(subSelections)) { + throw new Error( + `node at ${parentPath}.${instanceName} ` + + `is a composite that takes an argument ` + + `but selection is typeof ${typeof subSelections}`, + ); + } + subSelections = subSelections[1]; + } else if (Array.isArray(subSelections)) { + throw new Error( + `node at ${parentPath}.${instanceName} ` + + `is a composite that takes no arguments ` + + `but selection is typeof ${typeof subSelections}`, + ); + } + if (subSelections == undefined) { + subSelections = { + _: selection._, + }; + } + if (typeof subSelections != "object") { + throw new Error( + `node at ${parentPath}.${nodeName} ` + + `is a no argument composite but first element of ` + + `selection is typeof ${typeof nodeSelection}`, + ); + } + + if (subNodes) { + if (variants) { + throw new Error( + "unreachable: union/either NodeMetas can't have subnodes", + ); + } + node.subNodes = _selectionToNodeSet( + // assume it's a Selection. If it's an argument + // object, mismatch between the node desc should hopefully + // catch it + subSelections as Selection, + subNodes, + `${parentPath}.${instanceName}`, + ); + } else { + const unionSelections = {} as Record; + const foundVariants = new Set([...Object.keys(subSelections)]); + for (const [variantTy, variant_meta_fn] of variants!) { + const variant_meta = variant_meta_fn(); + // this union member is a scalar + if (!variant_meta.subNodes) { + continue; + } + foundVariants.delete(variantTy); + const variant_select = subSelections[variantTy]; + const nodes = variant_select + ? 
_selectionToNodeSet( + variant_select as Selection, + variant_meta.subNodes, + `${parentPath}.${instanceName}.variant(${variantTy})`, + ) + : []; + nodes.push({ + nodeName: "__typename", + instanceName: "__typename", + }); + unionSelections[variantTy] = nodes; + } + if (foundVariants.size > 0) { + throw new Error( + `node at ${parentPath}.${instanceName} ` + + "has none of the variants called " + + [...foundVariants.keys()], + ); + } + node.subNodes = unionSelections; + } + } + + out.push(node); + } + } + foundNodes.delete("_"); + if (foundNodes.size > 0) { + throw new Error( + `unexpected nodes found in selection set at ${parentPath}: ${[ + ...foundNodes, + ]}`, + ); + } + return out; +} + +/* Query node types section */ + +type SubNodes = undefined | SelectNode[] | Record; + +type SelectNode<_Out = unknown> = { + nodeName: string; + instanceName: string; + args?: NodeArgs; + subNodes?: SubNodes; + files?: TypePath[]; +}; + +export class QueryNode { + #inner: SelectNode; + constructor(inner: SelectNode) { + this.#inner = inner; + } + + inner() { + return this.#inner; + } +} + +export class MutationNode { + #inner: SelectNode; + constructor(inner: SelectNode) { + this.#inner = inner; + } + + inner() { + return this.#inner; + } +} + +type SelectNodeOut = T extends QueryNode | MutationNode + ? O + : never; +type QueryDocOut = + T extends Record | MutationNode> + ? { + [K in keyof T]: SelectNodeOut; + } + : never; + +type TypePath = ("?" | "[]" | `.${string}`)[]; +type ValuePath = ("" | `[${number}]` | `.${string}`)[]; + +class FileExtractor { + #path: TypePath = []; + #currentPath: ValuePath = []; + #files: Map = new Map(); + + static extractFrom(key: string, object: unknown, paths: TypePath[]) { + const extractor = new FileExtractor(); + if (!object || typeof object !== "object") { + throw new Error("expected object"); + } + for (const path of paths) { + if (path[0] && path[0].startsWith("." 
+ key)) { + extractor.#currentPath = []; + extractor.#path = path; + extractor.#extractFromValue(object); + } + } + return extractor.#files; + } + + #extractFromValue(value: unknown) { + const nextSegment = this.#path[this.#currentPath.length]; + if (nextSegment === "?") { + if (value === null || value === undefined) { + return; + } + this.#currentPath.push(""); + this.#extractFromValue(value); + this.#currentPath.pop(); + return; + } + + if (nextSegment === "[]") { + if (!Array.isArray(value)) { + throw new Error(`Expected array at ${this.#formatPath()}`); + } + for (let i = 0; i < value.length; i++) { + this.#currentPath.push(`[${i}]`); + this.#extractFromArray(value, i); + this.#currentPath.pop(); + } + return; + } + + if (nextSegment.startsWith(".")) { + if (typeof value !== "object" || value === null) { + throw new Error(`Expected non-null object at ${this.#formatPath()}`); + } + this.#currentPath.push(nextSegment); + this.#extractFromObject( + value as Record, + nextSegment.slice(1), + ); + this.#currentPath.pop(); + return; + } + } + + #extractFromObject(parent: Record, key: string) { + const value = parent[key]; + if (this.#currentPath.length == this.#path.length) { + if (value instanceof File) { + this.#files.set(this.#formatPath(), value); + parent[key] = null; + return; + } + throw new Error(`Expected File at ${this.#formatPath()}`); + } + + this.#extractFromValue(value); + } + + #extractFromArray(parent: unknown[], idx: number) { + const value = parent[idx]; + if (this.#currentPath.length == this.#path.length) { + if (value instanceof File) { + this.#files.set(this.#formatPath(), value); + parent[idx] = null; + return; + } + throw new Error(`Expected File at ${this.#formatPath()}`); + } + + this.#extractFromValue(value); + } + + #formatPath() { + return this.#currentPath + .map((seg) => { + if (seg.startsWith("[")) { + return `.${seg.slice(1, -1)}`; + } + return seg; + }) + .join(""); + } +} + +type NodeMeta = { + subNodes?: [string, () => NodeMeta][]; + variants?: [string, () => NodeMeta][]; + argumentTypes?: { [name: string]: string }; + inputFiles?: TypePath[]; +}; + +/* Selection types section */ + +type SelectionFlags = "selectAll"; + +type Selection = { + _?: SelectionFlags; + [key: string]: + | SelectionFlags + | ScalarSelectNoArgs + | ScalarSelectArgs> + | CompositeSelectNoArgs + | CompositeSelectArgs, Selection> + | Selection; +}; + +type ScalarSelectNoArgs = boolean | Alias | null | undefined; + +type ScalarSelectArgs> = + | ArgT + | PlaceholderArgs + | Alias> + | false + | null + | undefined; + +type CompositeSelectNoArgs = + | SelectionT + | Alias + | false + | null + | undefined; + +type CompositeSelectArgs, SelectionT> = + | [ArgT | PlaceholderArgs, SelectionT] + | Alias<[ArgT | PlaceholderArgs, SelectionT]> + | false + | undefined + | null; + +/** + * Request multiple instances of a single node under different + * aliases. Look at {@link alias} for a functional way of instantiating + * this class. + */ +export class Alias { + #aliases: Record; + constructor(aliases: Record) { + this.#aliases = aliases; + } + aliases() { + return this.#aliases; + } +} + +/** + * Request multiple instances of a single node under different + * aliases. 
+ */ +export function alias(aliases: Record): Alias { + return new Alias(aliases); +} + +/* Argument types section */ + +type NodeArgValue = { + typeName: string; + value: unknown; +}; + +type NodeArgs = { + [name: string]: NodeArgValue; +}; + +/** + * This object is passed to closures used for preparing requests + * ahead of time for {@link PreparedRequest}s. It allows one to + * get {@link PlaceholderValue}s that can be used in place of node + * arguments. At request time, the {@link PreparedRequest} then + * takes an object that adheres to `T` that can then be used + * to replace the placeholders. + */ +export class PreparedArgs> { + get(key: OnlyStringKeys): PlaceholderValue { + return new PlaceholderValue(key); + } +} + +/** + * Placeholder values for use by {@link PreparedRequest} + */ +export class PlaceholderValue<_T> { + #key: string; + constructor(key: string) { + this.#key = key; + } + + key() { + return this.#key; + } +} + +export type PlaceholderArgs> = { + [K in keyof T]: PlaceholderValue; +}; + +/* GraphQL section */ + +/** + * Options to be used for requests performed by {@link GraphQLTransport}. + */ +export type GraphQlTransportOptions = Omit & { + /** + * {@link fetch} implementaiton to use. Defaults to the one found in the environment + */ + fetch?: typeof fetch; +}; + +function convertQueryNodeGql( + typeToGqlTypeMap: Record, + node: SelectNode, + variables: Map, + files: Map, +) { + let out = + node.nodeName == node.instanceName + ? node.nodeName + : `${node.instanceName}: ${node.nodeName}`; + + const args = node.args; + if (args && Object.keys(args).length > 0) { + const argsRow = []; + + for (const [key, val] of Object.entries(args)) { + const name = `in${variables.size}`; + const obj = { [key]: val.value }; + + if (node.files && node.files.length > 0) { + const extractedFiles = FileExtractor.extractFrom(key, obj, node.files); + + for (const [path, file] of extractedFiles) { + const pathInVariables = path.replace(/^\.[^\.\[]+/, `.${name}`); + files.set(pathInVariables, file); + } + } + + val.value = obj[key]; + variables.set(name, val); + argsRow.push(`${key}: $${name}`); + } + + out = `${out} (${argsRow.join(", ")})`; + } + + const subNodes = node.subNodes; + if (subNodes) { + if (Array.isArray(subNodes)) { + out = `${out} { ${subNodes + .map((node) => + convertQueryNodeGql(typeToGqlTypeMap, node, variables, files), + ) + .join(" ")} }`; + } else { + out = `${out} { ${Object.entries(subNodes) + .map(([variantTy, subNodes]) => { + let gqlTy = typeToGqlTypeMap[variantTy]; + if (!gqlTy) { + throw new Error( + `unreachable: no graphql type found for variant ${variantTy}`, + ); + } + gqlTy = gqlTy.replace(/[!]+$/, ""); + + return `... 
on ${gqlTy} {${subNodes + .map((node) => + convertQueryNodeGql(typeToGqlTypeMap, node, variables, files), + ) + .join(" ")}}`; + }) + .join(" ")} }`; + } + } + return out; +} + +function buildGql( + typeToGqlTypeMap: Record, + query: Record, + ty: "query" | "mutation", + // deno-lint-ignore no-inferrable-types + name: string = "", +) { + const variables = new Map(); + const files = new Map(); + + const rootNodes = Object.entries(query) + .map(([key, node]) => { + const fixedNode = { ...node, instanceName: key }; + return convertQueryNodeGql(typeToGqlTypeMap, fixedNode, variables, files); + }) + .join("\n "); + + let argsRow = [...variables.entries()] + .map(([key, val]) => `$${key}: ${typeToGqlTypeMap[val.typeName]} `) + .join(", "); + if (argsRow.length > 0) { + // graphql doesn't like empty parentheses so we only + // add them if there are args + argsRow = `(${argsRow})`; + } + + const doc = `${ty} ${name}${argsRow} { + ${rootNodes} + } `; + return { + doc, + variables: Object.fromEntries( + [...variables.entries()].map(([key, val]) => [key, val.value]), + ), + files, + }; +} + +async function fetchGql( + addr: URL, + doc: string, + variables: Record, + options: GraphQlTransportOptions, + files?: Map, +) { + let body: FormData | string = JSON.stringify({ + query: doc, + variables, + }); + + const additionalHeaders: HeadersInit = {}; + + if (files && files.size > 0) { + const data = new FormData(); + const fileMap = new Map(); + const map: Record = {}; + + for (const [path, file] of files) { + const array = fileMap.get(file); + const variable = "variables" + path; + if (array) { + array.push(variable); + } else { + fileMap.set(file, [variable]); + } + } + + let index = 0; + for (const [file, variables] of fileMap) { + const key = index.toString(); + map[key] = variables; + data.set(key, file); + index += 1; + } + + data.set("operations", body); + data.set("map", JSON.stringify(map)); + + body = data; + } else { + additionalHeaders["content-type"] = "application/json"; + } + + const fetchImpl = options.fetch ?? fetch; + const res = await fetchImpl(addr, { + ...options, + method: "POST", + headers: { + accept: "application/json", + ...additionalHeaders, + ...(options.headers ?? {}), + }, + body, + }); + + if (!res.ok) { + const body = await res.text().catch((err) => `error reading body: ${err} `); + throw new (Error as ErrorPolyfill)( + `graphql request to ${addr} failed with status ${res.status}: ${body} `, + { + cause: { + response: res, + body, + }, + }, + ); + } + if (res.headers.get("content-type") != "application/json") { + throw new (Error as ErrorPolyfill)("unexpected content type in response", { + cause: { + response: res, + body: await res.text().catch((err) => `error reading body: ${err} `), + }, + }); + } + return (await res.json()) as { data: unknown; errors?: object[] }; +} + +/** + * Access the typegraph over it's exposed GraphQL API. + */ +export class GraphQLTransport { + constructor( + public address: URL, + public options: GraphQlTransportOptions, + private typeToGqlTypeMap: Record, + ) {} + + async #request( + doc: string, + variables: Record, + options: GraphQlTransportOptions, + files?: Map, + ) { + const res = await fetchGql( + this.address, + doc, + variables, + { ...this.options, ...options }, + files, + ); + if ("errors" in res) { + throw new (Error as ErrorPolyfill)("graphql errors on response", { + cause: res.errors, + }); + } + return res.data; + } + + /** + * Make a query request to the typegraph. 
+ */ + async query>>( + query: Doc, + { + options, + name = "", + }: { + options?: GraphQlTransportOptions; + name?: string; + } = {}, + ): Promise> { + const { variables, doc } = buildGql( + this.typeToGqlTypeMap, + Object.fromEntries( + Object.entries(query).map(([key, val]) => [ + key, + (val as QueryNode).inner(), + ]), + ), + "query", + name, + ); + return (await this.#request( + doc, + variables, + options ?? {}, + )) as QueryDocOut; + } + + /** + * Make a mutation request to the typegraph. + */ + async mutation>>( + query: Doc, + { + options, + name = "", + }: { + options?: GraphQlTransportOptions; + name?: string; + } = {}, + ): Promise> { + const { variables, doc, files } = buildGql( + this.typeToGqlTypeMap, + Object.fromEntries( + Object.entries(query).map(([key, val]) => [ + key, + (val as MutationNode).inner(), + ]), + ), + "mutation", + name, + ); + return (await this.#request( + doc, + variables, + options ?? {}, + files, + )) as QueryDocOut; + } + + /** + * Prepare an ahead of time query {@link PreparedRequest}. + */ + prepareQuery< + T extends JsonObject, + Doc extends Record>, + >( + fun: (args: PreparedArgs) => Doc, + { name = "" }: { name?: string } = {}, + ): PreparedRequest { + return new PreparedRequest( + this.address, + this.options, + this.typeToGqlTypeMap, + fun, + "query", + name, + ); + } + + /** + * Prepare an ahead of time mutation {@link PreparedRequest}. + */ + prepareMutation< + T extends JsonObject, + Q extends Record>, + >( + fun: (args: PreparedArgs) => Q, + { name = "" }: { name?: string } = {}, + ): PreparedRequest { + return new PreparedRequest( + this.address, + this.options, + this.typeToGqlTypeMap, + fun, + "mutation", + name, + ); + } +} + +/** + * Prepares the GraphQL string ahead of time and allows re-use + * avoid the compute and garbage overhead of re-building it for + * repeat queries. + */ +export class PreparedRequest< + T extends JsonObject, + Doc extends Record | MutationNode>, +> { + public doc: string; + #mappings: Record; + + constructor( + private address: URL, + private options: GraphQlTransportOptions, + typeToGqlTypeMap: Record, + fun: (args: PreparedArgs) => Doc, + ty: "query" | "mutation", + name: string = "", + ) { + const args = new PreparedArgs(); + const dryRunNode = fun(args); + const { doc, variables } = buildGql( + typeToGqlTypeMap, + Object.fromEntries( + Object.entries(dryRunNode).map(([key, val]) => [ + key, + (val as MutationNode).inner(), + ]), + ), + ty, + name, + ); + this.doc = doc; + this.#mappings = variables; + } + + resolveVariables(args: T, mappings: Record) { + const resolvedVariables = {} as Record; + for (const [key, val] of Object.entries(mappings)) { + if (val instanceof PlaceholderValue) { + resolvedVariables[key] = args[val.key()]; + } else if (typeof val == "object" && val != null) { + this.resolveVariables(args, val as JsonObject); + } else { + resolvedVariables[key] = val; + } + } + return resolvedVariables; + } + + /** + * Execute the prepared request. 
+ */ + async perform( + args: T, + opts?: GraphQlTransportOptions, + ): Promise<{ + [K in keyof Doc]: SelectNodeOut; + }> { + const resolvedVariables = this.resolveVariables(args, this.#mappings); + // console.log(this.doc, { + // resolvedVariables, + // mapping: this.#mappings, + // }); + const res = await fetchGql(this.address, this.doc, resolvedVariables, { + ...this.options, + ...opts, + }); + if ("errors" in res) { + throw new (Error as ErrorPolyfill)("graphql errors on response", { + cause: res.errors, + }); + } + return res.data as QueryDocOut; + } +} + +/* Util types section */ + +type OnlyStringKeys> = { + [K in keyof T]: K extends string ? K : never; +}[keyof T]; + +type JsonLiteral = string | number | boolean | null; +type JsonObject = { [key: string]: Json }; +type JsonArray = Json[]; +type Json = JsonLiteral | JsonObject | JsonArray; + +type ErrorPolyfill = new (msg: string, payload: unknown) => Error; + +/* QueryGraph section */ + +class _QueryGraphBase { + constructor(private typeNameMapGql: Record) {} + + /** + * Get the {@link GraphQLTransport} for the typegraph. + */ + graphql(addr: URL | string, options?: GraphQlTransportOptions) { + return new GraphQLTransport( + new URL(addr), + options ?? {}, + this.typeNameMapGql, + ); + } +} + +// -------------------------------------------------- // + + +const nodeMetas = { + scalar() { + return {}; + }, + + RootUploadFn(): NodeMeta { + return { + ...nodeMetas.scalar(), + argumentTypes: { + file: "RootUploadFnInputFileFile", + path: "RootUploadFnInputPathRootUploadFnInputPathStringOptional", + }, + inputFiles: [[".file"]], + }; + }, + RootUploadManyFn(): NodeMeta { + return { + ...nodeMetas.scalar(), + argumentTypes: { + prefix: "RootUploadManyFnInputPrefixRootUploadFnInputPathStringOptional", + files: "RootUploadManyFnInputFilesRootUploadFnInputFileFileList", + }, + inputFiles: [[".files","[]"]], + }; + }, +}; +export type RootUploadFnInputFileFile = File; +export type RootUploadFnInputPathString = string; +export type RootUploadFnInputPathRootUploadFnInputPathStringOptional = RootUploadFnInputPathString | null | undefined; +export type RootUploadFnInput = { + file: RootUploadFnInputFileFile; + path?: RootUploadFnInputPathRootUploadFnInputPathStringOptional; +}; +export type RootUploadManyFnInputPrefixRootUploadFnInputPathStringOptional = RootUploadFnInputPathString | null | undefined; +export type RootUploadManyFnInputFilesRootUploadFnInputFileFileList = Array; +export type RootUploadManyFnInput = { + prefix?: RootUploadManyFnInputPrefixRootUploadFnInputPathStringOptional; + files: RootUploadManyFnInputFilesRootUploadFnInputFileFileList; +}; +export type RootUploadFnOutput = boolean; + + +export class QueryGraph extends _QueryGraphBase { + constructor() { + super({ + "RootUploadFnInputFileFile": "root_upload_fn_input_file_file!", + "RootUploadFnInputPathRootUploadFnInputPathStringOptional": "String", + "RootUploadManyFnInputPrefixRootUploadFnInputPathStringOptional": "String", + "RootUploadManyFnInputFilesRootUploadFnInputFileFileList": "[root_upload_fn_input_file_file]!", + }); + } + + upload(args: RootUploadFnInput | PlaceholderArgs) { + const inner = _selectionToNodeSet( + { "upload": args }, + [["upload", nodeMetas.RootUploadFn]], + "$q", + )[0]; + return new MutationNode(inner) as MutationNode; + } + uploadMany(args: RootUploadManyFnInput | PlaceholderArgs) { + const inner = _selectionToNodeSet( + { "uploadMany": args }, + [["uploadMany", nodeMetas.RootUploadManyFn]], + "$q", + )[0]; + return new MutationNode(inner) as 
MutationNode<RootUploadFnOutput>;
+  }
+}
diff --git a/tests/metagen/typegraphs/sample/ts_upload/main.ts b/tests/metagen/typegraphs/sample/ts_upload/main.ts
new file mode 100644
index 000000000..e14d24856
--- /dev/null
+++ b/tests/metagen/typegraphs/sample/ts_upload/main.ts
@@ -0,0 +1,27 @@
+// Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0.
+// SPDX-License-Identifier: MPL-2.0
+
+import { QueryGraph } from "./client.ts";
+
+const port = Deno.env.get("TG_PORT");
+
+const qg = new QueryGraph();
+const gql = qg.graphql(`http://localhost:${port}/sample`);
+
+const res1 = await gql.mutation({
+  upload: qg.upload({
+    file: new File(["Hello"], "hello.txt", { type: "text/plain" }),
+    path: "deno/hello.txt",
+  }),
+});
+
+const res2 = await gql.mutation({
+  uploadMany: qg.uploadMany({
+    files: [1, 2, 3, 4].map(
+      (i) => new File([`Hello`], `${i}`, { type: "text/plain" }),
+    ),
+    prefix: "deno/",
+  }),
+});
+
+console.log(JSON.stringify([res1, res2]));
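
---

Note on the wire format: when `buildGql` reports extracted files, `fetchGql` switches from a plain JSON POST to the GraphQL multipart request convention — the JSON payload moves into an `operations` form field, a `map` field ties numbered file parts to their positions under `variables`, and each `File` is attached under its number. Below is a minimal hand-written sketch of the payload this produces for the single-file `upload` mutation in `main.ts`; the variable names `in0`/`in1`, the GraphQL type names, and the `TG_PORT` URL mirror the generated client and test setup but are illustrative assumptions here, not part of the generated code.

```ts
// Sketch only: hand-builds the same multipart body the generated client
// sends when a mutation argument contains a File (names are assumptions).
const file = new File(["Hello"], "hello.txt", { type: "text/plain" });

// The operation itself, with the File replaced by null in the variables.
const operations = JSON.stringify({
  query:
    "mutation ($in0: root_upload_fn_input_file_file!, $in1: String) " +
    "{ upload (file: $in0, path: $in1) }",
  variables: { in0: null, in1: "deno/hello.txt" },
});

const form = new FormData();
form.set("operations", operations);
// Part "0" is re-injected at variables.in0 by the server, per the
// multipart request convention.
form.set("map", JSON.stringify({ "0": ["variables.in0"] }));
form.set("0", file);

// No explicit content-type header: the runtime adds the multipart boundary.
const port = Deno.env.get("TG_PORT");
const res = await fetch(`http://localhost:${port}/sample`, {
  method: "POST",
  headers: { accept: "application/json" },
  body: form,
});
console.log(await res.json());
```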