diff --git a/cmd/hasura-ndc-go/README.md b/cmd/hasura-ndc-go/README.md index 30e7710..62439f5 100644 --- a/cmd/hasura-ndc-go/README.md +++ b/cmd/hasura-ndc-go/README.md @@ -22,22 +22,26 @@ go install github.com/hasura/ndc-sdk-go/cmd/hasura-ndc-go@latest ```bash ❯ hasura-ndc-go -h -Usage: hasura-ndc-go +Usage: hasura-ndc-go [flags] Flags: - -h, --help Show context-sensitive help. + -h, --help Show context-sensitive help. + --log-level="info" Log level ($HASURA_PLUGIN_LOG_LEVEL). Commands: - new --name=STRING --module=STRING + new --name=STRING --module=STRING [flags] Initialize an NDC connector boilerplate. For example: hasura-ndc-go new -n example -m github.com/foo/example - update + update [flags] Generate schema and implementation for the connector from functions. - test snapshots + generate snapshots [flags] Generate test snapshots. + + version [flags] + Print the CLI version. ``` ### Initialize connector project @@ -60,6 +64,9 @@ hasura-ndc-go new -n example -m github.com/foo/example -o . ### Generate queries and mutations +> [!IMPORTANT] +> If neither function nor procedure is generated, your project may be using a Go workspace. You need to add the submodule path to the `go.work` file. + The `update` command parses code in the `functions` folder, finds functions and types that are allowed to be exposed and generates the following files: - `schema.generated.json`: the generated connector schema in JSON format. 
diff --git a/cmd/hasura-ndc-go/command/internal/connector.go b/cmd/hasura-ndc-go/command/internal/connector.go index d6370a2..a17375f 100644 --- a/cmd/hasura-ndc-go/command/internal/connector.go +++ b/cmd/hasura-ndc-go/command/internal/connector.go @@ -5,27 +5,30 @@ import ( "context" "encoding/json" "fmt" + "go/token" "io" "os" "path" "runtime/trace" "slices" - "sort" "strings" "github.com/hasura/ndc-sdk-go/schema" + "github.com/hasura/ndc-sdk-go/utils" "github.com/iancoleman/strcase" "github.com/rs/zerolog/log" + "golang.org/x/tools/go/packages" ) // ConnectorGenerationArguments represent input arguments of the ConnectorGenerator type ConnectorGenerationArguments struct { Path string `help:"The path of the root directory where the go.mod file is present" short:"p" env:"HASURA_PLUGIN_CONNECTOR_CONTEXT_PATH" default:"."` ConnectorDir string `help:"The directory where the connector.go file is placed" default:"."` - PackageTypes string `help:"The name of types package where the State struct is in"` Directories []string `help:"Folders contain NDC operation functions" short:"d"` Trace string `help:"Enable tracing and write to target file path."` + SchemaFormat string `help:"The output format for the connector schema. Accept: json, go" enum:"json,go" default:"json"` Style string `help:"The naming style for functions and procedures. 
Accept: camel-case, snake-case" enum:"camel-case,snake-case" default:"camel-case"` + TypeOnly bool `help:"Generate type only" default:"false"` } type connectorTypeBuilder struct { @@ -33,6 +36,8 @@ type connectorTypeBuilder struct { packagePath string imports map[string]string builder *strings.Builder + functions []FunctionInfo + procedures []ProcedureInfo } // SetImport sets an import package into the import list @@ -40,18 +45,13 @@ func (ctb *connectorTypeBuilder) SetImport(value string, alias string) { ctb.imports[value] = alias } -// GetDecoderName gets the global decoder name -func (ctb connectorTypeBuilder) GetDecoderName() string { - return fmt.Sprintf("%s_Decoder", ctb.packageName) -} - // String renders generated Go types and methods func (ctb connectorTypeBuilder) String() string { var bs strings.Builder - bs.WriteString(genFileHeader(ctb.packageName)) + writeFileHeader(&bs, ctb.packageName) if len(ctb.imports) > 0 { bs.WriteString("import (\n") - sortedImports := getSortedKeys(ctb.imports) + sortedImports := utils.GetSortedKeys(ctb.imports) for _, pkg := range sortedImports { alias := ctb.imports[pkg] if alias != "" { @@ -62,28 +62,21 @@ func (ctb connectorTypeBuilder) String() string { bs.WriteString(")\n") } - decoderName := ctb.GetDecoderName() - bs.WriteString(fmt.Sprintf("var %s = utils.NewDecoder()\n", decoderName)) + bs.WriteString("var connector_Decoder = utils.NewDecoder()\n") bs.WriteString(ctb.builder.String()) return bs.String() } type connectorGenerator struct { - basePath string - connectorDir string - moduleName string - rawSchema *RawConnectorSchema - typeBuilders map[string]*connectorTypeBuilder -} - -func NewConnectorGenerator(basePath string, connectorDir string, moduleName string, rawSchema *RawConnectorSchema) *connectorGenerator { - return &connectorGenerator{ - basePath: basePath, - connectorDir: connectorDir, - moduleName: moduleName, - rawSchema: rawSchema, - typeBuilders: make(map[string]*connectorTypeBuilder), - } + basePath 
string + connectorDir string + moduleName string + schemaFormat string + rawSchema *RawConnectorSchema + typeBuilders map[string]*connectorTypeBuilder + typeOnly bool + functionHandlers []string + procedureHandlers []string } // ParseAndGenerateConnector parses and generate connector codes @@ -112,52 +105,102 @@ func ParseAndGenerateConnector(args ConnectorGenerationArguments, moduleName str _, genTask := trace.NewTask(context.TODO(), "generate_code") defer genTask.End() - connectorGen := NewConnectorGenerator(".", args.ConnectorDir, moduleName, sm) - return connectorGen.generateConnector() -} -func (cg *connectorGenerator) generateConnector() error { - // generate schema.generated.json - schemaBytes, err := json.MarshalIndent(cg.rawSchema.Schema(), "", " ") + connectorGen := &connectorGenerator{ + basePath: ".", + connectorDir: args.ConnectorDir, + moduleName: moduleName, + schemaFormat: args.SchemaFormat, + rawSchema: sm, + typeBuilders: make(map[string]*connectorTypeBuilder), + typeOnly: args.TypeOnly, + } + connectorPkgName, err := connectorGen.loadConnectorPackage() if err != nil { return err } + return connectorGen.generateConnector(connectorPkgName) +} + +func (cg *connectorGenerator) loadConnectorPackage() (string, error) { + connectorPath := path.Join(cg.basePath, cg.connectorDir) + fset := token.NewFileSet() + cfg := &packages.Config{ + Dir: connectorPath, + Fset: fset, + } + pkgList, err := packages.Load(cfg) + if err != nil { + return "", fmt.Errorf("failed to load the package in connector directory: %s", err) + } + + if len(pkgList) == 0 || pkgList[0].Name == "" { + return "main", nil + } + return pkgList[0].Name, nil +} + +func (cg *connectorGenerator) generateConnector(name string) error { + + var schemaBytes []byte + var err error + + schemaOutputFile := schemaOutputJSONFile + if cg.schemaFormat == "go" { + schemaOutputFile = schemaOutputGoFile + cg.rawSchema.Imports["encoding/json"] = true + output, err := cg.rawSchema.WriteGoSchema(name) + if err 
!= nil { + return err + } + schemaBytes = []byte(output) + } else { + // generate schema.generated.json + schemaBytes, err = json.MarshalIndent(cg.rawSchema.Schema(), "", " ") + if err != nil { + return err + } + } schemaPath := path.Join(cg.basePath, cg.connectorDir, schemaOutputFile) if err := os.WriteFile(schemaPath, schemaBytes, 0644); err != nil { return err } - targetPath := path.Join(cg.basePath, cg.connectorDir, connectorOutputFile) - f, err := os.Create(targetPath) - if err != nil { + if err := cg.genTypeMethods(); err != nil { return err } - defer func() { - _ = f.Close() - }() - w := bufio.NewWriter(f) - defer func() { - _ = w.Flush() - }() + if !cg.typeOnly { + targetPath := path.Join(cg.basePath, cg.connectorDir, connectorOutputFile) + f, err := os.Create(targetPath) + if err != nil { + return err + } + defer func() { + _ = f.Close() + }() - if err := cg.genConnectorCodeFromTemplate(w); err != nil { - return err + w := bufio.NewWriter(f) + defer func() { + _ = w.Flush() + }() + + if err := cg.genConnectorCodeFromTemplate(w, name); err != nil { + return err + } + _ = w.Flush() } - if err := cg.genTypeMethods(); err != nil { - return err + if len(cg.rawSchema.Functions) == 0 && len(cg.rawSchema.Procedures) == 0 { + log.Warn().Msg("neither function nor procedure is generated. 
If your project uses Go Workspace please add the root path to the go.work file and update again") } return nil } -func (cg *connectorGenerator) genConnectorCodeFromTemplate(w io.Writer) error { - queries := cg.genConnectorFunctions(cg.rawSchema) - procedures := cg.genConnectorProcedures(cg.rawSchema) - - sortedImports := getSortedKeys(cg.rawSchema.Imports) +func (cg *connectorGenerator) genConnectorCodeFromTemplate(w io.Writer, packageName string) error { + sortedImports := utils.GetSortedKeys(cg.rawSchema.Imports) importLines := []string{} for _, importPath := range sortedImports { @@ -165,222 +208,58 @@ func (cg *connectorGenerator) genConnectorCodeFromTemplate(w io.Writer) error { } return connectorTemplate.Execute(w, map[string]any{ - "Imports": strings.Join(importLines, "\n"), - "Module": cg.moduleName, - "Queries": queries, - "Procedures": procedures, + "Imports": strings.Join(importLines, "\n"), + "PackageName": packageName, + "StateArgument": cg.rawSchema.StateType.GetArgumentName(""), + "SchemaFormat": cg.schemaFormat, + "QueryHandlers": cg.renderOperationHandlers(cg.functionHandlers), + "MutationHandlers": cg.renderOperationHandlers(cg.procedureHandlers), }) } -func (cg *connectorGenerator) genConnectorFunctions(rawSchema *RawConnectorSchema) string { - if len(rawSchema.Functions) == 0 { - return "" - } - - var sb strings.Builder - for _, fn := range rawSchema.Functions { - if _, ok := cg.rawSchema.Imports[fn.PackagePath]; !ok { - cg.rawSchema.Imports[fn.PackagePath] = true - } - var argumentParamStr string - sb.WriteString(fmt.Sprintf(" case \"%s\":", fn.Name)) - if fn.ResultType.IsScalar { - sb.WriteString(` - if len(queryFields) > 0 { - return nil, schema.UnprocessableContentError("cannot evaluate selection fields for scalar", nil) - }`) - } else if fn.ResultType.IsArray() { - sb.WriteString(` - selection, err := queryFields.AsArray() - if err != nil { - return nil, schema.UnprocessableContentError("the selection field type must be array", 
map[string]any{ - "cause": err.Error(), - }) - }`) - } else { - sb.WriteString(` - selection, err := queryFields.AsObject() - if err != nil { - return nil, schema.UnprocessableContentError("the selection field type must be object", map[string]any{ - "cause": err.Error(), - }) - }`) - } - - if fn.ArgumentsType != nil { - argName := fn.ArgumentsType.Name - if fn.ArgumentsType.PackagePath != "" { - cg.rawSchema.Imports[fn.ArgumentsType.PackagePath] = true - argName = fmt.Sprintf("%s.%s", fn.ArgumentsType.PackageName, fn.ArgumentsType.Name) - } - - argumentStr := fmt.Sprintf(` - rawArgs, err := utils.ResolveArgumentVariables(request.Arguments, variables) - if err != nil { - return nil, schema.UnprocessableContentError("failed to resolve argument variables", map[string]any{ - "cause": err.Error(), - }) - } - - connector_addSpanEvent(span, logger, "resolve_arguments", map[string]any{ - "raw_arguments": rawArgs, - }) - - var args %s - if err = args.FromValue(rawArgs); err != nil { - return nil, schema.UnprocessableContentError("failed to resolve arguments", map[string]any{ - "cause": err.Error(), - }) - } - - connector_addSpanEvent(span, logger, "execute_function", map[string]any{ - "arguments": args, - })`, argName) - sb.WriteString(argumentStr) - argumentParamStr = ", &args" - } - - if fn.ResultType.IsScalar { - sb.WriteString(fmt.Sprintf("\n return %s.%s(ctx, state%s)\n", fn.PackageName, fn.OriginName, argumentParamStr)) - continue - } - - sb.WriteString(fmt.Sprintf("\n rawResult, err := %s.%s(ctx, state%s)", fn.PackageName, fn.OriginName, argumentParamStr)) - genGeneralOperationResult(&sb, fn.ResultType) - - sb.WriteString(` - connector_addSpanEvent(span, logger, "evaluate_response_selection", map[string]any{ - "raw_result": rawResult, - })`) - if fn.ResultType.IsArray() { - sb.WriteString("\n result, err := utils.EvalNestedColumnArrayIntoSlice(selection, rawResult)") - } else { - sb.WriteString("\n result, err := utils.EvalNestedColumnObject(selection, rawResult)") - 
} - sb.WriteString(textBlockErrorCheck2) - sb.WriteString(" return result, nil\n") - } - - return sb.String() -} - -func genGeneralOperationResult(sb *strings.Builder, resultType *TypeInfo) { - sb.WriteString(textBlockErrorCheck2) - if resultType.IsNullable() { - sb.WriteString(` - if rawResult == nil { - return nil, nil - } -`) - } else { - sb.WriteString(` - if rawResult == nil { - return nil, schema.UnprocessableContentError("expected not null result", nil) - } -`) - } -} - -func (cg *connectorGenerator) genConnectorProcedures(rawSchema *RawConnectorSchema) string { - if len(rawSchema.Procedures) == 0 { - return "" +func (cg *connectorGenerator) renderOperationHandlers(values []string) string { + if len(values) == 0 { + return "{}" } - - cg.rawSchema.Imports["encoding/json"] = true - var sb strings.Builder - for _, fn := range rawSchema.Procedures { - if _, ok := cg.rawSchema.Imports[fn.PackagePath]; !ok { - cg.rawSchema.Imports[fn.PackagePath] = true - } - var argumentParamStr string - sb.WriteString(fmt.Sprintf(" case \"%s\":", fn.Name)) - if fn.ResultType.IsScalar { - sb.WriteString(` - if len(operation.Fields) > 0 { - return nil, schema.UnprocessableContentError("cannot evaluate selection fields for scalar", nil) - }`) - } else if fn.ResultType.IsArray() { - sb.WriteString(` - selection, err := operation.Fields.AsArray() - if err != nil { - return nil, schema.UnprocessableContentError("the selection field type must be array", map[string]any{ - "cause": err.Error(), - }) - }`) - } else { - sb.WriteString(` - selection, err := operation.Fields.AsObject() - if err != nil { - return nil, schema.UnprocessableContentError("the selection field type must be object", map[string]any{ - "cause": err.Error(), - }) - }`) - } - if fn.ArgumentsType != nil { - argName := fn.ArgumentsType.Name - if fn.ArgumentsType.PackagePath != "" { - cg.rawSchema.Imports[fn.ArgumentsType.PackagePath] = true - argName = fmt.Sprintf("%s.%s", fn.ArgumentsType.PackageName, 
fn.ArgumentsType.Name) - } - - argumentStr := fmt.Sprintf(` - var args %s - if err := json.Unmarshal(operation.Arguments, &args); err != nil { - return nil, schema.UnprocessableContentError("failed to decode arguments", map[string]any{ - "cause": err.Error(), - }) - }`, argName) - sb.WriteString(argumentStr) - argumentParamStr = ", &args" - } - - sb.WriteString("\n span.AddEvent(\"execute_procedure\")") - if fn.ResultType.IsScalar { - sb.WriteString(fmt.Sprintf(` - var err error - result, err := %s.%s(ctx, state%s)`, fn.PackageName, fn.OriginName, argumentParamStr)) - } else { - sb.WriteString(fmt.Sprintf("\n rawResult, err := %s.%s(ctx, state%s)\n", fn.PackageName, fn.OriginName, argumentParamStr)) - genGeneralOperationResult(&sb, fn.ResultType) - - sb.WriteString(` connector_addSpanEvent(span, logger, "evaluate_response_selection", map[string]any{ - "raw_result": rawResult, - })`) - if fn.ResultType.IsArray() { - sb.WriteString("\n result, err := utils.EvalNestedColumnArrayIntoSlice(selection, rawResult)\n") - } else { - sb.WriteString("\n result, err := utils.EvalNestedColumnObject(selection, rawResult)\n") - } + sb.WriteRune('{') + for i, v := range values { + if i > 0 { + sb.WriteString(", ") } - sb.WriteString(textBlockErrorCheck2) - sb.WriteString(" return schema.NewProcedureResult(result).Encode(), nil\n") + sb.WriteString(v) + sb.WriteString(".DataConnectorHandler{}") } + sb.WriteRune('}') return sb.String() } // generate encoding and decoding methods for schema types func (cg *connectorGenerator) genTypeMethods() error { - if err := cg.genFunctionArgumentConstructors(); err != nil { - return err - } + cg.genFunctionArgumentConstructors() if err := cg.genObjectMethods(); err != nil { return err } if err := cg.genCustomScalarMethods(); err != nil { return err } + cg.genConnectorHandlers() log.Debug().Msg("Generating types...") for packagePath, builder := range cg.typeBuilders { + if !strings.HasPrefix(packagePath, cg.moduleName) { + continue + } 
relativePath := strings.TrimPrefix(packagePath, cg.moduleName) schemaPath := path.Join(cg.basePath, relativePath, typeMethodsOutputFile) log.Debug(). Str("package_name", builder.packageName). - Str("package_path", builder.packagePath). + Str("package_path", packagePath). + Str("module_name", cg.moduleName). Msgf(schemaPath) + if err := os.WriteFile(schemaPath, []byte(builder.String()), 0644); err != nil { return err } @@ -394,20 +273,20 @@ func (cg *connectorGenerator) genObjectMethods() error { return nil } - objectKeys := getSortedKeys(cg.rawSchema.Objects) + objectKeys := utils.GetSortedKeys(cg.rawSchema.Objects) for _, objectName := range objectKeys { object := cg.rawSchema.Objects[objectName] - if object.IsAnonymous || !strings.HasPrefix(object.PackagePath, cg.moduleName) { + if object.IsAnonymous || !strings.HasPrefix(object.Type.PackagePath, cg.moduleName) { continue } - sb := cg.getOrCreateTypeBuilder(object.PackagePath) + sb := cg.getOrCreateTypeBuilder(object.Type.PackagePath) sb.builder.WriteString(fmt.Sprintf(` // ToMap encodes the struct to a value map func (j %s) ToMap() map[string]any { r := make(map[string]any) -`, objectName)) - cg.genObjectToMap(sb, object, "j", "r") +`, object.Type.Name)) + cg.genObjectToMap(sb, &object, "j", "r") sb.builder.WriteString(` return r }`) @@ -418,12 +297,12 @@ func (j %s) ToMap() map[string]any { func (cg *connectorGenerator) genObjectToMap(sb *connectorTypeBuilder, object *ObjectInfo, selector string, name string) { - fieldKeys := getSortedKeys(object.Fields) + fieldKeys := utils.GetSortedKeys(object.Fields) for _, fieldKey := range fieldKeys { field := object.Fields[fieldKey] fieldSelector := fmt.Sprintf("%s.%s", selector, field.Name) - fieldAssigner := fmt.Sprintf("%s[\"%s\"]", name, field.Key) - cg.genToMapProperty(sb, field, fieldSelector, fieldAssigner, field.Type, field.Type.TypeFragments) + fieldAssigner := fmt.Sprintf("%s[\"%s\"]", name, fieldKey) + cg.genToMapProperty(sb, &field, fieldSelector, 
fieldAssigner, field.Type, field.Type.TypeFragments) } } @@ -453,26 +332,30 @@ func (cg *connectorGenerator) genToMapProperty(sb *connectorTypeBuilder, field * } isAnonymous := strings.HasPrefix(strings.Join(fragments, ""), "struct{") - if !isAnonymous { - sb.builder.WriteString(fmt.Sprintf(` %s = %s - `, assigner, selector)) - return selector - } - innerObject, ok := cg.rawSchema.Objects[ty.Name] - if !ok { - tyName := buildTypeNameFromFragments(ty.TypeFragments, ty.PackagePath, sb.packagePath) - innerObject, ok = cg.rawSchema.Objects[tyName] + if isAnonymous { + innerObject, ok := cg.rawSchema.Objects[ty.String()] if !ok { - return selector + tyName := buildTypeNameFromFragments(ty.TypeFragments, ty.PackagePath, sb.packagePath) + innerObject, ok = cg.rawSchema.Objects[tyName] + if !ok { + return selector + } } + + // anonymous struct + varName := formatLocalFieldName(selector, "obj") + sb.builder.WriteString(fmt.Sprintf(" %s := make(map[string]any)\n", varName)) + cg.genObjectToMap(sb, &innerObject, selector, varName) + sb.builder.WriteString(fmt.Sprintf(" %s = %s\n", assigner, varName)) + return varName + } + if !field.Type.Embedded { + sb.builder.WriteString(fmt.Sprintf(" %s = %s\n", assigner, selector)) + return selector } - // anonymous struct - varName := formatLocalFieldName(selector, "obj") - sb.builder.WriteString(fmt.Sprintf(" %s := make(map[string]any)\n", varName)) - cg.genObjectToMap(sb, innerObject, selector, varName) - sb.builder.WriteString(fmt.Sprintf(" %s = %s\n", assigner, varName)) - return varName + sb.builder.WriteString(fmt.Sprintf(" r = utils.MergeMap(r, %s.ToMap())\n", selector)) + return selector } // generate Scalar implementation for custom scalar types @@ -481,7 +364,7 @@ func (cg *connectorGenerator) genCustomScalarMethods() error { return nil } - scalarKeys := getSortedKeys(cg.rawSchema.CustomScalars) + scalarKeys := utils.GetSortedKeys(cg.rawSchema.CustomScalars) for _, scalarKey := range scalarKeys { scalar := 
cg.rawSchema.CustomScalars[scalarKey] @@ -573,32 +456,40 @@ func (s *%s) FromValue(value any) error { return nil } -func (cg *connectorGenerator) genFunctionArgumentConstructors() error { +func (cg *connectorGenerator) genFunctionArgumentConstructors() { if len(cg.rawSchema.Functions) == 0 { - return nil + return } - for _, fn := range cg.rawSchema.Functions { - if len(fn.Arguments) == 0 || fn.ArgumentsType == nil { - continue - } - sb := cg.getOrCreateTypeBuilder(fn.ArgumentsType.PackagePath) - sb.builder.WriteString(fmt.Sprintf(` -// FromValue decodes values from map -func (j *%s) FromValue(input map[string]any) error { - var err error -`, fn.ArgumentsType.Name)) + writtenObjects := make(map[string]bool) + fnKeys := utils.GetSortedKeys(cg.rawSchema.FunctionArguments) + for _, k := range fnKeys { + argInfo := cg.rawSchema.FunctionArguments[k] + writtenObjects[argInfo.Type.String()] = true + cg.writeObjectFromValue(&argInfo) + } +} - argumentKeys := getSortedKeys(fn.Arguments) - for _, key := range argumentKeys { - arg := fn.Arguments[key] - cg.genGetTypeValueDecoder(sb, arg.Type, key, arg.FieldName) - } - sb.builder.WriteString(` return nil -}`) +func (cg *connectorGenerator) writeObjectFromValue(info *ObjectInfo) { + if len(info.Fields) == 0 || info.Type == nil || info.IsAnonymous { + return } - return nil + sb := cg.getOrCreateTypeBuilder(info.Type.PackagePath) + sb.builder.WriteString(` +// FromValue decodes values from map +func (j *`) + sb.builder.WriteString(info.Type.Name) + sb.builder.WriteString(`) FromValue(input map[string]any) error { + var err error +`) + argumentKeys := utils.GetSortedKeys(info.Fields) + for _, key := range argumentKeys { + arg := info.Fields[key] + cg.genGetTypeValueDecoder(sb, arg.Type, key, arg.Name) + } + sb.builder.WriteString(" return nil\n}") + return } func (cg *connectorGenerator) getOrCreateTypeBuilder(packagePath string) *connectorTypeBuilder { @@ -619,10 +510,36 @@ func (cg *connectorGenerator) 
getOrCreateTypeBuilder(packagePath string) *connec return bs } -func genFileHeader(packageName string) string { - return fmt.Sprintf(`// Code generated by github.com/hasura/ndc-sdk-go/codegen, DO NOT EDIT. -package %s -`, packageName) +func (cg *connectorGenerator) genConnectorHandlers() { + for _, fn := range cg.rawSchema.Functions { + builder := cg.getOrCreateTypeBuilder(fn.PackagePath) + builder.functions = append(builder.functions, fn) + } + for _, fn := range cg.rawSchema.Procedures { + builder := cg.getOrCreateTypeBuilder(fn.PackagePath) + builder.procedures = append(builder.procedures, fn) + } + + for _, bs := range cg.typeBuilders { + fnLen := len(bs.functions) + procLen := len(bs.procedures) + if fnLen == 0 && procLen == 0 { + continue + } + cg.rawSchema.Imports[bs.packagePath] = true + if fnLen > 0 { + cg.functionHandlers = append(cg.functionHandlers, bs.packageName) + } + if procLen > 0 { + cg.procedureHandlers = append(cg.procedureHandlers, bs.packageName) + } + (&connectorHandlerBuilder{ + Builder: bs, + RawSchema: cg.rawSchema, + Functions: bs.functions, + Procedures: bs.procedures, + }).Render() + } } func (cg *connectorGenerator) genGetTypeValueDecoder(sb *connectorTypeBuilder, ty *TypeInfo, key string, fieldName string) { @@ -767,17 +684,39 @@ func (cg *connectorGenerator) genGetTypeValueDecoder(sb *connectorTypeBuilder, t if ty.IsNullable() { typeName := strings.TrimLeft(typeName, "*") pkgName, tyName, ok := findAndReplaceNativeScalarPackage(typeName) - if !ok { + if !ok && len(ty.TypeFragments) > 0 { pkgName = ty.PackagePath tyName = buildTypeNameFromFragments(ty.TypeFragments[1:], ty.PackagePath, sb.packagePath) } if pkgName != "" && pkgName != sb.packagePath { sb.imports[pkgName] = "" } - sb.builder.WriteString(fmt.Sprintf(` j.%s = new(%s) - err = %s.DecodeNullableObjectValue(j.%s, input, "%s")`, fieldName, tyName, sb.GetDecoderName(), fieldName, key)) + sb.builder.WriteString(" j.") + sb.builder.WriteString(fieldName) + 
sb.builder.WriteString(" = new(") + sb.builder.WriteString(tyName) + sb.builder.WriteString(")\n err = connector_Decoder.") + if ty.Embedded { + sb.builder.WriteString("DecodeObject(j.") + sb.builder.WriteString(fieldName) + sb.builder.WriteString(", input)") + } else { + sb.builder.WriteString("DecodeNullableObjectValue(j.") + sb.builder.WriteString(fieldName) + sb.builder.WriteString(`, input, "`) + sb.builder.WriteString(key) + sb.builder.WriteString(`")`) + } + } else if ty.Embedded { + sb.builder.WriteString(" err = connector_Decoder.DecodeObject(&j.") + sb.builder.WriteString(fieldName) + sb.builder.WriteString(", input)") } else { - sb.builder.WriteString(fmt.Sprintf(` err = %s.DecodeObjectValue(&j.%s, input, "%s")`, sb.GetDecoderName(), fieldName, key)) + sb.builder.WriteString(" err = connector_Decoder.DecodeObjectValue(&j.") + sb.builder.WriteString(fieldName) + sb.builder.WriteString(`, input, "`) + sb.builder.WriteString(key) + sb.builder.WriteString(`")`) } } sb.builder.WriteString(textBlockErrorCheck) @@ -808,15 +747,6 @@ func buildTypeWithAlias(name string, typePackagePath string, currentPackagePath return fmt.Sprintf("%s.%s", alias, name) } -func getSortedKeys[V any](input map[string]V) []string { - var results []string - for key := range input { - results = append(results, key) - } - sort.Strings(results) - return results -} - func formatLocalFieldName(input string, others ...string) string { name := fieldNameRegex.ReplaceAllString(input, "_") return strings.Trim(strings.Join(append([]string{name}, others...), "_"), "_") diff --git a/cmd/hasura-ndc-go/command/internal/connector_handler.go b/cmd/hasura-ndc-go/command/internal/connector_handler.go new file mode 100644 index 0000000..7bc51f7 --- /dev/null +++ b/cmd/hasura-ndc-go/command/internal/connector_handler.go @@ -0,0 +1,319 @@ +package internal + +import ( + "fmt" + "strings" +) + +const ( + functionEnumsName = "enumValues_FunctionName" + procedureEnumsName = "enumValues_ProcedureName" +) + 
+type connectorHandlerBuilder struct { + RawSchema *RawConnectorSchema + Functions []FunctionInfo + Procedures []ProcedureInfo + Builder *connectorTypeBuilder +} + +func (chb connectorHandlerBuilder) Render() { + if len(chb.Functions) == 0 && len(chb.Procedures) == 0 { + return + } + + bs := chb.Builder + bs.imports["context"] = "" + bs.imports["log/slog"] = "" + bs.imports["slices"] = "" + bs.imports["github.com/hasura/ndc-sdk-go/connector"] = "" + bs.imports["github.com/hasura/ndc-sdk-go/schema"] = "" + bs.imports["go.opentelemetry.io/otel/trace"] = "" + if chb.RawSchema.StateType != nil && bs.packagePath != chb.RawSchema.StateType.PackagePath { + bs.imports[chb.RawSchema.StateType.PackagePath] = "" + } + + _, _ = bs.builder.WriteString(` +// DataConnectorHandler implements the data connector handler +type DataConnectorHandler struct{} +`) + chb.writeQuery(bs.builder) + chb.writeMutation(bs.builder) + + bs.builder.WriteString(` +func connector_addSpanEvent(span trace.Span, logger *slog.Logger, name string, data map[string]any, options ...trace.EventOption) { + logger.Debug(name, slog.Any("data", data)) + attrs := utils.DebugJSONAttributes(data, utils.IsDebug(logger)) + span.AddEvent(name, append(options, trace.WithAttributes(attrs...))...) 
+}`) +} + +func (chb connectorHandlerBuilder) writeOperationNameEnums(sb *strings.Builder, name string, values []string) { + sb.WriteString("var ") + sb.WriteString(name) + sb.WriteString(" = []string{") + for i, enum := range values { + if i > 0 { + sb.WriteString(", ") + } + sb.WriteRune('"') + sb.WriteString(enum) + sb.WriteRune('"') + } + sb.WriteRune('}') +} + +func (chb connectorHandlerBuilder) writeStateArgumentName() string { + if chb.RawSchema.StateType == nil { + return "State" + } + return chb.RawSchema.StateType.GetArgumentName(chb.Builder.packagePath) +} + +func (chb connectorHandlerBuilder) writeQuery(sb *strings.Builder) { + if len(chb.Functions) == 0 { + return + } + stateArgument := chb.writeStateArgumentName() + _, _ = sb.WriteString(` +// QueryExists check if the query name exists +func (dch DataConnectorHandler) QueryExists(name string) bool { + return slices.Contains(`) + _, _ = sb.WriteString(functionEnumsName) + _, _ = sb.WriteString(`, name) +}`) + + _, _ = sb.WriteString(` +func (dch DataConnectorHandler) Query(ctx context.Context, state *`) + _, _ = sb.WriteString(stateArgument) + _, _ = sb.WriteString(`, request *schema.QueryRequest, rawArgs map[string]any) (*schema.RowSet, error) { + if !dch.QueryExists(request.Collection) { + return nil, utils.ErrHandlerNotfound + } + queryFields, err := utils.EvalFunctionSelectionFieldValue(request) + if err != nil { + return nil, schema.UnprocessableContentError(err.Error(), nil) + } + + result, err := dch.execQuery(ctx, state, request, queryFields, rawArgs) + if err != nil { + return nil, err + } + + return &schema.RowSet{ + Aggregates: schema.RowSetAggregates{}, + Rows: []map[string]any{ + { + "__value": result, + }, + }, + }, nil +} + +func (dch DataConnectorHandler) execQuery(ctx context.Context, state *`) + _, _ = sb.WriteString(stateArgument) + _, _ = sb.WriteString(`, request *schema.QueryRequest, queryFields schema.NestedField, rawArgs map[string]any) (any, error) { + span := 
trace.SpanFromContext(ctx) + logger := connector.GetLogger(ctx) + switch request.Collection {`) + + functionKeys := make([]string, len(chb.Functions)) + for i, fn := range chb.Functions { + functionKeys[i] = fn.Name + _, _ = sb.WriteString("\n case \"") + _, _ = sb.WriteString(fn.Name) + _, _ = sb.WriteString("\":\n") + + if fn.ResultType.IsScalar { + sb.WriteString(` + if len(queryFields) > 0 { + return nil, schema.UnprocessableContentError("cannot evaluate selection fields for scalar", nil) + }`) + } else if fn.ResultType.IsArray() { + sb.WriteString(` + selection, err := queryFields.AsArray() + if err != nil { + return nil, schema.UnprocessableContentError("the selection field type must be array", map[string]any{ + "cause": err.Error(), + }) + }`) + } else { + sb.WriteString(` + selection, err := queryFields.AsObject() + if err != nil { + return nil, schema.UnprocessableContentError("the selection field type must be object", map[string]any{ + "cause": err.Error(), + }) + }`) + } + + var argumentParamStr string + if fn.ArgumentsType != nil { + argName := fn.ArgumentsType.Name + if fn.ArgumentsType.PackagePath != "" && fn.ArgumentsType.PackagePath != chb.Builder.packagePath { + chb.Builder.imports[fn.ArgumentsType.PackagePath] = "" + argName = fmt.Sprintf("%s.%s", fn.ArgumentsType.PackageName, fn.ArgumentsType.Name) + } + + argumentStr := fmt.Sprintf(` + var args %s + if parseErr := args.FromValue(rawArgs); parseErr != nil { + return nil, schema.UnprocessableContentError("failed to resolve arguments", map[string]any{ + "cause": parseErr.Error(), + }) + } + + connector_addSpanEvent(span, logger, "execute_function", map[string]any{ + "arguments": args, + })`, argName) + sb.WriteString(argumentStr) + argumentParamStr = ", &args" + } + + if fn.ResultType.IsScalar { + sb.WriteString(fmt.Sprintf("\n return %s(ctx, state%s)\n", fn.OriginName, argumentParamStr)) + continue + } + + sb.WriteString(fmt.Sprintf("\n rawResult, err := %s(ctx, state%s)", fn.OriginName, 
argumentParamStr)) + chb.writeGeneralOperationResult(sb, fn.ResultType) + + sb.WriteString(` + connector_addSpanEvent(span, logger, "evaluate_response_selection", map[string]any{ + "raw_result": rawResult, + })`) + if fn.ResultType.IsArray() { + sb.WriteString("\n result, err := utils.EvalNestedColumnArrayIntoSlice(selection, rawResult)") + } else { + sb.WriteString("\n result, err := utils.EvalNestedColumnObject(selection, rawResult)") + } + sb.WriteString(textBlockErrorCheck2) + sb.WriteString(" return result, nil\n") + } + + _, _ = sb.WriteString(` + default: + return nil, utils.ErrHandlerNotfound + } +} +`) + chb.writeOperationNameEnums(sb, functionEnumsName, functionKeys) +} + +func (chb connectorHandlerBuilder) writeMutation(sb *strings.Builder) { + if len(chb.Procedures) == 0 { + return + } + stateArgument := chb.writeStateArgumentName() + chb.Builder.imports["encoding/json"] = "" + + _, _ = sb.WriteString(` +// MutationExists check if the mutation name exists +func (dch DataConnectorHandler) MutationExists(name string) bool { + return slices.Contains(`) + _, _ = sb.WriteString(procedureEnumsName) + _, _ = sb.WriteString(`, name) +}`) + + _, _ = sb.WriteString(` +func (dch DataConnectorHandler) Mutation(ctx context.Context, state *`) + _, _ = sb.WriteString(stateArgument) + _, _ = sb.WriteString(`, operation *schema.MutationOperation) (schema.MutationOperationResults, error) { + span := trace.SpanFromContext(ctx) + logger := connector.GetLogger(ctx) + connector_addSpanEvent(span, logger, "validate_request", map[string]any{ + "operations_name": operation.Name, + }) + + switch operation.Name {`) + + procedureKeys := make([]string, len(chb.Procedures)) + for i, fn := range chb.Procedures { + procedureKeys[i] = fn.Name + _, _ = sb.WriteString("\n case \"") + _, _ = sb.WriteString(fn.Name) + _, _ = sb.WriteString("\":\n") + + if fn.ResultType.IsScalar { + sb.WriteString(` + if len(operation.Fields) > 0 { + return nil, schema.UnprocessableContentError("cannot 
evaluate selection fields for scalar", nil) + }`) + } else if fn.ResultType.IsArray() { + sb.WriteString(` + selection, err := operation.Fields.AsArray() + if err != nil { + return nil, schema.UnprocessableContentError("the selection field type must be array", map[string]any{ + "cause": err.Error(), + }) + }`) + } else { + sb.WriteString(` + selection, err := operation.Fields.AsObject() + if err != nil { + return nil, schema.UnprocessableContentError("the selection field type must be object", map[string]any{ + "cause": err.Error(), + }) + }`) + } + + var argumentParamStr string + if fn.ArgumentsType != nil { + argName := fn.ArgumentsType.Name + if fn.ArgumentsType.PackagePath != "" && fn.ArgumentsType.PackagePath != chb.Builder.packagePath { + chb.Builder.imports[fn.ArgumentsType.PackagePath] = "" + argName = fmt.Sprintf("%s.%s", fn.ArgumentsType.PackageName, fn.ArgumentsType.Name) + } + + argumentStr := fmt.Sprintf(` + var args %s + if err := json.Unmarshal(operation.Arguments, &args); err != nil { + return nil, schema.UnprocessableContentError("failed to decode arguments", map[string]any{ + "cause": err.Error(), + }) + }`, argName) + sb.WriteString(argumentStr) + argumentParamStr = ", &args" + } + + sb.WriteString("\n span.AddEvent(\"execute_procedure\")") + if fn.ResultType.IsScalar { + sb.WriteString(fmt.Sprintf(` + result, err := %s(ctx, state%s)`, fn.OriginName, argumentParamStr)) + } else { + sb.WriteString(fmt.Sprintf("\n rawResult, err := %s(ctx, state%s)\n", fn.OriginName, argumentParamStr)) + chb.writeGeneralOperationResult(sb, fn.ResultType) + + sb.WriteString(` connector_addSpanEvent(span, logger, "evaluate_response_selection", map[string]any{ + "raw_result": rawResult, + })`) + if fn.ResultType.IsArray() { + sb.WriteString("\n result, err := utils.EvalNestedColumnArrayIntoSlice(selection, rawResult)\n") + } else { + sb.WriteString("\n result, err := utils.EvalNestedColumnObject(selection, rawResult)\n") + } + } + + sb.WriteString(textBlockErrorCheck2) 
+ sb.WriteString(" return schema.NewProcedureResult(result).Encode(), nil\n") + } + + _, _ = sb.WriteString(` + default: + return nil, utils.ErrHandlerNotfound + } +} +`) + chb.writeOperationNameEnums(sb, procedureEnumsName, procedureKeys) +} + +func (chb connectorHandlerBuilder) writeGeneralOperationResult(sb *strings.Builder, resultType *TypeInfo) { + sb.WriteString(textBlockErrorCheck2) + if resultType.IsNullable() { + sb.WriteString(` + if rawResult == nil { + return nil, nil + } +`) + } +} diff --git a/cmd/hasura-ndc-go/command/internal/connector_test.go b/cmd/hasura-ndc-go/command/internal/connector_test.go index b45a12d..8d750ab 100644 --- a/cmd/hasura-ndc-go/command/internal/connector_test.go +++ b/cmd/hasura-ndc-go/command/internal/connector_test.go @@ -53,29 +53,6 @@ func TestConnectorGeneration(t *testing.T) { ModuleName: "github.com/hasura/ndc-codegen-subdir-test", Directories: []string{"connector/functions"}, }, - { - Name: "subdir_package_types", - BasePath: "./testdata/subdir", - ConnectorDir: "connector", - ModuleName: "github.com/hasura/ndc-codegen-subdir-test", - PackageTypes: "connector/types", - Directories: []string{"connector/functions"}, - }, - { - Name: "subdir_package_types_absolute", - BasePath: "./testdata/subdir", - ConnectorDir: "connector", - ModuleName: "github.com/hasura/ndc-codegen-subdir-test", - PackageTypes: "github.com/hasura/ndc-codegen-subdir-test/connector/types", - Directories: []string{"connector/functions"}, - }, - { - Name: "invalid_types_package", - BasePath: "./testdata/subdir", - ModuleName: "github.com/hasura/ndc-codegen-subdir-test", - Directories: []string{"connector/functions"}, - errorMsg: "the `types` package where the State struct is in must be placed in root or connector directory", - }, { Name: "snake_case", BasePath: "./testdata/snake_case", @@ -90,7 +67,6 @@ func TestConnectorGeneration(t *testing.T) { for _, tc := range testCases { t.Run(tc.Name, func(t *testing.T) { assert.NoError(t, os.Chdir(rootDir)) - 
expectedSchemaBytes, err := os.ReadFile(path.Join(tc.BasePath, "expected/schema.json")) assert.NoError(t, err) connectorContentBytes, err := os.ReadFile(path.Join(tc.BasePath, "expected/connector.go.tmpl")) @@ -100,7 +76,6 @@ func TestConnectorGeneration(t *testing.T) { assert.NoError(t, os.Chdir(srcDir)) err = ParseAndGenerateConnector(ConnectorGenerationArguments{ ConnectorDir: tc.ConnectorDir, - PackageTypes: tc.PackageTypes, Directories: tc.Directories, Style: tc.NamingStyle, }, tc.ModuleName) @@ -108,7 +83,9 @@ func TestConnectorGeneration(t *testing.T) { assert.ErrorContains(t, err, tc.errorMsg) return } - assert.NoError(t, err) + if err != nil { + panic(err) + } var expectedSchema schema.SchemaResponse assert.NoError(t, json.Unmarshal(expectedSchemaBytes, &expectedSchema)) @@ -143,4 +120,62 @@ func TestConnectorGeneration(t *testing.T) { } }) } + + // go template + for _, tc := range testCases { + t.Run(tc.Name, func(t *testing.T) { + assert.NoError(t, os.Chdir(rootDir)) + + expectedSchemaBytes, err := os.ReadFile(path.Join(tc.BasePath, "expected/schema.go.tmpl")) + if err != nil { + if os.IsNotExist(err) { + return + } + assert.NoError(t, err) + } + connectorContentBytes, err := os.ReadFile(path.Join(tc.BasePath, "expected/connector-go.go.tmpl")) + if err != nil { + if os.IsNotExist(err) { + return + } + assert.NoError(t, err) + } + + srcDir := path.Join(tc.BasePath, "source") + assert.NoError(t, os.Chdir(srcDir)) + err = ParseAndGenerateConnector(ConnectorGenerationArguments{ + ConnectorDir: tc.ConnectorDir, + Directories: tc.Directories, + Style: tc.NamingStyle, + SchemaFormat: "go", + }, tc.ModuleName) + if tc.errorMsg != "" { + assert.ErrorContains(t, err, tc.errorMsg) + return + } + assert.NoError(t, err) + + schemaBytes, err := os.ReadFile(path.Join(tc.ConnectorDir, "schema.generated.go")) + assert.NoError(t, err) + assert.Equal(t, formatTextContent(string(expectedSchemaBytes)), formatTextContent(string(schemaBytes))) + + connectorBytes, err := 
os.ReadFile(path.Join(tc.ConnectorDir, "connector.generated.go")) + assert.NoError(t, err) + assert.Equal(t, formatTextContent(string(connectorContentBytes)), formatTextContent(string(connectorBytes))) + + // go to the base test directory + assert.NoError(t, os.Chdir("..")) + + for _, td := range tc.Directories { + expectedFunctionTypesBytes, err := os.ReadFile(path.Join("expected", "functions.go.tmpl")) + if err == nil { + functionTypesBytes, err := os.ReadFile(path.Join("source", td, "types.generated.go")) + assert.NoError(t, err) + assert.Equal(t, formatTextContent(string(expectedFunctionTypesBytes)), formatTextContent(string(functionTypesBytes))) + } else if !os.IsNotExist(err) { + assert.NoError(t, err) + } + } + }) + } } diff --git a/cmd/hasura-ndc-go/command/internal/constant.go b/cmd/hasura-ndc-go/command/internal/constant.go index c9884c5..bd2dd43 100644 --- a/cmd/hasura-ndc-go/command/internal/constant.go +++ b/cmd/hasura-ndc-go/command/internal/constant.go @@ -1,38 +1,11 @@ package internal import ( - _ "embed" - "fmt" "regexp" - "text/template" "github.com/hasura/ndc-sdk-go/schema" - "github.com/iancoleman/strcase" ) -const ( - connectorOutputFile = "connector.generated.go" - schemaOutputFile = "schema.generated.json" - typeMethodsOutputFile = "types.generated.go" -) - -//go:embed templates/connector/connector.go.tmpl -var connectorTemplateStr string -var connectorTemplate *template.Template - -func init() { - var err error - connectorTemplate, err = template.New(connectorOutputFile).Parse(connectorTemplateStr) - if err != nil { - panic(fmt.Errorf("failed to parse connector template: %s", err)) - } - - strcase.ConfigureAcronym("API", "Api") - strcase.ConfigureAcronym("REST", "Rest") - strcase.ConfigureAcronym("HTTP", "Http") - strcase.ConfigureAcronym("SQL", "sql") -} - type ScalarName string const ( diff --git a/cmd/hasura-ndc-go/command/internal/schema.go b/cmd/hasura-ndc-go/command/internal/schema.go index 99dfd28..4bc419e 100644 --- 
a/cmd/hasura-ndc-go/command/internal/schema.go +++ b/cmd/hasura-ndc-go/command/internal/schema.go @@ -1,25 +1,10 @@ package internal import ( - "context" - "errors" - "flag" "fmt" - "go/ast" - "go/token" "go/types" - "os" - "path" - "path/filepath" - "regexp" - "runtime/trace" - "strings" - "github.com/fatih/structtag" "github.com/hasura/ndc-sdk-go/schema" - "github.com/iancoleman/strcase" - "github.com/rs/zerolog/log" - "golang.org/x/tools/go/packages" ) type OperationKind string @@ -38,6 +23,7 @@ type TypeInfo struct { Description *string PackagePath string PackageName string + Embedded bool IsScalar bool ScalarRepresentation schema.TypeRepresentation TypeFragments []string @@ -50,6 +36,25 @@ func (ti *TypeInfo) IsNullable() bool { return isNullableFragments(ti.TypeFragments) } +// IsArray checks if the current type is an array +func (ti *TypeInfo) IsArray() bool { + return isArrayFragments(ti.TypeFragments) +} + +// GetArgumentName returns the argument name +func (ti *TypeInfo) GetArgumentName(packagePath string) string { + if packagePath == ti.PackagePath { + return ti.Name + } + + return fmt.Sprintf("%s.%s", ti.PackageName, ti.Name) +} + +// String implements the fmt.Stringer interface +func (ti *TypeInfo) String() string { + return fmt.Sprintf("%s.%s", ti.PackagePath, ti.Name) +} + func isNullableFragment(fragment string) bool { return fragment == "*" } @@ -58,11 +63,6 @@ func isNullableFragments(fragments []string) bool { return len(fragments) > 0 && isNullableFragment(fragments[0]) } -// IsArray checks if the current type is an array -func (ti *TypeInfo) IsArray() bool { - return isArrayFragments(ti.TypeFragments) -} - func isArrayFragment(fragment string) bool { return fragment == "[]" } @@ -73,40 +73,16 @@ func isArrayFragments(fragments []string) bool { // ObjectField represents the serialization information of an object field type ObjectField struct { - Name string - Key string - Type *TypeInfo + Name string + Description *string + Type *TypeInfo } // 
ObjectInfo represents the serialization information of an object type type ObjectInfo struct { - PackagePath string - PackageName string IsAnonymous bool - Fields map[string]*ObjectField -} - -// ArgumentInfo represents the serialization information of an argument type -type ArgumentInfo struct { - FieldName string - Description *string Type *TypeInfo -} - -// Schema converts to ArgumentInfo schema -func (ai ArgumentInfo) Schema() schema.ArgumentInfo { - return schema.ArgumentInfo{ - Description: ai.Description, - Type: ai.Type.Schema.Encode(), - } -} - -func buildArgumentInfosSchema(input map[string]ArgumentInfo) map[string]schema.ArgumentInfo { - result := make(map[string]schema.ArgumentInfo) - for k, arg := range input { - result[k] = arg.Schema() - } - return result + Fields map[string]ObjectField } // FunctionInfo represents a readable Go function info @@ -119,7 +95,7 @@ type OperationInfo struct { PackagePath string Description *string ArgumentsType *TypeInfo - Arguments map[string]ArgumentInfo + Arguments map[string]schema.ArgumentInfo ResultType *TypeInfo } @@ -133,7 +109,7 @@ func (op FunctionInfo) Schema() schema.FunctionInfo { Name: op.Name, Description: op.Description, ResultType: op.ResultType.Schema.Encode(), - Arguments: buildArgumentInfosSchema(op.Arguments), + Arguments: op.Arguments, } return result } @@ -148,7 +124,7 @@ func (op ProcedureInfo) Schema() schema.ProcedureInfo { Name: op.Name, Description: op.Description, ResultType: op.ResultType.Schema.Encode(), - Arguments: schema.ProcedureInfoArguments(buildArgumentInfosSchema(op.Arguments)), + Arguments: schema.ProcedureInfoArguments(op.Arguments), } return result } @@ -156,25 +132,28 @@ func (op ProcedureInfo) Schema() schema.ProcedureInfo { // RawConnectorSchema represents a readable Go schema object // which can encode to NDC schema type RawConnectorSchema struct { - Imports map[string]bool - CustomScalars map[string]*TypeInfo - ScalarSchemas schema.SchemaResponseScalarTypes - Objects 
map[string]*ObjectInfo - ObjectSchemas schema.SchemaResponseObjectTypes - Functions []FunctionInfo - Procedures []ProcedureInfo + StateType *TypeInfo + Imports map[string]bool + CustomScalars map[string]*TypeInfo + ScalarSchemas schema.SchemaResponseScalarTypes + Objects map[string]ObjectInfo + ObjectSchemas schema.SchemaResponseObjectTypes + Functions []FunctionInfo + FunctionArguments map[string]ObjectInfo + Procedures []ProcedureInfo } // NewRawConnectorSchema creates an empty RawConnectorSchema instance func NewRawConnectorSchema() *RawConnectorSchema { return &RawConnectorSchema{ - Imports: make(map[string]bool), - CustomScalars: make(map[string]*TypeInfo), - ScalarSchemas: make(schema.SchemaResponseScalarTypes), - Objects: make(map[string]*ObjectInfo), - ObjectSchemas: make(schema.SchemaResponseObjectTypes), - Functions: []FunctionInfo{}, - Procedures: []ProcedureInfo{}, + Imports: make(map[string]bool), + CustomScalars: make(map[string]*TypeInfo), + ScalarSchemas: make(schema.SchemaResponseScalarTypes), + Objects: make(map[string]ObjectInfo), + ObjectSchemas: make(schema.SchemaResponseObjectTypes), + Functions: []FunctionInfo{}, + FunctionArguments: make(map[string]ObjectInfo), + Procedures: []ProcedureInfo{}, } } @@ -197,784 +176,10 @@ func (rcs RawConnectorSchema) Schema() *schema.SchemaResponse { return result } -// IsCustomType checks if the type name is a custom scalar or an exported object -func (rcs RawConnectorSchema) IsCustomType(name string) bool { - if _, ok := rcs.CustomScalars[name]; ok { - return true - } - if obj, ok := rcs.Objects[name]; ok { - return !obj.IsAnonymous - } - return false -} - -type SchemaParser struct { - context context.Context - moduleName string - rawSchema *RawConnectorSchema - packages []*packages.Package - packageIndex int - namingStyle OperationNamingStyle -} - -// GetCurrentPackage gets the current evaluating package -func (sp SchemaParser) GetCurrentPackage() *packages.Package { - return sp.packages[sp.packageIndex] 
-} - -// FindPackageByPath finds the package by package path -func (sp SchemaParser) FindPackageByPath(input string) *packages.Package { - for _, p := range sp.packages { - if p.ID == input { - return p - } - } - return nil -} - -func parseRawConnectorSchemaFromGoCode(ctx context.Context, moduleName string, filePath string, args *ConnectorGenerationArguments) (*RawConnectorSchema, error) { - var err error - namingStyle := StyleCamelCase - if args.Style != "" { - namingStyle, err = ParseOperationNamingStyle(args.Style) - if err != nil { - return nil, err - } - } - rawSchema := NewRawConnectorSchema() - - pkgTypes, err := evalPackageTypesLocation(args.PackageTypes, moduleName, filePath, args.ConnectorDir) - if err != nil { - return nil, err - } - rawSchema.Imports[pkgTypes] = true - - tempDirs := args.Directories - if len(args.Directories) == 0 { - // recursively walk directories if the user don't explicitly specify target folders - entries, err := os.ReadDir(filePath) - if err != nil { - return nil, fmt.Errorf("failed to read subdirectories of %s: %s", filePath, err) - } - for _, entry := range entries { - if !entry.IsDir() { - continue - } - tempDirs = append(tempDirs, entry.Name()) - } - } - var directories []string - for _, dir := range tempDirs { - for _, globPath := range []string{path.Join(filePath, dir, "*.go"), path.Join(filePath, dir, "**", "*.go")} { - goFiles, err := filepath.Glob(globPath) - if err != nil { - return nil, fmt.Errorf("failed to read subdirectories of %s/%s: %s", filePath, dir, err) - } - // cleanup types.generated.go files - fileCount := 0 - for _, fp := range goFiles { - if !strings.HasSuffix(fp, typeMethodsOutputFile) { - fileCount++ - continue - } - if err := os.Remove(fp); err != nil { - return nil, fmt.Errorf("failed to delete %s: %s", fp, err) - } - } - if fileCount > 0 { - directories = append(directories, dir) - break - } - } - } - - if len(directories) == 0 { - log.Info().Msgf("no subdirectory in %s", filePath) - return rawSchema, 
nil - } - - log.Info().Interface("directories", directories).Msgf("parsing connector schema...") - - var packageList []*packages.Package - fset := token.NewFileSet() - for _, folder := range directories { - _, parseCodeTask := trace.NewTask(ctx, fmt.Sprintf("parse_%s_code", folder)) - folderPath := path.Join(filePath, folder) - - cfg := &packages.Config{ - Mode: packages.NeedSyntax | packages.NeedTypes, - Dir: folderPath, - Fset: fset, - } - pkgList, err := packages.Load(cfg, flag.Args()...) - parseCodeTask.End() - if err != nil { - return nil, err - } - packageList = append(packageList, pkgList...) - } - - for i := range packageList { - parseSchemaCtx, parseSchemaTask := trace.NewTask(ctx, fmt.Sprintf("parse_schema_%s", packageList[i].ID)) - sp := &SchemaParser{ - context: parseSchemaCtx, - moduleName: moduleName, - packages: packageList, - packageIndex: i, - rawSchema: rawSchema, - namingStyle: namingStyle, - } - - err = sp.parseRawConnectorSchema(packageList[i].Types) - parseSchemaTask.End() - if err != nil { - return nil, err - } - } - - return rawSchema, nil -} - -func evalPackageTypesLocation(name string, moduleName string, filePath string, connectorDir string) (string, error) { - if name != "" { - // assume that the absolute package name should have domain, e.g. github.com/... 
- if strings.Contains(name, ".") { - return name, nil - } - return fmt.Sprintf("%s/%s", moduleName, name), nil - } - - matches, err := filepath.Glob(path.Join(filePath, "types", "*.go")) - if err == nil && len(matches) > 0 { - return fmt.Sprintf("%s/types", moduleName), nil - } - - if connectorDir != "" && !strings.HasPrefix(".", connectorDir) { - matches, err = filepath.Glob(path.Join(filePath, connectorDir, "types", "*.go")) - if err == nil && len(matches) > 0 { - return fmt.Sprintf("%s/%s/types", moduleName, connectorDir), nil - } - } - return "", fmt.Errorf("the `types` package where the State struct is in must be placed in root or connector directory, %s", err) -} - -// parse raw connector schema from Go code -func (sp *SchemaParser) parseRawConnectorSchema(pkg *types.Package) error { - - for _, name := range pkg.Scope().Names() { - _, task := trace.NewTask(sp.context, fmt.Sprintf("parse_%s_schema_%s", sp.GetCurrentPackage().Name, name)) - err := sp.parsePackageScope(pkg, name) - task.End() - if err != nil { - return err - } - } - - return nil -} - -func (sp *SchemaParser) parsePackageScope(pkg *types.Package, name string) error { - switch obj := pkg.Scope().Lookup(name).(type) { - case *types.Func: - // only parse public functions - if !obj.Exported() { - return nil - } - opInfo := sp.parseOperationInfo(obj) - if opInfo == nil { - return nil - } - opInfo.PackageName = pkg.Name() - opInfo.PackagePath = pkg.Path() - var resultTuple *types.Tuple - var params *types.Tuple - switch sig := obj.Type().(type) { - case *types.Signature: - params = sig.Params() - resultTuple = sig.Results() - default: - return fmt.Errorf("expected function signature, got: %s", sig.String()) - } - - if params == nil || (params.Len() < 2 || params.Len() > 3) { - return fmt.Errorf("%s: expect 2 or 3 parameters only (ctx context.Context, state types.State, arguments *[ArgumentType]), got %s", opInfo.OriginName, params) - } - - if resultTuple == nil || resultTuple.Len() != 2 { - return 
fmt.Errorf("%s: expect result tuple ([type], error), got %s", opInfo.OriginName, resultTuple) - } - - // parse arguments in the function if exists - // ignore 2 first parameters (context and state) - if params.Len() == 3 { - arg := params.At(2) - arguments, argumentType, err := sp.parseArgumentTypes(arg.Type(), []string{}) - if err != nil { - return err - } - opInfo.ArgumentsType = argumentType - opInfo.Arguments = arguments - } - - resultType, err := sp.parseType(nil, resultTuple.At(0).Type(), []string{}, false, false) - if err != nil { - return err - } - opInfo.ResultType = resultType - - switch opInfo.Kind { - case OperationProcedure: - sp.rawSchema.Procedures = append(sp.rawSchema.Procedures, ProcedureInfo(*opInfo)) - case OperationFunction: - sp.rawSchema.Functions = append(sp.rawSchema.Functions, FunctionInfo(*opInfo)) - } - } - return nil -} - -func (sp *SchemaParser) parseArgumentTypes(ty types.Type, fieldPaths []string) (map[string]ArgumentInfo, *TypeInfo, error) { - - switch inferredType := ty.(type) { - case *types.Pointer: - return sp.parseArgumentTypes(inferredType.Elem(), fieldPaths) - case *types.Struct: - result := make(map[string]ArgumentInfo) - for i := 0; i < inferredType.NumFields(); i++ { - fieldVar := inferredType.Field(i) - fieldTag := inferredType.Tag(i) - fieldPackage := fieldVar.Pkg() - var typeInfo *TypeInfo - if fieldPackage != nil { - typeInfo = &TypeInfo{ - PackageName: fieldPackage.Name(), - PackagePath: fieldPackage.Path(), - } - } - fieldType, err := sp.parseType(typeInfo, fieldVar.Type(), append(fieldPaths, fieldVar.Name()), false, true) - if err != nil { - return nil, nil, err - } - fieldName := getFieldNameOrTag(fieldVar.Name(), fieldTag) - if fieldType.TypeAST == nil { - fieldType.TypeAST = fieldVar.Type() - } - result[fieldName] = ArgumentInfo{ - FieldName: fieldVar.Name(), - Type: fieldType, - } - } - return result, nil, nil - case *types.Named: - arguments, _, err := 
sp.parseArgumentTypes(inferredType.Obj().Type().Underlying(), append(fieldPaths, inferredType.Obj().Name())) - if err != nil { - return nil, nil, err - } - - typeObj := inferredType.Obj() - typeInfo := &TypeInfo{ - Name: typeObj.Name(), - SchemaName: typeObj.Name(), - } - pkg := typeObj.Pkg() - if pkg != nil { - typeInfo.PackagePath = pkg.Path() - typeInfo.PackageName = pkg.Name() - } - return arguments, typeInfo, nil - default: - return nil, nil, fmt.Errorf("expected struct type, got %s", ty.String()) - } -} - -func (sp *SchemaParser) parseType(rootType *TypeInfo, ty types.Type, fieldPaths []string, skipNullable bool, isArgument bool) (*TypeInfo, error) { - - switch inferredType := ty.(type) { - case *types.Pointer: - if skipNullable { - return sp.parseType(rootType, inferredType.Elem(), fieldPaths, false, isArgument) - } - innerType, err := sp.parseType(rootType, inferredType.Elem(), fieldPaths, false, isArgument) - if err != nil { - return nil, err - } - return &TypeInfo{ - Name: innerType.Name, - SchemaName: innerType.Name, - Description: innerType.Description, - PackagePath: innerType.PackagePath, - PackageName: innerType.PackageName, - TypeAST: ty, - TypeFragments: append([]string{"*"}, innerType.TypeFragments...), - IsScalar: innerType.IsScalar, - Schema: schema.NewNullableType(innerType.Schema), - }, nil - case *types.Struct: - isAnonymous := false - if rootType == nil { - rootType = &TypeInfo{} - } - - name := strings.Join(fieldPaths, "") - if rootType.Name == "" { - rootType.Name = name - isAnonymous = true - rootType.TypeFragments = append(rootType.TypeFragments, ty.String()) - } - if rootType.SchemaName == "" { - rootType.SchemaName = name - } - if rootType.TypeAST == nil { - rootType.TypeAST = ty - } - - if rootType.Schema == nil { - rootType.Schema = schema.NewNamedType(name) - } - objType := schema.ObjectType{ - Description: rootType.Description, - Fields: make(schema.ObjectTypeFields), - } - objFields := &ObjectInfo{ - PackagePath: 
rootType.PackagePath, - PackageName: rootType.PackageName, - IsAnonymous: isAnonymous, - Fields: map[string]*ObjectField{}, - } - // temporarily add the object type to raw schema to avoid infinite loop - sp.rawSchema.ObjectSchemas[rootType.Name] = objType - sp.rawSchema.Objects[rootType.Name] = objFields - - for i := 0; i < inferredType.NumFields(); i++ { - fieldVar := inferredType.Field(i) - fieldTag := inferredType.Tag(i) - fieldType, err := sp.parseType(nil, fieldVar.Type(), append(fieldPaths, fieldVar.Name()), false, isArgument) - if err != nil { - return nil, err - } - fieldKey := getFieldNameOrTag(fieldVar.Name(), fieldTag) - objType.Fields[fieldKey] = schema.ObjectField{ - Type: fieldType.Schema.Encode(), - } - objFields.Fields[fieldVar.Name()] = &ObjectField{ - Name: fieldVar.Name(), - Key: fieldKey, - Type: fieldType, - } - } - sp.rawSchema.ObjectSchemas[rootType.Name] = objType - sp.rawSchema.Objects[rootType.Name] = objFields - - return rootType, nil - case *types.Named: - - innerType := inferredType.Obj() - if innerType == nil { - return nil, fmt.Errorf("failed to parse named type: %s", inferredType.String()) - } - - innerPkg := innerType.Pkg() - var typeInfo *TypeInfo - if innerPkg != nil { - if _, ok := sp.rawSchema.Objects[innerType.Name()]; ok { - ty := &TypeInfo{ - Name: innerType.Name(), - SchemaName: innerType.Name(), - PackageName: innerPkg.Name(), - PackagePath: innerPkg.Path(), - TypeAST: innerType.Type(), - Schema: schema.NewNamedType(innerType.Name()), - TypeFragments: []string{innerType.Name()}, - } - return ty, nil - } - - var err error - typeInfo, err = sp.parseTypeInfoFromComments(innerType.Name(), innerPkg.Path(), innerType.Parent()) - if err != nil { - return nil, err - } - var scalarName ScalarName - typeInfo.PackageName = innerPkg.Name() - typeInfo.PackagePath = innerPkg.Path() - scalarSchema := schema.NewScalarType() - - switch innerPkg.Path() { - case "time": - switch innerType.Name() { - case "Time": - scalarName = 
ScalarTimestampTZ - scalarSchema.Representation = schema.NewTypeRepresentationTimestampTZ().Encode() - case "Duration": - return nil, errors.New("unsupported type time.Duration. Create a scalar type wrapper with FromValue method to decode the any value") - } - case "encoding/json": - switch innerType.Name() { - case "RawMessage": - scalarName = ScalarRawJSON - scalarSchema.Representation = schema.NewTypeRepresentationJSON().Encode() - } - case "github.com/google/uuid": - switch innerType.Name() { - case "UUID": - scalarName = ScalarUUID - scalarSchema.Representation = schema.NewTypeRepresentationUUID().Encode() - } - case "github.com/hasura/ndc-sdk-go/scalar": - scalarName = ScalarName(innerType.Name()) - switch innerType.Name() { - case "Date": - scalarSchema.Representation = schema.NewTypeRepresentationDate().Encode() - case "BigInt": - scalarSchema.Representation = schema.NewTypeRepresentationBigInteger().Encode() - case "Bytes": - scalarSchema.Representation = schema.NewTypeRepresentationBytes().Encode() - case "URL": - scalarSchema.Representation = schema.NewTypeRepresentationString().Encode() - } - } - - if scalarName != "" { - typeInfo.IsScalar = true - typeInfo.Schema = schema.NewNamedType(string(scalarName)) - typeInfo.TypeAST = ty - sp.rawSchema.ScalarSchemas[string(scalarName)] = *scalarSchema - return typeInfo, nil - } - } else if innerType.Name() == "error" { - if isArgument { - return nil, fmt.Errorf("%s: native `error` interface isn't allowed in input arguments", strings.Join(fieldPaths, ".")) - } - typeInfo = &TypeInfo{ - Name: innerType.Name(), - SchemaName: string(ScalarJSON), - TypeAST: innerType.Type(), - Schema: schema.NewNullableType(schema.NewNamedType(string(ScalarJSON))), - IsScalar: true, - ScalarRepresentation: schema.NewTypeRepresentationJSON().Encode(), - } - if _, ok := sp.rawSchema.ScalarSchemas[typeInfo.SchemaName]; !ok { - sp.rawSchema.ScalarSchemas[typeInfo.SchemaName] = defaultScalarTypes[ScalarJSON] - } - return typeInfo, nil - } 
else { - return nil, fmt.Errorf("%s: unsupported type <%s>", strings.Join(fieldPaths, "."), innerType.Name()) - } - - if typeInfo.IsScalar { - sp.rawSchema.CustomScalars[typeInfo.Name] = typeInfo - scalarSchema := schema.NewScalarType() - if typeInfo.ScalarRepresentation != nil { - scalarSchema.Representation = typeInfo.ScalarRepresentation - } else { - // requires representation since NDC spec v0.1.2 - scalarSchema.Representation = schema.NewTypeRepresentationJSON().Encode() - } - sp.rawSchema.ScalarSchemas[typeInfo.SchemaName] = *scalarSchema - return typeInfo, nil - } - - return sp.parseType(typeInfo, innerType.Type().Underlying(), append(fieldPaths, innerType.Name()), false, isArgument) - case *types.Basic: - var scalarName ScalarName - switch inferredType.Kind() { - case types.Bool: - scalarName = ScalarBoolean - sp.rawSchema.ScalarSchemas[string(scalarName)] = defaultScalarTypes[scalarName] - case types.Int8, types.Uint8: - scalarName = ScalarInt8 - sp.rawSchema.ScalarSchemas[string(scalarName)] = defaultScalarTypes[scalarName] - case types.Int16, types.Uint16: - scalarName = ScalarInt16 - sp.rawSchema.ScalarSchemas[string(scalarName)] = defaultScalarTypes[scalarName] - case types.Int, types.Int32, types.Uint, types.Uint32: - scalarName = ScalarInt32 - sp.rawSchema.ScalarSchemas[string(scalarName)] = defaultScalarTypes[scalarName] - case types.Int64, types.Uint64: - scalarName = ScalarInt64 - sp.rawSchema.ScalarSchemas[string(scalarName)] = defaultScalarTypes[scalarName] - case types.Float32: - scalarName = ScalarFloat32 - sp.rawSchema.ScalarSchemas[string(scalarName)] = defaultScalarTypes[scalarName] - case types.Float64: - scalarName = ScalarFloat64 - sp.rawSchema.ScalarSchemas[string(scalarName)] = defaultScalarTypes[scalarName] - case types.String: - scalarName = ScalarString - sp.rawSchema.ScalarSchemas[string(scalarName)] = defaultScalarTypes[scalarName] - default: - return nil, fmt.Errorf("%s: unsupported scalar type <%s>", strings.Join(fieldPaths, 
"."), inferredType.String()) - } - if rootType == nil { - rootType = &TypeInfo{ - Name: inferredType.Name(), - SchemaName: inferredType.Name(), - TypeFragments: []string{inferredType.Name()}, - TypeAST: ty, - } - } - - rootType.Schema = schema.NewNamedType(string(scalarName)) - rootType.IsScalar = true - - return rootType, nil - case *types.Array: - innerType, err := sp.parseType(nil, inferredType.Elem(), fieldPaths, false, isArgument) - if err != nil { - return nil, err - } - innerType.TypeFragments = append([]string{"[]"}, innerType.TypeFragments...) - innerType.Schema = schema.NewArrayType(innerType.Schema) - return innerType, nil - case *types.Slice: - innerType, err := sp.parseType(nil, inferredType.Elem(), fieldPaths, false, isArgument) - if err != nil { - return nil, err - } - - innerType.TypeFragments = append([]string{"[]"}, innerType.TypeFragments...) - innerType.Schema = schema.NewArrayType(innerType.Schema) - return innerType, nil - case *types.Map, *types.Interface: - scalarName := ScalarJSON - if rootType == nil { - rootType = &TypeInfo{ - Name: inferredType.String(), - SchemaName: string(scalarName), - TypeAST: ty, - } - } else { - rootType.PackagePath = "" - } - - if _, ok := sp.rawSchema.ScalarSchemas[string(scalarName)]; !ok { - sp.rawSchema.ScalarSchemas[string(scalarName)] = defaultScalarTypes[scalarName] - } - rootType.TypeFragments = append(rootType.TypeFragments, inferredType.String()) - rootType.Schema = schema.NewNamedType(string(scalarName)) - rootType.IsScalar = true - - return rootType, nil - default: - return nil, fmt.Errorf("unsupported type: %s", ty.String()) - } -} - -func (sp *SchemaParser) parseTypeInfoFromComments(typeName string, packagePath string, scope *types.Scope) (*TypeInfo, error) { - typeInfo := &TypeInfo{ - Name: typeName, - SchemaName: typeName, - IsScalar: false, - TypeFragments: []string{typeName}, - Schema: schema.NewNamedType(typeName), - } - comments := make([]string, 0) - commentGroup := 
findCommentsFromPos(sp.FindPackageByPath(packagePath), scope, typeName) - if commentGroup != nil { - for i, line := range commentGroup.List { - text := strings.TrimSpace(strings.TrimLeft(line.Text, "/")) - if text == "" { - continue - } - if i == 0 { - text = strings.TrimPrefix(text, fmt.Sprintf("%s ", typeName)) - } - - enumMatches := ndcEnumCommentRegex.FindStringSubmatch(text) - - if len(enumMatches) == 2 { - typeInfo.IsScalar = true - rawEnumItems := strings.Split(enumMatches[1], ",") - var enums []string - for _, item := range rawEnumItems { - trimmed := strings.TrimSpace(item) - if trimmed != "" { - enums = append(enums, trimmed) - } - } - if len(enums) == 0 { - return nil, fmt.Errorf("require enum values in the comment of %s", typeName) - } - typeInfo.ScalarRepresentation = schema.NewTypeRepresentationEnum(enums).Encode() - continue - } - - matches := ndcScalarCommentRegex.FindStringSubmatch(text) - matchesLen := len(matches) - if matchesLen > 1 { - typeInfo.IsScalar = true - if matchesLen > 3 && matches[3] != "" { - typeInfo.SchemaName = matches[2] - typeInfo.Schema = schema.NewNamedType(matches[2]) - typeRep, err := schema.ParseTypeRepresentationType(strings.TrimSpace(matches[3])) - if err != nil { - return nil, fmt.Errorf("failed to parse type representation of scalar %s: %s", typeName, err) - } - if typeRep == schema.TypeRepresentationTypeEnum { - return nil, errors.New("use @enum tag with values instead") - } - typeInfo.ScalarRepresentation = schema.TypeRepresentation{ - "type": typeRep, - } - } else if matchesLen > 2 && matches[2] != "" { - // if the second string is a type representation, use it as a TypeRepresentation instead - // e.g @scalar string - typeRep, err := schema.ParseTypeRepresentationType(matches[2]) - if err == nil { - if typeRep == schema.TypeRepresentationTypeEnum { - return nil, errors.New("use @enum tag with values instead") - } - typeInfo.ScalarRepresentation = schema.TypeRepresentation{ - "type": typeRep, - } - continue - } - - 
typeInfo.SchemaName = matches[2] - typeInfo.Schema = schema.NewNamedType(matches[2]) - } - continue - } - - comments = append(comments, text) - } - } - - if !typeInfo.IsScalar { - // fallback to parse scalar from type name with Scalar prefix - matches := ndcScalarNameRegex.FindStringSubmatch(typeName) - if len(matches) > 1 { - typeInfo.IsScalar = true - typeInfo.SchemaName = matches[1] - typeInfo.Schema = schema.NewNamedType(matches[1]) - } - } - - desc := strings.Join(comments, " ") - if desc != "" { - typeInfo.Description = &desc - } - - return typeInfo, nil -} - -// format operation name with style -func (sp SchemaParser) formatOperationName(name string) string { - switch sp.namingStyle { - case StyleSnakeCase: - return strcase.ToSnake(name) - default: - return strcase.ToLowerCamel(name) - } -} - -func (sp *SchemaParser) parseOperationInfo(fn *types.Func) *OperationInfo { - functionName := fn.Name() - result := OperationInfo{ - OriginName: functionName, - Arguments: make(map[string]ArgumentInfo), - } - - var descriptions []string - commentGroup := findCommentsFromPos(sp.GetCurrentPackage(), fn.Scope(), functionName) - if commentGroup != nil { - for i, comment := range commentGroup.List { - text := strings.TrimSpace(strings.TrimLeft(comment.Text, "/")) - - // trim the function name in the first line if exists - if i == 0 { - text = strings.TrimPrefix(text, fmt.Sprintf("%s ", functionName)) - } - matches := ndcOperationCommentRegex.FindStringSubmatch(text) - matchesLen := len(matches) - if matchesLen > 1 { - switch matches[1] { - case strings.ToLower(string(OperationFunction)): - result.Kind = OperationFunction - case strings.ToLower(string(OperationProcedure)): - result.Kind = OperationProcedure - default: - log.Debug().Msgf("unsupported operation kind: %s", matches) - } - - if matchesLen > 3 && strings.TrimSpace(matches[3]) != "" { - result.Name = strings.TrimSpace(matches[3]) - } else { - result.Name = sp.formatOperationName(functionName) - } - } else { - 
descriptions = append(descriptions, text) - } - } - } - - // try to parse function with following prefixes: - // - FunctionXxx as a query function - // - ProcedureXxx as a mutation procedure - if result.Kind == "" { - operationNameResults := ndcOperationNameRegex.FindStringSubmatch(functionName) - if len(operationNameResults) < 3 { - return nil - } - result.Kind = OperationKind(operationNameResults[1]) - result.Name = sp.formatOperationName(operationNameResults[2]) - } - - desc := strings.TrimSpace(strings.Join(descriptions, " ")) - if desc != "" { - result.Description = &desc - } - - return &result -} - -func findCommentsFromPos(pkg *packages.Package, scope *types.Scope, name string) *ast.CommentGroup { - if pkg == nil { - return nil - } - - for _, f := range pkg.Syntax { - for _, cg := range f.Comments { - if len(cg.List) == 0 { - continue - } - exp := regexp.MustCompile(fmt.Sprintf(`^//\s+%s`, name)) - if !exp.MatchString(cg.List[0].Text) { - continue - } - if _, obj := scope.LookupParent(name, cg.Pos()); obj != nil { - return cg - } - } - } - return nil -} - -// get field name by json tag -// return the struct field name if not exist -func getFieldNameOrTag(name string, tag string) string { - if tag == "" { - return name - } - tags, err := structtag.Parse(tag) - if err != nil { - log.Warn().Err(err).Msgf("failed to parse tag of struct field: %s", name) - return name - } - - jsonTag, err := tags.Get("json") - if err != nil { - log.Warn().Err(err).Msgf("json tag does not exist in struct field: %s", name) - return name - } - - return jsonTag.Name -} - -func findAndReplaceNativeScalarPackage(input string) (string, string, bool) { - for alias, pkg := range nativeScalarPackages { - if pkg.Pattern.MatchString(input) { - return pkg.PackageName, strings.ReplaceAll(input, pkg.PackageName, alias), true - } +func (rcs RawConnectorSchema) setFunctionArgument(info ObjectInfo) { + key := info.Type.String() + if _, ok := rcs.FunctionArguments[key]; ok { + return } - return "", 
"", false + rcs.FunctionArguments[key] = info } diff --git a/cmd/hasura-ndc-go/command/internal/schema_parser.go b/cmd/hasura-ndc-go/command/internal/schema_parser.go new file mode 100644 index 0000000..98e0ec3 --- /dev/null +++ b/cmd/hasura-ndc-go/command/internal/schema_parser.go @@ -0,0 +1,865 @@ +package internal + +import ( + "context" + "errors" + "flag" + "fmt" + "go/ast" + "go/token" + "go/types" + "os" + "path" + "path/filepath" + "regexp" + "runtime/trace" + "strings" + + "github.com/fatih/structtag" + "github.com/hasura/ndc-sdk-go/schema" + "github.com/iancoleman/strcase" + "github.com/rs/zerolog/log" + "golang.org/x/tools/go/packages" +) + +type SchemaParser struct { + context context.Context + moduleName string + rawSchema *RawConnectorSchema + packages []*packages.Package + packageIndex int + namingStyle OperationNamingStyle +} + +// GetCurrentPackage gets the current evaluating package +func (sp SchemaParser) GetCurrentPackage() *packages.Package { + return sp.packages[sp.packageIndex] +} + +// FindPackageByPath finds the package by package path +func (sp SchemaParser) FindPackageByPath(input string) *packages.Package { + for _, p := range sp.packages { + if p.ID == input { + return p + } + } + return nil +} + +func parseRawConnectorSchemaFromGoCode(ctx context.Context, moduleName string, filePath string, args *ConnectorGenerationArguments) (*RawConnectorSchema, error) { + var err error + namingStyle := StyleCamelCase + if args.Style != "" { + namingStyle, err = ParseOperationNamingStyle(args.Style) + if err != nil { + return nil, err + } + } + rawSchema := NewRawConnectorSchema() + + tempDirs := args.Directories + if len(args.Directories) == 0 { + // recursively walk directories if the user don't explicitly specify target folders + entries, err := os.ReadDir(filePath) + if err != nil { + return nil, fmt.Errorf("failed to read subdirectories of %s: %s", filePath, err) + } + for _, entry := range entries { + if !entry.IsDir() { + continue + } + 
tempDirs = append(tempDirs, entry.Name()) + } + } + var directories []string + for _, dir := range tempDirs { + for _, globPath := range []string{path.Join(filePath, dir, "*.go"), path.Join(filePath, dir, "**", "*.go")} { + goFiles, err := filepath.Glob(globPath) + if err != nil { + return nil, fmt.Errorf("failed to read subdirectories of %s/%s: %s", filePath, dir, err) + } + // cleanup types.generated.go files + fileCount := 0 + for _, fp := range goFiles { + if !strings.HasSuffix(fp, typeMethodsOutputFile) { + fileCount++ + continue + } + if err := os.Remove(fp); err != nil { + return nil, fmt.Errorf("failed to delete %s: %s", fp, err) + } + } + if fileCount > 0 { + directories = append(directories, dir) + break + } + } + } + + if len(directories) > 0 { + log.Info().Interface("directories", directories).Msgf("parsing connector schema...") + + var packageList []*packages.Package + fset := token.NewFileSet() + for _, folder := range directories { + _, parseCodeTask := trace.NewTask(ctx, fmt.Sprintf("parse_%s_code", folder)) + folderPath := path.Join(filePath, folder) + cfg := &packages.Config{ + Mode: packages.NeedSyntax | packages.NeedTypes, + Dir: folderPath, + Fset: fset, + } + pkgList, err := packages.Load(cfg, flag.Args()...) + parseCodeTask.End() + if err != nil { + return nil, err + } + packageList = append(packageList, pkgList...) 
+ } + + for i := range packageList { + parseSchemaCtx, parseSchemaTask := trace.NewTask(ctx, fmt.Sprintf("parse_schema_%s", packageList[i].ID)) + sp := &SchemaParser{ + context: parseSchemaCtx, + moduleName: moduleName, + packages: packageList, + packageIndex: i, + rawSchema: rawSchema, + namingStyle: namingStyle, + } + + err = sp.parseRawConnectorSchema(packageList[i].Types) + parseSchemaTask.End() + if err != nil { + return nil, err + } + } + } else { + log.Info().Msgf("no subdirectory in %s", filePath) + } + + if rawSchema.StateType != nil { + rawSchema.Imports[rawSchema.StateType.PackagePath] = true + } else { + pkgPathTypes, err := evalPackageTypesLocation(moduleName, filePath, args.ConnectorDir) + if err != nil { + return nil, err + } + rawSchema.StateType = &TypeInfo{ + Name: "State", + PackagePath: pkgPathTypes, + PackageName: "types", + } + rawSchema.Imports[rawSchema.StateType.PackagePath] = true + } + return rawSchema, nil +} + +// parse raw connector schema from Go code +func (sp *SchemaParser) parseRawConnectorSchema(pkg *types.Package) error { + for _, name := range pkg.Scope().Names() { + _, task := trace.NewTask(sp.context, fmt.Sprintf("parse_%s_schema_%s", sp.GetCurrentPackage().Name, name)) + err := sp.parsePackageScope(pkg, name) + task.End() + if err != nil { + return err + } + } + + return nil +} + +func (sp *SchemaParser) parsePackageScope(pkg *types.Package, name string) error { + switch obj := pkg.Scope().Lookup(name).(type) { + case *types.Func: + // only parse public functions + if !obj.Exported() { + return nil + } + opInfo := sp.parseOperationInfo(obj) + if opInfo == nil { + return nil + } + opInfo.PackageName = pkg.Name() + opInfo.PackagePath = pkg.Path() + var resultTuple *types.Tuple + var params *types.Tuple + switch sig := obj.Type().(type) { + case *types.Signature: + params = sig.Params() + resultTuple = sig.Results() + default: + return fmt.Errorf("expected function signature, got: %s", sig.String()) + } + + if params == nil || 
(params.Len() < 2 || params.Len() > 3) { + return fmt.Errorf("%s: expect 2 or 3 parameters only (ctx context.Context, state *types.State, arguments *[ArgumentType]), got %s", opInfo.OriginName, params) + } + + if resultTuple == nil || resultTuple.Len() != 2 { + return fmt.Errorf("%s: expect result tuple ([type], error), got %s", opInfo.OriginName, resultTuple) + } + + if sp.rawSchema.StateType == nil { + ty := sp.getNamedType(params.At(1).Type()) + if ty != nil { + so := ty.Obj() + if so != nil { + objPkg := so.Pkg() + if objPkg != nil { + sp.rawSchema.StateType = &TypeInfo{ + Name: so.Name(), + PackageName: objPkg.Name(), + PackagePath: objPkg.Path(), + } + } + } + } + } + + // parse arguments in the function if exists + // ignore 2 first parameters (context and state) + if params.Len() == 3 { + arg := params.At(2) + argumentInfo, err := sp.parseArgumentTypes(arg.Type(), &opInfo.Kind, []string{}) + if err != nil { + return err + } + opInfo.ArgumentsType = argumentInfo.Type + if opInfo.Kind == OperationFunction { + sp.rawSchema.setFunctionArgument(*argumentInfo) + } + // convert argument schema + for k, a := range argumentInfo.Fields { + if !a.Type.Embedded { + opInfo.Arguments[k] = schema.ArgumentInfo{ + Description: a.Description, + Type: a.Type.Schema.Encode(), + } + continue + } + + embeddedObject, ok := sp.rawSchema.Objects[a.Type.String()] + if ok { + // flatten embedded object fields to the parent object + for k, of := range embeddedObject.Fields { + opInfo.Arguments[k] = schema.ArgumentInfo{ + Type: of.Type.Schema.Encode(), + } + } + + } + } + } + + resultType, err := sp.parseType(nil, resultTuple.At(0).Type(), []string{}, false, nil) + if err != nil { + return err + } + opInfo.ResultType = resultType + + switch opInfo.Kind { + case OperationProcedure: + sp.rawSchema.Procedures = append(sp.rawSchema.Procedures, ProcedureInfo(*opInfo)) + case OperationFunction: + sp.rawSchema.Functions = append(sp.rawSchema.Functions, FunctionInfo(*opInfo)) + } + } + return 
nil +} + +func (sp *SchemaParser) getNamedType(ty types.Type) *types.Named { + switch t := ty.(type) { + case *types.Pointer: + return sp.getNamedType(t.Elem()) + case *types.Named: + return t + case *types.Slice: + return sp.getNamedType(t.Elem()) + case *types.Array: + return sp.getNamedType(t.Elem()) + default: + return nil + } +} + +func (sp *SchemaParser) parseArgumentTypes(ty types.Type, argumentFor *OperationKind, fieldPaths []string) (*ObjectInfo, error) { + + switch inferredType := ty.(type) { + case *types.Pointer: + return sp.parseArgumentTypes(inferredType.Elem(), argumentFor, fieldPaths) + case *types.Struct: + result := &ObjectInfo{ + Fields: map[string]ObjectField{}, + } + for i := 0; i < inferredType.NumFields(); i++ { + fieldVar := inferredType.Field(i) + fieldTag := inferredType.Tag(i) + fieldPackage := fieldVar.Pkg() + var typeInfo *TypeInfo + if fieldPackage != nil { + typeInfo = &TypeInfo{ + PackageName: fieldPackage.Name(), + PackagePath: fieldPackage.Path(), + } + } + typeInfo.Embedded = fieldVar.Embedded() + + fieldType, err := sp.parseType(typeInfo, fieldVar.Type(), append(fieldPaths, fieldVar.Name()), false, argumentFor) + if err != nil { + return nil, err + } + fieldName := getFieldNameOrTag(fieldVar.Name(), fieldTag) + if fieldType.TypeAST == nil { + fieldType.TypeAST = fieldVar.Type() + } + if !fieldType.IsScalar && argumentFor != nil && *argumentFor == OperationFunction { + object, ok := sp.rawSchema.Objects[fieldType.String()] + if ok { + sp.rawSchema.setFunctionArgument(object) + } + sp.rawSchema.setFunctionArgument(ObjectInfo{ + Type: fieldType, + }) + } + + result.Fields[fieldName] = ObjectField{ + Name: fieldVar.Name(), + Type: fieldType, + } + } + return result, nil + case *types.Named: + arguments, err := sp.parseArgumentTypes(inferredType.Obj().Type().Underlying(), argumentFor, append(fieldPaths, inferredType.Obj().Name())) + if err != nil { + return nil, err + } + + typeObj := inferredType.Obj() + typeInfo := &TypeInfo{ + 
Name: typeObj.Name(), + SchemaName: typeObj.Name(), + } + pkg := typeObj.Pkg() + if pkg != nil { + typeInfo.PackagePath = pkg.Path() + typeInfo.PackageName = pkg.Name() + } + arguments.Type = typeInfo + return arguments, nil + default: + return nil, fmt.Errorf("expected struct type, got %s", ty.String()) + } +} + +func (sp *SchemaParser) parseType(rootType *TypeInfo, ty types.Type, fieldPaths []string, skipNullable bool, argumentFor *OperationKind) (*TypeInfo, error) { + + switch inferredType := ty.(type) { + case *types.Pointer: + if skipNullable { + return sp.parseType(rootType, inferredType.Elem(), fieldPaths, false, argumentFor) + } + innerType, err := sp.parseType(rootType, inferredType.Elem(), fieldPaths, false, argumentFor) + if err != nil { + return nil, err + } + innerType.TypeAST = ty + innerType.TypeFragments = append([]string{"*"}, innerType.TypeFragments...) + innerType.Schema = schema.NewNullableType(innerType.Schema) + return innerType, nil + case *types.Struct: + isAnonymous := false + if rootType == nil { + rootType = &TypeInfo{} + } + + name := strings.Join(fieldPaths, "") + if rootType.Name == "" { + rootType.Name = name + isAnonymous = true + rootType.TypeFragments = append(rootType.TypeFragments, ty.String()) + } + if rootType.SchemaName == "" { + rootType.SchemaName = name + } + if rootType.TypeAST == nil { + rootType.TypeAST = ty + } + + if rootType.Schema == nil { + rootType.Schema = schema.NewNamedType(rootType.SchemaName) + } + objType := schema.ObjectType{ + Description: rootType.Description, + Fields: make(schema.ObjectTypeFields), + } + objFields := ObjectInfo{ + IsAnonymous: isAnonymous, + Type: &TypeInfo{ + Name: rootType.Name, + SchemaName: rootType.SchemaName, + PackagePath: rootType.PackagePath, + PackageName: rootType.PackageName, + TypeAST: inferredType, + }, + Fields: map[string]ObjectField{}, + } + // temporarily add the object type to raw schema to avoid infinite loop + sp.rawSchema.ObjectSchemas[rootType.SchemaName] = objType 
+ sp.rawSchema.Objects[rootType.String()] = objFields + + for i := 0; i < inferredType.NumFields(); i++ { + fieldVar := inferredType.Field(i) + fieldTag := inferredType.Tag(i) + + fieldType, err := sp.parseType(nil, fieldVar.Type(), append(fieldPaths, fieldVar.Name()), false, argumentFor) + if err != nil { + return nil, err + } + fieldType.Embedded = fieldVar.Embedded() + fieldType.TypeAST = fieldVar.Type() + fieldKey := getFieldNameOrTag(fieldVar.Name(), fieldTag) + if fieldType.Embedded { + embeddedObject, ok := sp.rawSchema.ObjectSchemas[fieldType.Name] + if ok { + // flatten embedded object fields to the parent object + for k, of := range embeddedObject.Fields { + objType.Fields[k] = of + } + } + } else { + objType.Fields[fieldKey] = schema.ObjectField{ + Type: fieldType.Schema.Encode(), + } + } + objFields.Fields[fieldKey] = ObjectField{ + Name: fieldVar.Name(), + Type: fieldType, + } + } + sp.rawSchema.ObjectSchemas[rootType.Name] = objType + sp.rawSchema.Objects[rootType.String()] = objFields + + return rootType, nil + case *types.Named: + + innerType := inferredType.Obj() + if innerType == nil { + return nil, fmt.Errorf("failed to parse named type: %s", inferredType.String()) + } + + innerPkg := innerType.Pkg() + typeInfo := &TypeInfo{ + Name: innerType.Name(), + SchemaName: innerType.Name(), + TypeAST: innerType.Type(), + Schema: schema.NewNamedType(innerType.Name()), + } + if rootType != nil { + typeInfo.Embedded = rootType.Embedded + } + if innerPkg != nil { + typeInfo.PackageName = innerPkg.Name() + typeInfo.PackagePath = innerPkg.Path() + typeInfo.TypeFragments = []string{innerType.Name()} + if _, ok := sp.rawSchema.Objects[typeInfo.String()]; ok { + return typeInfo, nil + } + + if err := sp.parseTypeInfoFromComments(typeInfo, innerType.Parent()); err != nil { + return nil, err + } + var scalarName ScalarName + scalarSchema := schema.NewScalarType() + + switch innerPkg.Path() { + case "time": + switch innerType.Name() { + case "Time": + scalarName = 
ScalarTimestampTZ + scalarSchema.Representation = schema.NewTypeRepresentationTimestampTZ().Encode() + case "Duration": + return nil, errors.New("unsupported type time.Duration. Create a scalar type wrapper with FromValue method to decode the any value") + } + case "encoding/json": + switch innerType.Name() { + case "RawMessage": + scalarName = ScalarRawJSON + scalarSchema.Representation = schema.NewTypeRepresentationJSON().Encode() + } + case "github.com/google/uuid": + switch innerType.Name() { + case "UUID": + scalarName = ScalarUUID + scalarSchema.Representation = schema.NewTypeRepresentationUUID().Encode() + } + case "github.com/hasura/ndc-sdk-go/scalar": + scalarName = ScalarName(innerType.Name()) + switch innerType.Name() { + case "Date": + scalarSchema.Representation = schema.NewTypeRepresentationDate().Encode() + case "BigInt": + scalarSchema.Representation = schema.NewTypeRepresentationBigInteger().Encode() + case "Bytes": + scalarSchema.Representation = schema.NewTypeRepresentationBytes().Encode() + case "URL": + scalarSchema.Representation = schema.NewTypeRepresentationString().Encode() + } + } + + if scalarName != "" { + typeInfo.IsScalar = true + typeInfo.Schema = schema.NewNamedType(string(scalarName)) + typeInfo.TypeAST = ty + sp.rawSchema.ScalarSchemas[string(scalarName)] = *scalarSchema + return typeInfo, nil + } + } else if innerType.Name() == "error" { + if argumentFor != nil { + return nil, fmt.Errorf("%s: native `error` interface isn't allowed in input arguments", strings.Join(fieldPaths, ".")) + } + typeInfo.IsScalar = true + typeInfo.SchemaName = string(ScalarJSON) + typeInfo.Schema = schema.NewNullableType(schema.NewNamedType(string(ScalarJSON))) + typeInfo.ScalarRepresentation = schema.NewTypeRepresentationJSON().Encode() + + if _, ok := sp.rawSchema.ScalarSchemas[typeInfo.SchemaName]; !ok { + sp.rawSchema.ScalarSchemas[typeInfo.SchemaName] = defaultScalarTypes[ScalarJSON] + } + return typeInfo, nil + } else { + return nil, fmt.Errorf("%s: 
unsupported type <%s>", strings.Join(fieldPaths, "."), innerType.Name()) + } + + if typeInfo.IsScalar { + sp.rawSchema.CustomScalars[typeInfo.Name] = typeInfo + scalarSchema := schema.NewScalarType() + if typeInfo.ScalarRepresentation != nil { + scalarSchema.Representation = typeInfo.ScalarRepresentation + } else { + // requires representation since NDC spec v0.1.2 + scalarSchema.Representation = schema.NewTypeRepresentationJSON().Encode() + } + sp.rawSchema.ScalarSchemas[typeInfo.SchemaName] = *scalarSchema + return typeInfo, nil + } + + if _, ok := sp.rawSchema.ObjectSchemas[typeInfo.Name]; ok { + // the object schema exists, rename to format _ + packagePath := strings.TrimPrefix(typeInfo.PackagePath, sp.moduleName) + typeInfo.SchemaName = fieldNameRegex.ReplaceAllString(strings.Join([]string{typeInfo.Name, packagePath}, ""), "_") + typeInfo.Schema = schema.NewNamedType(typeInfo.SchemaName) + } + return sp.parseType(typeInfo, innerType.Type().Underlying(), append(fieldPaths, innerType.Name()), false, argumentFor) + case *types.Basic: + var scalarName ScalarName + switch inferredType.Kind() { + case types.Bool: + scalarName = ScalarBoolean + sp.rawSchema.ScalarSchemas[string(scalarName)] = defaultScalarTypes[scalarName] + case types.Int8, types.Uint8: + scalarName = ScalarInt8 + sp.rawSchema.ScalarSchemas[string(scalarName)] = defaultScalarTypes[scalarName] + case types.Int16, types.Uint16: + scalarName = ScalarInt16 + sp.rawSchema.ScalarSchemas[string(scalarName)] = defaultScalarTypes[scalarName] + case types.Int, types.Int32, types.Uint, types.Uint32: + scalarName = ScalarInt32 + sp.rawSchema.ScalarSchemas[string(scalarName)] = defaultScalarTypes[scalarName] + case types.Int64, types.Uint64: + scalarName = ScalarInt64 + sp.rawSchema.ScalarSchemas[string(scalarName)] = defaultScalarTypes[scalarName] + case types.Float32: + scalarName = ScalarFloat32 + sp.rawSchema.ScalarSchemas[string(scalarName)] = defaultScalarTypes[scalarName] + case types.Float64: + 
scalarName = ScalarFloat64 + sp.rawSchema.ScalarSchemas[string(scalarName)] = defaultScalarTypes[scalarName] + case types.String: + scalarName = ScalarString + sp.rawSchema.ScalarSchemas[string(scalarName)] = defaultScalarTypes[scalarName] + default: + return nil, fmt.Errorf("%s: unsupported scalar type <%s>", strings.Join(fieldPaths, "."), inferredType.String()) + } + if rootType == nil { + rootType = &TypeInfo{ + Name: inferredType.Name(), + SchemaName: inferredType.Name(), + TypeFragments: []string{inferredType.Name()}, + TypeAST: ty, + } + } + + rootType.Schema = schema.NewNamedType(string(scalarName)) + rootType.IsScalar = true + + return rootType, nil + case *types.Array: + innerType, err := sp.parseType(nil, inferredType.Elem(), fieldPaths, false, argumentFor) + if err != nil { + return nil, err + } + innerType.TypeFragments = append([]string{"[]"}, innerType.TypeFragments...) + innerType.Schema = schema.NewArrayType(innerType.Schema) + return innerType, nil + case *types.Slice: + innerType, err := sp.parseType(nil, inferredType.Elem(), fieldPaths, false, argumentFor) + if err != nil { + return nil, err + } + + innerType.TypeFragments = append([]string{"[]"}, innerType.TypeFragments...) 
+ innerType.Schema = schema.NewArrayType(innerType.Schema) + return innerType, nil + case *types.Map, *types.Interface: + scalarName := ScalarJSON + if rootType == nil { + rootType = &TypeInfo{ + Name: inferredType.String(), + SchemaName: string(scalarName), + TypeAST: ty, + } + } else { + rootType.PackagePath = "" + } + + if _, ok := sp.rawSchema.ScalarSchemas[string(scalarName)]; !ok { + sp.rawSchema.ScalarSchemas[string(scalarName)] = defaultScalarTypes[scalarName] + } + rootType.TypeFragments = append(rootType.TypeFragments, inferredType.String()) + rootType.Schema = schema.NewNamedType(string(scalarName)) + rootType.IsScalar = true + + return rootType, nil + default: + return nil, fmt.Errorf("unsupported type: %s", ty.String()) + } +} + +func (sp *SchemaParser) parseTypeInfoFromComments(typeInfo *TypeInfo, scope *types.Scope) error { + comments := make([]string, 0) + commentGroup := findCommentsFromPos(sp.FindPackageByPath(typeInfo.PackagePath), scope, typeInfo.Name) + if commentGroup != nil { + for i, line := range commentGroup.List { + text := strings.TrimSpace(strings.TrimLeft(line.Text, "/")) + if text == "" { + continue + } + if i == 0 { + text = strings.TrimPrefix(text, fmt.Sprintf("%s ", typeInfo.Name)) + } + + enumMatches := ndcEnumCommentRegex.FindStringSubmatch(text) + + if len(enumMatches) == 2 { + typeInfo.IsScalar = true + rawEnumItems := strings.Split(enumMatches[1], ",") + var enums []string + for _, item := range rawEnumItems { + trimmed := strings.TrimSpace(item) + if trimmed != "" { + enums = append(enums, trimmed) + } + } + if len(enums) == 0 { + return fmt.Errorf("require enum values in the comment of %s", typeInfo.Name) + } + typeInfo.ScalarRepresentation = schema.NewTypeRepresentationEnum(enums).Encode() + continue + } + + matches := ndcScalarCommentRegex.FindStringSubmatch(text) + matchesLen := len(matches) + if matchesLen > 1 { + typeInfo.IsScalar = true + if matchesLen > 3 && matches[3] != "" { + typeInfo.SchemaName = matches[2] + 
typeInfo.Schema = schema.NewNamedType(matches[2]) + typeRep, err := schema.ParseTypeRepresentationType(strings.TrimSpace(matches[3])) + if err != nil { + return fmt.Errorf("failed to parse type representation of scalar %s: %s", typeInfo.Name, err) + } + if typeRep == schema.TypeRepresentationTypeEnum { + return errors.New("use @enum tag with values instead") + } + typeInfo.ScalarRepresentation = schema.TypeRepresentation{ + "type": typeRep, + } + } else if matchesLen > 2 && matches[2] != "" { + // if the second string is a type representation, use it as a TypeRepresentation instead + // e.g @scalar string + typeRep, err := schema.ParseTypeRepresentationType(matches[2]) + if err == nil { + if typeRep == schema.TypeRepresentationTypeEnum { + return errors.New("use @enum tag with values instead") + } + typeInfo.ScalarRepresentation = schema.TypeRepresentation{ + "type": typeRep, + } + continue + } + + typeInfo.SchemaName = matches[2] + typeInfo.Schema = schema.NewNamedType(matches[2]) + } + continue + } + + comments = append(comments, text) + } + } + + if !typeInfo.IsScalar { + // fallback to parse scalar from type name with Scalar prefix + matches := ndcScalarNameRegex.FindStringSubmatch(typeInfo.Name) + if len(matches) > 1 { + typeInfo.IsScalar = true + typeInfo.SchemaName = matches[1] + typeInfo.Schema = schema.NewNamedType(matches[1]) + } + } + + desc := strings.Join(comments, " ") + if desc != "" { + typeInfo.Description = &desc + } + + return nil +} + +// format operation name with style +func (sp SchemaParser) formatOperationName(name string) string { + switch sp.namingStyle { + case StyleSnakeCase: + return strcase.ToSnake(name) + default: + return strcase.ToLowerCamel(name) + } +} + +func (sp *SchemaParser) parseOperationInfo(fn *types.Func) *OperationInfo { + functionName := fn.Name() + result := OperationInfo{ + OriginName: functionName, + Arguments: make(map[string]schema.ArgumentInfo), + } + + var descriptions []string + commentGroup := 
findCommentsFromPos(sp.GetCurrentPackage(), fn.Scope(), functionName) + if commentGroup != nil { + for i, comment := range commentGroup.List { + text := strings.TrimSpace(strings.TrimLeft(comment.Text, "/")) + + // trim the function name in the first line if exists + if i == 0 { + text = strings.TrimPrefix(text, fmt.Sprintf("%s ", functionName)) + } + matches := ndcOperationCommentRegex.FindStringSubmatch(text) + matchesLen := len(matches) + if matchesLen > 1 { + switch matches[1] { + case strings.ToLower(string(OperationFunction)): + result.Kind = OperationFunction + case strings.ToLower(string(OperationProcedure)): + result.Kind = OperationProcedure + default: + log.Debug().Msgf("unsupported operation kind: %s", matches) + } + + if matchesLen > 3 && strings.TrimSpace(matches[3]) != "" { + result.Name = strings.TrimSpace(matches[3]) + } else { + result.Name = sp.formatOperationName(functionName) + } + } else { + descriptions = append(descriptions, text) + } + } + } + + // try to parse function with following prefixes: + // - FunctionXxx as a query function + // - ProcedureXxx as a mutation procedure + if result.Kind == "" { + operationNameResults := ndcOperationNameRegex.FindStringSubmatch(functionName) + if len(operationNameResults) < 3 { + return nil + } + result.Kind = OperationKind(operationNameResults[1]) + result.Name = sp.formatOperationName(operationNameResults[2]) + } + + desc := strings.TrimSpace(strings.Join(descriptions, " ")) + if desc != "" { + result.Description = &desc + } + + return &result +} + +func findCommentsFromPos(pkg *packages.Package, scope *types.Scope, name string) *ast.CommentGroup { + if pkg == nil { + return nil + } + + for _, f := range pkg.Syntax { + for _, cg := range f.Comments { + if len(cg.List) == 0 { + continue + } + exp := regexp.MustCompile(fmt.Sprintf(`^//\s+%s(\s|$)`, name)) + if !exp.MatchString(cg.List[0].Text) { + continue + } + if _, obj := scope.LookupParent(name, cg.Pos()); obj != nil { + return cg + } + } + } + 
return nil +} + +// get field name by json tag +// return the struct field name if not exist +func getFieldNameOrTag(name string, tag string) string { + if tag == "" { + return name + } + tags, err := structtag.Parse(tag) + if err != nil { + log.Warn().Err(err).Msgf("failed to parse tag of struct field: %s", name) + return name + } + + jsonTag, err := tags.Get("json") + if err != nil { + log.Warn().Err(err).Msgf("json tag does not exist in struct field: %s", name) + return name + } + + return jsonTag.Name +} + +func findAndReplaceNativeScalarPackage(input string) (string, string, bool) { + for alias, pkg := range nativeScalarPackages { + if pkg.Pattern.MatchString(input) { + return pkg.PackageName, strings.ReplaceAll(input, pkg.PackageName, alias), true + } + } + return "", "", false +} + +func evalPackageTypesLocation(moduleName string, filePath string, connectorDir string) (string, error) { + matches, err := filepath.Glob(path.Join(filePath, "types", "*.go")) + if err == nil && len(matches) > 0 { + return fmt.Sprintf("%s/types", moduleName), nil + } + + if connectorDir != "" && !strings.HasPrefix(".", connectorDir) { + matches, err = filepath.Glob(path.Join(filePath, connectorDir, "types", "*.go")) + if err == nil && len(matches) > 0 { + return fmt.Sprintf("%s/%s/types", moduleName, connectorDir), nil + } + } + return "", fmt.Errorf("the `types` package where the State struct is in must be placed in root or connector directory, %s", err) +} diff --git a/cmd/hasura-ndc-go/command/internal/schema_template.go b/cmd/hasura-ndc-go/command/internal/schema_template.go new file mode 100644 index 0000000..45e5a4f --- /dev/null +++ b/cmd/hasura-ndc-go/command/internal/schema_template.go @@ -0,0 +1,271 @@ +package internal + +import ( + "fmt" + "strings" + + "github.com/hasura/ndc-sdk-go/schema" + "github.com/hasura/ndc-sdk-go/utils" +) + +// WriteGoSchema writes the schema as Go codes +func (rcs RawConnectorSchema) WriteGoSchema(packageName string) (string, error) { + 
builder := strings.Builder{} + writeFileHeader(&builder, packageName) + builder.WriteString(` +import ( + "github.com/hasura/ndc-sdk-go/schema" +) + + +func toPtr[V any](value V) *V { + return &value +} + +// GetConnectorSchema gets the generated connector schema +func GetConnectorSchema() *schema.SchemaResponse { + return &schema.SchemaResponse{ + Collections: []schema.CollectionInfo{}, + ObjectTypes: schema.SchemaResponseObjectTypes{`) + + objectKeys := utils.GetSortedKeys(rcs.ObjectSchemas) + for _, key := range objectKeys { + objectType := rcs.ObjectSchemas[key] + if err := rcs.writeObjectType(&builder, key, objectType); err != nil { + return "", err + } + } + builder.WriteString(` + }, + Functions: []schema.FunctionInfo{`) + for _, fn := range rcs.Functions { + fnSchema := fn.Schema() + if err := rcs.writeOperationInfo(&builder, fnSchema.Name, fnSchema.Description, fnSchema.Arguments, fnSchema.ResultType); err != nil { + return "", err + } + } + + builder.WriteString(` + }, + Procedures: []schema.ProcedureInfo{`) + for _, proc := range rcs.Procedures { + procSchema := proc.Schema() + if err := rcs.writeOperationInfo(&builder, procSchema.Name, procSchema.Description, procSchema.Arguments, procSchema.ResultType); err != nil { + return "", err + } + } + + builder.WriteString(` + }, + ScalarTypes: schema.SchemaResponseScalarTypes{`) + scalarKeys := utils.GetSortedKeys(rcs.ScalarSchemas) + for _, key := range scalarKeys { + scalarType := rcs.ScalarSchemas[key] + if err := rcs.writeScalarType(&builder, key, scalarType); err != nil { + return "", err + } + } + + builder.WriteString("\n },\n }\n}") + return builder.String(), nil +} + +func (rcs RawConnectorSchema) writeOperationInfo(builder *strings.Builder, name string, desc *string, arguments map[string]schema.ArgumentInfo, resultType schema.Type) error { + baseIndent := 6 + builder.WriteString(` + { + Name: "`) + builder.WriteString(name) + builder.WriteString("\",\n") + rcs.writeDescription(builder, desc) + 
writeIndent(builder, baseIndent+2) + + builder.WriteString("ResultType: ") + retType, err := rcs.writeType(resultType, 0) + if err != nil { + return fmt.Errorf("failed to render function %s: %s", name, err) + } + builder.WriteString(retType) + builder.WriteString(",\n") + writeIndent(builder, baseIndent+2) + builder.WriteString("Arguments: map[string]schema.ArgumentInfo{") + argumentKeys := utils.GetSortedKeys(arguments) + for _, argKey := range argumentKeys { + argument := arguments[argKey] + builder.WriteRune('\n') + writeIndent(builder, baseIndent+4) + builder.WriteRune('"') + builder.WriteString(argKey) + builder.WriteString("\": {\n") + rcs.writeDescription(builder, argument.Description) + writeIndent(builder, baseIndent+6) + builder.WriteString("Type: ") + + argType, err := rcs.writeType(argument.Type, 0) + if err != nil { + return fmt.Errorf("failed to render argument %s of function %s: %s", argKey, name, err) + } + builder.WriteString(argType) + builder.WriteString(",\n") + writeIndent(builder, baseIndent+4) + builder.WriteString("},") + } + builder.WriteRune('\n') + writeIndent(builder, baseIndent+2) + builder.WriteString("},\n") + writeIndent(builder, baseIndent) + builder.WriteString("},") + + return nil +} + +func (rcs RawConnectorSchema) writeScalarType(builder *strings.Builder, key string, scalarType schema.ScalarType) error { + baseIndent := 6 + builder.WriteRune('\n') + writeIndent(builder, baseIndent) + builder.WriteRune('"') + + builder.WriteString(key) + builder.WriteString(`": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{},`) + + if scalarType.Representation != nil { + builder.WriteRune('\n') + writeIndent(builder, baseIndent+2) + builder.WriteString("Representation: schema.NewTypeRepresentation") + rep, err := scalarType.Representation.InterfaceT() + switch t := rep.(type) { + case *schema.TypeRepresentationBoolean: + 
builder.WriteString("Boolean()") + case *schema.TypeRepresentationBigDecimal: + builder.WriteString("BigDecimal()") + case *schema.TypeRepresentationInt8: + builder.WriteString("Int8()") + case *schema.TypeRepresentationInt16: + builder.WriteString("Int16()") + case *schema.TypeRepresentationInt32: + builder.WriteString("Int32()") + case *schema.TypeRepresentationInt64: + builder.WriteString("Int64()") + case *schema.TypeRepresentationBigInteger: + builder.WriteString("BigInteger()") + case *schema.TypeRepresentationBytes: + builder.WriteString("Bytes()") + case *schema.TypeRepresentationDate: + builder.WriteString("Date()") + case *schema.TypeRepresentationFloat32: + builder.WriteString("Float32()") + case *schema.TypeRepresentationFloat64: + builder.WriteString("Float64()") + case *schema.TypeRepresentationJSON: + builder.WriteString("JSON()") + case *schema.TypeRepresentationString: + builder.WriteString("String()") + case *schema.TypeRepresentationTimestamp: + builder.WriteString("Timestamp()") + case *schema.TypeRepresentationTimestampTZ: + builder.WriteString("TimestampTZ()") + case *schema.TypeRepresentationUUID: + builder.WriteString("UUID()") + case *schema.TypeRepresentationGeography: + builder.WriteString("Geography()") + case *schema.TypeRepresentationGeometry: + builder.WriteString("Geometry()") + case *schema.TypeRepresentationEnum: + builder.WriteString("Enum([]string{") + for i, enum := range t.OneOf { + if i > 0 { + builder.WriteString(", ") + } + builder.WriteRune('"') + builder.WriteString(enum) + builder.WriteRune('"') + } + builder.WriteString("})") + default: + return err + } + } + builder.WriteString(".Encode(),") + builder.WriteString("\n },") + return nil +} + +func (rcs RawConnectorSchema) writeDescription(builder *strings.Builder, description *string) { + if description != nil { + builder.WriteString(` Description: toPtr("`) + builder.WriteString(*description) + builder.WriteString("\"),\n") + } +} + +func (rcs RawConnectorSchema) 
writeObjectType(builder *strings.Builder, key string, objectType schema.ObjectType) error { + baseIndent := 6 + builder.WriteRune('\n') + writeIndent(builder, baseIndent) + builder.WriteRune('"') + builder.WriteString(key) + builder.WriteString("\": schema.ObjectType{\n") + rcs.writeDescription(builder, objectType.Description) + builder.WriteString(strings.Repeat(" ", baseIndent)) + builder.WriteString(" Fields: schema.ObjectTypeFields{\n") + + fieldKeys := utils.GetSortedKeys(objectType.Fields) + for _, fieldKey := range fieldKeys { + field := objectType.Fields[fieldKey] + writeIndent(builder, baseIndent+4) + builder.WriteRune('"') + builder.WriteString(fieldKey) + builder.WriteString("\": schema.ObjectField{\n") + rcs.writeDescription(builder, field.Description) + + ft, err := rcs.writeType(field.Type, 0) + if err != nil { + return fmt.Errorf("%s: %s", key, err) + } + writeIndent(builder, baseIndent+6) + builder.WriteString("Type: ") + builder.WriteString(ft) + builder.WriteString(",\n") + writeIndent(builder, baseIndent+4) + builder.WriteString("},\n") + } + writeIndent(builder, baseIndent+2) + builder.WriteString("},\n") + writeIndent(builder, baseIndent) + builder.WriteString("},") + + return nil +} + +func (rcs RawConnectorSchema) writeType(schemaType schema.Type, depth uint) (string, error) { + ty, err := schemaType.InterfaceT() + switch t := ty.(type) { + case *schema.ArrayType: + nested, err := rcs.writeType(t.ElementType, depth+1) + if err != nil { + return "", err + } + if depth == 0 { + return fmt.Sprintf("schema.NewArrayType(%s).Encode()", nested), nil + } + return fmt.Sprintf("schema.NewArrayType(%s)", nested), nil + case *schema.NullableType: + nested, err := rcs.writeType(t.UnderlyingType, depth+1) + if err != nil { + return "", err + } + if depth == 0 { + return fmt.Sprintf("schema.NewNullableType(%s).Encode()", nested), nil + } + return fmt.Sprintf("schema.NewNullableType(%s)", nested), nil + case *schema.NamedType: + if depth == 0 { + return 
fmt.Sprintf(`schema.NewNamedType("%s").Encode()`, t.Name), nil + } + return fmt.Sprintf(`schema.NewNamedType("%s")`, t.Name), nil + default: + return "", fmt.Errorf("invalid schema type: %s", err) + } +} diff --git a/cmd/hasura-ndc-go/command/internal/template.go b/cmd/hasura-ndc-go/command/internal/template.go new file mode 100644 index 0000000..c94d1c0 --- /dev/null +++ b/cmd/hasura-ndc-go/command/internal/template.go @@ -0,0 +1,47 @@ +package internal + +import ( + _ "embed" + "fmt" + "strings" + "text/template" + + "github.com/iancoleman/strcase" +) + +const ( + connectorOutputFile = "connector.generated.go" + schemaOutputJSONFile = "schema.generated.json" + schemaOutputGoFile = "schema.generated.go" + typeMethodsOutputFile = "types.generated.go" +) + +//go:embed templates/connector/connector.go.tmpl +var connectorTemplateStr string +var connectorTemplate *template.Template + +func init() { + var err error + connectorTemplate, err = template.New(connectorOutputFile).Parse(connectorTemplateStr) + if err != nil { + panic(fmt.Errorf("failed to parse connector template: %s", err)) + } + + strcase.ConfigureAcronym("API", "Api") + strcase.ConfigureAcronym("REST", "Rest") + strcase.ConfigureAcronym("HTTP", "Http") + strcase.ConfigureAcronym("SQL", "sql") +} + +func writeFileHeader(builder *strings.Builder, packageName string) { + _, _ = builder.WriteString(`// Code generated by github.com/hasura/ndc-sdk-go/cmd/hasura-ndc-go, DO NOT EDIT. 
+package `) + _, _ = builder.WriteString(packageName) + _, _ = builder.WriteRune('\n') +} + +func writeIndent(builder *strings.Builder, num int) { + for i := 0; i < num; i++ { + _, _ = builder.WriteRune(' ') + } +} diff --git a/cmd/hasura-ndc-go/command/internal/templates/connector/connector.go.tmpl b/cmd/hasura-ndc-go/command/internal/templates/connector/connector.go.tmpl index d3a246d..6c8843a 100644 --- a/cmd/hasura-ndc-go/command/internal/templates/connector/connector.go.tmpl +++ b/cmd/hasura-ndc-go/command/internal/templates/connector/connector.go.tmpl @@ -1,5 +1,5 @@ -// Code generated by github.com/hasura/ndc-sdk-go/codegen, DO NOT EDIT. -package main +// Code generated by github.com/hasura/ndc-sdk-go/cmd/hasura-ndc-go, DO NOT EDIT. +package {{.PackageName}} import ( "context" @@ -8,7 +8,6 @@ import ( "log/slog" {{.Imports}} - "github.com/hasura/ndc-sdk-go/connector" "github.com/hasura/ndc-sdk-go/schema" "github.com/hasura/ndc-sdk-go/utils" "go.opentelemetry.io/otel/attribute" @@ -16,10 +15,34 @@ import ( "go.opentelemetry.io/otel/trace" ) -//go:embed schema.generated.json -var rawSchema []byte var schemaResponse *schema.RawSchemaResponse +var connectorQueryHandlers = []ConnectorQueryHandler{{.QueryHandlers}} +var connectorMutationHandlers = []ConnectorMutationHandler{{.MutationHandlers}} + +// ConnectorQueryHandler abstracts the connector query handler +type ConnectorQueryHandler interface { + Query(ctx context.Context, state *{{.StateArgument}}, request *schema.QueryRequest, arguments map[string]any) (*schema.RowSet, error) +} + +// ConnectorMutationHandler abstracts the connector mutation handler +type ConnectorMutationHandler interface { + Mutation(ctx context.Context, state *{{.StateArgument}}, request *schema.MutationOperation) (schema.MutationOperationResults, error) +} +{{ if (eq .SchemaFormat "go")}} +func init() { + rawSchema, err := json.Marshal(GetConnectorSchema()) + if err != nil { + panic(err) + } + schemaResponse, err = 
schema.NewRawSchemaResponse(rawSchema) + if err != nil { + panic(err) + } +} +{{ else }} +//go:embed schema.generated.json +var rawSchema []byte func init() { var err error schemaResponse, err = schema.NewRawSchemaResponse(rawSchema) @@ -27,29 +50,29 @@ func init() { panic(err) } } +{{ end }} // GetSchema gets the connector's schema. -func (c *Connector) GetSchema(ctx context.Context, configuration *types.Configuration, _ *types.State) (schema.SchemaResponseMarshaler, error) { +func (c *Connector) GetSchema(ctx context.Context, configuration *types.Configuration, _ *{{.StateArgument}}) (schema.SchemaResponseMarshaler, error) { return schemaResponse, nil } // Query executes a query. -func (c *Connector) Query(ctx context.Context, configuration *types.Configuration, state *types.State, request *schema.QueryRequest) (schema.QueryResponse, error) { - valueField, err := utils.EvalFunctionSelectionFieldValue(request) - if err != nil { - return nil, schema.UnprocessableContentError(err.Error(), nil) - } +func (c *Connector) Query(ctx context.Context, configuration *types.Configuration, state *{{.StateArgument}}, request *schema.QueryRequest) (schema.QueryResponse, error) { + if len(connectorQueryHandlers) == 0 { + return nil, schema.UnprocessableContentError(fmt.Sprintf("unsupported query: %s", request.Collection), nil) + } - span := trace.SpanFromContext(ctx) - requestVars := request.Variables + span := trace.SpanFromContext(ctx) + requestVars := request.Variables varsLength := len(requestVars) - if varsLength == 0 { - requestVars = []schema.QueryRequestVariablesElem{make(schema.QueryRequestVariablesElem)} - varsLength = 1 - } + if varsLength == 0 { + requestVars = []schema.QueryRequestVariablesElem{make(schema.QueryRequestVariablesElem)} + varsLength = 1 + } - rowSets := make([]schema.RowSet, varsLength) - for i, requestVar := range requestVars { + rowSets := make([]schema.RowSet, varsLength) + for i, requestVar := range requestVars { childSpan := span childContext := 
ctx if varsLength > 1 { @@ -57,36 +80,55 @@ func (c *Connector) Query(ctx context.Context, configuration *types.Configuratio defer childSpan.End() } - result, err := execQuery(childContext, state, request, valueField, requestVar, childSpan) - if err != nil { + result, err := c.execQuery(childContext, state, request, requestVar) + if err != nil { if varsLength > 1 { childSpan.SetStatus(codes.Error, err.Error()) } - return nil, err - } - rowSets[i] = schema.RowSet{ - Aggregates: schema.RowSetAggregates{}, - Rows: []map[string]any{ - { - "__value": result, - }, - }, - } + return nil, err + } + rowSets[i] = *result + if varsLength > 1 { childSpan.End() } - } + } + + return rowSets, nil +} + +func (c *Connector) execQuery(ctx context.Context, state *{{.StateArgument}}, request *schema.QueryRequest, variables map[string]any) (*schema.RowSet, error) { + rawArgs, err := utils.ResolveArgumentVariables(request.Arguments, variables) + if err != nil { + return nil, schema.UnprocessableContentError("failed to resolve argument variables", map[string]any{ + "cause": err.Error(), + }) + } - return rowSets, nil + for _, handler := range connectorQueryHandlers { + result, err := handler.Query(ctx, state, request, rawArgs) + if err == nil { + return result, nil + } + + if err != utils.ErrHandlerNotfound { + return nil, err + } + } + return nil, schema.UnprocessableContentError(fmt.Sprintf("unsupported query: %s", request.Collection), nil) } // Mutation executes a mutation. 
-func (c *Connector) Mutation(ctx context.Context, configuration *types.Configuration, state *types.State, request *schema.MutationRequest) (*schema.MutationResponse, error) { - operationLen := len(request.Operations) - operationResults := make([]schema.MutationOperationResults, operationLen) - span := trace.SpanFromContext(ctx) +func (c *Connector) Mutation(ctx context.Context, configuration *types.Configuration, state *{{.StateArgument}}, request *schema.MutationRequest) (*schema.MutationResponse, error) { + if len(connectorMutationHandlers) == 0 { + return nil, schema.UnprocessableContentError("unsupported mutation", nil) + } + + operationLen := len(request.Operations) + operationResults := make([]schema.MutationOperationResults, operationLen) + span := trace.SpanFromContext(ctx) - for i, operation := range request.Operations { + for i, operation := range request.Operations { childSpan := span childContext := ctx if operationLen > 1 { @@ -98,59 +140,45 @@ func (c *Connector) Mutation(ctx context.Context, configuration *types.Configura attribute.String("operation.name", string(operation.Name)), ) - switch operation.Type { - case schema.MutationOperationProcedure: - result, err := execProcedure(childContext, state, &operation, childSpan) - if err != nil { + switch operation.Type { + case schema.MutationOperationProcedure: + result, err := c.execProcedure(childContext, state, &operation) + if err != nil { if operationLen > 1 { childSpan.SetStatus(codes.Error, err.Error()) } - return nil, err - } - operationResults[i] = result + return nil, err + } + operationResults[i] = result if operationLen > 1 { childSpan.End() } - default: - return nil, schema.UnprocessableContentError(fmt.Sprintf("invalid operation type: %s", operation.Type), nil) - } - } + default: + return nil, schema.UnprocessableContentError(fmt.Sprintf("invalid operation type: %s", operation.Type), nil) + } + } - return &schema.MutationResponse{ - OperationResults: operationResults, - }, nil + return 
&schema.MutationResponse{ + OperationResults: operationResults, + }, nil } -func execQuery(ctx context.Context, state *types.State, request *schema.QueryRequest, queryFields schema.NestedField, variables map[string]any, span trace.Span) (any, error) { - logger := connector.GetLogger(ctx) - connector_addSpanEvent(span, logger, "validate_request", map[string]any{ - "variables": variables, - }) - switch request.Collection { -{{.Queries}} - default: - return nil, schema.UnprocessableContentError(fmt.Sprintf("unsupported query: %s", request.Collection), nil) - } -} +func (c *Connector) execProcedure(ctx context.Context, state *{{.StateArgument}}, operation *schema.MutationOperation) (schema.MutationOperationResults, error) { + for _, handler := range connectorMutationHandlers { + result, err := handler.Mutation(ctx, state, operation) + if err == nil { + return result, nil + } + if err != utils.ErrHandlerNotfound { + return nil, err + } + } -func execProcedure(ctx context.Context, state *types.State, operation *schema.MutationOperation, span trace.Span) (schema.MutationOperationResults, error) { - logger := connector.GetLogger(ctx) - connector_addSpanEvent(span, logger, "validate_request", map[string]any{ - "operations_name": operation.Name, - }) - switch operation.Name { -{{.Procedures}} - default: - return nil, schema.UnprocessableContentError(fmt.Sprintf("unsupported procedure operation: %s", operation.Name), nil) - } + return nil, schema.UnprocessableContentError(fmt.Sprintf("unsupported procedure operation: %s", operation.Name), nil) } func connector_addSpanEvent(span trace.Span, logger *slog.Logger, name string, data map[string]any, options ...trace.EventOption) { logger.Debug(name, slog.Any("data", data)) - attrs := utils.DebugJSONAttributes(data, connector_isDebug(logger)) + attrs := utils.DebugJSONAttributes(data, utils.IsDebug(logger)) span.AddEvent(name, append(options, trace.WithAttributes(attrs...))...) 
-} - -func connector_isDebug(logger *slog.Logger) bool { - return logger.Enabled(context.TODO(), slog.LevelDebug) } \ No newline at end of file diff --git a/cmd/hasura-ndc-go/command/internal/testdata/.gitignore b/cmd/hasura-ndc-go/command/internal/testdata/.gitignore index c083a74..11f2f9a 100644 --- a/cmd/hasura-ndc-go/command/internal/testdata/.gitignore +++ b/cmd/hasura-ndc-go/command/internal/testdata/.gitignore @@ -1,4 +1,5 @@ **/testdata **/dist/ *.generated.go -schema.generated.json \ No newline at end of file +schema.generated.json +schema.generated.go \ No newline at end of file diff --git a/cmd/hasura-ndc-go/command/internal/testdata/basic/expected/connector-go.go.tmpl b/cmd/hasura-ndc-go/command/internal/testdata/basic/expected/connector-go.go.tmpl new file mode 100644 index 0000000..75d9659 --- /dev/null +++ b/cmd/hasura-ndc-go/command/internal/testdata/basic/expected/connector-go.go.tmpl @@ -0,0 +1,176 @@ +// Code generated by github.com/hasura/ndc-sdk-go/cmd/hasura-ndc-go, DO NOT EDIT. 
+package main + +import ( + "context" + _ "embed" + "fmt" + "log/slog" + + "encoding/json" +"github.com/hasura/ndc-codegen-test/functions" +"github.com/hasura/ndc-codegen-test/types" + "github.com/hasura/ndc-sdk-go/schema" + "github.com/hasura/ndc-sdk-go/utils" + "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/codes" + "go.opentelemetry.io/otel/trace" +) + +var schemaResponse *schema.RawSchemaResponse +var connectorQueryHandlers = []ConnectorQueryHandler{functions.DataConnectorHandler{}} +var connectorMutationHandlers = []ConnectorMutationHandler{functions.DataConnectorHandler{}} + +// ConnectorQueryHandler abstracts the connector query handler +type ConnectorQueryHandler interface { + Query(ctx context.Context, state *types.State, request *schema.QueryRequest, arguments map[string]any) (*schema.RowSet, error) +} + +// ConnectorMutationHandler abstracts the connector mutation handler +type ConnectorMutationHandler interface { + Mutation(ctx context.Context, state *types.State, request *schema.MutationOperation) (schema.MutationOperationResults, error) +} + + +func init() { + rawSchema, err := json.Marshal(GetConnectorSchema()) + if err != nil { + panic(err) + } + schemaResponse, err = schema.NewRawSchemaResponse(rawSchema) + if err != nil { + panic(err) + } +} + + +// GetSchema gets the connector's schema. +func (c *Connector) GetSchema(ctx context.Context, configuration *types.Configuration, _ *types.State) (schema.SchemaResponseMarshaler, error) { + return schemaResponse, nil +} + +// Query executes a query. 
+func (c *Connector) Query(ctx context.Context, configuration *types.Configuration, state *types.State, request *schema.QueryRequest) (schema.QueryResponse, error) { + if len(connectorQueryHandlers) == 0 { + return nil, schema.UnprocessableContentError(fmt.Sprintf("unsupported query: %s", request.Collection), nil) + } + + span := trace.SpanFromContext(ctx) + requestVars := request.Variables + varsLength := len(requestVars) + if varsLength == 0 { + requestVars = []schema.QueryRequestVariablesElem{make(schema.QueryRequestVariablesElem)} + varsLength = 1 + } + + rowSets := make([]schema.RowSet, varsLength) + for i, requestVar := range requestVars { + childSpan := span + childContext := ctx + if varsLength > 1 { + childContext, childSpan = state.Tracer.Start(ctx, fmt.Sprintf("execute_function_%d", i)) + defer childSpan.End() + } + + result, err := c.execQuery(childContext, state, request, requestVar) + if err != nil { + if varsLength > 1 { + childSpan.SetStatus(codes.Error, err.Error()) + } + return nil, err + } + rowSets[i] = *result + + if varsLength > 1 { + childSpan.End() + } + } + + return rowSets, nil +} + +func (c *Connector) execQuery(ctx context.Context, state *types.State, request *schema.QueryRequest, variables map[string]any) (*schema.RowSet, error) { + rawArgs, err := utils.ResolveArgumentVariables(request.Arguments, variables) + if err != nil { + return nil, schema.UnprocessableContentError("failed to resolve argument variables", map[string]any{ + "cause": err.Error(), + }) + } + + for _, handler := range connectorQueryHandlers { + result, err := handler.Query(ctx, state, request, rawArgs) + if err == nil { + return result, nil + } + + if err != utils.ErrHandlerNotfound { + return nil, err + } + } + return nil, schema.UnprocessableContentError(fmt.Sprintf("unsupported query: %s", request.Collection), nil) +} + +// Mutation executes a mutation. 
+func (c *Connector) Mutation(ctx context.Context, configuration *types.Configuration, state *types.State, request *schema.MutationRequest) (*schema.MutationResponse, error) { + if len(connectorMutationHandlers) == 0 { + return nil, schema.UnprocessableContentError("unsupported mutation", nil) + } + + operationLen := len(request.Operations) + operationResults := make([]schema.MutationOperationResults, operationLen) + span := trace.SpanFromContext(ctx) + + for i, operation := range request.Operations { + childSpan := span + childContext := ctx + if operationLen > 1 { + childContext, childSpan = state.Tracer.Start(ctx, fmt.Sprintf("execute_operation_%d", i)) + defer childSpan.End() + } + childSpan.SetAttributes( + attribute.String("operation.type", string(operation.Type)), + attribute.String("operation.name", string(operation.Name)), + ) + + switch operation.Type { + case schema.MutationOperationProcedure: + result, err := c.execProcedure(childContext, state, &operation) + if err != nil { + if operationLen > 1 { + childSpan.SetStatus(codes.Error, err.Error()) + } + return nil, err + } + operationResults[i] = result + if operationLen > 1 { + childSpan.End() + } + default: + return nil, schema.UnprocessableContentError(fmt.Sprintf("invalid operation type: %s", operation.Type), nil) + } + } + + return &schema.MutationResponse{ + OperationResults: operationResults, + }, nil +} + +func (c *Connector) execProcedure(ctx context.Context, state *types.State, operation *schema.MutationOperation) (schema.MutationOperationResults, error) { + for _, handler := range connectorMutationHandlers { + result, err := handler.Mutation(ctx, state, operation) + if err == nil { + return result, nil + } + if err != utils.ErrHandlerNotfound { + return nil, err + } + } + + return nil, schema.UnprocessableContentError(fmt.Sprintf("unsupported procedure operation: %s", operation.Name), nil) +} + +func connector_addSpanEvent(span trace.Span, logger *slog.Logger, name string, data map[string]any, 
options ...trace.EventOption) { + logger.Debug(name, slog.Any("data", data)) + attrs := utils.DebugJSONAttributes(data, utils.IsDebug(logger)) + span.AddEvent(name, append(options, trace.WithAttributes(attrs...))...) +} \ No newline at end of file diff --git a/cmd/hasura-ndc-go/command/internal/testdata/basic/expected/connector.go.tmpl b/cmd/hasura-ndc-go/command/internal/testdata/basic/expected/connector.go.tmpl index 732f79b..981856f 100644 --- a/cmd/hasura-ndc-go/command/internal/testdata/basic/expected/connector.go.tmpl +++ b/cmd/hasura-ndc-go/command/internal/testdata/basic/expected/connector.go.tmpl @@ -1,24 +1,38 @@ -// Code generated by github.com/hasura/ndc-sdk-go/codegen, DO NOT EDIT. +// Code generated by github.com/hasura/ndc-sdk-go/cmd/hasura-ndc-go, DO NOT EDIT. package main + import ( "context" _ "embed" "fmt" "log/slog" - "encoding/json" + "github.com/hasura/ndc-codegen-test/functions" - "github.com/hasura/ndc-codegen-test/types" - "github.com/hasura/ndc-sdk-go/connector" +"github.com/hasura/ndc-codegen-test/types" "github.com/hasura/ndc-sdk-go/schema" "github.com/hasura/ndc-sdk-go/utils" "go.opentelemetry.io/otel/attribute" "go.opentelemetry.io/otel/codes" "go.opentelemetry.io/otel/trace" ) -//go:embed schema.generated.json -var rawSchema []byte + var schemaResponse *schema.RawSchemaResponse +var connectorQueryHandlers = []ConnectorQueryHandler{functions.DataConnectorHandler{}} +var connectorMutationHandlers = []ConnectorMutationHandler{functions.DataConnectorHandler{}} + +// ConnectorQueryHandler abstracts the connector query handler +type ConnectorQueryHandler interface { + Query(ctx context.Context, state *types.State, request *schema.QueryRequest, arguments map[string]any) (*schema.RowSet, error) +} +// ConnectorMutationHandler abstracts the connector mutation handler +type ConnectorMutationHandler interface { + Mutation(ctx context.Context, state *types.State, request *schema.MutationOperation) (schema.MutationOperationResults, error) +} + + 
+//go:embed schema.generated.json +var rawSchema []byte func init() { var err error schemaResponse, err = schema.NewRawSchemaResponse(rawSchema) @@ -26,16 +40,19 @@ func init() { panic(err) } } + + // GetSchema gets the connector's schema. func (c *Connector) GetSchema(ctx context.Context, configuration *types.Configuration, _ *types.State) (schema.SchemaResponseMarshaler, error) { return schemaResponse, nil } + // Query executes a query. func (c *Connector) Query(ctx context.Context, configuration *types.Configuration, state *types.State, request *schema.QueryRequest) (schema.QueryResponse, error) { - valueField, err := utils.EvalFunctionSelectionFieldValue(request) - if err != nil { - return nil, schema.UnprocessableContentError(err.Error(), nil) + if len(connectorQueryHandlers) == 0 { + return nil, schema.UnprocessableContentError(fmt.Sprintf("unsupported query: %s", request.Collection), nil) } + span := trace.SpanFromContext(ctx) requestVars := request.Variables varsLength := len(requestVars) @@ -53,21 +70,15 @@ func (c *Connector) Query(ctx context.Context, configuration *types.Configuratio defer childSpan.End() } - result, err := execQuery(childContext, state, request, valueField, requestVar, childSpan) + result, err := c.execQuery(childContext, state, request, requestVar) if err != nil { if varsLength > 1 { childSpan.SetStatus(codes.Error, err.Error()) } return nil, err } - rowSets[i] = schema.RowSet{ - Aggregates: schema.RowSetAggregates{}, - Rows: []map[string]any{ - { - "__value": result, - }, - }, - } + rowSets[i] = *result + if varsLength > 1 { childSpan.End() } @@ -75,12 +86,39 @@ func (c *Connector) Query(ctx context.Context, configuration *types.Configuratio return rowSets, nil } + +func (c *Connector) execQuery(ctx context.Context, state *types.State, request *schema.QueryRequest, variables map[string]any) (*schema.RowSet, error) { + rawArgs, err := utils.ResolveArgumentVariables(request.Arguments, variables) + if err != nil { + return nil, 
schema.UnprocessableContentError("failed to resolve argument variables", map[string]any{ + "cause": err.Error(), + }) + } + + for _, handler := range connectorQueryHandlers { + result, err := handler.Query(ctx, state, request, rawArgs) + if err == nil { + return result, nil + } + + if err != utils.ErrHandlerNotfound { + return nil, err + } + } + return nil, schema.UnprocessableContentError(fmt.Sprintf("unsupported query: %s", request.Collection), nil) +} + // Mutation executes a mutation. func (c *Connector) Mutation(ctx context.Context, configuration *types.Configuration, state *types.State, request *schema.MutationRequest) (*schema.MutationResponse, error) { - operationLen := len(request.Operations) + if len(connectorMutationHandlers) == 0 { + return nil, schema.UnprocessableContentError("unsupported mutation", nil) + } + + operationLen := len(request.Operations) operationResults := make([]schema.MutationOperationResults, operationLen) - span := trace.SpanFromContext(ctx) - for i, operation := range request.Operations { + span := trace.SpanFromContext(ctx) + + for i, operation := range request.Operations { childSpan := span childContext := ctx if operationLen > 1 { @@ -92,259 +130,45 @@ func (c *Connector) Mutation(ctx context.Context, configuration *types.Configura attribute.String("operation.name", string(operation.Name)), ) - switch operation.Type { - case schema.MutationOperationProcedure: - result, err := execProcedure(childContext, state, &operation, childSpan) - if err != nil { + switch operation.Type { + case schema.MutationOperationProcedure: + result, err := c.execProcedure(childContext, state, &operation) + if err != nil { if operationLen > 1 { childSpan.SetStatus(codes.Error, err.Error()) } - return nil, err - } - operationResults[i] = result + return nil, err + } + operationResults[i] = result if operationLen > 1 { childSpan.End() } - default: - return nil, schema.UnprocessableContentError(fmt.Sprintf("invalid operation type: %s", operation.Type), 
nil) - } - } + default: + return nil, schema.UnprocessableContentError(fmt.Sprintf("invalid operation type: %s", operation.Type), nil) + } + } - return &schema.MutationResponse{ - OperationResults: operationResults, - }, nil + return &schema.MutationResponse{ + OperationResults: operationResults, + }, nil } -func execQuery(ctx context.Context, state *types.State, request *schema.QueryRequest, queryFields schema.NestedField, variables map[string]any, span trace.Span) (any, error) { - logger := connector.GetLogger(ctx) - connector_addSpanEvent(span, logger, "validate_request", map[string]any{ - "variables": variables, - }) - switch request.Collection { - case "getBool": - if len(queryFields) > 0 { - return nil, schema.UnprocessableContentError("cannot evaluate selection fields for scalar", nil) - } - return functions.FunctionGetBool(ctx, state) - case "getTypes": - selection, err := queryFields.AsObject() - if err != nil { - return nil, schema.UnprocessableContentError("the selection field type must be object", map[string]any{ - "cause": err.Error(), - }) - } - rawArgs, err := utils.ResolveArgumentVariables(request.Arguments, variables) - if err != nil { - return nil, schema.UnprocessableContentError("failed to resolve argument variables", map[string]any{ - "cause": err.Error(), - }) - } - connector_addSpanEvent(span, logger, "resolve_arguments", map[string]any{ - "raw_arguments": rawArgs, - }) - var args functions.GetTypesArguments - if err = args.FromValue(rawArgs); err != nil { - return nil, schema.UnprocessableContentError("failed to resolve arguments", map[string]any{ - "cause": err.Error(), - }) - } - connector_addSpanEvent(span, logger, "execute_function", map[string]any{ - "arguments": args, - }) - rawResult, err := functions.FunctionGetTypes(ctx, state, &args) - if err != nil { - return nil, err - } - if rawResult == nil { - return nil, nil - } - connector_addSpanEvent(span, logger, "evaluate_response_selection", map[string]any{ - "raw_result": rawResult, - 
}) - result, err := utils.EvalNestedColumnObject(selection, rawResult) - if err != nil { - return nil, err - } - return result, nil - case "hello": - selection, err := queryFields.AsObject() - if err != nil { - return nil, schema.UnprocessableContentError("the selection field type must be object", map[string]any{ - "cause": err.Error(), - }) - } - rawResult, err := functions.FunctionHello(ctx, state) - if err != nil { - return nil, err - } - if rawResult == nil { - return nil, nil - } - connector_addSpanEvent(span, logger, "evaluate_response_selection", map[string]any{ - "raw_result": rawResult, - }) - result, err := utils.EvalNestedColumnObject(selection, rawResult) - if err != nil { - return nil, err - } - return result, nil - case "getArticles": - selection, err := queryFields.AsArray() - if err != nil { - return nil, schema.UnprocessableContentError("the selection field type must be array", map[string]any{ - "cause": err.Error(), - }) - } - rawArgs, err := utils.ResolveArgumentVariables(request.Arguments, variables) - if err != nil { - return nil, schema.UnprocessableContentError("failed to resolve argument variables", map[string]any{ - "cause": err.Error(), - }) - } - connector_addSpanEvent(span, logger, "resolve_arguments", map[string]any{ - "raw_arguments": rawArgs, - }) - var args functions.GetArticlesArguments - if err = args.FromValue(rawArgs); err != nil { - return nil, schema.UnprocessableContentError("failed to resolve arguments", map[string]any{ - "cause": err.Error(), - }) - } - connector_addSpanEvent(span, logger, "execute_function", map[string]any{ - "arguments": args, - }) - rawResult, err := functions.GetArticles(ctx, state, &args) - if err != nil { - return nil, err - } - if rawResult == nil { - return nil, schema.UnprocessableContentError("expected not null result", nil) +func (c *Connector) execProcedure(ctx context.Context, state *types.State, operation *schema.MutationOperation) (schema.MutationOperationResults, error) { + for _, handler := 
range connectorMutationHandlers { + result, err := handler.Mutation(ctx, state, operation) + if err == nil { + return result, nil } - connector_addSpanEvent(span, logger, "evaluate_response_selection", map[string]any{ - "raw_result": rawResult, - }) - result, err := utils.EvalNestedColumnArrayIntoSlice(selection, rawResult) - if err != nil { + if err != utils.ErrHandlerNotfound { return nil, err } - return result, nil - default: - return nil, schema.UnprocessableContentError(fmt.Sprintf("unsupported query: %s", request.Collection), nil) } + + return nil, schema.UnprocessableContentError(fmt.Sprintf("unsupported procedure operation: %s", operation.Name), nil) } -func execProcedure(ctx context.Context, state *types.State, operation *schema.MutationOperation, span trace.Span) (schema.MutationOperationResults, error) { - logger := connector.GetLogger(ctx) - connector_addSpanEvent(span, logger, "validate_request", map[string]any{ - "operations_name": operation.Name, - }) - switch operation.Name { - case "create_article": - selection, err := operation.Fields.AsObject() - if err != nil { - return nil, schema.UnprocessableContentError("the selection field type must be object", map[string]any{ - "cause": err.Error(), - }) - } - var args functions.CreateArticleArguments - if err := json.Unmarshal(operation.Arguments, &args); err != nil { - return nil, schema.UnprocessableContentError("failed to decode arguments", map[string]any{ - "cause": err.Error(), - }) - } - span.AddEvent("execute_procedure") - rawResult, err := functions.CreateArticle(ctx, state, &args) - if err != nil { - return nil, err - } - if rawResult == nil { - return nil, nil - } - connector_addSpanEvent(span, logger, "evaluate_response_selection", map[string]any{ - "raw_result": rawResult, - }) - result, err := utils.EvalNestedColumnObject(selection, rawResult) - if err != nil { - return nil, err - } - return schema.NewProcedureResult(result).Encode(), nil - case "increase": - if len(operation.Fields) > 0 { - 
return nil, schema.UnprocessableContentError("cannot evaluate selection fields for scalar", nil) - } - span.AddEvent("execute_procedure") - var err error - result, err := functions.Increase(ctx, state) - if err != nil { - return nil, err - } - return schema.NewProcedureResult(result).Encode(), nil - case "createAuthor": - selection, err := operation.Fields.AsObject() - if err != nil { - return nil, schema.UnprocessableContentError("the selection field type must be object", map[string]any{ - "cause": err.Error(), - }) - } - var args functions.CreateAuthorArguments - if err := json.Unmarshal(operation.Arguments, &args); err != nil { - return nil, schema.UnprocessableContentError("failed to decode arguments", map[string]any{ - "cause": err.Error(), - }) - } - span.AddEvent("execute_procedure") - rawResult, err := functions.ProcedureCreateAuthor(ctx, state, &args) - if err != nil { - return nil, err - } - if rawResult == nil { - return nil, nil - } - connector_addSpanEvent(span, logger, "evaluate_response_selection", map[string]any{ - "raw_result": rawResult, - }) - result, err := utils.EvalNestedColumnObject(selection, rawResult) - if err != nil { - return nil, err - } - return schema.NewProcedureResult(result).Encode(), nil - case "createAuthors": - selection, err := operation.Fields.AsArray() - if err != nil { - return nil, schema.UnprocessableContentError("the selection field type must be array", map[string]any{ - "cause": err.Error(), - }) - } - var args functions.CreateAuthorsArguments - if err := json.Unmarshal(operation.Arguments, &args); err != nil { - return nil, schema.UnprocessableContentError("failed to decode arguments", map[string]any{ - "cause": err.Error(), - }) - } - span.AddEvent("execute_procedure") - rawResult, err := functions.ProcedureCreateAuthors(ctx, state, &args) - if err != nil { - return nil, err - } - if rawResult == nil { - return nil, schema.UnprocessableContentError("expected not null result", nil) - } - connector_addSpanEvent(span, 
logger, "evaluate_response_selection", map[string]any{ - "raw_result": rawResult, - }) - result, err := utils.EvalNestedColumnArrayIntoSlice(selection, rawResult) - if err != nil { - return nil, err - } - return schema.NewProcedureResult(result).Encode(), nil - default: - return nil, schema.UnprocessableContentError(fmt.Sprintf("unsupported procedure operation: %s", operation.Name), nil) - } -} + func connector_addSpanEvent(span trace.Span, logger *slog.Logger, name string, data map[string]any, options ...trace.EventOption) { logger.Debug(name, slog.Any("data", data)) - attrs := utils.DebugJSONAttributes(data, connector_isDebug(logger)) + attrs := utils.DebugJSONAttributes(data, utils.IsDebug(logger)) span.AddEvent(name, append(options, trace.WithAttributes(attrs...))...) -} -func connector_isDebug(logger *slog.Logger) bool { - return logger.Enabled(context.TODO(), slog.LevelDebug) } \ No newline at end of file diff --git a/cmd/hasura-ndc-go/command/internal/testdata/basic/expected/functions.go.tmpl b/cmd/hasura-ndc-go/command/internal/testdata/basic/expected/functions.go.tmpl index eb6594f..de680a3 100644 --- a/cmd/hasura-ndc-go/command/internal/testdata/basic/expected/functions.go.tmpl +++ b/cmd/hasura-ndc-go/command/internal/testdata/basic/expected/functions.go.tmpl @@ -1,24 +1,52 @@ -// Code generated by github.com/hasura/ndc-sdk-go/codegen, DO NOT EDIT. +// Code generated by github.com/hasura/ndc-sdk-go/cmd/hasura-ndc-go, DO NOT EDIT. 
package functions import ( + "context" "encoding/json" "errors" "github.com/google/uuid" + "github.com/hasura/ndc-codegen-test/types" + "github.com/hasura/ndc-sdk-go/connector" "github.com/hasura/ndc-sdk-go/scalar" + "github.com/hasura/ndc-sdk-go/schema" "github.com/hasura/ndc-sdk-go/utils" + "go.opentelemetry.io/otel/trace" + "log/slog" "slices" "time" ) -var functions_Decoder = utils.NewDecoder() +var connector_Decoder = utils.NewDecoder() +// FromValue decodes values from map +func (j *Author) FromValue(input map[string]any) error { + var err error + j.CreatedAt, err = utils.GetDateTime(input, "created_at") + if err != nil { + return err + } + j.ID, err = utils.GetString(input, "id") + if err != nil { + return err + } + return nil +} +// FromValue decodes values from map +func (j *GetArticlesArguments) FromValue(input map[string]any) error { + var err error + j.Limit, err = utils.GetFloat[float64](input, "Limit") + if err != nil { + return err + } + return nil +} // FromValue decodes values from map func (j *GetTypesArguments) FromValue(input map[string]any) error { var err error - err = functions_Decoder.DecodeObjectValue(&j.ArrayBigInt, input, "ArrayBigInt") + err = connector_Decoder.DecodeObjectValue(&j.ArrayBigInt, input, "ArrayBigInt") if err != nil { return err } - err = functions_Decoder.DecodeObjectValue(&j.ArrayBigIntPtr, input, "ArrayBigIntPtr") + err = connector_Decoder.DecodeObjectValue(&j.ArrayBigIntPtr, input, "ArrayBigIntPtr") if err != nil { return err } @@ -94,21 +122,21 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { if err != nil { return err } - err = functions_Decoder.DecodeObjectValue(&j.ArrayMap, input, "ArrayMap") + err = connector_Decoder.DecodeObjectValue(&j.ArrayMap, input, "ArrayMap") if err != nil { return err } j.ArrayMapPtr = new([]map[string]any) - err = functions_Decoder.DecodeNullableObjectValue(j.ArrayMapPtr, input, "ArrayMapPtr") + err = connector_Decoder.DecodeNullableObjectValue(j.ArrayMapPtr, input, 
"ArrayMapPtr") if err != nil { return err } - err = functions_Decoder.DecodeObjectValue(&j.ArrayObject, input, "ArrayObject") + err = connector_Decoder.DecodeObjectValue(&j.ArrayObject, input, "ArrayObject") if err != nil { return err } j.ArrayObjectPtr = new([]struct{Content string "json:\"content\""}) - err = functions_Decoder.DecodeNullableObjectValue(j.ArrayObjectPtr, input, "ArrayObjectPtr") + err = connector_Decoder.DecodeNullableObjectValue(j.ArrayObjectPtr, input, "ArrayObjectPtr") if err != nil { return err } @@ -184,12 +212,12 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { if err != nil { return err } - err = functions_Decoder.DecodeObjectValue(&j.BigInt, input, "BigInt") + err = connector_Decoder.DecodeObjectValue(&j.BigInt, input, "BigInt") if err != nil { return err } j.BigIntPtr = new(scalar.BigInt) - err = functions_Decoder.DecodeNullableObjectValue(j.BigIntPtr, input, "BigIntPtr") + err = connector_Decoder.DecodeNullableObjectValue(j.BigIntPtr, input, "BigIntPtr") if err != nil { return err } @@ -201,30 +229,30 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { if err != nil { return err } - err = functions_Decoder.DecodeObjectValue(&j.Bytes, input, "Bytes") + err = connector_Decoder.DecodeObjectValue(&j.Bytes, input, "Bytes") if err != nil { return err } j.BytesPtr = new(scalar.Bytes) - err = functions_Decoder.DecodeNullableObjectValue(j.BytesPtr, input, "BytesPtr") + err = connector_Decoder.DecodeNullableObjectValue(j.BytesPtr, input, "BytesPtr") if err != nil { return err } - err = functions_Decoder.DecodeObjectValue(&j.CustomScalar, input, "CustomScalar") + err = connector_Decoder.DecodeObjectValue(&j.CustomScalar, input, "CustomScalar") if err != nil { return err } j.CustomScalarPtr = new(CommentText) - err = functions_Decoder.DecodeNullableObjectValue(j.CustomScalarPtr, input, "CustomScalarPtr") + err = connector_Decoder.DecodeNullableObjectValue(j.CustomScalarPtr, input, "CustomScalarPtr") if err != 
nil { return err } - err = functions_Decoder.DecodeObjectValue(&j.Enum, input, "Enum") + err = connector_Decoder.DecodeObjectValue(&j.Enum, input, "Enum") if err != nil { return err } j.EnumPtr = new(SomeEnum) - err = functions_Decoder.DecodeNullableObjectValue(j.EnumPtr, input, "EnumPtr") + err = connector_Decoder.DecodeNullableObjectValue(j.EnumPtr, input, "EnumPtr") if err != nil { return err } @@ -292,54 +320,54 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { if err != nil { return err } - err = functions_Decoder.DecodeObjectValue(&j.Map, input, "Map") + err = connector_Decoder.DecodeObjectValue(&j.Map, input, "Map") if err != nil { return err } j.MapPtr = new(map[string]any) - err = functions_Decoder.DecodeNullableObjectValue(j.MapPtr, input, "MapPtr") + err = connector_Decoder.DecodeNullableObjectValue(j.MapPtr, input, "MapPtr") if err != nil { return err } - err = functions_Decoder.DecodeObjectValue(&j.NamedArray, input, "NamedArray") + err = connector_Decoder.DecodeObjectValue(&j.NamedArray, input, "NamedArray") if err != nil { return err } j.NamedArrayPtr = new([]Author) - err = functions_Decoder.DecodeNullableObjectValue(j.NamedArrayPtr, input, "NamedArrayPtr") + err = connector_Decoder.DecodeNullableObjectValue(j.NamedArrayPtr, input, "NamedArrayPtr") if err != nil { return err } - err = functions_Decoder.DecodeObjectValue(&j.NamedObject, input, "NamedObject") + err = connector_Decoder.DecodeObjectValue(&j.NamedObject, input, "NamedObject") if err != nil { return err } j.NamedObjectPtr = new(Author) - err = functions_Decoder.DecodeNullableObjectValue(j.NamedObjectPtr, input, "NamedObjectPtr") + err = connector_Decoder.DecodeNullableObjectValue(j.NamedObjectPtr, input, "NamedObjectPtr") if err != nil { return err } - err = functions_Decoder.DecodeObjectValue(&j.Object, input, "Object") + err = connector_Decoder.DecodeObjectValue(&j.Object, input, "Object") if err != nil { return err } j.ObjectPtr = new(struct{Long int; Lat int}) - 
err = functions_Decoder.DecodeNullableObjectValue(j.ObjectPtr, input, "ObjectPtr") + err = connector_Decoder.DecodeNullableObjectValue(j.ObjectPtr, input, "ObjectPtr") if err != nil { return err } j.PtrArrayBigInt = new([]scalar.BigInt) - err = functions_Decoder.DecodeNullableObjectValue(j.PtrArrayBigInt, input, "PtrArrayBigInt") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayBigInt, input, "PtrArrayBigInt") if err != nil { return err } j.PtrArrayBigIntPtr = new([]*scalar.BigInt) - err = functions_Decoder.DecodeNullableObjectValue(j.PtrArrayBigIntPtr, input, "PtrArrayBigIntPtr") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayBigIntPtr, input, "PtrArrayBigIntPtr") if err != nil { return err } j.PtrArrayBool = new([]bool) - err = functions_Decoder.DecodeNullableObjectValue(j.PtrArrayBool, input, "PtrArrayBool") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayBool, input, "PtrArrayBool") if err != nil { return err } @@ -348,7 +376,7 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { return err } j.PtrArrayFloat32 = new([]float32) - err = functions_Decoder.DecodeNullableObjectValue(j.PtrArrayFloat32, input, "PtrArrayFloat32") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayFloat32, input, "PtrArrayFloat32") if err != nil { return err } @@ -357,7 +385,7 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { return err } j.PtrArrayFloat64 = new([]float64) - err = functions_Decoder.DecodeNullableObjectValue(j.PtrArrayFloat64, input, "PtrArrayFloat64") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayFloat64, input, "PtrArrayFloat64") if err != nil { return err } @@ -366,12 +394,12 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { return err } j.PtrArrayInt = new([]int) - err = functions_Decoder.DecodeNullableObjectValue(j.PtrArrayInt, input, "PtrArrayInt") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayInt, input, "PtrArrayInt") if 
err != nil { return err } j.PtrArrayInt16 = new([]int16) - err = functions_Decoder.DecodeNullableObjectValue(j.PtrArrayInt16, input, "PtrArrayInt16") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayInt16, input, "PtrArrayInt16") if err != nil { return err } @@ -380,7 +408,7 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { return err } j.PtrArrayInt32 = new([]int32) - err = functions_Decoder.DecodeNullableObjectValue(j.PtrArrayInt32, input, "PtrArrayInt32") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayInt32, input, "PtrArrayInt32") if err != nil { return err } @@ -389,7 +417,7 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { return err } j.PtrArrayInt64 = new([]int64) - err = functions_Decoder.DecodeNullableObjectValue(j.PtrArrayInt64, input, "PtrArrayInt64") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayInt64, input, "PtrArrayInt64") if err != nil { return err } @@ -398,7 +426,7 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { return err } j.PtrArrayInt8 = new([]int8) - err = functions_Decoder.DecodeNullableObjectValue(j.PtrArrayInt8, input, "PtrArrayInt8") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayInt8, input, "PtrArrayInt8") if err != nil { return err } @@ -411,7 +439,7 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { return err } j.PtrArrayJSON = new([]any) - err = functions_Decoder.DecodeNullableObjectValue(j.PtrArrayJSON, input, "PtrArrayJSON") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayJSON, input, "PtrArrayJSON") if err != nil { return err } @@ -420,7 +448,7 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { return err } j.PtrArrayRawJSON = new([]json.RawMessage) - err = functions_Decoder.DecodeNullableObjectValue(j.PtrArrayRawJSON, input, "PtrArrayRawJSON") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayRawJSON, input, "PtrArrayRawJSON") if err != nil 
{ return err } @@ -429,7 +457,7 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { return err } j.PtrArrayString = new([]string) - err = functions_Decoder.DecodeNullableObjectValue(j.PtrArrayString, input, "PtrArrayString") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayString, input, "PtrArrayString") if err != nil { return err } @@ -438,7 +466,7 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { return err } j.PtrArrayTime = new([]time.Time) - err = functions_Decoder.DecodeNullableObjectValue(j.PtrArrayTime, input, "PtrArrayTime") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayTime, input, "PtrArrayTime") if err != nil { return err } @@ -447,7 +475,7 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { return err } j.PtrArrayUUID = new([]uuid.UUID) - err = functions_Decoder.DecodeNullableObjectValue(j.PtrArrayUUID, input, "PtrArrayUUID") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayUUID, input, "PtrArrayUUID") if err != nil { return err } @@ -456,12 +484,12 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { return err } j.PtrArrayUint = new([]uint) - err = functions_Decoder.DecodeNullableObjectValue(j.PtrArrayUint, input, "PtrArrayUint") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayUint, input, "PtrArrayUint") if err != nil { return err } j.PtrArrayUint16 = new([]uint16) - err = functions_Decoder.DecodeNullableObjectValue(j.PtrArrayUint16, input, "PtrArrayUint16") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayUint16, input, "PtrArrayUint16") if err != nil { return err } @@ -470,7 +498,7 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { return err } j.PtrArrayUint32 = new([]uint32) - err = functions_Decoder.DecodeNullableObjectValue(j.PtrArrayUint32, input, "PtrArrayUint32") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayUint32, input, "PtrArrayUint32") if err != nil { 
return err } @@ -479,7 +507,7 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { return err } j.PtrArrayUint64 = new([]uint64) - err = functions_Decoder.DecodeNullableObjectValue(j.PtrArrayUint64, input, "PtrArrayUint64") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayUint64, input, "PtrArrayUint64") if err != nil { return err } @@ -488,7 +516,7 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { return err } j.PtrArrayUint8 = new([]uint8) - err = functions_Decoder.DecodeNullableObjectValue(j.PtrArrayUint8, input, "PtrArrayUint8") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayUint8, input, "PtrArrayUint8") if err != nil { return err } @@ -516,12 +544,12 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { if err != nil { return err } - err = functions_Decoder.DecodeObjectValue(&j.Text, input, "Text") + err = connector_Decoder.DecodeObjectValue(&j.Text, input, "Text") if err != nil { return err } j.TextPtr = new(Text) - err = functions_Decoder.DecodeNullableObjectValue(j.TextPtr, input, "TextPtr") + err = connector_Decoder.DecodeNullableObjectValue(j.TextPtr, input, "TextPtr") if err != nil { return err } @@ -533,7 +561,7 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { if err != nil { return err } - err = functions_Decoder.DecodeObjectValue(&j.URL, input, "URL") + err = connector_Decoder.DecodeObjectValue(&j.URL, input, "URL") if err != nil { return err } @@ -591,15 +619,6 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { } return nil } -// FromValue decodes values from map -func (j *GetArticlesArguments) FromValue(input map[string]any) error { - var err error - j.Limit, err = utils.GetFloat[float64](input, "Limit") - if err != nil { - return err - } - return nil -} // ToMap encodes the struct to a value map func (j Author) ToMap() map[string]any { r := make(map[string]any) @@ -614,7 +633,7 @@ func (j CreateArticleResult) ToMap() 
map[string]any { j_Authors := make([]any, len(j.Authors)) for i, j_Authors_v := range j.Authors { j_Authors[i] = j_Authors_v - } + } r["authors"] = j_Authors r["id"] = j.ID @@ -632,8 +651,8 @@ func (j CreateAuthorResult) ToMap() map[string]any { // ToMap encodes the struct to a value map func (j GetArticlesResult) ToMap() map[string]any { r := make(map[string]any) - r["id"] = j.ID r["Name"] = j.Name + r["id"] = j.ID return r } @@ -727,19 +746,19 @@ func (j GetTypesArguments) ToMap() map[string]any { j_NamedArray := make([]any, len(j.NamedArray)) for i, j_NamedArray_v := range j.NamedArray { j_NamedArray[i] = j_NamedArray_v - } + } r["NamedArray"] = j_NamedArray if j.NamedArrayPtr != nil { j_NamedArrayPtr := make([]any, len((*j.NamedArrayPtr))) for i, j_NamedArrayPtr_v := range (*j.NamedArrayPtr) { j_NamedArrayPtr[i] = j_NamedArrayPtr_v - } + } r["NamedArrayPtr"] = j_NamedArrayPtr } r["NamedObject"] = j.NamedObject - if j.NamedObjectPtr != nil { + if j.NamedObjectPtr != nil { r["NamedObjectPtr"] = (*j.NamedObjectPtr) - } + } j_Object_obj := make(map[string]any) j_Object_obj["created_at"] = j.Object.CreatedAt j_Object_obj["id"] = j.Object.ID @@ -893,3 +912,277 @@ func (s *SomeEnum) FromValue(value any) error { *s = result return nil } + +// DataConnectorHandler implements the data connector handler +type DataConnectorHandler struct{} + +// QueryExists check if the query name exists +func (dch DataConnectorHandler) QueryExists(name string) bool { + return slices.Contains(enumValues_FunctionName, name) +} +func (dch DataConnectorHandler) Query(ctx context.Context, state *types.State, request *schema.QueryRequest, rawArgs map[string]any) (*schema.RowSet, error) { + if !dch.QueryExists(request.Collection) { + return nil, utils.ErrHandlerNotfound + } + queryFields, err := utils.EvalFunctionSelectionFieldValue(request) + if err != nil { + return nil, schema.UnprocessableContentError(err.Error(), nil) + } + + result, err := dch.execQuery(ctx, state, request, queryFields, 
rawArgs) + if err != nil { + return nil, err + } + + return &schema.RowSet{ + Aggregates: schema.RowSetAggregates{}, + Rows: []map[string]any{ + { + "__value": result, + }, + }, + }, nil +} + +func (dch DataConnectorHandler) execQuery(ctx context.Context, state *types.State, request *schema.QueryRequest, queryFields schema.NestedField, rawArgs map[string]any) (any, error) { + span := trace.SpanFromContext(ctx) + logger := connector.GetLogger(ctx) + switch request.Collection { + case "getBool": + + if len(queryFields) > 0 { + return nil, schema.UnprocessableContentError("cannot evaluate selection fields for scalar", nil) + } + return FunctionGetBool(ctx, state) + + case "getTypes": + + selection, err := queryFields.AsObject() + if err != nil { + return nil, schema.UnprocessableContentError("the selection field type must be object", map[string]any{ + "cause": err.Error(), + }) + } + var args GetTypesArguments + if parseErr := args.FromValue(rawArgs); parseErr != nil { + return nil, schema.UnprocessableContentError("failed to resolve arguments", map[string]any{ + "cause": parseErr.Error(), + }) + } + + connector_addSpanEvent(span, logger, "execute_function", map[string]any{ + "arguments": args, + }) + rawResult, err := FunctionGetTypes(ctx, state, &args) + if err != nil { + return nil, err + } + + if rawResult == nil { + return nil, nil + } + + connector_addSpanEvent(span, logger, "evaluate_response_selection", map[string]any{ + "raw_result": rawResult, + }) + result, err := utils.EvalNestedColumnObject(selection, rawResult) + if err != nil { + return nil, err + } + return result, nil + + case "hello": + + selection, err := queryFields.AsObject() + if err != nil { + return nil, schema.UnprocessableContentError("the selection field type must be object", map[string]any{ + "cause": err.Error(), + }) + } + rawResult, err := FunctionHello(ctx, state) + if err != nil { + return nil, err + } + + if rawResult == nil { + return nil, nil + } + + connector_addSpanEvent(span, 
logger, "evaluate_response_selection", map[string]any{ + "raw_result": rawResult, + }) + result, err := utils.EvalNestedColumnObject(selection, rawResult) + if err != nil { + return nil, err + } + return result, nil + + case "getArticles": + + selection, err := queryFields.AsArray() + if err != nil { + return nil, schema.UnprocessableContentError("the selection field type must be array", map[string]any{ + "cause": err.Error(), + }) + } + var args GetArticlesArguments + if parseErr := args.FromValue(rawArgs); parseErr != nil { + return nil, schema.UnprocessableContentError("failed to resolve arguments", map[string]any{ + "cause": parseErr.Error(), + }) + } + + connector_addSpanEvent(span, logger, "execute_function", map[string]any{ + "arguments": args, + }) + rawResult, err := GetArticles(ctx, state, &args) + if err != nil { + return nil, err + } + + connector_addSpanEvent(span, logger, "evaluate_response_selection", map[string]any{ + "raw_result": rawResult, + }) + result, err := utils.EvalNestedColumnArrayIntoSlice(selection, rawResult) + if err != nil { + return nil, err + } + return result, nil + + default: + return nil, utils.ErrHandlerNotfound + } +} +var enumValues_FunctionName = []string{"getBool", "getTypes", "hello", "getArticles"} +// MutationExists check if the mutation name exists +func (dch DataConnectorHandler) MutationExists(name string) bool { + return slices.Contains(enumValues_ProcedureName, name) +} +func (dch DataConnectorHandler) Mutation(ctx context.Context, state *types.State, operation *schema.MutationOperation) (schema.MutationOperationResults, error) { + span := trace.SpanFromContext(ctx) + logger := connector.GetLogger(ctx) + connector_addSpanEvent(span, logger, "validate_request", map[string]any{ + "operations_name": operation.Name, + }) + + switch operation.Name { + case "create_article": + + selection, err := operation.Fields.AsObject() + if err != nil { + return nil, schema.UnprocessableContentError("the selection field type must be 
object", map[string]any{ + "cause": err.Error(), + }) + } + var args CreateArticleArguments + if err := json.Unmarshal(operation.Arguments, &args); err != nil { + return nil, schema.UnprocessableContentError("failed to decode arguments", map[string]any{ + "cause": err.Error(), + }) + } + span.AddEvent("execute_procedure") + rawResult, err := CreateArticle(ctx, state, &args) + + if err != nil { + return nil, err + } + + if rawResult == nil { + return nil, nil + } + connector_addSpanEvent(span, logger, "evaluate_response_selection", map[string]any{ + "raw_result": rawResult, + }) + result, err := utils.EvalNestedColumnObject(selection, rawResult) + + if err != nil { + return nil, err + } + return schema.NewProcedureResult(result).Encode(), nil + + case "increase": + + if len(operation.Fields) > 0 { + return nil, schema.UnprocessableContentError("cannot evaluate selection fields for scalar", nil) + } + span.AddEvent("execute_procedure") + result, err := Increase(ctx, state) + if err != nil { + return nil, err + } + return schema.NewProcedureResult(result).Encode(), nil + + case "createAuthor": + + selection, err := operation.Fields.AsObject() + if err != nil { + return nil, schema.UnprocessableContentError("the selection field type must be object", map[string]any{ + "cause": err.Error(), + }) + } + var args CreateAuthorArguments + if err := json.Unmarshal(operation.Arguments, &args); err != nil { + return nil, schema.UnprocessableContentError("failed to decode arguments", map[string]any{ + "cause": err.Error(), + }) + } + span.AddEvent("execute_procedure") + rawResult, err := ProcedureCreateAuthor(ctx, state, &args) + + if err != nil { + return nil, err + } + + if rawResult == nil { + return nil, nil + } + connector_addSpanEvent(span, logger, "evaluate_response_selection", map[string]any{ + "raw_result": rawResult, + }) + result, err := utils.EvalNestedColumnObject(selection, rawResult) + + if err != nil { + return nil, err + } + return 
schema.NewProcedureResult(result).Encode(), nil + + case "createAuthors": + + selection, err := operation.Fields.AsArray() + if err != nil { + return nil, schema.UnprocessableContentError("the selection field type must be array", map[string]any{ + "cause": err.Error(), + }) + } + var args CreateAuthorsArguments + if err := json.Unmarshal(operation.Arguments, &args); err != nil { + return nil, schema.UnprocessableContentError("failed to decode arguments", map[string]any{ + "cause": err.Error(), + }) + } + span.AddEvent("execute_procedure") + rawResult, err := ProcedureCreateAuthors(ctx, state, &args) + + if err != nil { + return nil, err + } + connector_addSpanEvent(span, logger, "evaluate_response_selection", map[string]any{ + "raw_result": rawResult, + }) + result, err := utils.EvalNestedColumnArrayIntoSlice(selection, rawResult) + + if err != nil { + return nil, err + } + return schema.NewProcedureResult(result).Encode(), nil + + default: + return nil, utils.ErrHandlerNotfound + } +} +var enumValues_ProcedureName = []string{"create_article", "increase", "createAuthor", "createAuthors"} +func connector_addSpanEvent(span trace.Span, logger *slog.Logger, name string, data map[string]any, options ...trace.EventOption) { + logger.Debug(name, slog.Any("data", data)) + attrs := utils.DebugJSONAttributes(data, utils.IsDebug(logger)) + span.AddEvent(name, append(options, trace.WithAttributes(attrs...))...) +} \ No newline at end of file diff --git a/cmd/hasura-ndc-go/command/internal/testdata/basic/expected/schema.go.tmpl b/cmd/hasura-ndc-go/command/internal/testdata/basic/expected/schema.go.tmpl new file mode 100644 index 0000000..62dcdbc --- /dev/null +++ b/cmd/hasura-ndc-go/command/internal/testdata/basic/expected/schema.go.tmpl @@ -0,0 +1,1109 @@ +// Code generated by github.com/hasura/ndc-sdk-go/cmd/hasura-ndc-go, DO NOT EDIT. 
+package main + +import ( + "github.com/hasura/ndc-sdk-go/schema" +) + + +func toPtr[V any](value V) *V { + return &value +} + +// GetConnectorSchema gets the generated connector schema +func GetConnectorSchema() *schema.SchemaResponse { + return &schema.SchemaResponse{ + Collections: []schema.CollectionInfo{}, + ObjectTypes: schema.SchemaResponseObjectTypes{ + "Author": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "created_at": schema.ObjectField{ + Type: schema.NewNamedType("TimestampTZ").Encode(), + }, + "id": schema.ObjectField{ + Type: schema.NewNamedType("String").Encode(), + }, + }, + }, + "CreateArticleArgumentsAuthor": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "created_at": schema.ObjectField{ + Type: schema.NewNamedType("TimestampTZ").Encode(), + }, + "id": schema.ObjectField{ + Type: schema.NewNamedType("UUID").Encode(), + }, + }, + }, + "CreateArticleResult": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "authors": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("Author")).Encode(), + }, + "id": schema.ObjectField{ + Type: schema.NewNamedType("Int32").Encode(), + }, + }, + }, + "CreateAuthorResult": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "created_at": schema.ObjectField{ + Type: schema.NewNamedType("TimestampTZ").Encode(), + }, + "id": schema.ObjectField{ + Type: schema.NewNamedType("Int32").Encode(), + }, + "name": schema.ObjectField{ + Type: schema.NewNamedType("String").Encode(), + }, + }, + }, + "GetArticlesResult": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "Name": schema.ObjectField{ + Type: schema.NewNamedType("String").Encode(), + }, + "id": schema.ObjectField{ + Type: schema.NewNamedType("String").Encode(), + }, + }, + }, + "GetTypesArguments": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "ArrayBigInt": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("BigInt")).Encode(), + }, + "ArrayBigIntPtr": schema.ObjectField{ + Type: 
schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("BigInt"))).Encode(), + }, + "ArrayBool": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("Boolean")).Encode(), + }, + "ArrayBoolPtr": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Boolean"))).Encode(), + }, + "ArrayFloat32": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("Float32")).Encode(), + }, + "ArrayFloat32Ptr": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Float32"))).Encode(), + }, + "ArrayFloat64": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("Float64")).Encode(), + }, + "ArrayFloat64Ptr": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Float64"))).Encode(), + }, + "ArrayInt": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("Int32")).Encode(), + }, + "ArrayInt16": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("Int16")).Encode(), + }, + "ArrayInt16Ptr": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int16"))).Encode(), + }, + "ArrayInt32": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("Int32")).Encode(), + }, + "ArrayInt32Ptr": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int32"))).Encode(), + }, + "ArrayInt64": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("Int64")).Encode(), + }, + "ArrayInt64Ptr": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int64"))).Encode(), + }, + "ArrayInt8": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("Int8")).Encode(), + }, + "ArrayInt8Ptr": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int8"))).Encode(), + }, + "ArrayIntPtr": schema.ObjectField{ + Type: 
schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int32"))).Encode(), + }, + "ArrayJSON": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("JSON")).Encode(), + }, + "ArrayJSONPtr": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("JSON"))).Encode(), + }, + "ArrayMap": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("JSON")).Encode(), + }, + "ArrayMapPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("JSON"))).Encode(), + }, + "ArrayObject": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("GetTypesArgumentsArrayObject")).Encode(), + }, + "ArrayObjectPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("GetTypesArgumentsArrayObjectPtr"))).Encode(), + }, + "ArrayRawJSON": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("RawJSON")).Encode(), + }, + "ArrayRawJSONPtr": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("RawJSON"))).Encode(), + }, + "ArrayString": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("String")).Encode(), + }, + "ArrayStringPtr": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("String"))).Encode(), + }, + "ArrayTime": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("TimestampTZ")).Encode(), + }, + "ArrayTimePtr": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("TimestampTZ"))).Encode(), + }, + "ArrayUUID": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("UUID")).Encode(), + }, + "ArrayUUIDPtr": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("UUID"))).Encode(), + }, + "ArrayUint": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("Int32")).Encode(), + }, + "ArrayUint16": 
schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("Int16")).Encode(), + }, + "ArrayUint16Ptr": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int16"))).Encode(), + }, + "ArrayUint32": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("Int32")).Encode(), + }, + "ArrayUint32Ptr": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int32"))).Encode(), + }, + "ArrayUint64": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("Int64")).Encode(), + }, + "ArrayUint64Ptr": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int64"))).Encode(), + }, + "ArrayUint8": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("Int8")).Encode(), + }, + "ArrayUint8Ptr": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int8"))).Encode(), + }, + "ArrayUintPtr": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int32"))).Encode(), + }, + "BigInt": schema.ObjectField{ + Type: schema.NewNamedType("BigInt").Encode(), + }, + "BigIntPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("BigInt")).Encode(), + }, + "Bool": schema.ObjectField{ + Type: schema.NewNamedType("Boolean").Encode(), + }, + "BoolPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("Boolean")).Encode(), + }, + "Bytes": schema.ObjectField{ + Type: schema.NewNamedType("Bytes").Encode(), + }, + "BytesPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("Bytes")).Encode(), + }, + "CustomScalar": schema.ObjectField{ + Type: schema.NewNamedType("CommentString").Encode(), + }, + "CustomScalarPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("CommentString")).Encode(), + }, + "Enum": schema.ObjectField{ + Type: schema.NewNamedType("SomeEnum").Encode(), + }, + 
"EnumPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("SomeEnum")).Encode(), + }, + "Float32": schema.ObjectField{ + Type: schema.NewNamedType("Float32").Encode(), + }, + "Float32Ptr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("Float32")).Encode(), + }, + "Float64": schema.ObjectField{ + Type: schema.NewNamedType("Float64").Encode(), + }, + "Float64Ptr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("Float64")).Encode(), + }, + "Int": schema.ObjectField{ + Type: schema.NewNamedType("Int32").Encode(), + }, + "Int16": schema.ObjectField{ + Type: schema.NewNamedType("Int16").Encode(), + }, + "Int16Ptr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("Int16")).Encode(), + }, + "Int32": schema.ObjectField{ + Type: schema.NewNamedType("Int32").Encode(), + }, + "Int32Ptr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("Int32")).Encode(), + }, + "Int64": schema.ObjectField{ + Type: schema.NewNamedType("Int64").Encode(), + }, + "Int64Ptr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("Int64")).Encode(), + }, + "Int8": schema.ObjectField{ + Type: schema.NewNamedType("Int8").Encode(), + }, + "Int8Ptr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("Int8")).Encode(), + }, + "IntPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("Int32")).Encode(), + }, + "JSON": schema.ObjectField{ + Type: schema.NewNamedType("JSON").Encode(), + }, + "JSONPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("JSON")).Encode(), + }, + "Map": schema.ObjectField{ + Type: schema.NewNamedType("JSON").Encode(), + }, + "MapPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("JSON")).Encode(), + }, + "NamedArray": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("Author")).Encode(), + }, + "NamedArrayPtr": schema.ObjectField{ + 
Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Author"))).Encode(), + }, + "NamedObject": schema.ObjectField{ + Type: schema.NewNamedType("Author").Encode(), + }, + "NamedObjectPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("Author")).Encode(), + }, + "Object": schema.ObjectField{ + Type: schema.NewNamedType("GetTypesArgumentsObject").Encode(), + }, + "ObjectPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("GetTypesArgumentsObjectPtr")).Encode(), + }, + "PtrArrayBigInt": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("BigInt"))).Encode(), + }, + "PtrArrayBigIntPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("BigInt")))).Encode(), + }, + "PtrArrayBool": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Boolean"))).Encode(), + }, + "PtrArrayBoolPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Boolean")))).Encode(), + }, + "PtrArrayFloat32": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Float32"))).Encode(), + }, + "PtrArrayFloat32Ptr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Float32")))).Encode(), + }, + "PtrArrayFloat64": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Float64"))).Encode(), + }, + "PtrArrayFloat64Ptr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Float64")))).Encode(), + }, + "PtrArrayInt": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Int32"))).Encode(), + }, + "PtrArrayInt16": schema.ObjectField{ + Type: 
schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Int16"))).Encode(), + }, + "PtrArrayInt16Ptr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int16")))).Encode(), + }, + "PtrArrayInt32": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Int32"))).Encode(), + }, + "PtrArrayInt32Ptr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int32")))).Encode(), + }, + "PtrArrayInt64": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Int64"))).Encode(), + }, + "PtrArrayInt64Ptr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int64")))).Encode(), + }, + "PtrArrayInt8": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Int8"))).Encode(), + }, + "PtrArrayInt8Ptr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int8")))).Encode(), + }, + "PtrArrayIntPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int32")))).Encode(), + }, + "PtrArrayJSON": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("JSON"))).Encode(), + }, + "PtrArrayJSONPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("JSON")))).Encode(), + }, + "PtrArrayRawJSON": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("RawJSON"))).Encode(), + }, + "PtrArrayRawJSONPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("RawJSON")))).Encode(), + }, + "PtrArrayString": schema.ObjectField{ + Type: 
schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("String"))).Encode(), + }, + "PtrArrayStringPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("String")))).Encode(), + }, + "PtrArrayTime": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("TimestampTZ"))).Encode(), + }, + "PtrArrayTimePtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("TimestampTZ")))).Encode(), + }, + "PtrArrayUUID": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("UUID"))).Encode(), + }, + "PtrArrayUUIDPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("UUID")))).Encode(), + }, + "PtrArrayUint": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Int32"))).Encode(), + }, + "PtrArrayUint16": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Int16"))).Encode(), + }, + "PtrArrayUint16Ptr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int16")))).Encode(), + }, + "PtrArrayUint32": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Int32"))).Encode(), + }, + "PtrArrayUint32Ptr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int32")))).Encode(), + }, + "PtrArrayUint64": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Int64"))).Encode(), + }, + "PtrArrayUint64Ptr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int64")))).Encode(), + }, + "PtrArrayUint8": schema.ObjectField{ + Type: 
schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Int8"))).Encode(), + }, + "PtrArrayUint8Ptr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int8")))).Encode(), + }, + "PtrArrayUintPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int32")))).Encode(), + }, + "RawJSON": schema.ObjectField{ + Type: schema.NewNamedType("RawJSON").Encode(), + }, + "RawJSONPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("RawJSON")).Encode(), + }, + "String": schema.ObjectField{ + Type: schema.NewNamedType("String").Encode(), + }, + "StringPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("String")).Encode(), + }, + "Text": schema.ObjectField{ + Type: schema.NewNamedType("String").Encode(), + }, + "TextPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("String")).Encode(), + }, + "Time": schema.ObjectField{ + Type: schema.NewNamedType("TimestampTZ").Encode(), + }, + "TimePtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("TimestampTZ")).Encode(), + }, + "URL": schema.ObjectField{ + Type: schema.NewNamedType("URL").Encode(), + }, + "UUID": schema.ObjectField{ + Type: schema.NewNamedType("UUID").Encode(), + }, + "UUIDArray": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("UUID")).Encode(), + }, + "UUIDPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("UUID")).Encode(), + }, + "Uint": schema.ObjectField{ + Type: schema.NewNamedType("Int32").Encode(), + }, + "Uint16": schema.ObjectField{ + Type: schema.NewNamedType("Int16").Encode(), + }, + "Uint16Ptr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("Int16")).Encode(), + }, + "Uint32": schema.ObjectField{ + Type: schema.NewNamedType("Int32").Encode(), + }, + "Uint32Ptr": schema.ObjectField{ + Type: 
schema.NewNullableType(schema.NewNamedType("Int32")).Encode(), + }, + "Uint64": schema.ObjectField{ + Type: schema.NewNamedType("Int64").Encode(), + }, + "Uint64Ptr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("Int64")).Encode(), + }, + "Uint8": schema.ObjectField{ + Type: schema.NewNamedType("Int8").Encode(), + }, + "Uint8Ptr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("Int8")).Encode(), + }, + "UintPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("Int32")).Encode(), + }, + }, + }, + "GetTypesArgumentsArrayObject": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "content": schema.ObjectField{ + Type: schema.NewNamedType("String").Encode(), + }, + }, + }, + "GetTypesArgumentsArrayObjectPtr": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "content": schema.ObjectField{ + Type: schema.NewNamedType("String").Encode(), + }, + }, + }, + "GetTypesArgumentsObject": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "created_at": schema.ObjectField{ + Type: schema.NewNamedType("TimestampTZ").Encode(), + }, + "id": schema.ObjectField{ + Type: schema.NewNamedType("UUID").Encode(), + }, + }, + }, + "GetTypesArgumentsObjectPtr": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "Lat": schema.ObjectField{ + Type: schema.NewNamedType("Int32").Encode(), + }, + "Long": schema.ObjectField{ + Type: schema.NewNamedType("Int32").Encode(), + }, + }, + }, + "HelloResult": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "error": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("JSON")).Encode(), + }, + "foo": schema.ObjectField{ + Type: schema.NewNamedType("Foo").Encode(), + }, + "id": schema.ObjectField{ + Type: schema.NewNamedType("UUID").Encode(), + }, + "num": schema.ObjectField{ + Type: schema.NewNamedType("Int32").Encode(), + }, + "text": schema.ObjectField{ + Type: schema.NewNamedType("String").Encode(), + }, + }, + }, + }, + Functions: 
[]schema.FunctionInfo{ + { + Name: "getBool", + Description: toPtr("return an scalar boolean"), + ResultType: schema.NewNamedType("Boolean").Encode(), + Arguments: map[string]schema.ArgumentInfo{ + }, + }, + { + Name: "getTypes", + ResultType: schema.NewNullableType(schema.NewNamedType("GetTypesArguments")).Encode(), + Arguments: map[string]schema.ArgumentInfo{ + "ArrayBigInt": { + Type: schema.NewArrayType(schema.NewNamedType("BigInt")).Encode(), + }, + "ArrayBigIntPtr": { + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("BigInt"))).Encode(), + }, + "ArrayBool": { + Type: schema.NewArrayType(schema.NewNamedType("Boolean")).Encode(), + }, + "ArrayBoolPtr": { + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Boolean"))).Encode(), + }, + "ArrayFloat32": { + Type: schema.NewArrayType(schema.NewNamedType("Float32")).Encode(), + }, + "ArrayFloat32Ptr": { + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Float32"))).Encode(), + }, + "ArrayFloat64": { + Type: schema.NewArrayType(schema.NewNamedType("Float64")).Encode(), + }, + "ArrayFloat64Ptr": { + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Float64"))).Encode(), + }, + "ArrayInt": { + Type: schema.NewArrayType(schema.NewNamedType("Int32")).Encode(), + }, + "ArrayInt16": { + Type: schema.NewArrayType(schema.NewNamedType("Int16")).Encode(), + }, + "ArrayInt16Ptr": { + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int16"))).Encode(), + }, + "ArrayInt32": { + Type: schema.NewArrayType(schema.NewNamedType("Int32")).Encode(), + }, + "ArrayInt32Ptr": { + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int32"))).Encode(), + }, + "ArrayInt64": { + Type: schema.NewArrayType(schema.NewNamedType("Int64")).Encode(), + }, + "ArrayInt64Ptr": { + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int64"))).Encode(), + }, + "ArrayInt8": { + Type: 
schema.NewArrayType(schema.NewNamedType("Int8")).Encode(), + }, + "ArrayInt8Ptr": { + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int8"))).Encode(), + }, + "ArrayIntPtr": { + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int32"))).Encode(), + }, + "ArrayJSON": { + Type: schema.NewArrayType(schema.NewNamedType("JSON")).Encode(), + }, + "ArrayJSONPtr": { + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("JSON"))).Encode(), + }, + "ArrayMap": { + Type: schema.NewArrayType(schema.NewNamedType("JSON")).Encode(), + }, + "ArrayMapPtr": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("JSON"))).Encode(), + }, + "ArrayObject": { + Type: schema.NewArrayType(schema.NewNamedType("GetTypesArgumentsArrayObject")).Encode(), + }, + "ArrayObjectPtr": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("GetTypesArgumentsArrayObjectPtr"))).Encode(), + }, + "ArrayRawJSON": { + Type: schema.NewArrayType(schema.NewNamedType("RawJSON")).Encode(), + }, + "ArrayRawJSONPtr": { + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("RawJSON"))).Encode(), + }, + "ArrayString": { + Type: schema.NewArrayType(schema.NewNamedType("String")).Encode(), + }, + "ArrayStringPtr": { + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("String"))).Encode(), + }, + "ArrayTime": { + Type: schema.NewArrayType(schema.NewNamedType("TimestampTZ")).Encode(), + }, + "ArrayTimePtr": { + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("TimestampTZ"))).Encode(), + }, + "ArrayUUID": { + Type: schema.NewArrayType(schema.NewNamedType("UUID")).Encode(), + }, + "ArrayUUIDPtr": { + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("UUID"))).Encode(), + }, + "ArrayUint": { + Type: schema.NewArrayType(schema.NewNamedType("Int32")).Encode(), + }, + "ArrayUint16": { + Type: schema.NewArrayType(schema.NewNamedType("Int16")).Encode(), + }, 
+ "ArrayUint16Ptr": { + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int16"))).Encode(), + }, + "ArrayUint32": { + Type: schema.NewArrayType(schema.NewNamedType("Int32")).Encode(), + }, + "ArrayUint32Ptr": { + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int32"))).Encode(), + }, + "ArrayUint64": { + Type: schema.NewArrayType(schema.NewNamedType("Int64")).Encode(), + }, + "ArrayUint64Ptr": { + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int64"))).Encode(), + }, + "ArrayUint8": { + Type: schema.NewArrayType(schema.NewNamedType("Int8")).Encode(), + }, + "ArrayUint8Ptr": { + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int8"))).Encode(), + }, + "ArrayUintPtr": { + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int32"))).Encode(), + }, + "BigInt": { + Type: schema.NewNamedType("BigInt").Encode(), + }, + "BigIntPtr": { + Type: schema.NewNullableType(schema.NewNamedType("BigInt")).Encode(), + }, + "Bool": { + Type: schema.NewNamedType("Boolean").Encode(), + }, + "BoolPtr": { + Type: schema.NewNullableType(schema.NewNamedType("Boolean")).Encode(), + }, + "Bytes": { + Type: schema.NewNamedType("Bytes").Encode(), + }, + "BytesPtr": { + Type: schema.NewNullableType(schema.NewNamedType("Bytes")).Encode(), + }, + "CustomScalar": { + Type: schema.NewNamedType("CommentString").Encode(), + }, + "CustomScalarPtr": { + Type: schema.NewNullableType(schema.NewNamedType("CommentString")).Encode(), + }, + "Enum": { + Type: schema.NewNamedType("SomeEnum").Encode(), + }, + "EnumPtr": { + Type: schema.NewNullableType(schema.NewNamedType("SomeEnum")).Encode(), + }, + "Float32": { + Type: schema.NewNamedType("Float32").Encode(), + }, + "Float32Ptr": { + Type: schema.NewNullableType(schema.NewNamedType("Float32")).Encode(), + }, + "Float64": { + Type: schema.NewNamedType("Float64").Encode(), + }, + "Float64Ptr": { + Type: 
schema.NewNullableType(schema.NewNamedType("Float64")).Encode(), + }, + "Int": { + Type: schema.NewNamedType("Int32").Encode(), + }, + "Int16": { + Type: schema.NewNamedType("Int16").Encode(), + }, + "Int16Ptr": { + Type: schema.NewNullableType(schema.NewNamedType("Int16")).Encode(), + }, + "Int32": { + Type: schema.NewNamedType("Int32").Encode(), + }, + "Int32Ptr": { + Type: schema.NewNullableType(schema.NewNamedType("Int32")).Encode(), + }, + "Int64": { + Type: schema.NewNamedType("Int64").Encode(), + }, + "Int64Ptr": { + Type: schema.NewNullableType(schema.NewNamedType("Int64")).Encode(), + }, + "Int8": { + Type: schema.NewNamedType("Int8").Encode(), + }, + "Int8Ptr": { + Type: schema.NewNullableType(schema.NewNamedType("Int8")).Encode(), + }, + "IntPtr": { + Type: schema.NewNullableType(schema.NewNamedType("Int32")).Encode(), + }, + "JSON": { + Type: schema.NewNamedType("JSON").Encode(), + }, + "JSONPtr": { + Type: schema.NewNullableType(schema.NewNamedType("JSON")).Encode(), + }, + "Map": { + Type: schema.NewNamedType("JSON").Encode(), + }, + "MapPtr": { + Type: schema.NewNullableType(schema.NewNamedType("JSON")).Encode(), + }, + "NamedArray": { + Type: schema.NewArrayType(schema.NewNamedType("Author")).Encode(), + }, + "NamedArrayPtr": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Author"))).Encode(), + }, + "NamedObject": { + Type: schema.NewNamedType("Author").Encode(), + }, + "NamedObjectPtr": { + Type: schema.NewNullableType(schema.NewNamedType("Author")).Encode(), + }, + "Object": { + Type: schema.NewNamedType("GetTypesArgumentsObject").Encode(), + }, + "ObjectPtr": { + Type: schema.NewNullableType(schema.NewNamedType("GetTypesArgumentsObjectPtr")).Encode(), + }, + "PtrArrayBigInt": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("BigInt"))).Encode(), + }, + "PtrArrayBigIntPtr": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("BigInt")))).Encode(), + }, + 
"PtrArrayBool": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Boolean"))).Encode(), + }, + "PtrArrayBoolPtr": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Boolean")))).Encode(), + }, + "PtrArrayFloat32": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Float32"))).Encode(), + }, + "PtrArrayFloat32Ptr": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Float32")))).Encode(), + }, + "PtrArrayFloat64": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Float64"))).Encode(), + }, + "PtrArrayFloat64Ptr": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Float64")))).Encode(), + }, + "PtrArrayInt": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Int32"))).Encode(), + }, + "PtrArrayInt16": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Int16"))).Encode(), + }, + "PtrArrayInt16Ptr": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int16")))).Encode(), + }, + "PtrArrayInt32": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Int32"))).Encode(), + }, + "PtrArrayInt32Ptr": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int32")))).Encode(), + }, + "PtrArrayInt64": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Int64"))).Encode(), + }, + "PtrArrayInt64Ptr": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int64")))).Encode(), + }, + "PtrArrayInt8": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Int8"))).Encode(), + }, + "PtrArrayInt8Ptr": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int8")))).Encode(), + }, + "PtrArrayIntPtr": { + Type: 
schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int32")))).Encode(), + }, + "PtrArrayJSON": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("JSON"))).Encode(), + }, + "PtrArrayJSONPtr": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("JSON")))).Encode(), + }, + "PtrArrayRawJSON": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("RawJSON"))).Encode(), + }, + "PtrArrayRawJSONPtr": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("RawJSON")))).Encode(), + }, + "PtrArrayString": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("String"))).Encode(), + }, + "PtrArrayStringPtr": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("String")))).Encode(), + }, + "PtrArrayTime": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("TimestampTZ"))).Encode(), + }, + "PtrArrayTimePtr": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("TimestampTZ")))).Encode(), + }, + "PtrArrayUUID": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("UUID"))).Encode(), + }, + "PtrArrayUUIDPtr": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("UUID")))).Encode(), + }, + "PtrArrayUint": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Int32"))).Encode(), + }, + "PtrArrayUint16": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Int16"))).Encode(), + }, + "PtrArrayUint16Ptr": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int16")))).Encode(), + }, + "PtrArrayUint32": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Int32"))).Encode(), + }, + "PtrArrayUint32Ptr": { + Type: 
schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int32")))).Encode(), + }, + "PtrArrayUint64": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Int64"))).Encode(), + }, + "PtrArrayUint64Ptr": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int64")))).Encode(), + }, + "PtrArrayUint8": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Int8"))).Encode(), + }, + "PtrArrayUint8Ptr": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int8")))).Encode(), + }, + "PtrArrayUintPtr": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int32")))).Encode(), + }, + "RawJSON": { + Type: schema.NewNamedType("RawJSON").Encode(), + }, + "RawJSONPtr": { + Type: schema.NewNullableType(schema.NewNamedType("RawJSON")).Encode(), + }, + "String": { + Type: schema.NewNamedType("String").Encode(), + }, + "StringPtr": { + Type: schema.NewNullableType(schema.NewNamedType("String")).Encode(), + }, + "Text": { + Type: schema.NewNamedType("String").Encode(), + }, + "TextPtr": { + Type: schema.NewNullableType(schema.NewNamedType("String")).Encode(), + }, + "Time": { + Type: schema.NewNamedType("TimestampTZ").Encode(), + }, + "TimePtr": { + Type: schema.NewNullableType(schema.NewNamedType("TimestampTZ")).Encode(), + }, + "URL": { + Type: schema.NewNamedType("URL").Encode(), + }, + "UUID": { + Type: schema.NewNamedType("UUID").Encode(), + }, + "UUIDArray": { + Type: schema.NewArrayType(schema.NewNamedType("UUID")).Encode(), + }, + "UUIDPtr": { + Type: schema.NewNullableType(schema.NewNamedType("UUID")).Encode(), + }, + "Uint": { + Type: schema.NewNamedType("Int32").Encode(), + }, + "Uint16": { + Type: schema.NewNamedType("Int16").Encode(), + }, + "Uint16Ptr": { + Type: schema.NewNullableType(schema.NewNamedType("Int16")).Encode(), + }, + "Uint32": { + Type: 
schema.NewNamedType("Int32").Encode(), + }, + "Uint32Ptr": { + Type: schema.NewNullableType(schema.NewNamedType("Int32")).Encode(), + }, + "Uint64": { + Type: schema.NewNamedType("Int64").Encode(), + }, + "Uint64Ptr": { + Type: schema.NewNullableType(schema.NewNamedType("Int64")).Encode(), + }, + "Uint8": { + Type: schema.NewNamedType("Int8").Encode(), + }, + "Uint8Ptr": { + Type: schema.NewNullableType(schema.NewNamedType("Int8")).Encode(), + }, + "UintPtr": { + Type: schema.NewNullableType(schema.NewNamedType("Int32")).Encode(), + }, + }, + }, + { + Name: "hello", + Description: toPtr("sends a hello message"), + ResultType: schema.NewNullableType(schema.NewNamedType("HelloResult")).Encode(), + Arguments: map[string]schema.ArgumentInfo{ + }, + }, + { + Name: "getArticles", + Description: toPtr("GetArticles"), + ResultType: schema.NewArrayType(schema.NewNamedType("GetArticlesResult")).Encode(), + Arguments: map[string]schema.ArgumentInfo{ + "Limit": { + Type: schema.NewNamedType("Float64").Encode(), + }, + }, + }, + }, + Procedures: []schema.ProcedureInfo{ + { + Name: "create_article", + Description: toPtr("CreateArticle"), + ResultType: schema.NewNullableType(schema.NewNamedType("CreateArticleResult")).Encode(), + Arguments: map[string]schema.ArgumentInfo{ + "author": { + Type: schema.NewNamedType("CreateArticleArgumentsAuthor").Encode(), + }, + }, + }, + { + Name: "increase", + Description: toPtr("Increase"), + ResultType: schema.NewNamedType("Int32").Encode(), + Arguments: map[string]schema.ArgumentInfo{ + }, + }, + { + Name: "createAuthor", + Description: toPtr("creates an author"), + ResultType: schema.NewNullableType(schema.NewNamedType("CreateAuthorResult")).Encode(), + Arguments: map[string]schema.ArgumentInfo{ + "name": { + Type: schema.NewNamedType("String").Encode(), + }, + }, + }, + { + Name: "createAuthors", + Description: toPtr("creates a list of authors"), + ResultType: schema.NewArrayType(schema.NewNamedType("CreateAuthorResult")).Encode(), + 
Arguments: map[string]schema.ArgumentInfo{ + "names": { + Type: schema.NewArrayType(schema.NewNamedType("String")).Encode(), + }, + }, + }, + }, + ScalarTypes: schema.SchemaResponseScalarTypes{ + "BigInt": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationBigInteger().Encode(), + }, + "Boolean": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationBoolean().Encode(), + }, + "Bytes": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationBytes().Encode(), + }, + "CommentString": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationJSON().Encode(), + }, + "Float32": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationFloat32().Encode(), + }, + "Float64": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationFloat64().Encode(), + }, + "Foo": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationJSON().Encode(), + }, + "Int16": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: 
map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationInt16().Encode(), + }, + "Int32": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationInt32().Encode(), + }, + "Int64": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationInt64().Encode(), + }, + "Int8": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationInt8().Encode(), + }, + "JSON": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationJSON().Encode(), + }, + "RawJSON": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationJSON().Encode(), + }, + "SomeEnum": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationEnum([]string{"foo", "bar"}).Encode(), + }, + "String": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationString().Encode(), + }, + "TimestampTZ": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: 
schema.NewTypeRepresentationTimestampTZ().Encode(), + }, + "URL": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationString().Encode(), + }, + "UUID": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationUUID().Encode(), + }, + }, + } +} \ No newline at end of file diff --git a/cmd/hasura-ndc-go/command/internal/testdata/empty/expected/connector.go.tmpl b/cmd/hasura-ndc-go/command/internal/testdata/empty/expected/connector.go.tmpl index 56f57a8..364fe2a 100644 --- a/cmd/hasura-ndc-go/command/internal/testdata/empty/expected/connector.go.tmpl +++ b/cmd/hasura-ndc-go/command/internal/testdata/empty/expected/connector.go.tmpl @@ -1,4 +1,4 @@ -// Code generated by github.com/hasura/ndc-sdk-go/codegen, DO NOT EDIT. +// Code generated by github.com/hasura/ndc-sdk-go/cmd/hasura-ndc-go, DO NOT EDIT. 
package main import ( @@ -7,9 +7,7 @@ import ( "fmt" "log/slog" - "github.com/hasura/ndc-codegen-empty-test/types" - "github.com/hasura/ndc-sdk-go/connector" "github.com/hasura/ndc-sdk-go/schema" "github.com/hasura/ndc-sdk-go/utils" "go.opentelemetry.io/otel/attribute" @@ -17,10 +15,23 @@ import ( "go.opentelemetry.io/otel/trace" ) -//go:embed schema.generated.json -var rawSchema []byte var schemaResponse *schema.RawSchemaResponse +var connectorQueryHandlers = []ConnectorQueryHandler{} +var connectorMutationHandlers = []ConnectorMutationHandler{} +// ConnectorQueryHandler abstracts the connector query handler +type ConnectorQueryHandler interface { + Query(ctx context.Context, state *types.State, request *schema.QueryRequest, arguments map[string]any) (*schema.RowSet, error) +} + +// ConnectorMutationHandler abstracts the connector mutation handler +type ConnectorMutationHandler interface { + Mutation(ctx context.Context, state *types.State, request *schema.MutationOperation) (schema.MutationOperationResults, error) +} + + +//go:embed schema.generated.json +var rawSchema []byte func init() { var err error schemaResponse, err = schema.NewRawSchemaResponse(rawSchema) @@ -29,6 +40,7 @@ func init() { } } + // GetSchema gets the connector's schema. func (c *Connector) GetSchema(ctx context.Context, configuration *types.Configuration, _ *types.State) (schema.SchemaResponseMarshaler, error) { return schemaResponse, nil @@ -36,21 +48,20 @@ func (c *Connector) GetSchema(ctx context.Context, configuration *types.Configur // Query executes a query. 
func (c *Connector) Query(ctx context.Context, configuration *types.Configuration, state *types.State, request *schema.QueryRequest) (schema.QueryResponse, error) { - valueField, err := utils.EvalFunctionSelectionFieldValue(request) - if err != nil { - return nil, schema.UnprocessableContentError(err.Error(), nil) - } + if len(connectorQueryHandlers) == 0 { + return nil, schema.UnprocessableContentError(fmt.Sprintf("unsupported query: %s", request.Collection), nil) + } - span := trace.SpanFromContext(ctx) - requestVars := request.Variables + span := trace.SpanFromContext(ctx) + requestVars := request.Variables varsLength := len(requestVars) - if varsLength == 0 { - requestVars = []schema.QueryRequestVariablesElem{make(schema.QueryRequestVariablesElem)} - varsLength = 1 - } + if varsLength == 0 { + requestVars = []schema.QueryRequestVariablesElem{make(schema.QueryRequestVariablesElem)} + varsLength = 1 + } - rowSets := make([]schema.RowSet, varsLength) - for i, requestVar := range requestVars { + rowSets := make([]schema.RowSet, varsLength) + for i, requestVar := range requestVars { childSpan := span childContext := ctx if varsLength > 1 { @@ -58,36 +69,55 @@ func (c *Connector) Query(ctx context.Context, configuration *types.Configuratio defer childSpan.End() } - result, err := execQuery(childContext, state, request, valueField, requestVar, childSpan) - if err != nil { + result, err := c.execQuery(childContext, state, request, requestVar) + if err != nil { if varsLength > 1 { childSpan.SetStatus(codes.Error, err.Error()) } - return nil, err - } - rowSets[i] = schema.RowSet{ - Aggregates: schema.RowSetAggregates{}, - Rows: []map[string]any{ - { - "__value": result, - }, - }, - } + return nil, err + } + rowSets[i] = *result + if varsLength > 1 { childSpan.End() } - } + } + + return rowSets, nil +} + +func (c *Connector) execQuery(ctx context.Context, state *types.State, request *schema.QueryRequest, variables map[string]any) (*schema.RowSet, error) { + rawArgs, err 
:= utils.ResolveArgumentVariables(request.Arguments, variables) + if err != nil { + return nil, schema.UnprocessableContentError("failed to resolve argument variables", map[string]any{ + "cause": err.Error(), + }) + } + + for _, handler := range connectorQueryHandlers { + result, err := handler.Query(ctx, state, request, rawArgs) + if err == nil { + return result, nil + } - return rowSets, nil + if err != utils.ErrHandlerNotfound { + return nil, err + } + } + return nil, schema.UnprocessableContentError(fmt.Sprintf("unsupported query: %s", request.Collection), nil) } // Mutation executes a mutation. func (c *Connector) Mutation(ctx context.Context, configuration *types.Configuration, state *types.State, request *schema.MutationRequest) (*schema.MutationResponse, error) { - operationLen := len(request.Operations) - operationResults := make([]schema.MutationOperationResults, operationLen) - span := trace.SpanFromContext(ctx) + if len(connectorMutationHandlers) == 0 { + return nil, schema.UnprocessableContentError("unsupported mutation", nil) + } - for i, operation := range request.Operations { + operationLen := len(request.Operations) + operationResults := make([]schema.MutationOperationResults, operationLen) + span := trace.SpanFromContext(ctx) + + for i, operation := range request.Operations { childSpan := span childContext := ctx if operationLen > 1 { @@ -99,59 +129,45 @@ func (c *Connector) Mutation(ctx context.Context, configuration *types.Configura attribute.String("operation.name", string(operation.Name)), ) - switch operation.Type { - case schema.MutationOperationProcedure: - result, err := execProcedure(childContext, state, &operation, childSpan) - if err != nil { + switch operation.Type { + case schema.MutationOperationProcedure: + result, err := c.execProcedure(childContext, state, &operation) + if err != nil { if operationLen > 1 { childSpan.SetStatus(codes.Error, err.Error()) } - return nil, err - } - operationResults[i] = result + return nil, err + } + 
operationResults[i] = result if operationLen > 1 { childSpan.End() } - default: - return nil, schema.UnprocessableContentError(fmt.Sprintf("invalid operation type: %s", operation.Type), nil) - } - } - - return &schema.MutationResponse{ - OperationResults: operationResults, - }, nil -} - -func execQuery(ctx context.Context, state *types.State, request *schema.QueryRequest, queryFields schema.NestedField, variables map[string]any, span trace.Span) (any, error) { - logger := connector.GetLogger(ctx) - connector_addSpanEvent(span, logger, "validate_request", map[string]any{ - "variables": variables, - }) - switch request.Collection { + default: + return nil, schema.UnprocessableContentError(fmt.Sprintf("invalid operation type: %s", operation.Type), nil) + } + } - default: - return nil, schema.UnprocessableContentError(fmt.Sprintf("unsupported query: %s", request.Collection), nil) - } + return &schema.MutationResponse{ + OperationResults: operationResults, + }, nil } -func execProcedure(ctx context.Context, state *types.State, operation *schema.MutationOperation, span trace.Span) (schema.MutationOperationResults, error) { - logger := connector.GetLogger(ctx) - connector_addSpanEvent(span, logger, "validate_request", map[string]any{ - "operations_name": operation.Name, - }) - switch operation.Name { +func (c *Connector) execProcedure(ctx context.Context, state *types.State, operation *schema.MutationOperation) (schema.MutationOperationResults, error) { + for _, handler := range connectorMutationHandlers { + result, err := handler.Mutation(ctx, state, operation) + if err == nil { + return result, nil + } + if err != utils.ErrHandlerNotfound { + return nil, err + } + } - default: - return nil, schema.UnprocessableContentError(fmt.Sprintf("unsupported procedure operation: %s", operation.Name), nil) - } + return nil, schema.UnprocessableContentError(fmt.Sprintf("unsupported procedure operation: %s", operation.Name), nil) } func connector_addSpanEvent(span trace.Span, logger 
*slog.Logger, name string, data map[string]any, options ...trace.EventOption) { logger.Debug(name, slog.Any("data", data)) - attrs := utils.DebugJSONAttributes(data, connector_isDebug(logger)) + attrs := utils.DebugJSONAttributes(data, utils.IsDebug(logger)) span.AddEvent(name, append(options, trace.WithAttributes(attrs...))...) -} - -func connector_isDebug(logger *slog.Logger) bool { - return logger.Enabled(context.TODO(), slog.LevelDebug) } \ No newline at end of file diff --git a/cmd/hasura-ndc-go/command/internal/testdata/snake_case/expected/connector.go.tmpl b/cmd/hasura-ndc-go/command/internal/testdata/snake_case/expected/connector.go.tmpl index 1ebb31e..85fa006 100644 --- a/cmd/hasura-ndc-go/command/internal/testdata/snake_case/expected/connector.go.tmpl +++ b/cmd/hasura-ndc-go/command/internal/testdata/snake_case/expected/connector.go.tmpl @@ -1,4 +1,4 @@ -// Code generated by github.com/hasura/ndc-sdk-go/codegen, DO NOT EDIT. +// Code generated by github.com/hasura/ndc-sdk-go/cmd/hasura-ndc-go, DO NOT EDIT. 
package main import ( @@ -7,10 +7,8 @@ import ( "fmt" "log/slog" - "encoding/json" -"github.com/hasura/ndc-codegen-test-snake-case/functions" + "github.com/hasura/ndc-codegen-test-snake-case/functions" "github.com/hasura/ndc-codegen-test-snake-case/types" - "github.com/hasura/ndc-sdk-go/connector" "github.com/hasura/ndc-sdk-go/schema" "github.com/hasura/ndc-sdk-go/utils" "go.opentelemetry.io/otel/attribute" @@ -18,10 +16,23 @@ import ( "go.opentelemetry.io/otel/trace" ) -//go:embed schema.generated.json -var rawSchema []byte var schemaResponse *schema.RawSchemaResponse +var connectorQueryHandlers = []ConnectorQueryHandler{functions.DataConnectorHandler{}} +var connectorMutationHandlers = []ConnectorMutationHandler{functions.DataConnectorHandler{}} + +// ConnectorQueryHandler abstracts the connector query handler +type ConnectorQueryHandler interface { + Query(ctx context.Context, state *types.State, request *schema.QueryRequest, arguments map[string]any) (*schema.RowSet, error) +} +// ConnectorMutationHandler abstracts the connector mutation handler +type ConnectorMutationHandler interface { + Mutation(ctx context.Context, state *types.State, request *schema.MutationOperation) (schema.MutationOperationResults, error) +} + + +//go:embed schema.generated.json +var rawSchema []byte func init() { var err error schemaResponse, err = schema.NewRawSchemaResponse(rawSchema) @@ -30,6 +41,7 @@ func init() { } } + // GetSchema gets the connector's schema. func (c *Connector) GetSchema(ctx context.Context, configuration *types.Configuration, _ *types.State) (schema.SchemaResponseMarshaler, error) { return schemaResponse, nil @@ -37,21 +49,20 @@ func (c *Connector) GetSchema(ctx context.Context, configuration *types.Configur // Query executes a query. 
func (c *Connector) Query(ctx context.Context, configuration *types.Configuration, state *types.State, request *schema.QueryRequest) (schema.QueryResponse, error) { - valueField, err := utils.EvalFunctionSelectionFieldValue(request) - if err != nil { - return nil, schema.UnprocessableContentError(err.Error(), nil) - } + if len(connectorQueryHandlers) == 0 { + return nil, schema.UnprocessableContentError(fmt.Sprintf("unsupported query: %s", request.Collection), nil) + } - span := trace.SpanFromContext(ctx) - requestVars := request.Variables + span := trace.SpanFromContext(ctx) + requestVars := request.Variables varsLength := len(requestVars) - if varsLength == 0 { - requestVars = []schema.QueryRequestVariablesElem{make(schema.QueryRequestVariablesElem)} - varsLength = 1 - } + if varsLength == 0 { + requestVars = []schema.QueryRequestVariablesElem{make(schema.QueryRequestVariablesElem)} + varsLength = 1 + } - rowSets := make([]schema.RowSet, varsLength) - for i, requestVar := range requestVars { + rowSets := make([]schema.RowSet, varsLength) + for i, requestVar := range requestVars { childSpan := span childContext := ctx if varsLength > 1 { @@ -59,36 +70,55 @@ func (c *Connector) Query(ctx context.Context, configuration *types.Configuratio defer childSpan.End() } - result, err := execQuery(childContext, state, request, valueField, requestVar, childSpan) - if err != nil { + result, err := c.execQuery(childContext, state, request, requestVar) + if err != nil { if varsLength > 1 { childSpan.SetStatus(codes.Error, err.Error()) } - return nil, err - } - rowSets[i] = schema.RowSet{ - Aggregates: schema.RowSetAggregates{}, - Rows: []map[string]any{ - { - "__value": result, - }, - }, - } + return nil, err + } + rowSets[i] = *result + if varsLength > 1 { childSpan.End() } - } + } + + return rowSets, nil +} + +func (c *Connector) execQuery(ctx context.Context, state *types.State, request *schema.QueryRequest, variables map[string]any) (*schema.RowSet, error) { + rawArgs, err 
:= utils.ResolveArgumentVariables(request.Arguments, variables) + if err != nil { + return nil, schema.UnprocessableContentError("failed to resolve argument variables", map[string]any{ + "cause": err.Error(), + }) + } + + for _, handler := range connectorQueryHandlers { + result, err := handler.Query(ctx, state, request, rawArgs) + if err == nil { + return result, nil + } - return rowSets, nil + if err != utils.ErrHandlerNotfound { + return nil, err + } + } + return nil, schema.UnprocessableContentError(fmt.Sprintf("unsupported query: %s", request.Collection), nil) } // Mutation executes a mutation. func (c *Connector) Mutation(ctx context.Context, configuration *types.Configuration, state *types.State, request *schema.MutationRequest) (*schema.MutationResponse, error) { - operationLen := len(request.Operations) - operationResults := make([]schema.MutationOperationResults, operationLen) - span := trace.SpanFromContext(ctx) + if len(connectorMutationHandlers) == 0 { + return nil, schema.UnprocessableContentError("unsupported mutation", nil) + } - for i, operation := range request.Operations { + operationLen := len(request.Operations) + operationResults := make([]schema.MutationOperationResults, operationLen) + span := trace.SpanFromContext(ctx) + + for i, operation := range request.Operations { childSpan := span childContext := ctx if operationLen > 1 { @@ -100,285 +130,45 @@ func (c *Connector) Mutation(ctx context.Context, configuration *types.Configura attribute.String("operation.name", string(operation.Name)), ) - switch operation.Type { - case schema.MutationOperationProcedure: - result, err := execProcedure(childContext, state, &operation, childSpan) - if err != nil { + switch operation.Type { + case schema.MutationOperationProcedure: + result, err := c.execProcedure(childContext, state, &operation) + if err != nil { if operationLen > 1 { childSpan.SetStatus(codes.Error, err.Error()) } - return nil, err - } - operationResults[i] = result + return nil, err + } 
+ operationResults[i] = result if operationLen > 1 { childSpan.End() } - default: - return nil, schema.UnprocessableContentError(fmt.Sprintf("invalid operation type: %s", operation.Type), nil) - } - } - - return &schema.MutationResponse{ - OperationResults: operationResults, - }, nil -} - -func execQuery(ctx context.Context, state *types.State, request *schema.QueryRequest, queryFields schema.NestedField, variables map[string]any, span trace.Span) (any, error) { - logger := connector.GetLogger(ctx) - connector_addSpanEvent(span, logger, "validate_request", map[string]any{ - "variables": variables, - }) - switch request.Collection { - case "get_bool": - if len(queryFields) > 0 { - return nil, schema.UnprocessableContentError("cannot evaluate selection fields for scalar", nil) - } - return functions.FunctionGetBool(ctx, state) - case "get_types": - selection, err := queryFields.AsObject() - if err != nil { - return nil, schema.UnprocessableContentError("the selection field type must be object", map[string]any{ - "cause": err.Error(), - }) - } - rawArgs, err := utils.ResolveArgumentVariables(request.Arguments, variables) - if err != nil { - return nil, schema.UnprocessableContentError("failed to resolve argument variables", map[string]any{ - "cause": err.Error(), - }) - } - - connector_addSpanEvent(span, logger, "resolve_arguments", map[string]any{ - "raw_arguments": rawArgs, - }) - - var args functions.GetTypesArguments - if err = args.FromValue(rawArgs); err != nil { - return nil, schema.UnprocessableContentError("failed to resolve arguments", map[string]any{ - "cause": err.Error(), - }) - } - - connector_addSpanEvent(span, logger, "execute_function", map[string]any{ - "arguments": args, - }) - rawResult, err := functions.FunctionGetTypes(ctx, state, &args) - if err != nil { - return nil, err - } - - if rawResult == nil { - return nil, nil - } - - connector_addSpanEvent(span, logger, "evaluate_response_selection", map[string]any{ - "raw_result": rawResult, - }) - 
result, err := utils.EvalNestedColumnObject(selection, rawResult) - if err != nil { - return nil, err - } - return result, nil - case "hello": - selection, err := queryFields.AsObject() - if err != nil { - return nil, schema.UnprocessableContentError("the selection field type must be object", map[string]any{ - "cause": err.Error(), - }) - } - rawResult, err := functions.FunctionHello(ctx, state) - if err != nil { - return nil, err - } - - if rawResult == nil { - return nil, nil - } - - connector_addSpanEvent(span, logger, "evaluate_response_selection", map[string]any{ - "raw_result": rawResult, - }) - result, err := utils.EvalNestedColumnObject(selection, rawResult) - if err != nil { - return nil, err - } - return result, nil - case "get_articles": - selection, err := queryFields.AsArray() - if err != nil { - return nil, schema.UnprocessableContentError("the selection field type must be array", map[string]any{ - "cause": err.Error(), - }) - } - rawArgs, err := utils.ResolveArgumentVariables(request.Arguments, variables) - if err != nil { - return nil, schema.UnprocessableContentError("failed to resolve argument variables", map[string]any{ - "cause": err.Error(), - }) - } - - connector_addSpanEvent(span, logger, "resolve_arguments", map[string]any{ - "raw_arguments": rawArgs, - }) - - var args functions.GetArticlesArguments - if err = args.FromValue(rawArgs); err != nil { - return nil, schema.UnprocessableContentError("failed to resolve arguments", map[string]any{ - "cause": err.Error(), - }) - } - - connector_addSpanEvent(span, logger, "execute_function", map[string]any{ - "arguments": args, - }) - rawResult, err := functions.GetArticles(ctx, state, &args) - if err != nil { - return nil, err - } - - if rawResult == nil { - return nil, schema.UnprocessableContentError("expected not null result", nil) - } - - connector_addSpanEvent(span, logger, "evaluate_response_selection", map[string]any{ - "raw_result": rawResult, - }) - result, err := 
utils.EvalNestedColumnArrayIntoSlice(selection, rawResult) - if err != nil { - return nil, err - } - return result, nil + default: + return nil, schema.UnprocessableContentError(fmt.Sprintf("invalid operation type: %s", operation.Type), nil) + } + } - default: - return nil, schema.UnprocessableContentError(fmt.Sprintf("unsupported query: %s", request.Collection), nil) - } + return &schema.MutationResponse{ + OperationResults: operationResults, + }, nil } -func execProcedure(ctx context.Context, state *types.State, operation *schema.MutationOperation, span trace.Span) (schema.MutationOperationResults, error) { - logger := connector.GetLogger(ctx) - connector_addSpanEvent(span, logger, "validate_request", map[string]any{ - "operations_name": operation.Name, - }) - switch operation.Name { - case "create_article": - selection, err := operation.Fields.AsObject() - if err != nil { - return nil, schema.UnprocessableContentError("the selection field type must be object", map[string]any{ - "cause": err.Error(), - }) - } - var args functions.CreateArticleArguments - if err := json.Unmarshal(operation.Arguments, &args); err != nil { - return nil, schema.UnprocessableContentError("failed to decode arguments", map[string]any{ - "cause": err.Error(), - }) - } - span.AddEvent("execute_procedure") - rawResult, err := functions.CreateArticle(ctx, state, &args) - - if err != nil { - return nil, err - } - - if rawResult == nil { - return nil, nil - } - connector_addSpanEvent(span, logger, "evaluate_response_selection", map[string]any{ - "raw_result": rawResult, - }) - result, err := utils.EvalNestedColumnObject(selection, rawResult) - - if err != nil { - return nil, err - } - return schema.NewProcedureResult(result).Encode(), nil - case "increase": - if len(operation.Fields) > 0 { - return nil, schema.UnprocessableContentError("cannot evaluate selection fields for scalar", nil) - } - span.AddEvent("execute_procedure") - var err error - result, err := functions.Increase(ctx, state) - 
if err != nil { - return nil, err - } - return schema.NewProcedureResult(result).Encode(), nil - case "create_author": - selection, err := operation.Fields.AsObject() - if err != nil { - return nil, schema.UnprocessableContentError("the selection field type must be object", map[string]any{ - "cause": err.Error(), - }) - } - var args functions.CreateAuthorArguments - if err := json.Unmarshal(operation.Arguments, &args); err != nil { - return nil, schema.UnprocessableContentError("failed to decode arguments", map[string]any{ - "cause": err.Error(), - }) - } - span.AddEvent("execute_procedure") - rawResult, err := functions.ProcedureCreateAuthor(ctx, state, &args) - - if err != nil { - return nil, err - } - - if rawResult == nil { - return nil, nil - } - connector_addSpanEvent(span, logger, "evaluate_response_selection", map[string]any{ - "raw_result": rawResult, - }) - result, err := utils.EvalNestedColumnObject(selection, rawResult) - - if err != nil { - return nil, err - } - return schema.NewProcedureResult(result).Encode(), nil - case "create_authors": - selection, err := operation.Fields.AsArray() - if err != nil { - return nil, schema.UnprocessableContentError("the selection field type must be array", map[string]any{ - "cause": err.Error(), - }) - } - var args functions.CreateAuthorsArguments - if err := json.Unmarshal(operation.Arguments, &args); err != nil { - return nil, schema.UnprocessableContentError("failed to decode arguments", map[string]any{ - "cause": err.Error(), - }) - } - span.AddEvent("execute_procedure") - rawResult, err := functions.ProcedureCreateAuthors(ctx, state, &args) - - if err != nil { - return nil, err - } - - if rawResult == nil { - return nil, schema.UnprocessableContentError("expected not null result", nil) - } - connector_addSpanEvent(span, logger, "evaluate_response_selection", map[string]any{ - "raw_result": rawResult, - }) - result, err := utils.EvalNestedColumnArrayIntoSlice(selection, rawResult) - - if err != nil { - return 
nil, err - } - return schema.NewProcedureResult(result).Encode(), nil +func (c *Connector) execProcedure(ctx context.Context, state *types.State, operation *schema.MutationOperation) (schema.MutationOperationResults, error) { + for _, handler := range connectorMutationHandlers { + result, err := handler.Mutation(ctx, state, operation) + if err == nil { + return result, nil + } + if err != utils.ErrHandlerNotfound { + return nil, err + } + } - default: - return nil, schema.UnprocessableContentError(fmt.Sprintf("unsupported procedure operation: %s", operation.Name), nil) - } + return nil, schema.UnprocessableContentError(fmt.Sprintf("unsupported procedure operation: %s", operation.Name), nil) } func connector_addSpanEvent(span trace.Span, logger *slog.Logger, name string, data map[string]any, options ...trace.EventOption) { logger.Debug(name, slog.Any("data", data)) - attrs := utils.DebugJSONAttributes(data, connector_isDebug(logger)) + attrs := utils.DebugJSONAttributes(data, utils.IsDebug(logger)) span.AddEvent(name, append(options, trace.WithAttributes(attrs...))...) -} - -func connector_isDebug(logger *slog.Logger) bool { - return logger.Enabled(context.TODO(), slog.LevelDebug) } \ No newline at end of file diff --git a/cmd/hasura-ndc-go/command/internal/testdata/snake_case/expected/functions.go.tmpl b/cmd/hasura-ndc-go/command/internal/testdata/snake_case/expected/functions.go.tmpl index eb6594f..06b4ec3 100644 --- a/cmd/hasura-ndc-go/command/internal/testdata/snake_case/expected/functions.go.tmpl +++ b/cmd/hasura-ndc-go/command/internal/testdata/snake_case/expected/functions.go.tmpl @@ -1,24 +1,52 @@ -// Code generated by github.com/hasura/ndc-sdk-go/codegen, DO NOT EDIT. +// Code generated by github.com/hasura/ndc-sdk-go/cmd/hasura-ndc-go, DO NOT EDIT. 
package functions import ( + "context" "encoding/json" "errors" "github.com/google/uuid" + "github.com/hasura/ndc-codegen-test-snake-case/types" + "github.com/hasura/ndc-sdk-go/connector" "github.com/hasura/ndc-sdk-go/scalar" + "github.com/hasura/ndc-sdk-go/schema" "github.com/hasura/ndc-sdk-go/utils" + "go.opentelemetry.io/otel/trace" + "log/slog" "slices" "time" ) -var functions_Decoder = utils.NewDecoder() +var connector_Decoder = utils.NewDecoder() +// FromValue decodes values from map +func (j *Author) FromValue(input map[string]any) error { + var err error + j.CreatedAt, err = utils.GetDateTime(input, "created_at") + if err != nil { + return err + } + j.ID, err = utils.GetString(input, "id") + if err != nil { + return err + } + return nil +} +// FromValue decodes values from map +func (j *GetArticlesArguments) FromValue(input map[string]any) error { + var err error + j.Limit, err = utils.GetFloat[float64](input, "Limit") + if err != nil { + return err + } + return nil +} // FromValue decodes values from map func (j *GetTypesArguments) FromValue(input map[string]any) error { var err error - err = functions_Decoder.DecodeObjectValue(&j.ArrayBigInt, input, "ArrayBigInt") + err = connector_Decoder.DecodeObjectValue(&j.ArrayBigInt, input, "ArrayBigInt") if err != nil { return err } - err = functions_Decoder.DecodeObjectValue(&j.ArrayBigIntPtr, input, "ArrayBigIntPtr") + err = connector_Decoder.DecodeObjectValue(&j.ArrayBigIntPtr, input, "ArrayBigIntPtr") if err != nil { return err } @@ -94,21 +122,21 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { if err != nil { return err } - err = functions_Decoder.DecodeObjectValue(&j.ArrayMap, input, "ArrayMap") + err = connector_Decoder.DecodeObjectValue(&j.ArrayMap, input, "ArrayMap") if err != nil { return err } j.ArrayMapPtr = new([]map[string]any) - err = functions_Decoder.DecodeNullableObjectValue(j.ArrayMapPtr, input, "ArrayMapPtr") + err = 
connector_Decoder.DecodeNullableObjectValue(j.ArrayMapPtr, input, "ArrayMapPtr") if err != nil { return err } - err = functions_Decoder.DecodeObjectValue(&j.ArrayObject, input, "ArrayObject") + err = connector_Decoder.DecodeObjectValue(&j.ArrayObject, input, "ArrayObject") if err != nil { return err } j.ArrayObjectPtr = new([]struct{Content string "json:\"content\""}) - err = functions_Decoder.DecodeNullableObjectValue(j.ArrayObjectPtr, input, "ArrayObjectPtr") + err = connector_Decoder.DecodeNullableObjectValue(j.ArrayObjectPtr, input, "ArrayObjectPtr") if err != nil { return err } @@ -184,12 +212,12 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { if err != nil { return err } - err = functions_Decoder.DecodeObjectValue(&j.BigInt, input, "BigInt") + err = connector_Decoder.DecodeObjectValue(&j.BigInt, input, "BigInt") if err != nil { return err } j.BigIntPtr = new(scalar.BigInt) - err = functions_Decoder.DecodeNullableObjectValue(j.BigIntPtr, input, "BigIntPtr") + err = connector_Decoder.DecodeNullableObjectValue(j.BigIntPtr, input, "BigIntPtr") if err != nil { return err } @@ -201,30 +229,30 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { if err != nil { return err } - err = functions_Decoder.DecodeObjectValue(&j.Bytes, input, "Bytes") + err = connector_Decoder.DecodeObjectValue(&j.Bytes, input, "Bytes") if err != nil { return err } j.BytesPtr = new(scalar.Bytes) - err = functions_Decoder.DecodeNullableObjectValue(j.BytesPtr, input, "BytesPtr") + err = connector_Decoder.DecodeNullableObjectValue(j.BytesPtr, input, "BytesPtr") if err != nil { return err } - err = functions_Decoder.DecodeObjectValue(&j.CustomScalar, input, "CustomScalar") + err = connector_Decoder.DecodeObjectValue(&j.CustomScalar, input, "CustomScalar") if err != nil { return err } j.CustomScalarPtr = new(CommentText) - err = functions_Decoder.DecodeNullableObjectValue(j.CustomScalarPtr, input, "CustomScalarPtr") + err = 
connector_Decoder.DecodeNullableObjectValue(j.CustomScalarPtr, input, "CustomScalarPtr") if err != nil { return err } - err = functions_Decoder.DecodeObjectValue(&j.Enum, input, "Enum") + err = connector_Decoder.DecodeObjectValue(&j.Enum, input, "Enum") if err != nil { return err } j.EnumPtr = new(SomeEnum) - err = functions_Decoder.DecodeNullableObjectValue(j.EnumPtr, input, "EnumPtr") + err = connector_Decoder.DecodeNullableObjectValue(j.EnumPtr, input, "EnumPtr") if err != nil { return err } @@ -292,54 +320,54 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { if err != nil { return err } - err = functions_Decoder.DecodeObjectValue(&j.Map, input, "Map") + err = connector_Decoder.DecodeObjectValue(&j.Map, input, "Map") if err != nil { return err } j.MapPtr = new(map[string]any) - err = functions_Decoder.DecodeNullableObjectValue(j.MapPtr, input, "MapPtr") + err = connector_Decoder.DecodeNullableObjectValue(j.MapPtr, input, "MapPtr") if err != nil { return err } - err = functions_Decoder.DecodeObjectValue(&j.NamedArray, input, "NamedArray") + err = connector_Decoder.DecodeObjectValue(&j.NamedArray, input, "NamedArray") if err != nil { return err } j.NamedArrayPtr = new([]Author) - err = functions_Decoder.DecodeNullableObjectValue(j.NamedArrayPtr, input, "NamedArrayPtr") + err = connector_Decoder.DecodeNullableObjectValue(j.NamedArrayPtr, input, "NamedArrayPtr") if err != nil { return err } - err = functions_Decoder.DecodeObjectValue(&j.NamedObject, input, "NamedObject") + err = connector_Decoder.DecodeObjectValue(&j.NamedObject, input, "NamedObject") if err != nil { return err } j.NamedObjectPtr = new(Author) - err = functions_Decoder.DecodeNullableObjectValue(j.NamedObjectPtr, input, "NamedObjectPtr") + err = connector_Decoder.DecodeNullableObjectValue(j.NamedObjectPtr, input, "NamedObjectPtr") if err != nil { return err } - err = functions_Decoder.DecodeObjectValue(&j.Object, input, "Object") + err = 
connector_Decoder.DecodeObjectValue(&j.Object, input, "Object") if err != nil { return err } j.ObjectPtr = new(struct{Long int; Lat int}) - err = functions_Decoder.DecodeNullableObjectValue(j.ObjectPtr, input, "ObjectPtr") + err = connector_Decoder.DecodeNullableObjectValue(j.ObjectPtr, input, "ObjectPtr") if err != nil { return err } j.PtrArrayBigInt = new([]scalar.BigInt) - err = functions_Decoder.DecodeNullableObjectValue(j.PtrArrayBigInt, input, "PtrArrayBigInt") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayBigInt, input, "PtrArrayBigInt") if err != nil { return err } j.PtrArrayBigIntPtr = new([]*scalar.BigInt) - err = functions_Decoder.DecodeNullableObjectValue(j.PtrArrayBigIntPtr, input, "PtrArrayBigIntPtr") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayBigIntPtr, input, "PtrArrayBigIntPtr") if err != nil { return err } j.PtrArrayBool = new([]bool) - err = functions_Decoder.DecodeNullableObjectValue(j.PtrArrayBool, input, "PtrArrayBool") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayBool, input, "PtrArrayBool") if err != nil { return err } @@ -348,7 +376,7 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { return err } j.PtrArrayFloat32 = new([]float32) - err = functions_Decoder.DecodeNullableObjectValue(j.PtrArrayFloat32, input, "PtrArrayFloat32") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayFloat32, input, "PtrArrayFloat32") if err != nil { return err } @@ -357,7 +385,7 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { return err } j.PtrArrayFloat64 = new([]float64) - err = functions_Decoder.DecodeNullableObjectValue(j.PtrArrayFloat64, input, "PtrArrayFloat64") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayFloat64, input, "PtrArrayFloat64") if err != nil { return err } @@ -366,12 +394,12 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { return err } j.PtrArrayInt = new([]int) - err = 
functions_Decoder.DecodeNullableObjectValue(j.PtrArrayInt, input, "PtrArrayInt") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayInt, input, "PtrArrayInt") if err != nil { return err } j.PtrArrayInt16 = new([]int16) - err = functions_Decoder.DecodeNullableObjectValue(j.PtrArrayInt16, input, "PtrArrayInt16") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayInt16, input, "PtrArrayInt16") if err != nil { return err } @@ -380,7 +408,7 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { return err } j.PtrArrayInt32 = new([]int32) - err = functions_Decoder.DecodeNullableObjectValue(j.PtrArrayInt32, input, "PtrArrayInt32") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayInt32, input, "PtrArrayInt32") if err != nil { return err } @@ -389,7 +417,7 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { return err } j.PtrArrayInt64 = new([]int64) - err = functions_Decoder.DecodeNullableObjectValue(j.PtrArrayInt64, input, "PtrArrayInt64") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayInt64, input, "PtrArrayInt64") if err != nil { return err } @@ -398,7 +426,7 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { return err } j.PtrArrayInt8 = new([]int8) - err = functions_Decoder.DecodeNullableObjectValue(j.PtrArrayInt8, input, "PtrArrayInt8") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayInt8, input, "PtrArrayInt8") if err != nil { return err } @@ -411,7 +439,7 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { return err } j.PtrArrayJSON = new([]any) - err = functions_Decoder.DecodeNullableObjectValue(j.PtrArrayJSON, input, "PtrArrayJSON") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayJSON, input, "PtrArrayJSON") if err != nil { return err } @@ -420,7 +448,7 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { return err } j.PtrArrayRawJSON = new([]json.RawMessage) - err = 
functions_Decoder.DecodeNullableObjectValue(j.PtrArrayRawJSON, input, "PtrArrayRawJSON") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayRawJSON, input, "PtrArrayRawJSON") if err != nil { return err } @@ -429,7 +457,7 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { return err } j.PtrArrayString = new([]string) - err = functions_Decoder.DecodeNullableObjectValue(j.PtrArrayString, input, "PtrArrayString") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayString, input, "PtrArrayString") if err != nil { return err } @@ -438,7 +466,7 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { return err } j.PtrArrayTime = new([]time.Time) - err = functions_Decoder.DecodeNullableObjectValue(j.PtrArrayTime, input, "PtrArrayTime") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayTime, input, "PtrArrayTime") if err != nil { return err } @@ -447,7 +475,7 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { return err } j.PtrArrayUUID = new([]uuid.UUID) - err = functions_Decoder.DecodeNullableObjectValue(j.PtrArrayUUID, input, "PtrArrayUUID") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayUUID, input, "PtrArrayUUID") if err != nil { return err } @@ -456,12 +484,12 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { return err } j.PtrArrayUint = new([]uint) - err = functions_Decoder.DecodeNullableObjectValue(j.PtrArrayUint, input, "PtrArrayUint") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayUint, input, "PtrArrayUint") if err != nil { return err } j.PtrArrayUint16 = new([]uint16) - err = functions_Decoder.DecodeNullableObjectValue(j.PtrArrayUint16, input, "PtrArrayUint16") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayUint16, input, "PtrArrayUint16") if err != nil { return err } @@ -470,7 +498,7 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { return err } j.PtrArrayUint32 = new([]uint32) - err = 
functions_Decoder.DecodeNullableObjectValue(j.PtrArrayUint32, input, "PtrArrayUint32") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayUint32, input, "PtrArrayUint32") if err != nil { return err } @@ -479,7 +507,7 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { return err } j.PtrArrayUint64 = new([]uint64) - err = functions_Decoder.DecodeNullableObjectValue(j.PtrArrayUint64, input, "PtrArrayUint64") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayUint64, input, "PtrArrayUint64") if err != nil { return err } @@ -488,7 +516,7 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { return err } j.PtrArrayUint8 = new([]uint8) - err = functions_Decoder.DecodeNullableObjectValue(j.PtrArrayUint8, input, "PtrArrayUint8") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayUint8, input, "PtrArrayUint8") if err != nil { return err } @@ -516,12 +544,12 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { if err != nil { return err } - err = functions_Decoder.DecodeObjectValue(&j.Text, input, "Text") + err = connector_Decoder.DecodeObjectValue(&j.Text, input, "Text") if err != nil { return err } j.TextPtr = new(Text) - err = functions_Decoder.DecodeNullableObjectValue(j.TextPtr, input, "TextPtr") + err = connector_Decoder.DecodeNullableObjectValue(j.TextPtr, input, "TextPtr") if err != nil { return err } @@ -533,7 +561,7 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { if err != nil { return err } - err = functions_Decoder.DecodeObjectValue(&j.URL, input, "URL") + err = connector_Decoder.DecodeObjectValue(&j.URL, input, "URL") if err != nil { return err } @@ -591,15 +619,6 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { } return nil } -// FromValue decodes values from map -func (j *GetArticlesArguments) FromValue(input map[string]any) error { - var err error - j.Limit, err = utils.GetFloat[float64](input, "Limit") - if err != nil { - return 
err - } - return nil -} // ToMap encodes the struct to a value map func (j Author) ToMap() map[string]any { r := make(map[string]any) @@ -614,7 +633,7 @@ func (j CreateArticleResult) ToMap() map[string]any { j_Authors := make([]any, len(j.Authors)) for i, j_Authors_v := range j.Authors { j_Authors[i] = j_Authors_v - } + } r["authors"] = j_Authors r["id"] = j.ID @@ -632,8 +651,8 @@ func (j CreateAuthorResult) ToMap() map[string]any { // ToMap encodes the struct to a value map func (j GetArticlesResult) ToMap() map[string]any { r := make(map[string]any) - r["id"] = j.ID r["Name"] = j.Name + r["id"] = j.ID return r } @@ -727,19 +746,19 @@ func (j GetTypesArguments) ToMap() map[string]any { j_NamedArray := make([]any, len(j.NamedArray)) for i, j_NamedArray_v := range j.NamedArray { j_NamedArray[i] = j_NamedArray_v - } + } r["NamedArray"] = j_NamedArray if j.NamedArrayPtr != nil { j_NamedArrayPtr := make([]any, len((*j.NamedArrayPtr))) for i, j_NamedArrayPtr_v := range (*j.NamedArrayPtr) { j_NamedArrayPtr[i] = j_NamedArrayPtr_v - } + } r["NamedArrayPtr"] = j_NamedArrayPtr } r["NamedObject"] = j.NamedObject - if j.NamedObjectPtr != nil { + if j.NamedObjectPtr != nil { r["NamedObjectPtr"] = (*j.NamedObjectPtr) - } + } j_Object_obj := make(map[string]any) j_Object_obj["created_at"] = j.Object.CreatedAt j_Object_obj["id"] = j.Object.ID @@ -893,3 +912,277 @@ func (s *SomeEnum) FromValue(value any) error { *s = result return nil } + +// DataConnectorHandler implements the data connector handler +type DataConnectorHandler struct{} + +// QueryExists check if the query name exists +func (dch DataConnectorHandler) QueryExists(name string) bool { + return slices.Contains(enumValues_FunctionName, name) +} +func (dch DataConnectorHandler) Query(ctx context.Context, state *types.State, request *schema.QueryRequest, rawArgs map[string]any) (*schema.RowSet, error) { + if !dch.QueryExists(request.Collection) { + return nil, utils.ErrHandlerNotfound + } + queryFields, err := 
utils.EvalFunctionSelectionFieldValue(request) + if err != nil { + return nil, schema.UnprocessableContentError(err.Error(), nil) + } + + result, err := dch.execQuery(ctx, state, request, queryFields, rawArgs) + if err != nil { + return nil, err + } + + return &schema.RowSet{ + Aggregates: schema.RowSetAggregates{}, + Rows: []map[string]any{ + { + "__value": result, + }, + }, + }, nil +} + +func (dch DataConnectorHandler) execQuery(ctx context.Context, state *types.State, request *schema.QueryRequest, queryFields schema.NestedField, rawArgs map[string]any) (any, error) { + span := trace.SpanFromContext(ctx) + logger := connector.GetLogger(ctx) + switch request.Collection { + case "get_bool": + + if len(queryFields) > 0 { + return nil, schema.UnprocessableContentError("cannot evaluate selection fields for scalar", nil) + } + return FunctionGetBool(ctx, state) + + case "get_types": + + selection, err := queryFields.AsObject() + if err != nil { + return nil, schema.UnprocessableContentError("the selection field type must be object", map[string]any{ + "cause": err.Error(), + }) + } + var args GetTypesArguments + if parseErr := args.FromValue(rawArgs); parseErr != nil { + return nil, schema.UnprocessableContentError("failed to resolve arguments", map[string]any{ + "cause": parseErr.Error(), + }) + } + + connector_addSpanEvent(span, logger, "execute_function", map[string]any{ + "arguments": args, + }) + rawResult, err := FunctionGetTypes(ctx, state, &args) + if err != nil { + return nil, err + } + + if rawResult == nil { + return nil, nil + } + + connector_addSpanEvent(span, logger, "evaluate_response_selection", map[string]any{ + "raw_result": rawResult, + }) + result, err := utils.EvalNestedColumnObject(selection, rawResult) + if err != nil { + return nil, err + } + return result, nil + + case "hello": + + selection, err := queryFields.AsObject() + if err != nil { + return nil, schema.UnprocessableContentError("the selection field type must be object", map[string]any{ 
+ "cause": err.Error(), + }) + } + rawResult, err := FunctionHello(ctx, state) + if err != nil { + return nil, err + } + + if rawResult == nil { + return nil, nil + } + + connector_addSpanEvent(span, logger, "evaluate_response_selection", map[string]any{ + "raw_result": rawResult, + }) + result, err := utils.EvalNestedColumnObject(selection, rawResult) + if err != nil { + return nil, err + } + return result, nil + + case "get_articles": + + selection, err := queryFields.AsArray() + if err != nil { + return nil, schema.UnprocessableContentError("the selection field type must be array", map[string]any{ + "cause": err.Error(), + }) + } + var args GetArticlesArguments + if parseErr := args.FromValue(rawArgs); parseErr != nil { + return nil, schema.UnprocessableContentError("failed to resolve arguments", map[string]any{ + "cause": parseErr.Error(), + }) + } + + connector_addSpanEvent(span, logger, "execute_function", map[string]any{ + "arguments": args, + }) + rawResult, err := GetArticles(ctx, state, &args) + if err != nil { + return nil, err + } + + connector_addSpanEvent(span, logger, "evaluate_response_selection", map[string]any{ + "raw_result": rawResult, + }) + result, err := utils.EvalNestedColumnArrayIntoSlice(selection, rawResult) + if err != nil { + return nil, err + } + return result, nil + + default: + return nil, utils.ErrHandlerNotfound + } +} +var enumValues_FunctionName = []string{"get_bool", "get_types", "hello", "get_articles"} +// MutationExists check if the mutation name exists +func (dch DataConnectorHandler) MutationExists(name string) bool { + return slices.Contains(enumValues_ProcedureName, name) +} +func (dch DataConnectorHandler) Mutation(ctx context.Context, state *types.State, operation *schema.MutationOperation) (schema.MutationOperationResults, error) { + span := trace.SpanFromContext(ctx) + logger := connector.GetLogger(ctx) + connector_addSpanEvent(span, logger, "validate_request", map[string]any{ + "operations_name": operation.Name, + }) 
+ + switch operation.Name { + case "create_article": + + selection, err := operation.Fields.AsObject() + if err != nil { + return nil, schema.UnprocessableContentError("the selection field type must be object", map[string]any{ + "cause": err.Error(), + }) + } + var args CreateArticleArguments + if err := json.Unmarshal(operation.Arguments, &args); err != nil { + return nil, schema.UnprocessableContentError("failed to decode arguments", map[string]any{ + "cause": err.Error(), + }) + } + span.AddEvent("execute_procedure") + rawResult, err := CreateArticle(ctx, state, &args) + + if err != nil { + return nil, err + } + + if rawResult == nil { + return nil, nil + } + connector_addSpanEvent(span, logger, "evaluate_response_selection", map[string]any{ + "raw_result": rawResult, + }) + result, err := utils.EvalNestedColumnObject(selection, rawResult) + + if err != nil { + return nil, err + } + return schema.NewProcedureResult(result).Encode(), nil + + case "increase": + + if len(operation.Fields) > 0 { + return nil, schema.UnprocessableContentError("cannot evaluate selection fields for scalar", nil) + } + span.AddEvent("execute_procedure") + result, err := Increase(ctx, state) + if err != nil { + return nil, err + } + return schema.NewProcedureResult(result).Encode(), nil + + case "create_author": + + selection, err := operation.Fields.AsObject() + if err != nil { + return nil, schema.UnprocessableContentError("the selection field type must be object", map[string]any{ + "cause": err.Error(), + }) + } + var args CreateAuthorArguments + if err := json.Unmarshal(operation.Arguments, &args); err != nil { + return nil, schema.UnprocessableContentError("failed to decode arguments", map[string]any{ + "cause": err.Error(), + }) + } + span.AddEvent("execute_procedure") + rawResult, err := ProcedureCreateAuthor(ctx, state, &args) + + if err != nil { + return nil, err + } + + if rawResult == nil { + return nil, nil + } + connector_addSpanEvent(span, logger, 
"evaluate_response_selection", map[string]any{ + "raw_result": rawResult, + }) + result, err := utils.EvalNestedColumnObject(selection, rawResult) + + if err != nil { + return nil, err + } + return schema.NewProcedureResult(result).Encode(), nil + + case "create_authors": + + selection, err := operation.Fields.AsArray() + if err != nil { + return nil, schema.UnprocessableContentError("the selection field type must be array", map[string]any{ + "cause": err.Error(), + }) + } + var args CreateAuthorsArguments + if err := json.Unmarshal(operation.Arguments, &args); err != nil { + return nil, schema.UnprocessableContentError("failed to decode arguments", map[string]any{ + "cause": err.Error(), + }) + } + span.AddEvent("execute_procedure") + rawResult, err := ProcedureCreateAuthors(ctx, state, &args) + + if err != nil { + return nil, err + } + connector_addSpanEvent(span, logger, "evaluate_response_selection", map[string]any{ + "raw_result": rawResult, + }) + result, err := utils.EvalNestedColumnArrayIntoSlice(selection, rawResult) + + if err != nil { + return nil, err + } + return schema.NewProcedureResult(result).Encode(), nil + + default: + return nil, utils.ErrHandlerNotfound + } +} +var enumValues_ProcedureName = []string{"create_article", "increase", "create_author", "create_authors"} +func connector_addSpanEvent(span trace.Span, logger *slog.Logger, name string, data map[string]any, options ...trace.EventOption) { + logger.Debug(name, slog.Any("data", data)) + attrs := utils.DebugJSONAttributes(data, utils.IsDebug(logger)) + span.AddEvent(name, append(options, trace.WithAttributes(attrs...))...) 
+} \ No newline at end of file diff --git a/cmd/hasura-ndc-go/command/internal/testdata/subdir/expected/connector.go.tmpl b/cmd/hasura-ndc-go/command/internal/testdata/subdir/expected/connector.go.tmpl index d6eec6b..cf924df 100644 --- a/cmd/hasura-ndc-go/command/internal/testdata/subdir/expected/connector.go.tmpl +++ b/cmd/hasura-ndc-go/command/internal/testdata/subdir/expected/connector.go.tmpl @@ -1,5 +1,5 @@ -// Code generated by github.com/hasura/ndc-sdk-go/codegen, DO NOT EDIT. -package main +// Code generated by github.com/hasura/ndc-sdk-go/cmd/hasura-ndc-go, DO NOT EDIT. +package connector import ( "context" @@ -8,8 +8,7 @@ import ( "log/slog" "github.com/hasura/ndc-codegen-subdir-test/connector/functions" - "github.com/hasura/ndc-codegen-subdir-test/connector/types" - "github.com/hasura/ndc-sdk-go/connector" +"github.com/hasura/ndc-codegen-subdir-test/connector/types" "github.com/hasura/ndc-sdk-go/schema" "github.com/hasura/ndc-sdk-go/utils" "go.opentelemetry.io/otel/attribute" @@ -17,10 +16,23 @@ import ( "go.opentelemetry.io/otel/trace" ) -//go:embed schema.generated.json -var rawSchema []byte var schemaResponse *schema.RawSchemaResponse +var connectorQueryHandlers = []ConnectorQueryHandler{functions.DataConnectorHandler{}} +var connectorMutationHandlers = []ConnectorMutationHandler{} + +// ConnectorQueryHandler abstracts the connector query handler +type ConnectorQueryHandler interface { + Query(ctx context.Context, state *types.State, request *schema.QueryRequest, arguments map[string]any) (*schema.RowSet, error) +} + +// ConnectorMutationHandler abstracts the connector mutation handler +type ConnectorMutationHandler interface { + Mutation(ctx context.Context, state *types.State, request *schema.MutationOperation) (schema.MutationOperationResults, error) +} + +//go:embed schema.generated.json +var rawSchema []byte func init() { var err error schemaResponse, err = schema.NewRawSchemaResponse(rawSchema) @@ -29,6 +41,7 @@ func init() { } } + // GetSchema 
gets the connector's schema. func (c *Connector) GetSchema(ctx context.Context, configuration *types.Configuration, _ *types.State) (schema.SchemaResponseMarshaler, error) { return schemaResponse, nil @@ -36,21 +49,20 @@ func (c *Connector) GetSchema(ctx context.Context, configuration *types.Configur // Query executes a query. func (c *Connector) Query(ctx context.Context, configuration *types.Configuration, state *types.State, request *schema.QueryRequest) (schema.QueryResponse, error) { - valueField, err := utils.EvalFunctionSelectionFieldValue(request) - if err != nil { - return nil, schema.UnprocessableContentError(err.Error(), nil) - } + if len(connectorQueryHandlers) == 0 { + return nil, schema.UnprocessableContentError(fmt.Sprintf("unsupported query: %s", request.Collection), nil) + } - span := trace.SpanFromContext(ctx) - requestVars := request.Variables + span := trace.SpanFromContext(ctx) + requestVars := request.Variables varsLength := len(requestVars) - if varsLength == 0 { - requestVars = []schema.QueryRequestVariablesElem{make(schema.QueryRequestVariablesElem)} - varsLength = 1 - } + if varsLength == 0 { + requestVars = []schema.QueryRequestVariablesElem{make(schema.QueryRequestVariablesElem)} + varsLength = 1 + } - rowSets := make([]schema.RowSet, varsLength) - for i, requestVar := range requestVars { + rowSets := make([]schema.RowSet, varsLength) + for i, requestVar := range requestVars { childSpan := span childContext := ctx if varsLength > 1 { @@ -58,36 +70,55 @@ func (c *Connector) Query(ctx context.Context, configuration *types.Configuratio defer childSpan.End() } - result, err := execQuery(childContext, state, request, valueField, requestVar, childSpan) - if err != nil { + result, err := c.execQuery(childContext, state, request, requestVar) + if err != nil { if varsLength > 1 { childSpan.SetStatus(codes.Error, err.Error()) } - return nil, err - } - rowSets[i] = schema.RowSet{ - Aggregates: schema.RowSetAggregates{}, - Rows: []map[string]any{ - 
{ - "__value": result, - }, - }, - } + return nil, err + } + rowSets[i] = *result + if varsLength > 1 { childSpan.End() } - } + } - return rowSets, nil + return rowSets, nil +} + +func (c *Connector) execQuery(ctx context.Context, state *types.State, request *schema.QueryRequest, variables map[string]any) (*schema.RowSet, error) { + rawArgs, err := utils.ResolveArgumentVariables(request.Arguments, variables) + if err != nil { + return nil, schema.UnprocessableContentError("failed to resolve argument variables", map[string]any{ + "cause": err.Error(), + }) + } + + for _, handler := range connectorQueryHandlers { + result, err := handler.Query(ctx, state, request, rawArgs) + if err == nil { + return result, nil + } + + if err != utils.ErrHandlerNotfound { + return nil, err + } + } + return nil, schema.UnprocessableContentError(fmt.Sprintf("unsupported query: %s", request.Collection), nil) } // Mutation executes a mutation. func (c *Connector) Mutation(ctx context.Context, configuration *types.Configuration, state *types.State, request *schema.MutationRequest) (*schema.MutationResponse, error) { - operationLen := len(request.Operations) - operationResults := make([]schema.MutationOperationResults, operationLen) - span := trace.SpanFromContext(ctx) + if len(connectorMutationHandlers) == 0 { + return nil, schema.UnprocessableContentError("unsupported mutation", nil) + } - for i, operation := range request.Operations { + operationLen := len(request.Operations) + operationResults := make([]schema.MutationOperationResults, operationLen) + span := trace.SpanFromContext(ctx) + + for i, operation := range request.Operations { childSpan := span childContext := ctx if operationLen > 1 { @@ -99,104 +130,45 @@ func (c *Connector) Mutation(ctx context.Context, configuration *types.Configura attribute.String("operation.name", string(operation.Name)), ) - switch operation.Type { - case schema.MutationOperationProcedure: - result, err := execProcedure(childContext, state, &operation, 
childSpan) - if err != nil { + switch operation.Type { + case schema.MutationOperationProcedure: + result, err := c.execProcedure(childContext, state, &operation) + if err != nil { if operationLen > 1 { childSpan.SetStatus(codes.Error, err.Error()) } - return nil, err - } - operationResults[i] = result + return nil, err + } + operationResults[i] = result if operationLen > 1 { childSpan.End() } - default: - return nil, schema.UnprocessableContentError(fmt.Sprintf("invalid operation type: %s", operation.Type), nil) - } - } - - return &schema.MutationResponse{ - OperationResults: operationResults, - }, nil -} - -func execQuery(ctx context.Context, state *types.State, request *schema.QueryRequest, queryFields schema.NestedField, variables map[string]any, span trace.Span) (any, error) { - logger := connector.GetLogger(ctx) - connector_addSpanEvent(span, logger, "validate_request", map[string]any{ - "variables": variables, - }) - switch request.Collection { - case "getArticles": - selection, err := queryFields.AsArray() - if err != nil { - return nil, schema.UnprocessableContentError("the selection field type must be array", map[string]any{ - "cause": err.Error(), - }) - } - rawArgs, err := utils.ResolveArgumentVariables(request.Arguments, variables) - if err != nil { - return nil, schema.UnprocessableContentError("failed to resolve argument variables", map[string]any{ - "cause": err.Error(), - }) - } - - connector_addSpanEvent(span, logger, "resolve_arguments", map[string]any{ - "raw_arguments": rawArgs, - }) - - var args functions.GetArticlesArguments - if err = args.FromValue(rawArgs); err != nil { - return nil, schema.UnprocessableContentError("failed to resolve arguments", map[string]any{ - "cause": err.Error(), - }) - } - - connector_addSpanEvent(span, logger, "execute_function", map[string]any{ - "arguments": args, - }) - rawResult, err := functions.GetArticles(ctx, state, &args) - if err != nil { - return nil, err - } - - if rawResult == nil { - return nil, 
schema.UnprocessableContentError("expected not null result", nil) - } + default: + return nil, schema.UnprocessableContentError(fmt.Sprintf("invalid operation type: %s", operation.Type), nil) + } + } - connector_addSpanEvent(span, logger, "evaluate_response_selection", map[string]any{ - "raw_result": rawResult, - }) - result, err := utils.EvalNestedColumnArrayIntoSlice(selection, rawResult) - if err != nil { - return nil, err - } - return result, nil - - default: - return nil, schema.UnprocessableContentError(fmt.Sprintf("unsupported query: %s", request.Collection), nil) - } + return &schema.MutationResponse{ + OperationResults: operationResults, + }, nil } -func execProcedure(ctx context.Context, state *types.State, operation *schema.MutationOperation, span trace.Span) (schema.MutationOperationResults, error) { - logger := connector.GetLogger(ctx) - connector_addSpanEvent(span, logger, "validate_request", map[string]any{ - "operations_name": operation.Name, - }) - switch operation.Name { +func (c *Connector) execProcedure(ctx context.Context, state *types.State, operation *schema.MutationOperation) (schema.MutationOperationResults, error) { + for _, handler := range connectorMutationHandlers { + result, err := handler.Mutation(ctx, state, operation) + if err == nil { + return result, nil + } + if err != utils.ErrHandlerNotfound { + return nil, err + } + } - default: - return nil, schema.UnprocessableContentError(fmt.Sprintf("unsupported procedure operation: %s", operation.Name), nil) - } + return nil, schema.UnprocessableContentError(fmt.Sprintf("unsupported procedure operation: %s", operation.Name), nil) } func connector_addSpanEvent(span trace.Span, logger *slog.Logger, name string, data map[string]any, options ...trace.EventOption) { logger.Debug(name, slog.Any("data", data)) - attrs := utils.DebugJSONAttributes(data, connector_isDebug(logger)) + attrs := utils.DebugJSONAttributes(data, utils.IsDebug(logger)) span.AddEvent(name, append(options, 
trace.WithAttributes(attrs...))...) -} - -func connector_isDebug(logger *slog.Logger) bool { - return logger.Enabled(context.TODO(), slog.LevelDebug) } \ No newline at end of file diff --git a/cmd/hasura-ndc-go/command/internal/testdata/subdir/expected/functions.go.tmpl b/cmd/hasura-ndc-go/command/internal/testdata/subdir/expected/functions.go.tmpl index b1e94cc..b7bb228 100644 --- a/cmd/hasura-ndc-go/command/internal/testdata/subdir/expected/functions.go.tmpl +++ b/cmd/hasura-ndc-go/command/internal/testdata/subdir/expected/functions.go.tmpl @@ -1,14 +1,21 @@ -// Code generated by github.com/hasura/ndc-sdk-go/codegen, DO NOT EDIT. +// Code generated by github.com/hasura/ndc-sdk-go/cmd/hasura-ndc-go, DO NOT EDIT. package functions import ( + "context" + "github.com/hasura/ndc-codegen-subdir-test/connector/types" + "github.com/hasura/ndc-sdk-go/connector" + "github.com/hasura/ndc-sdk-go/schema" "github.com/hasura/ndc-sdk-go/utils" + "go.opentelemetry.io/otel/trace" + "log/slog" + "slices" ) -var functions_Decoder = utils.NewDecoder() +var connector_Decoder = utils.NewDecoder() // FromValue decodes values from map func (j *GetArticlesArguments) FromValue(input map[string]any) error { var err error - err = functions_Decoder.DecodeObjectValue(&j.Author, input, "Author") + err = connector_Decoder.DecodeObject(&j.Author, input) if err != nil { return err } @@ -21,8 +28,85 @@ func (j *GetArticlesArguments) FromValue(input map[string]any) error { // ToMap encodes the struct to a value map func (j GetArticlesResult) ToMap() map[string]any { r := make(map[string]any) - r["Author"] = j.Author - r["id"] = j.ID + r = utils.MergeMap(r, j.Author.ToMap()) + r["id"] = j.ID return r +} +// DataConnectorHandler implements the data connector handler +type DataConnectorHandler struct{} + +// QueryExists check if the query name exists +func (dch DataConnectorHandler) QueryExists(name string) bool { + return slices.Contains(enumValues_FunctionName, name) +} +func (dch 
DataConnectorHandler) Query(ctx context.Context, state *types.State, request *schema.QueryRequest, rawArgs map[string]any) (*schema.RowSet, error) { + if !dch.QueryExists(request.Collection) { + return nil, utils.ErrHandlerNotfound + } + queryFields, err := utils.EvalFunctionSelectionFieldValue(request) + if err != nil { + return nil, schema.UnprocessableContentError(err.Error(), nil) + } + + result, err := dch.execQuery(ctx, state, request, queryFields, rawArgs) + if err != nil { + return nil, err + } + + return &schema.RowSet{ + Aggregates: schema.RowSetAggregates{}, + Rows: []map[string]any{ + { + "__value": result, + }, + }, + }, nil +} + +func (dch DataConnectorHandler) execQuery(ctx context.Context, state *types.State, request *schema.QueryRequest, queryFields schema.NestedField, rawArgs map[string]any) (any, error) { + span := trace.SpanFromContext(ctx) + logger := connector.GetLogger(ctx) + switch request.Collection { + case "getArticles": + + selection, err := queryFields.AsArray() + if err != nil { + return nil, schema.UnprocessableContentError("the selection field type must be array", map[string]any{ + "cause": err.Error(), + }) + } + var args GetArticlesArguments + if parseErr := args.FromValue(rawArgs); parseErr != nil { + return nil, schema.UnprocessableContentError("failed to resolve arguments", map[string]any{ + "cause": parseErr.Error(), + }) + } + + connector_addSpanEvent(span, logger, "execute_function", map[string]any{ + "arguments": args, + }) + rawResult, err := GetArticles(ctx, state, &args) + if err != nil { + return nil, err + } + + connector_addSpanEvent(span, logger, "evaluate_response_selection", map[string]any{ + "raw_result": rawResult, + }) + result, err := utils.EvalNestedColumnArrayIntoSlice(selection, rawResult) + if err != nil { + return nil, err + } + return result, nil + + default: + return nil, utils.ErrHandlerNotfound + } +} +var enumValues_FunctionName = []string{"getArticles"} +func connector_addSpanEvent(span trace.Span, 
logger *slog.Logger, name string, data map[string]any, options ...trace.EventOption) { + logger.Debug(name, slog.Any("data", data)) + attrs := utils.DebugJSONAttributes(data, utils.IsDebug(logger)) + span.AddEvent(name, append(options, trace.WithAttributes(attrs...))...) } \ No newline at end of file diff --git a/cmd/hasura-ndc-go/command/internal/testdata/subdir/expected/schema.json b/cmd/hasura-ndc-go/command/internal/testdata/subdir/expected/schema.json index 9a303fc..333c428 100644 --- a/cmd/hasura-ndc-go/command/internal/testdata/subdir/expected/schema.json +++ b/cmd/hasura-ndc-go/command/internal/testdata/subdir/expected/schema.json @@ -3,17 +3,50 @@ "functions": [ { "arguments": { - "Author": { + "Limit": { "type": { - "name": "Author", + "name": "Float64", "type": "named" } }, - "Limit": { + "author": { "type": { - "name": "Float64", + "type": "nullable", + "underlying_type": { + "name": "Author", + "type": "named" + } + } + }, + "created_at": { + "type": { + "name": "TimestampTZ", "type": "named" } + }, + "id": { + "type": { + "name": "String", + "type": "named" + } + }, + "status": { + "type": { + "type": "nullable", + "underlying_type": { + "name": "String", + "type": "named" + } + } + }, + "tags": { + "type": { + "element_type": { + "name": "String", + "type": "named" + }, + "type": "array" + } } }, "description": "GetArticles", @@ -51,6 +84,15 @@ "type": "named" } }, + "status": { + "type": { + "type": "nullable", + "underlying_type": { + "name": "String", + "type": "named" + } + } + }, "tags": { "type": { "element_type": { @@ -64,9 +106,18 @@ }, "GetArticlesResult": { "fields": { - "Author": { + "author": { + "type": { + "type": "nullable", + "underlying_type": { + "name": "Author", + "type": "named" + } + } + }, + "created_at": { "type": { - "name": "Author", + "name": "TimestampTZ", "type": "named" } }, @@ -75,6 +126,24 @@ "name": "String", "type": "named" } + }, + "status": { + "type": { + "type": "nullable", + "underlying_type": { + "name": 
"String", + "type": "named" + } + } + }, + "tags": { + "type": { + "element_type": { + "name": "String", + "type": "named" + }, + "type": "array" + } } } } diff --git a/cmd/hasura-ndc-go/command/internal/testdata/subdir/source/connector/connector.go b/cmd/hasura-ndc-go/command/internal/testdata/subdir/source/connector/connector.go index c989dc8..bedf6e3 100644 --- a/cmd/hasura-ndc-go/command/internal/testdata/subdir/source/connector/connector.go +++ b/cmd/hasura-ndc-go/command/internal/testdata/subdir/source/connector/connector.go @@ -1,4 +1,4 @@ -package main +package connector import ( "context" diff --git a/cmd/hasura-ndc-go/command/new.go b/cmd/hasura-ndc-go/command/new.go index 1aea9d8..809a895 100644 --- a/cmd/hasura-ndc-go/command/new.go +++ b/cmd/hasura-ndc-go/command/new.go @@ -2,6 +2,7 @@ package command import ( "bufio" + "bytes" "embed" "fmt" "io/fs" @@ -141,12 +142,15 @@ func execCommand(basePath string, commandName string, args ...string) error { if basePath != "" { cmd.Dir = basePath } + l := log.With().Strs("args", args).Str("command", commandName).Logger() + var errBuf bytes.Buffer + cmd.Stderr = &errBuf out, err := cmd.Output() - l := log.Debug() if err != nil { - l = log.Error().Err(err) + l.Error().Err(fmt.Errorf(errBuf.String())).Msg(err.Error()) + } else { + l.Debug().Str("logs", errBuf.String()).Msg(string(out)) } - l.Strs("args", args).Str("result", string(out)).Msg(commandName) return err } diff --git a/cmd/hasura-ndc-go/command/update.go b/cmd/hasura-ndc-go/command/update.go index 30aa971..14ab4ea 100644 --- a/cmd/hasura-ndc-go/command/update.go +++ b/cmd/hasura-ndc-go/command/update.go @@ -16,7 +16,6 @@ func UpdateConnectorSchema(args UpdateArguments, start time.Time) { log.Info(). Str("path", args.Path). Str("connector_dir", args.ConnectorDir). - Str("package_types", args.PackageTypes). 
Msg("generating connector schema...") moduleName, err := getModuleName(args.Path) diff --git a/cmd/hasura-ndc-go/go.mod b/cmd/hasura-ndc-go/go.mod index 32d591e..65103fc 100644 --- a/cmd/hasura-ndc-go/go.mod +++ b/cmd/hasura-ndc-go/go.mod @@ -16,9 +16,11 @@ require ( require ( github.com/davecgh/go-spew v1.1.1 // indirect + github.com/go-viper/mapstructure/v2 v2.1.0 // indirect github.com/mattn/go-colorable v0.1.13 // indirect github.com/mattn/go-isatty v0.0.20 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect + go.opentelemetry.io/otel v1.28.0 // indirect golang.org/x/sync v0.8.0 // indirect golang.org/x/sys v0.25.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect diff --git a/cmd/hasura-ndc-go/go.sum b/cmd/hasura-ndc-go/go.sum index b1e9abf..403d1c0 100644 --- a/cmd/hasura-ndc-go/go.sum +++ b/cmd/hasura-ndc-go/go.sum @@ -9,7 +9,11 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/fatih/structtag v1.2.0 h1:/OdNE99OxoI/PqaW/SuSK9uxxT3f/tcSZgon/ssNSx4= github.com/fatih/structtag v1.2.0/go.mod h1:mBJUNpUnHmRKrKlQQlmCrh5PuhftFbNv8Ys4/aAZl94= +github.com/go-viper/mapstructure/v2 v2.1.0 h1:gHnMa2Y/pIxElCH2GlZZ1lZSsn6XMtufpGyP1XxdC/w= +github.com/go-viper/mapstructure/v2 v2.1.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= +github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/hasura/ndc-sdk-go v1.3.0 h1:xcL7hj0lPZXu9HntytlBe5pAAlCUTLqYAsPrltRUDbc= @@ -32,6 +36,8 @@ github.com/rs/zerolog v1.33.0 h1:1cU2KZkvPxNyfgEmhHAz/1A9Bz+llsdYzklWFzgp0r8= 
github.com/rs/zerolog v1.33.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss= github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +go.opentelemetry.io/otel v1.28.0 h1:/SqNcYk+idO0CxKEUOtKQClMK/MimZihKYMruSMViUo= +go.opentelemetry.io/otel v1.28.0/go.mod h1:q68ijF8Fc8CnMHKyzqL6akLO46ePnjkgfIMIjUIX9z4= golang.org/x/mod v0.20.0 h1:utOm6MM3R3dnawAiJgn0y+xvuYRsm1RKM/4giyfDgV0= golang.org/x/mod v0.20.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= diff --git a/connector/http.go b/connector/http.go index f340622..a92046f 100644 --- a/connector/http.go +++ b/connector/http.go @@ -10,6 +10,7 @@ import ( "log/slog" "net/http" "runtime/debug" + "slices" "strings" "time" @@ -31,13 +32,26 @@ const ( contentTypeJson string = "application/json" ) +const ( + apiPathCapabilities = "/capabilities" + apiPathSchema = "/schema" + apiPathQuery = "/query" + apiPathQueryExplain = "/query/explain" + apiPathMutation = "/mutation" + apiPathMutationExplain = "/mutation/explain" + apiPathHealth = "/health" + apiPathMetrics = "/metrics" +) + var allowedTraceEndpoints = map[string]string{ - "/query": "ndc_query", - "/query/explain": "ndc_query_explain", - "/mutation": "ndc_mutation", - "/mutation/explain": "ndc_mutation_explain", + apiPathQuery: "ndc_query", + apiPathQueryExplain: "ndc_query_explain", + apiPathMutation: "ndc_mutation", + apiPathMutationExplain: "ndc_mutation_explain", } +var debugApiPaths = []string{apiPathMetrics, apiPathHealth} + // define a custom response write to capture response information for logging type customResponseWriter struct { http.ResponseWriter @@ -232,15 +246,20 @@ func (rt *router) Build() *http.ServeMux { slog.Any("response", responseLogData), ) span.SetStatus(codes.Error, http.StatusText(writer.statusCode)) - } else { - logger.Info( - "success", - 
slog.Duration("latency", time.Since(startTime)), - slog.Any("request", requestLogData), - slog.Any("response", responseLogData), - ) - span.SetStatus(codes.Ok, "success") + return } + printSuccess := logger.Info + if slices.Contains(debugApiPaths, r.URL.Path) { + printSuccess = logger.Debug + } + + printSuccess( + "success", + slog.Duration("latency", time.Since(startTime)), + slog.Any("request", requestLogData), + slog.Any("response", responseLogData), + ) + span.SetStatus(codes.Ok, "success") } } diff --git a/connector/server.go b/connector/server.go index 150e3d2..18fa665 100644 --- a/connector/server.go +++ b/connector/server.go @@ -356,15 +356,15 @@ func (s *Server[Configuration, State]) unmarshalBodyJSON(w http.ResponseWriter, func (s *Server[Configuration, State]) buildHandler() *http.ServeMux { router := newRouter(s.logger, s.telemetry, !s.withoutRecovery) - router.Use("/capabilities", http.MethodGet, s.withAuth(s.GetCapabilities)) - router.Use("/schema", http.MethodGet, s.withAuth(s.GetSchema)) - router.Use("/query", http.MethodPost, s.withAuth(s.Query)) - router.Use("/query/explain", http.MethodPost, s.withAuth(s.QueryExplain)) - router.Use("/mutation/explain", http.MethodPost, s.withAuth(s.MutationExplain)) - router.Use("/mutation", http.MethodPost, s.withAuth(s.Mutation)) - router.Use("/health", http.MethodGet, s.Health) + router.Use(apiPathCapabilities, http.MethodGet, s.withAuth(s.GetCapabilities)) + router.Use(apiPathSchema, http.MethodGet, s.withAuth(s.GetSchema)) + router.Use(apiPathQuery, http.MethodPost, s.withAuth(s.Query)) + router.Use(apiPathQueryExplain, http.MethodPost, s.withAuth(s.QueryExplain)) + router.Use(apiPathMutationExplain, http.MethodPost, s.withAuth(s.MutationExplain)) + router.Use(apiPathMutation, http.MethodPost, s.withAuth(s.Mutation)) + router.Use(apiPathHealth, http.MethodGet, s.Health) if s.options.MetricsExporter == string(otelMetricsExporterPrometheus) && s.options.PrometheusPort == nil { - router.Use("/metrics", 
http.MethodGet, s.withAuth(promhttp.Handler().ServeHTTP)) + router.Use(apiPathMetrics, http.MethodGet, s.withAuth(promhttp.Handler().ServeHTTP)) } return router.Build() diff --git a/connector/telemetry.go b/connector/telemetry.go index 9fb6052..3ee838e 100644 --- a/connector/telemetry.go +++ b/connector/telemetry.go @@ -380,6 +380,13 @@ type Tracer struct { var _ traceapi.Tracer = &Tracer{} +// NewTracer creates a new OpenTelemetry tracer +func NewTracer(name string, opts ...traceapi.TracerOption) *Tracer { + return &Tracer{ + Tracer: otel.Tracer(name, opts...), + } +} + // Start creates a span and a context.Context containing the newly-created span // with `internal.visibility: "user"` so that it shows up in the Hasura Console. func (t *Tracer) Start(ctx context.Context, spanName string, opts ...traceapi.SpanStartOption) (context.Context, traceapi.Span) { diff --git a/example/codegen/README.md b/example/codegen/README.md index a525cb8..18650ee 100644 --- a/example/codegen/README.md +++ b/example/codegen/README.md @@ -9,3 +9,11 @@ go run . serve ``` See [NDC Go SDK](https://github.com/hasura/ndc-sdk-go) for more information and [the generation tool](https://github.com/hasura/ndc-sdk-go/tree/main/cmd/hasura-ndc-go) for command documentation. + +## Development + +Generate codes with the following command: + +```sh +go run ../../cmd/hasura-ndc-go update --schema-format go +``` diff --git a/example/codegen/connector.generated.go b/example/codegen/connector.generated.go index 089543f..2c54ef1 100644 --- a/example/codegen/connector.generated.go +++ b/example/codegen/connector.generated.go @@ -1,4 +1,4 @@ -// Code generated by github.com/hasura/ndc-sdk-go/codegen, DO NOT EDIT. +// Code generated by github.com/hasura/ndc-sdk-go/cmd/hasura-ndc-go, DO NOT EDIT. 
package main import ( @@ -10,8 +10,6 @@ import ( "encoding/json" "github.com/hasura/ndc-codegen-example/functions" "github.com/hasura/ndc-codegen-example/types" - "github.com/hasura/ndc-codegen-example/types/arguments" - "github.com/hasura/ndc-sdk-go/connector" "github.com/hasura/ndc-sdk-go/schema" "github.com/hasura/ndc-sdk-go/utils" "go.opentelemetry.io/otel/attribute" @@ -19,12 +17,25 @@ import ( "go.opentelemetry.io/otel/trace" ) -//go:embed schema.generated.json -var rawSchema []byte var schemaResponse *schema.RawSchemaResponse +var connectorQueryHandlers = []ConnectorQueryHandler{functions.DataConnectorHandler{}} +var connectorMutationHandlers = []ConnectorMutationHandler{functions.DataConnectorHandler{}} + +// ConnectorQueryHandler abstracts the connector query handler +type ConnectorQueryHandler interface { + Query(ctx context.Context, state *types.State, request *schema.QueryRequest, arguments map[string]any) (*schema.RowSet, error) +} + +// ConnectorMutationHandler abstracts the connector mutation handler +type ConnectorMutationHandler interface { + Mutation(ctx context.Context, state *types.State, request *schema.MutationOperation) (schema.MutationOperationResults, error) +} func init() { - var err error + rawSchema, err := json.Marshal(GetConnectorSchema()) + if err != nil { + panic(err) + } schemaResponse, err = schema.NewRawSchemaResponse(rawSchema) if err != nil { panic(err) @@ -38,9 +49,8 @@ func (c *Connector) GetSchema(ctx context.Context, configuration *types.Configur // Query executes a query. 
func (c *Connector) Query(ctx context.Context, configuration *types.Configuration, state *types.State, request *schema.QueryRequest) (schema.QueryResponse, error) { - valueField, err := utils.EvalFunctionSelectionFieldValue(request) - if err != nil { - return nil, schema.UnprocessableContentError(err.Error(), nil) + if len(connectorQueryHandlers) == 0 { + return nil, schema.UnprocessableContentError(fmt.Sprintf("unsupported query: %s", request.Collection), nil) } span := trace.SpanFromContext(ctx) @@ -60,21 +70,15 @@ func (c *Connector) Query(ctx context.Context, configuration *types.Configuratio defer childSpan.End() } - result, err := execQuery(childContext, state, request, valueField, requestVar, childSpan) + result, err := c.execQuery(childContext, state, request, requestVar) if err != nil { if varsLength > 1 { childSpan.SetStatus(codes.Error, err.Error()) } return nil, err } - rowSets[i] = schema.RowSet{ - Aggregates: schema.RowSetAggregates{}, - Rows: []map[string]any{ - { - "__value": result, - }, - }, - } + rowSets[i] = *result + if varsLength > 1 { childSpan.End() } @@ -83,8 +87,33 @@ func (c *Connector) Query(ctx context.Context, configuration *types.Configuratio return rowSets, nil } +func (c *Connector) execQuery(ctx context.Context, state *types.State, request *schema.QueryRequest, variables map[string]any) (*schema.RowSet, error) { + rawArgs, err := utils.ResolveArgumentVariables(request.Arguments, variables) + if err != nil { + return nil, schema.UnprocessableContentError("failed to resolve argument variables", map[string]any{ + "cause": err.Error(), + }) + } + + for _, handler := range connectorQueryHandlers { + result, err := handler.Query(ctx, state, request, rawArgs) + if err == nil { + return result, nil + } + + if err != utils.ErrHandlerNotfound { + return nil, err + } + } + return nil, schema.UnprocessableContentError(fmt.Sprintf("unsupported query: %s", request.Collection), nil) +} + // Mutation executes a mutation. 
func (c *Connector) Mutation(ctx context.Context, configuration *types.Configuration, state *types.State, request *schema.MutationRequest) (*schema.MutationResponse, error) { + if len(connectorMutationHandlers) == 0 { + return nil, schema.UnprocessableContentError("unsupported mutation", nil) + } + operationLen := len(request.Operations) operationResults := make([]schema.MutationOperationResults, operationLen) span := trace.SpanFromContext(ctx) @@ -103,7 +132,7 @@ func (c *Connector) Mutation(ctx context.Context, configuration *types.Configura switch operation.Type { case schema.MutationOperationProcedure: - result, err := execProcedure(childContext, state, &operation, childSpan) + result, err := c.execProcedure(childContext, state, &operation) if err != nil { if operationLen > 1 { childSpan.SetStatus(codes.Error, err.Error()) @@ -124,262 +153,22 @@ func (c *Connector) Mutation(ctx context.Context, configuration *types.Configura }, nil } -func execQuery(ctx context.Context, state *types.State, request *schema.QueryRequest, queryFields schema.NestedField, variables map[string]any, span trace.Span) (any, error) { - logger := connector.GetLogger(ctx) - connector_addSpanEvent(span, logger, "validate_request", map[string]any{ - "variables": variables, - }) - switch request.Collection { - case "getBool": - if len(queryFields) > 0 { - return nil, schema.UnprocessableContentError("cannot evaluate selection fields for scalar", nil) - } - return functions.FunctionGetBool(ctx, state) - case "getTypes": - selection, err := queryFields.AsObject() - if err != nil { - return nil, schema.UnprocessableContentError("the selection field type must be object", map[string]any{ - "cause": err.Error(), - }) - } - rawArgs, err := utils.ResolveArgumentVariables(request.Arguments, variables) - if err != nil { - return nil, schema.UnprocessableContentError("failed to resolve argument variables", map[string]any{ - "cause": err.Error(), - }) - } - - connector_addSpanEvent(span, logger, 
"resolve_arguments", map[string]any{ - "raw_arguments": rawArgs, - }) - - var args arguments.GetTypesArguments - if err = args.FromValue(rawArgs); err != nil { - return nil, schema.UnprocessableContentError("failed to resolve arguments", map[string]any{ - "cause": err.Error(), - }) +func (c *Connector) execProcedure(ctx context.Context, state *types.State, operation *schema.MutationOperation) (schema.MutationOperationResults, error) { + for _, handler := range connectorMutationHandlers { + result, err := handler.Mutation(ctx, state, operation) + if err == nil { + return result, nil } - - connector_addSpanEvent(span, logger, "execute_function", map[string]any{ - "arguments": args, - }) - rawResult, err := functions.FunctionGetTypes(ctx, state, &args) - if err != nil { - return nil, err - } - - if rawResult == nil { - return nil, nil - } - - connector_addSpanEvent(span, logger, "evaluate_response_selection", map[string]any{ - "raw_result": rawResult, - }) - result, err := utils.EvalNestedColumnObject(selection, rawResult) - if err != nil { - return nil, err - } - return result, nil - case "hello": - selection, err := queryFields.AsObject() - if err != nil { - return nil, schema.UnprocessableContentError("the selection field type must be object", map[string]any{ - "cause": err.Error(), - }) - } - rawResult, err := functions.FunctionHello(ctx, state) - if err != nil { - return nil, err - } - - if rawResult == nil { - return nil, nil - } - - connector_addSpanEvent(span, logger, "evaluate_response_selection", map[string]any{ - "raw_result": rawResult, - }) - result, err := utils.EvalNestedColumnObject(selection, rawResult) - if err != nil { + if err != utils.ErrHandlerNotfound { return nil, err } - return result, nil - case "getArticles": - selection, err := queryFields.AsArray() - if err != nil { - return nil, schema.UnprocessableContentError("the selection field type must be array", map[string]any{ - "cause": err.Error(), - }) - } - rawArgs, err := 
utils.ResolveArgumentVariables(request.Arguments, variables) - if err != nil { - return nil, schema.UnprocessableContentError("failed to resolve argument variables", map[string]any{ - "cause": err.Error(), - }) - } - - connector_addSpanEvent(span, logger, "resolve_arguments", map[string]any{ - "raw_arguments": rawArgs, - }) - - var args functions.GetArticlesArguments - if err = args.FromValue(rawArgs); err != nil { - return nil, schema.UnprocessableContentError("failed to resolve arguments", map[string]any{ - "cause": err.Error(), - }) - } - - connector_addSpanEvent(span, logger, "execute_function", map[string]any{ - "arguments": args, - }) - rawResult, err := functions.GetArticles(ctx, state, &args) - if err != nil { - return nil, err - } - - if rawResult == nil { - return nil, schema.UnprocessableContentError("expected not null result", nil) - } - - connector_addSpanEvent(span, logger, "evaluate_response_selection", map[string]any{ - "raw_result": rawResult, - }) - result, err := utils.EvalNestedColumnArrayIntoSlice(selection, rawResult) - if err != nil { - return nil, err - } - return result, nil - - default: - return nil, schema.UnprocessableContentError(fmt.Sprintf("unsupported query: %s", request.Collection), nil) } -} - -func execProcedure(ctx context.Context, state *types.State, operation *schema.MutationOperation, span trace.Span) (schema.MutationOperationResults, error) { - logger := connector.GetLogger(ctx) - connector_addSpanEvent(span, logger, "validate_request", map[string]any{ - "operations_name": operation.Name, - }) - switch operation.Name { - case "create_article": - selection, err := operation.Fields.AsObject() - if err != nil { - return nil, schema.UnprocessableContentError("the selection field type must be object", map[string]any{ - "cause": err.Error(), - }) - } - var args functions.CreateArticleArguments - if err := json.Unmarshal(operation.Arguments, &args); err != nil { - return nil, schema.UnprocessableContentError("failed to decode 
arguments", map[string]any{ - "cause": err.Error(), - }) - } - span.AddEvent("execute_procedure") - rawResult, err := functions.CreateArticle(ctx, state, &args) - - if err != nil { - return nil, err - } - - if rawResult == nil { - return nil, nil - } - connector_addSpanEvent(span, logger, "evaluate_response_selection", map[string]any{ - "raw_result": rawResult, - }) - result, err := utils.EvalNestedColumnObject(selection, rawResult) - if err != nil { - return nil, err - } - return schema.NewProcedureResult(result).Encode(), nil - case "increase": - if len(operation.Fields) > 0 { - return nil, schema.UnprocessableContentError("cannot evaluate selection fields for scalar", nil) - } - span.AddEvent("execute_procedure") - var err error - result, err := functions.Increase(ctx, state) - if err != nil { - return nil, err - } - return schema.NewProcedureResult(result).Encode(), nil - case "createAuthor": - selection, err := operation.Fields.AsObject() - if err != nil { - return nil, schema.UnprocessableContentError("the selection field type must be object", map[string]any{ - "cause": err.Error(), - }) - } - var args functions.CreateAuthorArguments - if err := json.Unmarshal(operation.Arguments, &args); err != nil { - return nil, schema.UnprocessableContentError("failed to decode arguments", map[string]any{ - "cause": err.Error(), - }) - } - span.AddEvent("execute_procedure") - rawResult, err := functions.ProcedureCreateAuthor(ctx, state, &args) - - if err != nil { - return nil, err - } - - if rawResult == nil { - return nil, nil - } - connector_addSpanEvent(span, logger, "evaluate_response_selection", map[string]any{ - "raw_result": rawResult, - }) - result, err := utils.EvalNestedColumnObject(selection, rawResult) - - if err != nil { - return nil, err - } - return schema.NewProcedureResult(result).Encode(), nil - case "createAuthors": - selection, err := operation.Fields.AsArray() - if err != nil { - return nil, schema.UnprocessableContentError("the selection field type 
must be array", map[string]any{ - "cause": err.Error(), - }) - } - var args functions.CreateAuthorsArguments - if err := json.Unmarshal(operation.Arguments, &args); err != nil { - return nil, schema.UnprocessableContentError("failed to decode arguments", map[string]any{ - "cause": err.Error(), - }) - } - span.AddEvent("execute_procedure") - rawResult, err := functions.ProcedureCreateAuthors(ctx, state, &args) - - if err != nil { - return nil, err - } - - if rawResult == nil { - return nil, schema.UnprocessableContentError("expected not null result", nil) - } - connector_addSpanEvent(span, logger, "evaluate_response_selection", map[string]any{ - "raw_result": rawResult, - }) - result, err := utils.EvalNestedColumnArrayIntoSlice(selection, rawResult) - - if err != nil { - return nil, err - } - return schema.NewProcedureResult(result).Encode(), nil - - default: - return nil, schema.UnprocessableContentError(fmt.Sprintf("unsupported procedure operation: %s", operation.Name), nil) - } + return nil, schema.UnprocessableContentError(fmt.Sprintf("unsupported procedure operation: %s", operation.Name), nil) } func connector_addSpanEvent(span trace.Span, logger *slog.Logger, name string, data map[string]any, options ...trace.EventOption) { logger.Debug(name, slog.Any("data", data)) - attrs := utils.DebugJSONAttributes(data, connector_isDebug(logger)) + attrs := utils.DebugJSONAttributes(data, utils.IsDebug(logger)) span.AddEvent(name, append(options, trace.WithAttributes(attrs...))...) 
} - -func connector_isDebug(logger *slog.Logger) bool { - return logger.Enabled(context.TODO(), slog.LevelDebug) -} diff --git a/example/codegen/connector_test.go b/example/codegen/connector_test.go index 85ee1ec..ae800c3 100644 --- a/example/codegen/connector_test.go +++ b/example/codegen/connector_test.go @@ -261,6 +261,7 @@ func TestQueryGetTypes(t *testing.T) { "value": { "id": "1", "duration": 10, + "tags": [], "created_at": "2024-03-05T05:00:00Z" } }, @@ -269,6 +270,7 @@ func TestQueryGetTypes(t *testing.T) { "value": { "id": "2", "duration": 11, + "tags": [], "created_at": "2024-03-05T04:00:00Z" } }, @@ -278,6 +280,7 @@ func TestQueryGetTypes(t *testing.T) { { "id": "3", "duration": 12, + "tags": [], "created_at": "2024-03-05T03:00:00Z" } ] @@ -287,6 +290,7 @@ func TestQueryGetTypes(t *testing.T) { "value": [ { "created_at": "2024-03-05T02:00:00Z", + "tags": [], "id": "bPgG5cs38N" } ] @@ -1667,6 +1671,10 @@ func TestQueries(t *testing.T) { } }, "arguments": { + "name": { + "type": "literal", + "value": "foo" + }, "Limit": { "type": "literal", "value": 1 @@ -1727,13 +1735,21 @@ func TestProcedures(t *testing.T) { { "type": "procedure", "name": "create_article", - "arguments": {}, + "arguments": { + "author": { + "type": "literal", + "value": { + "created_at": "2024-03-31T12:01:32+07:00", + "id": "5ea23c2a-f75b-4901-a640-87a46a509418" + } + } + }, "fields": { "type": "object", "fields": { "id": { - "type": "column", - "column": "id" + "type": "column", + "column": "id" } } } @@ -1780,7 +1796,16 @@ func TestProcedures(t *testing.T) { { "type": "procedure", "name": "createAuthors", - "arguments": {}, + "arguments": { + "Authors": [ + { + "name": "Author 1" + }, + { + "name": "Author 2" + } + ] + }, "fields": { "type": "array", "fields": { @@ -1798,6 +1823,8 @@ func TestProcedures(t *testing.T) { "collection_relationships": {} }`, response: `[{ + "id": 0 + }, { "id": 1 }]`, }, diff --git a/example/codegen/functions/comment.go 
b/example/codegen/functions/comment.go index 2d48961..55077b6 100644 --- a/example/codegen/functions/comment.go +++ b/example/codegen/functions/comment.go @@ -9,6 +9,8 @@ import ( ) type GetArticlesArguments struct { + BaseAuthor + Limit float64 } @@ -23,7 +25,7 @@ func GetArticles(ctx context.Context, state *types.State, arguments *GetArticles return []GetArticlesResult{ { ID: "1", - Name: "Article 1", + Name: types.Text(arguments.Name), }, }, nil } @@ -42,8 +44,8 @@ type CreateArticleResult struct { // CreateArticle // @procedure create_article -func CreateArticle(ctx context.Context, state *types.State, arguments *CreateArticleArguments) (*CreateArticleResult, error) { - return &CreateArticleResult{ +func CreateArticle(ctx context.Context, state *types.State, arguments *CreateArticleArguments) (CreateArticleResult, error) { + return CreateArticleResult{ ID: 1, Authors: []types.Author{}, }, nil diff --git a/example/codegen/functions/prefix.go b/example/codegen/functions/prefix.go index e7da307..f4f3298 100644 --- a/example/codegen/functions/prefix.go +++ b/example/codegen/functions/prefix.go @@ -4,7 +4,6 @@ import ( "context" "encoding/json" "fmt" - "strings" "time" "github.com/google/uuid" @@ -48,12 +47,10 @@ func FunctionHello(ctx context.Context, state *types.State) (*HelloResult, error // A create author argument type CreateAuthorArguments struct { - Name string `json:"name"` + BaseAuthor } - -// A create authors argument type CreateAuthorsArguments struct { - Names []string `json:"names"` + Authors []CreateAuthorArguments } // A create author result @@ -73,12 +70,14 @@ func ProcedureCreateAuthor(ctx context.Context, state *types.State, arguments *C // ProcedureCreateAuthors creates a list of authors func ProcedureCreateAuthors(ctx context.Context, state *types.State, arguments *CreateAuthorsArguments) ([]CreateAuthorResult, error) { - return []CreateAuthorResult{ - { - ID: 1, - Name: strings.Join(arguments.Names, ","), - }, - }, nil + results := 
make([]CreateAuthorResult, len(arguments.Authors)) + for i, arg := range arguments.Authors { + results[i] = CreateAuthorResult{ + ID: i, + Name: arg.Name, + } + } + return results, nil } // FunctionGetBool return an scalar boolean @@ -89,3 +88,36 @@ func FunctionGetBool(ctx context.Context, state *types.State) (bool, error) { func FunctionGetTypes(ctx context.Context, state *types.State, arguments *arguments.GetTypesArguments) (*arguments.GetTypesArguments, error) { return arguments, nil } + +type BaseAuthor struct { + Name string `json:"name"` +} + +type GetAuthorArguments struct { + *BaseAuthor + + ID string `json:"id"` +} + +type GetAuthorResult struct { + *CreateAuthorResult + + Disabled bool `json:"disabled"` +} + +func FunctionGetAuthor(ctx context.Context, state *types.State, arguments *GetAuthorArguments) (*GetAuthorResult, error) { + return &GetAuthorResult{ + CreateAuthorResult: &CreateAuthorResult{ + ID: 1, + Name: arguments.Name, + }, + Disabled: false, + }, nil +} + +func FunctionGetAuthor2(ctx context.Context, state *types.State, arguments *GetAuthorArguments) (types.GetAuthorResult, error) { + return types.GetAuthorResult{ + ID: 1, + Name: arguments.Name, + }, nil +} diff --git a/example/codegen/functions/types.generated.go b/example/codegen/functions/types.generated.go index 93cce8e..a0447be 100644 --- a/example/codegen/functions/types.generated.go +++ b/example/codegen/functions/types.generated.go @@ -1,15 +1,38 @@ -// Code generated by github.com/hasura/ndc-sdk-go/codegen, DO NOT EDIT. +// Code generated by github.com/hasura/ndc-sdk-go/cmd/hasura-ndc-go, DO NOT EDIT. 
package functions import ( + "context" + "encoding/json" + "github.com/hasura/ndc-codegen-example/types" + "github.com/hasura/ndc-codegen-example/types/arguments" + "github.com/hasura/ndc-sdk-go/connector" + "github.com/hasura/ndc-sdk-go/schema" "github.com/hasura/ndc-sdk-go/utils" + "go.opentelemetry.io/otel/trace" + "log/slog" + "slices" ) -var functions_Decoder = utils.NewDecoder() +var connector_Decoder = utils.NewDecoder() + +// FromValue decodes values from map +func (j *BaseAuthor) FromValue(input map[string]any) error { + var err error + j.Name, err = utils.GetString(input, "name") + if err != nil { + return err + } + return nil +} // FromValue decodes values from map func (j *GetArticlesArguments) FromValue(input map[string]any) error { var err error + err = connector_Decoder.DecodeObject(&j.BaseAuthor, input) + if err != nil { + return err + } j.Limit, err = utils.GetFloat[float64](input, "Limit") if err != nil { return err @@ -17,6 +40,29 @@ func (j *GetArticlesArguments) FromValue(input map[string]any) error { return nil } +// FromValue decodes values from map +func (j *GetAuthorArguments) FromValue(input map[string]any) error { + var err error + j.BaseAuthor = new(BaseAuthor) + err = connector_Decoder.DecodeObject(j.BaseAuthor, input) + if err != nil { + return err + } + j.ID, err = utils.GetString(input, "id") + if err != nil { + return err + } + return nil +} + +// ToMap encodes the struct to a value map +func (j BaseAuthor) ToMap() map[string]any { + r := make(map[string]any) + r["name"] = j.Name + + return r +} + // ToMap encodes the struct to a value map func (j CreateArticleResult) ToMap() map[string]any { r := make(map[string]any) @@ -30,6 +76,14 @@ func (j CreateArticleResult) ToMap() map[string]any { return r } +// ToMap encodes the struct to a value map +func (j CreateAuthorArguments) ToMap() map[string]any { + r := make(map[string]any) + r = utils.MergeMap(r, j.BaseAuthor.ToMap()) + + return r +} + // ToMap encodes the struct to a value map 
func (j CreateAuthorResult) ToMap() map[string]any { r := make(map[string]any) @@ -43,8 +97,19 @@ func (j CreateAuthorResult) ToMap() map[string]any { // ToMap encodes the struct to a value map func (j GetArticlesResult) ToMap() map[string]any { r := make(map[string]any) - r["id"] = j.ID r["Name"] = j.Name + r["id"] = j.ID + + return r +} + +// ToMap encodes the struct to a value map +func (j GetAuthorResult) ToMap() map[string]any { + r := make(map[string]any) + if j.CreateAuthorResult != nil { + r = utils.MergeMap(r, (*j.CreateAuthorResult).ToMap()) + } + r["disabled"] = j.Disabled return r } @@ -65,3 +130,345 @@ func (j HelloResult) ToMap() map[string]any { func (j ScalarFoo) ScalarName() string { return "Foo" } + +// DataConnectorHandler implements the data connector handler +type DataConnectorHandler struct{} + +// QueryExists check if the query name exists +func (dch DataConnectorHandler) QueryExists(name string) bool { + return slices.Contains(enumValues_FunctionName, name) +} +func (dch DataConnectorHandler) Query(ctx context.Context, state *types.State, request *schema.QueryRequest, rawArgs map[string]any) (*schema.RowSet, error) { + if !dch.QueryExists(request.Collection) { + return nil, utils.ErrHandlerNotfound + } + queryFields, err := utils.EvalFunctionSelectionFieldValue(request) + if err != nil { + return nil, schema.UnprocessableContentError(err.Error(), nil) + } + + result, err := dch.execQuery(ctx, state, request, queryFields, rawArgs) + if err != nil { + return nil, err + } + + return &schema.RowSet{ + Aggregates: schema.RowSetAggregates{}, + Rows: []map[string]any{ + { + "__value": result, + }, + }, + }, nil +} + +func (dch DataConnectorHandler) execQuery(ctx context.Context, state *types.State, request *schema.QueryRequest, queryFields schema.NestedField, rawArgs map[string]any) (any, error) { + span := trace.SpanFromContext(ctx) + logger := connector.GetLogger(ctx) + switch request.Collection { + case "getAuthor": + + selection, err := 
queryFields.AsObject() + if err != nil { + return nil, schema.UnprocessableContentError("the selection field type must be object", map[string]any{ + "cause": err.Error(), + }) + } + var args GetAuthorArguments + if parseErr := args.FromValue(rawArgs); parseErr != nil { + return nil, schema.UnprocessableContentError("failed to resolve arguments", map[string]any{ + "cause": parseErr.Error(), + }) + } + + connector_addSpanEvent(span, logger, "execute_function", map[string]any{ + "arguments": args, + }) + rawResult, err := FunctionGetAuthor(ctx, state, &args) + if err != nil { + return nil, err + } + + if rawResult == nil { + return nil, nil + } + + connector_addSpanEvent(span, logger, "evaluate_response_selection", map[string]any{ + "raw_result": rawResult, + }) + result, err := utils.EvalNestedColumnObject(selection, rawResult) + if err != nil { + return nil, err + } + return result, nil + + case "getAuthor2": + + selection, err := queryFields.AsObject() + if err != nil { + return nil, schema.UnprocessableContentError("the selection field type must be object", map[string]any{ + "cause": err.Error(), + }) + } + var args GetAuthorArguments + if parseErr := args.FromValue(rawArgs); parseErr != nil { + return nil, schema.UnprocessableContentError("failed to resolve arguments", map[string]any{ + "cause": parseErr.Error(), + }) + } + + connector_addSpanEvent(span, logger, "execute_function", map[string]any{ + "arguments": args, + }) + rawResult, err := FunctionGetAuthor2(ctx, state, &args) + if err != nil { + return nil, err + } + + connector_addSpanEvent(span, logger, "evaluate_response_selection", map[string]any{ + "raw_result": rawResult, + }) + result, err := utils.EvalNestedColumnObject(selection, rawResult) + if err != nil { + return nil, err + } + return result, nil + + case "getBool": + + if len(queryFields) > 0 { + return nil, schema.UnprocessableContentError("cannot evaluate selection fields for scalar", nil) + } + return FunctionGetBool(ctx, state) + + case 
"getTypes": + + selection, err := queryFields.AsObject() + if err != nil { + return nil, schema.UnprocessableContentError("the selection field type must be object", map[string]any{ + "cause": err.Error(), + }) + } + var args arguments.GetTypesArguments + if parseErr := args.FromValue(rawArgs); parseErr != nil { + return nil, schema.UnprocessableContentError("failed to resolve arguments", map[string]any{ + "cause": parseErr.Error(), + }) + } + + connector_addSpanEvent(span, logger, "execute_function", map[string]any{ + "arguments": args, + }) + rawResult, err := FunctionGetTypes(ctx, state, &args) + if err != nil { + return nil, err + } + + if rawResult == nil { + return nil, nil + } + + connector_addSpanEvent(span, logger, "evaluate_response_selection", map[string]any{ + "raw_result": rawResult, + }) + result, err := utils.EvalNestedColumnObject(selection, rawResult) + if err != nil { + return nil, err + } + return result, nil + + case "hello": + + selection, err := queryFields.AsObject() + if err != nil { + return nil, schema.UnprocessableContentError("the selection field type must be object", map[string]any{ + "cause": err.Error(), + }) + } + rawResult, err := FunctionHello(ctx, state) + if err != nil { + return nil, err + } + + if rawResult == nil { + return nil, nil + } + + connector_addSpanEvent(span, logger, "evaluate_response_selection", map[string]any{ + "raw_result": rawResult, + }) + result, err := utils.EvalNestedColumnObject(selection, rawResult) + if err != nil { + return nil, err + } + return result, nil + + case "getArticles": + + selection, err := queryFields.AsArray() + if err != nil { + return nil, schema.UnprocessableContentError("the selection field type must be array", map[string]any{ + "cause": err.Error(), + }) + } + var args GetArticlesArguments + if parseErr := args.FromValue(rawArgs); parseErr != nil { + return nil, schema.UnprocessableContentError("failed to resolve arguments", map[string]any{ + "cause": parseErr.Error(), + }) + } + + 
connector_addSpanEvent(span, logger, "execute_function", map[string]any{ + "arguments": args, + }) + rawResult, err := GetArticles(ctx, state, &args) + if err != nil { + return nil, err + } + + connector_addSpanEvent(span, logger, "evaluate_response_selection", map[string]any{ + "raw_result": rawResult, + }) + result, err := utils.EvalNestedColumnArrayIntoSlice(selection, rawResult) + if err != nil { + return nil, err + } + return result, nil + + default: + return nil, utils.ErrHandlerNotfound + } +} + +var enumValues_FunctionName = []string{"getAuthor", "getAuthor2", "getBool", "getTypes", "hello", "getArticles"} + +// MutationExists check if the mutation name exists +func (dch DataConnectorHandler) MutationExists(name string) bool { + return slices.Contains(enumValues_ProcedureName, name) +} +func (dch DataConnectorHandler) Mutation(ctx context.Context, state *types.State, operation *schema.MutationOperation) (schema.MutationOperationResults, error) { + span := trace.SpanFromContext(ctx) + logger := connector.GetLogger(ctx) + connector_addSpanEvent(span, logger, "validate_request", map[string]any{ + "operations_name": operation.Name, + }) + + switch operation.Name { + case "create_article": + + selection, err := operation.Fields.AsObject() + if err != nil { + return nil, schema.UnprocessableContentError("the selection field type must be object", map[string]any{ + "cause": err.Error(), + }) + } + var args CreateArticleArguments + if err := json.Unmarshal(operation.Arguments, &args); err != nil { + return nil, schema.UnprocessableContentError("failed to decode arguments", map[string]any{ + "cause": err.Error(), + }) + } + span.AddEvent("execute_procedure") + rawResult, err := CreateArticle(ctx, state, &args) + + if err != nil { + return nil, err + } + connector_addSpanEvent(span, logger, "evaluate_response_selection", map[string]any{ + "raw_result": rawResult, + }) + result, err := utils.EvalNestedColumnObject(selection, rawResult) + + if err != nil { + return nil, 
err + } + return schema.NewProcedureResult(result).Encode(), nil + + case "increase": + + if len(operation.Fields) > 0 { + return nil, schema.UnprocessableContentError("cannot evaluate selection fields for scalar", nil) + } + span.AddEvent("execute_procedure") + result, err := Increase(ctx, state) + if err != nil { + return nil, err + } + return schema.NewProcedureResult(result).Encode(), nil + + case "createAuthor": + + selection, err := operation.Fields.AsObject() + if err != nil { + return nil, schema.UnprocessableContentError("the selection field type must be object", map[string]any{ + "cause": err.Error(), + }) + } + var args CreateAuthorArguments + if err := json.Unmarshal(operation.Arguments, &args); err != nil { + return nil, schema.UnprocessableContentError("failed to decode arguments", map[string]any{ + "cause": err.Error(), + }) + } + span.AddEvent("execute_procedure") + rawResult, err := ProcedureCreateAuthor(ctx, state, &args) + + if err != nil { + return nil, err + } + + if rawResult == nil { + return nil, nil + } + connector_addSpanEvent(span, logger, "evaluate_response_selection", map[string]any{ + "raw_result": rawResult, + }) + result, err := utils.EvalNestedColumnObject(selection, rawResult) + + if err != nil { + return nil, err + } + return schema.NewProcedureResult(result).Encode(), nil + + case "createAuthors": + + selection, err := operation.Fields.AsArray() + if err != nil { + return nil, schema.UnprocessableContentError("the selection field type must be array", map[string]any{ + "cause": err.Error(), + }) + } + var args CreateAuthorsArguments + if err := json.Unmarshal(operation.Arguments, &args); err != nil { + return nil, schema.UnprocessableContentError("failed to decode arguments", map[string]any{ + "cause": err.Error(), + }) + } + span.AddEvent("execute_procedure") + rawResult, err := ProcedureCreateAuthors(ctx, state, &args) + + if err != nil { + return nil, err + } + connector_addSpanEvent(span, logger, "evaluate_response_selection", 
map[string]any{ + "raw_result": rawResult, + }) + result, err := utils.EvalNestedColumnArrayIntoSlice(selection, rawResult) + + if err != nil { + return nil, err + } + return schema.NewProcedureResult(result).Encode(), nil + + default: + return nil, utils.ErrHandlerNotfound + } +} + +var enumValues_ProcedureName = []string{"create_article", "increase", "createAuthor", "createAuthors"} + +func connector_addSpanEvent(span trace.Span, logger *slog.Logger, name string, data map[string]any, options ...trace.EventOption) { + logger.Debug(name, slog.Any("data", data)) + attrs := utils.DebugJSONAttributes(data, utils.IsDebug(logger)) + span.AddEvent(name, append(options, trace.WithAttributes(attrs...))...) +} diff --git a/example/codegen/schema.generated.go b/example/codegen/schema.generated.go new file mode 100644 index 0000000..9968881 --- /dev/null +++ b/example/codegen/schema.generated.go @@ -0,0 +1,1191 @@ +// Code generated by github.com/hasura/ndc-sdk-go/cmd/hasura-ndc-go, DO NOT EDIT. +package main + +import ( + "github.com/hasura/ndc-sdk-go/schema" +) + +func toPtr[V any](value V) *V { + return &value +} + +// GetConnectorSchema gets the generated connector schema +func GetConnectorSchema() *schema.SchemaResponse { + return &schema.SchemaResponse{ + Collections: []schema.CollectionInfo{}, + ObjectTypes: schema.SchemaResponseObjectTypes{ + "Author": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "author": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("Author")).Encode(), + }, + "created_at": schema.ObjectField{ + Type: schema.NewNamedType("TimestampTZ").Encode(), + }, + "id": schema.ObjectField{ + Type: schema.NewNamedType("String").Encode(), + }, + "status": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("AuthorStatus")).Encode(), + }, + "tags": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("String")).Encode(), + }, + }, + }, + "BaseAuthor": schema.ObjectType{ + Fields: 
schema.ObjectTypeFields{ + "name": schema.ObjectField{ + Type: schema.NewNamedType("String").Encode(), + }, + }, + }, + "CreateArticleArgumentsAuthor": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "created_at": schema.ObjectField{ + Type: schema.NewNamedType("TimestampTZ").Encode(), + }, + "id": schema.ObjectField{ + Type: schema.NewNamedType("UUID").Encode(), + }, + }, + }, + "CreateArticleResult": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "authors": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("Author")).Encode(), + }, + "id": schema.ObjectField{ + Type: schema.NewNamedType("Int32").Encode(), + }, + }, + }, + "CreateAuthorArguments": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "name": schema.ObjectField{ + Type: schema.NewNamedType("String").Encode(), + }, + }, + }, + "CreateAuthorResult": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "created_at": schema.ObjectField{ + Type: schema.NewNamedType("TimestampTZ").Encode(), + }, + "id": schema.ObjectField{ + Type: schema.NewNamedType("Int32").Encode(), + }, + "name": schema.ObjectField{ + Type: schema.NewNamedType("String").Encode(), + }, + }, + }, + "GetArticlesResult": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "Name": schema.ObjectField{ + Type: schema.NewNamedType("String").Encode(), + }, + "id": schema.ObjectField{ + Type: schema.NewNamedType("String").Encode(), + }, + }, + }, + "GetAuthorResult": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "id": schema.ObjectField{ + Type: schema.NewNamedType("Int32").Encode(), + }, + "name": schema.ObjectField{ + Type: schema.NewNamedType("String").Encode(), + }, + }, + }, + "GetAuthorResult_types": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "id": schema.ObjectField{ + Type: schema.NewNamedType("Int32").Encode(), + }, + "name": schema.ObjectField{ + Type: schema.NewNamedType("String").Encode(), + }, + }, + }, + "GetTypesArguments": schema.ObjectType{ + Fields: 
schema.ObjectTypeFields{ + "ArrayBigInt": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("BigInt")).Encode(), + }, + "ArrayBigIntPtr": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("BigInt"))).Encode(), + }, + "ArrayBool": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("Boolean")).Encode(), + }, + "ArrayBoolPtr": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Boolean"))).Encode(), + }, + "ArrayFloat32": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("Float32")).Encode(), + }, + "ArrayFloat32Ptr": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Float32"))).Encode(), + }, + "ArrayFloat64": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("Float64")).Encode(), + }, + "ArrayFloat64Ptr": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Float64"))).Encode(), + }, + "ArrayInt": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("Int32")).Encode(), + }, + "ArrayInt16": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("Int16")).Encode(), + }, + "ArrayInt16Ptr": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int16"))).Encode(), + }, + "ArrayInt32": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("Int32")).Encode(), + }, + "ArrayInt32Ptr": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int32"))).Encode(), + }, + "ArrayInt64": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("Int64")).Encode(), + }, + "ArrayInt64Ptr": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int64"))).Encode(), + }, + "ArrayInt8": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("Int8")).Encode(), + }, + "ArrayInt8Ptr": 
schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int8"))).Encode(), + }, + "ArrayIntPtr": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int32"))).Encode(), + }, + "ArrayJSON": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("JSON")).Encode(), + }, + "ArrayJSONPtr": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("JSON"))).Encode(), + }, + "ArrayMap": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("JSON")).Encode(), + }, + "ArrayMapPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("JSON"))).Encode(), + }, + "ArrayObject": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("GetTypesArgumentsArrayObject")).Encode(), + }, + "ArrayObjectPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("GetTypesArgumentsArrayObjectPtr"))).Encode(), + }, + "ArrayRawJSON": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("RawJSON")).Encode(), + }, + "ArrayRawJSONPtr": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("RawJSON"))).Encode(), + }, + "ArrayString": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("String")).Encode(), + }, + "ArrayStringPtr": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("String"))).Encode(), + }, + "ArrayTime": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("TimestampTZ")).Encode(), + }, + "ArrayTimePtr": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("TimestampTZ"))).Encode(), + }, + "ArrayUUID": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("UUID")).Encode(), + }, + "ArrayUUIDPtr": schema.ObjectField{ + Type: 
schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("UUID"))).Encode(), + }, + "ArrayUint": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("Int32")).Encode(), + }, + "ArrayUint16": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("Int16")).Encode(), + }, + "ArrayUint16Ptr": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int16"))).Encode(), + }, + "ArrayUint32": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("Int32")).Encode(), + }, + "ArrayUint32Ptr": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int32"))).Encode(), + }, + "ArrayUint64": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("Int64")).Encode(), + }, + "ArrayUint64Ptr": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int64"))).Encode(), + }, + "ArrayUint8": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("Int8")).Encode(), + }, + "ArrayUint8Ptr": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int8"))).Encode(), + }, + "ArrayUintPtr": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int32"))).Encode(), + }, + "BigInt": schema.ObjectField{ + Type: schema.NewNamedType("BigInt").Encode(), + }, + "BigIntPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("BigInt")).Encode(), + }, + "Bool": schema.ObjectField{ + Type: schema.NewNamedType("Boolean").Encode(), + }, + "BoolPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("Boolean")).Encode(), + }, + "Bytes": schema.ObjectField{ + Type: schema.NewNamedType("Bytes").Encode(), + }, + "BytesPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("Bytes")).Encode(), + }, + "CustomScalar": schema.ObjectField{ + Type: schema.NewNamedType("CommentString").Encode(), + }, + 
"CustomScalarPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("CommentString")).Encode(), + }, + "Date": schema.ObjectField{ + Type: schema.NewNamedType("Date").Encode(), + }, + "DatePtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("Date")).Encode(), + }, + "Enum": schema.ObjectField{ + Type: schema.NewNamedType("SomeEnum").Encode(), + }, + "EnumPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("SomeEnum")).Encode(), + }, + "Float32": schema.ObjectField{ + Type: schema.NewNamedType("Float32").Encode(), + }, + "Float32Ptr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("Float32")).Encode(), + }, + "Float64": schema.ObjectField{ + Type: schema.NewNamedType("Float64").Encode(), + }, + "Float64Ptr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("Float64")).Encode(), + }, + "Int": schema.ObjectField{ + Type: schema.NewNamedType("Int32").Encode(), + }, + "Int16": schema.ObjectField{ + Type: schema.NewNamedType("Int16").Encode(), + }, + "Int16Ptr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("Int16")).Encode(), + }, + "Int32": schema.ObjectField{ + Type: schema.NewNamedType("Int32").Encode(), + }, + "Int32Ptr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("Int32")).Encode(), + }, + "Int64": schema.ObjectField{ + Type: schema.NewNamedType("Int64").Encode(), + }, + "Int64Ptr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("Int64")).Encode(), + }, + "Int8": schema.ObjectField{ + Type: schema.NewNamedType("Int8").Encode(), + }, + "Int8Ptr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("Int8")).Encode(), + }, + "IntPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("Int32")).Encode(), + }, + "JSON": schema.ObjectField{ + Type: schema.NewNamedType("JSON").Encode(), + }, + "JSONPtr": schema.ObjectField{ + Type: 
schema.NewNullableType(schema.NewNamedType("JSON")).Encode(), + }, + "Map": schema.ObjectField{ + Type: schema.NewNamedType("JSON").Encode(), + }, + "MapPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("JSON")).Encode(), + }, + "NamedArray": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("Author")).Encode(), + }, + "NamedArrayPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Author"))).Encode(), + }, + "NamedObject": schema.ObjectField{ + Type: schema.NewNamedType("Author").Encode(), + }, + "NamedObjectPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("Author")).Encode(), + }, + "Object": schema.ObjectField{ + Type: schema.NewNamedType("GetTypesArgumentsObject").Encode(), + }, + "ObjectPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("GetTypesArgumentsObjectPtr")).Encode(), + }, + "PtrArrayBigInt": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("BigInt"))).Encode(), + }, + "PtrArrayBigIntPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("BigInt")))).Encode(), + }, + "PtrArrayBool": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Boolean"))).Encode(), + }, + "PtrArrayBoolPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Boolean")))).Encode(), + }, + "PtrArrayFloat32": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Float32"))).Encode(), + }, + "PtrArrayFloat32Ptr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Float32")))).Encode(), + }, + "PtrArrayFloat64": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Float64"))).Encode(), + }, + 
"PtrArrayFloat64Ptr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Float64")))).Encode(), + }, + "PtrArrayInt": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Int32"))).Encode(), + }, + "PtrArrayInt16": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Int16"))).Encode(), + }, + "PtrArrayInt16Ptr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int16")))).Encode(), + }, + "PtrArrayInt32": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Int32"))).Encode(), + }, + "PtrArrayInt32Ptr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int32")))).Encode(), + }, + "PtrArrayInt64": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Int64"))).Encode(), + }, + "PtrArrayInt64Ptr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int64")))).Encode(), + }, + "PtrArrayInt8": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Int8"))).Encode(), + }, + "PtrArrayInt8Ptr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int8")))).Encode(), + }, + "PtrArrayIntPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int32")))).Encode(), + }, + "PtrArrayJSON": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("JSON"))).Encode(), + }, + "PtrArrayJSONPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("JSON")))).Encode(), + }, + "PtrArrayRawJSON": schema.ObjectField{ + Type: 
schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("RawJSON"))).Encode(), + }, + "PtrArrayRawJSONPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("RawJSON")))).Encode(), + }, + "PtrArrayString": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("String"))).Encode(), + }, + "PtrArrayStringPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("String")))).Encode(), + }, + "PtrArrayTime": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("TimestampTZ"))).Encode(), + }, + "PtrArrayTimePtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("TimestampTZ")))).Encode(), + }, + "PtrArrayUUID": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("UUID"))).Encode(), + }, + "PtrArrayUUIDPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("UUID")))).Encode(), + }, + "PtrArrayUint": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Int32"))).Encode(), + }, + "PtrArrayUint16": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Int16"))).Encode(), + }, + "PtrArrayUint16Ptr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int16")))).Encode(), + }, + "PtrArrayUint32": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Int32"))).Encode(), + }, + "PtrArrayUint32Ptr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int32")))).Encode(), + }, + "PtrArrayUint64": schema.ObjectField{ + Type: 
schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Int64"))).Encode(), + }, + "PtrArrayUint64Ptr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int64")))).Encode(), + }, + "PtrArrayUint8": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Int8"))).Encode(), + }, + "PtrArrayUint8Ptr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int8")))).Encode(), + }, + "PtrArrayUintPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int32")))).Encode(), + }, + "RawJSON": schema.ObjectField{ + Type: schema.NewNamedType("RawJSON").Encode(), + }, + "RawJSONPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("RawJSON")).Encode(), + }, + "String": schema.ObjectField{ + Type: schema.NewNamedType("String").Encode(), + }, + "StringPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("String")).Encode(), + }, + "Text": schema.ObjectField{ + Type: schema.NewNamedType("String").Encode(), + }, + "TextPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("String")).Encode(), + }, + "Time": schema.ObjectField{ + Type: schema.NewNamedType("TimestampTZ").Encode(), + }, + "TimePtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("TimestampTZ")).Encode(), + }, + "URL": schema.ObjectField{ + Type: schema.NewNamedType("URL").Encode(), + }, + "UUID": schema.ObjectField{ + Type: schema.NewNamedType("UUID").Encode(), + }, + "UUIDPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("UUID")).Encode(), + }, + "Uint": schema.ObjectField{ + Type: schema.NewNamedType("Int32").Encode(), + }, + "Uint16": schema.ObjectField{ + Type: schema.NewNamedType("Int16").Encode(), + }, + "Uint16Ptr": schema.ObjectField{ + Type: 
schema.NewNullableType(schema.NewNamedType("Int16")).Encode(), + }, + "Uint32": schema.ObjectField{ + Type: schema.NewNamedType("Int32").Encode(), + }, + "Uint32Ptr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("Int32")).Encode(), + }, + "Uint64": schema.ObjectField{ + Type: schema.NewNamedType("Int64").Encode(), + }, + "Uint64Ptr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("Int64")).Encode(), + }, + "Uint8": schema.ObjectField{ + Type: schema.NewNamedType("Int8").Encode(), + }, + "Uint8Ptr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("Int8")).Encode(), + }, + "UintPtr": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("Int32")).Encode(), + }, + }, + }, + "GetTypesArgumentsArrayObject": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "content": schema.ObjectField{ + Type: schema.NewNamedType("String").Encode(), + }, + }, + }, + "GetTypesArgumentsArrayObjectPtr": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "content": schema.ObjectField{ + Type: schema.NewNamedType("String").Encode(), + }, + }, + }, + "GetTypesArgumentsObject": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "created_at": schema.ObjectField{ + Type: schema.NewNamedType("TimestampTZ").Encode(), + }, + "id": schema.ObjectField{ + Type: schema.NewNamedType("UUID").Encode(), + }, + }, + }, + "GetTypesArgumentsObjectPtr": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "Lat": schema.ObjectField{ + Type: schema.NewNamedType("Int32").Encode(), + }, + "Long": schema.ObjectField{ + Type: schema.NewNamedType("Int32").Encode(), + }, + }, + }, + "HelloResult": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "error": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("JSON")).Encode(), + }, + "foo": schema.ObjectField{ + Type: schema.NewNamedType("Foo").Encode(), + }, + "id": schema.ObjectField{ + Type: schema.NewNamedType("UUID").Encode(), + }, 
+ "num": schema.ObjectField{ + Type: schema.NewNamedType("Int32").Encode(), + }, + "text": schema.ObjectField{ + Type: schema.NewNamedType("String").Encode(), + }, + }, + }, + }, + Functions: []schema.FunctionInfo{ + { + Name: "getAuthor", + ResultType: schema.NewNullableType(schema.NewNamedType("GetAuthorResult")).Encode(), + Arguments: map[string]schema.ArgumentInfo{ + "id": { + Type: schema.NewNamedType("String").Encode(), + }, + "name": { + Type: schema.NewNamedType("String").Encode(), + }, + }, + }, + { + Name: "getAuthor2", + ResultType: schema.NewNamedType("GetAuthorResult_types").Encode(), + Arguments: map[string]schema.ArgumentInfo{ + "id": { + Type: schema.NewNamedType("String").Encode(), + }, + "name": { + Type: schema.NewNamedType("String").Encode(), + }, + }, + }, + { + Name: "getBool", + Description: toPtr("return an scalar boolean"), + ResultType: schema.NewNamedType("Boolean").Encode(), + Arguments: map[string]schema.ArgumentInfo{}, + }, + { + Name: "getTypes", + ResultType: schema.NewNullableType(schema.NewNamedType("GetTypesArguments")).Encode(), + Arguments: map[string]schema.ArgumentInfo{ + "ArrayBigInt": { + Type: schema.NewArrayType(schema.NewNamedType("BigInt")).Encode(), + }, + "ArrayBigIntPtr": { + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("BigInt"))).Encode(), + }, + "ArrayBool": { + Type: schema.NewArrayType(schema.NewNamedType("Boolean")).Encode(), + }, + "ArrayBoolPtr": { + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Boolean"))).Encode(), + }, + "ArrayFloat32": { + Type: schema.NewArrayType(schema.NewNamedType("Float32")).Encode(), + }, + "ArrayFloat32Ptr": { + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Float32"))).Encode(), + }, + "ArrayFloat64": { + Type: schema.NewArrayType(schema.NewNamedType("Float64")).Encode(), + }, + "ArrayFloat64Ptr": { + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Float64"))).Encode(), + }, + "ArrayInt": { 
+ Type: schema.NewArrayType(schema.NewNamedType("Int32")).Encode(), + }, + "ArrayInt16": { + Type: schema.NewArrayType(schema.NewNamedType("Int16")).Encode(), + }, + "ArrayInt16Ptr": { + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int16"))).Encode(), + }, + "ArrayInt32": { + Type: schema.NewArrayType(schema.NewNamedType("Int32")).Encode(), + }, + "ArrayInt32Ptr": { + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int32"))).Encode(), + }, + "ArrayInt64": { + Type: schema.NewArrayType(schema.NewNamedType("Int64")).Encode(), + }, + "ArrayInt64Ptr": { + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int64"))).Encode(), + }, + "ArrayInt8": { + Type: schema.NewArrayType(schema.NewNamedType("Int8")).Encode(), + }, + "ArrayInt8Ptr": { + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int8"))).Encode(), + }, + "ArrayIntPtr": { + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int32"))).Encode(), + }, + "ArrayJSON": { + Type: schema.NewArrayType(schema.NewNamedType("JSON")).Encode(), + }, + "ArrayJSONPtr": { + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("JSON"))).Encode(), + }, + "ArrayMap": { + Type: schema.NewArrayType(schema.NewNamedType("JSON")).Encode(), + }, + "ArrayMapPtr": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("JSON"))).Encode(), + }, + "ArrayObject": { + Type: schema.NewArrayType(schema.NewNamedType("GetTypesArgumentsArrayObject")).Encode(), + }, + "ArrayObjectPtr": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("GetTypesArgumentsArrayObjectPtr"))).Encode(), + }, + "ArrayRawJSON": { + Type: schema.NewArrayType(schema.NewNamedType("RawJSON")).Encode(), + }, + "ArrayRawJSONPtr": { + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("RawJSON"))).Encode(), + }, + "ArrayString": { + Type: schema.NewArrayType(schema.NewNamedType("String")).Encode(), + }, 
+ "ArrayStringPtr": { + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("String"))).Encode(), + }, + "ArrayTime": { + Type: schema.NewArrayType(schema.NewNamedType("TimestampTZ")).Encode(), + }, + "ArrayTimePtr": { + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("TimestampTZ"))).Encode(), + }, + "ArrayUUID": { + Type: schema.NewArrayType(schema.NewNamedType("UUID")).Encode(), + }, + "ArrayUUIDPtr": { + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("UUID"))).Encode(), + }, + "ArrayUint": { + Type: schema.NewArrayType(schema.NewNamedType("Int32")).Encode(), + }, + "ArrayUint16": { + Type: schema.NewArrayType(schema.NewNamedType("Int16")).Encode(), + }, + "ArrayUint16Ptr": { + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int16"))).Encode(), + }, + "ArrayUint32": { + Type: schema.NewArrayType(schema.NewNamedType("Int32")).Encode(), + }, + "ArrayUint32Ptr": { + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int32"))).Encode(), + }, + "ArrayUint64": { + Type: schema.NewArrayType(schema.NewNamedType("Int64")).Encode(), + }, + "ArrayUint64Ptr": { + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int64"))).Encode(), + }, + "ArrayUint8": { + Type: schema.NewArrayType(schema.NewNamedType("Int8")).Encode(), + }, + "ArrayUint8Ptr": { + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int8"))).Encode(), + }, + "ArrayUintPtr": { + Type: schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int32"))).Encode(), + }, + "BigInt": { + Type: schema.NewNamedType("BigInt").Encode(), + }, + "BigIntPtr": { + Type: schema.NewNullableType(schema.NewNamedType("BigInt")).Encode(), + }, + "Bool": { + Type: schema.NewNamedType("Boolean").Encode(), + }, + "BoolPtr": { + Type: schema.NewNullableType(schema.NewNamedType("Boolean")).Encode(), + }, + "Bytes": { + Type: schema.NewNamedType("Bytes").Encode(), + }, + "BytesPtr": { + Type: 
schema.NewNullableType(schema.NewNamedType("Bytes")).Encode(), + }, + "CustomScalar": { + Type: schema.NewNamedType("CommentString").Encode(), + }, + "CustomScalarPtr": { + Type: schema.NewNullableType(schema.NewNamedType("CommentString")).Encode(), + }, + "Date": { + Type: schema.NewNamedType("Date").Encode(), + }, + "DatePtr": { + Type: schema.NewNullableType(schema.NewNamedType("Date")).Encode(), + }, + "Enum": { + Type: schema.NewNamedType("SomeEnum").Encode(), + }, + "EnumPtr": { + Type: schema.NewNullableType(schema.NewNamedType("SomeEnum")).Encode(), + }, + "Float32": { + Type: schema.NewNamedType("Float32").Encode(), + }, + "Float32Ptr": { + Type: schema.NewNullableType(schema.NewNamedType("Float32")).Encode(), + }, + "Float64": { + Type: schema.NewNamedType("Float64").Encode(), + }, + "Float64Ptr": { + Type: schema.NewNullableType(schema.NewNamedType("Float64")).Encode(), + }, + "Int": { + Type: schema.NewNamedType("Int32").Encode(), + }, + "Int16": { + Type: schema.NewNamedType("Int16").Encode(), + }, + "Int16Ptr": { + Type: schema.NewNullableType(schema.NewNamedType("Int16")).Encode(), + }, + "Int32": { + Type: schema.NewNamedType("Int32").Encode(), + }, + "Int32Ptr": { + Type: schema.NewNullableType(schema.NewNamedType("Int32")).Encode(), + }, + "Int64": { + Type: schema.NewNamedType("Int64").Encode(), + }, + "Int64Ptr": { + Type: schema.NewNullableType(schema.NewNamedType("Int64")).Encode(), + }, + "Int8": { + Type: schema.NewNamedType("Int8").Encode(), + }, + "Int8Ptr": { + Type: schema.NewNullableType(schema.NewNamedType("Int8")).Encode(), + }, + "IntPtr": { + Type: schema.NewNullableType(schema.NewNamedType("Int32")).Encode(), + }, + "JSON": { + Type: schema.NewNamedType("JSON").Encode(), + }, + "JSONPtr": { + Type: schema.NewNullableType(schema.NewNamedType("JSON")).Encode(), + }, + "Map": { + Type: schema.NewNamedType("JSON").Encode(), + }, + "MapPtr": { + Type: schema.NewNullableType(schema.NewNamedType("JSON")).Encode(), + }, + "NamedArray": { + 
Type: schema.NewArrayType(schema.NewNamedType("Author")).Encode(), + }, + "NamedArrayPtr": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Author"))).Encode(), + }, + "NamedObject": { + Type: schema.NewNamedType("Author").Encode(), + }, + "NamedObjectPtr": { + Type: schema.NewNullableType(schema.NewNamedType("Author")).Encode(), + }, + "Object": { + Type: schema.NewNamedType("GetTypesArgumentsObject").Encode(), + }, + "ObjectPtr": { + Type: schema.NewNullableType(schema.NewNamedType("GetTypesArgumentsObjectPtr")).Encode(), + }, + "PtrArrayBigInt": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("BigInt"))).Encode(), + }, + "PtrArrayBigIntPtr": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("BigInt")))).Encode(), + }, + "PtrArrayBool": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Boolean"))).Encode(), + }, + "PtrArrayBoolPtr": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Boolean")))).Encode(), + }, + "PtrArrayFloat32": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Float32"))).Encode(), + }, + "PtrArrayFloat32Ptr": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Float32")))).Encode(), + }, + "PtrArrayFloat64": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Float64"))).Encode(), + }, + "PtrArrayFloat64Ptr": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Float64")))).Encode(), + }, + "PtrArrayInt": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Int32"))).Encode(), + }, + "PtrArrayInt16": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Int16"))).Encode(), + }, + "PtrArrayInt16Ptr": { + Type: 
schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int16")))).Encode(), + }, + "PtrArrayInt32": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Int32"))).Encode(), + }, + "PtrArrayInt32Ptr": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int32")))).Encode(), + }, + "PtrArrayInt64": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Int64"))).Encode(), + }, + "PtrArrayInt64Ptr": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int64")))).Encode(), + }, + "PtrArrayInt8": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Int8"))).Encode(), + }, + "PtrArrayInt8Ptr": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int8")))).Encode(), + }, + "PtrArrayIntPtr": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int32")))).Encode(), + }, + "PtrArrayJSON": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("JSON"))).Encode(), + }, + "PtrArrayJSONPtr": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("JSON")))).Encode(), + }, + "PtrArrayRawJSON": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("RawJSON"))).Encode(), + }, + "PtrArrayRawJSONPtr": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("RawJSON")))).Encode(), + }, + "PtrArrayString": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("String"))).Encode(), + }, + "PtrArrayStringPtr": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("String")))).Encode(), + }, + "PtrArrayTime": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("TimestampTZ"))).Encode(), + }, + "PtrArrayTimePtr": { + Type: 
schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("TimestampTZ")))).Encode(), + }, + "PtrArrayUUID": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("UUID"))).Encode(), + }, + "PtrArrayUUIDPtr": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("UUID")))).Encode(), + }, + "PtrArrayUint": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Int32"))).Encode(), + }, + "PtrArrayUint16": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Int16"))).Encode(), + }, + "PtrArrayUint16Ptr": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int16")))).Encode(), + }, + "PtrArrayUint32": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Int32"))).Encode(), + }, + "PtrArrayUint32Ptr": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int32")))).Encode(), + }, + "PtrArrayUint64": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Int64"))).Encode(), + }, + "PtrArrayUint64Ptr": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int64")))).Encode(), + }, + "PtrArrayUint8": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNamedType("Int8"))).Encode(), + }, + "PtrArrayUint8Ptr": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int8")))).Encode(), + }, + "PtrArrayUintPtr": { + Type: schema.NewNullableType(schema.NewArrayType(schema.NewNullableType(schema.NewNamedType("Int32")))).Encode(), + }, + "RawJSON": { + Type: schema.NewNamedType("RawJSON").Encode(), + }, + "RawJSONPtr": { + Type: schema.NewNullableType(schema.NewNamedType("RawJSON")).Encode(), + }, + "String": { + Type: schema.NewNamedType("String").Encode(), + }, + "StringPtr": { + Type: 
schema.NewNullableType(schema.NewNamedType("String")).Encode(), + }, + "Text": { + Type: schema.NewNamedType("String").Encode(), + }, + "TextPtr": { + Type: schema.NewNullableType(schema.NewNamedType("String")).Encode(), + }, + "Time": { + Type: schema.NewNamedType("TimestampTZ").Encode(), + }, + "TimePtr": { + Type: schema.NewNullableType(schema.NewNamedType("TimestampTZ")).Encode(), + }, + "URL": { + Type: schema.NewNamedType("URL").Encode(), + }, + "UUID": { + Type: schema.NewNamedType("UUID").Encode(), + }, + "UUIDPtr": { + Type: schema.NewNullableType(schema.NewNamedType("UUID")).Encode(), + }, + "Uint": { + Type: schema.NewNamedType("Int32").Encode(), + }, + "Uint16": { + Type: schema.NewNamedType("Int16").Encode(), + }, + "Uint16Ptr": { + Type: schema.NewNullableType(schema.NewNamedType("Int16")).Encode(), + }, + "Uint32": { + Type: schema.NewNamedType("Int32").Encode(), + }, + "Uint32Ptr": { + Type: schema.NewNullableType(schema.NewNamedType("Int32")).Encode(), + }, + "Uint64": { + Type: schema.NewNamedType("Int64").Encode(), + }, + "Uint64Ptr": { + Type: schema.NewNullableType(schema.NewNamedType("Int64")).Encode(), + }, + "Uint8": { + Type: schema.NewNamedType("Int8").Encode(), + }, + "Uint8Ptr": { + Type: schema.NewNullableType(schema.NewNamedType("Int8")).Encode(), + }, + "UintPtr": { + Type: schema.NewNullableType(schema.NewNamedType("Int32")).Encode(), + }, + }, + }, + { + Name: "hello", + Description: toPtr("sends a hello message"), + ResultType: schema.NewNullableType(schema.NewNamedType("HelloResult")).Encode(), + Arguments: map[string]schema.ArgumentInfo{}, + }, + { + Name: "getArticles", + Description: toPtr("GetArticles"), + ResultType: schema.NewArrayType(schema.NewNamedType("GetArticlesResult")).Encode(), + Arguments: map[string]schema.ArgumentInfo{ + "Limit": { + Type: schema.NewNamedType("Float64").Encode(), + }, + "name": { + Type: schema.NewNamedType("String").Encode(), + }, + }, + }, + }, + Procedures: []schema.ProcedureInfo{ + { + Name: 
"create_article", + Description: toPtr("CreateArticle"), + ResultType: schema.NewNamedType("CreateArticleResult").Encode(), + Arguments: map[string]schema.ArgumentInfo{ + "author": { + Type: schema.NewNamedType("CreateArticleArgumentsAuthor").Encode(), + }, + }, + }, + { + Name: "increase", + Description: toPtr("Increase"), + ResultType: schema.NewNamedType("Int32").Encode(), + Arguments: map[string]schema.ArgumentInfo{}, + }, + { + Name: "createAuthor", + Description: toPtr("creates an author"), + ResultType: schema.NewNullableType(schema.NewNamedType("CreateAuthorResult")).Encode(), + Arguments: map[string]schema.ArgumentInfo{ + "name": { + Type: schema.NewNamedType("String").Encode(), + }, + }, + }, + { + Name: "createAuthors", + Description: toPtr("creates a list of authors"), + ResultType: schema.NewArrayType(schema.NewNamedType("CreateAuthorResult")).Encode(), + Arguments: map[string]schema.ArgumentInfo{ + "Authors": { + Type: schema.NewArrayType(schema.NewNamedType("CreateAuthorArguments")).Encode(), + }, + }, + }, + }, + ScalarTypes: schema.SchemaResponseScalarTypes{ + "AuthorStatus": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationEnum([]string{"active", "inactive"}).Encode(), + }, + "BigInt": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationBigInteger().Encode(), + }, + "Boolean": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationBoolean().Encode(), + }, + "Bytes": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: 
map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationBytes().Encode(), + }, + "CommentString": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationString().Encode(), + }, + "Date": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationDate().Encode(), + }, + "Float32": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationFloat32().Encode(), + }, + "Float64": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationFloat64().Encode(), + }, + "Foo": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationJSON().Encode(), + }, + "Int16": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationInt16().Encode(), + }, + "Int32": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationInt32().Encode(), + }, + "Int64": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationInt64().Encode(), + }, + 
"Int8": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationInt8().Encode(), + }, + "JSON": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationJSON().Encode(), + }, + "RawJSON": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationJSON().Encode(), + }, + "SomeEnum": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationEnum([]string{"foo", "bar"}).Encode(), + }, + "String": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationString().Encode(), + }, + "TimestampTZ": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationTimestampTZ().Encode(), + }, + "URL": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationString().Encode(), + }, + "UUID": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationUUID().Encode(), + }, + }, + } +} diff --git a/example/codegen/schema.generated.json b/example/codegen/schema.generated.json 
deleted file mode 100644 index 890bb20..0000000 --- a/example/codegen/schema.generated.json +++ /dev/null @@ -1,3241 +0,0 @@ -{ - "collections": [], - "functions": [ - { - "arguments": {}, - "description": "return an scalar boolean", - "name": "getBool", - "result_type": { - "name": "Boolean", - "type": "named" - } - }, - { - "arguments": { - "ArrayBigInt": { - "type": { - "element_type": { - "name": "BigInt", - "type": "named" - }, - "type": "array" - } - }, - "ArrayBigIntPtr": { - "type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "BigInt", - "type": "named" - } - }, - "type": "array" - } - }, - "ArrayBool": { - "type": { - "element_type": { - "name": "Boolean", - "type": "named" - }, - "type": "array" - } - }, - "ArrayBoolPtr": { - "type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Boolean", - "type": "named" - } - }, - "type": "array" - } - }, - "ArrayFloat32": { - "type": { - "element_type": { - "name": "Float32", - "type": "named" - }, - "type": "array" - } - }, - "ArrayFloat32Ptr": { - "type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Float32", - "type": "named" - } - }, - "type": "array" - } - }, - "ArrayFloat64": { - "type": { - "element_type": { - "name": "Float64", - "type": "named" - }, - "type": "array" - } - }, - "ArrayFloat64Ptr": { - "type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Float64", - "type": "named" - } - }, - "type": "array" - } - }, - "ArrayInt": { - "type": { - "element_type": { - "name": "Int32", - "type": "named" - }, - "type": "array" - } - }, - "ArrayInt16": { - "type": { - "element_type": { - "name": "Int16", - "type": "named" - }, - "type": "array" - } - }, - "ArrayInt16Ptr": { - "type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Int16", - "type": "named" - } - }, - "type": "array" - } - }, - "ArrayInt32": { - "type": { - "element_type": { - "name": 
"Int32", - "type": "named" - }, - "type": "array" - } - }, - "ArrayInt32Ptr": { - "type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Int32", - "type": "named" - } - }, - "type": "array" - } - }, - "ArrayInt64": { - "type": { - "element_type": { - "name": "Int64", - "type": "named" - }, - "type": "array" - } - }, - "ArrayInt64Ptr": { - "type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Int64", - "type": "named" - } - }, - "type": "array" - } - }, - "ArrayInt8": { - "type": { - "element_type": { - "name": "Int8", - "type": "named" - }, - "type": "array" - } - }, - "ArrayInt8Ptr": { - "type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Int8", - "type": "named" - } - }, - "type": "array" - } - }, - "ArrayIntPtr": { - "type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Int32", - "type": "named" - } - }, - "type": "array" - } - }, - "ArrayJSON": { - "type": { - "element_type": { - "name": "JSON", - "type": "named" - }, - "type": "array" - } - }, - "ArrayJSONPtr": { - "type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "JSON", - "type": "named" - } - }, - "type": "array" - } - }, - "ArrayMap": { - "type": { - "element_type": { - "name": "JSON", - "type": "named" - }, - "type": "array" - } - }, - "ArrayMapPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "name": "JSON", - "type": "named" - }, - "type": "array" - } - } - }, - "ArrayObject": { - "type": { - "element_type": { - "name": "GetTypesArgumentsArrayObject", - "type": "named" - }, - "type": "array" - } - }, - "ArrayObjectPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "name": "GetTypesArgumentsArrayObjectPtr", - "type": "named" - }, - "type": "array" - } - } - }, - "ArrayRawJSON": { - "type": { - "element_type": { - "name": "RawJSON", - "type": "named" - }, - "type": "array" 
- } - }, - "ArrayRawJSONPtr": { - "type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "RawJSON", - "type": "named" - } - }, - "type": "array" - } - }, - "ArrayString": { - "type": { - "element_type": { - "name": "String", - "type": "named" - }, - "type": "array" - } - }, - "ArrayStringPtr": { - "type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "String", - "type": "named" - } - }, - "type": "array" - } - }, - "ArrayTime": { - "type": { - "element_type": { - "name": "TimestampTZ", - "type": "named" - }, - "type": "array" - } - }, - "ArrayTimePtr": { - "type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "TimestampTZ", - "type": "named" - } - }, - "type": "array" - } - }, - "ArrayUUID": { - "type": { - "element_type": { - "name": "UUID", - "type": "named" - }, - "type": "array" - } - }, - "ArrayUUIDPtr": { - "type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "UUID", - "type": "named" - } - }, - "type": "array" - } - }, - "ArrayUint": { - "type": { - "element_type": { - "name": "Int32", - "type": "named" - }, - "type": "array" - } - }, - "ArrayUint16": { - "type": { - "element_type": { - "name": "Int16", - "type": "named" - }, - "type": "array" - } - }, - "ArrayUint16Ptr": { - "type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Int16", - "type": "named" - } - }, - "type": "array" - } - }, - "ArrayUint32": { - "type": { - "element_type": { - "name": "Int32", - "type": "named" - }, - "type": "array" - } - }, - "ArrayUint32Ptr": { - "type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Int32", - "type": "named" - } - }, - "type": "array" - } - }, - "ArrayUint64": { - "type": { - "element_type": { - "name": "Int64", - "type": "named" - }, - "type": "array" - } - }, - "ArrayUint64Ptr": { - "type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": 
"Int64", - "type": "named" - } - }, - "type": "array" - } - }, - "ArrayUint8": { - "type": { - "element_type": { - "name": "Int8", - "type": "named" - }, - "type": "array" - } - }, - "ArrayUint8Ptr": { - "type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Int8", - "type": "named" - } - }, - "type": "array" - } - }, - "ArrayUintPtr": { - "type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Int32", - "type": "named" - } - }, - "type": "array" - } - }, - "BigInt": { - "type": { - "name": "BigInt", - "type": "named" - } - }, - "BigIntPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "BigInt", - "type": "named" - } - } - }, - "Bool": { - "type": { - "name": "Boolean", - "type": "named" - } - }, - "BoolPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "Boolean", - "type": "named" - } - } - }, - "Bytes": { - "type": { - "name": "Bytes", - "type": "named" - } - }, - "BytesPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "Bytes", - "type": "named" - } - } - }, - "CustomScalar": { - "type": { - "name": "CommentString", - "type": "named" - } - }, - "CustomScalarPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "CommentString", - "type": "named" - } - } - }, - "Date": { - "type": { - "name": "Date", - "type": "named" - } - }, - "DatePtr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "Date", - "type": "named" - } - } - }, - "Enum": { - "type": { - "name": "SomeEnum", - "type": "named" - } - }, - "EnumPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "SomeEnum", - "type": "named" - } - } - }, - "Float32": { - "type": { - "name": "Float32", - "type": "named" - } - }, - "Float32Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "Float32", - "type": "named" - } - } - }, - "Float64": { - "type": { - "name": "Float64", - "type": "named" - } - }, - 
"Float64Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "Float64", - "type": "named" - } - } - }, - "Int": { - "type": { - "name": "Int32", - "type": "named" - } - }, - "Int16": { - "type": { - "name": "Int16", - "type": "named" - } - }, - "Int16Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "Int16", - "type": "named" - } - } - }, - "Int32": { - "type": { - "name": "Int32", - "type": "named" - } - }, - "Int32Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "Int32", - "type": "named" - } - } - }, - "Int64": { - "type": { - "name": "Int64", - "type": "named" - } - }, - "Int64Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "Int64", - "type": "named" - } - } - }, - "Int8": { - "type": { - "name": "Int8", - "type": "named" - } - }, - "Int8Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "Int8", - "type": "named" - } - } - }, - "IntPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "Int32", - "type": "named" - } - } - }, - "JSON": { - "type": { - "name": "JSON", - "type": "named" - } - }, - "JSONPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "JSON", - "type": "named" - } - } - }, - "Map": { - "type": { - "name": "JSON", - "type": "named" - } - }, - "MapPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "JSON", - "type": "named" - } - } - }, - "NamedArray": { - "type": { - "element_type": { - "name": "Author", - "type": "named" - }, - "type": "array" - } - }, - "NamedArrayPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "name": "Author", - "type": "named" - }, - "type": "array" - } - } - }, - "NamedObject": { - "type": { - "name": "Author", - "type": "named" - } - }, - "NamedObjectPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "Author", - "type": "named" - } - } - }, - "Object": { - "type": { - 
"name": "GetTypesArgumentsObject", - "type": "named" - } - }, - "ObjectPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "GetTypesArgumentsObjectPtr", - "type": "named" - } - } - }, - "PtrArrayBigInt": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "name": "BigInt", - "type": "named" - }, - "type": "array" - } - } - }, - "PtrArrayBigIntPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "BigInt", - "type": "named" - } - }, - "type": "array" - } - } - }, - "PtrArrayBool": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "name": "Boolean", - "type": "named" - }, - "type": "array" - } - } - }, - "PtrArrayBoolPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Boolean", - "type": "named" - } - }, - "type": "array" - } - } - }, - "PtrArrayFloat32": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "name": "Float32", - "type": "named" - }, - "type": "array" - } - } - }, - "PtrArrayFloat32Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Float32", - "type": "named" - } - }, - "type": "array" - } - } - }, - "PtrArrayFloat64": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "name": "Float64", - "type": "named" - }, - "type": "array" - } - } - }, - "PtrArrayFloat64Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Float64", - "type": "named" - } - }, - "type": "array" - } - } - }, - "PtrArrayInt": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "name": "Int32", - "type": "named" - }, - "type": "array" - } - } - }, - "PtrArrayInt16": { - 
"type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "name": "Int16", - "type": "named" - }, - "type": "array" - } - } - }, - "PtrArrayInt16Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Int16", - "type": "named" - } - }, - "type": "array" - } - } - }, - "PtrArrayInt32": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "name": "Int32", - "type": "named" - }, - "type": "array" - } - } - }, - "PtrArrayInt32Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Int32", - "type": "named" - } - }, - "type": "array" - } - } - }, - "PtrArrayInt64": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "name": "Int64", - "type": "named" - }, - "type": "array" - } - } - }, - "PtrArrayInt64Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Int64", - "type": "named" - } - }, - "type": "array" - } - } - }, - "PtrArrayInt8": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "name": "Int8", - "type": "named" - }, - "type": "array" - } - } - }, - "PtrArrayInt8Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Int8", - "type": "named" - } - }, - "type": "array" - } - } - }, - "PtrArrayIntPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Int32", - "type": "named" - } - }, - "type": "array" - } - } - }, - "PtrArrayJSON": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "name": "JSON", - "type": "named" - }, - "type": "array" - } - } - }, - "PtrArrayJSONPtr": { - "type": { - "type": "nullable", - 
"underlying_type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "JSON", - "type": "named" - } - }, - "type": "array" - } - } - }, - "PtrArrayRawJSON": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "name": "RawJSON", - "type": "named" - }, - "type": "array" - } - } - }, - "PtrArrayRawJSONPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "RawJSON", - "type": "named" - } - }, - "type": "array" - } - } - }, - "PtrArrayString": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "name": "String", - "type": "named" - }, - "type": "array" - } - } - }, - "PtrArrayStringPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "String", - "type": "named" - } - }, - "type": "array" - } - } - }, - "PtrArrayTime": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "name": "TimestampTZ", - "type": "named" - }, - "type": "array" - } - } - }, - "PtrArrayTimePtr": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "TimestampTZ", - "type": "named" - } - }, - "type": "array" - } - } - }, - "PtrArrayUUID": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "name": "UUID", - "type": "named" - }, - "type": "array" - } - } - }, - "PtrArrayUUIDPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "UUID", - "type": "named" - } - }, - "type": "array" - } - } - }, - "PtrArrayUint": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "name": "Int32", - "type": "named" - }, - "type": "array" - } - } - }, - "PtrArrayUint16": { - "type": { - "type": "nullable", - 
"underlying_type": { - "element_type": { - "name": "Int16", - "type": "named" - }, - "type": "array" - } - } - }, - "PtrArrayUint16Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Int16", - "type": "named" - } - }, - "type": "array" - } - } - }, - "PtrArrayUint32": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "name": "Int32", - "type": "named" - }, - "type": "array" - } - } - }, - "PtrArrayUint32Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Int32", - "type": "named" - } - }, - "type": "array" - } - } - }, - "PtrArrayUint64": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "name": "Int64", - "type": "named" - }, - "type": "array" - } - } - }, - "PtrArrayUint64Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Int64", - "type": "named" - } - }, - "type": "array" - } - } - }, - "PtrArrayUint8": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "name": "Int8", - "type": "named" - }, - "type": "array" - } - } - }, - "PtrArrayUint8Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Int8", - "type": "named" - } - }, - "type": "array" - } - } - }, - "PtrArrayUintPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Int32", - "type": "named" - } - }, - "type": "array" - } - } - }, - "RawJSON": { - "type": { - "name": "RawJSON", - "type": "named" - } - }, - "RawJSONPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "RawJSON", - "type": "named" - } - } - }, - "String": { - "type": { - "name": "String", - 
"type": "named" - } - }, - "StringPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "String", - "type": "named" - } - } - }, - "Text": { - "type": { - "name": "String", - "type": "named" - } - }, - "TextPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "String", - "type": "named" - } - } - }, - "Time": { - "type": { - "name": "TimestampTZ", - "type": "named" - } - }, - "TimePtr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "TimestampTZ", - "type": "named" - } - } - }, - "URL": { - "type": { - "name": "URL", - "type": "named" - } - }, - "UUID": { - "type": { - "name": "UUID", - "type": "named" - } - }, - "UUIDPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "UUID", - "type": "named" - } - } - }, - "Uint": { - "type": { - "name": "Int32", - "type": "named" - } - }, - "Uint16": { - "type": { - "name": "Int16", - "type": "named" - } - }, - "Uint16Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "Int16", - "type": "named" - } - } - }, - "Uint32": { - "type": { - "name": "Int32", - "type": "named" - } - }, - "Uint32Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "Int32", - "type": "named" - } - } - }, - "Uint64": { - "type": { - "name": "Int64", - "type": "named" - } - }, - "Uint64Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "Int64", - "type": "named" - } - } - }, - "Uint8": { - "type": { - "name": "Int8", - "type": "named" - } - }, - "Uint8Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "Int8", - "type": "named" - } - } - }, - "UintPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "Int32", - "type": "named" - } - } - } - }, - "name": "getTypes", - "result_type": { - "type": "nullable", - "underlying_type": { - "name": "GetTypesArguments", - "type": "named" - } - } - }, - { - "arguments": {}, - "description": "sends a hello message", - 
"name": "hello", - "result_type": { - "type": "nullable", - "underlying_type": { - "name": "HelloResult", - "type": "named" - } - } - }, - { - "arguments": { - "Limit": { - "type": { - "name": "Float64", - "type": "named" - } - } - }, - "description": "GetArticles", - "name": "getArticles", - "result_type": { - "element_type": { - "name": "GetArticlesResult", - "type": "named" - }, - "type": "array" - } - } - ], - "object_types": { - "Author": { - "fields": { - "author": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "Author", - "type": "named" - } - } - }, - "created_at": { - "type": { - "name": "TimestampTZ", - "type": "named" - } - }, - "id": { - "type": { - "name": "String", - "type": "named" - } - }, - "tags": { - "type": { - "element_type": { - "name": "String", - "type": "named" - }, - "type": "array" - } - } - } - }, - "CreateArticleArgumentsAuthor": { - "fields": { - "created_at": { - "type": { - "name": "TimestampTZ", - "type": "named" - } - }, - "id": { - "type": { - "name": "UUID", - "type": "named" - } - } - } - }, - "CreateArticleResult": { - "fields": { - "authors": { - "type": { - "element_type": { - "name": "Author", - "type": "named" - }, - "type": "array" - } - }, - "id": { - "type": { - "name": "Int32", - "type": "named" - } - } - } - }, - "CreateAuthorResult": { - "fields": { - "created_at": { - "type": { - "name": "TimestampTZ", - "type": "named" - } - }, - "id": { - "type": { - "name": "Int32", - "type": "named" - } - }, - "name": { - "type": { - "name": "String", - "type": "named" - } - } - } - }, - "GetArticlesResult": { - "fields": { - "Name": { - "type": { - "name": "String", - "type": "named" - } - }, - "id": { - "type": { - "name": "String", - "type": "named" - } - } - } - }, - "GetTypesArguments": { - "fields": { - "ArrayBigInt": { - "type": { - "element_type": { - "name": "BigInt", - "type": "named" - }, - "type": "array" - } - }, - "ArrayBigIntPtr": { - "type": { - "element_type": { - "type": "nullable", - 
"underlying_type": { - "name": "BigInt", - "type": "named" - } - }, - "type": "array" - } - }, - "ArrayBool": { - "type": { - "element_type": { - "name": "Boolean", - "type": "named" - }, - "type": "array" - } - }, - "ArrayBoolPtr": { - "type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Boolean", - "type": "named" - } - }, - "type": "array" - } - }, - "ArrayFloat32": { - "type": { - "element_type": { - "name": "Float32", - "type": "named" - }, - "type": "array" - } - }, - "ArrayFloat32Ptr": { - "type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Float32", - "type": "named" - } - }, - "type": "array" - } - }, - "ArrayFloat64": { - "type": { - "element_type": { - "name": "Float64", - "type": "named" - }, - "type": "array" - } - }, - "ArrayFloat64Ptr": { - "type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Float64", - "type": "named" - } - }, - "type": "array" - } - }, - "ArrayInt": { - "type": { - "element_type": { - "name": "Int32", - "type": "named" - }, - "type": "array" - } - }, - "ArrayInt16": { - "type": { - "element_type": { - "name": "Int16", - "type": "named" - }, - "type": "array" - } - }, - "ArrayInt16Ptr": { - "type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Int16", - "type": "named" - } - }, - "type": "array" - } - }, - "ArrayInt32": { - "type": { - "element_type": { - "name": "Int32", - "type": "named" - }, - "type": "array" - } - }, - "ArrayInt32Ptr": { - "type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Int32", - "type": "named" - } - }, - "type": "array" - } - }, - "ArrayInt64": { - "type": { - "element_type": { - "name": "Int64", - "type": "named" - }, - "type": "array" - } - }, - "ArrayInt64Ptr": { - "type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Int64", - "type": "named" - } - }, - "type": "array" - } - }, - "ArrayInt8": { - "type": { 
- "element_type": { - "name": "Int8", - "type": "named" - }, - "type": "array" - } - }, - "ArrayInt8Ptr": { - "type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Int8", - "type": "named" - } - }, - "type": "array" - } - }, - "ArrayIntPtr": { - "type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Int32", - "type": "named" - } - }, - "type": "array" - } - }, - "ArrayJSON": { - "type": { - "element_type": { - "name": "JSON", - "type": "named" - }, - "type": "array" - } - }, - "ArrayJSONPtr": { - "type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "JSON", - "type": "named" - } - }, - "type": "array" - } - }, - "ArrayMap": { - "type": { - "element_type": { - "name": "JSON", - "type": "named" - }, - "type": "array" - } - }, - "ArrayMapPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "name": "JSON", - "type": "named" - }, - "type": "array" - } - } - }, - "ArrayObject": { - "type": { - "element_type": { - "name": "GetTypesArgumentsArrayObject", - "type": "named" - }, - "type": "array" - } - }, - "ArrayObjectPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "name": "GetTypesArgumentsArrayObjectPtr", - "type": "named" - }, - "type": "array" - } - } - }, - "ArrayRawJSON": { - "type": { - "element_type": { - "name": "RawJSON", - "type": "named" - }, - "type": "array" - } - }, - "ArrayRawJSONPtr": { - "type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "RawJSON", - "type": "named" - } - }, - "type": "array" - } - }, - "ArrayString": { - "type": { - "element_type": { - "name": "String", - "type": "named" - }, - "type": "array" - } - }, - "ArrayStringPtr": { - "type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "String", - "type": "named" - } - }, - "type": "array" - } - }, - "ArrayTime": { - "type": { - "element_type": { - "name": 
"TimestampTZ", - "type": "named" - }, - "type": "array" - } - }, - "ArrayTimePtr": { - "type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "TimestampTZ", - "type": "named" - } - }, - "type": "array" - } - }, - "ArrayUUID": { - "type": { - "element_type": { - "name": "UUID", - "type": "named" - }, - "type": "array" - } - }, - "ArrayUUIDPtr": { - "type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "UUID", - "type": "named" - } - }, - "type": "array" - } - }, - "ArrayUint": { - "type": { - "element_type": { - "name": "Int32", - "type": "named" - }, - "type": "array" - } - }, - "ArrayUint16": { - "type": { - "element_type": { - "name": "Int16", - "type": "named" - }, - "type": "array" - } - }, - "ArrayUint16Ptr": { - "type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Int16", - "type": "named" - } - }, - "type": "array" - } - }, - "ArrayUint32": { - "type": { - "element_type": { - "name": "Int32", - "type": "named" - }, - "type": "array" - } - }, - "ArrayUint32Ptr": { - "type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Int32", - "type": "named" - } - }, - "type": "array" - } - }, - "ArrayUint64": { - "type": { - "element_type": { - "name": "Int64", - "type": "named" - }, - "type": "array" - } - }, - "ArrayUint64Ptr": { - "type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Int64", - "type": "named" - } - }, - "type": "array" - } - }, - "ArrayUint8": { - "type": { - "element_type": { - "name": "Int8", - "type": "named" - }, - "type": "array" - } - }, - "ArrayUint8Ptr": { - "type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Int8", - "type": "named" - } - }, - "type": "array" - } - }, - "ArrayUintPtr": { - "type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Int32", - "type": "named" - } - }, - "type": "array" - } - }, - "BigInt": { - 
"type": { - "name": "BigInt", - "type": "named" - } - }, - "BigIntPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "BigInt", - "type": "named" - } - } - }, - "Bool": { - "type": { - "name": "Boolean", - "type": "named" - } - }, - "BoolPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "Boolean", - "type": "named" - } - } - }, - "Bytes": { - "type": { - "name": "Bytes", - "type": "named" - } - }, - "BytesPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "Bytes", - "type": "named" - } - } - }, - "CustomScalar": { - "type": { - "name": "CommentString", - "type": "named" - } - }, - "CustomScalarPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "CommentString", - "type": "named" - } - } - }, - "Date": { - "type": { - "name": "Date", - "type": "named" - } - }, - "DatePtr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "Date", - "type": "named" - } - } - }, - "Enum": { - "type": { - "name": "SomeEnum", - "type": "named" - } - }, - "EnumPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "SomeEnum", - "type": "named" - } - } - }, - "Float32": { - "type": { - "name": "Float32", - "type": "named" - } - }, - "Float32Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "Float32", - "type": "named" - } - } - }, - "Float64": { - "type": { - "name": "Float64", - "type": "named" - } - }, - "Float64Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "Float64", - "type": "named" - } - } - }, - "Int": { - "type": { - "name": "Int32", - "type": "named" - } - }, - "Int16": { - "type": { - "name": "Int16", - "type": "named" - } - }, - "Int16Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "Int16", - "type": "named" - } - } - }, - "Int32": { - "type": { - "name": "Int32", - "type": "named" - } - }, - "Int32Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - 
"name": "Int32", - "type": "named" - } - } - }, - "Int64": { - "type": { - "name": "Int64", - "type": "named" - } - }, - "Int64Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "Int64", - "type": "named" - } - } - }, - "Int8": { - "type": { - "name": "Int8", - "type": "named" - } - }, - "Int8Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "Int8", - "type": "named" - } - } - }, - "IntPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "Int32", - "type": "named" - } - } - }, - "JSON": { - "type": { - "name": "JSON", - "type": "named" - } - }, - "JSONPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "JSON", - "type": "named" - } - } - }, - "Map": { - "type": { - "name": "JSON", - "type": "named" - } - }, - "MapPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "JSON", - "type": "named" - } - } - }, - "NamedArray": { - "type": { - "element_type": { - "name": "Author", - "type": "named" - }, - "type": "array" - } - }, - "NamedArrayPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "name": "Author", - "type": "named" - }, - "type": "array" - } - } - }, - "NamedObject": { - "type": { - "name": "Author", - "type": "named" - } - }, - "NamedObjectPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "Author", - "type": "named" - } - } - }, - "Object": { - "type": { - "name": "GetTypesArgumentsObject", - "type": "named" - } - }, - "ObjectPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "GetTypesArgumentsObjectPtr", - "type": "named" - } - } - }, - "PtrArrayBigInt": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "name": "BigInt", - "type": "named" - }, - "type": "array" - } - } - }, - "PtrArrayBigIntPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": 
"BigInt", - "type": "named" - } - }, - "type": "array" - } - } - }, - "PtrArrayBool": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "name": "Boolean", - "type": "named" - }, - "type": "array" - } - } - }, - "PtrArrayBoolPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Boolean", - "type": "named" - } - }, - "type": "array" - } - } - }, - "PtrArrayFloat32": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "name": "Float32", - "type": "named" - }, - "type": "array" - } - } - }, - "PtrArrayFloat32Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Float32", - "type": "named" - } - }, - "type": "array" - } - } - }, - "PtrArrayFloat64": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "name": "Float64", - "type": "named" - }, - "type": "array" - } - } - }, - "PtrArrayFloat64Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Float64", - "type": "named" - } - }, - "type": "array" - } - } - }, - "PtrArrayInt": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "name": "Int32", - "type": "named" - }, - "type": "array" - } - } - }, - "PtrArrayInt16": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "name": "Int16", - "type": "named" - }, - "type": "array" - } - } - }, - "PtrArrayInt16Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Int16", - "type": "named" - } - }, - "type": "array" - } - } - }, - "PtrArrayInt32": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "name": "Int32", - "type": "named" - }, - "type": "array" - } - } - 
}, - "PtrArrayInt32Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Int32", - "type": "named" - } - }, - "type": "array" - } - } - }, - "PtrArrayInt64": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "name": "Int64", - "type": "named" - }, - "type": "array" - } - } - }, - "PtrArrayInt64Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Int64", - "type": "named" - } - }, - "type": "array" - } - } - }, - "PtrArrayInt8": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "name": "Int8", - "type": "named" - }, - "type": "array" - } - } - }, - "PtrArrayInt8Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Int8", - "type": "named" - } - }, - "type": "array" - } - } - }, - "PtrArrayIntPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Int32", - "type": "named" - } - }, - "type": "array" - } - } - }, - "PtrArrayJSON": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "name": "JSON", - "type": "named" - }, - "type": "array" - } - } - }, - "PtrArrayJSONPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "JSON", - "type": "named" - } - }, - "type": "array" - } - } - }, - "PtrArrayRawJSON": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "name": "RawJSON", - "type": "named" - }, - "type": "array" - } - } - }, - "PtrArrayRawJSONPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "RawJSON", - "type": "named" - } - }, - 
"type": "array" - } - } - }, - "PtrArrayString": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "name": "String", - "type": "named" - }, - "type": "array" - } - } - }, - "PtrArrayStringPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "String", - "type": "named" - } - }, - "type": "array" - } - } - }, - "PtrArrayTime": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "name": "TimestampTZ", - "type": "named" - }, - "type": "array" - } - } - }, - "PtrArrayTimePtr": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "TimestampTZ", - "type": "named" - } - }, - "type": "array" - } - } - }, - "PtrArrayUUID": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "name": "UUID", - "type": "named" - }, - "type": "array" - } - } - }, - "PtrArrayUUIDPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "UUID", - "type": "named" - } - }, - "type": "array" - } - } - }, - "PtrArrayUint": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "name": "Int32", - "type": "named" - }, - "type": "array" - } - } - }, - "PtrArrayUint16": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "name": "Int16", - "type": "named" - }, - "type": "array" - } - } - }, - "PtrArrayUint16Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Int16", - "type": "named" - } - }, - "type": "array" - } - } - }, - "PtrArrayUint32": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "name": "Int32", - "type": "named" - }, - "type": "array" - } - } - }, - "PtrArrayUint32Ptr": { - "type": { - 
"type": "nullable", - "underlying_type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Int32", - "type": "named" - } - }, - "type": "array" - } - } - }, - "PtrArrayUint64": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "name": "Int64", - "type": "named" - }, - "type": "array" - } - } - }, - "PtrArrayUint64Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Int64", - "type": "named" - } - }, - "type": "array" - } - } - }, - "PtrArrayUint8": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "name": "Int8", - "type": "named" - }, - "type": "array" - } - } - }, - "PtrArrayUint8Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Int8", - "type": "named" - } - }, - "type": "array" - } - } - }, - "PtrArrayUintPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "element_type": { - "type": "nullable", - "underlying_type": { - "name": "Int32", - "type": "named" - } - }, - "type": "array" - } - } - }, - "RawJSON": { - "type": { - "name": "RawJSON", - "type": "named" - } - }, - "RawJSONPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "RawJSON", - "type": "named" - } - } - }, - "String": { - "type": { - "name": "String", - "type": "named" - } - }, - "StringPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "String", - "type": "named" - } - } - }, - "Text": { - "type": { - "name": "String", - "type": "named" - } - }, - "TextPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "String", - "type": "named" - } - } - }, - "Time": { - "type": { - "name": "TimestampTZ", - "type": "named" - } - }, - "TimePtr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "TimestampTZ", - "type": "named" - } - } - }, - 
"URL": { - "type": { - "name": "URL", - "type": "named" - } - }, - "UUID": { - "type": { - "name": "UUID", - "type": "named" - } - }, - "UUIDPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "UUID", - "type": "named" - } - } - }, - "Uint": { - "type": { - "name": "Int32", - "type": "named" - } - }, - "Uint16": { - "type": { - "name": "Int16", - "type": "named" - } - }, - "Uint16Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "Int16", - "type": "named" - } - } - }, - "Uint32": { - "type": { - "name": "Int32", - "type": "named" - } - }, - "Uint32Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "Int32", - "type": "named" - } - } - }, - "Uint64": { - "type": { - "name": "Int64", - "type": "named" - } - }, - "Uint64Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "Int64", - "type": "named" - } - } - }, - "Uint8": { - "type": { - "name": "Int8", - "type": "named" - } - }, - "Uint8Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "Int8", - "type": "named" - } - } - }, - "UintPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "name": "Int32", - "type": "named" - } - } - } - } - }, - "GetTypesArgumentsArrayObject": { - "fields": { - "content": { - "type": { - "name": "String", - "type": "named" - } - } - } - }, - "GetTypesArgumentsArrayObjectPtr": { - "fields": { - "content": { - "type": { - "name": "String", - "type": "named" - } - } - } - }, - "GetTypesArgumentsObject": { - "fields": { - "created_at": { - "type": { - "name": "TimestampTZ", - "type": "named" - } - }, - "id": { - "type": { - "name": "UUID", - "type": "named" - } - } - } - }, - "GetTypesArgumentsObjectPtr": { - "fields": { - "Lat": { - "type": { - "name": "Int32", - "type": "named" - } - }, - "Long": { - "type": { - "name": "Int32", - "type": "named" - } - } - } - }, - "HelloResult": { - "fields": { - "error": { - "type": { - "type": "nullable", - "underlying_type": 
{ - "name": "JSON", - "type": "named" - } - } - }, - "foo": { - "type": { - "name": "Foo", - "type": "named" - } - }, - "id": { - "type": { - "name": "UUID", - "type": "named" - } - }, - "num": { - "type": { - "name": "Int32", - "type": "named" - } - }, - "text": { - "type": { - "name": "String", - "type": "named" - } - } - } - } - }, - "procedures": [ - { - "arguments": { - "author": { - "type": { - "name": "CreateArticleArgumentsAuthor", - "type": "named" - } - } - }, - "description": "CreateArticle", - "name": "create_article", - "result_type": { - "type": "nullable", - "underlying_type": { - "name": "CreateArticleResult", - "type": "named" - } - } - }, - { - "arguments": {}, - "description": "Increase", - "name": "increase", - "result_type": { - "name": "Int32", - "type": "named" - } - }, - { - "arguments": { - "name": { - "type": { - "name": "String", - "type": "named" - } - } - }, - "description": "creates an author", - "name": "createAuthor", - "result_type": { - "type": "nullable", - "underlying_type": { - "name": "CreateAuthorResult", - "type": "named" - } - } - }, - { - "arguments": { - "names": { - "type": { - "element_type": { - "name": "String", - "type": "named" - }, - "type": "array" - } - } - }, - "description": "creates a list of authors", - "name": "createAuthors", - "result_type": { - "element_type": { - "name": "CreateAuthorResult", - "type": "named" - }, - "type": "array" - } - } - ], - "scalar_types": { - "BigInt": { - "aggregate_functions": {}, - "comparison_operators": {}, - "representation": { - "type": "biginteger" - } - }, - "Boolean": { - "aggregate_functions": {}, - "comparison_operators": {}, - "representation": { - "type": "boolean" - } - }, - "Bytes": { - "aggregate_functions": {}, - "comparison_operators": {}, - "representation": { - "type": "bytes" - } - }, - "CommentString": { - "aggregate_functions": {}, - "comparison_operators": {}, - "representation": { - "type": "string" - } - }, - "Date": { - "aggregate_functions": {}, - 
"comparison_operators": {}, - "representation": { - "type": "date" - } - }, - "Float32": { - "aggregate_functions": {}, - "comparison_operators": {}, - "representation": { - "type": "float32" - } - }, - "Float64": { - "aggregate_functions": {}, - "comparison_operators": {}, - "representation": { - "type": "float64" - } - }, - "Foo": { - "aggregate_functions": {}, - "comparison_operators": {}, - "representation": { - "type": "json" - } - }, - "Int16": { - "aggregate_functions": {}, - "comparison_operators": {}, - "representation": { - "type": "int16" - } - }, - "Int32": { - "aggregate_functions": {}, - "comparison_operators": {}, - "representation": { - "type": "int32" - } - }, - "Int64": { - "aggregate_functions": {}, - "comparison_operators": {}, - "representation": { - "type": "int64" - } - }, - "Int8": { - "aggregate_functions": {}, - "comparison_operators": {}, - "representation": { - "type": "int8" - } - }, - "JSON": { - "aggregate_functions": {}, - "comparison_operators": {}, - "representation": { - "type": "json" - } - }, - "RawJSON": { - "aggregate_functions": {}, - "comparison_operators": {}, - "representation": { - "type": "json" - } - }, - "SomeEnum": { - "aggregate_functions": {}, - "comparison_operators": {}, - "representation": { - "one_of": [ - "foo", - "bar" - ], - "type": "enum" - } - }, - "String": { - "aggregate_functions": {}, - "comparison_operators": {}, - "representation": { - "type": "string" - } - }, - "TimestampTZ": { - "aggregate_functions": {}, - "comparison_operators": {}, - "representation": { - "type": "timestamptz" - } - }, - "URL": { - "aggregate_functions": {}, - "comparison_operators": {}, - "representation": { - "type": "string" - } - }, - "UUID": { - "aggregate_functions": {}, - "comparison_operators": {}, - "representation": { - "type": "uuid" - } - } - } -} \ No newline at end of file diff --git a/example/codegen/testdata/capabilities b/example/codegen/testdata/capabilities deleted file mode 100644 index 15f68b5..0000000 --- 
a/example/codegen/testdata/capabilities +++ /dev/null @@ -1,9 +0,0 @@ -{ - "version": "0.1.6", - "capabilities": { - "query": { - "variables": {} - }, - "mutation": {} - } -} diff --git a/example/codegen/testdata/mutation/createAuthors/expected.json b/example/codegen/testdata/mutation/createAuthors/expected.json index fe915d5..a9667cb 100644 --- a/example/codegen/testdata/mutation/createAuthors/expected.json +++ b/example/codegen/testdata/mutation/createAuthors/expected.json @@ -2,10 +2,15 @@ "operation_results": [ { "result": [ + { + "created_at": "0001-01-01T00:00:00Z", + "id": 0, + "name": "Author 1" + }, { "created_at": "0001-01-01T00:00:00Z", "id": 1, - "name": "JwqvHKZBCJ" + "name": "Author 2" } ], "type": "procedure" diff --git a/example/codegen/testdata/mutation/createAuthors/request.json b/example/codegen/testdata/mutation/createAuthors/request.json index 9c2b042..a3de634 100644 --- a/example/codegen/testdata/mutation/createAuthors/request.json +++ b/example/codegen/testdata/mutation/createAuthors/request.json @@ -5,8 +5,13 @@ "type": "procedure", "name": "createAuthors", "arguments": { - "names": [ - "JwqvHKZBCJ" + "Authors": [ + { + "name": "Author 1" + }, + { + "name": "Author 2" + } ] }, "fields": { @@ -31,4 +36,4 @@ } } ] -} \ No newline at end of file +} diff --git a/example/codegen/testdata/query/getArticles/expected.json b/example/codegen/testdata/query/getArticles/expected.json index 661f7bc..b76348b 100644 --- a/example/codegen/testdata/query/getArticles/expected.json +++ b/example/codegen/testdata/query/getArticles/expected.json @@ -4,7 +4,7 @@ { "__value": [ { - "Name": "Article 1", + "Name": "foo", "id": "1" } ] diff --git a/example/codegen/testdata/query/getArticles/request.json b/example/codegen/testdata/query/getArticles/request.json index 8362aa6..6eed19e 100644 --- a/example/codegen/testdata/query/getArticles/request.json +++ b/example/codegen/testdata/query/getArticles/request.json @@ -1,8 +1,12 @@ { "arguments": { + "name": { + "type": 
"literal", + "value": "foo" + }, "Limit": { "type": "literal", - "value": 0.6437828488913296 + "value": 10 } }, "collection": "getArticles", @@ -31,4 +35,4 @@ } } } -} \ No newline at end of file +} diff --git a/example/codegen/testdata/schema b/example/codegen/testdata/schema deleted file mode 100644 index 550cb1c..0000000 --- a/example/codegen/testdata/schema +++ /dev/null @@ -1,1138 +0,0 @@ -{ - "scalar_types": { - "Boolean": { - "representation": { - "type": "boolean" - }, - "aggregate_functions": {}, - "comparison_operators": {} - }, - "CommentString": { - "representation": { - "type": "string" - }, - "aggregate_functions": {}, - "comparison_operators": {} - }, - "Float32": { - "representation": { - "type": "float32" - }, - "aggregate_functions": {}, - "comparison_operators": {} - }, - "Float64": { - "representation": { - "type": "float64" - }, - "aggregate_functions": {}, - "comparison_operators": {} - }, - "Foo": { - "aggregate_functions": {}, - "comparison_operators": {} - }, - "Int16": { - "representation": { - "type": "int16" - }, - "aggregate_functions": {}, - "comparison_operators": {} - }, - "Int32": { - "representation": { - "type": "int32" - }, - "aggregate_functions": {}, - "comparison_operators": {} - }, - "Int64": { - "representation": { - "type": "int64" - }, - "aggregate_functions": {}, - "comparison_operators": {} - }, - "Int8": { - "representation": { - "type": "int8" - }, - "aggregate_functions": {}, - "comparison_operators": {} - }, - "SomeEnum": { - "representation": { - "type": "enum", - "one_of": [ - "foo", - "bar" - ] - }, - "aggregate_functions": {}, - "comparison_operators": {} - }, - "String": { - "representation": { - "type": "string" - }, - "aggregate_functions": {}, - "comparison_operators": {} - }, - "TimestampTZ": { - "representation": { - "type": "timestamptz" - }, - "aggregate_functions": {}, - "comparison_operators": {} - }, - "UUID": { - "representation": { - "type": "uuid" - }, - "aggregate_functions": {}, - 
"comparison_operators": {} - } - }, - "object_types": { - "Author": { - "fields": { - "created_at": { - "type": { - "type": "named", - "name": "TimestampTZ" - } - }, - "id": { - "type": { - "type": "named", - "name": "String" - } - }, - "tags": { - "type": { - "type": "array", - "element_type": { - "type": "named", - "name": "String" - } - } - } - } - }, - "CreateArticleArgumentsAuthor": { - "fields": { - "created_at": { - "type": { - "type": "named", - "name": "TimestampTZ" - } - }, - "id": { - "type": { - "type": "named", - "name": "UUID" - } - } - } - }, - "CreateArticleResult": { - "fields": { - "authors": { - "type": { - "type": "array", - "element_type": { - "type": "named", - "name": "Author" - } - } - }, - "id": { - "type": { - "type": "named", - "name": "Int32" - } - } - } - }, - "CreateAuthorResult": { - "fields": { - "created_at": { - "type": { - "type": "named", - "name": "TimestampTZ" - } - }, - "id": { - "type": { - "type": "named", - "name": "Int32" - } - }, - "name": { - "type": { - "type": "named", - "name": "String" - } - } - } - }, - "GetArticlesResult": { - "fields": { - "Name": { - "type": { - "type": "named", - "name": "String" - } - }, - "id": { - "type": { - "type": "named", - "name": "String" - } - } - } - }, - "GetTypesArguments": { - "fields": { - "ArrayObject": { - "type": { - "type": "array", - "element_type": { - "type": "named", - "name": "GetTypesArgumentsArrayObject" - } - } - }, - "ArrayObjectPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "type": "array", - "element_type": { - "type": "named", - "name": "GetTypesArgumentsArrayObjectPtr" - } - } - } - }, - "Bool": { - "type": { - "type": "named", - "name": "Boolean" - } - }, - "BoolPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "type": "named", - "name": "Boolean" - } - } - }, - "CustomScalar": { - "type": { - "type": "named", - "name": "CommentString" - } - }, - "CustomScalarPtr": { - "type": { - "type": "nullable", - "underlying_type": { - 
"type": "named", - "name": "CommentString" - } - } - }, - "Enum": { - "type": { - "type": "named", - "name": "SomeEnum" - } - }, - "EnumPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "type": "named", - "name": "SomeEnum" - } - } - }, - "Float32": { - "type": { - "type": "named", - "name": "Float32" - } - }, - "Float32Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "type": "named", - "name": "Float32" - } - } - }, - "Float64": { - "type": { - "type": "named", - "name": "Float64" - } - }, - "Float64Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "type": "named", - "name": "Float64" - } - } - }, - "Int": { - "type": { - "type": "named", - "name": "Int32" - } - }, - "Int16": { - "type": { - "type": "named", - "name": "Int16" - } - }, - "Int16Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "type": "named", - "name": "Int16" - } - } - }, - "Int32": { - "type": { - "type": "named", - "name": "Int32" - } - }, - "Int32Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "type": "named", - "name": "Int32" - } - } - }, - "Int64": { - "type": { - "type": "named", - "name": "Int64" - } - }, - "Int64Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "type": "named", - "name": "Int64" - } - } - }, - "Int8": { - "type": { - "type": "named", - "name": "Int8" - } - }, - "Int8Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "type": "named", - "name": "Int8" - } - } - }, - "IntPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "type": "named", - "name": "Int32" - } - } - }, - "NamedArray": { - "type": { - "type": "array", - "element_type": { - "type": "named", - "name": "Author" - } - } - }, - "NamedArrayPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "type": "array", - "element_type": { - "type": "named", - "name": "Author" - } - } - } - }, - "NamedObject": { - "type": { - "type": "named", - "name": "Author" - } - }, - 
"NamedObjectPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "type": "named", - "name": "Author" - } - } - }, - "Object": { - "type": { - "type": "named", - "name": "GetTypesArgumentsObject" - } - }, - "ObjectPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "type": "named", - "name": "GetTypesArgumentsObjectPtr" - } - } - }, - "String": { - "type": { - "type": "named", - "name": "String" - } - }, - "StringPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "type": "named", - "name": "String" - } - } - }, - "Text": { - "type": { - "type": "named", - "name": "String" - } - }, - "TextPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "type": "named", - "name": "String" - } - } - }, - "Time": { - "type": { - "type": "named", - "name": "TimestampTZ" - } - }, - "TimePtr": { - "type": { - "type": "nullable", - "underlying_type": { - "type": "named", - "name": "TimestampTZ" - } - } - }, - "UUID": { - "type": { - "type": "named", - "name": "UUID" - } - }, - "UUIDArray": { - "type": { - "type": "array", - "element_type": { - "type": "named", - "name": "UUID" - } - } - }, - "UUIDPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "type": "named", - "name": "UUID" - } - } - }, - "Uint": { - "type": { - "type": "named", - "name": "Int32" - } - }, - "Uint16": { - "type": { - "type": "named", - "name": "Int16" - } - }, - "Uint16Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "type": "named", - "name": "Int16" - } - } - }, - "Uint32": { - "type": { - "type": "named", - "name": "Int32" - } - }, - "Uint32Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "type": "named", - "name": "Int32" - } - } - }, - "Uint64": { - "type": { - "type": "named", - "name": "Int64" - } - }, - "Uint64Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "type": "named", - "name": "Int64" - } - } - }, - "Uint8": { - "type": { - "type": "named", - "name": "Int8" - } - }, - 
"Uint8Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "type": "named", - "name": "Int8" - } - } - }, - "UintPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "type": "named", - "name": "Int32" - } - } - } - } - }, - "GetTypesArgumentsArrayObject": { - "fields": { - "content": { - "type": { - "type": "named", - "name": "String" - } - } - } - }, - "GetTypesArgumentsArrayObjectPtr": { - "fields": { - "content": { - "type": { - "type": "named", - "name": "String" - } - } - } - }, - "GetTypesArgumentsObject": { - "fields": { - "created_at": { - "type": { - "type": "named", - "name": "TimestampTZ" - } - }, - "id": { - "type": { - "type": "named", - "name": "UUID" - } - } - } - }, - "GetTypesArgumentsObjectPtr": { - "fields": { - "Lat": { - "type": { - "type": "named", - "name": "Int32" - } - }, - "Long": { - "type": { - "type": "named", - "name": "Int32" - } - } - } - }, - "HelloResult": { - "fields": { - "foo": { - "type": { - "type": "named", - "name": "Foo" - } - }, - "id": { - "type": { - "type": "named", - "name": "UUID" - } - }, - "num": { - "type": { - "type": "named", - "name": "Int32" - } - }, - "text": { - "type": { - "type": "named", - "name": "String" - } - } - } - } - }, - "collections": [], - "functions": [ - { - "name": "getBool", - "description": "return an scalar boolean", - "arguments": {}, - "result_type": { - "type": "named", - "name": "Boolean" - } - }, - { - "name": "getTypes", - "arguments": { - "ArrayObject": { - "type": { - "type": "array", - "element_type": { - "type": "named", - "name": "GetTypesArgumentsArrayObject" - } - } - }, - "ArrayObjectPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "type": "array", - "element_type": { - "type": "named", - "name": "GetTypesArgumentsArrayObjectPtr" - } - } - } - }, - "Bool": { - "type": { - "type": "named", - "name": "Boolean" - } - }, - "BoolPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "type": "named", - "name": "Boolean" - } - 
} - }, - "CustomScalar": { - "type": { - "type": "named", - "name": "CommentString" - } - }, - "CustomScalarPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "type": "named", - "name": "CommentString" - } - } - }, - "Enum": { - "type": { - "type": "named", - "name": "SomeEnum" - } - }, - "EnumPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "type": "named", - "name": "SomeEnum" - } - } - }, - "Float32": { - "type": { - "type": "named", - "name": "Float32" - } - }, - "Float32Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "type": "named", - "name": "Float32" - } - } - }, - "Float64": { - "type": { - "type": "named", - "name": "Float64" - } - }, - "Float64Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "type": "named", - "name": "Float64" - } - } - }, - "Int": { - "type": { - "type": "named", - "name": "Int32" - } - }, - "Int16": { - "type": { - "type": "named", - "name": "Int16" - } - }, - "Int16Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "type": "named", - "name": "Int16" - } - } - }, - "Int32": { - "type": { - "type": "named", - "name": "Int32" - } - }, - "Int32Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "type": "named", - "name": "Int32" - } - } - }, - "Int64": { - "type": { - "type": "named", - "name": "Int64" - } - }, - "Int64Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "type": "named", - "name": "Int64" - } - } - }, - "Int8": { - "type": { - "type": "named", - "name": "Int8" - } - }, - "Int8Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "type": "named", - "name": "Int8" - } - } - }, - "IntPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "type": "named", - "name": "Int32" - } - } - }, - "NamedArray": { - "type": { - "type": "array", - "element_type": { - "type": "named", - "name": "Author" - } - } - }, - "NamedArrayPtr": { - "type": { - "type": "nullable", - "underlying_type": { - 
"type": "array", - "element_type": { - "type": "named", - "name": "Author" - } - } - } - }, - "NamedObject": { - "type": { - "type": "named", - "name": "Author" - } - }, - "NamedObjectPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "type": "named", - "name": "Author" - } - } - }, - "Object": { - "type": { - "type": "named", - "name": "GetTypesArgumentsObject" - } - }, - "ObjectPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "type": "named", - "name": "GetTypesArgumentsObjectPtr" - } - } - }, - "String": { - "type": { - "type": "named", - "name": "String" - } - }, - "StringPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "type": "named", - "name": "String" - } - } - }, - "Text": { - "type": { - "type": "named", - "name": "String" - } - }, - "TextPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "type": "named", - "name": "String" - } - } - }, - "Time": { - "type": { - "type": "named", - "name": "TimestampTZ" - } - }, - "TimePtr": { - "type": { - "type": "nullable", - "underlying_type": { - "type": "named", - "name": "TimestampTZ" - } - } - }, - "UUID": { - "type": { - "type": "named", - "name": "UUID" - } - }, - "UUIDArray": { - "type": { - "type": "array", - "element_type": { - "type": "named", - "name": "UUID" - } - } - }, - "UUIDPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "type": "named", - "name": "UUID" - } - } - }, - "Uint": { - "type": { - "type": "named", - "name": "Int32" - } - }, - "Uint16": { - "type": { - "type": "named", - "name": "Int16" - } - }, - "Uint16Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "type": "named", - "name": "Int16" - } - } - }, - "Uint32": { - "type": { - "type": "named", - "name": "Int32" - } - }, - "Uint32Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "type": "named", - "name": "Int32" - } - } - }, - "Uint64": { - "type": { - "type": "named", - "name": "Int64" - } - }, - "Uint64Ptr": { - "type": 
{ - "type": "nullable", - "underlying_type": { - "type": "named", - "name": "Int64" - } - } - }, - "Uint8": { - "type": { - "type": "named", - "name": "Int8" - } - }, - "Uint8Ptr": { - "type": { - "type": "nullable", - "underlying_type": { - "type": "named", - "name": "Int8" - } - } - }, - "UintPtr": { - "type": { - "type": "nullable", - "underlying_type": { - "type": "named", - "name": "Int32" - } - } - } - }, - "result_type": { - "type": "nullable", - "underlying_type": { - "type": "named", - "name": "GetTypesArguments" - } - } - }, - { - "name": "hello", - "description": "sends a hello message", - "arguments": {}, - "result_type": { - "type": "nullable", - "underlying_type": { - "type": "named", - "name": "HelloResult" - } - } - }, - { - "name": "getArticles", - "description": "GetArticles", - "arguments": { - "Limit": { - "type": { - "type": "named", - "name": "Float64" - } - } - }, - "result_type": { - "type": "array", - "element_type": { - "type": "named", - "name": "GetArticlesResult" - } - } - } - ], - "procedures": [ - { - "name": "create_article", - "description": "CreateArticle", - "arguments": { - "author": { - "type": { - "type": "named", - "name": "CreateArticleArgumentsAuthor" - } - } - }, - "result_type": { - "type": "nullable", - "underlying_type": { - "type": "named", - "name": "CreateArticleResult" - } - } - }, - { - "name": "increase", - "description": "Increase", - "arguments": {}, - "result_type": { - "type": "named", - "name": "Int32" - } - }, - { - "name": "createAuthor", - "description": "creates an author", - "arguments": { - "name": { - "type": { - "type": "named", - "name": "String" - } - } - }, - "result_type": { - "type": "nullable", - "underlying_type": { - "type": "named", - "name": "CreateAuthorResult" - } - } - }, - { - "name": "createAuthors", - "description": "creates a list of authors", - "arguments": { - "names": { - "type": { - "type": "array", - "element_type": { - "type": "named", - "name": "String" - } - } - } - }, - 
"result_type": { - "type": "array", - "element_type": { - "type": "named", - "name": "CreateAuthorResult" - } - } - } - ] -} \ No newline at end of file diff --git a/example/codegen/types/arguments/types.generated.go b/example/codegen/types/arguments/types.generated.go index 22a8938..73ab32f 100644 --- a/example/codegen/types/arguments/types.generated.go +++ b/example/codegen/types/arguments/types.generated.go @@ -1,4 +1,4 @@ -// Code generated by github.com/hasura/ndc-sdk-go/codegen, DO NOT EDIT. +// Code generated by github.com/hasura/ndc-sdk-go/cmd/hasura-ndc-go, DO NOT EDIT. package arguments import ( @@ -10,16 +10,16 @@ import ( "time" ) -var arguments_Decoder = utils.NewDecoder() +var connector_Decoder = utils.NewDecoder() // FromValue decodes values from map func (j *GetTypesArguments) FromValue(input map[string]any) error { var err error - err = arguments_Decoder.DecodeObjectValue(&j.ArrayBigInt, input, "ArrayBigInt") + err = connector_Decoder.DecodeObjectValue(&j.ArrayBigInt, input, "ArrayBigInt") if err != nil { return err } - err = arguments_Decoder.DecodeObjectValue(&j.ArrayBigIntPtr, input, "ArrayBigIntPtr") + err = connector_Decoder.DecodeObjectValue(&j.ArrayBigIntPtr, input, "ArrayBigIntPtr") if err != nil { return err } @@ -95,23 +95,23 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { if err != nil { return err } - err = arguments_Decoder.DecodeObjectValue(&j.ArrayMap, input, "ArrayMap") + err = connector_Decoder.DecodeObjectValue(&j.ArrayMap, input, "ArrayMap") if err != nil { return err } j.ArrayMapPtr = new([]map[string]any) - err = arguments_Decoder.DecodeNullableObjectValue(j.ArrayMapPtr, input, "ArrayMapPtr") + err = connector_Decoder.DecodeNullableObjectValue(j.ArrayMapPtr, input, "ArrayMapPtr") if err != nil { return err } - err = arguments_Decoder.DecodeObjectValue(&j.ArrayObject, input, "ArrayObject") + err = connector_Decoder.DecodeObjectValue(&j.ArrayObject, input, "ArrayObject") if err != nil { return err } 
j.ArrayObjectPtr = new([]struct { Content string "json:\"content\"" }) - err = arguments_Decoder.DecodeNullableObjectValue(j.ArrayObjectPtr, input, "ArrayObjectPtr") + err = connector_Decoder.DecodeNullableObjectValue(j.ArrayObjectPtr, input, "ArrayObjectPtr") if err != nil { return err } @@ -187,12 +187,12 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { if err != nil { return err } - err = arguments_Decoder.DecodeObjectValue(&j.BigInt, input, "BigInt") + err = connector_Decoder.DecodeObjectValue(&j.BigInt, input, "BigInt") if err != nil { return err } j.BigIntPtr = new(scalar.BigInt) - err = arguments_Decoder.DecodeNullableObjectValue(j.BigIntPtr, input, "BigIntPtr") + err = connector_Decoder.DecodeNullableObjectValue(j.BigIntPtr, input, "BigIntPtr") if err != nil { return err } @@ -204,39 +204,39 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { if err != nil { return err } - err = arguments_Decoder.DecodeObjectValue(&j.Bytes, input, "Bytes") + err = connector_Decoder.DecodeObjectValue(&j.Bytes, input, "Bytes") if err != nil { return err } j.BytesPtr = new(scalar.Bytes) - err = arguments_Decoder.DecodeNullableObjectValue(j.BytesPtr, input, "BytesPtr") + err = connector_Decoder.DecodeNullableObjectValue(j.BytesPtr, input, "BytesPtr") if err != nil { return err } - err = arguments_Decoder.DecodeObjectValue(&j.CustomScalar, input, "CustomScalar") + err = connector_Decoder.DecodeObjectValue(&j.CustomScalar, input, "CustomScalar") if err != nil { return err } j.CustomScalarPtr = new(types.CommentText) - err = arguments_Decoder.DecodeNullableObjectValue(j.CustomScalarPtr, input, "CustomScalarPtr") + err = connector_Decoder.DecodeNullableObjectValue(j.CustomScalarPtr, input, "CustomScalarPtr") if err != nil { return err } - err = arguments_Decoder.DecodeObjectValue(&j.Date, input, "Date") + err = connector_Decoder.DecodeObjectValue(&j.Date, input, "Date") if err != nil { return err } j.DatePtr = new(scalar.Date) - err = 
arguments_Decoder.DecodeNullableObjectValue(j.DatePtr, input, "DatePtr") + err = connector_Decoder.DecodeNullableObjectValue(j.DatePtr, input, "DatePtr") if err != nil { return err } - err = arguments_Decoder.DecodeObjectValue(&j.Enum, input, "Enum") + err = connector_Decoder.DecodeObjectValue(&j.Enum, input, "Enum") if err != nil { return err } j.EnumPtr = new(types.SomeEnum) - err = arguments_Decoder.DecodeNullableObjectValue(j.EnumPtr, input, "EnumPtr") + err = connector_Decoder.DecodeNullableObjectValue(j.EnumPtr, input, "EnumPtr") if err != nil { return err } @@ -304,34 +304,34 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { if err != nil { return err } - err = arguments_Decoder.DecodeObjectValue(&j.Map, input, "Map") + err = connector_Decoder.DecodeObjectValue(&j.Map, input, "Map") if err != nil { return err } j.MapPtr = new(map[string]any) - err = arguments_Decoder.DecodeNullableObjectValue(j.MapPtr, input, "MapPtr") + err = connector_Decoder.DecodeNullableObjectValue(j.MapPtr, input, "MapPtr") if err != nil { return err } - err = arguments_Decoder.DecodeObjectValue(&j.NamedArray, input, "NamedArray") + err = connector_Decoder.DecodeObjectValue(&j.NamedArray, input, "NamedArray") if err != nil { return err } j.NamedArrayPtr = new([]types.Author) - err = arguments_Decoder.DecodeNullableObjectValue(j.NamedArrayPtr, input, "NamedArrayPtr") + err = connector_Decoder.DecodeNullableObjectValue(j.NamedArrayPtr, input, "NamedArrayPtr") if err != nil { return err } - err = arguments_Decoder.DecodeObjectValue(&j.NamedObject, input, "NamedObject") + err = connector_Decoder.DecodeObjectValue(&j.NamedObject, input, "NamedObject") if err != nil { return err } j.NamedObjectPtr = new(types.Author) - err = arguments_Decoder.DecodeNullableObjectValue(j.NamedObjectPtr, input, "NamedObjectPtr") + err = connector_Decoder.DecodeNullableObjectValue(j.NamedObjectPtr, input, "NamedObjectPtr") if err != nil { return err } - err = 
arguments_Decoder.DecodeObjectValue(&j.Object, input, "Object") + err = connector_Decoder.DecodeObjectValue(&j.Object, input, "Object") if err != nil { return err } @@ -339,22 +339,22 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { Long int Lat int }) - err = arguments_Decoder.DecodeNullableObjectValue(j.ObjectPtr, input, "ObjectPtr") + err = connector_Decoder.DecodeNullableObjectValue(j.ObjectPtr, input, "ObjectPtr") if err != nil { return err } j.PtrArrayBigInt = new([]scalar.BigInt) - err = arguments_Decoder.DecodeNullableObjectValue(j.PtrArrayBigInt, input, "PtrArrayBigInt") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayBigInt, input, "PtrArrayBigInt") if err != nil { return err } j.PtrArrayBigIntPtr = new([]*scalar.BigInt) - err = arguments_Decoder.DecodeNullableObjectValue(j.PtrArrayBigIntPtr, input, "PtrArrayBigIntPtr") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayBigIntPtr, input, "PtrArrayBigIntPtr") if err != nil { return err } j.PtrArrayBool = new([]bool) - err = arguments_Decoder.DecodeNullableObjectValue(j.PtrArrayBool, input, "PtrArrayBool") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayBool, input, "PtrArrayBool") if err != nil { return err } @@ -363,7 +363,7 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { return err } j.PtrArrayFloat32 = new([]float32) - err = arguments_Decoder.DecodeNullableObjectValue(j.PtrArrayFloat32, input, "PtrArrayFloat32") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayFloat32, input, "PtrArrayFloat32") if err != nil { return err } @@ -372,7 +372,7 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { return err } j.PtrArrayFloat64 = new([]float64) - err = arguments_Decoder.DecodeNullableObjectValue(j.PtrArrayFloat64, input, "PtrArrayFloat64") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayFloat64, input, "PtrArrayFloat64") if err != nil { return err } @@ -381,12 +381,12 @@ func (j 
*GetTypesArguments) FromValue(input map[string]any) error { return err } j.PtrArrayInt = new([]int) - err = arguments_Decoder.DecodeNullableObjectValue(j.PtrArrayInt, input, "PtrArrayInt") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayInt, input, "PtrArrayInt") if err != nil { return err } j.PtrArrayInt16 = new([]int16) - err = arguments_Decoder.DecodeNullableObjectValue(j.PtrArrayInt16, input, "PtrArrayInt16") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayInt16, input, "PtrArrayInt16") if err != nil { return err } @@ -395,7 +395,7 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { return err } j.PtrArrayInt32 = new([]int32) - err = arguments_Decoder.DecodeNullableObjectValue(j.PtrArrayInt32, input, "PtrArrayInt32") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayInt32, input, "PtrArrayInt32") if err != nil { return err } @@ -404,7 +404,7 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { return err } j.PtrArrayInt64 = new([]int64) - err = arguments_Decoder.DecodeNullableObjectValue(j.PtrArrayInt64, input, "PtrArrayInt64") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayInt64, input, "PtrArrayInt64") if err != nil { return err } @@ -413,7 +413,7 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { return err } j.PtrArrayInt8 = new([]int8) - err = arguments_Decoder.DecodeNullableObjectValue(j.PtrArrayInt8, input, "PtrArrayInt8") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayInt8, input, "PtrArrayInt8") if err != nil { return err } @@ -426,7 +426,7 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { return err } j.PtrArrayJSON = new([]any) - err = arguments_Decoder.DecodeNullableObjectValue(j.PtrArrayJSON, input, "PtrArrayJSON") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayJSON, input, "PtrArrayJSON") if err != nil { return err } @@ -435,7 +435,7 @@ func (j *GetTypesArguments) FromValue(input 
map[string]any) error { return err } j.PtrArrayRawJSON = new([]json.RawMessage) - err = arguments_Decoder.DecodeNullableObjectValue(j.PtrArrayRawJSON, input, "PtrArrayRawJSON") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayRawJSON, input, "PtrArrayRawJSON") if err != nil { return err } @@ -444,7 +444,7 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { return err } j.PtrArrayString = new([]string) - err = arguments_Decoder.DecodeNullableObjectValue(j.PtrArrayString, input, "PtrArrayString") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayString, input, "PtrArrayString") if err != nil { return err } @@ -453,7 +453,7 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { return err } j.PtrArrayTime = new([]time.Time) - err = arguments_Decoder.DecodeNullableObjectValue(j.PtrArrayTime, input, "PtrArrayTime") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayTime, input, "PtrArrayTime") if err != nil { return err } @@ -462,7 +462,7 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { return err } j.PtrArrayUUID = new([]uuid.UUID) - err = arguments_Decoder.DecodeNullableObjectValue(j.PtrArrayUUID, input, "PtrArrayUUID") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayUUID, input, "PtrArrayUUID") if err != nil { return err } @@ -471,12 +471,12 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { return err } j.PtrArrayUint = new([]uint) - err = arguments_Decoder.DecodeNullableObjectValue(j.PtrArrayUint, input, "PtrArrayUint") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayUint, input, "PtrArrayUint") if err != nil { return err } j.PtrArrayUint16 = new([]uint16) - err = arguments_Decoder.DecodeNullableObjectValue(j.PtrArrayUint16, input, "PtrArrayUint16") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayUint16, input, "PtrArrayUint16") if err != nil { return err } @@ -485,7 +485,7 @@ func (j *GetTypesArguments) 
FromValue(input map[string]any) error { return err } j.PtrArrayUint32 = new([]uint32) - err = arguments_Decoder.DecodeNullableObjectValue(j.PtrArrayUint32, input, "PtrArrayUint32") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayUint32, input, "PtrArrayUint32") if err != nil { return err } @@ -494,7 +494,7 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { return err } j.PtrArrayUint64 = new([]uint64) - err = arguments_Decoder.DecodeNullableObjectValue(j.PtrArrayUint64, input, "PtrArrayUint64") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayUint64, input, "PtrArrayUint64") if err != nil { return err } @@ -503,7 +503,7 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { return err } j.PtrArrayUint8 = new([]uint8) - err = arguments_Decoder.DecodeNullableObjectValue(j.PtrArrayUint8, input, "PtrArrayUint8") + err = connector_Decoder.DecodeNullableObjectValue(j.PtrArrayUint8, input, "PtrArrayUint8") if err != nil { return err } @@ -531,12 +531,12 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { if err != nil { return err } - err = arguments_Decoder.DecodeObjectValue(&j.Text, input, "Text") + err = connector_Decoder.DecodeObjectValue(&j.Text, input, "Text") if err != nil { return err } j.TextPtr = new(types.Text) - err = arguments_Decoder.DecodeNullableObjectValue(j.TextPtr, input, "TextPtr") + err = connector_Decoder.DecodeNullableObjectValue(j.TextPtr, input, "TextPtr") if err != nil { return err } @@ -548,7 +548,7 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error { if err != nil { return err } - err = arguments_Decoder.DecodeObjectValue(&j.URL, input, "URL") + err = connector_Decoder.DecodeObjectValue(&j.URL, input, "URL") if err != nil { return err } diff --git a/example/codegen/types/types.generated.go b/example/codegen/types/types.generated.go index dbefba1..76f7f98 100644 --- a/example/codegen/types/types.generated.go +++ 
b/example/codegen/types/types.generated.go @@ -1,4 +1,4 @@ -// Code generated by github.com/hasura/ndc-sdk-go/codegen, DO NOT EDIT. +// Code generated by github.com/hasura/ndc-sdk-go/cmd/hasura-ndc-go, DO NOT EDIT. package types import ( @@ -8,7 +8,35 @@ import ( "slices" ) -var types_Decoder = utils.NewDecoder() +var connector_Decoder = utils.NewDecoder() + +// FromValue decodes values from map +func (j *Author) FromValue(input map[string]any) error { + var err error + j.Author = new(Author) + err = connector_Decoder.DecodeNullableObjectValue(j.Author, input, "author") + if err != nil { + return err + } + j.CreatedAt, err = utils.GetDateTime(input, "created_at") + if err != nil { + return err + } + j.ID, err = utils.GetString(input, "id") + if err != nil { + return err + } + j.Status = new(AuthorStatus) + err = connector_Decoder.DecodeNullableObjectValue(j.Status, input, "status") + if err != nil { + return err + } + err = connector_Decoder.DecodeObjectValue(&j.Tags, input, "tags") + if err != nil { + return err + } + return nil +} // ToMap encodes the struct to a value map func (j Author) ToMap() map[string]any { @@ -18,11 +46,82 @@ func (j Author) ToMap() map[string]any { } r["created_at"] = j.CreatedAt r["id"] = j.ID + r["status"] = j.Status r["tags"] = j.Tags return r } +// ToMap encodes the struct to a value map +func (j GetAuthorResult) ToMap() map[string]any { + r := make(map[string]any) + r["id"] = j.ID + r["name"] = j.Name + + return r +} + +// ScalarName get the schema name of the scalar +func (j AuthorStatus) ScalarName() string { + return "AuthorStatus" +} + +const ( + AuthorStatusActive AuthorStatus = "active" + AuthorStatusInactive AuthorStatus = "inactive" +) + +var enumValues_AuthorStatus = []AuthorStatus{AuthorStatusActive, AuthorStatusInactive} + +// ParseAuthorStatus parses a AuthorStatus enum from string +func ParseAuthorStatus(input string) (AuthorStatus, error) { + result := AuthorStatus(input) + if !slices.Contains(enumValues_AuthorStatus, 
result) { + return AuthorStatus(""), errors.New("failed to parse AuthorStatus, expect one of AuthorStatusActive, AuthorStatusInactive") + } + + return result, nil +} + +// IsValid checks if the value is invalid +func (j AuthorStatus) IsValid() bool { + return slices.Contains(enumValues_AuthorStatus, j) +} + +// UnmarshalJSON implements json.Unmarshaler. +func (j *AuthorStatus) UnmarshalJSON(b []byte) error { + var rawValue string + if err := json.Unmarshal(b, &rawValue); err != nil { + return err + } + + value, err := ParseAuthorStatus(rawValue) + if err != nil { + return err + } + + *j = value + return nil +} + +// FromValue decodes the scalar from an unknown value +func (s *AuthorStatus) FromValue(value any) error { + valueStr, err := utils.DecodeNullableString(value) + if err != nil { + return err + } + if valueStr == nil { + return nil + } + result, err := ParseAuthorStatus(*valueStr) + if err != nil { + return err + } + + *s = result + return nil +} + // ScalarName get the schema name of the scalar func (j CommentText) ScalarName() string { return "CommentString" diff --git a/example/codegen/types/types.go b/example/codegen/types/types.go index 292c9df..66c967f 100644 --- a/example/codegen/types/types.go +++ b/example/codegen/types/types.go @@ -39,9 +39,19 @@ func (ct *CommentText) FromValue(value any) (err error) { // @enum foo, bar type SomeEnum string +// AuthorStatus +// @enum active, inactive +type AuthorStatus string + type Author struct { - ID string `json:"id"` - CreatedAt time.Time `json:"created_at"` - Tags []string `json:"tags"` - Author *Author `json:"author"` + ID string `json:"id"` + CreatedAt time.Time `json:"created_at"` + Tags []string `json:"tags"` + Status *AuthorStatus `json:"status"` + Author *Author `json:"author"` +} + +type GetAuthorResult struct { + ID int `json:"id"` + Name string `json:"name"` } diff --git a/schema/extend.go b/schema/extend.go index c356557..20d5fd7 100644 --- a/schema/extend.go +++ b/schema/extend.go @@ -122,30 
+122,6 @@ func (j *Argument) UnmarshalJSON(b []byte) error { return nil } -// NewLiteralArgument creates an argument with a literal value -// -// Deprecated: use [NewArgumentLiteral] instead -// -// [NewArgumentLiteral]: https://pkg.go.dev/github.com/hasura/ndc-sdk-go/schema#NewArgumentLiteral -func NewLiteralArgument(value any) *Argument { - return &Argument{ - Type: ArgumentTypeLiteral, - Value: value, - } -} - -// NewVariableArgument creates an argument with a variable name -// -// Deprecated: use [NewArgumentVariable] instead -// -// [NewArgumentVariable]: https://pkg.go.dev/github.com/hasura/ndc-sdk-go/schema#NewArgumentVariable -func NewVariableArgument(name string) *Argument { - return &Argument{ - Type: ArgumentTypeVariable, - Name: name, - } -} - // AsLiteral converts the instance to ArgumentLiteral func (j Argument) AsLiteral() (*ArgumentLiteral, error) { if j.Type != ArgumentTypeLiteral { diff --git a/utils/connector.go b/utils/connector.go index 2654fa2..4e1c231 100644 --- a/utils/connector.go +++ b/utils/connector.go @@ -12,6 +12,8 @@ const ( errFunctionValueFieldRequired = "__value field is required in query function type" ) +var ErrHandlerNotfound = errors.New("connector handler not found") + // Scalar abstracts a scalar interface to determine when evaluating type Scalar interface { ScalarName() string @@ -264,3 +266,59 @@ func EvalFunctionSelectionFieldValue(request *schema.QueryRequest) (schema.Neste } return valueColumn.Fields, nil } + +// MergeSchemas merge multiple connector schemas into one schema +func MergeSchemas(schemas ...*schema.SchemaResponse) (*schema.SchemaResponse, []error) { + var errs []error + result := schema.SchemaResponse{ + ObjectTypes: schema.SchemaResponseObjectTypes{}, + ScalarTypes: schema.SchemaResponseScalarTypes{}, + } + collectionMap := map[string]schema.CollectionInfo{} + functionMap := map[string]schema.FunctionInfo{} + procedureMap := map[string]schema.ProcedureInfo{} + + for _, s := range schemas { + if s == nil { + 
continue + } + for _, col := range s.Collections { + if _, ok := collectionMap[col.Name]; ok { + errs = append(errs, fmt.Errorf("collection `%s` exists", col.Name)) + } + collectionMap[col.Name] = col + } + + for _, fn := range s.Functions { + if _, ok := functionMap[fn.Name]; ok { + errs = append(errs, fmt.Errorf("function `%s` exists", fn.Name)) + } + functionMap[fn.Name] = fn + } + + for _, fn := range s.Procedures { + if _, ok := procedureMap[fn.Name]; ok { + errs = append(errs, fmt.Errorf("procedure `%s` exists", fn.Name)) + } + procedureMap[fn.Name] = fn + } + + result.Collections = GetSortedValuesByKey(collectionMap) + result.Functions = GetSortedValuesByKey(functionMap) + result.Procedures = GetSortedValuesByKey(procedureMap) + + for k, sl := range s.ScalarTypes { + if _, ok := result.ScalarTypes[k]; ok { + errs = append(errs, fmt.Errorf("scalar type %s exists", k)) + } + result.ScalarTypes[k] = sl + } + for k, obj := range s.ObjectTypes { + if _, ok := result.ObjectTypes[k]; ok { + errs = append(errs, fmt.Errorf("object type %s exists", k)) + } + result.ObjectTypes[k] = obj + } + } + return &result, errs +} diff --git a/utils/connector_test.go b/utils/connector_test.go index 4998f99..fa308de 100644 --- a/utils/connector_test.go +++ b/utils/connector_test.go @@ -1,6 +1,7 @@ package utils import ( + "errors" "testing" "time" @@ -140,3 +141,547 @@ func TestEvalNestedFields(t *testing.T) { }) } } + +func TestMergeSchemas(t *testing.T) { + testCases := []struct { + Name string + Inputs []*schema.SchemaResponse + Expected schema.SchemaResponse + Errors []error + }{ + { + Name: "simple", + Inputs: []*schema.SchemaResponse{ + nil, + { + Collections: []schema.CollectionInfo{ + { + Name: "Foo", + Arguments: schema.CollectionInfoArguments{}, + }, + }, + ObjectTypes: schema.SchemaResponseObjectTypes{ + "GetArticlesResult": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "Name": schema.ObjectField{ + Type: schema.NewNamedType("String").Encode(), + }, + "id": 
schema.ObjectField{ + Type: schema.NewNamedType("String").Encode(), + }, + }, + }, + "GetTypesArguments": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "ArrayBigInt": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("BigInt")).Encode(), + }, + }, + }, + "GetTypesArgumentsArrayObject": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "content": schema.ObjectField{ + Type: schema.NewNamedType("String").Encode(), + }, + }, + }, + "GetTypesArgumentsObjectPtr": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "Lat": schema.ObjectField{ + Type: schema.NewNamedType("Int32").Encode(), + }, + "Long": schema.ObjectField{ + Type: schema.NewNamedType("Int32").Encode(), + }, + }, + }, + "HelloResult": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "error": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("JSON")).Encode(), + }, + "foo": schema.ObjectField{ + Type: schema.NewNamedType("Foo").Encode(), + }, + "id": schema.ObjectField{ + Type: schema.NewNamedType("UUID").Encode(), + }, + "num": schema.ObjectField{ + Type: schema.NewNamedType("Int32").Encode(), + }, + "text": schema.ObjectField{ + Type: schema.NewNamedType("String").Encode(), + }, + }, + }, + }, + Functions: []schema.FunctionInfo{ + { + Name: "getBool", + Description: ToPtr("return an scalar boolean"), + ResultType: schema.NewNamedType("Boolean").Encode(), + Arguments: map[string]schema.ArgumentInfo{}, + }, + { + Name: "hello", + Description: ToPtr("sends a hello message"), + ResultType: schema.NewNullableType(schema.NewNamedType("HelloResult")).Encode(), + Arguments: map[string]schema.ArgumentInfo{}, + }, + { + Name: "getArticles", + Description: ToPtr("GetArticles"), + ResultType: schema.NewArrayType(schema.NewNamedType("GetArticlesResult")).Encode(), + Arguments: map[string]schema.ArgumentInfo{ + "Limit": { + Type: schema.NewNamedType("Float64").Encode(), + }, + }, + }, + }, + Procedures: []schema.ProcedureInfo{ + { + Name: 
"create_article", + Description: ToPtr("CreateArticle"), + ResultType: schema.NewNullableType(schema.NewNamedType("CreateArticleResult")).Encode(), + Arguments: map[string]schema.ArgumentInfo{ + "author": { + Type: schema.NewNamedType("CreateArticleArgumentsAuthor").Encode(), + }, + }, + }, + { + Name: "createAuthor", + Description: ToPtr("creates an author"), + ResultType: schema.NewNullableType(schema.NewNamedType("CreateAuthorResult")).Encode(), + Arguments: map[string]schema.ArgumentInfo{ + "name": { + Type: schema.NewNamedType("String").Encode(), + }, + }, + }, + { + Name: "createAuthors", + Description: ToPtr("creates a list of authors"), + ResultType: schema.NewArrayType(schema.NewNamedType("CreateAuthorResult")).Encode(), + Arguments: map[string]schema.ArgumentInfo{ + "names": { + Type: schema.NewArrayType(schema.NewNamedType("String")).Encode(), + }, + }, + }, + }, + ScalarTypes: schema.SchemaResponseScalarTypes{ + "BigInt": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationBigInteger().Encode(), + }, + "JSON": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationJSON().Encode(), + }, + "TimestampTZ": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationTimestampTZ().Encode(), + }, + "UUID": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationUUID().Encode(), + }, + }, + }, + { + Collections: []schema.CollectionInfo{ + { + Name: "Foo", + Arguments: 
schema.CollectionInfoArguments{}, + }, + }, + ObjectTypes: schema.SchemaResponseObjectTypes{ + "Author": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "created_at": schema.ObjectField{ + Type: schema.NewNamedType("TimestampTZ").Encode(), + }, + "id": schema.ObjectField{ + Type: schema.NewNamedType("String").Encode(), + }, + }, + }, + "CreateArticleArgumentsAuthor": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "created_at": schema.ObjectField{ + Type: schema.NewNamedType("TimestampTZ").Encode(), + }, + "id": schema.ObjectField{ + Type: schema.NewNamedType("UUID").Encode(), + }, + }, + }, + "CreateArticleResult": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "authors": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("Author")).Encode(), + }, + "id": schema.ObjectField{ + Type: schema.NewNamedType("Int32").Encode(), + }, + }, + }, + "CreateAuthorResult": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "created_at": schema.ObjectField{ + Type: schema.NewNamedType("TimestampTZ").Encode(), + }, + "id": schema.ObjectField{ + Type: schema.NewNamedType("Int32").Encode(), + }, + "name": schema.ObjectField{ + Type: schema.NewNamedType("String").Encode(), + }, + }, + }, + "GetTypesArgumentsArrayObjectPtr": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "content": schema.ObjectField{ + Type: schema.NewNamedType("String").Encode(), + }, + }, + }, + "GetTypesArgumentsObject": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "created_at": schema.ObjectField{ + Type: schema.NewNamedType("TimestampTZ").Encode(), + }, + "id": schema.ObjectField{ + Type: schema.NewNamedType("UUID").Encode(), + }, + }, + }, + "GetTypesArgumentsObjectPtr": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "Lat": schema.ObjectField{ + Type: schema.NewNamedType("Int32").Encode(), + }, + "Long": schema.ObjectField{ + Type: schema.NewNamedType("Int32").Encode(), + }, + }, + }, + "HelloResult": schema.ObjectType{ + Fields: 
schema.ObjectTypeFields{ + "error": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("JSON")).Encode(), + }, + "foo": schema.ObjectField{ + Type: schema.NewNamedType("Foo").Encode(), + }, + "id": schema.ObjectField{ + Type: schema.NewNamedType("UUID").Encode(), + }, + "num": schema.ObjectField{ + Type: schema.NewNamedType("Int32").Encode(), + }, + "text": schema.ObjectField{ + Type: schema.NewNamedType("String").Encode(), + }, + }, + }, + }, + Functions: []schema.FunctionInfo{ + { + Name: "getBool", + Description: ToPtr("return an scalar boolean"), + ResultType: schema.NewNamedType("Boolean").Encode(), + Arguments: map[string]schema.ArgumentInfo{}, + }, + { + Name: "getTypes", + ResultType: schema.NewNullableType(schema.NewNamedType("GetTypesArguments")).Encode(), + Arguments: map[string]schema.ArgumentInfo{ + "ArrayBigInt": { + Type: schema.NewArrayType(schema.NewNamedType("BigInt")).Encode(), + }, + }, + }, + }, + Procedures: []schema.ProcedureInfo{ + { + Name: "create_article", + Description: ToPtr("CreateArticle"), + ResultType: schema.NewNullableType(schema.NewNamedType("CreateArticleResult")).Encode(), + Arguments: map[string]schema.ArgumentInfo{ + "author": { + Type: schema.NewNamedType("CreateArticleArgumentsAuthor").Encode(), + }, + }, + }, + { + Name: "increase", + Description: ToPtr("Increase"), + ResultType: schema.NewNamedType("Int32").Encode(), + Arguments: map[string]schema.ArgumentInfo{}, + }, + }, + ScalarTypes: schema.SchemaResponseScalarTypes{ + "BigInt": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationBigInteger().Encode(), + }, + "Boolean": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationBoolean().Encode(), + }, + }, + }, + }, 
+ Expected: schema.SchemaResponse{ + Collections: []schema.CollectionInfo{ + { + Name: "Foo", + Arguments: schema.CollectionInfoArguments{}, + }, + }, + ObjectTypes: schema.SchemaResponseObjectTypes{ + "Author": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "created_at": schema.ObjectField{ + Type: schema.NewNamedType("TimestampTZ").Encode(), + }, + "id": schema.ObjectField{ + Type: schema.NewNamedType("String").Encode(), + }, + }, + }, + "CreateArticleArgumentsAuthor": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "created_at": schema.ObjectField{ + Type: schema.NewNamedType("TimestampTZ").Encode(), + }, + "id": schema.ObjectField{ + Type: schema.NewNamedType("UUID").Encode(), + }, + }, + }, + "CreateArticleResult": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "authors": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("Author")).Encode(), + }, + "id": schema.ObjectField{ + Type: schema.NewNamedType("Int32").Encode(), + }, + }, + }, + "CreateAuthorResult": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "created_at": schema.ObjectField{ + Type: schema.NewNamedType("TimestampTZ").Encode(), + }, + "id": schema.ObjectField{ + Type: schema.NewNamedType("Int32").Encode(), + }, + "name": schema.ObjectField{ + Type: schema.NewNamedType("String").Encode(), + }, + }, + }, + "GetArticlesResult": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "Name": schema.ObjectField{ + Type: schema.NewNamedType("String").Encode(), + }, + "id": schema.ObjectField{ + Type: schema.NewNamedType("String").Encode(), + }, + }, + }, + "GetTypesArguments": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "ArrayBigInt": schema.ObjectField{ + Type: schema.NewArrayType(schema.NewNamedType("BigInt")).Encode(), + }, + }, + }, + "GetTypesArgumentsArrayObject": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "content": schema.ObjectField{ + Type: schema.NewNamedType("String").Encode(), + }, + }, + }, + 
"GetTypesArgumentsArrayObjectPtr": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "content": schema.ObjectField{ + Type: schema.NewNamedType("String").Encode(), + }, + }, + }, + "GetTypesArgumentsObject": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "created_at": schema.ObjectField{ + Type: schema.NewNamedType("TimestampTZ").Encode(), + }, + "id": schema.ObjectField{ + Type: schema.NewNamedType("UUID").Encode(), + }, + }, + }, + "GetTypesArgumentsObjectPtr": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "Lat": schema.ObjectField{ + Type: schema.NewNamedType("Int32").Encode(), + }, + "Long": schema.ObjectField{ + Type: schema.NewNamedType("Int32").Encode(), + }, + }, + }, + "HelloResult": schema.ObjectType{ + Fields: schema.ObjectTypeFields{ + "error": schema.ObjectField{ + Type: schema.NewNullableType(schema.NewNamedType("JSON")).Encode(), + }, + "foo": schema.ObjectField{ + Type: schema.NewNamedType("Foo").Encode(), + }, + "id": schema.ObjectField{ + Type: schema.NewNamedType("UUID").Encode(), + }, + "num": schema.ObjectField{ + Type: schema.NewNamedType("Int32").Encode(), + }, + "text": schema.ObjectField{ + Type: schema.NewNamedType("String").Encode(), + }, + }, + }, + }, + Functions: []schema.FunctionInfo{ + { + Name: "getArticles", + Description: ToPtr("GetArticles"), + ResultType: schema.NewArrayType(schema.NewNamedType("GetArticlesResult")).Encode(), + Arguments: map[string]schema.ArgumentInfo{ + "Limit": { + Type: schema.NewNamedType("Float64").Encode(), + }, + }, + }, + { + Name: "getBool", + Description: ToPtr("return an scalar boolean"), + ResultType: schema.NewNamedType("Boolean").Encode(), + Arguments: map[string]schema.ArgumentInfo{}, + }, + { + Name: "getTypes", + ResultType: schema.NewNullableType(schema.NewNamedType("GetTypesArguments")).Encode(), + Arguments: map[string]schema.ArgumentInfo{ + "ArrayBigInt": { + Type: schema.NewArrayType(schema.NewNamedType("BigInt")).Encode(), + }, + }, + }, + { + Name: "hello", + 
Description: ToPtr("sends a hello message"), + ResultType: schema.NewNullableType(schema.NewNamedType("HelloResult")).Encode(), + Arguments: map[string]schema.ArgumentInfo{}, + }, + }, + Procedures: []schema.ProcedureInfo{ + { + Name: "createAuthor", + Description: ToPtr("creates an author"), + ResultType: schema.NewNullableType(schema.NewNamedType("CreateAuthorResult")).Encode(), + Arguments: map[string]schema.ArgumentInfo{ + "name": { + Type: schema.NewNamedType("String").Encode(), + }, + }, + }, + { + Name: "createAuthors", + Description: ToPtr("creates a list of authors"), + ResultType: schema.NewArrayType(schema.NewNamedType("CreateAuthorResult")).Encode(), + Arguments: map[string]schema.ArgumentInfo{ + "names": { + Type: schema.NewArrayType(schema.NewNamedType("String")).Encode(), + }, + }, + }, + { + Name: "create_article", + Description: ToPtr("CreateArticle"), + ResultType: schema.NewNullableType(schema.NewNamedType("CreateArticleResult")).Encode(), + Arguments: map[string]schema.ArgumentInfo{ + "author": { + Type: schema.NewNamedType("CreateArticleArgumentsAuthor").Encode(), + }, + }, + }, + { + Name: "increase", + Description: ToPtr("Increase"), + ResultType: schema.NewNamedType("Int32").Encode(), + Arguments: map[string]schema.ArgumentInfo{}, + }, + }, + ScalarTypes: schema.SchemaResponseScalarTypes{ + "BigInt": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationBigInteger().Encode(), + }, + "Boolean": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationBoolean().Encode(), + }, + "JSON": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: 
schema.NewTypeRepresentationJSON().Encode(), + }, + "TimestampTZ": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationTimestampTZ().Encode(), + }, + "UUID": schema.ScalarType{ + AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, + Representation: schema.NewTypeRepresentationUUID().Encode(), + }, + }, + }, + Errors: []error{ + errors.New("collection type Foo exists"), + errors.New("function type getBool exists"), + errors.New("procedure type create_article exists"), + errors.New("object type HelloResult exists"), + errors.New("object type GetTypesArgumentsObjectPtr exists"), + errors.New("scalar type BigInt exists"), + }, + }, + } + + for _, tc := range testCases { + t.Run(tc.Name, func(t *testing.T) { + result, errs := MergeSchemas(tc.Inputs...) + assertDeepEqual(t, tc.Errors, errs) + assertDeepEqual(t, tc.Expected.Collections, result.Collections) + assertDeepEqual(t, tc.Expected.Functions, result.Functions) + assertDeepEqual(t, tc.Expected.ObjectTypes, result.ObjectTypes) + assertDeepEqual(t, tc.Expected.Procedures, result.Procedures) + assertDeepEqual(t, tc.Expected.ScalarTypes, result.ScalarTypes) + }) + } +} diff --git a/utils/decode.go b/utils/decode.go index 23f9242..c848bdc 100644 --- a/utils/decode.go +++ b/utils/decode.go @@ -430,6 +430,22 @@ func (d Decoder) decodeValue(target any, value any) error { return nil } +// DecodeObject tries to decode an object from a map +func (d Decoder) DecodeObject(target any, value map[string]any) error { + if len(value) == 0 { + return nil + } + if IsNil(target) { + return errors.New("the decoded target must be not null") + } + + if t, ok := target.(ObjectDecoder); ok { + return t.FromValue(value) + } + + return decodeAnyValue(target, value, d.decodeHook) +} + // DecodeNullableInt tries to convert 
an unknown value to a nullable integer func DecodeNullableInt[T int | int8 | int16 | int32 | int64](value any) (*T, error) { return decodeNullableInt(value, convertNullableInt[T]) diff --git a/utils/encode.go b/utils/encode.go index 9da2463..611669b 100644 --- a/utils/encode.go +++ b/utils/encode.go @@ -14,41 +14,6 @@ type MapEncoder interface { ToMap() map[string]any } -// EncodeMap encodes an object to a map[string]any, using json tag to convert object keys -// -// Deprecated: use EncodeObject instead -func EncodeMap[T MapEncoder](input T) map[string]any { - if IsNil(input) { - return nil - } - return input.ToMap() -} - -// EncodeMaps encode objects to a slice of map[string]any, using json tag to convert object keys -// -// Deprecated: use EncodeObjectSlice instead -func EncodeMaps[T MapEncoder](inputs []T) []map[string]any { - var results []map[string]any - for _, item := range inputs { - results = append(results, item.ToMap()) - } - return results -} - -// EncodeNullableMaps encode objects to a slice of map[string]any, using json tag to convert object keys -// -// Deprecated: use EncodeNullableObjectSlice instead -func EncodeNullableMaps[T MapEncoder](inputs *[]T) []map[string]any { - if inputs == nil { - return nil - } - var results []map[string]any - for _, item := range *inputs { - results = append(results, item.ToMap()) - } - return results -} - // EncodeObject encodes an unknown type to a map[string]any, using json tag to convert object keys func EncodeObject(input any) (map[string]any, error) { if IsNil(input) { diff --git a/utils/helper.go b/utils/helper.go index d0a7a2c..bdf444a 100644 --- a/utils/helper.go +++ b/utils/helper.go @@ -1,8 +1,12 @@ package utils import ( + "cmp" + "context" "fmt" + "log/slog" "reflect" + "slices" ) // GetDefault returns the value or default one if value is empty @@ -34,6 +38,35 @@ func GetDefaultValuePtr[T comparable](value *T, defaultValue T) T { return *value } +// GetKeys gets keys of a map +func GetKeys[K cmp.Ordered, 
V any](input map[K]V) []K { + var results []K + for key := range input { + results = append(results, key) + } + return results +} + +// GetSortedKeys gets keys of a map and sorts them +func GetSortedKeys[K cmp.Ordered, V any](input map[K]V) []K { + results := GetKeys(input) + slices.Sort(results) + return results +} + +// GetSortedValuesByKey gets values of a map and sorts by keys +func GetSortedValuesByKey[K cmp.Ordered, V any](input map[K]V) []V { + if len(input) == 0 { + return []V{} + } + keys := GetSortedKeys(input) + results := make([]V, len(input)) + for i, k := range keys { + results[i] = input[k] + } + return results +} + // ToPtr converts a value to its pointer func ToPtr[V any](value V) *V { return &value @@ -42,7 +75,8 @@ func ToPtr[V any](value V) *V { // ToPtrs converts the value slice to pointer slice func ToPtrs[T any](input []T) []*T { results := make([]*T, len(input)) - for i, v := range input { + for i := range input { + v := input[i] results[i] = &v } return results @@ -83,3 +117,20 @@ func UnwrapPointerFromAny(value any) (any, bool) { } return reflectValue.Interface(), true } + +// IsDebug checks if the log level is debug +func IsDebug(logger *slog.Logger) bool { + return logger.Enabled(context.TODO(), slog.LevelDebug) +} + +// MergeMap merges two value maps into one +func MergeMap[K comparable, V any](dest map[K]V, src map[K]V) map[K]V { + result := dest + if result == nil { + result = map[K]V{} + } + for k, v := range src { + result[k] = v + } + return result +} diff --git a/utils/helper_test.go b/utils/helper_test.go new file mode 100644 index 0000000..e41c4a3 --- /dev/null +++ b/utils/helper_test.go @@ -0,0 +1,52 @@ +package utils + +import ( + "log/slog" + "testing" +) + +func TestToPtrs(t *testing.T) { + _, err := PointersToValues([]*string{ToPtr(""), nil}) + assertError(t, err, "element at 1 must not be nil") + input, err := PointersToValues(ToPtrs([]string{"a", "b", "c"})) + assertNoError(t, err) + expected, err := 
PointersToValues([]*string{ToPtr("a"), ToPtr("b"), ToPtr("c")}) + assertNoError(t, err) + + assertDeepEqual(t, expected, input) +} + +func TestIsDebug(t *testing.T) { + if IsDebug(slog.Default()) { + t.Error("expected debug mode, got false") + } +} + +func TestGetSortedKeys(t *testing.T) { + input := map[string]any{ + "b": "b", + "c": "c", + "a": "a", + "d": "d", + } + expected := []string{"a", "b", "c", "d"} + + assertDeepEqual(t, expected, GetSortedKeys(input)) +} + +func TestMergeMap(t *testing.T) { + mapA := map[string]any{ + "a": 2, + "c": 3, + } + mapB := map[string]any{ + "a": 1, + "b": 2, + } + assertDeepEqual(t, mapB, MergeMap(nil, mapB)) + assertDeepEqual(t, map[string]any{ + "a": 1, + "b": 2, + "c": 3, + }, MergeMap(mapA, mapB)) +}