diff --git a/Makefile b/Makefile
index 552d142..853db79 100644
--- a/Makefile
+++ b/Makefile
@@ -52,7 +52,7 @@ clean:
 	rm -f cmd/hasura-ndc-go/testdata/*/source/schema.generated.json
 	rm -f cmd/hasura-ndc-go/testdata/*/source/**/schema.generated.json
 	rm -f cmd/hasura-ndc-go/testdata/*/source/**/types.generated.go
-	rm -rf cmd/hasura-ndc-go/testdata/*/source/testdata
+	rm -rf cmd/hasura-ndc-go/testdata/**/testdata
 
 .PHONY: build-codegen
 build-codegen:
diff --git a/cmd/hasura-ndc-go/connector.go b/cmd/hasura-ndc-go/connector.go
index 01f67c7..42d6702 100644
--- a/cmd/hasura-ndc-go/connector.go
+++ b/cmd/hasura-ndc-go/connector.go
@@ -387,18 +387,18 @@ func (cg *connectorGenerator) genObjectMethods() error {
 	for _, objectName := range objectKeys {
 		object := cg.rawSchema.Objects[objectName]
-		if object.IsAnonymous {
+		if object.IsAnonymous || !strings.HasPrefix(object.PackagePath, cg.moduleName) {
 			continue
 		}
 		sb := cg.getOrCreateTypeBuilder(object.PackagePath)
 		sb.builder.WriteString(fmt.Sprintf(`
 // ToMap encodes the struct to a value map
-func (j %s) ToMap() (map[string]any, error) {
+func (j %s) ToMap() map[string]any {
   r := make(map[string]any)
 `, objectName))
 		cg.genObjectToMap(sb, object, "j", "r")
 		sb.builder.WriteString(`
-  return r, nil
+  return r
 }`)
 	}
 
@@ -433,7 +433,7 @@ func (cg *connectorGenerator) genToMapProperty(sb *connectorTypeBuilder, field *
 	if isArrayFragments(fragments) {
 		varName := formatLocalFieldName(selector)
 		valueName := fmt.Sprintf("%s_v", varName)
-		sb.builder.WriteString(fmt.Sprintf("  %s := make([]map[string]any, len(%s))\n", varName, selector))
+		sb.builder.WriteString(fmt.Sprintf("  %s := make([]any, len(%s))\n", varName, selector))
 		sb.builder.WriteString(fmt.Sprintf("  for i, %s := range %s {\n", valueName, selector))
 		cg.genToMapProperty(sb, field, valueName, fmt.Sprintf("%s[i]", varName), ty, fragments[1:])
 		sb.builder.WriteString("  }\n")
@@ -443,13 +443,8 @@ func (cg *connectorGenerator) genToMapProperty(sb *connectorTypeBuilder, field *
 
 	isAnonymous := strings.HasPrefix(strings.Join(fragments, ""), "struct{")
 	if !isAnonymous {
-		sb.SetImport("fmt", "")
-		sb.builder.WriteString(fmt.Sprintf(`  itemResult, err := utils.EncodeObject(%s)
-  if err != nil {
-    return nil, fmt.Errorf("failed to encode %s: %%s", err)
-  }
-  %s = itemResult
-`, selector, field.Name, assigner))
+		sb.builder.WriteString(fmt.Sprintf(`  %s = %s
+`, assigner, selector))
 		return selector
 	}
 	innerObject, ok := cg.rawSchema.Objects[ty.Name]
diff --git a/cmd/hasura-ndc-go/schema.go b/cmd/hasura-ndc-go/schema.go
index 15e6c60..c35f464 100644
--- a/cmd/hasura-ndc-go/schema.go
+++ b/cmd/hasura-ndc-go/schema.go
@@ -336,6 +336,7 @@ func (rcs RawConnectorSchema) IsCustomType(name string) bool {
 type SchemaParser struct {
 	context      context.Context
 	moduleName   string
+	rawSchema    *RawConnectorSchema
 	packages     []*packages.Package
 	packageIndex int
 }
@@ -436,9 +437,10 @@ func parseRawConnectorSchemaFromGoCode(ctx context.Context, moduleName string, f
 			moduleName:   moduleName,
 			packages:     packageList,
 			packageIndex: i,
+			rawSchema:    rawSchema,
 		}
 
-		err = sp.parseRawConnectorSchema(rawSchema, packageList[i].Types)
+		err = sp.parseRawConnectorSchema(packageList[i].Types)
 		parseSchemaTask.End()
 		if err != nil {
 			return nil, err
@@ -472,11 +474,11 @@ func evalPackageTypesLocation(name string, moduleName string, filePath string, c
 }
 
 // parse raw connector schema from Go code
-func (sp *SchemaParser) parseRawConnectorSchema(rawSchema *RawConnectorSchema, pkg *types.Package) error {
+func (sp *SchemaParser) parseRawConnectorSchema(pkg *types.Package) error {
 	for _, name := range pkg.Scope().Names() {
 		_, task := trace.NewTask(sp.context, fmt.Sprintf("parse_%s_schema_%s", sp.GetCurrentPackage().Name, name))
-		err := sp.parsePackageScope(rawSchema, pkg, name)
+		err := sp.parsePackageScope(pkg, name)
 		task.End()
 		if err != nil {
 			return err
@@ -486,7 +488,7 @@ func (sp *SchemaParser) parseRawConnectorSchema(rawSchema *RawConnectorSchema, p
 	return nil
 }
 
-func (sp *SchemaParser) parsePackageScope(rawSchema *RawConnectorSchema, pkg *types.Package, name string) error {
+func (sp *SchemaParser) parsePackageScope(pkg *types.Package, name string) error {
 	switch obj := pkg.Scope().Lookup(name).(type) {
 	case *types.Func:
 		// only parse public functions
@@ -521,7 +523,7 @@ func (sp *SchemaParser) parsePackageScope(rawSchema *RawConnectorSchema, pkg *ty
 			// ignore 2 first parameters (context and state)
 			if params.Len() == 3 {
 				arg := params.At(2)
-				arguments, argumentType, err := sp.parseArgumentTypes(rawSchema, arg.Type(), []string{})
+				arguments, argumentType, err := sp.parseArgumentTypes(arg.Type(), []string{})
 				if err != nil {
 					return err
 				}
@@ -529,7 +531,7 @@ func (sp *SchemaParser) parsePackageScope(rawSchema *RawConnectorSchema, pkg *ty
 				opInfo.Arguments = arguments
 			}
 
-			resultType, err := sp.parseType(rawSchema, nil, resultTuple.At(0).Type(), []string{}, false)
+			resultType, err := sp.parseType(nil, resultTuple.At(0).Type(), []string{}, false)
 			if err != nil {
 				return err
 			}
@@ -537,19 +539,19 @@ func (sp *SchemaParser) parsePackageScope(rawSchema *RawConnectorSchema, pkg *ty
 
 			switch opInfo.Kind {
 			case OperationProcedure:
-				rawSchema.Procedures = append(rawSchema.Procedures, ProcedureInfo(*opInfo))
+				sp.rawSchema.Procedures = append(sp.rawSchema.Procedures, ProcedureInfo(*opInfo))
 			case OperationFunction:
-				rawSchema.Functions = append(rawSchema.Functions, FunctionInfo(*opInfo))
+				sp.rawSchema.Functions = append(sp.rawSchema.Functions, FunctionInfo(*opInfo))
 			}
 		}
 	}
 	return nil
 }
 
-func (sp *SchemaParser) parseArgumentTypes(rawSchema *RawConnectorSchema, ty types.Type, fieldPaths []string) (map[string]ArgumentInfo, *TypeInfo, error) {
+func (sp *SchemaParser) parseArgumentTypes(ty types.Type, fieldPaths []string) (map[string]ArgumentInfo, *TypeInfo, error) {
 	switch inferredType := ty.(type) {
 	case *types.Pointer:
-		return sp.parseArgumentTypes(rawSchema, inferredType.Elem(), fieldPaths)
+		return sp.parseArgumentTypes(inferredType.Elem(), fieldPaths)
 	case *types.Struct:
 		result := make(map[string]ArgumentInfo)
 		for i := 0; i < inferredType.NumFields(); i++ {
@@ -563,7 +565,7 @@ func (sp *SchemaParser) parseArgumentTypes(rawSchema *RawConnectorSchema, ty typ
 					PackagePath: fieldPackage.Path(),
 				}
 			}
-			fieldType, err := sp.parseType(rawSchema, typeInfo, fieldVar.Type(), append(fieldPaths, fieldVar.Name()), false)
+			fieldType, err := sp.parseType(typeInfo, fieldVar.Type(), append(fieldPaths, fieldVar.Name()), false)
 			if err != nil {
 				return nil, nil, err
 			}
@@ -578,7 +580,7 @@ func (sp *SchemaParser) parseArgumentTypes(rawSchema *RawConnectorSchema, ty typ
 		}
 		return result, nil, nil
 	case *types.Named:
-		arguments, _, err := sp.parseArgumentTypes(rawSchema, inferredType.Obj().Type().Underlying(), append(fieldPaths, inferredType.Obj().Name()))
+		arguments, _, err := sp.parseArgumentTypes(inferredType.Obj().Type().Underlying(), append(fieldPaths, inferredType.Obj().Name()))
 		if err != nil {
 			return nil, nil, err
 		}
@@ -599,14 +601,14 @@ func (sp *SchemaParser) parseArgumentTypes(rawSchema *RawConnectorSchema, ty typ
 	}
 }
 
-func (sp *SchemaParser) parseType(rawSchema *RawConnectorSchema, rootType *TypeInfo, ty types.Type, fieldPaths []string, skipNullable bool) (*TypeInfo, error) {
+func (sp *SchemaParser) parseType(rootType *TypeInfo, ty types.Type, fieldPaths []string, skipNullable bool) (*TypeInfo, error) {
 	switch inferredType := ty.(type) {
 	case *types.Pointer:
 		if skipNullable {
-			return sp.parseType(rawSchema, rootType, inferredType.Elem(), fieldPaths, false)
+			return sp.parseType(rootType, inferredType.Elem(), fieldPaths, false)
 		}
-		innerType, err := sp.parseType(rawSchema, rootType, inferredType.Elem(), fieldPaths, false)
+		innerType, err := sp.parseType(rootType, inferredType.Elem(), fieldPaths, false)
 		if err != nil {
 			return nil, err
 		}
@@ -653,10 +655,14 @@ func (sp *SchemaParser) parseType(rawSchema *RawConnectorSchema, rootType *TypeI
 			IsAnonymous: isAnonymous,
 			Fields:      map[string]*ObjectField{},
 		}
+		// temporarily add the object type to raw schema to avoid infinite loop
+		sp.rawSchema.ObjectSchemas[rootType.Name] = objType
+		sp.rawSchema.Objects[rootType.Name] = objFields
+
 		for i := 0; i < inferredType.NumFields(); i++ {
 			fieldVar := inferredType.Field(i)
 			fieldTag := inferredType.Tag(i)
-			fieldType, err := sp.parseType(rawSchema, nil, fieldVar.Type(), append(fieldPaths, fieldVar.Name()), false)
+			fieldType, err := sp.parseType(nil, fieldVar.Type(), append(fieldPaths, fieldVar.Name()), false)
 			if err != nil {
 				return nil, err
 			}
@@ -670,8 +676,8 @@ func (sp *SchemaParser) parseType(rawSchema *RawConnectorSchema, rootType *TypeI
 				Type: fieldType,
 			}
 		}
-		rawSchema.ObjectSchemas[rootType.Name] = objType
-		rawSchema.Objects[rootType.Name] = objFields
+		sp.rawSchema.ObjectSchemas[rootType.Name] = objType
+		sp.rawSchema.Objects[rootType.Name] = objFields
 
 		return rootType, nil
 	case *types.Named:
@@ -682,19 +688,27 @@ func (sp *SchemaParser) parseType(rawSchema *RawConnectorSchema, rootType *TypeI
 		}
 
 		innerPkg := innerType.Pkg()
-		var packagePath string
-		if innerPkg != nil {
-			packagePath = innerPkg.Path()
+		if _, ok := sp.rawSchema.Objects[innerType.Name()]; ok {
+			ty := &TypeInfo{
+				Name:          innerType.Name(),
+				SchemaName:    innerType.Name(),
+				PackageName:   innerPkg.Name(),
+				PackagePath:   innerPkg.Path(),
+				TypeAST:       innerType.Type(),
+				Schema:        schema.NewNamedType(innerType.Name()),
+				TypeFragments: []string{innerType.Name()},
+			}
+			return ty, nil
 		}
-		typeInfo, err := sp.parseTypeInfoFromComments(innerType.Name(), packagePath, innerType.Parent())
+		typeInfo, err := sp.parseTypeInfoFromComments(innerType.Name(), innerPkg.Path(), innerType.Parent())
 		if err != nil {
 			return nil, err
 		}
 		if innerPkg != nil {
 			var scalarName ScalarName
 			typeInfo.PackageName = innerPkg.Name()
-			typeInfo.PackagePath = packagePath
+			typeInfo.PackagePath = innerPkg.Path()
 			scalarSchema := schema.NewScalarType()
 
 			switch innerPkg.Path() {
@@ -734,13 +748,13 @@ func (sp *SchemaParser) parseType(rawSchema *RawConnectorSchema, rootType *TypeI
 				typeInfo.IsScalar = true
 				typeInfo.Schema = schema.NewNamedType(string(scalarName))
 				typeInfo.TypeAST = ty
-				rawSchema.ScalarSchemas[string(scalarName)] = *scalarSchema
+				sp.rawSchema.ScalarSchemas[string(scalarName)] = *scalarSchema
 				return typeInfo, nil
 			}
 		}
 
 		if typeInfo.IsScalar {
-			rawSchema.CustomScalars[typeInfo.Name] = typeInfo
+			sp.rawSchema.CustomScalars[typeInfo.Name] = typeInfo
 			scalarSchema := schema.NewScalarType()
 			if typeInfo.ScalarRepresentation != nil {
 				scalarSchema.Representation = typeInfo.ScalarRepresentation
@@ -748,38 +762,38 @@ func (sp *SchemaParser) parseType(rawSchema *RawConnectorSchema, rootType *TypeI
 			} else {
 				// requires representation since NDC spec v0.1.2
 				scalarSchema.Representation = schema.NewTypeRepresentationJSON().Encode()
 			}
-			rawSchema.ScalarSchemas[typeInfo.SchemaName] = *scalarSchema
+			sp.rawSchema.ScalarSchemas[typeInfo.SchemaName] = *scalarSchema
 			return typeInfo, nil
 		}
 
-		return sp.parseType(rawSchema, typeInfo, innerType.Type().Underlying(), append(fieldPaths, innerType.Name()), false)
+		return sp.parseType(typeInfo, innerType.Type().Underlying(), append(fieldPaths, innerType.Name()), false)
 	case *types.Basic:
 		var scalarName ScalarName
 		switch inferredType.Kind() {
 		case types.Bool:
 			scalarName = ScalarBoolean
-			rawSchema.ScalarSchemas[string(scalarName)] = defaultScalarTypes[scalarName]
+			sp.rawSchema.ScalarSchemas[string(scalarName)] = defaultScalarTypes[scalarName]
 		case types.Int8, types.Uint8:
 			scalarName = ScalarInt8
-			rawSchema.ScalarSchemas[string(scalarName)] = defaultScalarTypes[scalarName]
+			sp.rawSchema.ScalarSchemas[string(scalarName)] = defaultScalarTypes[scalarName]
 		case types.Int16, types.Uint16:
 			scalarName = ScalarInt16
-			rawSchema.ScalarSchemas[string(scalarName)] = defaultScalarTypes[scalarName]
+			sp.rawSchema.ScalarSchemas[string(scalarName)] = defaultScalarTypes[scalarName]
 		case types.Int, types.Int32, types.Uint, types.Uint32:
 			scalarName = ScalarInt32
-			rawSchema.ScalarSchemas[string(scalarName)] = defaultScalarTypes[scalarName]
+			sp.rawSchema.ScalarSchemas[string(scalarName)] = defaultScalarTypes[scalarName]
 		case types.Int64, types.Uint64:
 			scalarName = ScalarInt64
-			rawSchema.ScalarSchemas[string(scalarName)] = defaultScalarTypes[scalarName]
+			sp.rawSchema.ScalarSchemas[string(scalarName)] = defaultScalarTypes[scalarName]
 		case types.Float32:
 			scalarName = ScalarFloat32
-			rawSchema.ScalarSchemas[string(scalarName)] = defaultScalarTypes[scalarName]
+			sp.rawSchema.ScalarSchemas[string(scalarName)] = defaultScalarTypes[scalarName]
 		case types.Float64:
 			scalarName = ScalarFloat64
-			rawSchema.ScalarSchemas[string(scalarName)] = defaultScalarTypes[scalarName]
+			sp.rawSchema.ScalarSchemas[string(scalarName)] = defaultScalarTypes[scalarName]
 		case types.String:
 			scalarName = ScalarString
-			rawSchema.ScalarSchemas[string(scalarName)] = defaultScalarTypes[scalarName]
+			sp.rawSchema.ScalarSchemas[string(scalarName)] = defaultScalarTypes[scalarName]
 		default:
 			return nil, fmt.Errorf("%s: unsupported scalar type <%s>", strings.Join(fieldPaths, "."), inferredType.String())
 		}
@@ -797,7 +811,7 @@ func (sp *SchemaParser) parseType(rawSchema *RawConnectorSchema, rootType *TypeI
 		return rootType, nil
 	case *types.Array:
-		innerType, err := sp.parseType(rawSchema, nil, inferredType.Elem(), fieldPaths, false)
+		innerType, err := sp.parseType(nil, inferredType.Elem(), fieldPaths, false)
 		if err != nil {
 			return nil, err
 		}
@@ -805,7 +819,7 @@ func (sp *SchemaParser) parseType(rawSchema *RawConnectorSchema, rootType *TypeI
 		innerType.Schema = schema.NewArrayType(innerType.Schema)
 		return innerType, nil
 	case *types.Slice:
-		innerType, err := sp.parseType(rawSchema, nil, inferredType.Elem(), fieldPaths, false)
+		innerType, err := sp.parseType(nil, inferredType.Elem(), fieldPaths, false)
 		if err != nil {
 			return nil, err
 		}
@@ -825,8 +839,8 @@ func (sp *SchemaParser) parseType(rawSchema *RawConnectorSchema, rootType *TypeI
 			rootType.PackagePath = ""
 		}
 
-		if _, ok := rawSchema.ScalarSchemas[string(scalarName)]; !ok {
-			rawSchema.ScalarSchemas[string(scalarName)] = defaultScalarTypes[scalarName]
+		if _, ok := sp.rawSchema.ScalarSchemas[string(scalarName)]; !ok {
+			sp.rawSchema.ScalarSchemas[string(scalarName)] = defaultScalarTypes[scalarName]
 		}
 		rootType.TypeFragments = append(rootType.TypeFragments, inferredType.String())
 		rootType.Schema = schema.NewNamedType(string(scalarName))
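
The schema.go hunks above register an object in the raw schema before its fields are walked, and return early when a named type is already registered, which is what lets the new self-referential Author.author field terminate. A minimal, self-contained sketch of that register-before-recursing pattern (illustrative names only, not the SDK's parser):

package main

import "fmt"

// typeDef is a toy stand-in for a parsed struct: field name -> type name.
type typeDef map[string]string

// parser caches parsed objects the way the schema parser now caches them
// in rawSchema.Objects before visiting their fields.
type parser struct {
	defs    map[string]typeDef
	objects map[string][]string // parsed field lists, keyed by object name
}

// parseObject registers the object first, so a self-referential field
// (Author.author -> Author) hits the cache instead of recursing forever.
func (p *parser) parseObject(name string) []string {
	if fields, ok := p.objects[name]; ok {
		return fields
	}
	p.objects[name] = nil // temporarily add before walking fields
	var fields []string
	for field, typ := range p.defs[name] {
		if _, isObject := p.defs[typ]; isObject {
			p.parseObject(typ) // safe even when typ == name
		}
		fields = append(fields, field)
	}
	p.objects[name] = fields
	return fields
}

func main() {
	p := &parser{
		defs: map[string]typeDef{
			"Author": {"id": "String", "author": "Author"},
		},
		objects: map[string][]string{},
	}
	fmt.Println(p.parseObject("Author")) // terminates despite the Author -> Author cycle
}
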
diff --git a/cmd/hasura-ndc-go/testdata/basic/expected/functions.go.tmpl b/cmd/hasura-ndc-go/testdata/basic/expected/functions.go.tmpl
index 7b6c38c..7e6dc13 100644
--- a/cmd/hasura-ndc-go/testdata/basic/expected/functions.go.tmpl
+++ b/cmd/hasura-ndc-go/testdata/basic/expected/functions.go.tmpl
@@ -3,7 +3,6 @@ package functions
 import (
   "encoding/json"
   "errors"
-  "fmt"
   "github.com/google/uuid"
   "github.com/hasura/ndc-sdk-go/scalar"
   "github.com/hasura/ndc-sdk-go/utils"
@@ -598,48 +597,44 @@ func (j *GetArticlesArguments) FromValue(input map[string]any) error {
   return nil
 }
 
 // ToMap encodes the struct to a value map
-func (j Author) ToMap() (map[string]any, error) {
+func (j Author) ToMap() map[string]any {
   r := make(map[string]any)
   r["created_at"] = j.CreatedAt
   r["id"] = j.ID
 
-  return r, nil
+  return r
 }
 
 // ToMap encodes the struct to a value map
-func (j CreateArticleResult) ToMap() (map[string]any, error) {
+func (j CreateArticleResult) ToMap() map[string]any {
   r := make(map[string]any)
-  j_Authors := make([]map[string]any, len(j.Authors))
+  j_Authors := make([]any, len(j.Authors))
   for i, j_Authors_v := range j.Authors {
-    itemResult, err := utils.EncodeObject(j_Authors_v)
-    if err != nil {
-      return nil, fmt.Errorf("failed to encode Authors: %s", err)
-    }
-    j_Authors[i] = itemResult
+    j_Authors[i] = j_Authors_v
   }
   r["authors"] = j_Authors
   r["id"] = j.ID
 
-  return r, nil
+  return r
 }
 
 // ToMap encodes the struct to a value map
-func (j CreateAuthorResult) ToMap() (map[string]any, error) {
+func (j CreateAuthorResult) ToMap() map[string]any {
   r := make(map[string]any)
   r["created_at"] = j.CreatedAt
   r["id"] = j.ID
   r["name"] = j.Name
 
-  return r, nil
+  return r
 }
 
 // ToMap encodes the struct to a value map
-func (j GetArticlesResult) ToMap() (map[string]any, error) {
+func (j GetArticlesResult) ToMap() map[string]any {
   r := make(map[string]any)
   r["id"] = j.ID
   r["Name"] = j.Name
 
-  return r, nil
+  return r
 }
 
 // ToMap encodes the struct to a value map
-func (j GetTypesArguments) ToMap() (map[string]any, error) {
+func (j GetTypesArguments) ToMap() map[string]any {
   r := make(map[string]any)
   r["ArrayBigInt"] = j.ArrayBigInt
   r["ArrayBigIntPtr"] = j.ArrayBigIntPtr
@@ -663,7 +658,7 @@ func (j GetTypesArguments) ToMap() (map[string]any, error) {
   r["ArrayJSONPtr"] = j.ArrayJSONPtr
   r["ArrayMap"] = j.ArrayMap
   r["ArrayMapPtr"] = j.ArrayMapPtr
-  j_ArrayObject := make([]map[string]any, len(j.ArrayObject))
+  j_ArrayObject := make([]any, len(j.ArrayObject))
   for i, j_ArrayObject_v := range j.ArrayObject {
     j_ArrayObject_v_obj := make(map[string]any)
     j_ArrayObject_v_obj["content"] = j_ArrayObject_v.Content
@@ -671,7 +666,7 @@ func (j GetTypesArguments) ToMap() (map[string]any, error) {
   }
   r["ArrayObject"] = j_ArrayObject
   if j.ArrayObjectPtr != nil {
-    j_ArrayObjectPtr := make([]map[string]any, len((*j.ArrayObjectPtr)))
+    j_ArrayObjectPtr := make([]any, len((*j.ArrayObjectPtr)))
     for i, j_ArrayObjectPtr_v := range (*j.ArrayObjectPtr) {
       j_ArrayObjectPtr_v_obj := make(map[string]any)
       j_ArrayObjectPtr_v_obj["content"] = j_ArrayObjectPtr_v.Content
@@ -725,37 +720,21 @@ func (j GetTypesArguments) ToMap() (map[string]any, error) {
   r["JSONPtr"] = j.JSONPtr
   r["Map"] = j.Map
   r["MapPtr"] = j.MapPtr
-  j_NamedArray := make([]map[string]any, len(j.NamedArray))
+  j_NamedArray := make([]any, len(j.NamedArray))
   for i, j_NamedArray_v := range j.NamedArray {
-    itemResult, err := utils.EncodeObject(j_NamedArray_v)
-    if err != nil {
-      return nil, fmt.Errorf("failed to encode NamedArray: %s", err)
-    }
-    j_NamedArray[i] = itemResult
+    j_NamedArray[i] = j_NamedArray_v
   }
   r["NamedArray"] = j_NamedArray
   if j.NamedArrayPtr != nil {
-    j_NamedArrayPtr := make([]map[string]any, len((*j.NamedArrayPtr)))
+    j_NamedArrayPtr := make([]any, len((*j.NamedArrayPtr)))
     for i, j_NamedArrayPtr_v := range (*j.NamedArrayPtr) {
-      itemResult, err := utils.EncodeObject(j_NamedArrayPtr_v)
-      if err != nil {
-        return nil, fmt.Errorf("failed to encode NamedArrayPtr: %s", err)
-      }
-      j_NamedArrayPtr[i] = itemResult
+      j_NamedArrayPtr[i] = j_NamedArrayPtr_v
     }
     r["NamedArrayPtr"] = j_NamedArrayPtr
   }
-  itemResult, err := utils.EncodeObject(j.NamedObject)
-  if err != nil {
-    return nil, fmt.Errorf("failed to encode NamedObject: %s", err)
-  }
-  r["NamedObject"] = itemResult
+  r["NamedObject"] = j.NamedObject
   if j.NamedObjectPtr != nil {
-    itemResult, err := utils.EncodeObject((*j.NamedObjectPtr))
-    if err != nil {
-      return nil, fmt.Errorf("failed to encode NamedObjectPtr: %s", err)
-    }
-    r["NamedObjectPtr"] = itemResult
+    r["NamedObjectPtr"] = (*j.NamedObjectPtr)
   }
   j_Object_obj := make(map[string]any)
   j_Object_obj["created_at"] = j.Object.CreatedAt
@@ -827,17 +806,17 @@ func (j GetTypesArguments) ToMap() (map[string]any, error) {
   r["Uint8Ptr"] = j.Uint8Ptr
   r["UintPtr"] = j.UintPtr
 
-  return r, nil
+  return r
 }
 
 // ToMap encodes the struct to a value map
-func (j HelloResult) ToMap() (map[string]any, error) {
+func (j HelloResult) ToMap() map[string]any {
   r := make(map[string]any)
   r["foo"] = j.Foo
   r["id"] = j.ID
   r["num"] = j.Num
   r["text"] = j.Text
 
-  return r, nil
+  return r
 }
 
 // ScalarName get the schema name of the scalar
 func (j CommentText) ScalarName() string {
diff --git a/cmd/hasura-ndc-go/testdata/basic/source/go.mod b/cmd/hasura-ndc-go/testdata/basic/source/go.mod
index 2e3f56c..09ba672 100644
--- a/cmd/hasura-ndc-go/testdata/basic/source/go.mod
+++ b/cmd/hasura-ndc-go/testdata/basic/source/go.mod
@@ -5,8 +5,6 @@ go 1.21
 require (
 	github.com/google/uuid v1.6.0
 	github.com/hasura/ndc-sdk-go v1.2.0
-	go.opentelemetry.io/otel v1.27.0
-	go.opentelemetry.io/otel/trace v1.27.0
 )
 
 require (
@@ -23,6 +21,7 @@ require (
 	github.com/prometheus/common v0.54.0 // indirect
 	github.com/prometheus/procfs v0.15.1 // indirect
 	go.opentelemetry.io/contrib/propagators/b3 v1.27.0 // indirect
+	go.opentelemetry.io/otel v1.27.0 // indirect
 	go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.27.0 // indirect
 	go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.27.0 // indirect
 	go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.27.0 // indirect
@@ -32,6 +31,7 @@ require (
 	go.opentelemetry.io/otel/metric v1.27.0 // indirect
 	go.opentelemetry.io/otel/sdk v1.27.0 // indirect
 	go.opentelemetry.io/otel/sdk/metric v1.27.0 // indirect
+	go.opentelemetry.io/otel/trace v1.27.0 // indirect
 	go.opentelemetry.io/proto/otlp v1.3.1 // indirect
 	golang.org/x/net v0.26.0 // indirect
 	golang.org/x/sys v0.21.0 // indirect
diff --git a/cmd/hasura-ndc-go/testdata/empty/source/go.mod b/cmd/hasura-ndc-go/testdata/empty/source/go.mod
index a3147c0..9ea37ce 100644
--- a/cmd/hasura-ndc-go/testdata/empty/source/go.mod
+++ b/cmd/hasura-ndc-go/testdata/empty/source/go.mod
@@ -2,11 +2,7 @@ module github.com/hasura/ndc-codegen-empty-test
 
 go 1.21
 
-require (
-	github.com/hasura/ndc-sdk-go v1.2.0
-	go.opentelemetry.io/otel v1.27.0
-	go.opentelemetry.io/otel/trace v1.27.0
-)
+require github.com/hasura/ndc-sdk-go v1.2.0
 
 require (
 	github.com/alecthomas/kong v0.9.0 // indirect
@@ -23,6 +19,7 @@ require (
 	github.com/prometheus/common v0.54.0 // indirect
 	github.com/prometheus/procfs v0.15.1 // indirect
 	go.opentelemetry.io/contrib/propagators/b3 v1.27.0 // indirect
+	go.opentelemetry.io/otel v1.27.0 // indirect
 	go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.27.0 // indirect
 	go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.27.0 // indirect
 	go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.27.0 // indirect
@@ -32,6 +29,7 @@ require (
 	go.opentelemetry.io/otel/metric v1.27.0 // indirect
 	go.opentelemetry.io/otel/sdk v1.27.0 // indirect
 	go.opentelemetry.io/otel/sdk/metric v1.27.0 // indirect
+	go.opentelemetry.io/otel/trace v1.27.0 // indirect
 	go.opentelemetry.io/proto/otlp v1.3.1 // indirect
 	golang.org/x/net v0.26.0 // indirect
 	golang.org/x/sys v0.21.0 // indirect
diff --git a/cmd/hasura-ndc-go/testdata/subdir/expected/functions.go.tmpl b/cmd/hasura-ndc-go/testdata/subdir/expected/functions.go.tmpl
index a7d4968..b1e94cc 100644
--- a/cmd/hasura-ndc-go/testdata/subdir/expected/functions.go.tmpl
+++ b/cmd/hasura-ndc-go/testdata/subdir/expected/functions.go.tmpl
@@ -8,6 +8,10 @@ var functions_Decoder = utils.NewDecoder()
 // FromValue decodes values from map
 func (j *GetArticlesArguments) FromValue(input map[string]any) error {
   var err error
+  err = functions_Decoder.DecodeObjectValue(&j.Author, input, "Author")
+  if err != nil {
+    return err
+  }
   j.Limit, err = utils.GetFloat[float64](input, "Limit")
   if err != nil {
     return err
@@ -15,9 +19,10 @@ func (j *GetArticlesArguments) FromValue(input map[string]any) error {
   return nil
 }
 
 // ToMap encodes the struct to a value map
-func (j GetArticlesResult) ToMap() (map[string]any, error) {
+func (j GetArticlesResult) ToMap() map[string]any {
   r := make(map[string]any)
-  r["id"] = j.ID
+  r["Author"] = j.Author
+  r["id"] = j.ID
 
-  return r, nil
+  return r
 }
\ No newline at end of file
diff --git a/cmd/hasura-ndc-go/testdata/subdir/expected/schema.json b/cmd/hasura-ndc-go/testdata/subdir/expected/schema.json
index ce467c6..9a303fc 100644
--- a/cmd/hasura-ndc-go/testdata/subdir/expected/schema.json
+++ b/cmd/hasura-ndc-go/testdata/subdir/expected/schema.json
@@ -3,6 +3,12 @@
   "functions": [
     {
       "arguments": {
+        "Author": {
+          "type": {
+            "name": "Author",
+            "type": "named"
+          }
+        },
         "Limit": {
           "type": {
             "name": "Float64",
@@ -22,8 +28,48 @@
     }
   ],
   "object_types": {
+    "Author": {
+      "fields": {
+        "author": {
+          "type": {
+            "type": "nullable",
+            "underlying_type": {
+              "name": "Author",
+              "type": "named"
+            }
+          }
+        },
+        "created_at": {
+          "type": {
+            "name": "TimestampTZ",
+            "type": "named"
+          }
+        },
+        "id": {
+          "type": {
+            "name": "String",
+            "type": "named"
+          }
+        },
+        "tags": {
+          "type": {
+            "element_type": {
+              "name": "String",
+              "type": "named"
+            },
+            "type": "array"
+          }
+        }
+      }
+    },
     "GetArticlesResult": {
       "fields": {
+        "Author": {
+          "type": {
+            "name": "Author",
+            "type": "named"
+          }
+        },
         "id": {
           "type": {
             "name": "String",
@@ -48,6 +94,13 @@
       "representation": {
         "type": "string"
       }
+    },
+    "TimestampTZ": {
+      "aggregate_functions": {},
+      "comparison_operators": {},
+      "representation": {
+        "type": "timestamptz"
+      }
     }
   }
-}
\ No newline at end of file
+}
diff --git a/cmd/hasura-ndc-go/testdata/subdir/source/connector/functions/comment.go b/cmd/hasura-ndc-go/testdata/subdir/source/connector/functions/comment.go
index 1619cf8..1771d28 100644
--- a/cmd/hasura-ndc-go/testdata/subdir/source/connector/functions/comment.go
+++ b/cmd/hasura-ndc-go/testdata/subdir/source/connector/functions/comment.go
@@ -3,15 +3,18 @@ package functions
 import (
 	"context"
 
+	example "github.com/hasura/ndc-codegen-example/types"
 	"github.com/hasura/ndc-codegen-subdir-test/types"
 )
 
 type GetArticlesArguments struct {
 	Limit float64
+	example.Author
 }
 
 type GetArticlesResult struct {
 	ID string `json:"id"`
+	example.Author
 }
 
 // GetArticles
diff --git a/cmd/hasura-ndc-go/testdata/subdir/source/go.mod b/cmd/hasura-ndc-go/testdata/subdir/source/go.mod
index dcf5e69..a0471ac 100644
--- a/cmd/hasura-ndc-go/testdata/subdir/source/go.mod
+++ b/cmd/hasura-ndc-go/testdata/subdir/source/go.mod
@@ -3,9 +3,10 @@ module github.com/hasura/ndc-codegen-subdir-test
 go 1.21
 
 require (
-	github.com/hasura/ndc-sdk-go v1.2.1
+	github.com/hasura/ndc-sdk-go v1.2.2
 	go.opentelemetry.io/otel v1.27.0
 	go.opentelemetry.io/otel/trace v1.27.0
+	github.com/hasura/ndc-codegen-example v1.2.2
 )
 
 require (
@@ -43,3 +44,5 @@ require (
 )
 
 replace github.com/hasura/ndc-sdk-go => ../../../../../
+
+replace github.com/hasura/ndc-codegen-example => ../../../../../example/codegen
diff --git a/example/codegen/functions/types.generated.go b/example/codegen/functions/types.generated.go
index 0aed859..9d934b9 100644
--- a/example/codegen/functions/types.generated.go
+++ b/example/codegen/functions/types.generated.go
@@ -2,7 +2,6 @@
 package functions
 
 import (
-	"fmt"
 	"github.com/hasura/ndc-sdk-go/utils"
 )
 
@@ -19,50 +18,46 @@ func (j *GetArticlesArguments) FromValue(input map[string]any) error {
 	return nil
 }
 
 // ToMap encodes the struct to a value map
-func (j CreateArticleResult) ToMap() (map[string]any, error) {
+func (j CreateArticleResult) ToMap() map[string]any {
 	r := make(map[string]any)
-	j_Authors := make([]map[string]any, len(j.Authors))
+	j_Authors := make([]any, len(j.Authors))
 	for i, j_Authors_v := range j.Authors {
-		itemResult, err := utils.EncodeObject(j_Authors_v)
-		if err != nil {
-			return nil, fmt.Errorf("failed to encode Authors: %s", err)
-		}
-		j_Authors[i] = itemResult
+		j_Authors[i] = j_Authors_v
 	}
 	r["authors"] = j_Authors
 	r["id"] = j.ID
 
-	return r, nil
+	return r
 }
 
 // ToMap encodes the struct to a value map
-func (j CreateAuthorResult) ToMap() (map[string]any, error) {
+func (j CreateAuthorResult) ToMap() map[string]any {
 	r := make(map[string]any)
 	r["created_at"] = j.CreatedAt
 	r["id"] = j.ID
 	r["name"] = j.Name
 
-	return r, nil
+	return r
 }
 
 // ToMap encodes the struct to a value map
-func (j GetArticlesResult) ToMap() (map[string]any, error) {
+func (j GetArticlesResult) ToMap() map[string]any {
 	r := make(map[string]any)
 	r["id"] = j.ID
 	r["Name"] = j.Name
 
-	return r, nil
+	return r
 }
 
 // ToMap encodes the struct to a value map
-func (j HelloResult) ToMap() (map[string]any, error) {
+func (j HelloResult) ToMap() map[string]any {
 	r := make(map[string]any)
 	r["foo"] = j.Foo
 	r["id"] = j.ID
 	r["num"] = j.Num
 	r["text"] = j.Text
 
-	return r, nil
+	return r
 }
 
 // ScalarName get the schema name of the scalar
diff --git a/example/codegen/schema.generated.json b/example/codegen/schema.generated.json
index f544a67..5c20fa1 100644
--- a/example/codegen/schema.generated.json
+++ b/example/codegen/schema.generated.json
@@ -1437,6 +1437,15 @@
   "object_types": {
     "Author": {
       "fields": {
+        "author": {
+          "type": {
+            "type": "nullable",
+            "underlying_type": {
+              "name": "Author",
+              "type": "named"
+            }
+          }
+        },
         "created_at": {
           "type": {
             "name": "TimestampTZ",
             "type": "named"
diff --git a/example/codegen/types/arguments/types.generated.go b/example/codegen/types/arguments/types.generated.go
index f9312c4..3e4d93a 100644
--- a/example/codegen/types/arguments/types.generated.go
+++ b/example/codegen/types/arguments/types.generated.go
@@ -3,7 +3,6 @@ package arguments
 
 import (
 	"encoding/json"
-	"fmt"
 	"github.com/google/uuid"
 	"github.com/hasura/ndc-codegen-example/types"
 	"github.com/hasura/ndc-sdk-go/scalar"
@@ -601,7 +600,7 @@ func (j *GetTypesArguments) FromValue(input map[string]any) error {
 	return nil
 }
 
 // ToMap encodes the struct to a value map
-func (j GetTypesArguments) ToMap() (map[string]any, error) {
+func (j GetTypesArguments) ToMap() map[string]any {
 	r := make(map[string]any)
 	r["ArrayBigInt"] = j.ArrayBigInt
 	r["ArrayBigIntPtr"] = j.ArrayBigIntPtr
@@ -625,7 +624,7 @@ func (j GetTypesArguments) ToMap() (map[string]any, error) {
 	r["ArrayJSONPtr"] = j.ArrayJSONPtr
 	r["ArrayMap"] = j.ArrayMap
 	r["ArrayMapPtr"] = j.ArrayMapPtr
-	j_ArrayObject := make([]map[string]any, len(j.ArrayObject))
+	j_ArrayObject := make([]any, len(j.ArrayObject))
 	for i, j_ArrayObject_v := range j.ArrayObject {
 		j_ArrayObject_v_obj := make(map[string]any)
 		j_ArrayObject_v_obj["content"] = j_ArrayObject_v.Content
@@ -633,7 +632,7 @@ func (j GetTypesArguments) ToMap() (map[string]any, error) {
 	}
 	r["ArrayObject"] = j_ArrayObject
 	if j.ArrayObjectPtr != nil {
-		j_ArrayObjectPtr := make([]map[string]any, len((*j.ArrayObjectPtr)))
+		j_ArrayObjectPtr := make([]any, len((*j.ArrayObjectPtr)))
 		for i, j_ArrayObjectPtr_v := range *j.ArrayObjectPtr {
 			j_ArrayObjectPtr_v_obj := make(map[string]any)
 			j_ArrayObjectPtr_v_obj["content"] = j_ArrayObjectPtr_v.Content
@@ -689,37 +688,21 @@ func (j GetTypesArguments) ToMap() (map[string]any, error) {
 	r["JSONPtr"] = j.JSONPtr
 	r["Map"] = j.Map
 	r["MapPtr"] = j.MapPtr
-	j_NamedArray := make([]map[string]any, len(j.NamedArray))
+	j_NamedArray := make([]any, len(j.NamedArray))
 	for i, j_NamedArray_v := range j.NamedArray {
-		itemResult, err := utils.EncodeObject(j_NamedArray_v)
-		if err != nil {
-			return nil, fmt.Errorf("failed to encode NamedArray: %s", err)
-		}
-		j_NamedArray[i] = itemResult
+		j_NamedArray[i] = j_NamedArray_v
 	}
 	r["NamedArray"] = j_NamedArray
 	if j.NamedArrayPtr != nil {
-		j_NamedArrayPtr := make([]map[string]any, len((*j.NamedArrayPtr)))
+		j_NamedArrayPtr := make([]any, len((*j.NamedArrayPtr)))
 		for i, j_NamedArrayPtr_v := range *j.NamedArrayPtr {
-			itemResult, err := utils.EncodeObject(j_NamedArrayPtr_v)
-			if err != nil {
-				return nil, fmt.Errorf("failed to encode NamedArrayPtr: %s", err)
-			}
-			j_NamedArrayPtr[i] = itemResult
+			j_NamedArrayPtr[i] = j_NamedArrayPtr_v
 		}
 		r["NamedArrayPtr"] = j_NamedArrayPtr
 	}
-	itemResult, err := utils.EncodeObject(j.NamedObject)
-	if err != nil {
-		return nil, fmt.Errorf("failed to encode NamedObject: %s", err)
-	}
-	r["NamedObject"] = itemResult
+	r["NamedObject"] = j.NamedObject
 	if j.NamedObjectPtr != nil {
-		itemResult, err := utils.EncodeObject((*j.NamedObjectPtr))
-		if err != nil {
-			return nil, fmt.Errorf("failed to encode NamedObjectPtr: %s", err)
-		}
-		r["NamedObjectPtr"] = itemResult
+		r["NamedObjectPtr"] = (*j.NamedObjectPtr)
 	}
 	j_Object_obj := make(map[string]any)
 	j_Object_obj["created_at"] = j.Object.CreatedAt
@@ -790,5 +773,5 @@ func (j GetTypesArguments) ToMap() (map[string]any, error) {
 	r["Uint8Ptr"] = j.Uint8Ptr
 	r["UintPtr"] = j.UintPtr
 
-	return r, nil
+	return r
 }
diff --git a/example/codegen/types/types.generated.go b/example/codegen/types/types.generated.go
index 72aa8f4..dbefba1 100644
--- a/example/codegen/types/types.generated.go
+++ b/example/codegen/types/types.generated.go
@@ -11,13 +11,16 @@ import (
 var types_Decoder = utils.NewDecoder()
 
 // ToMap encodes the struct to a value map
-func (j Author) ToMap() (map[string]any, error) {
+func (j Author) ToMap() map[string]any {
 	r := make(map[string]any)
+	if j.Author != nil {
+		r["author"] = (*j.Author)
+	}
 	r["created_at"] = j.CreatedAt
 	r["id"] = j.ID
 	r["tags"] = j.Tags
 
-	return r, nil
+	return r
 }
 
 // ScalarName get the schema name of the scalar
diff --git a/example/codegen/types/types.go b/example/codegen/types/types.go
index 6505cf7..292c9df 100644
--- a/example/codegen/types/types.go
+++ b/example/codegen/types/types.go
@@ -43,4 +43,5 @@ type Author struct {
 	ID        string    `json:"id"`
 	CreatedAt time.Time `json:"created_at"`
 	Tags      []string  `json:"tags"`
+	Author    *Author   `json:"author"`
 }
diff --git a/utils/connector.go b/utils/connector.go
index 22efa72..62dd7f9 100644
--- a/utils/connector.go
+++ b/utils/connector.go
@@ -165,12 +165,12 @@ func evalObjectWithColumnSelection(fields map[string]schema.Field, data map[stri
 				if err != nil {
 					return nil, err
 				}
-				output[fi.Column] = nestedValue
+				output[key] = nestedValue
 			} else {
-				output[fi.Column] = col
+				output[key] = col
 			}
 		} else {
-			output[fi.Column] = nil
+			output[key] = nil
 		}
 	case *schema.RelationshipField:
 		return nil, &schema.ErrorResponse{
diff --git a/utils/connector_test.go b/utils/connector_test.go
index e833042..4998f99 100644
--- a/utils/connector_test.go
+++ b/utils/connector_test.go
@@ -88,6 +88,42 @@ func TestEvalNestedFields(t *testing.T) {
 				},
 			},
 		},
+		{
+			Name: "rename_fields",
+			Input: struct {
+				ID       string `json:"id"`
+				Name     string `json:"name"`
+				Articles []struct {
+					Name string
+				} `json:"articles"`
+			}{
+				ID:   "1",
+				Name: "John",
+				Articles: []struct {
+					Name string
+				}{
+					{
+						Name: "Article 1",
+					},
+				},
+			},
+			Selection: schema.NewNestedObject(map[string]schema.FieldEncoder{
+				"id":   schema.NewColumnField("id", nil),
+				"Name": schema.NewColumnField("name", nil),
+				"articles": schema.NewColumnField("articles", schema.NewNestedArray(schema.NewNestedObject(map[string]schema.FieldEncoder{
+					"name": schema.NewColumnField("Name", nil),
+				}))),
+			}).Encode(),
+			Expected: map[string]any{
+				"id":   "1",
+				"Name": "John",
+				"articles": []map[string]any{
+					{
+						"name": "Article 1",
+					},
+				},
+			},
+		},
 	}
 
 	for _, tc := range testCases {
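
The utils/connector.go and utils/connector_test.go changes above key the evaluated output by the requested selection key instead of the column name, so client-side field aliases survive encoding. A small sketch of that behaviour using the same helpers the new rename_fields test exercises (the FullName alias and the input struct are made up for illustration; EvalNestedColumnFields is assumed to accept any struct value, as in the test):

package main

import (
	"fmt"

	"github.com/hasura/ndc-sdk-go/schema"
	"github.com/hasura/ndc-sdk-go/utils"
)

func main() {
	// "FullName" is the key requested by the client; "name" is the column read from the input.
	selection := schema.NewNestedObject(map[string]schema.FieldEncoder{
		"FullName": schema.NewColumnField("name", nil),
	}).Encode()

	input := struct {
		Name string `json:"name"`
	}{Name: "John"}

	result, err := utils.EvalNestedColumnFields(selection, input)
	if err != nil {
		panic(err)
	}
	// With this change the value comes back under the alias key, e.g. map[FullName:John],
	// rather than under the column name.
	fmt.Println(result)
}
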
diff --git a/utils/decode.go b/utils/decode.go
index 3bc6777..00ed0c5 100644
--- a/utils/decode.go
+++ b/utils/decode.go
@@ -107,11 +107,7 @@ func (d Decoder) decodeValue(target any, value any) error {
 		case map[string]any:
 			return t.FromValue(v)
 		case MapEncoder:
-			object, err := v.ToMap()
-			if err != nil {
-				return err
-			}
-			return t.FromValue(object)
+			return t.FromValue(v.ToMap())
 		default:
 			return errors.New("the value must be an object-liked")
 		}
@@ -1298,11 +1294,7 @@ func decodeValueHookFunc() mapstructure.DecodeHookFunc {
 			case map[string]any:
 				err = objDecoder.FromValue(v)
 			case MapEncoder:
-				object, toMapErr := v.ToMap()
-				if toMapErr != nil {
-					return nil, toMapErr
-				}
-				mapValue := object
+				mapValue := v.ToMap()
 				err = objDecoder.FromValue(mapValue)
 			}
 			if err != nil {
diff --git a/utils/encode.go b/utils/encode.go
index 7ebf939..9da2463 100644
--- a/utils/encode.go
+++ b/utils/encode.go
@@ -11,7 +11,42 @@ import (
 
 // MapEncoder abstracts a type with the ToMap method to encode type to map
 type MapEncoder interface {
-	ToMap() (map[string]any, error)
+	ToMap() map[string]any
+}
+
+// EncodeMap encodes an object to a map[string]any, using json tag to convert object keys
+//
+// Deprecated: use EncodeObject instead
+func EncodeMap[T MapEncoder](input T) map[string]any {
+	if IsNil(input) {
+		return nil
+	}
+	return input.ToMap()
+}
+
+// EncodeMaps encode objects to a slice of map[string]any, using json tag to convert object keys
+//
+// Deprecated: use EncodeObjectSlice instead
+func EncodeMaps[T MapEncoder](inputs []T) []map[string]any {
+	var results []map[string]any
+	for _, item := range inputs {
+		results = append(results, item.ToMap())
+	}
+	return results
+}
+
+// EncodeNullableMaps encode objects to a slice of map[string]any, using json tag to convert object keys
+//
+// Deprecated: use EncodeNullableObjectSlice instead
+func EncodeNullableMaps[T MapEncoder](inputs *[]T) []map[string]any {
+	if inputs == nil {
+		return nil
+	}
+	var results []map[string]any
+	for _, item := range *inputs {
+		results = append(results, item.ToMap())
+	}
+	return results
 }
 
 // EncodeObject encodes an unknown type to a map[string]any, using json tag to convert object keys
@@ -53,7 +88,7 @@ func encodeObject(input any, fieldPath string) (map[string]any, error) {
 	case map[string]any:
 		return value, nil
 	case MapEncoder:
-		return value.ToMap()
+		return value.ToMap(), nil
 	case Scalar:
 		return nil, &schema.ErrorResponse{
 			Message: "cannot encode scalar to object",
@@ -75,7 +110,11 @@ func encodeObject(input any, fieldPath string) (map[string]any, error) {
 	kind := inputValue.Kind()
 	switch kind {
 	case reflect.Pointer:
-		return encodeObject(inputValue.Elem().Interface(), fieldPath)
+		v, ok := UnwrapPointerFromReflectValue(inputValue)
+		if !ok {
+			return nil, nil
+		}
+		return encodeObject(v.Interface(), fieldPath)
 	case reflect.Struct:
 		return encodeStruct(inputValue), nil
 	default:
@@ -171,10 +210,10 @@ func EncodeObjects(input any) ([]map[string]any, error) {
 }
 
 func encodeObjects(input any, fieldPath string) ([]map[string]any, error) {
-	if IsNil(input) {
+	inputValue, ok := UnwrapPointerFromReflectValue(reflect.ValueOf(input))
+	if !ok {
 		return nil, nil
 	}
-	inputValue := reflect.ValueOf(input)
 	inputKind := inputValue.Kind()
 	if inputKind != reflect.Array && inputKind != reflect.Slice {
 		return nil, &schema.ErrorResponse{
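
With the utils/encode.go change above, MapEncoder.ToMap can no longer fail and EncodeObject returns value.ToMap() directly. A minimal sketch of a hand-written type under the narrowed contract (the Comment type is hypothetical, not part of the SDK):

package main

import (
	"fmt"

	"github.com/hasura/ndc-sdk-go/utils"
)

// Comment implements the new MapEncoder contract: ToMap returns the map
// directly instead of (map[string]any, error).
type Comment struct {
	ID   string
	Text string
}

func (c Comment) ToMap() map[string]any {
	return map[string]any{
		"id":   c.ID,
		"text": c.Text,
	}
}

func main() {
	// EncodeObject recognizes MapEncoder values and uses ToMap without an error path.
	m, err := utils.EncodeObject(Comment{ID: "1", Text: "hello"})
	if err != nil {
		panic(err)
	}
	fmt.Println(m) // map[id:1 text:hello]
}
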
}, + }, +} + +var benchmarkEvalSingleFieldSelection = schema.NewNestedObject(map[string]schema.FieldEncoder{ + "id": schema.NewColumnField("id", nil), +}).Encode() + +var benchmarkEvalAllFieldsSelection = schema.NewNestedObject(map[string]schema.FieldEncoder{ + "id": schema.NewColumnField("id", nil), + "authors": schema.NewColumnField("authors", schema.NewNestedArray(schema.NewNestedObject(map[string]schema.FieldEncoder{ + "id": schema.NewColumnField("id", nil), + "first_name": schema.NewColumnField("first_name", nil), + "last_name": schema.NewColumnField("last_name", nil), + "address": schema.NewColumnField("address", schema.NewNestedObject(map[string]schema.FieldEncoder{ + "street": schema.NewColumnField("street", nil), + })), + }))), +}).Encode() + +// BenchmarkEvalSingleFieldToMap-32 1390357 797.6 ns/op 1512 B/op 15 allocs/op +func BenchmarkEvalSingleFieldToMap(b *testing.B) { + for i := 0; i < b.N; i++ { + _, err := EvalNestedColumnFields(benchmarkEvalSingleFieldSelection, benchmarkEvalNestedColumnFixture) + if err != nil { + panic(err) + } + } +} + +// BenchmarkEvalSingleFieldToMapLazy-32 2420421 491.2 ns/op 800 B/op 8 allocs/op +func BenchmarkEvalSingleFieldToMapLazy(b *testing.B) { + value := mockArticleLazy(benchmarkEvalNestedColumnFixture) + for i := 0; i < b.N; i++ { + _, err := EvalNestedColumnFields(benchmarkEvalSingleFieldSelection, value) + if err != nil { + panic(err) + } + } +} + +// BenchmarkEvalSingleFieldAny-32 706143 1596 ns/op 1776 B/op 26 allocs/op +func BenchmarkEvalSingleFieldAny(b *testing.B) { + type Object mockArticle + var object = Object(benchmarkEvalNestedColumnFixture) + for i := 0; i < b.N; i++ { + _, err := EvalNestedColumnFields(benchmarkEvalSingleFieldSelection, object) + if err != nil { + panic(err) + } + } +} + +// BenchmarkEvalAllFieldToMap-32 478923 2319 ns/op 2708 B/op 38 allocs/op +func BenchmarkEvalAllFieldToMap(b *testing.B) { + for i := 0; i < b.N; i++ { + _, err := EvalNestedColumnFields(benchmarkEvalAllFieldsSelection, benchmarkEvalNestedColumnFixture) + if err != nil { + panic(err) + } + } +} + +// BenchmarkEvalAllFieldToMapLazy-32 461304 2345 ns/op 2764 B/op 38 allocs/op +func BenchmarkEvalAllFieldToMapLazy(b *testing.B) { + value := mockArticleLazy(benchmarkEvalNestedColumnFixture) + for i := 0; i < b.N; i++ { + _, err := EvalNestedColumnFields(benchmarkEvalAllFieldsSelection, value) + if err != nil { + panic(err) + } + } +} + +// BenchmarkEvalAllFieldAny-32 368212 3279 ns/op 2972 B/op 49 allocs/op +func BenchmarkEvalAllFieldAny(b *testing.B) { + type Object mockArticle + var object = Object(benchmarkEvalNestedColumnFixture) + for i := 0; i < b.N; i++ { + _, err := EvalNestedColumnFields(benchmarkEvalAllFieldsSelection, object) + if err != nil { + panic(err) + } + } +}