Skip to content

Commit

Permalink
Use conventions to support GraphQL mutations and adjust query names (#…
Browse files Browse the repository at this point in the history
  • Loading branch information
mattjohnsonpint authored Oct 26, 2024
1 parent 5f7a1a9 commit 3f87df6
Show file tree
Hide file tree
Showing 13 changed files with 294 additions and 157 deletions.
3 changes: 2 additions & 1 deletion CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,8 @@ In previous releases, the name "Hypermode" was used for all three._
- Fix runtime shutdown issues with `modus dev` [#508](https://github.com/hypermodeinc/modus/pull/508)
- Monitor manifest and env files for changes [#509](https://github.com/hypermodeinc/modus/pull/509)
- Log bad GraphQL requests in dev [#510](https://github.com/hypermodeinc/modus/pull/510)
- Add jwks endpoint key support to auth [#511](https://github.com/hypermodeinc/modus/pull/511)
- Add JWKS endpoint key support to auth [#511](https://github.com/hypermodeinc/modus/pull/511)
- Use conventions to support GraphQL mutations and adjust query names [#513](https://github.com/hypermodeinc/modus/pull/513)

## 2024-10-02 - Version 0.12.7

Expand Down
1 change: 1 addition & 0 deletions cspell.json
Original file line number Diff line number Diff line change
Expand Up @@ -88,6 +88,7 @@
"jsonlogs",
"jsonparser",
"jsonschema",
"JWKS",
"langsupport",
"ldflags",
"legacymodels",
Expand Down
5 changes: 3 additions & 2 deletions runtime/graphql/datasource/configuration.go
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ import (
)

type HypDSConfig struct {
WasmHost wasmhost.WasmHost
MapTypes []string
WasmHost wasmhost.WasmHost
FieldsToFunctions map[string]string
MapTypes []string
}
54 changes: 33 additions & 21 deletions runtime/graphql/datasource/planner.go
Original file line number Diff line number Diff line change
Expand Up @@ -31,8 +31,9 @@ type HypDSPlanner struct {
variables resolve.Variables
fields map[int]fieldInfo
template struct {
function *fieldInfo
data []byte
fieldInfo *fieldInfo
functionName string
data []byte
}
}

Expand Down Expand Up @@ -71,7 +72,7 @@ func (p *HypDSPlanner) DownstreamResponseFieldAlias(downstreamFieldRef int) (ali

func (p *HypDSPlanner) DataSourcePlanningBehavior() plan.DataSourcePlanningBehavior {
return plan.DataSourcePlanningBehavior{
// This needs to be true, so we can distinguish results for multiple function calls in the same query.
// This needs to be true, so we can distinguish results for multiple function calls in the same operation.
// Example:
// query SayHello {
// a: sayHello(name: "Sam")
Expand Down Expand Up @@ -101,19 +102,17 @@ func (p *HypDSPlanner) EnterDocument(operation, definition *ast.Document) {

func (p *HypDSPlanner) EnterField(ref int) {

// Capture information about every field in the query.
// Capture information about every field in the operation.
f := p.captureField(ref)
p.fields[ref] = *f

// If the field is enclosed by a root node, then it represents the function we want to call.
if p.enclosingTypeIsRootNode() {
// Capture only the fields that represent function calls.
if p.currentNodeIsFunctionCall() {

// Save the field for the function.
p.template.function = f
p.template.fieldInfo = f
p.template.functionName = p.config.FieldsToFunctions[f.Name]

// Also capture the input data for the function.
err := p.captureInputData(ref)
if err != nil {
if err := p.captureInputData(ref); err != nil {
logger.Err(p.ctx, err).Msg("Error capturing input data.")
return
}
Expand All @@ -122,11 +121,11 @@ func (p *HypDSPlanner) EnterField(ref int) {

func (p *HypDSPlanner) LeaveDocument(operation, definition *ast.Document) {
// Stitch the captured fields together to form a tree.
p.stitchFields(p.template.function)
p.stitchFields(p.template.fieldInfo)
}

func (p *HypDSPlanner) stitchFields(f *fieldInfo) {
if len(f.fieldRefs) == 0 {
if f == nil || len(f.fieldRefs) == 0 {
return
}

Expand All @@ -138,14 +137,21 @@ func (p *HypDSPlanner) stitchFields(f *fieldInfo) {
}
}

func (p *HypDSPlanner) enclosingTypeIsRootNode() bool {
func (p *HypDSPlanner) currentNodeIsFunctionCall() bool {
if p.visitor.Walker.CurrentKind != ast.NodeKindField {
return false
}

enclosingTypeDef := p.visitor.Walker.EnclosingTypeDefinition
for _, node := range p.visitor.Operation.RootNodes {
if node.Ref == enclosingTypeDef.Ref {
return true
}
if enclosingTypeDef.Kind != ast.NodeKindObjectTypeDefinition {
return false
}
return false

// TODO: This works, but it's a hack. We should find a better way to determine if the field is a function call.
// The previous approach of root node testing worked for queries, but not for mutations.
// The enclosing type name should not be relevant.
enclosingTypeName := p.visitor.Definition.ObjectTypeDefinitionNameString(enclosingTypeDef.Ref)
return enclosingTypeName == "Query" || enclosingTypeName == "Mutation"
}

func (p *HypDSPlanner) captureField(ref int) *fieldInfo {
Expand Down Expand Up @@ -217,7 +223,13 @@ func (p *HypDSPlanner) captureInputData(fieldRef int) error {
}

func (p *HypDSPlanner) ConfigureFetch() resolve.FetchConfiguration {
fnJson, err := utils.JsonSerialize(p.template.function)
fieldInfoJson, err := utils.JsonSerialize(p.template.fieldInfo)
if err != nil {
logger.Error(p.ctx).Err(err).Msg("Error serializing json while configuring graphql fetch.")
return resolve.FetchConfiguration{}
}

functionNameJson, err := utils.JsonSerialize(p.template.functionName)
if err != nil {
logger.Error(p.ctx).Err(err).Msg("Error serializing json while configuring graphql fetch.")
return resolve.FetchConfiguration{}
Expand All @@ -226,7 +238,7 @@ func (p *HypDSPlanner) ConfigureFetch() resolve.FetchConfiguration {
// Note: we have to build the rest of the template manually, because the data field may
// contain placeholders for variables, such as $$0$$ which are not valid in JSON.
// They are replaced with the actual values by the time Load is called.
inputTemplate := fmt.Sprintf(`{"fn":%s,"data":%s}`, fnJson, p.template.data)
inputTemplate := fmt.Sprintf(`{"field":%s,"function":%s,"data":%s}`, fieldInfoJson, functionNameJson, p.template.data)

return resolve.FetchConfiguration{
Input: inputTemplate,
Expand Down
17 changes: 9 additions & 8 deletions runtime/graphql/datasource/source.go
Original file line number Diff line number Diff line change
Expand Up @@ -28,8 +28,9 @@ import (
const DataSourceName = "ModusDataSource"

type callInfo struct {
Function fieldInfo `json:"fn"`
Parameters map[string]any `json:"data"`
FieldInfo fieldInfo `json:"field"`
FunctionName string `json:"function"`
Parameters map[string]any `json:"data"`
}

type ModusDataSource struct {
Expand Down Expand Up @@ -65,7 +66,7 @@ func (*ModusDataSource) LoadWithFiles(ctx context.Context, input []byte, files [
func (ds *ModusDataSource) callFunction(ctx context.Context, callInfo *callInfo) (any, []resolve.GraphQLError, error) {

// Get the function info
fnInfo, err := ds.WasmHost.GetFunctionInfo(callInfo.Function.Name)
fnInfo, err := ds.WasmHost.GetFunctionInfo(callInfo.FunctionName)
if err != nil {
return nil, nil, err
}
Expand All @@ -79,7 +80,7 @@ func (ds *ModusDataSource) callFunction(ctx context.Context, callInfo *callInfo)

// Store the execution info into the function output map.
outputMap := ctx.Value(utils.FunctionOutputContextKey).(map[string]wasmhost.ExecutionInfo)
outputMap[callInfo.Function.AliasOrName()] = execInfo
outputMap[callInfo.FieldInfo.AliasOrName()] = execInfo

// Transform messages (and error lines in the output buffers) to GraphQL errors.
messages := append(execInfo.Messages(), utils.TransformConsoleOutput(execInfo.Buffers())...)
Expand Down Expand Up @@ -107,7 +108,7 @@ func (ds *ModusDataSource) callFunction(ctx context.Context, callInfo *callInfo)

func writeGraphQLResponse(ctx context.Context, out *bytes.Buffer, result any, gqlErrors []resolve.GraphQLError, fnErr error, ci *callInfo) error {

fieldName := ci.Function.AliasOrName()
fieldName := ci.FieldInfo.AliasOrName()

// Include the function error
if fnErr != nil {
Expand Down Expand Up @@ -139,7 +140,7 @@ func writeGraphQLResponse(ctx context.Context, out *bytes.Buffer, result any, gq
msg := fmt.Sprintf("Function completed successfully, but the result contains a %v value that cannot be serialized to JSON.", err.Value)
logger.Warn(ctx).
Bool("user_visible", true).
Str("function", ci.Function.Name).
Str("function", ci.FunctionName).
Str("result", fmt.Sprintf("%+v", result)).
Msg(msg)
fmt.Fprintf(out, `{"errors":[{"message":"%s","path":["%s"],"extensions":{"level":"error"}}]}`, msg, fieldName)
Expand All @@ -149,7 +150,7 @@ func writeGraphQLResponse(ctx context.Context, out *bytes.Buffer, result any, gq
}

// Transform the data
if r, err := transformValue(jsonResult, &ci.Function); err != nil {
if r, err := transformValue(jsonResult, &ci.FieldInfo); err != nil {
return err
} else {
jsonData = r
Expand Down Expand Up @@ -395,7 +396,7 @@ func transformErrors(messages []utils.LogMessage, ci *callInfo) []resolve.GraphQ
if msg.IsError() {
errors = append(errors, resolve.GraphQLError{
Message: msg.Message,
Path: []any{ci.Function.AliasOrName()},
Path: []any{ci.FieldInfo.AliasOrName()},
Extensions: map[string]interface{}{
"level": msg.Level,
},
Expand Down
53 changes: 35 additions & 18 deletions runtime/graphql/engine/engine.go
Original file line number Diff line number Diff line change
Expand Up @@ -94,8 +94,9 @@ func generateSchema(ctx context.Context, md *metadata.Metadata) (*gql.Schema, *d
}

cfg := &datasource.HypDSConfig{
WasmHost: wasmhost.GetWasmHost(ctx),
MapTypes: generated.MapTypes,
WasmHost: wasmhost.GetWasmHost(ctx),
FieldsToFunctions: generated.FieldsToFunctions,
MapTypes: generated.MapTypes,
}

return schema, cfg, nil
Expand All @@ -106,24 +107,25 @@ func getDatasourceConfig(ctx context.Context, schema *gql.Schema, cfg *datasourc
defer span.Finish()

queryTypeName := schema.QueryTypeName()
queryFieldNames := getAllQueryFields(ctx, schema)
queryFieldNames := getTypeFields(ctx, schema, queryTypeName)

mutationTypeName := schema.MutationTypeName()
mutationFieldNames := getTypeFields(ctx, schema, mutationTypeName)

rootNodes := []plan.TypeField{
{
TypeName: queryTypeName,
FieldNames: queryFieldNames,
},
{
TypeName: mutationTypeName,
FieldNames: mutationFieldNames,
},
}

var childNodes []plan.TypeField
for _, f := range queryFieldNames {
fields := schema.GetAllNestedFieldChildrenFromTypeField(queryTypeName, f, gql.NewSkipReservedNamesFunc())
for _, field := range fields {
childNodes = append(childNodes, plan.TypeField{
TypeName: field.TypeName,
FieldNames: field.FieldNames,
})
}
}
childNodes := []plan.TypeField{}
childNodes = append(childNodes, getChildNodes(queryFieldNames, schema, queryTypeName)...)
childNodes = append(childNodes, getChildNodes(mutationFieldNames, schema, mutationTypeName)...)

return plan.NewDataSourceConfiguration(
datasource.DataSourceName,
Expand All @@ -133,6 +135,24 @@ func getDatasourceConfig(ctx context.Context, schema *gql.Schema, cfg *datasourc
)
}

// getChildNodes collects the nested child type/field nodes reachable from the
// given root fields of typeName, deduplicated by type name.
// The first occurrence of each type name wins; later occurrences are skipped.
func getChildNodes(fieldNames []string, schema *gql.Schema, typeName string) []plan.TypeField {
	seen := make(map[string]bool)
	var childNodes []plan.TypeField
	for _, fieldName := range fieldNames {
		children := schema.GetAllNestedFieldChildrenFromTypeField(typeName, fieldName, gql.NewSkipReservedNamesFunc())
		for _, child := range children {
			if seen[child.TypeName] {
				continue
			}
			seen[child.TypeName] = true
			childNodes = append(childNodes, plan.TypeField{
				TypeName:   child.TypeName,
				FieldNames: child.FieldNames,
			})
		}
	}
	return childNodes
}

func makeEngine(ctx context.Context, schema *gql.Schema, datasourceConfig plan.DataSourceConfiguration[datasource.HypDSConfig]) (*engine.ExecutionEngine, error) {
span, ctx := utils.NewSentrySpanForCurrentFunc(ctx)
defer span.Finish()
Expand All @@ -150,17 +170,14 @@ func makeEngine(ctx context.Context, schema *gql.Schema, datasourceConfig plan.D
return engine.NewExecutionEngine(ctx, adapter, engineConfig, resolverOptions)
}

func getAllQueryFields(ctx context.Context, s *gql.Schema) []string {
func getTypeFields(ctx context.Context, s *gql.Schema, typeName string) []string {
span, _ := utils.NewSentrySpanForCurrentFunc(ctx)
defer span.Finish()

doc := s.Document()
queryTypeName := s.QueryTypeName()

fields := make([]string, 0)
for _, objectType := range doc.ObjectTypeDefinitions {
typeName := doc.Input.ByteSliceString(objectType.Name)
if typeName == queryTypeName {
if doc.Input.ByteSliceString(objectType.Name) == typeName {
for _, fieldRef := range objectType.FieldsDefinition.Refs {
field := doc.FieldDefinitions[fieldRef]
fieldName := doc.Input.ByteSliceString(field.Name)
Expand Down
8 changes: 4 additions & 4 deletions runtime/graphql/graphql.go
Original file line number Diff line number Diff line change
Expand Up @@ -100,14 +100,14 @@ func handleGraphQLRequest(w http.ResponseWriter, r *http.Request) {
options = append(options, eng.WithRequestTraceOptions(traceOpts))
}

// Execute the GraphQL query
// Execute the GraphQL operation
resultWriter := gql.NewEngineResultWriter()
if err := engine.Execute(ctx, &gqlRequest, &resultWriter, options...); err != nil {

if report, ok := err.(operationreport.Report); ok {
if len(report.InternalErrors) > 0 {
// Log internal errors, but don't return them to the client
msg := "Failed to execute GraphQL query."
msg := "Failed to execute GraphQL operation."
logger.Err(ctx, err).Msg(msg)
http.Error(w, msg, http.StatusInternalServerError)
return
Expand All @@ -124,10 +124,10 @@ func handleGraphQLRequest(w http.ResponseWriter, r *http.Request) {
// cleanup empty arrays from error message before logging
errMsg := strings.Replace(err.Error(), ", locations: []", "", 1)
errMsg = strings.Replace(errMsg, ", path: []", "", 1)
logger.Warn(ctx).Str("error", errMsg).Msg("Failed to execute GraphQL query.")
logger.Warn(ctx).Str("error", errMsg).Msg("Failed to execute GraphQL operation.")
}
} else {
msg := "Failed to execute GraphQL query."
msg := "Failed to execute GraphQL operation."
logger.Err(ctx, err).Msg(msg)
http.Error(w, fmt.Sprintf("%s\n%v", msg, err), http.StatusInternalServerError)
}
Expand Down
62 changes: 62 additions & 0 deletions runtime/graphql/schemagen/conventions.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
/*
* Copyright 2024 Hypermode Inc.
* Licensed under the terms of the Apache License, Version 2.0
* See the LICENSE file that accompanied this code for further details.
*
* SPDX-FileCopyrightText: 2024 Hypermode Inc. <hello@hypermode.com>
* SPDX-License-Identifier: Apache-2.0
*/

package schemagen

import "strings"

// prefixes that are used to identify query fields, and will be trimmed from the field name
// (matched case-insensitively; e.g. "getUser" becomes the query field "user")
var queryTrimPrefixes = []string{"get", "list"}

// prefixes that are used to identify mutation fields
// (matched case-insensitively; see getPrefix for the exact matching rules)
var mutationPrefixes = []string{
"mutate",
"post", "patch", "put", "delete",
"add", "update", "insert", "upsert",
"create", "edit", "save", "remove", "alter", "modify",
}

// isMutation reports whether the named function should be exposed as a GraphQL
// mutation, based on its name starting with a known mutation prefix.
// Embedder functions are never mutations, regardless of their name.
func isMutation(fnName string) bool {
	if getPrefix(fnName, mutationPrefixes) == "" {
		return false
	}

	// Exclude embedders, even when their name carries a mutation prefix.
	return !getEmbedderFields()[fnName]
}

// getFieldName converts a function name to a GraphQL query field name, by
// trimming any query prefix ("get" or "list") and lowercasing the first letter.
// For example, "getUser" becomes "user", and "sayHello" stays "sayHello".
func getFieldName(fnName string) string {
	prefix := getPrefix(fnName, queryTrimPrefixes)
	fieldName := strings.TrimPrefix(fnName, prefix)
	if fieldName == "" {
		// The function name is exactly a prefix (e.g. "get" or "list").
		// Trimming leaves nothing, and indexing an empty string would panic,
		// so fall back to the original name.
		fieldName = fnName
	}
	// NOTE(review): getPrefix matches case-insensitively but TrimPrefix is
	// case-sensitive, so "GetUser" yields "getUser" rather than "user" —
	// confirm that is intended for exported/capitalized function names.
	return strings.ToLower(fieldName[:1]) + fieldName[1:]
}

// getPrefix returns the first prefix in the list that matches the function
// name, or an empty string if none match. Matching is case-insensitive: the
// name matches when it equals the prefix exactly, or when it starts with the
// prefix and the next character is not a lowercase letter — so "addPost"
// matches "add", but "additionalPosts" does not.
func getPrefix(fnName string, prefixes []string) string {
	// The lowered name is loop-invariant, so compute it once instead of on
	// every iteration.
	fnNameLowered := strings.ToLower(fnName)

	for _, prefix := range prefixes {
		// check for exact match
		if fnNameLowered == prefix {
			return prefix
		}

		// check for a prefix, but only if the prefix is NOT followed by a lowercase letter
		// for example, we want to match "addPost" but not "additionalPosts"
		prefixLen := len(prefix)
		if len(fnName) > prefixLen && strings.HasPrefix(fnNameLowered, prefix) {
			if c := fnName[prefixLen]; c < 'a' || c > 'z' {
				return prefix
			}
		}
	}

	return ""
}
Loading

0 comments on commit 3f87df6

Please sign in to comment.