diff --git a/v2/pkg/ast/path.go b/v2/pkg/ast/path.go index 19493ec60c..aa133fbc99 100644 --- a/v2/pkg/ast/path.go +++ b/v2/pkg/ast/path.go @@ -128,7 +128,20 @@ func (p Path) String() string { return out } +// DotDelimitedString returns a dot-separated string representation of the path. +// Inline fragments include their reference number, e.g., "query.user.$1User.name". func (p Path) DotDelimitedString() string { + return p.dotDelimitedString(true) +} + +// DotDelimitedStringWithoutFragmentRefs returns a dot-separated string representation of the path. +// Unlike DotDelimitedString, inline fragments omit their reference number, +// e.g., "query.user.$User.name". +func (p Path) DotDelimitedStringWithoutFragmentRefs() string { + return p.dotDelimitedString(false) +} + +func (p Path) dotDelimitedString(includeFragmentRefs bool) string { builder := strings.Builder{} toGrow := 0 @@ -160,7 +173,9 @@ func (p Path) DotDelimitedString() string { } case InlineFragmentName: builder.WriteString(InlineFragmentPathPrefix) - builder.WriteString(strconv.Itoa(p[i].FragmentRef)) + if includeFragmentRefs { + builder.WriteString(strconv.Itoa(p[i].FragmentRef)) + } builder.WriteString(unsafebytes.BytesToString(p[i].FieldName)) } } diff --git a/v2/pkg/ast/path_test.go b/v2/pkg/ast/path_test.go new file mode 100644 index 0000000000..f7577b53a9 --- /dev/null +++ b/v2/pkg/ast/path_test.go @@ -0,0 +1,219 @@ +package ast_test + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/wundergraph/graphql-go-tools/v2/pkg/ast" +) + +func TestPath_DotDelimitedString(t *testing.T) { + tests := []struct { + name string + path ast.Path + want string + wantNoRef string + }{ + { + name: "returns operation type for root query path", + path: ast.Path{ + {Kind: ast.FieldName, FieldName: []byte("query")}, + }, + want: "query", + wantNoRef: "query", + }, + { + name: "returns operation type for root mutation path", + path: ast.Path{ + {Kind: ast.FieldName, FieldName: 
[]byte("mutation")}, + }, + want: "mutation", + wantNoRef: "mutation", + }, + { + name: "returns operation type for root subscription path", + path: ast.Path{ + {Kind: ast.FieldName, FieldName: []byte("subscription")}, + }, + want: "subscription", + wantNoRef: "subscription", + }, + { + name: "converts empty field name to query as fallback", + path: ast.Path{ + {Kind: ast.FieldName, FieldName: []byte("")}, + }, + want: "query", + wantNoRef: "query", + }, + { + name: "joins operation and field with dot separator", + path: ast.Path{ + {Kind: ast.FieldName, FieldName: []byte("query")}, + {Kind: ast.FieldName, FieldName: []byte("user")}, + }, + want: "query.user", + wantNoRef: "query.user", + }, + { + name: "nested query fields contain all elements in path", + path: ast.Path{ + {Kind: ast.FieldName, FieldName: []byte("query")}, + {Kind: ast.FieldName, FieldName: []byte("user")}, + {Kind: ast.FieldName, FieldName: []byte("name")}, + }, + want: "query.user.name", + wantNoRef: "query.user.name", + }, + { + name: "nested mutation fields contain all elements in path", + path: ast.Path{ + {Kind: ast.FieldName, FieldName: []byte("mutation")}, + {Kind: ast.FieldName, FieldName: []byte("createUser")}, + {Kind: ast.FieldName, FieldName: []byte("id")}, + }, + want: "mutation.createUser.id", + wantNoRef: "mutation.createUser.id", + }, + { + name: "nested subscription fields contain all elements in path", + path: ast.Path{ + {Kind: ast.FieldName, FieldName: []byte("subscription")}, + {Kind: ast.FieldName, FieldName: []byte("userUpdated")}, + {Kind: ast.FieldName, FieldName: []byte("name")}, + }, + want: "subscription.userUpdated.name", + wantNoRef: "subscription.userUpdated.name", + }, + { + name: "includes field aliases in path", + path: ast.Path{ + {Kind: ast.FieldName, FieldName: []byte("query")}, + {Kind: ast.FieldName, FieldName: []byte("myUser")}, // alias is stored in path + {Kind: ast.FieldName, FieldName: []byte("email")}, + }, + want: "query.myUser.email", + wantNoRef: 
"query.myUser.email", + }, + { + name: "array indexes are represented as numbers", + path: ast.Path{ + {Kind: ast.FieldName, FieldName: []byte("query")}, + {Kind: ast.FieldName, FieldName: []byte("users")}, + {Kind: ast.ArrayIndex, ArrayIndex: 0}, + {Kind: ast.FieldName, FieldName: []byte("name")}, + }, + want: "query.users.0.name", + wantNoRef: "query.users.0.name", + }, + { + name: "multiple array indexes are all included in sequence", + path: ast.Path{ + {Kind: ast.FieldName, FieldName: []byte("query")}, + {Kind: ast.FieldName, FieldName: []byte("matrix")}, + {Kind: ast.ArrayIndex, ArrayIndex: 0}, + {Kind: ast.ArrayIndex, ArrayIndex: 1}, + }, + want: "query.matrix.0.1", + wantNoRef: "query.matrix.0.1", + }, + { + name: "inline fragments are prefixed with dollar sign and include fragment ref", + path: ast.Path{ + {Kind: ast.FieldName, FieldName: []byte("query")}, + {Kind: ast.FieldName, FieldName: []byte("node")}, + {Kind: ast.InlineFragmentName, FieldName: []byte("User"), FragmentRef: 1}, + {Kind: ast.FieldName, FieldName: []byte("name")}, + }, + want: "query.node.$1User.name", + wantNoRef: "query.node.$User.name", + }, + { + name: "multiple inline fragments each include their own ref number", + path: ast.Path{ + {Kind: ast.FieldName, FieldName: []byte("query")}, + {Kind: ast.FieldName, FieldName: []byte("search")}, + {Kind: ast.InlineFragmentName, FieldName: []byte("User"), FragmentRef: 1}, + {Kind: ast.FieldName, FieldName: []byte("profile")}, + {Kind: ast.InlineFragmentName, FieldName: []byte("PublicProfile"), FragmentRef: 2}, + {Kind: ast.FieldName, FieldName: []byte("bio")}, + }, + want: "query.search.$1User.profile.$2PublicProfile.bio", + wantNoRef: "query.search.$User.profile.$PublicProfile.bio", + }, + { + name: "inline fragments work in subscription operations", + path: ast.Path{ + {Kind: ast.FieldName, FieldName: []byte("subscription")}, + {Kind: ast.FieldName, FieldName: []byte("messageAdded")}, + {Kind: ast.InlineFragmentName, FieldName: 
[]byte("TextMessage"), FragmentRef: 1}, + {Kind: ast.FieldName, FieldName: []byte("text")}, + }, + want: "subscription.messageAdded.$1TextMessage.text", + wantNoRef: "subscription.messageAdded.$TextMessage.text", + }, + { + name: "combines fields, array indexes, and fragments in correct order", + path: ast.Path{ + {Kind: ast.FieldName, FieldName: []byte("query")}, + {Kind: ast.FieldName, FieldName: []byte("items")}, + {Kind: ast.ArrayIndex, ArrayIndex: 0}, + {Kind: ast.InlineFragmentName, FieldName: []byte("Product"), FragmentRef: 1}, + {Kind: ast.FieldName, FieldName: []byte("variants")}, + {Kind: ast.ArrayIndex, ArrayIndex: 2}, + {Kind: ast.FieldName, FieldName: []byte("price")}, + }, + want: "query.items.0.$1Product.variants.2.price", + wantNoRef: "query.items.0.$Product.variants.2.price", + }, + { + name: "handles deeply nested paths with multiple fragments and arrays", + path: ast.Path{ + {Kind: ast.FieldName, FieldName: []byte("query")}, + {Kind: ast.FieldName, FieldName: []byte("organization")}, + {Kind: ast.FieldName, FieldName: []byte("teams")}, + {Kind: ast.ArrayIndex, ArrayIndex: 1}, + {Kind: ast.InlineFragmentName, FieldName: []byte("EngineeringTeam"), FragmentRef: 1}, + {Kind: ast.FieldName, FieldName: []byte("members")}, + {Kind: ast.ArrayIndex, ArrayIndex: 0}, + {Kind: ast.InlineFragmentName, FieldName: []byte("Developer"), FragmentRef: 2}, + {Kind: ast.FieldName, FieldName: []byte("languages")}, + }, + want: "query.organization.teams.1.$1EngineeringTeam.members.0.$2Developer.languages", + wantNoRef: "query.organization.teams.1.$EngineeringTeam.members.0.$Developer.languages", + }, + { + name: "combines aliases and inline fragments", + path: ast.Path{ + {Kind: ast.FieldName, FieldName: []byte("mutation")}, + {Kind: ast.FieldName, FieldName: []byte("myUser")}, // aliased field + {Kind: ast.InlineFragmentName, FieldName: []byte("AdminUser"), FragmentRef: 1}, + {Kind: ast.FieldName, FieldName: []byte("permissions")}, + }, + want: 
"mutation.myUser.$1AdminUser.permissions", + wantNoRef: "mutation.myUser.$AdminUser.permissions", + }, + { + name: "zero fragment refs are included in output", + path: ast.Path{ + {Kind: ast.FieldName, FieldName: []byte("query")}, + {Kind: ast.FieldName, FieldName: []byte("entity")}, + {Kind: ast.InlineFragmentName, FieldName: []byte("Node"), FragmentRef: 0}, + {Kind: ast.FieldName, FieldName: []byte("id")}, + }, + want: "query.entity.$0Node.id", + wantNoRef: "query.entity.$Node.id", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := tt.path.DotDelimitedString() + assert.Equal(t, tt.want, got) + + gotNoRef := tt.path.DotDelimitedStringWithoutFragmentRefs() + assert.Equal(t, tt.wantNoRef, gotNoRef) + }) + } +} diff --git a/v2/pkg/astnormalization/astnormalization.go b/v2/pkg/astnormalization/astnormalization.go index 04631f8398..7989b055e5 100644 --- a/v2/pkg/astnormalization/astnormalization.go +++ b/v2/pkg/astnormalization/astnormalization.go @@ -70,7 +70,6 @@ package astnormalization import ( "github.com/wundergraph/graphql-go-tools/v2/pkg/ast" - "github.com/wundergraph/graphql-go-tools/v2/pkg/astnormalization/uploads" "github.com/wundergraph/graphql-go-tools/v2/pkg/astvisitor" "github.com/wundergraph/graphql-go-tools/v2/pkg/operationreport" ) @@ -245,7 +244,8 @@ func (o *OperationNormalizer) setupOperationWalkers() { if o.options.extractVariables { extractVariablesWalker := astvisitor.NewWalkerWithID(8, "ExtractVariables") - extractVariables(&extractVariablesWalker) + // disabling field arg mapping as it's only necessary on the variable normalizer + extractVariables(&extractVariablesWalker, false) o.operationWalkers = append(o.operationWalkers, walkerStage{ name: "extractVariables", walker: &extractVariablesWalker, @@ -345,61 +345,6 @@ func (o *OperationNormalizer) NormalizeNamedOperation(operation, definition *ast } } -type VariablesNormalizer struct { - firstDetectUnused *astvisitor.Walker - secondExtract *astvisitor.Walker 
- thirdDeleteUnused *astvisitor.Walker - fourthCoerce *astvisitor.Walker - variablesExtractionVisitor *variablesExtractionVisitor -} - -func NewVariablesNormalizer() *VariablesNormalizer { - // delete unused modifying variables refs, - // so it is safer to run it sequentially with the extraction - thirdDeleteUnused := astvisitor.NewWalkerWithID(8, "DeleteUnusedVariables") - del := deleteUnusedVariables(&thirdDeleteUnused) - - // register variable usage detection on the first stage - // and pass usage information to the deletion visitor - // so it keeps variables that are defined but not used at all - // ensuring that validation can still catch them - firstDetectUnused := astvisitor.NewWalkerWithID(8, "DetectVariableUsage") - detectVariableUsage(&firstDetectUnused, del) - - secondExtract := astvisitor.NewWalkerWithID(8, "ExtractVariables") - variablesExtractionVisitor := extractVariables(&secondExtract) - extractVariablesDefaultValue(&secondExtract) - - fourthCoerce := astvisitor.NewWalkerWithID(0, "VariablesCoercion") - inputCoercionForList(&fourthCoerce) - - return &VariablesNormalizer{ - firstDetectUnused: &firstDetectUnused, - secondExtract: &secondExtract, - thirdDeleteUnused: &thirdDeleteUnused, - fourthCoerce: &fourthCoerce, - variablesExtractionVisitor: variablesExtractionVisitor, - } -} - -func (v *VariablesNormalizer) NormalizeOperation(operation, definition *ast.Document, report *operationreport.Report) []uploads.UploadPathMapping { - v.firstDetectUnused.Walk(operation, definition, report) - if report.HasErrors() { - return nil - } - v.secondExtract.Walk(operation, definition, report) - if report.HasErrors() { - return nil - } - v.thirdDeleteUnused.Walk(operation, definition, report) - if report.HasErrors() { - return nil - } - v.fourthCoerce.Walk(operation, definition, report) - - return v.variablesExtractionVisitor.uploadsPath -} - type fragmentCycleVisitor struct { *astvisitor.Walker diff --git a/v2/pkg/astnormalization/astnormalization_test.go 
b/v2/pkg/astnormalization/astnormalization_test.go index 2bc22e84ac..3c13fd7280 100644 --- a/v2/pkg/astnormalization/astnormalization_test.go +++ b/v2/pkg/astnormalization/astnormalization_test.go @@ -1018,7 +1018,7 @@ func TestVariablesNormalizer(t *testing.T) { operationDocument := unsafeparser.ParseGraphqlDocumentString(input) operationDocument.Input.Variables = []byte(`{}`) - normalizer := NewVariablesNormalizer() + normalizer := NewVariablesNormalizer(VariablesNormalizerOptions{EnableFieldArgumentMapping: false}) report := operationreport.Report{} normalizer.NormalizeOperation(&operationDocument, &definitionDocument, &report) require.False(t, report.HasErrors(), report.Error()) @@ -1040,9 +1040,10 @@ func TestVariablesNormalizer(t *testing.T) { operationDocument := unsafeparser.ParseGraphqlDocumentString(`mutation Foo($varOne: [Input2!]! $varTwo: Input2!) { hello(arg: {twoList: $varOne two: $varTwo}) }`) operationDocument.Input.Variables = []byte(`{"varOne":[{"oneList":[{"list":[null,null],"value":null}],"one":{"list":[null],"value":null}}],"varTwo":{"oneList":[{"list":[null,null],"value":null}],"one":{"list":[null],"value":null}}}`) - normalizer := NewVariablesNormalizer() + // create normalizer without field arg mapping + normalizer := NewVariablesNormalizer(VariablesNormalizerOptions{EnableFieldArgumentMapping: false}) report := operationreport.Report{} - uploadsMapping := normalizer.NormalizeOperation(&operationDocument, &definitionDocument, &report) + result := normalizer.NormalizeOperation(&operationDocument, &definitionDocument, &report) require.False(t, report.HasErrors(), report.Error()) out := unsafeprinter.Print(&operationDocument) @@ -1060,7 +1061,346 @@ func TestVariablesNormalizer(t *testing.T) { {VariableName: "a", OriginalUploadPath: "variables.varTwo.oneList.0.value", NewUploadPath: "variables.a.two.oneList.0.value"}, {VariableName: "a", OriginalUploadPath: "variables.varTwo.one.list.0", NewUploadPath: "variables.a.two.one.list.0"}, 
{VariableName: "a", OriginalUploadPath: "variables.varTwo.one.value", NewUploadPath: "variables.a.two.one.value"}, - }, uploadsMapping) + }, result.UploadsMapping) + + // Verify field argument mapping is not populated + assert.Nil(t, result.FieldArgumentMapping) + }) + + t.Run("field argument mapping", func(t *testing.T) { + const fieldArgMappingSchema = ` + type Query { + viewer: User + user(id: ID!, active: Boolean): User + users(name: String!, limit: Int!, offset: Int): [User!]! + paginatedUsers(paging: Paging!): [String!]! + node(id: ID!): Node + } + input Paging { + limit: Int! + offset: Int! + } + interface Node { + id: ID! + User: UserContainer + } + type UserContainer { + posts(limit: Int!): [Post!]! + } + type User implements Node { + id: ID! + name: String! + posts(limit: Int!): [Post!]! + items(limit: Int!): [Item!]! + User: UserContainer + } + type Admin implements Node { + id: ID! + role: String! + items(limit: Int!): [Item!]! + User: UserContainer + } + type Post { + id: ID! + title: String! + } + type Item { + id: ID! + name: String! + } + ` + + testCases := []struct { + name string + operation string + variables string + expectedMapping FieldArgumentMapping + }{ + { + name: "mapping references the name of the variable", + operation: ` + query GetUser($userId: ID!) 
{ + user(id: $userId) { name } + }`, + variables: `{"userId": "123"}`, + expectedMapping: FieldArgumentMapping{"query.user.id": "userId"}, + }, + { + name: "mapping references an extracted variable on literal values", + operation: ` + query GetUser { + user(id: "123") { name } + }`, + variables: `{}`, + expectedMapping: FieldArgumentMapping{"query.user.id": "a"}, + }, + { + name: "mapping references an extracted variable on multiple literal values", + operation: ` + query GetUsers { + firstAlias: user(id: "user-1") { name } + secondAlias: user(id: "user-2") { name } + }`, + variables: `{}`, + expectedMapping: FieldArgumentMapping{ + "query.firstAlias.id": "a", + "query.secondAlias.id": "b", + }, + }, + { + name: "mapping references an extracted variable on multiple variable values", + operation: ` + query GetUsers($id1: ID!, $id2: ID!) { + firstAlias: user(id: $id1) { name } + secondAlias: user(id: $id2) { name } + }`, + variables: `{"id1": "user-1", "id2": "user-2"}`, + expectedMapping: FieldArgumentMapping{ + "query.firstAlias.id": "id1", + "query.secondAlias.id": "id2", + }, + }, + { + name: "mapping correctly builds paths on nested field arguments", + operation: ` + query GetUserPosts($userId: ID!, $limit: Int!) { + user(id: $userId) { + posts(limit: $limit) { title } + } + }`, + variables: `{"userId": "123", "limit": 10}`, + expectedMapping: FieldArgumentMapping{ + "query.user.id": "userId", + "query.user.posts.limit": "limit", + }, + }, + { + name: "multiple variables refs used correctly in mapping", + operation: ` + query SearchUsers($name: String!, $limit: Int!, $offset: Int) { + users(name: $name, limit: $limit, offset: $offset) { id } + }`, + variables: `{"name": "john", "limit": 10, "offset": 5}`, + expectedMapping: FieldArgumentMapping{ + "query.users.name": "name", + "query.users.limit": "limit", + "query.users.offset": "offset", + }, + }, + { + name: "mixed inline and variables get correctly mapped", + operation: ` + query GetUser($userId: ID!) 
{ + user(id: $userId, active: true) { name } + }`, + variables: `{"userId": "123"}`, + expectedMapping: FieldArgumentMapping{ + "query.user.id": "userId", + "query.user.active": "a", + }, + }, + { + name: "empty mapping when no arguments", + operation: ` + query GetViewer { + viewer { id name } + }`, + variables: `{}`, + expectedMapping: FieldArgumentMapping{}, + }, + { + name: "reused literal values are recorded for all field arguments", + operation: ` + query GetUsers { + firstAlias: user(id: "123") { name } + secondAlias: user(id: "123") { name } + }`, + variables: `{}`, + expectedMapping: FieldArgumentMapping{ + "query.firstAlias.id": "a", + "query.secondAlias.id": "a", + }, + }, + { + name: "multiple reused literal values with different values are all recorded", + operation: ` + query GetUsers { + firstAlias: user(id: "123") { name } + secondAlias: user(id: "123") { name } + thirdAlias: user(id: "456") { name } + fourthAlias: user(id: "456") { name } + }`, + variables: `{}`, + expectedMapping: FieldArgumentMapping{ + "query.firstAlias.id": "a", + "query.secondAlias.id": "a", + "query.thirdAlias.id": "b", + "query.fourthAlias.id": "b", + }, + }, + { + name: "inline fragment with arguments includes dollar prefix in path", + operation: ` + query GetNode($id: ID!, $limit: Int!) { + node(id: $id) { + id + ... on User { + name + posts(limit: $limit) { title } + } + } + }`, + variables: `{"id": "123", "limit": 10}`, + expectedMapping: FieldArgumentMapping{ + "query.node.id": "id", + "query.node.$User.posts.limit": "limit", + }, + }, + { + name: "multiple inline fragments with arguments each get dollar prefix", + operation: ` + query GetNode($id: ID!, $userLimit: Int!, $adminLimit: Int!) { + node(id: $id) { + id + ... on User { + name + items(limit: $userLimit) { name } + } + ... 
on Admin { + role + items(limit: $adminLimit) { name } + } + } + }`, + variables: `{"id": "123", "userLimit": 5, "adminLimit": 10}`, + expectedMapping: FieldArgumentMapping{ + "query.node.id": "id", + "query.node.$User.items.limit": "userLimit", + "query.node.$Admin.items.limit": "adminLimit", + }, + }, + { + name: "inline fragment with literal argument gets extracted variable", + operation: ` + query GetNode($id: ID!) { + node(id: $id) { + id + ... on User { + name + posts(limit: 20) { title } + } + } + }`, + variables: `{"id": "123"}`, + expectedMapping: FieldArgumentMapping{ + "query.node.id": "id", + "query.node.$User.posts.limit": "a", + }, + }, + { + name: "nested inline fragments include all dollar prefixes in path", + operation: ` + query GetNode($id: ID!, $limit: Int!) { + node(id: $id) { + ... on User { + User { + posts(limit: $limit) { title } + } + } + } + }`, + variables: `{"id": "123", "limit": 10}`, + expectedMapping: FieldArgumentMapping{ + "query.node.id": "id", + "query.node.$User.User.posts.limit": "limit", + }, + }, + { + name: "inline fragment in aliased field includes alias in path", + operation: ` + query GetNode($id: ID!, $limit: Int!) { + nodeAlias: node(id: $id) { + id + ... on User { + name + posts(limit: $limit) { title } + } + } + }`, + variables: `{"id": "123", "limit": 10}`, + expectedMapping: FieldArgumentMapping{ + "query.nodeAlias.id": "id", + "query.nodeAlias.$User.posts.limit": "limit", + }, + }, + { + name: "multiple inline fragments with mixed variables and literals", + operation: ` + query GetNode($id: ID!, $userLimit: Int!) { + node(id: $id) { + id + ... on User { + items(limit: $userLimit) { name } + } + ... 
on Admin { + items(limit: 15) { name } + } + } + }`, + variables: `{"id": "123", "userLimit": 5}`, + expectedMapping: FieldArgumentMapping{ + "query.node.id": "id", + "query.node.$User.items.limit": "userLimit", + "query.node.$Admin.items.limit": "a", + }, + }, + { + // There is no hard requirement for excluding directive arguments + // but at the moment this is not supported because it would involve more + // complex changes to the variable normalizer. + name: "directive arguments are not included in field argument mapping", + operation: ` + query GetUser($userId: ID!, $limit: Int!, $includeItems: Boolean!, $skipPosts: Boolean!) { + user(id: $userId) { + name + posts(limit: $limit) @skip(if: $skipPosts) { title } + items(limit: 10) @include(if: $includeItems) { name } + } + }`, + variables: `{"userId": "123", "limit": 5, "includeItems": true, "skipPosts": false}`, + expectedMapping: FieldArgumentMapping{ + "query.user.id": "userId", + "query.user.posts.limit": "limit", + "query.user.items.limit": "a", + }, + }, + { + name: "inline input object argument is extracted as a variable", + operation: ` + query Search($limit: Int!, $offset: Int!) 
{ + paginatedUsers(paging: {limit: $limit, offset: $offset}) + }`, + variables: `{"limit": 10, "offset": 5}`, + expectedMapping: FieldArgumentMapping{ + "query.paginatedUsers.paging": "a", + }, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + definitionDocument := unsafeparser.ParseGraphqlDocumentStringWithBaseSchema(fieldArgMappingSchema) + operationDocument := unsafeparser.ParseGraphqlDocumentString(tc.operation) + operationDocument.Input.Variables = []byte(tc.variables) + + normalizer := NewVariablesNormalizer(VariablesNormalizerOptions{EnableFieldArgumentMapping: true}) + report := operationreport.Report{} + result := normalizer.NormalizeOperation(&operationDocument, &definitionDocument, &report) + require.False(t, report.HasErrors(), report.Error()) + + assert.Equal(t, tc.expectedMapping, result.FieldArgumentMapping) + }) + } }) } @@ -1089,7 +1429,7 @@ var mustString = func(str string, err error) string { } type registerNormalizeFunc func(walker *astvisitor.Walker) -type registerNormalizeVariablesFunc func(walker *astvisitor.Walker) *variablesExtractionVisitor +type registerNormalizeVariablesFunc func(walker *astvisitor.Walker, withFieldArgMapping bool) *variablesExtractionVisitor type registerNormalizeVariablesDefaulValueFunc func(walker *astvisitor.Walker) *variablesDefaultValueExtractionVisitor type registerNormalizeDeleteVariablesFunc func(walker *astvisitor.Walker) *deleteUnusedVariablesVisitor @@ -1185,7 +1525,7 @@ var runWithVariablesExtraction = func(t *testing.T, normalizeFunc registerNormal t.Helper() runWithVariablesAssert(t, func(walker *astvisitor.Walker) { - normalizeFunc(walker) + normalizeFunc(walker, false) }, definition, operation, operationName, expectedOutput, variablesInput, expectedVariables, additionalNormalizers...) 
} @@ -1193,7 +1533,7 @@ var runWithVariablesExtractionAndPreNormalize = func(t *testing.T, normalizeFunc t.Helper() runWithVariablesAssertAndPreNormalize(t, func(walker *astvisitor.Walker) { - normalizeFunc(walker) + normalizeFunc(walker, false) }, definition, operation, operationName, expectedOutput, variablesInput, expectedVariables, prerequisites...) } @@ -1289,7 +1629,7 @@ var runWithExpectedErrors = func(t *testing.T, normalizeFunc registerNormalizeVa report := operationreport.Report{} walker := astvisitor.NewWalker(48) - normalizeFunc(&walker) + normalizeFunc(&walker, false) for _, fn := range additionalNormalizers { fn(&walker) diff --git a/v2/pkg/astnormalization/variables_extraction.go b/v2/pkg/astnormalization/variables_extraction.go index 6a8e7b8165..f3950b42c5 100644 --- a/v2/pkg/astnormalization/variables_extraction.go +++ b/v2/pkg/astnormalization/variables_extraction.go @@ -14,10 +14,13 @@ import ( "github.com/wundergraph/graphql-go-tools/v2/pkg/internal/unsafebytes" ) -func extractVariables(walker *astvisitor.Walker) *variablesExtractionVisitor { +func extractVariables(walker *astvisitor.Walker, withFieldArgMapping bool) *variablesExtractionVisitor { visitor := &variablesExtractionVisitor{ Walker: walker, uploadFinder: uploads.NewUploadFinder(), + fieldArgumentMapping: fieldArgMappingOption{ + enabled: withFieldArgMapping, + }, } walker.RegisterEnterDocumentVisitor(visitor) walker.RegisterEnterArgumentVisitor(visitor) @@ -34,6 +37,12 @@ type variablesExtractionVisitor struct { extractedVariableTypeRefs []int uploadFinder *uploads.UploadFinder uploadsPath []uploads.UploadPathMapping + fieldArgumentMapping fieldArgMappingOption +} + +type fieldArgMappingOption struct { + enabled bool + result FieldArgumentMapping } func (v *variablesExtractionVisitor) EnterArgument(ref int) { @@ -63,6 +72,10 @@ func (v *variablesExtractionVisitor) EnterArgument(ref int) { if len(uploadsMapping) > 0 { v.uploadsPath = append(v.uploadsPath, uploadsMapping...) 
} + // Record the field argument mapping for existing variables + if v.fieldArgumentMapping.enabled { + v.recordFieldArgumentMapping(ref, "") + } return } @@ -78,6 +91,12 @@ func (v *variablesExtractionVisitor) EnterArgument(ref int) { value := v.operation.AddVariableValue(variable) v.operation.Arguments[ref].Value.Kind = ast.ValueKindVariable v.operation.Arguments[ref].Value.Ref = value + + // Record the field argument mapping for already extracted variable + if v.fieldArgumentMapping.enabled { + v.recordFieldArgumentMapping(ref, string(name)) + } + return } variableNameBytes := v.operation.GenerateUnusedVariableDefinitionName(v.Ancestors[0].Ref) @@ -141,12 +160,54 @@ func (v *variablesExtractionVisitor) EnterArgument(ref int) { v.operation.OperationDefinitions[v.Ancestors[0].Ref].VariableDefinitions.Refs = append(v.operation.OperationDefinitions[v.Ancestors[0].Ref].VariableDefinitions.Refs, newVariableRef) v.operation.OperationDefinitions[v.Ancestors[0].Ref].HasVariableDefinitions = true + + // Record the field argument mapping for the newly extracted variable + if v.fieldArgumentMapping.enabled { + v.recordFieldArgumentMapping(ref, string(variableNameBytes)) + } } func (v *variablesExtractionVisitor) EnterDocument(operation, definition *ast.Document) { v.operation, v.definition = operation, definition v.extractedVariables = v.extractedVariables[:0] v.extractedVariableTypeRefs = v.extractedVariableTypeRefs[:0] + if v.fieldArgumentMapping.enabled { + v.fieldArgumentMapping.result = make(FieldArgumentMapping) + } +} + +// recordFieldArgumentMapping records the currently visited field argument +// of v in v.fieldArgumentMapping, alongside its matching variable name or literal value. +// If varName is empty, it looks up the variable name from the operation or stores the literal value. +func (v *variablesExtractionVisitor) recordFieldArgumentMapping(ref int, varName string) { + // Guard to prevent nil panics. 
+ // v.fieldArgumentMapping.result should always have a value if we are here + // but if this changes by accident this prevents a nil panic. + if v.fieldArgumentMapping.result == nil { + return + } + + fieldPath := v.Path.DotDelimitedStringWithoutFragmentRefs() + if fieldPath == "" { + return + } + argName := v.operation.ArgumentNameString(ref) + key := fieldPath + "." + argName + + if varName != "" { + // Variable name was provided (from extraction) + v.fieldArgumentMapping.result[key] = varName + return + } + + if v.operation.Arguments[ref].Value.Kind != ast.ValueKindVariable { + // We should not land here because at this point all argument values should have become variables. + // If we land here for whatever reason we ignore this field argument. + return + } + + varName = v.operation.VariableValueNameString(v.operation.Arguments[ref].Value.Ref) + v.fieldArgumentMapping.result[key] = varName } func (v *variablesExtractionVisitor) variableExists(variableValue []byte, inputValueDefinition int) (exists bool, name []byte, definition int) { diff --git a/v2/pkg/astnormalization/variables_extraction_test.go b/v2/pkg/astnormalization/variables_extraction_test.go index 4066e296e8..ca0e86441f 100644 --- a/v2/pkg/astnormalization/variables_extraction_test.go +++ b/v2/pkg/astnormalization/variables_extraction_test.go @@ -559,8 +559,8 @@ func TestVariablesExtraction(t *testing.T) { t.Run("arg has inline object value with upload passed via variable", func(t *testing.T) { var visitor *variablesExtractionVisitor - register := func(walker *astvisitor.Walker) *variablesExtractionVisitor { - visitor = extractVariables(walker) + register := func(walker *astvisitor.Walker, withFieldArgMapping bool) *variablesExtractionVisitor { + visitor = extractVariables(walker, withFieldArgMapping) return visitor } @@ -580,8 +580,8 @@ func TestVariablesExtraction(t *testing.T) { t.Run("arg has inline objects with variables which have nested file uploads", func(t *testing.T) { var visitor 
*variablesExtractionVisitor - register := func(walker *astvisitor.Walker) *variablesExtractionVisitor { - visitor = extractVariables(walker) + register := func(walker *astvisitor.Walker, withFieldArgMapping bool) *variablesExtractionVisitor { + visitor = extractVariables(walker, withFieldArgMapping) return visitor } @@ -616,8 +616,8 @@ func TestVariablesExtraction(t *testing.T) { t.Run("arg of type upload", func(t *testing.T) { var visitor *variablesExtractionVisitor - register := func(walker *astvisitor.Walker) *variablesExtractionVisitor { - visitor = extractVariables(walker) + register := func(walker *astvisitor.Walker, withFieldArgMapping bool) *variablesExtractionVisitor { + visitor = extractVariables(walker, withFieldArgMapping) return visitor } @@ -637,8 +637,8 @@ func TestVariablesExtraction(t *testing.T) { t.Run("arg has nested upload in a variable", func(t *testing.T) { var visitor *variablesExtractionVisitor - register := func(walker *astvisitor.Walker) *variablesExtractionVisitor { - visitor = extractVariables(walker) + register := func(walker *astvisitor.Walker, withFieldArgMapping bool) *variablesExtractionVisitor { + visitor = extractVariables(walker, withFieldArgMapping) return visitor } diff --git a/v2/pkg/astnormalization/variables_mapper_test.go b/v2/pkg/astnormalization/variables_mapper_test.go index f11f704010..1d65d19f5c 100644 --- a/v2/pkg/astnormalization/variables_mapper_test.go +++ b/v2/pkg/astnormalization/variables_mapper_test.go @@ -47,7 +47,7 @@ func TestVariablesMapper(t *testing.T) { WithRemoveFragmentDefinitions(), WithRemoveUnusedVariables(), ) - variablesNormalizer := NewVariablesNormalizer() + variablesNormalizer := NewVariablesNormalizer(VariablesNormalizerOptions{EnableFieldArgumentMapping: false}) testCases := []struct { name string diff --git a/v2/pkg/astnormalization/variables_normalization.go b/v2/pkg/astnormalization/variables_normalization.go new file mode 100644 index 0000000000..06d7a51ee4 --- /dev/null +++ 
b/v2/pkg/astnormalization/variables_normalization.go @@ -0,0 +1,97 @@ +package astnormalization + +import ( + "github.com/wundergraph/graphql-go-tools/v2/pkg/ast" + "github.com/wundergraph/graphql-go-tools/v2/pkg/astnormalization/uploads" + "github.com/wundergraph/graphql-go-tools/v2/pkg/astvisitor" + "github.com/wundergraph/graphql-go-tools/v2/pkg/operationreport" +) + +type VariablesNormalizer struct { + firstDetectUnused *astvisitor.Walker + secondExtract *astvisitor.Walker + thirdDeleteUnused *astvisitor.Walker + fourthCoerce *astvisitor.Walker + variablesExtractionVisitor *variablesExtractionVisitor +} + +// VariablesNormalizerOptions configures a VariablesNormalizer. +type VariablesNormalizerOptions struct { + // EnableFieldArgumentMapping enables field argument mapping. + // If true, the mapping is included in the NormalizeOperation result. + EnableFieldArgumentMapping bool +} + +// NewVariablesNormalizer creates a new variable normalizer. +func NewVariablesNormalizer(options VariablesNormalizerOptions) *VariablesNormalizer { + // delete unused modifying variables refs, + // so it is safer to run it sequentially with the extraction + thirdDeleteUnused := astvisitor.NewWalkerWithID(8, "DeleteUnusedVariables") + del := deleteUnusedVariables(&thirdDeleteUnused) + + // register variable usage detection on the first stage + // and pass usage information to the deletion visitor + // so it keeps variables that are defined but not used at all + // ensuring that validation can still catch them + firstDetectUnused := astvisitor.NewWalkerWithID(8, "DetectVariableUsage") + detectVariableUsage(&firstDetectUnused, del) + + secondExtract := astvisitor.NewWalkerWithID(8, "ExtractVariables") + variablesExtractionVisitor := extractVariables(&secondExtract, options.EnableFieldArgumentMapping) + extractVariablesDefaultValue(&secondExtract) + + fourthCoerce := astvisitor.NewWalkerWithID(0, "VariablesCoercion") + inputCoercionForList(&fourthCoerce) + + return 
&VariablesNormalizer{ + firstDetectUnused: &firstDetectUnused, + secondExtract: &secondExtract, + thirdDeleteUnused: &thirdDeleteUnused, + fourthCoerce: &fourthCoerce, + variablesExtractionVisitor: variablesExtractionVisitor, + } +} + +// NormalizeOperation processes GraphQL operation variables. +// It detects and removes unused variables, extracts variables from inline values +// and coerces variable values. +// https://spec.graphql.org/September2025/#sec-Coercing-Variable-Values +// It modifies the operation in place and +// returns metadata including field argument mappings and upload paths. +// Field argument mapping is done only when it is enabled via VariablesNormalizerOptions, +// else VariablesNormalizationResult.FieldArgumentMapping will be nil. +// Any errors encountered during normalization are reported via the report parameter. +func (v *VariablesNormalizer) NormalizeOperation(operation, definition *ast.Document, report *operationreport.Report) VariablesNormalizationResult { + v.firstDetectUnused.Walk(operation, definition, report) + if report.HasErrors() { + return VariablesNormalizationResult{} + } + v.secondExtract.Walk(operation, definition, report) + if report.HasErrors() { + return VariablesNormalizationResult{} + } + v.thirdDeleteUnused.Walk(operation, definition, report) + if report.HasErrors() { + return VariablesNormalizationResult{} + } + v.fourthCoerce.Walk(operation, definition, report) + + return VariablesNormalizationResult{ + UploadsMapping: v.variablesExtractionVisitor.uploadsPath, + FieldArgumentMapping: v.variablesExtractionVisitor.fieldArgumentMapping.result, + } +} + +// FieldArgumentMapping maps the path of field arguments inside an operation +// to the name of their mapped variables. +// Key: "rootOperationType.fieldPath.argumentName" (e.g., "query.posts.limit") +// Value: name of variable of field argument after variable normalization. 
+type FieldArgumentMapping map[string]string + +// VariablesNormalizationResult contains the results of variable normalization. +type VariablesNormalizationResult struct { + // UploadsMapping tracks file upload variables and how their paths change during normalization. + UploadsMapping []uploads.UploadPathMapping + // FieldArgumentMapping maps field arguments to their variable names for fast lookup. + FieldArgumentMapping FieldArgumentMapping +} diff --git a/v2/pkg/astnormalization/variables_unused_deletion_test.go b/v2/pkg/astnormalization/variables_unused_deletion_test.go index c0323b978f..c372045567 100644 --- a/v2/pkg/astnormalization/variables_unused_deletion_test.go +++ b/v2/pkg/astnormalization/variables_unused_deletion_test.go @@ -41,7 +41,7 @@ func TestVariablesUnusedDeletion(t *testing.T) { del := deleteUnusedVariables(&secondWalker) detectVariableUsage(&firstWalker, del) - extractVariables(&firstWalker) + extractVariables(&firstWalker, false) rep := &operationreport.Report{} firstWalker.Walk(&operationDocument, &definitionDocument, rep) diff --git a/v2/pkg/engine/datasource/graphql_datasource/graphql_datasource.go b/v2/pkg/engine/datasource/graphql_datasource/graphql_datasource.go index f4268d1f6a..b15aa2e96f 100644 --- a/v2/pkg/engine/datasource/graphql_datasource/graphql_datasource.go +++ b/v2/pkg/engine/datasource/graphql_datasource/graphql_datasource.go @@ -784,7 +784,7 @@ func (p *Planner[T]) allowField(ref int) bool { // In addition, we skip field if its path are equal to planner parent path // This is required to correctly plan on datasource which has corresponding child/root node, // but we don't need to add it to the query as we are in the nested request - currentPath := fmt.Sprintf("%s.%s", p.visitor.Walker.Path.DotDelimitedString(), fieldAliasOrName) + currentPath := p.visitor.Walker.Path.DotDelimitedString() + "." 
+ fieldAliasOrName if p.dataSourcePlannerConfig.ParentPath != "query" && p.dataSourcePlannerConfig.ParentPath == currentPath { p.DebugPrint("allowField: false path:", currentPath, "is equal to parent path", p.dataSourcePlannerConfig.ParentPath) return false diff --git a/v2/pkg/engine/plan/datasource_filter_collect_nodes_visitor.go b/v2/pkg/engine/plan/datasource_filter_collect_nodes_visitor.go index 82f05fddea..6f981d9523 100644 --- a/v2/pkg/engine/plan/datasource_filter_collect_nodes_visitor.go +++ b/v2/pkg/engine/plan/datasource_filter_collect_nodes_visitor.go @@ -596,8 +596,8 @@ func (f *treeBuilderVisitor) collectFieldInfo(fieldRef int) { onFragment := f.walker.Path.EndsWithFragment() parentPathWithoutFragment := f.walker.Path.WithoutInlineFragmentNames().DotDelimitedString() - currentPath := fmt.Sprintf("%s.%s", parentPath, fieldAliasOrName) - currentPathWithoutFragments := fmt.Sprintf("%s.%s", parentPathWithoutFragment, fieldAliasOrName) + currentPath := parentPath + "." + fieldAliasOrName + currentPathWithoutFragments := parentPathWithoutFragment + "." + fieldAliasOrName f.fieldInfo[fieldRef] = fieldInfo{ typeName: typeName,