diff --git a/.github/workflows/unit-test.yaml b/.github/workflows/unit-test.yaml new file mode 100644 index 0000000..399b5c6 --- /dev/null +++ b/.github/workflows/unit-test.yaml @@ -0,0 +1,26 @@ +name: Go Test + +on: + push: + branches: [ "main" ] + pull_request: + branches: [ "main" ] + +jobs: + test: + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Go 1.25 + uses: actions/setup-go@v5 + with: + go-version: '1.25' + cache: true + + - name: Get dependencies + run: go mod download + + - name: Run Tests + run: go test -v -race -cover ./... \ No newline at end of file diff --git a/ast/ast.go b/ast/ast.go new file mode 100644 index 0000000..edf56aa --- /dev/null +++ b/ast/ast.go @@ -0,0 +1,49 @@ +package ast + +import ( + "bytes" +) + +type Node interface { + TokenLiteral() string + String() string +} + +type Definition interface { + Node + definitionNode() +} + +type Value interface { + Node + valueNode() +} + +type Type interface { + Node + typeNode() +} + +type Selection interface { + Node + selectionNode() +} + +type Document struct { + Definitions []Definition +} + +func (d *Document) TokenLiteral() string { + if len(d.Definitions) > 0 { + return d.Definitions[0].TokenLiteral() + } + return "" +} + +func (d *Document) String() string { + var out bytes.Buffer + for _, s := range d.Definitions { + out.WriteString(s.String()) + } + return out.String() +} diff --git a/ast/ast_test.go b/ast/ast_test.go new file mode 100644 index 0000000..f264682 --- /dev/null +++ b/ast/ast_test.go @@ -0,0 +1,109 @@ +package ast_test + +import ( + "testing" + + "github.com/n9te9/graphql-parser/ast" + "github.com/n9te9/graphql-parser/token" +) + +func TestDocument_String(t *testing.T) { + tests := []struct { + name string + node *ast.Document + expected string + }{ + { + name: "Document with Single Query", + node: &ast.Document{ + Definitions: []ast.Definition{ + &ast.OperationDefinition{ + Token: token.Token{Literal: "query"}, + 
Operation: ast.Query, + Name: &ast.Name{Token: token.Token{Literal: "MyQuery"}, Value: "MyQuery"}, + SelectionSet: []ast.Selection{ + &ast.Field{ + Token: token.Token{Literal: "user"}, + Name: &ast.Name{Token: token.Token{Literal: "user"}, Value: "user"}, + }, + }, + }, + }, + }, + expected: "query MyQuery { user }", + }, + { + name: "Document with Multiple Definitions", + node: &ast.Document{ + Definitions: []ast.Definition{ + &ast.OperationDefinition{ + Token: token.Token{Literal: "query"}, + Operation: ast.Query, + Name: &ast.Name{Token: token.Token{Literal: "GetU"}, Value: "GetU"}, + SelectionSet: []ast.Selection{ + &ast.Field{ + Token: token.Token{Literal: "u"}, + Name: &ast.Name{Token: token.Token{Literal: "u"}, Value: "u"}, + }, + }, + }, + &ast.OperationDefinition{ + Token: token.Token{Literal: "mutation"}, + Operation: ast.Mutation, + Name: &ast.Name{Token: token.Token{Literal: "SaveU"}, Value: "SaveU"}, + SelectionSet: []ast.Selection{ + &ast.Field{ + Token: token.Token{Literal: "s"}, + Name: &ast.Name{Token: token.Token{Literal: "s"}, Value: "s"}, + }, + }, + }, + }, + }, + expected: "query GetU { u }mutation SaveU { s }", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := tt.node.String(); got != tt.expected { + t.Errorf("String() mismatch.\n got: %s\n expected: %s", got, tt.expected) + } + }) + } +} + +func TestDocument_TokenLiteral(t *testing.T) { + tests := []struct { + name string + node *ast.Document + expected string + }{ + { + name: "Document with Definitions", + node: &ast.Document{ + Definitions: []ast.Definition{ + &ast.OperationDefinition{ + Token: token.Token{Literal: "query"}, + }, + }, + }, + expected: "query", + }, + { + name: "Empty Document", + node: &ast.Document{ + Definitions: []ast.Definition{}, + }, + expected: "", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := tt.node.TokenLiteral(); got != tt.expected { + t.Errorf("TokenLiteral() mismatch.\n got: 
%s\n expected: %s", got, tt.expected) + } + }) + } +} diff --git a/ast/definition.go b/ast/definition.go new file mode 100644 index 0000000..f9e8736 --- /dev/null +++ b/ast/definition.go @@ -0,0 +1,245 @@ +package ast + +import ( + "bytes" + "strings" + + "github.com/n9te9/graphql-parser/token" +) + +type OperationType string + +const ( + Query OperationType = "query" + Mutation OperationType = "mutation" + Subscription OperationType = "subscription" +) + +type OperationDefinition struct { + Token token.Token + Operation OperationType + Name *Name + VariableDefinitions []*VariableDefinition + Directives []*Directive + SelectionSet []Selection +} + +func (o *OperationDefinition) TokenLiteral() string { return o.Token.Literal } +func (o *OperationDefinition) String() string { + var out bytes.Buffer + + if o.Operation != "" { + out.WriteString(string(o.Operation)) + out.WriteString(" ") + } + + if o.Name != nil { + out.WriteString(o.Name.String()) + } + + if len(o.VariableDefinitions) > 0 { + out.WriteString("(") + defs := []string{} + for _, d := range o.VariableDefinitions { + defs = append(defs, d.String()) + } + out.WriteString(strings.Join(defs, ", ")) + out.WriteString(")") + } + + if len(o.Directives) > 0 { + out.WriteString(" ") + dirs := []string{} + for _, d := range o.Directives { + dirs = append(dirs, d.String()) + } + out.WriteString(strings.Join(dirs, " ")) + } + + if len(o.SelectionSet) > 0 { + out.WriteString(" ") + out.WriteString(selectionSetToString(o.SelectionSet)) + } + + return out.String() +} +func (o *OperationDefinition) definitionNode() {} + +type VariableDefinition struct { + Token token.Token + Variable *Variable + Type Type + DefaultValue Value + Directives []*Directive +} + +func (v *VariableDefinition) TokenLiteral() string { return v.Token.Literal } +func (v *VariableDefinition) String() string { + var out bytes.Buffer + out.WriteString(v.Variable.String()) + out.WriteString(": ") + out.WriteString(v.Type.String()) + + if v.DefaultValue 
!= nil { + out.WriteString(" = ") + out.WriteString(v.DefaultValue.String()) + } + + if len(v.Directives) > 0 { + out.WriteString(" ") + dirs := []string{} + for _, d := range v.Directives { + dirs = append(dirs, d.String()) + } + out.WriteString(strings.Join(dirs, " ")) + } + + return out.String() +} + +type Field struct { + Token token.Token + Alias *Name + Name *Name + Arguments []*Argument + Directives []*Directive + SelectionSet []Selection +} + +func (f *Field) TokenLiteral() string { return f.Token.Literal } +func (f *Field) String() string { + var out bytes.Buffer + + if f.Alias != nil { + out.WriteString(f.Alias.String() + ": ") + } + out.WriteString(f.Name.String()) + + if len(f.Arguments) > 0 { + out.WriteString("(") + args := []string{} + for _, a := range f.Arguments { + args = append(args, a.String()) + } + out.WriteString(strings.Join(args, ", ")) + out.WriteString(")") + } + + if len(f.Directives) > 0 { + out.WriteString(" ") + dirs := []string{} + for _, d := range f.Directives { + dirs = append(dirs, d.String()) + } + out.WriteString(strings.Join(dirs, " ")) + } + + if len(f.SelectionSet) > 0 { + out.WriteString(" ") + out.WriteString(selectionSetToString(f.SelectionSet)) + } + + return out.String() +} +func (f *Field) selectionNode() {} + +type FragmentSpread struct { + Token token.Token + Name *Name + Directives []*Directive +} + +func (fs *FragmentSpread) TokenLiteral() string { return fs.Token.Literal } +func (fs *FragmentSpread) String() string { + var out bytes.Buffer + out.WriteString("...") + out.WriteString(fs.Name.String()) + + if len(fs.Directives) > 0 { + out.WriteString(" ") + dirs := []string{} + for _, d := range fs.Directives { + dirs = append(dirs, d.String()) + } + out.WriteString(strings.Join(dirs, " ")) + } + + return out.String() +} +func (fs *FragmentSpread) selectionNode() {} + +type InlineFragment struct { + Token token.Token + TypeCondition *NamedType + Directives []*Directive + SelectionSet []Selection +} + +func (i 
*InlineFragment) TokenLiteral() string { return i.Token.Literal } +func (i *InlineFragment) String() string { + var out bytes.Buffer + out.WriteString("...") + + if i.TypeCondition != nil { + out.WriteString(" on ") + out.WriteString(i.TypeCondition.String()) + } + + if len(i.Directives) > 0 { + out.WriteString(" ") + dirs := []string{} + for _, d := range i.Directives { + dirs = append(dirs, d.String()) + } + out.WriteString(strings.Join(dirs, " ")) + } + + out.WriteString(" ") + out.WriteString(selectionSetToString(i.SelectionSet)) + return out.String() +} +func (i *InlineFragment) selectionNode() {} + +func selectionSetToString(selections []Selection) string { + var out bytes.Buffer + out.WriteString("{ ") + sels := []string{} + for _, s := range selections { + sels = append(sels, s.String()) + } + out.WriteString(strings.Join(sels, " ")) + out.WriteString(" }") + return out.String() +} + +type FragmentDefinition struct { + Token token.Token + Name *Name + TypeCondition *NamedType + Directives []*Directive + SelectionSet []Selection +} + +func (fd *FragmentDefinition) TokenLiteral() string { return fd.Token.Literal } +func (fd *FragmentDefinition) String() string { + var out bytes.Buffer + out.WriteString("fragment ") + out.WriteString(fd.Name.String()) + out.WriteString(" on ") + out.WriteString(fd.TypeCondition.String()) + + if len(fd.Directives) > 0 { + out.WriteString(" ") + dirs := []string{} + for _, d := range fd.Directives { + dirs = append(dirs, d.String()) + } + out.WriteString(strings.Join(dirs, " ")) + } + + out.WriteString(" ") + out.WriteString(selectionSetToString(fd.SelectionSet)) + return out.String() +} + +func (fd *FragmentDefinition) definitionNode() {} diff --git a/ast/definition_test.go b/ast/definition_test.go new file mode 100644 index 0000000..30b3af5 --- /dev/null +++ b/ast/definition_test.go @@ -0,0 +1,165 @@ +package ast_test + +import ( + "testing" + + "github.com/n9te9/graphql-parser/ast" + "github.com/n9te9/graphql-parser/token" +) 
+ +func TestDefinition_String(t *testing.T) { + tests := []struct { + name string + node ast.Node + expected string + }{ + { + name: "Simple Field", + node: &ast.Field{ + Token: token.Token{Literal: "name"}, + Name: &ast.Name{Token: token.Token{Literal: "name"}, Value: "name"}, + }, + expected: "name", + }, + { + name: "Field with Alias and Arguments", + node: &ast.Field{ + Token: token.Token{Literal: "user"}, + Alias: &ast.Name{Token: token.Token{Literal: "me"}, Value: "me"}, + Name: &ast.Name{Token: token.Token{Literal: "user"}, Value: "user"}, + Arguments: []*ast.Argument{ + { + Token: token.Token{Literal: "id"}, + Name: &ast.Name{Token: token.Token{Literal: "id"}, Value: "id"}, + Value: &ast.IntValue{Token: token.Token{Literal: "1"}, Value: 1}, + }, + }, + }, + expected: "me: user(id: 1)", + }, + { + name: "Field with Directives and SelectionSet", + node: &ast.Field{ + Token: token.Token{Literal: "hero"}, + Name: &ast.Name{Token: token.Token{Literal: "hero"}, Value: "hero"}, + Directives: []*ast.Directive{ + { + Token: token.Token{Literal: "@"}, + Name: "skip", + Arguments: []*ast.Argument{ + { + Token: token.Token{Literal: "if"}, + Name: &ast.Name{Token: token.Token{Literal: "if"}, Value: "if"}, + Value: &ast.BooleanValue{Token: token.Token{Literal: "true"}, Value: true}, + }, + }, + }, + }, + SelectionSet: []ast.Selection{ + &ast.Field{ + Token: token.Token{Literal: "name"}, + Name: &ast.Name{Token: token.Token{Literal: "name"}, Value: "name"}, + }, + }, + }, + expected: "hero @skip(if: true) { name }", + }, + { + name: "Fragment Spread", + node: &ast.FragmentSpread{ + Token: token.Token{Literal: "..."}, + Name: &ast.Name{Token: token.Token{Literal: "MyFragment"}, Value: "MyFragment"}, + }, + expected: "...MyFragment", + }, + { + name: "Inline Fragment", + node: &ast.InlineFragment{ + Token: token.Token{Literal: "..."}, + TypeCondition: &ast.NamedType{ + Token: token.Token{Literal: "User"}, + Name: &ast.Name{Token: token.Token{Literal: "User"}, Value: 
"User"}, + }, + SelectionSet: []ast.Selection{ + &ast.Field{ + Token: token.Token{Literal: "id"}, + Name: &ast.Name{Token: token.Token{Literal: "id"}, Value: "id"}, + }, + }, + }, + expected: "... on User { id }", + }, + { + name: "Operation Definition (Query)", + node: &ast.OperationDefinition{ + Token: token.Token{Literal: "query"}, + Operation: ast.Query, + Name: &ast.Name{Token: token.Token{Literal: "MyQuery"}, Value: "MyQuery"}, + VariableDefinitions: []*ast.VariableDefinition{ + { + Token: token.Token{Literal: "$"}, + Variable: &ast.Variable{Token: token.Token{Literal: "$"}, Name: "id"}, + Type: &ast.NonNullType{ + Token: token.Token{Literal: "!"}, + Type: &ast.NamedType{ + Token: token.Token{Literal: "ID"}, + Name: &ast.Name{Token: token.Token{Literal: "ID"}, Value: "ID"}, + }, + }, + }, + }, + SelectionSet: []ast.Selection{ + &ast.Field{ + Token: token.Token{Literal: "user"}, + Name: &ast.Name{Token: token.Token{Literal: "user"}, Value: "user"}, + }, + }, + }, + expected: "query MyQuery($id: ID!) 
{ user }", + }, + { + name: "Operation Definition (Mutation)", + node: &ast.OperationDefinition{ + Token: token.Token{Literal: "mutation"}, + Operation: ast.Mutation, + SelectionSet: []ast.Selection{ + &ast.Field{ + Token: token.Token{Literal: "createUser"}, + Name: &ast.Name{Token: token.Token{Literal: "createUser"}, Value: "createUser"}, + }, + }, + }, + expected: "mutation { createUser }", + }, + { + name: "Fragment Definition", + node: &ast.FragmentDefinition{ + Token: token.Token{Literal: "fragment"}, + Name: &ast.Name{Token: token.Token{Literal: "UserParts"}, Value: "UserParts"}, + TypeCondition: &ast.NamedType{ + Token: token.Token{Literal: "User"}, + Name: &ast.Name{Token: token.Token{Literal: "User"}, Value: "User"}, + }, + SelectionSet: []ast.Selection{ + &ast.Field{ + Token: token.Token{Literal: "id"}, + Name: &ast.Name{Token: token.Token{Literal: "id"}, Value: "id"}, + }, + &ast.Field{ + Token: token.Token{Literal: "name"}, + Name: &ast.Name{Token: token.Token{Literal: "name"}, Value: "name"}, + }, + }, + }, + expected: "fragment UserParts on User { id name }", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := tt.node.String(); got != tt.expected { + t.Errorf("String() mismatch.\n got: %s\n expected: %s", got, tt.expected) + } + }) + } +} diff --git a/ast/directive.go b/ast/directive.go new file mode 100644 index 0000000..5cf14ef --- /dev/null +++ b/ast/directive.go @@ -0,0 +1,43 @@ +package ast + +import ( + "bytes" + "strings" + + "github.com/n9te9/graphql-parser/token" +) + +type Directive struct { + Token token.Token + Name string + Arguments []*Argument +} + +func (d *Directive) TokenLiteral() string { return d.Token.Literal } +func (d *Directive) String() string { + var out bytes.Buffer + out.WriteString("@") + out.WriteString(d.Name) + + if len(d.Arguments) > 0 { + out.WriteString("(") + args := []string{} + for _, a := range d.Arguments { + args = append(args, a.String()) + } + 
out.WriteString(strings.Join(args, ", ")) + out.WriteString(")") + } + return out.String() +} + +type Argument struct { + Token token.Token + Name *Name + Value Value +} + +func (a *Argument) TokenLiteral() string { return a.Token.Literal } +func (a *Argument) String() string { + return a.Name.String() + ": " + a.Value.String() +} diff --git a/ast/directive_test.go b/ast/directive_test.go new file mode 100644 index 0000000..322fc7a --- /dev/null +++ b/ast/directive_test.go @@ -0,0 +1,69 @@ +package ast_test + +import ( + "testing" + + "github.com/n9te9/graphql-parser/ast" + "github.com/n9te9/graphql-parser/token" +) + +func TestDirective_String(t *testing.T) { + tests := []struct { + name string + node ast.Node + expected string + }{ + { + name: "Directive without Arguments", + node: &ast.Directive{ + Token: token.Token{Literal: "@"}, + Name: "deprecated", + }, + expected: "@deprecated", + }, + { + name: "Directive with Single Argument", + node: &ast.Directive{ + Token: token.Token{Literal: "@"}, + Name: "key", + Arguments: []*ast.Argument{ + { + Token: token.Token{Literal: "fields"}, + Name: &ast.Name{Token: token.Token{Literal: "fields"}, Value: "fields"}, + Value: &ast.StringValue{Token: token.Token{Literal: "id"}, Value: "id"}, + }, + }, + }, + expected: "@key(fields: \"id\")", + }, + { + name: "Directive with Multiple Arguments", + node: &ast.Directive{ + Token: token.Token{Literal: "@"}, + Name: "myDirective", + Arguments: []*ast.Argument{ + { + Token: token.Token{Literal: "arg1"}, + Name: &ast.Name{Token: token.Token{Literal: "arg1"}, Value: "arg1"}, + Value: &ast.IntValue{Token: token.Token{Literal: "1"}, Value: 1}, + }, + { + Token: token.Token{Literal: "arg2"}, + Name: &ast.Name{Token: token.Token{Literal: "arg2"}, Value: "arg2"}, + Value: &ast.BooleanValue{Token: token.Token{Literal: "true"}, Value: true}, + }, + }, + }, + // Note: Argument order depends on implementation logic, assuming preserving order here + expected: "@myDirective(arg1: 1, arg2: 
true)", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := tt.node.String(); got != tt.expected { + t.Errorf("String() mismatch.\n got: %s\n expected: %s", got, tt.expected) + } + }) + } +} diff --git a/ast/schema.go b/ast/schema.go new file mode 100644 index 0000000..a35e4ea --- /dev/null +++ b/ast/schema.go @@ -0,0 +1,497 @@ +package ast + +import ( + "bytes" + + "github.com/n9te9/graphql-parser/token" +) + +type ObjectTypeDefinition struct { + Description string + Token token.Token + Name *Name + Interfaces []*NamedType + Directives []*Directive + Fields []*FieldDefinition +} + +func (def *ObjectTypeDefinition) TokenLiteral() string { return def.Token.Literal } +func (def *ObjectTypeDefinition) String() string { + var out bytes.Buffer + + out.WriteString("type ") + out.WriteString(def.Name.String()) + + if len(def.Interfaces) > 0 { + out.WriteString(" implements ") + for i, iface := range def.Interfaces { + if i > 0 { + out.WriteString(" & ") + } + out.WriteString(iface.String()) + } + } + + if len(def.Directives) > 0 { + for _, d := range def.Directives { + out.WriteString(" ") + out.WriteString(d.String()) + } + } + + if len(def.Fields) > 0 { + out.WriteString(" {") + for _, f := range def.Fields { + out.WriteString(" ") + out.WriteString(f.String()) + } + out.WriteString(" }") + } + + return out.String() +} + +func (def *ObjectTypeDefinition) definitionNode() {} + +type FieldDefinition struct { + Description string + Token token.Token + Name *Name + Arguments []*InputValueDefinition + Type Type + Directives []*Directive +} + +func (fd *FieldDefinition) TokenLiteral() string { return fd.Token.Literal } +func (fd *FieldDefinition) String() string { + var out bytes.Buffer + + out.WriteString(fd.Name.String()) + + if len(fd.Arguments) > 0 { + out.WriteString("(") + for i, arg := range fd.Arguments { + if i > 0 { + out.WriteString(", ") + } + out.WriteString(arg.String()) + } + out.WriteString(")") + } + + out.WriteString(": 
") + out.WriteString(fd.Type.String()) + + if len(fd.Directives) > 0 { + for _, d := range fd.Directives { + out.WriteString(" ") + out.WriteString(d.String()) + } + } + + return out.String() +} + +type InputValueDefinition struct { + Description string + Token token.Token + Name *Name + Type Type + DefaultValue Value + Directives []*Directive +} + +func (ivd *InputValueDefinition) TokenLiteral() string { return ivd.Token.Literal } +func (ivd *InputValueDefinition) String() string { + var out bytes.Buffer + + out.WriteString(ivd.Name.String()) + out.WriteString(": ") + out.WriteString(ivd.Type.String()) + + if ivd.DefaultValue != nil { + out.WriteString(" = ") + out.WriteString(ivd.DefaultValue.String()) + } + + if len(ivd.Directives) > 0 { + for _, d := range ivd.Directives { + out.WriteString(" ") + out.WriteString(d.String()) + } + } + + return out.String() +} + +type InterfaceTypeDefinition struct { + Description string + Token token.Token + Name *Name + Interfaces []*NamedType + Directives []*Directive + Fields []*FieldDefinition +} + +func (def *InterfaceTypeDefinition) TokenLiteral() string { return def.Token.Literal } +func (def *InterfaceTypeDefinition) String() string { + var out bytes.Buffer + + out.WriteString("interface ") + out.WriteString(def.Name.String()) + + if len(def.Interfaces) > 0 { + out.WriteString(" implements ") + for i, iface := range def.Interfaces { + if i > 0 { + out.WriteString(" & ") + } + out.WriteString(iface.String()) + } + } + + if len(def.Directives) > 0 { + for _, d := range def.Directives { + out.WriteString(" ") + out.WriteString(d.String()) + } + } + + if len(def.Fields) > 0 { + out.WriteString(" {") + for _, f := range def.Fields { + out.WriteString(" ") + out.WriteString(f.String()) + } + out.WriteString(" }") + } + + return out.String() +} + +func (def *InterfaceTypeDefinition) definitionNode() {} + +type UnionTypeDefinition struct { + Description string + Token token.Token + Name *Name + Directives []*Directive + Types 
[]*NamedType +} + +func (def *UnionTypeDefinition) TokenLiteral() string { return def.Token.Literal } +func (def *UnionTypeDefinition) String() string { + var out bytes.Buffer + out.WriteString("union ") + out.WriteString(def.Name.String()) + + if len(def.Directives) > 0 { + for _, d := range def.Directives { + out.WriteString(" ") + out.WriteString(d.String()) + } + } + + if len(def.Types) > 0 { + out.WriteString(" = ") + for i, t := range def.Types { + if i > 0 { + out.WriteString(" | ") + } + out.WriteString(t.String()) + } + } + return out.String() +} + +func (def *UnionTypeDefinition) definitionNode() {} + +type EnumTypeDefinition struct { + Description string + Token token.Token + Name *Name + Directives []*Directive + Values []*EnumValueDefinition +} + +func (def *EnumTypeDefinition) TokenLiteral() string { return def.Token.Literal } +func (def *EnumTypeDefinition) String() string { + var out bytes.Buffer + out.WriteString("enum ") + out.WriteString(def.Name.String()) + + if len(def.Directives) > 0 { + for _, d := range def.Directives { + out.WriteString(" ") + out.WriteString(d.String()) + } + } + + if len(def.Values) > 0 { + out.WriteString(" {") + for _, v := range def.Values { + out.WriteString(" ") + out.WriteString(v.String()) + } + out.WriteString(" }") + } + return out.String() +} + +type EnumValueDefinition struct { + Description string + Token token.Token + Name *Name + Directives []*Directive +} + +func (v *EnumValueDefinition) TokenLiteral() string { return v.Token.Literal } +func (v *EnumValueDefinition) String() string { + var out bytes.Buffer + out.WriteString(v.Name.String()) + if len(v.Directives) > 0 { + for _, d := range v.Directives { + out.WriteString(" ") + out.WriteString(d.String()) + } + } + return out.String() +} + +func (def *EnumTypeDefinition) definitionNode() {} + +type ScalarTypeDefinition struct { + Description string + Token token.Token + Name *Name + Directives []*Directive +} + +func (def *ScalarTypeDefinition) 
TokenLiteral() string { return def.Token.Literal } +func (def *ScalarTypeDefinition) String() string { + var out bytes.Buffer + out.WriteString("scalar ") + out.WriteString(def.Name.String()) + if len(def.Directives) > 0 { + for _, d := range def.Directives { + out.WriteString(" ") + out.WriteString(d.String()) + } + } + return out.String() +} + +func (def *ScalarTypeDefinition) definitionNode() {} + +type InputObjectTypeDefinition struct { + Description string + Token token.Token + Name *Name + Directives []*Directive + Fields []*InputValueDefinition +} + +func (def *InputObjectTypeDefinition) TokenLiteral() string { return def.Token.Literal } +func (def *InputObjectTypeDefinition) String() string { + var out bytes.Buffer + out.WriteString("input ") + out.WriteString(def.Name.String()) + + if len(def.Directives) > 0 { + for _, d := range def.Directives { + out.WriteString(" ") + out.WriteString(d.String()) + } + } + + if len(def.Fields) > 0 { + out.WriteString(" {") + for _, f := range def.Fields { + out.WriteString(" ") + out.WriteString(f.String()) + } + out.WriteString(" }") + } + return out.String() +} + +func (def *InputObjectTypeDefinition) definitionNode() {} + +type DirectiveDefinition struct { + Description string + Token token.Token + Name *Name + Arguments []*InputValueDefinition + Repeatable bool + Locations []*Name +} + +func (def *DirectiveDefinition) TokenLiteral() string { return def.Token.Literal } +func (def *DirectiveDefinition) String() string { + var out bytes.Buffer + out.WriteString("directive @") + out.WriteString(def.Name.String()) + + if len(def.Arguments) > 0 { + out.WriteString("(") + for i, arg := range def.Arguments { + if i > 0 { + out.WriteString(", ") + } + out.WriteString(arg.String()) + } + out.WriteString(")") + } + + if def.Repeatable { + out.WriteString(" repeatable") + } + + out.WriteString(" on ") + for i, loc := range def.Locations { + if i > 0 { + out.WriteString(" | ") + } + out.WriteString(loc.String()) + } + return 
out.String() +} + +func (def *DirectiveDefinition) definitionNode() {} + +type SchemaDefinition struct { + Token token.Token // 'schema' + Description string + Directives []*Directive + OperationTypes []*OperationTypeDefinition +} + +func (def *SchemaDefinition) TokenLiteral() string { return def.Token.Literal } +func (def *SchemaDefinition) String() string { + var out bytes.Buffer + out.WriteString("schema") + + if len(def.Directives) > 0 { + for _, d := range def.Directives { + out.WriteString(" ") + out.WriteString(d.String()) + } + } + + out.WriteString(" {") + for _, op := range def.OperationTypes { + out.WriteString(" ") + out.WriteString(op.String()) + } + out.WriteString(" }") + return out.String() +} + +func (def *SchemaDefinition) definitionNode() {} + +type OperationTypeDefinition struct { + Operation token.TokenType + Type *NamedType +} + +func (op *OperationTypeDefinition) String() string { + return op.Operation.String() + ": " + op.Type.String() +} + +type SchemaExtension struct { + Token token.Token + Directives []*Directive + OperationTypes []*OperationTypeDefinition +} + +func (e *SchemaExtension) TokenLiteral() string { return e.Token.Literal } +func (e *SchemaExtension) String() string { + return "" +} + +func (e *SchemaExtension) definitionNode() {} + +type ScalarTypeExtension struct { + Token token.Token + Name *Name + Directives []*Directive +} + +func (e *ScalarTypeExtension) TokenLiteral() string { return e.Token.Literal } +func (e *ScalarTypeExtension) String() string { + return "" +} + +func (e *ScalarTypeExtension) definitionNode() {} + +type ObjectTypeExtension struct { + Token token.Token + Name *Name + Interfaces []*NamedType + Directives []*Directive + Fields []*FieldDefinition +} + +func (e *ObjectTypeExtension) TokenLiteral() string { return e.Token.Literal } +func (e *ObjectTypeExtension) String() string { + return "extend type " + e.Name.String() + " ..." 
+} + +func (e *ObjectTypeExtension) definitionNode() {} + +type InterfaceTypeExtension struct { + Token token.Token + Name *Name + Interfaces []*NamedType + Directives []*Directive + Fields []*FieldDefinition +} + +func (e *InterfaceTypeExtension) TokenLiteral() string { return e.Token.Literal } +func (e *InterfaceTypeExtension) String() string { + return "extend interface " + e.Name.String() + " ..." +} + +func (e *InterfaceTypeExtension) definitionNode() {} + +type UnionTypeExtension struct { + Token token.Token + Name *Name + Directives []*Directive + Types []*NamedType +} + +func (e *UnionTypeExtension) TokenLiteral() string { return e.Token.Literal } +func (e *UnionTypeExtension) String() string { + return "extend union " + e.Name.String() + " ..." +} + +func (e *UnionTypeExtension) definitionNode() {} + +type EnumTypeExtension struct { + Token token.Token + Name *Name + Directives []*Directive + Values []*EnumValueDefinition +} + +func (e *EnumTypeExtension) TokenLiteral() string { return e.Token.Literal } +func (e *EnumTypeExtension) String() string { + return "extend enum " + e.Name.String() + " ..." +} + +func (e *EnumTypeExtension) definitionNode() {} + +type InputObjectTypeExtension struct { + Token token.Token + Name *Name + Directives []*Directive + Fields []*InputValueDefinition +} + +func (e *InputObjectTypeExtension) TokenLiteral() string { return e.Token.Literal } +func (e *InputObjectTypeExtension) String() string { + return "extend input " + e.Name.String() + " ..." 
+} + +func (e *InputObjectTypeExtension) definitionNode() {} diff --git a/ast/type.go b/ast/type.go new file mode 100644 index 0000000..2f3f014 --- /dev/null +++ b/ast/type.go @@ -0,0 +1,42 @@ +package ast + +import ( + "bytes" + + "github.com/n9te9/graphql-parser/token" +) + +type NamedType struct { + Token token.Token + Name *Name +} + +func (n *NamedType) TokenLiteral() string { return n.Token.Literal } +func (n *NamedType) String() string { return n.Name.String() } +func (n *NamedType) typeNode() {} + +type ListType struct { + Token token.Token + Type Type +} + +func (l *ListType) TokenLiteral() string { return l.Token.Literal } +func (l *ListType) String() string { + var out bytes.Buffer + out.WriteString("[") + out.WriteString(l.Type.String()) + out.WriteString("]") + return out.String() +} +func (l *ListType) typeNode() {} + +type NonNullType struct { + Token token.Token + Type Type +} + +func (n *NonNullType) TokenLiteral() string { return n.Token.Literal } +func (n *NonNullType) String() string { + return n.Type.String() + "!" 
+} +func (n *NonNullType) typeNode() {} diff --git a/ast/type_test.go b/ast/type_test.go new file mode 100644 index 0000000..3249dd0 --- /dev/null +++ b/ast/type_test.go @@ -0,0 +1,83 @@ +package ast_test + +import ( + "testing" + + "github.com/n9te9/graphql-parser/ast" + "github.com/n9te9/graphql-parser/token" +) + +func TestType_String(t *testing.T) { + tests := []struct { + name string + node ast.Node + expected string + }{ + { + name: "Named Type", + node: &ast.NamedType{ + Token: token.Token{Literal: "String"}, + Name: &ast.Name{Token: token.Token{Literal: "String"}, Value: "String"}, + }, + expected: "String", + }, + { + name: "List Type", + node: &ast.ListType{ + Token: token.Token{Literal: "["}, + Type: &ast.NamedType{ + Token: token.Token{Literal: "Int"}, + Name: &ast.Name{Token: token.Token{Literal: "Int"}, Value: "Int"}, + }, + }, + expected: "[Int]", + }, + { + name: "Non-Null Type (Named)", + node: &ast.NonNullType{ + Token: token.Token{Literal: "!"}, + Type: &ast.NamedType{ + Token: token.Token{Literal: "ID"}, + Name: &ast.Name{Token: token.Token{Literal: "ID"}, Value: "ID"}, + }, + }, + expected: "ID!", + }, + { + name: "Non-Null Type (List)", + node: &ast.NonNullType{ + Token: token.Token{Literal: "!"}, + Type: &ast.ListType{ + Token: token.Token{Literal: "["}, + Type: &ast.NamedType{ + Token: token.Token{Literal: "User"}, + Name: &ast.Name{Token: token.Token{Literal: "User"}, Value: "User"}, + }, + }, + }, + expected: "[User]!", + }, + { + name: "List of Non-Null Type", + node: &ast.ListType{ + Token: token.Token{Literal: "["}, + Type: &ast.NonNullType{ + Token: token.Token{Literal: "!"}, + Type: &ast.NamedType{ + Token: token.Token{Literal: "String"}, + Name: &ast.Name{Token: token.Token{Literal: "String"}, Value: "String"}, + }, + }, + }, + expected: "[String!]", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := tt.node.String(); got != tt.expected { + t.Errorf("String() mismatch.\n got: %s\n expected: %s", 
got, tt.expected) + } + }) + } +} diff --git a/ast/value.go b/ast/value.go new file mode 100644 index 0000000..59a666a --- /dev/null +++ b/ast/value.go @@ -0,0 +1,128 @@ +package ast + +import ( + "bytes" + "fmt" + "strings" + + "github.com/n9te9/graphql-parser/token" +) + +type IntValue struct { + Token token.Token + Value int64 +} + +func (i *IntValue) TokenLiteral() string { return i.Token.Literal } + func (i *IntValue) String() string { return i.Token.Literal } +func (i *IntValue) valueNode() {} + +type FloatValue struct { + Token token.Token + Value float64 +} + +func (f *FloatValue) TokenLiteral() string { return f.Token.Literal } +func (f *FloatValue) String() string { return f.Token.Literal } +func (f *FloatValue) valueNode() {} + +type StringValue struct { + Token token.Token + Value string +} + +func (s *StringValue) TokenLiteral() string { return s.Token.Literal } +func (s *StringValue) String() string { return fmt.Sprintf("%q", s.Value) } // output with quotation marks +func (s *StringValue) valueNode() {} + +type BooleanValue struct { + Token token.Token + Value bool +} + +func (b *BooleanValue) TokenLiteral() string { return b.Token.Literal } +func (b *BooleanValue) String() string { return b.Token.Literal } +func (b *BooleanValue) valueNode() {} + +type NullValue struct { + Token token.Token +} + +func (n *NullValue) TokenLiteral() string { return n.Token.Literal } +func (n *NullValue) String() string { return "null" } +func (n *NullValue) valueNode() {} + +type EnumValue struct { + Token token.Token + Value string +} + +func (e *EnumValue) TokenLiteral() string { return e.Token.Literal } +func (e *EnumValue) String() string { return e.Value } +func (e *EnumValue) valueNode() {} + +type ListValue struct { + Token token.Token + Values []Value +} + +func (l *ListValue) TokenLiteral() string { return l.Token.Literal } +func (l *ListValue) String() string { + var out bytes.Buffer + vals := []string{} + for _, v := range l.Values { + vals = append(vals, v.String()) + } + 
out.WriteString("[") + out.WriteString(strings.Join(vals, ", ")) + out.WriteString("]") + return out.String() +} +func (l *ListValue) valueNode() {} + +type ObjectValue struct { + Token token.Token + Fields []*ObjectField +} + +func (o *ObjectValue) TokenLiteral() string { return o.Token.Literal } +func (o *ObjectValue) String() string { + var out bytes.Buffer + fields := []string{} + for _, f := range o.Fields { + fields = append(fields, f.String()) + } + out.WriteString("{") + out.WriteString(strings.Join(fields, ", ")) + out.WriteString("}") + return out.String() +} +func (o *ObjectValue) valueNode() {} + +type ObjectField struct { + Token token.Token + Name *Name + Value Value +} + +func (o *ObjectField) TokenLiteral() string { return o.Token.Literal } +func (o *ObjectField) String() string { + return o.Name.String() + ": " + o.Value.String() +} + +type Variable struct { + Token token.Token + Name string +} + +func (v *Variable) TokenLiteral() string { return v.Token.Literal } +func (v *Variable) String() string { return "$" + v.Name } +func (v *Variable) valueNode() {} + +type Name struct { + Token token.Token + Value string +} + +func (n *Name) TokenLiteral() string { return n.Token.Literal } +func (n *Name) String() string { return n.Value } diff --git a/ast/value_test.go b/ast/value_test.go new file mode 100644 index 0000000..054eaf0 --- /dev/null +++ b/ast/value_test.go @@ -0,0 +1,118 @@ +package ast_test + +import ( + "testing" + + "github.com/n9te9/graphql-parser/ast" + "github.com/n9te9/graphql-parser/token" +) + +func TestValue_String(t *testing.T) { + tests := []struct { + name string + node ast.Node + expected string + }{ + { + name: "Integer Value", + node: &ast.IntValue{ + Token: token.Token{Literal: "123"}, + Value: 123, + }, + expected: "123", + }, + { + name: "Float Value", + node: &ast.FloatValue{ + Token: token.Token{Literal: "12.34"}, + Value: 12.34, + }, + expected: "12.34", + }, + { + name: "String Value", + node: &ast.StringValue{ + Token: 
token.Token{Literal: "hello"}, + Value: "hello", + }, + expected: "\"hello\"", + }, + { + name: "Boolean Value True", + node: &ast.BooleanValue{ + Token: token.Token{Literal: "true"}, + Value: true, + }, + expected: "true", + }, + { + name: "Boolean Value False", + node: &ast.BooleanValue{ + Token: token.Token{Literal: "false"}, + Value: false, + }, + expected: "false", + }, + { + name: "Null Value", + node: &ast.NullValue{ + Token: token.Token{Literal: "null"}, + }, + expected: "null", + }, + { + name: "Enum Value", + node: &ast.EnumValue{ + Token: token.Token{Literal: "USER_ROLE"}, + Value: "USER_ROLE", + }, + expected: "USER_ROLE", + }, + { + name: "Variable", + node: &ast.Variable{ + Token: token.Token{Literal: "$"}, + Name: "userId", + }, + expected: "$userId", + }, + { + name: "List Value", + node: &ast.ListValue{ + Token: token.Token{Literal: "["}, + Values: []ast.Value{ + &ast.IntValue{Token: token.Token{Literal: "1"}, Value: 1}, + &ast.IntValue{Token: token.Token{Literal: "2"}, Value: 2}, + }, + }, + expected: "[1, 2]", + }, + { + name: "Object Value", + node: &ast.ObjectValue{ + Token: token.Token{Literal: "{"}, + Fields: []*ast.ObjectField{ + { + Token: token.Token{Literal: "id"}, + Name: &ast.Name{Token: token.Token{Literal: "id"}, Value: "id"}, + Value: &ast.IntValue{Token: token.Token{Literal: "1"}, Value: 1}, + }, + { + Token: token.Token{Literal: "name"}, + Name: &ast.Name{Token: token.Token{Literal: "name"}, Value: "name"}, + Value: &ast.StringValue{Token: token.Token{Literal: "test"}, Value: "test"}, + }, + }, + }, + expected: "{id: 1, name: \"test\"}", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := tt.node.String(); got != tt.expected { + t.Errorf("String() mismatch.\n got: %s\n expected: %s", got, tt.expected) + } + }) + } +} diff --git a/go.mod b/go.mod new file mode 100644 index 0000000..ed70227 --- /dev/null +++ b/go.mod @@ -0,0 +1,5 @@ +module github.com/n9te9/graphql-parser + +go 1.25.4 + +require 
github.com/google/go-cmp v0.7.0 diff --git a/go.sum b/go.sum new file mode 100644 index 0000000..40e761a --- /dev/null +++ b/go.sum @@ -0,0 +1,2 @@ +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= diff --git a/lexer/lexer.go b/lexer/lexer.go new file mode 100644 index 0000000..6b0176c --- /dev/null +++ b/lexer/lexer.go @@ -0,0 +1,294 @@ +package lexer + +import ( + "github.com/n9te9/graphql-parser/token" +) + +type Lexer struct { + input []byte + position int + readPosition int + ch byte + line int +} + +func New(input string) *Lexer { + l := &Lexer{ + input: []byte(input), + line: 1, + } + l.readChar() + return l +} + +func (l *Lexer) Tokens() []token.Token { + tokens := make([]token.Token, 0) + for { + tok := l.NextToken() + tokens = append(tokens, tok) + if tok.Type == token.EOF { + break + } + } + + return tokens +} + +func (l *Lexer) NextToken() token.Token { + l.skipWhitespace() + + var tok token.Token + startPos := l.position + startLine := l.line + + switch l.ch { + case '$': + tok = l.newToken(token.DOLLAR, l.ch) + case '!': + tok = l.newToken(token.BANG, l.ch) + case '(': + tok = l.newToken(token.PAREN_L, l.ch) + case ')': + tok = l.newToken(token.PAREN_R, l.ch) + case ':': + tok = l.newToken(token.COLON, l.ch) + case '=': + tok = l.newToken(token.EQUALS, l.ch) + case '@': + tok = l.newToken(token.AT, l.ch) + case '[': + tok = l.newToken(token.BRACKET_L, l.ch) + case ']': + tok = l.newToken(token.BRACKET_R, l.ch) + case '{': + tok = l.newToken(token.BRACE_L, l.ch) + case '}': + tok = l.newToken(token.BRACE_R, l.ch) + case '|': + tok = l.newToken(token.PIPE, l.ch) + case '&': + tok = l.newToken(token.AMP, l.ch) + case '.': + if l.peekChar() == '.' && l.peekChar2() == '.' 
{ + l.readChar() + l.readChar() + tok = token.Token{Type: token.SPREAD, Literal: "...", Line: startLine} + } else { + tok = l.newToken(token.ILLEGAL, l.ch) + } + case '"': + if l.peekChar() == '"' && l.peekChar2() == '"' { + l.readChar() + l.readChar() + tok.Literal = l.readBlockString() + tok.Type = token.BLOCK_STRING + } else { + tok.Literal = l.readString() + tok.Type = token.STRING + } + tok.Line = startLine + tok.Start = startPos + tok.End = l.position + return tok + case 0: + tok.Literal = "" + tok.Type = token.EOF + tok.Line = l.line + tok.Start = startPos + tok.End = startPos + return tok + default: + if isLetter(l.ch) { + tok.Literal = l.readIdentifier() + tok.Type = token.LookupIdent(tok.Literal) + tok.Line = startLine + tok.Start = startPos + tok.End = l.position + return tok + } else if isDigit(l.ch) || l.ch == '-' { + tok.Literal, tok.Type = l.readNumber() + tok.Line = startLine + tok.Start = startPos + tok.End = l.position + return tok + } else { + tok = l.newToken(token.ILLEGAL, l.ch) + } + } + l.readChar() + + tok.Start = startPos + tok.End = l.position + tok.Line = startLine + + return tok +} + +func (l *Lexer) readChar() { + if l.readPosition >= len(l.input) { + l.ch = 0 + } else { + l.ch = l.input[l.readPosition] + } + l.position = l.readPosition + l.readPosition += 1 +} + +func (l *Lexer) peekChar() byte { + if l.readPosition >= len(l.input) { + return 0 + } + return l.input[l.readPosition] +} + +func (l *Lexer) peekChar2() byte { + if l.readPosition+1 >= len(l.input) { + return 0 + } + return l.input[l.readPosition+1] +} + +func (l *Lexer) newToken(tokenType token.TokenType, ch byte) token.Token { + return token.Token{Type: tokenType, Literal: string([]byte{ch}), Line: l.line} +} + +func (l *Lexer) readIdentifier() string { + position := l.position + for isLetter(l.ch) || isDigit(l.ch) { + l.readChar() + } + return string(l.input[position:l.position]) +} + +func (l *Lexer) readNumber() (string, token.TokenType) { + position := l.position + 
tokenType := token.INT + + if l.ch == '-' { + l.readChar() + } + + if l.ch == '0' { + l.readChar() + if isDigit(l.ch) { + return string(l.input[position:l.position]), token.ILLEGAL + } + } else { + l.readDigits() + } + + if l.ch == '.' { + tokenType = token.FLOAT + l.readChar() + l.readDigits() + } + + if l.ch == 'e' || l.ch == 'E' { + tokenType = token.FLOAT + l.readChar() + if l.ch == '+' || l.ch == '-' { + l.readChar() + } + l.readDigits() + } + + return string(l.input[position:l.position]), tokenType +} + +func (l *Lexer) readDigits() { + for isDigit(l.ch) { + l.readChar() + } +} + +func (l *Lexer) readString() string { + l.readChar() + position := l.position + for { + if l.ch == '"' || l.ch == 0 { + break + } + if l.ch == '\\' { + l.readChar() + } + l.readChar() + } + str := string(l.input[position:l.position]) + + if l.ch == '"' { + l.readChar() + } + + return str +} + +func (l *Lexer) readBlockString() string { + l.readChar() + position := l.position + for { + if l.ch == 0 { + break + } + if l.ch == '"' && l.peekChar() == '"' && l.peekChar2() == '"' { + break + } + if l.ch == '\n' { + l.line++ + } + l.readChar() + } + str := string(l.input[position:l.position]) + + // skip the ending """ + l.readChar() + l.readChar() + l.readChar() + + return str +} + +func (l *Lexer) skipWhitespace() { + for { + switch l.ch { + case ' ', '\t', ',': + l.readChar() + + case '\n': + l.line++ + l.readChar() + + case '\r': + l.line++ + if l.peekChar() == '\n' { + l.readChar() + } + l.readChar() + + case '#': + for l.ch != '\n' && l.ch != '\r' && l.ch != 0 { + l.readChar() + } + + case 0xEF: + // --- BOM (Byte Order Mark) check --- + if l.peekChar() == 0xBB && l.peekChar2() == 0xBF { + l.readChar() + l.readChar() + l.readChar() + } else { + return + } + + default: + return + } + } +} + +func isLetter(ch byte) bool { + return 'a' <= ch && ch <= 'z' || 'A' <= ch && ch <= 'Z' || ch == '_' +} + +func isDigit(ch byte) bool { + return '0' <= ch && ch <= '9' +} diff --git 
a/lexer/lexer_test.go b/lexer/lexer_test.go new file mode 100644 index 0000000..e25eb1b --- /dev/null +++ b/lexer/lexer_test.go @@ -0,0 +1,213 @@ +package lexer + +import ( + "testing" + + "github.com/google/go-cmp/cmp" + "github.com/n9te9/graphql-parser/token" +) + +func TestTokens(t *testing.T) { + tests := []struct { + name string + input string + expected []token.Token + }{ + { + name: "Punctuators", + input: "! $ ( ) ... : = @ [ ] { | } &", + expected: []token.Token{ + {Type: token.BANG, Literal: "!", Line: 1, Start: 0, End: 1}, + {Type: token.DOLLAR, Literal: "$", Line: 1, Start: 2, End: 3}, + {Type: token.PAREN_L, Literal: "(", Line: 1, Start: 4, End: 5}, + {Type: token.PAREN_R, Literal: ")", Line: 1, Start: 6, End: 7}, + {Type: token.SPREAD, Literal: "...", Line: 1, Start: 8, End: 11}, + {Type: token.COLON, Literal: ":", Line: 1, Start: 12, End: 13}, + {Type: token.EQUALS, Literal: "=", Line: 1, Start: 14, End: 15}, + {Type: token.AT, Literal: "@", Line: 1, Start: 16, End: 17}, + {Type: token.BRACKET_L, Literal: "[", Line: 1, Start: 18, End: 19}, + {Type: token.BRACKET_R, Literal: "]", Line: 1, Start: 20, End: 21}, + {Type: token.BRACE_L, Literal: "{", Line: 1, Start: 22, End: 23}, + {Type: token.PIPE, Literal: "|", Line: 1, Start: 24, End: 25}, + {Type: token.BRACE_R, Literal: "}", Line: 1, Start: 26, End: 27}, + {Type: token.AMP, Literal: "&", Line: 1, Start: 28, End: 29}, + {Type: token.EOF, Literal: "", Line: 1, Start: 29, End: 29}, + }, + }, + { + name: "Keywords", + input: `query mutation subscription fragment type input enum union +interface scalar directive extend schema implements on true false null`, + expected: []token.Token{ + {Type: token.QUERY, Literal: "query", Line: 1, Start: 0, End: 5}, + {Type: token.MUTATION, Literal: "mutation", Line: 1, Start: 6, End: 14}, + {Type: token.SUBSCRIPTION, Literal: "subscription", Line: 1, Start: 15, End: 27}, + {Type: token.FRAGMENT, Literal: "fragment", Line: 1, Start: 28, End: 36}, + {Type: token.TYPE, 
Literal: "type", Line: 1, Start: 37, End: 41}, + {Type: token.INPUT, Literal: "input", Line: 1, Start: 42, End: 47}, + {Type: token.ENUM, Literal: "enum", Line: 1, Start: 48, End: 52}, + {Type: token.UNION, Literal: "union", Line: 1, Start: 53, End: 58}, + {Type: token.INTERFACE, Literal: "interface", Line: 2, Start: 60, End: 69}, + {Type: token.SCALAR, Literal: "scalar", Line: 2, Start: 70, End: 76}, + {Type: token.DIRECTIVE, Literal: "directive", Line: 2, Start: 77, End: 86}, + {Type: token.EXTEND, Literal: "extend", Line: 2, Start: 87, End: 93}, + {Type: token.SCHEMA, Literal: "schema", Line: 2, Start: 94, End: 100}, + {Type: token.IMPLEMENTS, Literal: "implements", Line: 2, Start: 101, End: 111}, + {Type: token.ON, Literal: "on", Line: 2, Start: 112, End: 114}, + {Type: token.TRUE, Literal: "true", Line: 2, Start: 115, End: 119}, + {Type: token.FALSE, Literal: "false", Line: 2, Start: 120, End: 125}, + {Type: token.NULL, Literal: "null", Line: 2, Start: 126, End: 130}, + {Type: token.EOF, Literal: "", Line: 2, Start: 130, End: 130}, + }, + }, + { + name: "Line Terminators (CRLF and CR)", + input: "a\r\nb\rc", + expected: []token.Token{ + {Type: token.IDENT, Literal: "a", Line: 1, Start: 0, End: 1}, + {Type: token.IDENT, Literal: "b", Line: 2, Start: 3, End: 4}, + {Type: token.IDENT, Literal: "c", Line: 3, Start: 5, End: 6}, + {Type: token.EOF, Literal: "", Line: 3, Start: 6, End: 6}, + }, + }, + { + name: "Multi-byte Characters (Valid)", + input: "\"あ\"\n# 🍺\n\"\"\"\nあ\n\"\"\"", + expected: []token.Token{ + {Type: token.STRING, Literal: "あ", Line: 1, Start: 0, End: 5}, + {Type: token.BLOCK_STRING, Literal: "\nあ\n", Line: 3, Start: 13, End: 24}, + {Type: token.EOF, Literal: "", Line: 5, Start: 24, End: 24}, + }, + }, + { + name: "Multi-byte Characters (Illegal Identifier)", + input: "queryあ", + expected: []token.Token{ + {Type: token.QUERY, Literal: "query", Line: 1, Start: 0, End: 5}, + {Type: token.ILLEGAL, Literal: "\xe3", Line: 1, Start: 5, End: 6}, + {Type: 
token.ILLEGAL, Literal: "\x81", Line: 1, Start: 6, End: 7}, + {Type: token.ILLEGAL, Literal: "\x82", Line: 1, Start: 7, End: 8}, + {Type: token.EOF, Literal: "", Line: 1, Start: 8, End: 8}, + }, + }, + { + name: "Integers and Floats", + input: `0 +1234 +-56 +1.23 +-1.23 +1e5 +1.23e+4 +-1.23E-5`, + expected: []token.Token{ + {Type: token.INT, Literal: "0", Line: 1, Start: 0, End: 1}, + {Type: token.INT, Literal: "1234", Line: 2, Start: 2, End: 6}, + {Type: token.INT, Literal: "-56", Line: 3, Start: 7, End: 10}, + {Type: token.FLOAT, Literal: "1.23", Line: 4, Start: 11, End: 15}, + {Type: token.FLOAT, Literal: "-1.23", Line: 5, Start: 16, End: 21}, + {Type: token.FLOAT, Literal: "1e5", Line: 6, Start: 22, End: 25}, + {Type: token.FLOAT, Literal: "1.23e+4", Line: 7, Start: 26, End: 33}, + {Type: token.FLOAT, Literal: "-1.23E-5", Line: 8, Start: 34, End: 42}, + {Type: token.EOF, Literal: "", Line: 8, Start: 42, End: 42}, + }, + }, + { + name: "Strings", + input: `"simple" +"with \" escaped quote" +"with unicode \u1234" +"""block string""" +""" +multi +line +"""`, + expected: []token.Token{ + {Type: token.STRING, Literal: "simple", Line: 1, Start: 0, End: 8}, + {Type: token.STRING, Literal: "with \\\" escaped quote", Line: 2, Start: 9, End: 32}, + {Type: token.STRING, Literal: "with unicode \\u1234", Line: 3, Start: 33, End: 54}, + {Type: token.BLOCK_STRING, Literal: "block string", Line: 4, Start: 55, End: 73}, + {Type: token.BLOCK_STRING, Literal: "\nmulti\nline\n", Line: 5, Start: 74, End: 92}, + {Type: token.EOF, Literal: "", Line: 8, Start: 92, End: 92}, + }, + }, + { + name: "Comments and Commas (Ignored Tokens)", + input: "query, # This is a comment\n{ id }", + expected: []token.Token{ + {Type: token.QUERY, Literal: "query", Line: 1, Start: 0, End: 5}, + {Type: token.BRACE_L, Literal: "{", Line: 2, Start: 27, End: 28}, + {Type: token.IDENT, Literal: "id", Line: 2, Start: 29, End: 31}, + {Type: token.BRACE_R, Literal: "}", Line: 2, Start: 32, End: 33}, + {Type: 
token.EOF, Literal: "", Line: 2, Start: 33, End: 33}, + }, + }, + { + name: "Variables and Directives", + input: "query ($var: String) @directive", + expected: []token.Token{ + {Type: token.QUERY, Literal: "query", Line: 1, Start: 0, End: 5}, + {Type: token.PAREN_L, Literal: "(", Line: 1, Start: 6, End: 7}, + {Type: token.DOLLAR, Literal: "$", Line: 1, Start: 7, End: 8}, + {Type: token.IDENT, Literal: "var", Line: 1, Start: 8, End: 11}, + {Type: token.COLON, Literal: ":", Line: 1, Start: 11, End: 12}, + {Type: token.IDENT, Literal: "String", Line: 1, Start: 13, End: 19}, + {Type: token.PAREN_R, Literal: ")", Line: 1, Start: 19, End: 20}, + {Type: token.AT, Literal: "@", Line: 1, Start: 21, End: 22}, + {Type: token.DIRECTIVE, Literal: "directive", Line: 1, Start: 22, End: 31}, + {Type: token.EOF, Literal: "", Line: 1, Start: 31, End: 31}, + }, + }, + { + name: "Complex Schema Definition", + input: `type User implements Node { + id: ID! + name: String +}`, + expected: []token.Token{ + {Type: token.TYPE, Literal: "type", Line: 1, Start: 0, End: 4}, + {Type: token.IDENT, Literal: "User", Line: 1, Start: 5, End: 9}, + {Type: token.IMPLEMENTS, Literal: "implements", Line: 1, Start: 10, End: 20}, + {Type: token.IDENT, Literal: "Node", Line: 1, Start: 21, End: 25}, + {Type: token.BRACE_L, Literal: "{", Line: 1, Start: 26, End: 27}, + {Type: token.IDENT, Literal: "id", Line: 2, Start: 30, End: 32}, + {Type: token.COLON, Literal: ":", Line: 2, Start: 32, End: 33}, + {Type: token.IDENT, Literal: "ID", Line: 2, Start: 34, End: 36}, + {Type: token.BANG, Literal: "!", Line: 2, Start: 36, End: 37}, + {Type: token.IDENT, Literal: "name", Line: 3, Start: 40, End: 44}, + {Type: token.COLON, Literal: ":", Line: 3, Start: 44, End: 45}, + {Type: token.IDENT, Literal: "String", Line: 3, Start: 46, End: 52}, + {Type: token.BRACE_R, Literal: "}", Line: 4, Start: 53, End: 54}, + {Type: token.EOF, Literal: "", Line: 4, Start: 54, End: 54}, + }, + }, + { + name: "BOM (Byte Order Mark)", + // 
1. Start of file: \xef\xbb\xbf + // 2. Concatenated file (start of line): ...\n\xef\xbb\xbf... + input: "\xef\xbb\xbfquery\n\xef\xbb\xbf{ id }", + expected: []token.Token{ + {Type: token.QUERY, Literal: "query", Line: 1, Start: 3, End: 8}, // BOM(3bytes) skipped + {Type: token.BRACE_L, Literal: "{", Line: 2, Start: 12, End: 13}, // \n(1) + BOM(3) skipped + {Type: token.IDENT, Literal: "id", Line: 2, Start: 14, End: 16}, + {Type: token.BRACE_R, Literal: "}", Line: 2, Start: 17, End: 18}, + {Type: token.EOF, Literal: "", Line: 2, Start: 18, End: 18}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + l := New(tt.input) + got := l.Tokens() + + if len(got) != len(tt.expected) { + t.Fatalf("Tokens length mismatch. want=%d, got=%d", len(tt.expected), len(got)) + } + + if d := cmp.Diff(tt.expected, got); d != "" { + t.Errorf("Tokens mismatch (-want +got):\n%s", d) + } + }) + } +} diff --git a/parser/parser.go b/parser/parser.go new file mode 100644 index 0000000..6203f91 --- /dev/null +++ b/parser/parser.go @@ -0,0 +1,153 @@ +package parser + +import ( + "fmt" + + "github.com/n9te9/graphql-parser/ast" + "github.com/n9te9/graphql-parser/lexer" + "github.com/n9te9/graphql-parser/token" +) + +type Parser struct { + l *lexer.Lexer + errors []string + + curToken token.Token + peekToken token.Token +} + +func New(l *lexer.Lexer) *Parser { + p := &Parser{ + l: l, + errors: []string{}, + } + + // Read two tokens, so curToken and peekToken are both set + p.nextToken() + p.nextToken() + + return p +} + +func (p *Parser) Errors() []string { + return p.errors +} + +func (p *Parser) nextToken() { + p.curToken = p.peekToken + p.peekToken = p.l.NextToken() +} + +func (p *Parser) ParseDocument() *ast.Document { + doc := &ast.Document{} + doc.Definitions = []ast.Definition{} + + for p.curToken.Type != token.EOF { + stmt := p.parseDefinition() + if stmt != nil { + doc.Definitions = append(doc.Definitions, stmt) + } + + if len(p.errors) > 0 { + return nil + 
} + } + + return doc +} + +func (p *Parser) parseDefinition() ast.Definition { + description := p.parseDescription() + switch p.curToken.Type { + case token.QUERY, token.MUTATION, token.SUBSCRIPTION, token.BRACE_L: + return p.parseOperationDefinition() + case token.FRAGMENT: + return p.parseFragmentDefinition() + case token.TYPE: + return p.parseObjectTypeDefinition(description) + case token.INTERFACE: + return p.parseInterfaceTypeDefinition(description) + case token.UNION: + return p.parseUnionTypeDefinition(description) + case token.ENUM: + return p.parseEnumTypeDefinition(description) + case token.INPUT: + return p.parseInputObjectTypeDefinition(description) + case token.SCALAR: + return p.parseScalarTypeDefinition(description) + case token.SCHEMA: + return p.parseSchemaDefinition(description) + case token.DIRECTIVE: + return p.parseDirectiveDefinition(description) + case token.EXTEND: + return p.parseExtendDefinition(description) + default: + p.errors = append(p.errors, fmt.Sprintf("Unexpected token at top level: %s", p.curToken.Literal)) + return nil + } +} + +// curTokenIs checks if the current token type matches t. +func (p *Parser) curTokenIs(t token.TokenType) bool { + return p.curToken.Type == t +} + +// peekTokenIs checks if the next token type matches t. +func (p *Parser) peekTokenIs(t token.TokenType) bool { + return p.peekToken.Type == t +} + +// expectPeek checks if the next token is t. If so, it advances the tokens. +// If not, it records an error. 
+func (p *Parser) expectPeek(t token.TokenType) bool { + if p.peekTokenIs(t) { + p.nextToken() + return true + } + p.peekError(t) + return false +} + +func (p *Parser) peekError(t token.TokenType) { + msg := fmt.Sprintf("expected next token to be %s, got %s instead", t, p.peekToken.Type) + p.errors = append(p.errors, msg) +} + +func (p *Parser) parseType() ast.Type { + var t ast.Type + + if p.curTokenIs(token.BRACKET_L) { + listType := &ast.ListType{Token: p.curToken} + p.nextToken() + listType.Type = p.parseType() + if !p.curTokenIs(token.BRACKET_R) { + p.peekError(token.BRACKET_R) + return nil + } + t = listType + p.nextToken() + } else if p.curTokenIs(token.IDENT) { + t = &ast.NamedType{ + Token: p.curToken, + Name: &ast.Name{Token: p.curToken, Value: p.curToken.Literal}, + } + p.nextToken() + } + + if p.curTokenIs(token.BANG) { + nonNull := &ast.NonNullType{Token: p.curToken, Type: t} + p.nextToken() + return nonNull + } + return t +} + +func (p *Parser) parseName() (*ast.Name, error) { + if p.curTokenIs(token.IDENT) || p.isKeywordToken() { + name := &ast.Name{Token: p.curToken, Value: p.curToken.Literal} + p.nextToken() + return name, nil + } + + return nil, fmt.Errorf("failed to parse name at line: %d column: %d", p.curToken.Line, p.curToken.Start) +} diff --git a/parser/parser_test.go b/parser/parser_test.go new file mode 100644 index 0000000..afde051 --- /dev/null +++ b/parser/parser_test.go @@ -0,0 +1,91 @@ +package parser_test + +import ( + "strings" + "testing" + + "github.com/google/go-cmp/cmp" + "github.com/google/go-cmp/cmp/cmpopts" + "github.com/n9te9/graphql-parser/ast" + "github.com/n9te9/graphql-parser/lexer" + "github.com/n9te9/graphql-parser/parser" + "github.com/n9te9/graphql-parser/token" +) + +func TestParser_ParseDocument(t *testing.T) { + tests := []struct { + name string + input string + wantErr string // Expected error message (empty string means no error) + expect *ast.Document // Expected AST on successful parse + }{ + { + name: "Empty 
Document", + input: ``, + wantErr: "", + expect: &ast.Document{ + Definitions: []ast.Definition{}, + }, + }, + { + name: "Document with Comments only", + input: `# This is a comment`, + wantErr: "", + expect: &ast.Document{ + Definitions: []ast.Definition{}, + }, + }, + { + name: "Unexpected Top Level Token", + input: `123`, // 数字はトップレベルに来れない + wantErr: "Unexpected token at top level: 123", + expect: nil, // エラー時はASTチェックをスキップするため nil でOK + }, + { + name: "Illegal Token (Semicolon)", + input: `;`, + wantErr: "Unexpected token at top level: ;", + expect: nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + l := lexer.New(tt.input) + p := parser.New(l) + got := p.ParseDocument() + + errors := p.Errors() + + if tt.wantErr != "" { + if len(errors) == 0 { + t.Errorf("expected error containing %q, got none", tt.wantErr) + return + } + found := false + for _, err := range errors { + if strings.Contains(err, tt.wantErr) { + found = true + break + } + } + if !found { + t.Errorf("expected error containing %q, got %v", tt.wantErr, errors) + } + return + } else { + if len(errors) > 0 { + t.Fatalf("unexpected parser errors: %v", errors) + } + } + + opts := []cmp.Option{ + cmpopts.IgnoreTypes(token.Token{}), + } + + if diff := cmp.Diff(tt.expect, got, opts...); diff != "" { + t.Errorf("ParseDocument() mismatch (-want +got):\n%s", diff) + } + }) + } +} diff --git a/parser/query_parser.go b/parser/query_parser.go new file mode 100644 index 0000000..de21f46 --- /dev/null +++ b/parser/query_parser.go @@ -0,0 +1,502 @@ +package parser + +import ( + "fmt" + "math" + "strconv" + "strings" + + "github.com/n9te9/graphql-parser/ast" + "github.com/n9te9/graphql-parser/token" +) + +func (p *Parser) parseOperationDefinition() ast.Definition { + if p.curTokenIs(token.BRACE_L) { + return &ast.OperationDefinition{ + Operation: ast.Query, + SelectionSet: p.parseSelectionSet(), + } + } + + if !p.isOperationType() { + return nil + } + + stmt := 
&ast.OperationDefinition{
		Token:     p.curToken,
		Operation: ast.OperationType(p.curToken.Literal),
	}
	p.nextToken()

	// The operation name is optional (anonymous operations are legal).
	if p.curTokenIs(token.IDENT) {
		stmt.Name = &ast.Name{
			Token: p.curToken,
			Value: p.curToken.Literal,
		}
		p.nextToken()
	}

	stmt.VariableDefinitions = p.parseVariableDefinitions()
	stmt.Directives = p.parseDirectives()

	if p.curTokenIs(token.BRACE_L) {
		stmt.SelectionSet = p.parseSelectionSet()
	} else {
		// NOTE(review): the check is on the *current* token but peekError
		// reports the *peek* token, so the message may name the wrong token.
		p.peekError(token.BRACE_L)
		return nil
	}

	return stmt
}

// parseFragmentDefinition parses `fragment Name on TypeName @dirs { ... }`.
// The current token must be FRAGMENT on entry.
func (p *Parser) parseFragmentDefinition() ast.Definition {
	stmt := &ast.FragmentDefinition{Token: p.curToken}
	p.nextToken()

	// Fix: removed a leftover debug statement `fmt.Println(p.curToken)` that
	// wrote every fragment's name token to stdout on each parse.
	if !p.isKeywordToken() && !p.curTokenIs(token.IDENT) {
		p.errors = append(p.errors, "expected fragment name")
		return nil
	}

	stmt.Name = &ast.Name{Token: p.curToken, Value: p.curToken.Literal}

	if !p.expectPeek(token.ON) {
		return nil
	}

	if !p.expectPeek(token.IDENT) {
		return nil
	}

	stmt.TypeCondition = &ast.NamedType{
		Token: p.curToken,
		Name:  &ast.Name{Token: p.curToken, Value: p.curToken.Literal},
	}

	p.nextToken()
	stmt.Directives = p.parseDirectives()

	if !p.curTokenIs(token.BRACE_L) {
		p.peekError(token.BRACE_L)
		return nil
	}

	stmt.SelectionSet = p.parseSelectionSet()

	return stmt
}

// parseSelectionSet parses `{ selection+ }`. It tolerates being called either
// on the BRACE_L token or just after it, rejects an empty set, and leaves the
// parser positioned after the closing BRACE_R on success.
func (p *Parser) parseSelectionSet() []ast.Selection {
	selections := []ast.Selection{}

	if p.curTokenIs(token.BRACE_L) {
		p.nextToken()
	}

	// `{}` is invalid GraphQL: a selection set must contain at least one field.
	if p.curTokenIs(token.BRACE_R) {
		p.errors = append(p.errors, "empty selection set")
		return nil
	}

	for !p.curTokenIs(token.BRACE_R) && !p.curTokenIs(token.EOF) {
		selection := p.parseSelection()
		if selection != nil {
			selections = append(selections, selection)
		}
	}

	if p.curTokenIs(token.BRACE_R) {
		p.nextToken()
	} else {
		p.peekError(token.BRACE_R)
		return nil
	}

	return selections
}

// parseSelection parses a single selection: a field (possibly named by a
// keyword) or a fragment spread / inline fragment introduced by `...`.
func (p *Parser) parseSelection() ast.Selection {
	if p.curTokenIs(token.IDENT) || p.isKeywordToken() {
		return
p.parseField() + } + + if p.curTokenIs(token.SPREAD) { + return p.parseFragment() + } + + p.nextToken() + return nil +} + +func (p *Parser) isKeywordToken() bool { + return p.curTokenIs(token.QUERY) || + p.curTokenIs(token.MUTATION) || + p.curTokenIs(token.SUBSCRIPTION) || + p.curTokenIs(token.FRAGMENT) || + p.curTokenIs(token.ON) || + p.curTokenIs(token.TYPE) || + p.curTokenIs(token.INPUT) || + p.curTokenIs(token.ENUM) || + p.curTokenIs(token.UNION) || + p.curTokenIs(token.INTERFACE) || + p.curTokenIs(token.SCALAR) || + p.curTokenIs(token.DIRECTIVE) +} + +func (p *Parser) parseFragment() ast.Selection { + startToken := p.curToken + p.nextToken() + + if p.curTokenIs(token.ON) { + p.nextToken() + + typeCondition := &ast.NamedType{ + Token: p.curToken, + Name: &ast.Name{Token: p.curToken, Value: p.curToken.Literal}, + } + p.nextToken() + + dirs := p.parseDirectives() + + return &ast.InlineFragment{ + Token: startToken, + TypeCondition: typeCondition, + Directives: dirs, + SelectionSet: p.parseSelectionSet(), + } + } + + if p.curTokenIs(token.AT) || p.curTokenIs(token.BRACE_L) { + dirs := p.parseDirectives() + + return &ast.InlineFragment{ + Token: startToken, + Directives: dirs, + SelectionSet: p.parseSelectionSet(), + } + } + + spread := &ast.FragmentSpread{ + Token: startToken, + Name: &ast.Name{Token: p.curToken, Value: p.curToken.Literal}, + } + p.nextToken() + spread.Directives = p.parseDirectives() + + return spread +} + +func (p *Parser) parseField() *ast.Field { + field := &ast.Field{Token: p.curToken} + + if p.peekTokenIs(token.COLON) { + field.Alias = &ast.Name{Token: p.curToken, Value: p.curToken.Literal} + p.nextToken() + p.nextToken() + } + + field.Name = &ast.Name{Token: p.curToken, Value: p.curToken.Literal} + p.nextToken() + + if p.curTokenIs(token.PAREN_L) { + field.Arguments = p.parseArguments() + } + field.Directives = p.parseDirectives() + + if p.curTokenIs(token.BRACE_L) { + field.SelectionSet = p.parseSelectionSet() + } + + return field +} + 
+func (p *Parser) isOperationType() bool { + return p.curTokenIs(token.QUERY) || p.curTokenIs(token.MUTATION) || p.curTokenIs(token.SUBSCRIPTION) +} + +// parseValue parses a value literal or variable +func (p *Parser) parseValue() ast.Value { + switch p.curToken.Type { + case token.INT: + val, _ := strconv.ParseInt(p.curToken.Literal, 10, 64) + lit := &ast.IntValue{Token: p.curToken, Value: val} + p.nextToken() + return lit + case token.FLOAT: + val, _ := strconv.ParseFloat(p.curToken.Literal, 64) + lit := &ast.FloatValue{Token: p.curToken, Value: val} + p.nextToken() + return lit + case token.STRING: + lit := &ast.StringValue{Token: p.curToken, Value: unquoteGeneric(p.curToken.Literal)} + p.nextToken() + return lit + case token.TRUE, token.FALSE: + val, _ := strconv.ParseBool(p.curToken.Literal) + lit := &ast.BooleanValue{Token: p.curToken, Value: val} + p.nextToken() + return lit + case token.NULL: + lit := &ast.NullValue{Token: p.curToken} + p.nextToken() + return lit + case token.IDENT: + // Enum Value (e.g. 
ACTIVE) + lit := &ast.EnumValue{Token: p.curToken, Value: p.curToken.Literal} + p.nextToken() + return lit + case token.DOLLAR: + v := p.parseVariable() + p.nextToken() + return v + case token.BRACKET_L: + v := p.parseListValue() + return v + case token.BRACE_L: + v := p.parseObjectValue() + return v + case token.BLOCK_STRING: + lit := &ast.StringValue{Token: p.curToken, Value: dedentBlockStringValue(p.curToken.Literal)} + p.nextToken() + return lit + default: + p.errors = append(p.errors, fmt.Sprintf("unexpected token for value: %s", p.curToken.Type)) + return nil + } +} + +func unquoteGeneric(s string) string { + quoted := `"` + s + `"` + + if unquoted, err := strconv.Unquote(quoted); err == nil { + return unquoted + } + return s +} + +func dedentBlockStringValue(rawString string) string { + lines := strings.Split(rawString, "\n") + + commonIndent := math.MaxInt + foundCommonIndent := false + + for i, line := range lines { + if i == 0 { + continue + } + + indent := countLeadingWhitespace(line) + + if indent < len(line) { + if indent < commonIndent { + commonIndent = indent + foundCommonIndent = true + } + } + } + + if !foundCommonIndent { + commonIndent = 0 + } + + if commonIndent > 0 { + for i, line := range lines { + if i == 0 { + continue + } + if len(line) >= commonIndent { + lines[i] = line[commonIndent:] + } + } + } + + for len(lines) > 0 { + if isBlank(lines[0]) { + lines = lines[1:] + } else { + break + } + } + + for len(lines) > 0 { + if isBlank(lines[len(lines)-1]) { + lines = lines[:len(lines)-1] + } else { + break + } + } + + return strings.Join(lines, "\n") +} + +func countLeadingWhitespace(s string) int { + count := 0 + for _, r := range s { + if r == ' ' || r == '\t' { + count++ + } else { + break + } + } + return count +} + +func isBlank(s string) bool { + return countLeadingWhitespace(s) == len(s) +} + +// parseVariable parses $name +func (p *Parser) parseVariable() *ast.Variable { + v := &ast.Variable{Token: p.curToken} + if 
!p.peekTokenIs(token.IDENT) && !p.peekTokenIs(token.INPUT) { + return nil + } + p.nextToken() + v.Name = p.curToken.Literal + + return v +} + +// parseListValue parses [val1, val2] +func (p *Parser) parseListValue() *ast.ListValue { + lit := &ast.ListValue{Token: p.curToken} + p.nextToken() // skip '[' + + for !p.curTokenIs(token.BRACKET_R) && !p.curTokenIs(token.EOF) { + val := p.parseValue() + if val != nil { + lit.Values = append(lit.Values, val) + } + } + + if !p.curTokenIs(token.BRACKET_R) { + return nil + } + p.nextToken() + + return lit +} + +// parseObjectValue parses { key: value } +func (p *Parser) parseObjectValue() *ast.ObjectValue { + lit := &ast.ObjectValue{Token: p.curToken} + p.nextToken() // skip '{' + + for !p.curTokenIs(token.BRACE_R) && !p.curTokenIs(token.EOF) { + field := p.parseObjectField() + if field != nil { + lit.Fields = append(lit.Fields, field) + } + } + + if !p.curTokenIs(token.BRACE_R) { + return nil + } + p.nextToken() + + return lit +} + +func (p *Parser) parseObjectField() *ast.ObjectField { + field := &ast.ObjectField{Token: p.curToken} + field.Name = &ast.Name{Token: p.curToken, Value: p.curToken.Literal} + + if !p.expectPeek(token.COLON) { + return nil + } + p.nextToken() + field.Value = p.parseValue() + return field +} + +// parseArguments parses (arg1: val1, arg2: val2) +func (p *Parser) parseArguments() []*ast.Argument { + p.nextToken() + + var args []*ast.Argument + for !p.curTokenIs(token.PAREN_R) && !p.curTokenIs(token.EOF) { + arg := &ast.Argument{Token: p.curToken} + arg.Name = &ast.Name{Token: p.curToken, Value: p.curToken.Literal} + + if !p.expectPeek(token.COLON) { + p.peekError(token.COLON) + return nil + } + p.nextToken() + + arg.Value = p.parseValue() + args = append(args, arg) + + if p.curTokenIs(token.COMMA) { + p.nextToken() + } + } + + p.nextToken() // skip ')' + return args +} + +// parseDirectives parses @dir(args) @dir2 +func (p *Parser) parseDirectives() []*ast.Directive { + var directives []*ast.Directive 
+ + for p.curTokenIs(token.AT) { + d := &ast.Directive{Token: p.curToken} + p.nextToken() // skip '@' + + if !p.curTokenIs(token.IDENT) { + p.errors = append(p.errors, "expected directive name") + return nil + } + d.Name = p.curToken.Literal + p.nextToken() + + if p.curTokenIs(token.PAREN_L) { + d.Arguments = p.parseArguments() + } + + directives = append(directives, d) + } + + return directives +} + +func (p *Parser) parseVariableDefinitions() []*ast.VariableDefinition { + if !p.curTokenIs(token.PAREN_L) { + return nil + } + p.nextToken() + + var vars []*ast.VariableDefinition + for !p.curTokenIs(token.PAREN_R) && !p.curTokenIs(token.EOF) { + def := &ast.VariableDefinition{Token: p.curToken} + + def.Variable = p.parseVariable() + + if !p.expectPeek(token.COLON) { + return nil + } + + p.nextToken() + + def.Type = p.parseType() + + if p.curTokenIs(token.EQUALS) { + p.nextToken() + def.DefaultValue = p.parseValue() + } + + def.Directives = p.parseDirectives() + vars = append(vars, def) + + if p.curTokenIs(token.COMMA) { + p.nextToken() + } + } + + if p.curTokenIs(token.PAREN_R) { + p.nextToken() + } + return vars +} diff --git a/parser/query_parser_test.go b/parser/query_parser_test.go new file mode 100644 index 0000000..620e9e8 --- /dev/null +++ b/parser/query_parser_test.go @@ -0,0 +1,1101 @@ +package parser_test + +import ( + "strings" + "testing" + + "github.com/google/go-cmp/cmp" + "github.com/google/go-cmp/cmp/cmpopts" + "github.com/n9te9/graphql-parser/ast" + "github.com/n9te9/graphql-parser/lexer" + "github.com/n9te9/graphql-parser/parser" + "github.com/n9te9/graphql-parser/token" +) + +func TestParseOperationDefinition(t *testing.T) { + tests := []struct { + name string + input string + wantErr string + expect *ast.Document + }{ + { + name: "Shorthand Query", + input: `{ myField }`, + wantErr: "", + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.OperationDefinition{ + Operation: ast.Query, + SelectionSet: []ast.Selection{ + &ast.Field{ + 
Name: &ast.Name{Value: "myField"}, + }, + }, + }, + }, + }, + }, + { + name: "Named Query", + input: `query MyQuery { user }`, + wantErr: "", + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.OperationDefinition{ + Operation: ast.Query, + Name: &ast.Name{Value: "MyQuery"}, + SelectionSet: []ast.Selection{ + &ast.Field{ + Name: &ast.Name{Value: "user"}, + }, + }, + }, + }, + }, + }, + { + name: "Mutation", + input: `mutation CreateUser { createUser }`, + wantErr: "", + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.OperationDefinition{ + Operation: ast.Mutation, + Name: &ast.Name{Value: "CreateUser"}, + SelectionSet: []ast.Selection{ + &ast.Field{ + Name: &ast.Name{Value: "createUser"}, + }, + }, + }, + }, + }, + }, + { + name: "Subscription", + input: `subscription NewMessages { messageAdded }`, + wantErr: "", + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.OperationDefinition{ + Operation: ast.Subscription, + Name: &ast.Name{Value: "NewMessages"}, + SelectionSet: []ast.Selection{ + &ast.Field{ + Name: &ast.Name{Value: "messageAdded"}, + }, + }, + }, + }, + }, + }, + { + name: "Missing Closing Brace", + input: `query { user`, + wantErr: "expected next token to be }, got EOF instead", + expect: nil, + }, + { + name: "Missing Selection Set", + input: `query MyQuery`, + wantErr: "expected next token to be {, got EOF instead", + expect: nil, + }, + { + name: "Invalid Token at Start", + input: `123 query { user }`, + wantErr: "Unexpected token at top level: 123", + expect: nil, + }, + + { + name: "Query with Variables", + input: `query getUser($id: ID!) 
{ user(id: $id) }`, + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.OperationDefinition{ + Operation: ast.Query, + Name: &ast.Name{Value: "getUser"}, + VariableDefinitions: []*ast.VariableDefinition{ + { + Variable: &ast.Variable{Name: "id"}, + Type: &ast.NonNullType{ + Type: &ast.NamedType{Name: &ast.Name{Value: "ID"}}, + }, + }, + }, + SelectionSet: []ast.Selection{ + &ast.Field{ + Name: &ast.Name{Value: "user"}, + Arguments: []*ast.Argument{ + { + Name: &ast.Name{Value: "id"}, + Value: &ast.Variable{Name: "id"}, + }, + }, + }, + }, + }, + }, + }, + }, + { + name: "Field with Literal Arguments", + input: `{ user(id: 123, status: "ACTIVE") }`, + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.OperationDefinition{ + Operation: ast.Query, + SelectionSet: []ast.Selection{ + &ast.Field{ + Name: &ast.Name{Value: "user"}, + Arguments: []*ast.Argument{ + { + Name: &ast.Name{Value: "id"}, + Value: &ast.IntValue{Value: 123}, + }, + { + Name: &ast.Name{Value: "status"}, + Value: &ast.StringValue{Value: "ACTIVE"}, + }, + }, + }, + }, + }, + }, + }, + }, + { + name: "Directives on Query and Field", + input: `query @cached(ttl: 60) { user @skip(if: true) }`, + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.OperationDefinition{ + Operation: ast.Query, + Directives: []*ast.Directive{ + { + Name: "cached", + Arguments: []*ast.Argument{ + { + Name: &ast.Name{Value: "ttl"}, + Value: &ast.IntValue{Value: 60}, + }, + }, + }, + }, + SelectionSet: []ast.Selection{ + &ast.Field{ + Name: &ast.Name{Value: "user"}, + Directives: []*ast.Directive{ + { + Name: "skip", + Arguments: []*ast.Argument{ + { + Name: &ast.Name{Value: "if"}, + Value: &ast.BooleanValue{Value: true}, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, { + name: "Complex Query with Fragments and Objects", + input: ` + query GetUser($input: UserFilter) { + user(filter: $input) { + id + ... 
on Admin { + role + } + ...UserFields + } + } + `, + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.OperationDefinition{ + Operation: ast.Query, + Name: &ast.Name{Value: "GetUser"}, + VariableDefinitions: []*ast.VariableDefinition{ + { + Variable: &ast.Variable{Name: "input"}, + Type: &ast.NamedType{Name: &ast.Name{Value: "UserFilter"}}, + }, + }, + SelectionSet: []ast.Selection{ + &ast.Field{ + Name: &ast.Name{Value: "user"}, + Arguments: []*ast.Argument{ + { + Name: &ast.Name{Value: "filter"}, + Value: &ast.Variable{Name: "input"}, + }, + }, + SelectionSet: []ast.Selection{ + &ast.Field{Name: &ast.Name{Value: "id"}}, + &ast.InlineFragment{ + TypeCondition: &ast.NamedType{Name: &ast.Name{Value: "Admin"}}, + SelectionSet: []ast.Selection{ + &ast.Field{Name: &ast.Name{Value: "role"}}, + }, + }, + &ast.FragmentSpread{Name: &ast.Name{Value: "UserFields"}}, + }, + }, + }, + }, + }, + }, + }, { + name: "Recursive Input Values (Nested Objects and Lists)", + input: ` + query { + complexField( + filter: { + tags: ["go", "graphql"], + metadata: { count: 10, active: true } + }, + matrix: [[1, 2], [3, 4]] + ) + } + `, + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.OperationDefinition{ + Operation: ast.Query, + SelectionSet: []ast.Selection{ + &ast.Field{ + Name: &ast.Name{Value: "complexField"}, + Arguments: []*ast.Argument{ + { + Name: &ast.Name{Value: "filter"}, + Value: &ast.ObjectValue{ + Fields: []*ast.ObjectField{ + { + Name: &ast.Name{Value: "tags"}, + Value: &ast.ListValue{ + Values: []ast.Value{ + &ast.StringValue{Value: "go"}, + &ast.StringValue{Value: "graphql"}, + }, + }, + }, + { + Name: &ast.Name{Value: "metadata"}, + Value: &ast.ObjectValue{ + Fields: []*ast.ObjectField{ + { + Name: &ast.Name{Value: "count"}, + Value: &ast.IntValue{Value: 10}, + }, + { + Name: &ast.Name{Value: "active"}, + Value: &ast.BooleanValue{Value: true}, + }, + }, + }, + }, + }, + }, + }, + { + Name: &ast.Name{Value: "matrix"}, + Value: &ast.ListValue{ + 
Values: []ast.Value{ + &ast.ListValue{ + Values: []ast.Value{ + &ast.IntValue{Value: 1}, + &ast.IntValue{Value: 2}, + }, + }, + &ast.ListValue{ + Values: []ast.Value{ + &ast.IntValue{Value: 3}, + &ast.IntValue{Value: 4}, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + { + name: "Float and Null Literals", + input: `{ check(val: 123.45, old: null) }`, + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.OperationDefinition{ + Operation: ast.Query, + SelectionSet: []ast.Selection{ + &ast.Field{ + Name: &ast.Name{Value: "check"}, + Arguments: []*ast.Argument{ + { + Name: &ast.Name{Value: "val"}, + Value: &ast.FloatValue{Value: 123.45}, + }, + { + Name: &ast.Name{Value: "old"}, + Value: &ast.NullValue{}, + }, + }, + }, + }, + }, + }, + }, + }, + { + name: "Alias and Nested Selection Set", + input: `{ user: currentUser { id, name: fullName } }`, + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.OperationDefinition{ + Operation: ast.Query, + SelectionSet: []ast.Selection{ + &ast.Field{ + Alias: &ast.Name{Value: "user"}, + Name: &ast.Name{Value: "currentUser"}, + SelectionSet: []ast.Selection{ + &ast.Field{Name: &ast.Name{Value: "id"}}, + &ast.Field{ + Alias: &ast.Name{Value: "name"}, + Name: &ast.Name{Value: "fullName"}, + }, + }, + }, + }, + }, + }, + }, + }, + { + name: "Variables with Default Values", + input: `query ($id: ID = "default-id", $limit: Int = 10) { user(id: $id) }`, + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.OperationDefinition{ + Operation: ast.Query, + VariableDefinitions: []*ast.VariableDefinition{ + { + Variable: &ast.Variable{Name: "id"}, + Type: &ast.NamedType{Name: &ast.Name{Value: "ID"}}, + DefaultValue: &ast.StringValue{Value: "default-id"}, + }, + { + Variable: &ast.Variable{Name: "limit"}, + Type: &ast.NamedType{Name: &ast.Name{Value: "Int"}}, + DefaultValue: &ast.IntValue{Value: 10}, + }, + }, + SelectionSet: []ast.Selection{ + &ast.Field{ + Name: &ast.Name{Value: "user"}, + 
Arguments: []*ast.Argument{ + { + Name: &ast.Name{Value: "id"}, + Value: &ast.Variable{Name: "id"}, + }, + }, + }, + }, + }, + }, + }, + }, + { + name: "Inline Fragment without Type Condition", + input: `{ user { id ... @include(if: $expanded) { email } } }`, + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.OperationDefinition{ + Operation: ast.Query, + SelectionSet: []ast.Selection{ + &ast.Field{ + Name: &ast.Name{Value: "user"}, + SelectionSet: []ast.Selection{ + &ast.Field{Name: &ast.Name{Value: "id"}}, + &ast.InlineFragment{ + TypeCondition: nil, + Directives: []*ast.Directive{ + { + Name: "include", + Arguments: []*ast.Argument{ + {Name: &ast.Name{Value: "if"}, Value: &ast.Variable{Name: "expanded"}}, + }, + }, + }, + SelectionSet: []ast.Selection{ + &ast.Field{ + Name: &ast.Name{Value: "email"}, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + { + name: "Fragment with Directives", + input: `fragment UserFields on User @deprecated(reason: "use newFields") { id }`, + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.FragmentDefinition{ + Name: &ast.Name{Value: "UserFields"}, + TypeCondition: &ast.NamedType{Name: &ast.Name{Value: "User"}}, + Directives: []*ast.Directive{ + { + Name: "deprecated", + Arguments: []*ast.Argument{ + {Name: &ast.Name{Value: "reason"}, Value: &ast.StringValue{Value: "use newFields"}}, + }, + }, + }, + SelectionSet: []ast.Selection{ + &ast.Field{Name: &ast.Name{Value: "id"}}, + }, + }, + }, + }, + }, + { + name: "Multiple Operations in One Document", + input: ` + query GetUser { user { id } } + mutation UpdateUser { updateUser { id } } + `, + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.OperationDefinition{ + Operation: ast.Query, Name: &ast.Name{Value: "GetUser"}, + SelectionSet: []ast.Selection{ + &ast.Field{ + Name: &ast.Name{Value: "user"}, + SelectionSet: []ast.Selection{ + &ast.Field{Name: &ast.Name{Value: "id"}}, + }, + }, + }, + }, + &ast.OperationDefinition{ + Operation: 
ast.Mutation, Name: &ast.Name{Value: "UpdateUser"}, + SelectionSet: []ast.Selection{ + &ast.Field{ + Name: &ast.Name{Value: "updateUser"}, + SelectionSet: []ast.Selection{ + &ast.Field{Name: &ast.Name{Value: "id"}}, + }, + }, + }, + }, + }, + }, + }, + { + name: "Variable Definition with Directive", + input: `query ($id: ID! @deprecated(reason: "use uuid")) { user(id: $id) }`, + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.OperationDefinition{ + Operation: ast.Query, + VariableDefinitions: []*ast.VariableDefinition{ + { + Variable: &ast.Variable{Name: "id"}, + Type: &ast.NonNullType{ + Type: &ast.NamedType{Name: &ast.Name{Value: "ID"}}, + }, + Directives: []*ast.Directive{ + { + Name: "deprecated", + Arguments: []*ast.Argument{ + { + Name: &ast.Name{Value: "reason"}, + Value: &ast.StringValue{Value: "use uuid"}, + }, + }, + }, + }, + }, + }, + SelectionSet: []ast.Selection{ + &ast.Field{ + Name: &ast.Name{Value: "user"}, + Arguments: []*ast.Argument{ + { + Name: &ast.Name{Value: "id"}, + Value: &ast.Variable{Name: "id"}, + }, + }, + }, + }, + }, + }, + }, + }, + { + name: "Document with BOM", + input: "\uFEFFquery { id }", + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.OperationDefinition{ + Operation: ast.Query, + SelectionSet: []ast.Selection{ + &ast.Field{Name: &ast.Name{Value: "id"}}, + }, + }, + }, + }, + }, + { + name: "Fragment Named on", + input: `fragment on on User { id }`, + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.FragmentDefinition{ + Name: &ast.Name{Value: "on"}, // 名前が "on" + TypeCondition: &ast.NamedType{Name: &ast.Name{Value: "User"}}, + SelectionSet: []ast.Selection{ + &ast.Field{Name: &ast.Name{Value: "id"}}, + }, + }, + }, + }, + }, + { + name: "Fragment Spread with Directives", + input: `query { user { ...UserFields @include(if: $verbose) } }`, + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.OperationDefinition{ + Operation: ast.Query, + SelectionSet: []ast.Selection{ + 
&ast.Field{ + Name: &ast.Name{Value: "user"}, + SelectionSet: []ast.Selection{ + &ast.FragmentSpread{ + Name: &ast.Name{Value: "UserFields"}, + Directives: []*ast.Directive{ + { + Name: "include", + Arguments: []*ast.Argument{ + {Name: &ast.Name{Value: "if"}, Value: &ast.Variable{Name: "verbose"}}, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + { + name: "Variable with Default Value and Directive", + input: `query ($limit: Int = 10 @deprecated) { users(limit: $limit) }`, + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.OperationDefinition{ + Operation: ast.Query, + VariableDefinitions: []*ast.VariableDefinition{ + { + Variable: &ast.Variable{Name: "limit"}, + Type: &ast.NamedType{Name: &ast.Name{Value: "Int"}}, + DefaultValue: &ast.IntValue{Value: 10}, + Directives: []*ast.Directive{ + {Name: "deprecated"}, + }, + }, + }, + SelectionSet: []ast.Selection{ + &ast.Field{ + Name: &ast.Name{Value: "users"}, + Arguments: []*ast.Argument{ + {Name: &ast.Name{Value: "limit"}, Value: &ast.Variable{Name: "limit"}}, + }, + }, + }, + }, + }, + }, + }, + { + name: "Empty Selection Set", + input: `query { user { } }`, + wantErr: "empty selection set", + expect: nil, + }, + { + name: "Invalid Variable Definition (Missing Colon)", + input: `query getUser($id ID!) 
{ user(id: $id) }`, + wantErr: "expected next token to be :, got IDENT instead", + expect: nil, + }, + { + name: "Invalid Argument (Missing Value)", + input: `{ user(id: ) }`, + wantErr: "unexpected token for value", + expect: nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + l := lexer.New(tt.input) + p := parser.New(l) + got := p.ParseDocument() + + errors := p.Errors() + + if tt.wantErr != "" { + if len(errors) == 0 { + t.Errorf("expected error containing %q, got none", tt.wantErr) + return + } + found := false + for _, err := range errors { + if strings.Contains(err, tt.wantErr) { + found = true + break + } + } + if !found { + t.Errorf("expected error containing %q, got %v", tt.wantErr, errors) + } + return + } else { + if len(errors) > 0 { + t.Fatalf("unexpected parser errors: %v", errors) + } + } + + opts := []cmp.Option{ + cmpopts.IgnoreTypes(token.Token{}), + } + + if diff := cmp.Diff(tt.expect, got, opts...); diff != "" { + t.Errorf("ParseDocument() mismatch (-want +got):\n%s", diff) + } + }) + } +} + +func TestParseEdgeCases(t *testing.T) { + tests := []struct { + name string + input string + wantErr string + expect *ast.Document + }{ + { + name: "Keywords as Field Names", + input: `{ type query fragment on }`, + wantErr: "", + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.OperationDefinition{ + Operation: ast.Query, + SelectionSet: []ast.Selection{ + &ast.Field{Name: &ast.Name{Value: "type"}}, + &ast.Field{Name: &ast.Name{Value: "query"}}, + &ast.Field{Name: &ast.Name{Value: "fragment"}}, + &ast.Field{Name: &ast.Name{Value: "on"}}, + }, + }, + }, + }, + }, + { + name: "Negative Int and Float", + input: `{ calculate(diff: -5, factor: -1.5) }`, + wantErr: "", + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.OperationDefinition{ + Operation: ast.Query, + SelectionSet: []ast.Selection{ + &ast.Field{ + Name: &ast.Name{Value: "calculate"}, + Arguments: []*ast.Argument{ + { + Name: 
&ast.Name{Value: "diff"}, + Value: &ast.IntValue{Value: -5}, + }, + { + Name: &ast.Name{Value: "factor"}, + Value: &ast.FloatValue{Value: -1.5}, + }, + }, + }, + }, + }, + }, + }, + }, + { + name: "Empty List and Object", + input: `{ search(ids: [], filter: {}) }`, + wantErr: "", + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.OperationDefinition{ + Operation: ast.Query, + SelectionSet: []ast.Selection{ + &ast.Field{ + Name: &ast.Name{Value: "search"}, + Arguments: []*ast.Argument{ + { + Name: &ast.Name{Value: "ids"}, + Value: &ast.ListValue{Values: nil}, + }, + { + Name: &ast.Name{Value: "filter"}, + Value: &ast.ObjectValue{Fields: nil}, + }, + }, + }, + }, + }, + }, + }, + }, + { + name: "Boolean vs Enum", + input: `{ check(a: true, b: TRUE, c: null, d: NULL) }`, + wantErr: "", + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.OperationDefinition{ + Operation: ast.Query, + SelectionSet: []ast.Selection{ + &ast.Field{ + Name: &ast.Name{Value: "check"}, + Arguments: []*ast.Argument{ + { + Name: &ast.Name{Value: "a"}, + Value: &ast.BooleanValue{Value: true}, + }, + { + Name: &ast.Name{Value: "b"}, + Value: &ast.EnumValue{Value: "TRUE"}, + }, + { + Name: &ast.Name{Value: "c"}, + Value: &ast.NullValue{}, + }, + { + Name: &ast.Name{Value: "d"}, + Value: &ast.EnumValue{Value: "NULL"}, + }, + }, + }, + }, + }, + }, + }, + }, + { + name: "Invalid Syntax (Missing Value)", + input: `{ user(id: ) }`, + wantErr: "unexpected token", + expect: nil, + }, + { + name: "Invalid Variable Definition", + input: `query($id) { user }`, + wantErr: "expected", + expect: nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + l := lexer.New(tt.input) + p := parser.New(l) + got := p.ParseDocument() + + errors := p.Errors() + + if tt.wantErr != "" { + if len(errors) == 0 { + t.Errorf("expected error containing %q, got none", tt.wantErr) + return + } + + found := false + for _, err := range errors { + if strings.Contains(err, 
tt.wantErr) { + found = true + break + } + } + if !found { + t.Errorf("expected error containing %q, got %v", tt.wantErr, errors) + } + return + } + + if len(errors) > 0 { + t.Fatalf("unexpected parser errors: %v", errors) + } + + opts := []cmp.Option{ + cmpopts.IgnoreTypes(token.Token{}), + } + if diff := cmp.Diff(tt.expect, got, opts...); diff != "" { + t.Errorf("mismatch (-want +got):\n%s", diff) + } + }) + } +} + +func TestParseStrictSpecCompliance(t *testing.T) { + tests := []struct { + name string + input string + wantErr string + expect *ast.Document + }{ + { + name: "Block String with Indentation", + input: ` + { + description(text: """ + Hello, + World! + """) + } + `, + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.OperationDefinition{ + Operation: ast.Query, + SelectionSet: []ast.Selection{ + &ast.Field{ + Name: &ast.Name{Value: "description"}, + Arguments: []*ast.Argument{ + { + Name: &ast.Name{Value: "text"}, + Value: &ast.StringValue{Value: "Hello,\n World!"}, + }, + }, + }, + }, + }, + }, + }, + }, + { + name: "String with Unicode Escapes", + input: `{ user(name: "\u004E\u0061\u006E\u0061") }`, + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.OperationDefinition{ + Operation: ast.Query, + SelectionSet: []ast.Selection{ + &ast.Field{ + Name: &ast.Name{Value: "user"}, + Arguments: []*ast.Argument{ + { + Name: &ast.Name{Value: "name"}, + Value: &ast.StringValue{Value: "Nana"}, + }, + }, + }, + }, + }, + }, + }, + }, + { + name: "Comments Everywhere", + input: ` + query { # This is a comment + user # comment after field + (id: 1) # comment inside arguments + } + `, + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.OperationDefinition{ + Operation: ast.Query, + SelectionSet: []ast.Selection{ + &ast.Field{ + Name: &ast.Name{Value: "user"}, + Arguments: []*ast.Argument{ + { + Name: &ast.Name{Value: "id"}, + Value: &ast.IntValue{Value: 1}, + }, + }, + }, + }, + }, + }, + }, + }, + { + name: "Complex Default 
Value", + input: `query ($filter: Filter = { active: true, tags: ["a", "b"] }) { search }`, + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.OperationDefinition{ + Operation: ast.Query, + VariableDefinitions: []*ast.VariableDefinition{ + { + Variable: &ast.Variable{Name: "filter"}, + Type: &ast.NamedType{Name: &ast.Name{Value: "Filter"}}, + DefaultValue: &ast.ObjectValue{ + Fields: []*ast.ObjectField{ + { + Name: &ast.Name{Value: "active"}, + Value: &ast.BooleanValue{Value: true}, + }, + { + Name: &ast.Name{Value: "tags"}, + Value: &ast.ListValue{ + Values: []ast.Value{ + &ast.StringValue{Value: "a"}, + &ast.StringValue{Value: "b"}, + }, + }, + }, + }, + }, + }, + }, + SelectionSet: []ast.Selection{ + &ast.Field{Name: &ast.Name{Value: "search"}}, + }, + }, + }, + }, + }, + { + name: "Query Document with BOM", + input: "\uFEFF{ me }", + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.OperationDefinition{ + Operation: ast.Query, + SelectionSet: []ast.Selection{ + &ast.Field{Name: &ast.Name{Value: "me"}}, + }, + }, + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + l := lexer.New(tt.input) + p := parser.New(l) + got := p.ParseDocument() + + errors := p.Errors() + if tt.wantErr != "" { + return + } + if len(errors) > 0 { + t.Fatalf("unexpected parser errors: %v", errors) + } + + opts := []cmp.Option{ + cmpopts.IgnoreTypes(token.Token{}), + cmpopts.EquateEmpty(), + } + if diff := cmp.Diff(tt.expect, got, opts...); diff != "" { + t.Errorf("mismatch (-want +got):\n%s", diff) + } + }) + } +} + +func TestParseFragmentDefinition(t *testing.T) { + tests := []struct { + name string + input string + wantErr string + expect *ast.Document + }{ + { + name: "Valid Fragment", + input: `fragment UserFields on User { id name }`, + wantErr: "", + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.FragmentDefinition{ + Name: &ast.Name{Value: "UserFields"}, + TypeCondition: &ast.NamedType{ + Name: 
&ast.Name{Value: "User"}, + }, + SelectionSet: []ast.Selection{ + &ast.Field{Name: &ast.Name{Value: "id"}}, + &ast.Field{Name: &ast.Name{Value: "name"}}, + }, + }, + }, + }, + }, + { + name: "Fragment Missing On", + input: `fragment UserFields User { id }`, + wantErr: "expected next token to be on, got IDENT instead", + expect: nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + l := lexer.New(tt.input) + p := parser.New(l) + got := p.ParseDocument() + + errors := p.Errors() + + if tt.wantErr != "" { + if len(errors) == 0 { + t.Errorf("expected error containing %q, got none", tt.wantErr) + return + } + found := false + for _, err := range errors { + if strings.Contains(err, tt.wantErr) { + found = true + break + } + } + if !found { + t.Errorf("expected error containing %q, got %v", tt.wantErr, errors) + } + return + } else { + if len(errors) > 0 { + t.Fatalf("unexpected parser errors: %v", errors) + } + } + + opts := []cmp.Option{ + cmpopts.IgnoreTypes(token.Token{}), + } + + if diff := cmp.Diff(tt.expect, got, opts...); diff != "" { + t.Errorf("ParseDocument() mismatch (-want +got):\n%s", diff) + } + }) + } +} diff --git a/parser/schema_parser.go b/parser/schema_parser.go new file mode 100644 index 0000000..2b44525 --- /dev/null +++ b/parser/schema_parser.go @@ -0,0 +1,776 @@ +package parser + +import ( + "fmt" + + "github.com/n9te9/graphql-parser/ast" + "github.com/n9te9/graphql-parser/token" +) + +func (p *Parser) parseObjectTypeDefinition(description string) ast.Definition { + def := &ast.ObjectTypeDefinition{ + Description: description, + Token: p.curToken, + } + p.nextToken() + + name, err := p.parseName() + if err != nil { + p.errors = append(p.errors, err.Error()) + return nil + } + + def.Name = name + + if p.curTokenIs(token.IMPLEMENTS) { + def.Interfaces = p.parseImplementsInterfaces() + } + + def.Directives = p.parseDirectives() + + if p.curTokenIs(token.BRACE_L) { + def.Fields = p.parseFieldDefinitions() + } + + return 
def +} + +func (p *Parser) parseImplementsInterfaces() []*ast.NamedType { + var interfaces []*ast.NamedType + p.nextToken() + + if p.curTokenIs(token.AMP) { + p.nextToken() + } + + for { + + name, err := p.parseName() + if err != nil { + p.errors = append(p.errors, err.Error()) + return nil + } + + interfaces = append(interfaces, &ast.NamedType{ + Token: name.Token, + Name: name, + }) + + if !p.curTokenIs(token.AMP) { + break + } + + p.nextToken() + } + + return interfaces +} + +func (p *Parser) parseFieldDefinitions() []*ast.FieldDefinition { + p.nextToken() + + var fields []*ast.FieldDefinition + for !p.curTokenIs(token.BRACE_R) && !p.curTokenIs(token.EOF) { + canParse := p.curTokenIs(token.IDENT) || + p.isKeywordToken() || + p.curTokenIs(token.STRING) || + p.curTokenIs(token.BLOCK_STRING) + if canParse { + fields = append(fields, p.parseFieldDefinition()) + } else { + p.errors = append(p.errors, fmt.Sprintf("unexpected token: %s", p.curToken.Literal)) + p.nextToken() + } + } + + if !p.curTokenIs(token.BRACE_R) { + p.errors = append(p.errors, fmt.Sprintf("expected token } but, got %s", p.curToken.Literal)) + return nil + } + p.nextToken() + + return fields +} + +func (p *Parser) parseFieldDefinition() *ast.FieldDefinition { + description := p.parseDescription() + def := &ast.FieldDefinition{ + Description: description, + Token: p.curToken, + } + + name, err := p.parseName() + if err != nil { + p.errors = append(p.errors, err.Error()) + return nil + } + def.Name = name + + if p.curTokenIs(token.PAREN_L) { + def.Arguments = p.parseInputValueDefinitions() + } + + if !p.curTokenIs(token.COLON) { + p.errors = append(p.errors, fmt.Sprintf("expected token : but, got %s", p.curToken.Literal)) + return nil + } + p.nextToken() + + def.Type = p.parseType() + def.Directives = p.parseDirectives() + + return def +} + +func (p *Parser) parseInputValueDefinitions() []*ast.InputValueDefinition { + if !p.curTokenIs(token.PAREN_L) { + p.errors = append(p.errors, 
fmt.Sprintf("expected token ( but, got %s", p.curToken.Literal)) + return nil + } + p.nextToken() + + var args []*ast.InputValueDefinition + + for !p.curTokenIs(token.PAREN_R) && !p.curTokenIs(token.EOF) { + description := p.parseDescription() + arg := &ast.InputValueDefinition{ + Description: description, + Token: p.curToken, + } + + name, err := p.parseName() + if err != nil { + p.errors = append(p.errors, err.Error()) + return nil + } + arg.Name = name + if !p.curTokenIs(token.COLON) { + p.errors = append(p.errors, fmt.Sprintf("expected token : but, got %s", p.curToken.Literal)) + return nil + } + p.nextToken() + + arg.Type = p.parseType() + if p.curTokenIs(token.EQUALS) { + p.nextToken() + arg.DefaultValue = p.parseValue() + } + arg.Directives = p.parseDirectives() + + args = append(args, arg) + } + + if !p.curTokenIs(token.PAREN_R) { + p.errors = append(p.errors, fmt.Sprintf("expected token ) but, got %s", p.curToken.Literal)) + return nil + } + p.nextToken() + + return args +} + +func (p *Parser) parseInterfaceTypeDefinition(description string) ast.Definition { + def := &ast.InterfaceTypeDefinition{ + Token: p.curToken, + Description: description, + } + p.nextToken() + + name, err := p.parseName() + if err != nil { + p.errors = append(p.errors, err.Error()) + return nil + } + def.Name = name + if p.curTokenIs(token.IMPLEMENTS) { + def.Interfaces = p.parseImplementsInterfaces() + } + + def.Directives = p.parseDirectives() + if p.curTokenIs(token.BRACE_L) { + def.Fields = p.parseFieldDefinitions() + } + + return def +} + +func (p *Parser) parseDescription() string { + if p.curTokenIs(token.STRING) { + val := unquoteGeneric(p.curToken.Literal) + p.nextToken() + return val + } + if p.curTokenIs(token.BLOCK_STRING) { + val := dedentBlockStringValue(p.curToken.Literal) + p.nextToken() + return val + } + return "" +} + +func (p *Parser) parseUnionTypeDefinition(description string) ast.Definition { + def := &ast.UnionTypeDefinition{ + Token: p.curToken, + 
Description: description, + } + p.nextToken() + + name, err := p.parseName() + if err != nil { + p.errors = append(p.errors, err.Error()) + return nil + } + def.Name = name + def.Directives = p.parseDirectives() + + if p.curTokenIs(token.EQUALS) { + p.nextToken() + def.Types = p.parseUnionMemberTypes() + } + + return def +} + +func (p *Parser) parseUnionMemberTypes() []*ast.NamedType { + if p.curTokenIs(token.PIPE) { + p.nextToken() + } + + var types []*ast.NamedType + for { + name, err := p.parseName() + if err != nil { + p.errors = append(p.errors, fmt.Sprintf("expected type name in union, got %s", p.curToken.Literal)) + p.nextToken() + return nil + } + + types = append(types, &ast.NamedType{ + Token: name.Token, + Name: name, + }) + + if !p.curTokenIs(token.PIPE) { + break + } + p.nextToken() + } + return types +} + +func (p *Parser) parseEnumTypeDefinition(description string) ast.Definition { + def := &ast.EnumTypeDefinition{ + Token: p.curToken, + Description: description, + } + p.nextToken() + + name, err := p.parseName() + if err != nil { + p.errors = append(p.errors, err.Error()) + return nil + } + def.Name = name + def.Directives = p.parseDirectives() + + if p.curTokenIs(token.BRACE_L) { + def.Values = p.parseEnumValueDefinitions() + } + + return def +} + +func (p *Parser) parseEnumValueDefinitions() []*ast.EnumValueDefinition { + p.nextToken() + + var values []*ast.EnumValueDefinition + for !p.curTokenIs(token.BRACE_R) && !p.curTokenIs(token.EOF) { + canEnumValidToken := p.curTokenIs(token.IDENT) || + p.isKeywordToken() || + p.curTokenIs(token.STRING) || + p.curTokenIs(token.BLOCK_STRING) + if canEnumValidToken { + values = append(values, p.parseEnumValueDefinition()) + } else { + p.errors = append(p.errors, fmt.Sprintf("unexpected token: %s", p.curToken.Literal)) + p.nextToken() + } + } + + if !p.curTokenIs(token.BRACE_R) { + p.errors = append(p.errors, fmt.Sprintf("expected token }, but got %s", p.curToken.Literal)) + return nil + } + p.nextToken() + + 
return values +} + +func (p *Parser) parseEnumValueDefinition() *ast.EnumValueDefinition { + description := p.parseDescription() + def := &ast.EnumValueDefinition{ + Token: p.curToken, + Description: description, + } + + name, err := p.parseName() + if err != nil { + p.errors = append(p.errors, err.Error()) + return nil + } + def.Name = name + def.Directives = p.parseDirectives() + + return def +} + +func (p *Parser) parseInputObjectTypeDefinition(description string) ast.Definition { + def := &ast.InputObjectTypeDefinition{ + Description: description, + Token: p.curToken, + } + p.nextToken() + + name, err := p.parseName() + if err != nil { + p.errors = append(p.errors, err.Error()) + } + def.Name = name + def.Directives = p.parseDirectives() + + if p.curTokenIs(token.BRACE_L) { + def.Fields = p.parseInputObjectFields() + } + + return def +} + +func (p *Parser) parseInputObjectFields() []*ast.InputValueDefinition { + p.nextToken() // skip '{' + + var fields []*ast.InputValueDefinition + for !p.curTokenIs(token.BRACE_R) && !p.curTokenIs(token.EOF) { + validInputObjectToken := p.curTokenIs(token.IDENT) || + p.isKeywordToken() || + p.curTokenIs(token.STRING) || + p.curTokenIs(token.BLOCK_STRING) + if validInputObjectToken { + fields = append(fields, p.parseInputObjectField()) + } else { + p.errors = append(p.errors, fmt.Sprintf("unexpected token in input object: %s", p.curToken.Literal)) + p.nextToken() + } + } + + if !p.curTokenIs(token.BRACE_R) { + p.errors = append(p.errors, fmt.Sprintf("expected token } but got %s", p.curToken.Literal)) + return nil + } + p.nextToken() // skip '}' + + return fields +} + +func (p *Parser) parseInputObjectField() *ast.InputValueDefinition { + description := p.parseDescription() + + def := &ast.InputValueDefinition{ + Description: description, + Token: p.curToken, + } + + name, err := p.parseName() + if err != nil { + p.errors = append(p.errors, err.Error()) + } + def.Name = name + + if !p.curTokenIs(token.COLON) { + p.errors = 
append(p.errors, fmt.Sprintf("expected token :, but got %s", p.curToken.Literal)) + return nil + } + p.nextToken() // skip ':' + + def.Type = p.parseType() + + if p.curTokenIs(token.EQUALS) { + p.nextToken() + def.DefaultValue = p.parseValue() + } + + def.Directives = p.parseDirectives() + + return def +} + +func (p *Parser) parseScalarTypeDefinition(description string) ast.Definition { + def := &ast.ScalarTypeDefinition{ + Description: description, + Token: p.curToken, + } + p.nextToken() // skip 'scalar' + + name, err := p.parseName() + if err != nil { + p.errors = append(p.errors, err.Error()) + } + def.Name = name + def.Directives = p.parseDirectives() + + return def +} + +func (p *Parser) parseDirectiveDefinition(description string) ast.Definition { + def := &ast.DirectiveDefinition{ + Description: description, + Token: p.curToken, + } + p.nextToken() // skip 'directive' + + if !p.curTokenIs(token.AT) { + p.errors = append(p.errors, fmt.Sprintf("expected token @, but got %s", p.curToken.Literal)) + return nil + } + p.nextToken() // skip '@' + + name, err := p.parseName() + if err != nil { + p.errors = append(p.errors, err.Error()) + } + def.Name = name + + if p.curTokenIs(token.PAREN_L) { + def.Arguments = p.parseInputValueDefinitions() + } + + if p.curTokenIs(token.IDENT) && p.curToken.Literal == "repeatable" { + def.Repeatable = true + p.nextToken() + } + + if !p.curTokenIs(token.ON) { + p.errors = append(p.errors, fmt.Sprintf("expected token on, but got %s", p.curToken.Literal)) + return nil + } + p.nextToken() // skip 'on' + + // Locations (FIELD | OBJECT | ...) 
+ def.Locations = p.parseDirectiveLocations() + + return def +} + +func (p *Parser) parseDirectiveLocations() []*ast.Name { + var locations []*ast.Name + + if p.curTokenIs(token.PIPE) { + p.nextToken() + } + + for { + name, err := p.parseName() + if err != nil { + p.errors = append(p.errors, err.Error()) + } + locations = append(locations, name) + + if !p.curTokenIs(token.PIPE) { + break + } + p.nextToken() // skip '|' + } + + return locations +} + +func (p *Parser) parseSchemaDefinition(description string) ast.Definition { + def := &ast.SchemaDefinition{ + Token: p.curToken, + Description: description, + } + p.nextToken() // skip 'schema' + + def.Directives = p.parseDirectives() + + if !p.curTokenIs(token.BRACE_L) { + p.errors = append(p.errors, fmt.Sprintf("expected token { but got %s", p.curToken.Literal)) + return nil + } + p.nextToken() // skip '{' + + // Root Operation Types (query: Query, etc.) + for !p.curTokenIs(token.BRACE_R) && !p.curTokenIs(token.EOF) { + if p.curTokenIs(token.IDENT) || p.isKeywordToken() { + def.OperationTypes = append(def.OperationTypes, p.parseOperationTypeDefinition()) + } + } + + if !p.curTokenIs(token.BRACE_R) { + p.errors = append(p.errors, fmt.Sprintf("expected token } but got %s", p.curToken.Literal)) + return nil + } + p.nextToken() // skip '}' + + return def +} + +func (p *Parser) parseOperationTypeDefinition() *ast.OperationTypeDefinition { + def := &ast.OperationTypeDefinition{} + + if p.curTokenIs(token.QUERY) || p.curTokenIs(token.MUTATION) || p.curTokenIs(token.SUBSCRIPTION) { + def.Operation = p.curToken.Type + p.nextToken() + } else if p.curTokenIs(token.IDENT) { + switch p.curToken.Literal { + case "query": + def.Operation = token.QUERY + case "mutation": + def.Operation = token.MUTATION + case "subscription": + def.Operation = token.SUBSCRIPTION + default: + p.errors = append(p.errors, fmt.Sprintf("unexpected operation type: %s", p.curToken.Literal)) + return nil + } + p.nextToken() + } else { + p.errors = 
append(p.errors, fmt.Sprintf("expected operation type, got %s", p.curToken.Literal)) + return nil + } + + if !p.curTokenIs(token.COLON) { + return nil + } + p.nextToken() + + t, ok := p.parseType().(*ast.NamedType) + if !ok { + p.errors = append(p.errors, "failed type assertion *ast.NamedType") + return nil + } + + def.Type = t + + return def +} + +func (p *Parser) parseExtendDefinition(description string) ast.Definition { + if description != "" { + p.errors = append(p.errors, "unexpected description before 'extend': extensions cannot have descriptions") + return nil + } + + t := p.curToken + p.nextToken() + + switch p.curToken.Type { + case token.SCHEMA: + return p.parseSchemaExtension(t) + case token.SCALAR: + return p.parseScalarTypeExtension(t) + case token.TYPE: + return p.parseObjectTypeExtension(t) + case token.INPUT: + return p.parseInputObjectTypeExtension(t) + case token.INTERFACE: + return p.parseInterfaceTypeExtension(t) + case token.UNION: + return p.parseUnionTypeExtension(t) + case token.ENUM: + return p.parseEnumTypeExtension(t) + } + + p.errors = append(p.errors, fmt.Sprintf("unexpected token after extend: %s", p.curToken.Literal)) + return nil +} + +func (p *Parser) parseSchemaExtension(startToken token.Token) ast.Definition { + ext := &ast.SchemaExtension{Token: startToken} + p.nextToken() // skip 'schema' + + ext.Directives = p.parseDirectives() + + if p.curTokenIs(token.BRACE_L) { + p.nextToken() // skip '{' + for !p.curTokenIs(token.BRACE_R) && !p.curTokenIs(token.EOF) { + if p.curTokenIs(token.IDENT) || p.isKeywordToken() { + ext.OperationTypes = append(ext.OperationTypes, p.parseOperationTypeDefinition()) + } else { + p.nextToken() + } + } + if !p.curTokenIs(token.BRACE_R) { + return nil + } + + p.nextToken() // skip '}' + } + + if len(ext.Directives) == 0 && len(ext.OperationTypes) == 0 { + p.errors = append(p.errors, "unexpected token: extend schema must have directives or operation types") + return nil + } + + return ext +} + +func (p 
*Parser) parseScalarTypeExtension(startToken token.Token) ast.Definition { + ext := &ast.ScalarTypeExtension{Token: startToken} + p.nextToken() // skip 'scalar' + + name, err := p.parseName() + if err != nil { + p.errors = append(p.errors, err.Error()) + return nil + } + + ext.Name = name + ext.Directives = p.parseDirectives() + + if len(ext.Directives) == 0 { + p.errors = append(p.errors, "unexpected token: extend scalar must have directives") + return nil + } + + return ext +} + +func (p *Parser) parseObjectTypeExtension(startToken token.Token) ast.Definition { + ext := &ast.ObjectTypeExtension{Token: startToken} + p.nextToken() // skip 'type' + + name, err := p.parseName() + if err != nil { + p.errors = append(p.errors, err.Error()) + return nil + } + + ext.Name = name + + if p.curTokenIs(token.IMPLEMENTS) { + ext.Interfaces = p.parseImplementsInterfaces() + } + + ext.Directives = p.parseDirectives() + + if p.curTokenIs(token.BRACE_L) { + ext.Fields = p.parseFieldDefinitions() + } + + if len(ext.Interfaces) == 0 && len(ext.Directives) == 0 && len(ext.Fields) == 0 { + p.errors = append(p.errors, "unexpected token: extend type must have implements, directives or fields") + return nil + } + + return ext +} + +func (p *Parser) parseInterfaceTypeExtension(startToken token.Token) ast.Definition { + ext := &ast.InterfaceTypeExtension{Token: startToken} + p.nextToken() // skip 'interface' + + name, err := p.parseName() + if err != nil { + p.errors = append(p.errors, err.Error()) + return nil + } + + ext.Name = name + + if p.curTokenIs(token.IMPLEMENTS) { + ext.Interfaces = p.parseImplementsInterfaces() + } + + ext.Directives = p.parseDirectives() + + if p.curTokenIs(token.BRACE_L) { + ext.Fields = p.parseFieldDefinitions() + } + + if len(ext.Interfaces) == 0 && len(ext.Directives) == 0 && len(ext.Fields) == 0 { + p.errors = append(p.errors, "unexpected token: extend interface must have implements, directives or fields") + return nil + } + + return ext +} + +// 
------------------------------------------------------------------------- +// 5. Union Extension +// extend union Name @directive = ... +// ------------------------------------------------------------------------- +func (p *Parser) parseUnionTypeExtension(startToken token.Token) ast.Definition { + ext := &ast.UnionTypeExtension{Token: startToken} + p.nextToken() // skip 'union' + + name, err := p.parseName() + if err != nil { + p.errors = append(p.errors, err.Error()) + return nil + } + + ext.Name = name + ext.Directives = p.parseDirectives() + + if p.curTokenIs(token.EQUALS) { + p.nextToken() + ext.Types = p.parseUnionMemberTypes() + } + + if len(ext.Directives) == 0 && len(ext.Types) == 0 { + p.errors = append(p.errors, "unexpected token: extend union must have directives or member types") + return nil + } + + return ext +} + +func (p *Parser) parseEnumTypeExtension(startToken token.Token) ast.Definition { + ext := &ast.EnumTypeExtension{Token: startToken} + p.nextToken() // skip 'enum' + + name, err := p.parseName() + if err != nil { + p.errors = append(p.errors, err.Error()) + return nil + } + + ext.Name = name + ext.Directives = p.parseDirectives() + + if p.curTokenIs(token.BRACE_L) { + ext.Values = p.parseEnumValueDefinitions() + } + + if len(ext.Directives) == 0 && len(ext.Values) == 0 { + p.errors = append(p.errors, "unexpected token: extend enum must have directives or values") + return nil + } + + return ext +} + +func (p *Parser) parseInputObjectTypeExtension(startToken token.Token) ast.Definition { + ext := &ast.InputObjectTypeExtension{Token: startToken} + p.nextToken() // skip 'input' + + name, err := p.parseName() + if err != nil { + p.errors = append(p.errors, err.Error()) + return nil + } + + ext.Name = name + ext.Directives = p.parseDirectives() + + if p.curTokenIs(token.BRACE_L) { + ext.Fields = p.parseInputObjectFields() + } + + if len(ext.Directives) == 0 && len(ext.Fields) == 0 { + p.errors = append(p.errors, "unexpected token: extend input 
must have directives or fields") + return nil + } + + return ext +} diff --git a/parser/schema_parser_test.go b/parser/schema_parser_test.go new file mode 100644 index 0000000..63db5ad --- /dev/null +++ b/parser/schema_parser_test.go @@ -0,0 +1,1416 @@ +package parser_test + +import ( + "testing" + + "github.com/google/go-cmp/cmp" + "github.com/google/go-cmp/cmp/cmpopts" + "github.com/n9te9/graphql-parser/ast" + "github.com/n9te9/graphql-parser/lexer" + "github.com/n9te9/graphql-parser/parser" + "github.com/n9te9/graphql-parser/token" +) + +func TestParseObjectTypeDefinition(t *testing.T) { + tests := []struct { + name string + input string + wantErr string + expect *ast.Document + }{ + { + name: "Simple Type Definition", + input: ` + type User { + id: ID! + username: String + } + `, + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.ObjectTypeDefinition{ + Name: &ast.Name{Value: "User"}, + Fields: []*ast.FieldDefinition{ + { + Name: &ast.Name{Value: "id"}, + Type: &ast.NonNullType{ + Type: &ast.NamedType{ + Name: &ast.Name{Value: "ID"}, + }, + }, + }, + { + Name: &ast.Name{ + Value: "username", + }, + Type: &ast.NamedType{ + Name: &ast.Name{Value: "String"}, + }, + }, + }, + }, + }, + }, + }, + { + name: "Type with Implements and Directives", + input: `type User implements Node & Entity @key(fields: "id") { id: ID! 
}`, + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.ObjectTypeDefinition{ + Name: &ast.Name{Value: "User"}, + Interfaces: []*ast.NamedType{ + { + Name: &ast.Name{Value: "Node"}, + }, + { + Name: &ast.Name{Value: "Entity"}, + }, + }, + Directives: []*ast.Directive{ + { + Name: "key", + Arguments: []*ast.Argument{ + { + Name: &ast.Name{Value: "fields"}, + Value: &ast.StringValue{Value: "id"}, + }, + }, + }, + }, + Fields: []*ast.FieldDefinition{ + { + Name: &ast.Name{Value: "id"}, + Type: &ast.NonNullType{ + Type: &ast.NamedType{ + Name: &ast.Name{Value: "ID"}, + }, + }, + }, + }, + }, + }, + }, + }, + { + name: "Field with Arguments", + input: `type Query { user(id: ID!): User }`, + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.ObjectTypeDefinition{ + Name: &ast.Name{Value: "Query"}, + Fields: []*ast.FieldDefinition{ + { + Name: &ast.Name{Value: "user"}, + Arguments: []*ast.InputValueDefinition{ + { + Name: &ast.Name{Value: "id"}, + Type: &ast.NonNullType{ + Type: &ast.NamedType{Name: &ast.Name{Value: "ID"}}, + }, + }, + }, + Type: &ast.NamedType{Name: &ast.Name{Value: "User"}}, + }, + }, + }, + }, + }, + }, + { + name: "Field Argument with Default Value", + input: `type Query { posts(active: Boolean = true): [Post] }`, + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.ObjectTypeDefinition{ + Name: &ast.Name{Value: "Query"}, + Fields: []*ast.FieldDefinition{ + { + Name: &ast.Name{Value: "posts"}, + Arguments: []*ast.InputValueDefinition{ + { + Name: &ast.Name{Value: "active"}, + Type: &ast.NamedType{Name: &ast.Name{Value: "Boolean"}}, + DefaultValue: &ast.BooleanValue{Value: true}, + }, + }, + Type: &ast.ListType{ + Type: &ast.NamedType{Name: &ast.Name{Value: "Post"}}, + }, + }, + }, + }, + }, + }, + }, + { + name: "Field and Argument Directives", + input: `type User { name: String @deprecated(reason: "old") }`, + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.ObjectTypeDefinition{ + Name: 
&ast.Name{Value: "User"}, + Fields: []*ast.FieldDefinition{ + { + Name: &ast.Name{Value: "name"}, + Type: &ast.NamedType{Name: &ast.Name{Value: "String"}}, + Directives: []*ast.Directive{ + { + Name: "deprecated", + Arguments: []*ast.Argument{ + { + Name: &ast.Name{Value: "reason"}, + Value: &ast.StringValue{Value: "old"}, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + l := lexer.New(tt.input) + p := parser.New(l) + got := p.ParseDocument() + + errors := p.Errors() + if tt.wantErr != "" { + if len(errors) == 0 { + t.Errorf("expected error %q, got none", tt.wantErr) + } + return + } + if len(errors) > 0 { + t.Fatalf("unexpected parser errors: %v", errors) + } + + opts := []cmp.Option{ + cmpopts.IgnoreTypes(token.Token{}), + } + if diff := cmp.Diff(tt.expect, got, opts...); diff != "" { + t.Errorf("mismatch (-want +got):\n%s", diff) + } + }) + } +} + +func TestParseObjectTypeDefinition_Strict(t *testing.T) { + tests := []struct { + name string + input string + wantErr string + expect *ast.Document + }{ + { + name: "Type with Description (Block String)", + input: ` + """ + Represents a User in the system. + Multi-line description. + """ + type User { + id: ID! + } + `, + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.ObjectTypeDefinition{ + Description: "Represents a User in the system.\nMulti-line description.", + Name: &ast.Name{Value: "User"}, + Fields: []*ast.FieldDefinition{ + { + Name: &ast.Name{Value: "id"}, + Type: &ast.NonNullType{ + Type: &ast.NamedType{Name: &ast.Name{Value: "ID"}}, + }, + }, + }, + }, + }, + }, + }, + { + name: "Fields and Args with Description (String Literal)", + input: ` + type Query { + "Fetch user by ID" + user( + "The ID of the user" + id: ID! 
+ ): User + } + `, + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.ObjectTypeDefinition{ + Name: &ast.Name{Value: "Query"}, + Fields: []*ast.FieldDefinition{ + { + Description: "Fetch user by ID", + Name: &ast.Name{Value: "user"}, + Arguments: []*ast.InputValueDefinition{ + { + Description: "The ID of the user", + Name: &ast.Name{Value: "id"}, + Type: &ast.NonNullType{ + Type: &ast.NamedType{Name: &ast.Name{Value: "ID"}}, + }, + }, + }, + Type: &ast.NamedType{Name: &ast.Name{Value: "User"}}, + }, + }, + }, + }, + }, + }, + { + name: "Description with Directives", + input: ` + "A deprecated type" + type OldType @deprecated { + val: String + } + `, + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.ObjectTypeDefinition{ + Description: "A deprecated type", + Name: &ast.Name{Value: "OldType"}, + Directives: []*ast.Directive{ + {Name: "deprecated"}, + }, + Fields: []*ast.FieldDefinition{ + { + Name: &ast.Name{Value: "val"}, + Type: &ast.NamedType{Name: &ast.Name{Value: "String"}}, + }, + }, + }, + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + l := lexer.New(tt.input) + p := parser.New(l) + got := p.ParseDocument() + + errors := p.Errors() + if tt.wantErr != "" { + if len(errors) == 0 { + t.Errorf("expected error %q, got none", tt.wantErr) + } + return + } + if len(errors) > 0 { + t.Fatalf("unexpected parser errors: %v", errors) + } + + opts := []cmp.Option{ + cmpopts.IgnoreTypes(token.Token{}), + } + if diff := cmp.Diff(tt.expect, got, opts...); diff != "" { + t.Errorf("mismatch (-want +got):\n%s", diff) + } + }) + } +} + +func TestParseInterfaceTypeDefinition(t *testing.T) { + tests := []struct { + name string + input string + wantErr string + expect *ast.Document + }{ + { + name: "Simple Interface", + input: `interface Node { id: ID! 
}`, + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.InterfaceTypeDefinition{ + Name: &ast.Name{Value: "Node"}, + Fields: []*ast.FieldDefinition{ + { + Name: &ast.Name{Value: "id"}, + Type: &ast.NonNullType{ + Type: &ast.NamedType{Name: &ast.Name{Value: "ID"}}, + }, + }, + }, + }, + }, + }, + }, + { + name: "Interface Implementing Interface", + input: `interface Resource implements Node { id: ID! url: String }`, + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.InterfaceTypeDefinition{ + Name: &ast.Name{Value: "Resource"}, + Interfaces: []*ast.NamedType{ + {Name: &ast.Name{Value: "Node"}}, + }, + Fields: []*ast.FieldDefinition{ + { + Name: &ast.Name{Value: "id"}, + Type: &ast.NonNullType{ + Type: &ast.NamedType{Name: &ast.Name{Value: "ID"}}, + }, + }, + { + Name: &ast.Name{Value: "url"}, + Type: &ast.NamedType{Name: &ast.Name{Value: "String"}}, + }, + }, + }, + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + l := lexer.New(tt.input) + p := parser.New(l) + got := p.ParseDocument() + + errors := p.Errors() + if tt.wantErr != "" { + if len(errors) == 0 { + t.Errorf("expected error %q, got none", tt.wantErr) + } + return + } + if len(errors) > 0 { + t.Fatalf("unexpected parser errors: %v", errors) + } + + opts := []cmp.Option{ + cmpopts.IgnoreTypes(token.Token{}), + } + if diff := cmp.Diff(tt.expect, got, opts...); diff != "" { + t.Errorf("mismatch (-want +got):\n%s", diff) + } + }) + } +} + +func TestParseInterfaceTypeDefinition_Strict(t *testing.T) { + tests := []struct { + name string + input string + wantErr string + expect *ast.Document + }{ + { + name: "Interface with Description (String Literal)", + input: ` + "Common fields for Node" + interface Node { + id: ID! 
+ } + `, + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.InterfaceTypeDefinition{ + Description: "Common fields for Node", + Name: &ast.Name{Value: "Node"}, + Fields: []*ast.FieldDefinition{ + { + Name: &ast.Name{Value: "id"}, + Type: &ast.NonNullType{ + Type: &ast.NamedType{Name: &ast.Name{Value: "ID"}}, + }, + }, + }, + }, + }, + }, + }, + { + name: "Interface with Block String Description and Directives", + input: ` + """ + An entity that has a URL. + Useful for Relay. + """ + interface Resource @deprecated(reason: "Use Node instead") { + url: String! + } + `, + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.InterfaceTypeDefinition{ + Description: "An entity that has a URL.\nUseful for Relay.", + Name: &ast.Name{Value: "Resource"}, + Directives: []*ast.Directive{ + { + Name: "deprecated", + Arguments: []*ast.Argument{ + { + Name: &ast.Name{Value: "reason"}, + Value: &ast.StringValue{Value: "Use Node instead"}, + }, + }, + }, + }, + Fields: []*ast.FieldDefinition{ + { + Name: &ast.Name{Value: "url"}, + Type: &ast.NonNullType{ + Type: &ast.NamedType{Name: &ast.Name{Value: "String"}}, + }, + }, + }, + }, + }, + }, + }, + { + name: "Interface with Field Descriptions", + input: ` + interface UserLike { + "The username" + username: String + } + `, + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.InterfaceTypeDefinition{ + Name: &ast.Name{Value: "UserLike"}, + Fields: []*ast.FieldDefinition{ + { + Description: "The username", + Name: &ast.Name{Value: "username"}, + Type: &ast.NamedType{Name: &ast.Name{Value: "String"}}, + }, + }, + }, + }, + }, + }, + { + name: "Type Implements with Leading Ampersand", + input: `type User implements & Node & Entity { id: ID }`, + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.ObjectTypeDefinition{ + Name: &ast.Name{Value: "User"}, + Interfaces: []*ast.NamedType{ + {Name: &ast.Name{Value: "Node"}}, + {Name: &ast.Name{Value: "Entity"}}, + }, + Fields: 
[]*ast.FieldDefinition{
	{
		Name: &ast.Name{Value: "id"},
		Type: &ast.NamedType{Name: &ast.Name{Value: "ID"}},
	},
},
},
},
},
}

	// Runner: parse each input, then either require an error (wantErr set)
	// or diff the produced AST against the expectation, ignoring token positions.
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			l := lexer.New(tt.input)
			p := parser.New(l)
			got := p.ParseDocument()

			errors := p.Errors()
			if tt.wantErr != "" {
				if len(errors) == 0 {
					t.Errorf("expected error %q, got none", tt.wantErr)
				}
				return
			}
			if len(errors) > 0 {
				t.Fatalf("unexpected parser errors: %v", errors)
			}

			opts := []cmp.Option{
				cmpopts.IgnoreTypes(token.Token{}),
			}
			if diff := cmp.Diff(tt.expect, got, opts...); diff != "" {
				t.Errorf("mismatch (-want +got):\n%s", diff)
			}
		})
	}
}

// TestParseUnionTypeDefinition covers happy-path union type definitions,
// including descriptions, directives, and the optional leading pipe.
func TestParseUnionTypeDefinition(t *testing.T) {
	tests := []struct {
		name    string
		input   string
		wantErr string
		expect  *ast.Document
	}{
		{
			name:  "Simple Union",
			input: `union SearchResult = Human | Droid | Starship`,
			expect: &ast.Document{
				Definitions: []ast.Definition{
					&ast.UnionTypeDefinition{
						Name: &ast.Name{Value: "SearchResult"},
						Types: []*ast.NamedType{
							{Name: &ast.Name{Value: "Human"}},
							{Name: &ast.Name{Value: "Droid"}},
							{Name: &ast.Name{Value: "Starship"}},
						},
					},
				},
			},
		},
		{
			name: "Union with Leading Pipe and Directives",
			input: `
			"Union description"
			union Result @deprecated = | User | Error
			`,
			expect: &ast.Document{
				Definitions: []ast.Definition{
					&ast.UnionTypeDefinition{
						Description: "Union description",
						Name:        &ast.Name{Value: "Result"},
						Directives: []*ast.Directive{
							{Name: "deprecated"},
						},
						Types: []*ast.NamedType{
							{Name: &ast.Name{Value: "User"}},
							{Name: &ast.Name{Value: "Error"}},
						},
					},
				},
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			l := lexer.New(tt.input)
			p := parser.New(l)
			got := p.ParseDocument()

			errors := p.Errors()
			if tt.wantErr != "" {
				if len(errors) == 0 {
					t.Errorf("expected error %q, got none", tt.wantErr)
				}
				return
			}
			if len(errors) > 0 {
				t.Fatalf("unexpected parser errors: %v", errors)
			}

			opts := []cmp.Option{
				cmpopts.IgnoreTypes(token.Token{}),
			}
			if diff := cmp.Diff(tt.expect, got, opts...); diff != "" {
				t.Errorf("mismatch (-want +got):\n%s", diff)
			}
		})
	}
}

// TestParseEnumTypeDefinition covers enum definitions, including per-value
// descriptions and directives.
func TestParseEnumTypeDefinition(t *testing.T) {
	tests := []struct {
		name    string
		input   string
		wantErr string
		expect  *ast.Document
	}{
		{
			name:  "Simple Enum",
			input: `enum Color { RED GREEN BLUE }`,
			expect: &ast.Document{
				Definitions: []ast.Definition{
					&ast.EnumTypeDefinition{
						Name: &ast.Name{Value: "Color"},
						Values: []*ast.EnumValueDefinition{
							{Name: &ast.Name{Value: "RED"}},
							{Name: &ast.Name{Value: "GREEN"}},
							{Name: &ast.Name{Value: "BLUE"}},
						},
					},
				},
			},
		},
		{
			name: "Enum with Descriptions and Directives",
			input: `
			"Episode enum"
			enum Episode {
				"Released in 1977"
				NEWHOPE @deprecated
				EMPIRE
				JEDI
			}
			`,
			expect: &ast.Document{
				Definitions: []ast.Definition{
					&ast.EnumTypeDefinition{
						Description: "Episode enum",
						Name:        &ast.Name{Value: "Episode"},
						Values: []*ast.EnumValueDefinition{
							{
								Description: "Released in 1977",
								Name:        &ast.Name{Value: "NEWHOPE"},
								Directives: []*ast.Directive{
									{Name: "deprecated"},
								},
							},
							{Name: &ast.Name{Value: "EMPIRE"}},
							{Name: &ast.Name{Value: "JEDI"}},
						},
					},
				},
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			l := lexer.New(tt.input)
			p := parser.New(l)
			got := p.ParseDocument()

			errors := p.Errors()
			if tt.wantErr != "" {
				if len(errors) == 0 {
					t.Errorf("expected error %q, got none", tt.wantErr)
				}
				return
			}
			if len(errors) > 0 {
				t.Fatalf("unexpected parser errors: %v", errors)
			}

			opts := []cmp.Option{
				cmpopts.IgnoreTypes(token.Token{}),
			}
			if diff := cmp.Diff(tt.expect, got, opts...); diff != "" {
				t.Errorf("mismatch (-want +got):\n%s", diff)
			}
		})
	}
}

// TestParseUnionTypeDefinition_Strict probes union edge cases: missing
// members, doubled pipes, and keywords reused as type names.
func TestParseUnionTypeDefinition_Strict(t *testing.T) {
	tests := []struct {
		name    string
		input   string
		wantErr string
		expect  *ast.Document
	}{
		{
			name:    "Union without members",
			input:   `union EmptyUnion =`,
			wantErr: "expected",
			expect:  nil,
		},
		{
			name:    "Union with double pipe",
			input:   `union BadUnion = User || Post`,
			wantErr: "expected",
			expect:  nil,
		},
		{
			// Keywords are valid names in member position per the GraphQL grammar.
			name:  "Union with keyword as type name",
			input: `union Result = type | query`,
			expect: &ast.Document{
				Definitions: []ast.Definition{
					&ast.UnionTypeDefinition{
						Name: &ast.Name{Value: "Result"},
						Types: []*ast.NamedType{
							{Name: &ast.Name{Value: "type"}},
							{Name: &ast.Name{Value: "query"}},
						},
					},
				},
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			l := lexer.New(tt.input)
			p := parser.New(l)
			got := p.ParseDocument()

			errors := p.Errors()
			if tt.wantErr != "" {
				if len(errors) == 0 {
					t.Errorf("expected error containing %q, got none", tt.wantErr)
				}
				return
			}
			if len(errors) > 0 {
				t.Fatalf("unexpected parser errors: %v", errors)
			}

			opts := []cmp.Option{
				cmpopts.IgnoreTypes(token.Token{}),
			}
			if diff := cmp.Diff(tt.expect, got, opts...); diff != "" {
				t.Errorf("mismatch (-want +got):\n%s", diff)
			}
		})
	}
}

// TestParseEnumTypeDefinition_Strict probes enum edge cases: the reserved
// values true/null must be rejected, while other keywords are legal.
func TestParseEnumTypeDefinition_Strict(t *testing.T) {
	tests := []struct {
		name    string
		input   string
		wantErr string
		expect  *ast.Document
	}{
		{
			name:    "Enum with boolean true",
			input:   `enum Bool { true }`,
			wantErr: "unexpected token",
			expect:  nil,
		},
		{
			name:    "Enum with null",
			input:   `enum Void { null }`,
			wantErr: "unexpected token",
			expect:  nil,
		},
		{
			name:  "Enum with keyword values",
			input: `enum Keywords { type query mutation on }`,
			expect: &ast.Document{
				Definitions: []ast.Definition{
					&ast.EnumTypeDefinition{
						Name: &ast.Name{Value: "Keywords"},
						Values: []*ast.EnumValueDefinition{
							{Name: &ast.Name{Value: "type"}},
							{Name: &ast.Name{Value: "query"}},
							{Name: &ast.Name{Value: "mutation"}},
							{Name: &ast.Name{Value: "on"}},
						},
					},
				},
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			l := lexer.New(tt.input)
			p := parser.New(l)
			got := p.ParseDocument()

			errors := p.Errors()
			if tt.wantErr != "" {
				if len(errors) == 0 {
					t.Errorf("expected error containing %q, got none", tt.wantErr)
				}
				return
			}
			if len(errors) > 0 {
				t.Fatalf("unexpected parser errors: %v", errors)
			}

			opts := []cmp.Option{
				cmpopts.IgnoreTypes(token.Token{}),
			}
			if diff := cmp.Diff(tt.expect, got, opts...); diff != "" {
				t.Errorf("mismatch (-want +got):\n%s", diff)
			}
		})
	}
}

// TestParseScalarTypeDefinition covers custom scalar definitions with and
// without descriptions and directives.
func TestParseScalarTypeDefinition(t *testing.T) {
	tests := []struct {
		name    string
		input   string
		wantErr string
		expect  *ast.Document
	}{
		{
			name:  "Simple Scalar",
			input: `scalar Date`,
			expect: &ast.Document{
				Definitions: []ast.Definition{
					&ast.ScalarTypeDefinition{
						Name: &ast.Name{Value: "Date"},
					},
				},
			},
		},
		{
			name: "Scalar with Description and Directive",
			input: `
			"ISO 8601 Date"
			scalar Date @specifiedBy(url: "https://tools.ietf.org/html/rfc3339")
			`,
			expect: &ast.Document{
				Definitions: []ast.Definition{
					&ast.ScalarTypeDefinition{
						Description: "ISO 8601 Date",
						Name:        &ast.Name{Value: "Date"},
						Directives: []*ast.Directive{
							{
								Name: "specifiedBy",
								Arguments: []*ast.Argument{
									{
										Name:  &ast.Name{Value: "url"},
										Value: &ast.StringValue{Value: "https://tools.ietf.org/html/rfc3339"},
									},
								},
							},
						},
					},
				},
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			l := lexer.New(tt.input)
			p := parser.New(l)
			got := p.ParseDocument()

			errors := p.Errors()
			if tt.wantErr != "" {
				if len(errors) == 0 {
					t.Errorf("expected error containing %q, got none", tt.wantErr)
				}
				return
			}
			if len(errors) > 0 {
				t.Fatalf("unexpected parser errors: %v", errors)
			}

			opts := []cmp.Option{
				cmpopts.IgnoreTypes(token.Token{}),
			}
			if diff
:= cmp.Diff(tt.expect, got, opts...); diff != "" {
				t.Errorf("mismatch (-want +got):\n%s", diff)
			}
		})
	}
}

// TestParseInputObjectTypeDefinition covers input object definitions,
// including default values and field-level directives.
func TestParseInputObjectTypeDefinition(t *testing.T) {
	tests := []struct {
		name    string
		input   string
		wantErr string
		expect  *ast.Document
	}{
		{
			name:  "Simple Input Object",
			input: `input UserInput { name: String! age: Int }`,
			expect: &ast.Document{
				Definitions: []ast.Definition{
					&ast.InputObjectTypeDefinition{
						Name: &ast.Name{Value: "UserInput"},
						Fields: []*ast.InputValueDefinition{
							{
								Name: &ast.Name{Value: "name"},
								Type: &ast.NonNullType{
									Type: &ast.NamedType{Name: &ast.Name{Value: "String"}},
								},
							},
							{
								Name: &ast.Name{Value: "age"},
								Type: &ast.NamedType{Name: &ast.Name{Value: "Int"}},
							},
						},
					},
				},
			},
		},
		{
			name: "Input Object with Description, Defaults and Directives",
			input: `
			"Input for creating a user"
			input CreateUserInput @validate {
				"User name"
				name: String! = "Guest" @length(max: 50)
				role: Role = USER
			}
			`,
			expect: &ast.Document{
				Definitions: []ast.Definition{
					&ast.InputObjectTypeDefinition{
						Description: "Input for creating a user",
						Name:        &ast.Name{Value: "CreateUserInput"},
						Directives: []*ast.Directive{
							{Name: "validate"},
						},
						Fields: []*ast.InputValueDefinition{
							{
								Description: "User name",
								Name:        &ast.Name{Value: "name"},
								Type: &ast.NonNullType{
									Type: &ast.NamedType{Name: &ast.Name{Value: "String"}},
								},
								DefaultValue: &ast.StringValue{Value: "Guest"},
								Directives: []*ast.Directive{
									{
										Name: "length",
										Arguments: []*ast.Argument{
											{Name: &ast.Name{Value: "max"}, Value: &ast.IntValue{Value: 50}},
										},
									},
								},
							},
							{
								Name:         &ast.Name{Value: "role"},
								Type:         &ast.NamedType{Name: &ast.Name{Value: "Role"}},
								DefaultValue: &ast.EnumValue{Value: "USER"},
							},
						},
					},
				},
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			l := lexer.New(tt.input)
			p := parser.New(l)
			got := p.ParseDocument()

			errors := p.Errors()
			if tt.wantErr != "" {
				if len(errors) == 0 {
					t.Errorf("expected error containing %q, got none", tt.wantErr)
				}
				return
			}
			if len(errors) > 0 {
				t.Fatalf("unexpected parser errors: %v", errors)
			}

			opts := []cmp.Option{
				cmpopts.IgnoreTypes(token.Token{}),
			}
			if diff := cmp.Diff(tt.expect, got, opts...); diff != "" {
				t.Errorf("mismatch (-want +got):\n%s", diff)
			}
		})
	}
}

// TestParseDirectiveDefinition covers directive definitions, including
// arguments with defaults, the repeatable modifier, and multiple locations.
func TestParseDirectiveDefinition(t *testing.T) {
	tests := []struct {
		name    string
		input   string
		wantErr string
		expect  *ast.Document
	}{
		{
			name:  "Simple Directive Definition",
			input: `directive @delegate on FIELD_DEFINITION`,
			expect: &ast.Document{
				Definitions: []ast.Definition{
					&ast.DirectiveDefinition{
						Name: &ast.Name{Value: "delegate"},
						Locations: []*ast.Name{
							{Value: "FIELD_DEFINITION"},
						},
					},
				},
			},
		},
		{
			name: "Complex Directive Definition",
			input: `
			"Make a directive repeatable"
			directive @auth(role: String = "USER") repeatable on FIELD | OBJECT
			`,
			expect: &ast.Document{
				Definitions: []ast.Definition{
					&ast.DirectiveDefinition{
						Description: "Make a directive repeatable",
						Name:        &ast.Name{Value: "auth"},
						Arguments: []*ast.InputValueDefinition{
							{
								Name:         &ast.Name{Value: "role"},
								Type:         &ast.NamedType{Name: &ast.Name{Value: "String"}},
								DefaultValue: &ast.StringValue{Value: "USER"},
							},
						},
						Repeatable: true,
						Locations: []*ast.Name{
							{Value: "FIELD"},
							{Value: "OBJECT"},
						},
					},
				},
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			l := lexer.New(tt.input)
			p := parser.New(l)
			got := p.ParseDocument()

			errors := p.Errors()
			if tt.wantErr != "" {
				if len(errors) == 0 {
					t.Errorf("expected error containing %q, got none", tt.wantErr)
				}
				return
			}
			if len(errors) > 0 {
				t.Fatalf("unexpected parser errors: %v", errors)
			}

			opts := []cmp.Option{
				cmpopts.IgnoreTypes(token.Token{}),
			}
			if diff := cmp.Diff(tt.expect, got, opts...); diff != "" {
				t.Errorf("mismatch (-want +got):\n%s", diff)
			}
		})
	}
}

// TestParseSchemaDefinition covers schema definitions: root operation types,
// schema-level directives, and descriptions.
func TestParseSchemaDefinition(t *testing.T) {
	tests := []struct {
		name    string
		input   string
		wantErr string
		expect  *ast.Document
	}{
		{
			name:  "Simple Schema",
			input: `schema { query: MyQuery }`,
			expect: &ast.Document{
				Definitions: []ast.Definition{
					&ast.SchemaDefinition{
						OperationTypes: []*ast.OperationTypeDefinition{
							{
								Operation: token.QUERY,
								Type:      &ast.NamedType{Name: &ast.Name{Value: "MyQuery"}},
							},
						},
					},
				},
			},
		},
		{
			name: "Full Schema with Directives",
			input: `
			schema @link(url: "https://specs.apollo.dev/federation/v2.0") {
				query: RootQuery
				mutation: RootMutation
				subscription: RootSubscription
			}
			`,
			expect: &ast.Document{
				Definitions: []ast.Definition{
					&ast.SchemaDefinition{
						Directives: []*ast.Directive{
							{
								Name: "link",
								Arguments: []*ast.Argument{
									{Name: &ast.Name{Value: "url"}, Value: &ast.StringValue{Value: "https://specs.apollo.dev/federation/v2.0"}},
								},
							},
						},
						OperationTypes: []*ast.OperationTypeDefinition{
							{
								Operation: token.QUERY,
								Type:      &ast.NamedType{Name: &ast.Name{Value: "RootQuery"}},
							},
							{
								Operation: token.MUTATION,
								Type:      &ast.NamedType{Name: &ast.Name{Value: "RootMutation"}},
							},
							{
								Operation: token.SUBSCRIPTION,
								Type:      &ast.NamedType{Name: &ast.Name{Value: "RootSubscription"}},
							},
						},
					},
				},
			},
		},
		{
			name: "Schema Definition with Description",
			input: `
			"The Root Schema"
			schema { query: Query }
			`,
			expect: &ast.Document{
				Definitions: []ast.Definition{
					&ast.SchemaDefinition{
						Description: "The Root Schema",
						OperationTypes: []*ast.OperationTypeDefinition{
							{
								Operation: token.QUERY,
								Type:      &ast.NamedType{Name: &ast.Name{Value: "Query"}},
							},
						},
					},
				},
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			l := lexer.New(tt.input)
			p := parser.New(l)
			got := p.ParseDocument()

			errors := p.Errors()
+ if tt.wantErr != "" { + if len(errors) == 0 { + t.Errorf("expected error containing %q, got none", tt.wantErr) + } + return + } + if len(errors) > 0 { + t.Fatalf("unexpected parser errors: %v", errors) + } + + opts := []cmp.Option{ + cmpopts.IgnoreTypes(token.Token{}), + } + if diff := cmp.Diff(tt.expect, got, opts...); diff != "" { + t.Errorf("mismatch (-want +got):\n%s", diff) + } + }) + } +} + +func TestParseExtension(t *testing.T) { + tests := []struct { + name string + input string + wantErr string + expect *ast.Document + }{ + { + name: "Extend Schema", + input: `extend schema @link(url: "spec") { mutation: MyMutation }`, + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.SchemaExtension{ + Directives: []*ast.Directive{ + { + Name: "link", + Arguments: []*ast.Argument{ + {Name: &ast.Name{Value: "url"}, Value: &ast.StringValue{Value: "spec"}}, + }, + }, + }, + OperationTypes: []*ast.OperationTypeDefinition{ + { + Operation: token.MUTATION, + Type: &ast.NamedType{Name: &ast.Name{Value: "MyMutation"}}, + }, + }, + }, + }, + }, + }, + { + name: "Extend Type with Fields", + input: `extend type User { age: Int }`, + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.ObjectTypeExtension{ + Name: &ast.Name{Value: "User"}, + Fields: []*ast.FieldDefinition{ + { + Name: &ast.Name{Value: "age"}, + Type: &ast.NamedType{Name: &ast.Name{Value: "Int"}}, + }, + }, + }, + }, + }, + }, + { + name: "Extend Type with Interfaces and Directives", + input: `extend type User implements Node @key(fields: "id")`, + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.ObjectTypeExtension{ + Name: &ast.Name{Value: "User"}, + Interfaces: []*ast.NamedType{ + {Name: &ast.Name{Value: "Node"}}, + }, + Directives: []*ast.Directive{ + { + Name: "key", + Arguments: []*ast.Argument{ + {Name: &ast.Name{Value: "fields"}, Value: &ast.StringValue{Value: "id"}}, + }, + }, + }, + }, + }, + }, + }, + { + name: "Extend Interface", + input: `extend interface Node 
@deprecated { createdAt: String }`, + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.InterfaceTypeExtension{ + Name: &ast.Name{Value: "Node"}, + Directives: []*ast.Directive{ + {Name: "deprecated"}, + }, + Fields: []*ast.FieldDefinition{ + { + Name: &ast.Name{Value: "createdAt"}, + Type: &ast.NamedType{Name: &ast.Name{Value: "String"}}, + }, + }, + }, + }, + }, + }, + { + name: "Extend Union", + input: `extend union Result = Error | Warning`, + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.UnionTypeExtension{ + Name: &ast.Name{Value: "Result"}, + Types: []*ast.NamedType{ + {Name: &ast.Name{Value: "Error"}}, + {Name: &ast.Name{Value: "Warning"}}, + }, + }, + }, + }, + }, + { + name: "Extend Enum", + input: `extend enum Color { YELLOW }`, + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.EnumTypeExtension{ + Name: &ast.Name{Value: "Color"}, + Values: []*ast.EnumValueDefinition{ + {Name: &ast.Name{Value: "YELLOW"}}, + }, + }, + }, + }, + }, + { + name: "Extend Scalar", + input: `extend scalar JSON @specifiedBy(url: "json")`, + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.ScalarTypeExtension{ + Name: &ast.Name{Value: "JSON"}, + Directives: []*ast.Directive{ + { + Name: "specifiedBy", + Arguments: []*ast.Argument{ + {Name: &ast.Name{Value: "url"}, Value: &ast.StringValue{Value: "json"}}, + }, + }, + }, + }, + }, + }, + }, + { + name: "Extend Input", + input: `extend input Filter { limit: Int }`, + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.InputObjectTypeExtension{ + Name: &ast.Name{Value: "Filter"}, + Fields: []*ast.InputValueDefinition{ + { + Name: &ast.Name{Value: "limit"}, + Type: &ast.NamedType{Name: &ast.Name{Value: "Int"}}, + }, + }, + }, + }, + }, + }, + { + name: "Extend Interface with Implements", + input: `extend interface Node implements Entity { createdAt: String }`, + expect: &ast.Document{ + Definitions: []ast.Definition{ + &ast.InterfaceTypeExtension{ + Name: 
&ast.Name{Value: "Node"}, + Interfaces: []*ast.NamedType{ + {Name: &ast.Name{Value: "Entity"}}, + }, + Fields: []*ast.FieldDefinition{ + { + Name: &ast.Name{Value: "createdAt"}, + Type: &ast.NamedType{Name: &ast.Name{Value: "String"}}, + }, + }, + }, + }, + }, + }, + { + name: "Invalid: Extension with Description", + input: `"Do not describe extensions" extend type User { age: Int }`, + wantErr: "unexpected description", + expect: nil, + }, + { + name: "Invalid: Extend Type without anything", + input: `extend type User`, + wantErr: "unexpected", + expect: nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + l := lexer.New(tt.input) + p := parser.New(l) + got := p.ParseDocument() + + errors := p.Errors() + if tt.wantErr != "" { + if len(errors) == 0 { + t.Errorf("expected error containing %q, got none", tt.wantErr) + } + return + } + if len(errors) > 0 { + t.Fatalf("unexpected parser errors: %v", errors) + } + + opts := []cmp.Option{ + cmpopts.IgnoreTypes(token.Token{}), + } + if diff := cmp.Diff(tt.expect, got, opts...); diff != "" { + t.Errorf("mismatch (-want +got):\n%s", diff) + } + }) + } +} diff --git a/token/token.go b/token/token.go new file mode 100644 index 0000000..9582230 --- /dev/null +++ b/token/token.go @@ -0,0 +1,148 @@ +package token + +import "strconv" + +type TokenType int + +const ( + ILLEGAL TokenType = iota + EOF + + IDENT + INT + FLOAT + STRING + BLOCK_STRING + + BANG // ! + DOLLAR // $ + PAREN_L // ( + PAREN_R // ) + SPREAD // ... 
+ COLON // : + EQUALS // = + AT // @ + BRACKET_L // [ + BRACKET_R // ] + BRACE_L // { + PIPE // | + BRACE_R // } + AMP // & + COMMA // , + + NAME + QUERY // query + MUTATION // mutation + SUBSCRIPTION // subscription + FRAGMENT // fragment + TYPE // type + INPUT // input + ENUM // enum + UNION // union + INTERFACE // interface + SCALAR // scalar + DIRECTIVE // directive + EXTEND // extend + SCHEMA // schema + IMPLEMENTS // implements + ON // on + TRUE // true + FALSE // false + NULL // null +) + +var tokens = map[TokenType]string{ + ILLEGAL: "ILLEGAL", + EOF: "EOF", + IDENT: "IDENT", + INT: "INT", + FLOAT: "FLOAT", + STRING: "STRING", + BLOCK_STRING: "BLOCK_STRING", + + BANG: "!", + DOLLAR: "$", + PAREN_L: "(", + PAREN_R: ")", + SPREAD: "...", + COLON: ":", + EQUALS: "=", + AT: "@", + BRACKET_L: "[", + BRACKET_R: "]", + BRACE_L: "{", + PIPE: "|", + BRACE_R: "}", + AMP: "&", + + NAME: "NAME", + QUERY: "query", + MUTATION: "mutation", + SUBSCRIPTION: "subscription", + FRAGMENT: "fragment", + TYPE: "type", + INPUT: "input", + ENUM: "enum", + UNION: "union", + INTERFACE: "interface", + SCALAR: "scalar", + DIRECTIVE: "directive", + EXTEND: "extend", + SCHEMA: "schema", + IMPLEMENTS: "implements", + ON: "on", + TRUE: "true", + FALSE: "false", + NULL: "null", +} + +var keywords = map[string]TokenType{ + "query": QUERY, + "mutation": MUTATION, + "subscription": SUBSCRIPTION, + "fragment": FRAGMENT, + "type": TYPE, + "input": INPUT, + "enum": ENUM, + "union": UNION, + "interface": INTERFACE, + "scalar": SCALAR, + "directive": DIRECTIVE, + "extend": EXTEND, + "schema": SCHEMA, + "implements": IMPLEMENTS, + "on": ON, + "true": TRUE, + "false": FALSE, + "null": NULL, +} + +func (t TokenType) String() string { + if s, ok := tokens[t]; ok { + return s + } + return "token(" + strconv.Itoa(int(t)) + ")" +} + +func LookupIdent(ident string) TokenType { + if tok, ok := keywords[ident]; ok { + return tok + } + return IDENT +} + +type Token struct { + Type TokenType + Literal string + 
Line int + Start int + End int +} + +func New(tokenType TokenType, ch byte, line int) Token { + return Token{ + Type: tokenType, + Literal: string(ch), + Line: line, + } +}