Try testing with RecursiveDescentParser as default; improve handling for keywords used as names; improve error messages
rmosolgo committed Dec 9, 2023
1 parent 4f6e18b commit 15e70b8
Showing 3 changed files with 144 additions and 36 deletions.
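Editor's note: a rough sketch (not part of the commit) of the user-facing effect, assuming `GraphQL.parse` delegates to the configured default parser:

```ruby
require "graphql"

# After this commit the hand-written parser is the default:
GraphQL.default_parser # => GraphQL::Language::RecursiveDescentParser

# Parsing goes through it transparently and still returns an AST document:
doc = GraphQL.parse("{ viewer { name } }")
doc.class # => GraphQL::Language::Nodes::Document
```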
2 changes: 1 addition & 1 deletion lib/graphql.rb
@@ -33,7 +33,7 @@ class RequiredImplementationMissingError < Error

class << self
def default_parser
@default_parser ||= GraphQL::Language::Parser
@default_parser ||= GraphQL::Language::RecursiveDescentParser
end

attr_writer :default_parser
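Since the writer stays in place, applications can presumably opt back into the racc-based parser while the new default is being tried out; a minimal sketch (not part of the diff):

```ruby
require "graphql"

GraphQL.default_parser = GraphQL::Language::Parser
```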
161 changes: 130 additions & 31 deletions lib/graphql/language/recursive_descent_parser.rb
@@ -9,17 +9,26 @@ class RecursiveDescentParser
include GraphQL::Language::Nodes
include EmptyObjects

def self.parse(graphql_str)
self.new(graphql_str).parse
def self.parse(graphql_str, filename: nil, trace: Tracing::NullTrace)
self.new(graphql_str, filename: filename, trace: trace).parse
end

def initialize(graphql_str)
def initialize(graphql_str, filename: nil, trace: Tracing::NullTrace)
if graphql_str.nil?
raise GraphQL::ParseError.new("No query string was present", nil, nil, nil)
end
@lexer = Lexer.new(graphql_str)
@graphql_str = graphql_str
@filename = filename
@trace = trace
end

def parse
@document ||= document
@document ||= begin
@trace.parse(query_string: @graphql_str) do
document
end
end
end

private
@@ -48,8 +57,9 @@ def definition
when :FRAGMENT
loc = pos
expect_token :FRAGMENT
expect_token(:IDENTIFIER) if at?(:ON)
f_name = parse_name
f_name = if !at?(:ON)
parse_name
end
expect_token :ON
f_type = parse_type_name
directives = parse_directives
@@ -82,7 +92,8 @@ def definition
expect_token(:COLON)
var_type = self.type
default_value = if at?(:EQUALS)
self.default_value
advance_token
value
end

defs << Nodes::VariableDefinition.new(pos: loc, name: var_name, type: var_type, default_value: default_value)
@@ -151,7 +162,7 @@ def definition
input_fields_definition = parse_input_object_field_definitions
InputObjectTypeExtension.new(pos: loc, name: name, directives: directives, fields: input_fields_definition)
else
expect_token :SOME_TYPE_EXTENSION
expect_one_of([:SCALAR, :TYPE, :ENUM, :INPUT, :UNION, :INTERFACE])
end
else
desc = at?(:STRING) ? string_value : nil
@@ -177,7 +188,7 @@ def definition
expect_token(:COLON)
subscription = parse_name
else
expect_token(:SOME_ROOT_TYPE_NAME)
expect_one_of([:QUERY, :MUTATION, :SUBSCRIPTION])
end
end
expect_token :RCURLY
@@ -188,13 +199,18 @@ def definition
expect_token :DIR_SIGN
name = parse_name
arguments_definition = parse_argument_definitions
repeatable = if at?(:REPEATABLE)
advance_token
true
else
false
end
expect_token :ON
directive_locations = [DirectiveLocation.new(pos: pos, name: parse_name)]
while at?(:PIPE)
advance_token
directive_locations << DirectiveLocation.new(pos: pos, name: parse_name)
end
repeatable = false # TODO parse this
DirectiveDefinition.new(pos: loc, description: desc, name: name, arguments: arguments_definition, locations: directive_locations, repeatable: repeatable)
when :TYPE
loc = pos
@@ -241,7 +257,7 @@ def definition
input_fields_definition = parse_input_object_field_definitions
InputObjectTypeDefinition.new(pos: loc, description: desc, name: name, directives: directives, fields: input_fields_definition)
else
expect_token(:SOME_ROOT_DEFINITION)
expect_one_of([:SCALAR, :TYPE, :ENUM, :INPUT, :UNION, :INTERFACE])
end
end
end
@@ -294,8 +310,8 @@ def parse_union_members

def parse_implements
if at?(:IMPLEMENTS)
expect_token :IMPLEMENTS
list = [parse_type_name]
advance_token
list = []
while true
advance_token if at?(:AMP)
break unless at?(:IDENTIFIER)
@@ -327,7 +343,7 @@ def parse_field_definitions

def parse_argument_definitions
if at?(:LPAREN)
expect_token :LPAREN
advance_token
list = []
while !at?(:RPAREN)
list << parse_input_value_definition
@@ -346,7 +362,7 @@ def parse_input_value_definition
expect_token :COLON
type = self.type
default_value = if at?(:EQUALS)
expect_token(:EQUALS)
advance_token
value
else
nil
@@ -411,17 +427,15 @@ def selection_set
directives = parse_directives

Nodes::InlineFragment.new(pos: loc, type: if_type, directives: directives, selections: selection_set)
when :IDENTIFIER
else
loc = pos
name = parse_name
name = parse_name_without_on
directives = parse_directives

# Can this ever happen?
# expect_token(:IDENTIFIER) if at?(:ON)

FragmentSpread.new(pos: loc, name: name, directives: directives)
else
expect_token(:FRAGMENT_SPREAD)
end
else
loc = pos
@@ -431,7 +445,7 @@ def selection_set

if at?(:COLON)
advance_token
field_alias = parse_name
field_alias = name
name = parse_name
end

@@ -450,17 +464,76 @@ def parse_name
case token_name
when :IDENTIFIER
expect_token_value(:IDENTIFIER)
when :SCHEMA
advance_token
"schema"
when :SCALAR
advance_token
"scalar"
when :IMPLEMENTS
advance_token
"implements"
when :INTERFACE
advance_token
"interface"
when :UNION
advance_token
"union"
when :ENUM
advance_token
"enum"
when :INPUT
advance_token
"input"
when :DIRECTIVE
advance_token
"directive"
when :TYPE
advance_token
"type"
when :QUERY
advance_token
"query"
when :MUTATION
advance_token
"mutation"
when :SUBSCRIPTION
advance_token
"subscription"
when :TRUE
advance_token
"true"
when :FALSE
advance_token
"false"
when :FRAGMENT
advance_token
"fragment"
when :REPEATABLE
advance_token
"repeatable"
when :NULL
advance_token
"null"
else
expect_token(:IDENTIFIER)
end
end

def parse_name_without_on
if at?(:ON)
expect_token(:IDENTIFIER)
else
parse_name
end
end

# Any identifier, but not true, false, or null
def parse_enum_name
if at?(:TRUE) || at?(:FALSE) || at?(:NULL)
expect_token(:IDENTIFIER)
else
parse_name
end
end
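Editor's illustration (not part of the commit) of what these name helpers allow and forbid, assuming `GraphQL.parse` routes through this parser:

```ruby
require "graphql"

# Keywords are ordinary names in GraphQL, so they parse as field, argument, and type names:
GraphQL.parse("{ type(input: 1) query mutation }")
GraphQL.parse("type query { implements: Int }")

# "... on" always introduces an inline fragment's type condition; a fragment
# spread's name can never be "on":
GraphQL.parse("{ ... on User { name } ...userFields }")

# true/false/null are rejected as enum values:
GraphQL.parse("enum Mood { HAPPY null }") # raises GraphQL::ParseError
```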

@@ -473,7 +546,7 @@ def parse_directives
dirs = []
while at?(:DIR_SIGN)
loc = pos
expect_token(:DIR_SIGN)
advance_token
name = parse_name
arguments = parse_arguments

@@ -562,11 +635,27 @@ def at?(expected_token_name)

def expect_token(expected_token_name)
unless @token_name == expected_token_name
raise "TODO nice error for Expected token #{expected_token_name}, actual: #{token_name.inspect} #{@lexer.token_value} line: #{@lexer.line} / pos: #{@lexer.pos}"
raise_parse_error("Expected #{expected_token_name}, actual: #{token_name} (#{@lexer.token_value.inspect})")
end
advance_token
end

def expect_one_of(token_names)
raise_parse_error("Expected one of #{token_names.join(", ")}, actual: #{token_name} (#{@lexer.token_value.inspect})")
end

def raise_parse_error(message)
message += " at [#{@lexer.line_number}, #{@lexer.column_number}]"
raise GraphQL::ParseError.new(
message,
@lexer.line_number,
@lexer.column_number,
@graphql_str,
filename: @filename,
)
end
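A hedged sketch (editor's note, not in the commit) of how these errors surface to callers; the exact message text is illustrative only, and the `line`/`col` readers are assumed to match `GraphQL::ParseError`'s existing interface:

```ruby
require "graphql"

begin
  GraphQL::Language::RecursiveDescentParser.parse("query { items( }")
rescue GraphQL::ParseError => err
  err.message # names the expected and actual tokens and appends "[line, column]"
  err.line    # => 1
  err.col     # => column of the offending token
end
```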

# Only use when we care about the expected token's value
def expect_token_value(tok)
token_value = @lexer.token_value
Expand Down Expand Up @@ -624,15 +713,15 @@ def advance
@scanner[1] ? :FLOAT : :INT
when ByteFor::ELLIPSIS
if @string.getbyte(@pos + 1) != 46 || @string.getbyte(@pos + 2) != 46
raise "TODO raise a nice error for a malformed ellipsis"
raise_parse_error("Expected `...`, actual: #{@string[@pos..@pos + 2].inspect}")
end
@scanner.pos += 3
:ELLIPSIS
when ByteFor::STRING
if @scanner.skip(BLOCK_STRING_REGEXP) || @scanner.skip(QUOTED_STRING_REGEXP)
:STRING
else
raise "TODO Raise a nice error for a badly-formatted string"
raise_parse_error("Expected string or block string, but it was malformed")
end
else
@scanner.pos += 1
Expand All @@ -643,7 +732,7 @@ def advance
def token_value
@string.byteslice(@scanner.pos - @scanner.matched_size, @scanner.matched_size)
rescue StandardError => err
"(token_value failed: #{err.class}: #{err.message})"
raise GraphQL::Error, "(token_value failed: #{err.class}: #{err.message})"
end

def string_value
@@ -656,23 +745,33 @@ def string_value
end

if !str.valid_encoding? || !str.match?(Language::Lexer::VALID_STRING)
raise "TODO Bad Unicode escape"
raise_parse_error("Bad unicode escape in #{str.inspect}")
else
GraphQL::Language::Lexer.replace_escaped_characters_in_place(str)

if !str.valid_encoding?
raise "TODO Bad Unicode escape"
raise_parse_error("Bad unicode escape in #{str.inspect}")
else
str
end
end
end

def line
def line_number
@scanner.string[0, @scanner.pos].count("\n") + 1
end

IGNORE_REGEXP = /[, \c\r\n\t]+/
def column_number
@scanner.string[0, @scanner.pos].split("\n").last.length - token_value.length + 1
end

# IGNORE_REGEXP = /[, \c\r\n\t]+/
IGNORE_REGEXP = %r{
(?:
[, \c\r\n\t]+ |
\#.*$
)*
}x
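The updated ignore pattern skips commas, whitespace, and `#` comments between tokens in a single pass; a small sketch (not part of the diff) of input that lexes cleanly under it:

```ruby
require "graphql"

GraphQL.parse(<<~GRAPHQL)
  {
    # comments and commas are "ignored tokens" in GraphQL
    name, id   # so both of these lines parse
  }
GRAPHQL
```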
IDENTIFIER_REGEXP = /[_A-Za-z][_0-9A-Za-z]*/
INT_REGEXP = /[-]?(?:[0]|[1-9][0-9]*)/
FLOAT_DECIMAL_REGEXP = /[.][0-9]+/
17 changes: 13 additions & 4 deletions spec/graphql/language/parser_spec.rb
@@ -355,12 +355,21 @@
query = GraphQL::Query.new(schema, "{ t: __typename }")
subject.parse("{ t: __typename }", trace: query.current_trace)
traces = TestTracing.traces
assert_equal 2, traces.length
expected_traces = if GraphQL.default_parser == GraphQL::Language::RecursiveDescentParser
1
else
2
end
assert_equal expected_traces, traces.length
lex_trace, parse_trace = traces

assert_equal "{ t: __typename }", lex_trace[:query_string]
assert_equal "lex", lex_trace[:key]
assert_instance_of Array, lex_trace[:result]
if GraphQL.default_parser == GraphQL::Language::Parser
assert_equal "{ t: __typename }", lex_trace[:query_string]
assert_equal "lex", lex_trace[:key]
assert_instance_of Array, lex_trace[:result]
else
parse_trace = lex_trace
end

assert_equal "{ t: __typename }", parse_trace[:query_string]
assert_equal "parse", parse_trace[:key]
