Remove custom tokenizer.
Eric-Vin committed Nov 9, 2023
1 parent 2265364 · commit 7394bed
Showing 2 changed files with 2 additions and 513 deletions.

src/scenic/syntax/scenic.gram (6 changes: 2 additions & 4 deletions)

@@ -14,8 +14,6 @@
 from typing import (
     Any, Callable, Iterator, List, Literal, NoReturn, Sequence, Tuple, TypeVar, Union
 )
 
-import scenic.syntax.tokenize as tokenize
-from scenic.syntax.tokenize import generate_tokens
 from pegen.tokenizer import Tokenizer
 
 import scenic.syntax.ast as s

@@ -71,7 +69,7 @@ def parse_file(
     tok_stream = (
         token_stream_factory(f.readline)
         if token_stream_factory else
-        generate_tokens(f.readline)
+        tokenize.generate_tokens(f.readline)
     )
     tokenizer = Tokenizer(tok_stream, verbose=verbose, path=path)
     parser = ScenicParser(

@@ -97,7 +95,7 @@ def parse_string(
     tok_stream = (
         token_stream_factory(io.StringIO(source).readline)
         if token_stream_factory else
-        generate_tokens(io.StringIO(source).readline)
+        tokenize.generate_tokens(io.StringIO(source).readline)
     )
     tokenizer = Tokenizer(tok_stream, verbose=verbose)
     parser = ScenicParser(tokenizer, verbose=verbose, py_version=py_version, filename=filename)
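
The change works because the Python standard library's tokenize module provides the same entry point the removed custom module exposed: tokenize.generate_tokens accepts any readline callable and yields TokenInfo tuples, which pegen's Tokenizer then consumes, as in the parse_file and parse_string hunks above. A minimal sketch of that stdlib call (illustrative only, not part of this commit; the sample source string is made up):

import io
import tokenize

# Any readline callable works; an in-memory buffer stands in for a file here.
source = io.StringIO("x = 1 + 2\n")

# generate_tokens yields TokenInfo tuples: (type, string, start, end, line).
for tok in tokenize.generate_tokens(source.readline):
    print(tokenize.tok_name[tok.type], repr(tok.string))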
