@@ -28,7 +28,7 @@
 from typing import IO, Any, Optional, Union, cast
 
 from blib2to3.pgen2.grammar import Grammar
-from blib2to3.pgen2.tokenize import GoodTokenInfo
+from blib2to3.pgen2.tokenize import TokenInfo
 from blib2to3.pytree import NL
 
 # Pgen imports
@@ -112,7 +112,7 @@ def __init__(self, grammar: Grammar, logger: Optional[Logger] = None) -> None:
         logger = logging.getLogger(__name__)
         self.logger = logger
 
-    def parse_tokens(self, tokens: Iterable[GoodTokenInfo], debug: bool = False) -> NL:
+    def parse_tokens(self, tokens: Iterable[TokenInfo], debug: bool = False) -> NL:
        """Parse a series of tokens and return the syntax tree."""
         # XXX Move the prefix computation into a wrapper around tokenize.
         proxy = TokenProxy(tokens)
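For context on the rename: parse_tokens still accepts any iterable of token tuples, now typed as TokenInfo. A minimal sketch of driving it by hand, assuming black's vendored blib2to3 is importable and that tokenize.tokenize yields TokenInfo items, as the updated signatures in this diff imply:

from blib2to3 import pygram
from blib2to3.pgen2 import tokenize
from blib2to3.pgen2.driver import Driver

# Build a driver over the stock Python grammar, tokenize the source
# string directly (the updated API below takes text rather than a
# readline callable), and feed the token stream to parse_tokens().
driver = Driver(pygram.python_grammar)
tokens = tokenize.tokenize("x = 1\n", grammar=driver.grammar)
tree = driver.parse_tokens(tokens)
print(tree)  # the blib2to3 syntax tree (NL)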
@@ -180,27 +180,17 @@ def parse_tokens(self, tokens: Iterable[GoodTokenInfo], debug: bool = False) -> NL:
         assert p.rootnode is not None
         return p.rootnode
 
-    def parse_stream_raw(self, stream: IO[str], debug: bool = False) -> NL:
-        """Parse a stream and return the syntax tree."""
-        tokens = tokenize.generate_tokens(stream.readline, grammar=self.grammar)
-        return self.parse_tokens(tokens, debug)
-
-    def parse_stream(self, stream: IO[str], debug: bool = False) -> NL:
-        """Parse a stream and return the syntax tree."""
-        return self.parse_stream_raw(stream, debug)
-
     def parse_file(
         self, filename: Path, encoding: Optional[str] = None, debug: bool = False
     ) -> NL:
         """Parse a file and return the syntax tree."""
         with open(filename, encoding=encoding) as stream:
-            return self.parse_stream(stream, debug)
+            text = stream.read()
+            return self.parse_string(text, debug)
 
     def parse_string(self, text: str, debug: bool = False) -> NL:
         """Parse a string and return the syntax tree."""
-        tokens = tokenize.generate_tokens(
-            io.StringIO(text).readline, grammar=self.grammar
-        )
+        tokens = tokenize.tokenize(text, grammar=self.grammar)
         return self.parse_tokens(tokens, debug)
 
     def _partially_consume_prefix(self, prefix: str, column: int) -> tuple[str, str]:
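With parse_stream and parse_stream_raw removed, a caller holding an open stream can migrate by reading it and calling parse_string, which is what parse_file itself now does. A minimal migration sketch under the same assumptions as above:

import io

from blib2to3 import pygram
from blib2to3.pgen2.driver import Driver

driver = Driver(pygram.python_grammar)
stream = io.StringIO("def f():\n    return 1\n")

# Before: tree = driver.parse_stream(stream)
# After: read the text yourself and go through parse_string().
tree = driver.parse_string(stream.read())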