Skip to content

Commit 8bcf914

Browse files
committed
Fix for python 3.7
1 parent b303906 commit 8bcf914

File tree

1 file changed

+22
-12
lines changed

1 file changed

+22
-12
lines changed

astroid/rebuilder.py

Lines changed: 22 additions & 12 deletions
Original file line number | Diff line number | Diff line change
@@ -6,13 +6,13 @@
66
order to get a single Astroid representation
77
"""
88

9-
import ast
109
import sys
1110
import token
1211
import tokenize
1312
from io import StringIO
1413
from tokenize import TokenInfo, generate_tokens
1514
from typing import (
15+
TYPE_CHECKING,
1616
Callable,
1717
Dict,
1818
Generator,
@@ -39,6 +39,9 @@
3939
else:
4040
from typing_extensions import Final
4141

42+
if TYPE_CHECKING:
43+
import ast
44+
4245

4346
REDIRECT: Final[Dict[str, str]] = {
4447
"arguments": "Arguments",
@@ -1386,18 +1389,25 @@ def _find_orelse_keyword(
13861389
if not self._data or not node.orelse:
13871390
return None, None
13881391

1389-
# If the first child in orelse is an If node the orelse is an elif block
1390-
if isinstance(node.orelse[0], ast.If):
1391-
return node.orelse[0].lineno, node.orelse[0].col_offset
1392-
1393-
end_lineno = node.orelse[0].lineno - 1
1392+
end_lineno = node.orelse[0].lineno
1393+
1394+
def find_keyword(begin: int, end: int) -> Tuple[Optional[int], Optional[int]]:
1395+
# pylint: disable-next=unsubscriptable-object
1396+
data = "\n".join(self._data[begin:end])
1397+
1398+
try:
1399+
tokens = list(generate_tokens(StringIO(data).readline))
1400+
except tokenize.TokenError:
1401+
# If we cut-off in the middle of multi-line if statements we
1402+
# generate a TokenError here. We just keep trying
1403+
# until the multi-line statement is closed.
1404+
return find_keyword(begin, end + 1)
1405+
for t in tokens[::-1]:
1406+
if t.type == token.NAME and t.string in {"else", "elif"}:
1407+
return node.lineno + t.start[0] - 1, t.start[1]
1408+
return None, None
13941409

1395-
# pylint: disable-next=unsubscriptable-object
1396-
data = "\n".join(self._data[node.lineno - 1 : end_lineno])
1397-
for t in generate_tokens(StringIO(data).readline):
1398-
if t.type == token.NAME and t.string == "else":
1399-
return node.lineno + t.start[0] - 1, t.start[1]
1400-
return None, None
1410+
return find_keyword(node.lineno - 1, end_lineno)
14011411

14021412
def visit_if(self, node: "ast.If", parent: NodeNG) -> nodes.If:
14031413
"""visit an If node by returning a fresh instance of it"""

0 commit comments

Comments
 (0)