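# Helper for the f-string aware tokenizer: if the rest of the line starts with
# the quote of an f-string that is currently open on the stack, emit the
# matching FSTRING_END token and drop that f-string (and anything nested
# inside it) from the stack.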
def _close_fstring_if_necessary(fstring_stack, string, start_pos, additional_prefix):
    for fstring_stack_index, node in enumerate(fstring_stack):
        if string.startswith(node.quote):
            token = PythonToken(
                FSTRING_END, node.quote, start_pos, prefix=additional_prefix
            )
            additional_prefix = ""
            assert not node.previous_lines
            del fstring_stack[fstring_stack_index:]
            return token, "", len(node.quote)
    return None, additional_prefix, 0
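
# Sketch of the presumed call pattern (the actual call site appears in the
# scanning loop further below; names match that excerpt):
#
#     rest = line[pos:]
#     fstring_end_token, additional_prefix, quote_length = _close_fstring_if_necessary(
#         fstring_stack, rest, (lnum, pos), additional_prefix
#     )
#     pos += quote_length
#     if fstring_end_token is not None:
#         yield fstring_end_token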

# Fake that the part before was already parsed.
line = "^" * start_pos[1] + line
pos = start_pos[1]
max += start_pos[1]

first = False

if contstr:  # continued string
    if endprog is None:
        raise Exception("Logic error!")
    endmatch = endprog.match(line)
    if endmatch:
        pos = endmatch.end(0)
        if contstr_start is None:
            raise Exception("Logic error!")
        yield PythonToken(STRING, contstr + line[:pos], contstr_start, prefix)
        contstr = ""
        contline = None
    else:
        contstr = contstr + line
        contline = contline + line
        continue
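
# Main per-line scanning loop: while inside an f-string, the literal part of
# the string is consumed first; only inside replacement fields (expressions)
# does the tokenizer fall through to the regular pseudo-token matching below.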
while pos < max:
    if fstring_stack:
        tos = fstring_stack[-1]
        if not tos.is_in_expr():
            string, pos = _find_fstring_string(
                token_collection.endpats, fstring_stack, line, lnum, pos
            )
            if string:
                yield PythonToken(
                    FSTRING_STRING,
                    string,
                    tos.last_string_start_pos,
                    prefix="",
                )
                tos.previous_lines = ""
                continue

if token[-1] == "\n":  # continued string
    endprog = (
        token_collection.endpats.get(initial)
        or token_collection.endpats.get(token[1])
        or token_collection.endpats.get(token[2])
    )
    contstr = line[start:]
    contline = line
    break
else:  # ordinary string
    yield PythonToken(STRING, token, spos, prefix)
elif (
    token in token_collection.fstring_pattern_map
):  # The start of an fstring.
    fstring_stack.append(
        FStringNode(token_collection.fstring_pattern_map[token])
    )
    yield PythonToken(FSTRING_START, token, spos, prefix)
elif initial == "\\" and line[start:] in (
    "\\\n",
    "\\\r\n",
    "\\\r",
):  # continued stmt
    additional_prefix += prefix + line[start:]
    break
else:
    if token in "([{":
        if fstring_stack:
            fstring_stack[-1].open_parentheses(token)
        else:
            paren_level += 1
    elif token in ")]}":
        if fstring_stack:
            fstring_stack[-1].close_parentheses(token)
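
# Dedent handling (presumably the body of the dedent_if_necessary() helper
# called below): pop indentation levels, emitting DEDENT for each closed level
# and ERROR_DEDENT when the new column does not match any open level, and stop
# tracking an 'async def' block once its indentation is left.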
nonlocal async_def_indent
nonlocal async_def_newline

while start < indents[-1]:
    if start > indents[-2]:
        yield PythonToken(ERROR_DEDENT, "", (lnum, 0), "")
        break
    if stashed is not None:
        yield stashed
        stashed = None
    if async_def and async_def_newline and async_def_indent >= indents[-1]:
        # We exited an 'async def' block, so stop tracking for indents
        async_def = False
        async_def_newline = False
        async_def_indent = 0
    yield PythonToken(DEDENT, "", spos, "")
    indents.pop()

# This means that we have a line with whitespace/comments at
# the end, which just results in an endmarker.
break
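
# Start of a new logical line (outside parentheses and f-strings): compare the
# column of the first token with the indent stack to decide whether INDENT or
# DEDENT tokens need to be emitted.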
initial = token[0]

if new_line and initial not in "\r\n\\#":
    new_line = False
    if paren_level == 0 and not fstring_stack:
        i = 0
        indent_start = start
        while line[i] == "\f":
            i += 1
            # TODO don't we need to change spos as well?
            indent_start -= 1
        if indent_start > indents[-1]:
            yield PythonToken(INDENT, "", spos, "")
            indents.append(indent_start)
        for t in dedent_if_necessary(indent_start):
            yield t

if initial in numchars or (  # ordinary number
    initial == "." and token != "." and token != "..."
):
    yield PythonToken(NUMBER, token, spos, prefix)
elif pseudomatch.group(3) is not None:  # ordinary name
    if token in token_collection.always_break_tokens:
        fstring_stack[:] = []
        paren_level = 0
        # We only want to dedent if the token is on a new line.
        if re.match(r"[ \f\t]*$", line[:start]):
            while True:
                indent = indents.pop()
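
# Variant of the scanning loop above that also guards the stashed 'async'
# token and closes an f-string once its terminating quote is reached.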
else:
    contstr = contstr + line
    contline = contline + line
    continue

while pos < max:
    if fstring_stack:
        tos = fstring_stack[-1]
        if not tos.is_in_expr():
            string, pos = _find_fstring_string(
                token_collection.endpats, fstring_stack, line, lnum, pos
            )
            if string:
                if stashed is not None:
                    raise Exception("Logic error!")
                yield PythonToken(
                    FSTRING_STRING,
                    string,
                    tos.last_string_start_pos,
                    # Never has a prefix because it can start anywhere and
                    # include whitespace.
                    prefix="",
                )
                tos.previous_lines = ""
                continue
            if pos == max:
                break

        rest = line[pos:]
        (
            fstring_end_token,
            additional_prefix,
            quote_length,
        ) = _close_fstring_if_necessary(
            fstring_stack, rest, (lnum, pos), additional_prefix
        )
        pos += quote_length
        if fstring_end_token is not None:
            yield fstring_end_token
            continue
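
# With no f-string text left to consume, match the next token with the regular
# pseudo-token regex; characters the regex cannot match are emitted as
# ERRORTOKEN with the skipped whitespace attached as prefix.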
pseudomatch = token_collection.pseudo_token.match(line, pos)
if not pseudomatch:  # scan for tokens
    match = token_collection.whitespace.match(line, pos)
    if pos == 0:
        for t in dedent_if_necessary(match.end()):
            yield t
    pos = match.end()
    new_line = False
    yield PythonToken(
        ERRORTOKEN,
        line[pos],
        (lnum, pos),
        additional_prefix + match.group(0),
    )
    additional_prefix = ""
    pos += 1
    continue

prefix = additional_prefix + pseudomatch.group(1)
additional_prefix = ""
start, pos = pseudomatch.span(2)
spos = (lnum, start)
token = pseudomatch.group(2)

if token == "":
    assert prefix

elif token in ")]}":
    if fstring_stack:
        fstring_stack[-1].close_parentheses(token)
    else:
        if paren_level:
            paren_level -= 1
elif (
    token == ":"
    and fstring_stack
    and fstring_stack[-1].parentheses_count
    - fstring_stack[-1].format_spec_count
    == 1
):
    fstring_stack[-1].format_spec_count += 1

yield PythonToken(OP, token, spos, prefix)
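
# After the last line has been processed: an unterminated string becomes an
# ERRORTOKEN, every indentation level still on the stack is closed with a
# DEDENT, and a final ENDMARKER is emitted at the maximal position.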
if contstr:
    yield PythonToken(ERRORTOKEN, contstr, contstr_start, prefix)
    if contstr.endswith("\n") or contstr.endswith("\r"):
        new_line = True

end_pos = lnum, max
# As the last position we just take the maximally possible position. We
# remove -1 for the last new line.
for indent in indents[1:]:
    yield PythonToken(DEDENT, "", end_pos, "")
yield PythonToken(ENDMARKER, "", end_pos, additional_prefix)

if indent > start:
    if stashed is not None:
        yield stashed
        stashed = None
    yield PythonToken(DEDENT, "", spos, "")
else:
    indents.append(indent)
    break
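
# 'async'/'await' handling: inside an 'async def' block they are emitted as
# dedicated ASYNC/AWAIT tokens; otherwise a lone 'async' is stashed as a NAME
# until the following token reveals whether it starts an 'async def'.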
if str.isidentifier(token):
    should_yield_identifier = True
    if token in ("async", "await") and async_def:
        # We're inside an 'async def' block, all async/await are
        # tokens.
        if token == "async":
            yield PythonToken(ASYNC, token, spos, prefix)
        else:
            yield PythonToken(AWAIT, token, spos, prefix)
        should_yield_identifier = False

    # We are possibly starting an 'async def' section
    elif token == "async" and not stashed:
        stashed = PythonToken(NAME, token, spos, prefix)
        should_yield_identifier = False

    # We actually are starting an 'async def' section
    elif (
        token == "def"
        and stashed is not None
        and stashed[0] is NAME
        and stashed[1] == "async"
    ):
        async_def = True
        async_def_indent = indents[-1]

indent_start -= 1

if indent_start > indents[-1]:
    if stashed is not None:
        yield stashed
        stashed = None
    yield PythonToken(INDENT, "", spos, "")
    indents.append(indent_start)
yield from dedent_if_necessary(indent_start)

if initial in numchars or (  # ordinary number
    initial == "." and token != "." and token != "..."
):
    if stashed is not None:
        yield stashed
        stashed = None
    yield PythonToken(NUMBER, token, spos, prefix)
elif pseudomatch.group(3) is not None:  # ordinary name
    if token in token_collection.always_break_tokens:
        fstring_stack[:] = []
        paren_level = 0
        # We only want to dedent if the token is on a new line.
        if re.match(r"[ \f\t]*$", line[:start]):
            while True:
                indent = indents.pop()
                if indent > start:
                    if (
                        async_def
                        and async_def_newline
                        and async_def_indent >= indent
                    ):
                        # We dedented outside of an 'async def' block.
                        async_def = False