Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def test_end_pos_one_line(self):
    """Check that ``end_pos`` of a one-line string leaf is (line, column)-correct."""
    # Parse a tiny module; dedent/u normalize the embedded source.
    # NOTE(review): original indentation of this paste was stripped — the
    # exact whitespace inside the triple-quoted source is reconstructed
    # here and should be confirmed against the upstream test file.
    parsed = parser.Parser(parser.load_grammar(), dedent(u('''
    def testit():
        a = "huhu"
    ''')))
    # subscopes[0] -> testit, statements[0] -> `a = "huhu"`,
    # children[2] -> the "huhu" string leaf (name, '=', value).
    tok = parsed.module.subscopes[0].statements[0].children[2]
    # "huhu" ends on line 3, column 14 of the parsed source.
    assert tok.end_pos == (3, 14)
def get_call(self, source):
    """Parse *source* and return the first expression of its first statement."""
    module = Parser(u(source), no_docstr=True).module
    first_statement = module.statements[0]
    return first_statement.expression_list()[0]
def _calc_path_until_cursor(self, start_pos):
    """
    Something like a reverse tokenizer that tokenizes the reversed strings.
    """
    # NOTE(review): this definition is TRUNCATED in this file — the `for`
    # loop breaks off inside the `if level:` branch and the rest of the
    # function body is missing. Kept byte-for-byte; not runnable as-is.
    open_brackets = ['(', '[', '{']
    close_brackets = [')', ']', '}']
    start_cursor = start_pos
    # Iterate tokens backwards from the cursor position.
    gen = PushBackIterator(self._get_backwards_tokenizer(start_pos))
    string = u('')
    level = 0           # bracket nesting depth seen while scanning backwards
    force_point = False
    last_type = None
    is_first = True
    for tok_type, tok_str, tok_start_pos, prefix in gen:
        if is_first:
            if prefix:  # whitespace is not a path
                return u(''), start_cursor
            is_first = False
        # Two adjacent NAME tokens can never be one dotted-path element;
        # keep a separator when rebuilding the string front-to-back.
        if last_type == tok_type == tokenize.NAME:
            string = ' ' + string
        if level:
            # Scanning in reverse: a closing bracket enters a nested region.
            if tok_str in close_brackets:
                level += 1
def _calc_path_until_cursor(self, start_pos):
    """
    Something like a reverse tokenizer that tokenizes the reversed strings.
    """
    # NOTE(review): duplicate copy — this same truncated definition appears
    # several times in this file. The `for` loop breaks off inside the
    # `if level:` branch; the rest of the body is missing. Kept byte-for-byte.
    open_brackets = ['(', '[', '{']
    close_brackets = [')', ']', '}']
    start_cursor = start_pos
    # Iterate tokens backwards from the cursor position.
    gen = PushBackIterator(self._get_backwards_tokenizer(start_pos))
    string = u('')
    level = 0           # bracket nesting depth seen while scanning backwards
    force_point = False
    last_type = None
    is_first = True
    for tok_type, tok_str, tok_start_pos, prefix in gen:
        if is_first:
            if prefix:  # whitespace is not a path
                return u(''), start_cursor
            is_first = False
        # Adjacent NAME tokens are kept apart when rebuilding the string.
        if last_type == tok_type == tokenize.NAME:
            string = ' ' + string
        if level:
            # Scanning in reverse: a closing bracket enters a nested region.
            if tok_str in close_brackets:
                level += 1
def get_line(self, line_nr):
    """Return the source line at 1-based *line_nr*.

    Line 0 yields an empty string (needed by the backwards parser);
    negative or out-of-range line numbers raise ``StopIteration``.
    """
    # Lazily split the source into lines on first access.
    if not self._line_cache:
        self._line_cache = common.splitlines(self.source)

    if line_nr < 0:
        raise StopIteration()
    if line_nr == 0:
        # Synthetic empty "zeroth" line for the backwards parser.
        return u('')

    index = line_nr - 1
    if index < len(self._line_cache):
        return self._line_cache[index]
    raise StopIteration()
def _calc_path_until_cursor(self, start_pos):
    """
    Something like a reverse tokenizer that tokenizes the reversed strings.
    """
    # NOTE(review): duplicate copy — this same truncated definition appears
    # several times in this file. The `for` loop breaks off inside the
    # `if level:` branch; the rest of the body is missing. Kept byte-for-byte.
    open_brackets = ['(', '[', '{']
    close_brackets = [')', ']', '}']
    start_cursor = start_pos
    # Iterate tokens backwards from the cursor position.
    gen = PushBackIterator(self._get_backwards_tokenizer(start_pos))
    string = u('')
    level = 0           # bracket nesting depth seen while scanning backwards
    force_point = False
    last_type = None
    is_first = True
    for tok_type, tok_str, tok_start_pos, prefix in gen:
        if is_first:
            if prefix:  # whitespace is not a path
                return u(''), start_cursor
            is_first = False
        # Adjacent NAME tokens are kept apart when rebuilding the string.
        if last_type == tok_type == tokenize.NAME:
            string = ' ' + string
        if level:
            # Scanning in reverse: a closing bracket enters a nested region.
            if tok_str in close_brackets:
                level += 1
# NOTE(review): ORPHANED FRAGMENT — the lines below have no `def` header and
# splice the interiors of two different functions together:
#   * down to the bracket-level branches: the body of
#     `_calc_path_until_cursor` (the backwards-tokenizer scan seen elsewhere
#     in this file), starting mid-docstring;
#   * from `raise OnErrorLeaf(leaf)` onward: the tail of a separate
#     completion-context helper that references `leaf`, `is_after_newline`,
#     `code_lines`, `position` and `_get_code`, none of which are defined
#     here.
# Kept byte-for-byte; this section is not runnable as-is.
    Something like a reverse tokenizer that tokenizes the reversed strings.
    """
    open_brackets = ['(', '[', '{']
    close_brackets = [')', ']', '}']
    start_cursor = start_pos
    gen = PushBackIterator(self._get_backwards_tokenizer(start_pos))
    string = u('')
    level = 0
    force_point = False
    last_type = None
    is_first = True
    for tok_type, tok_str, tok_start_pos, prefix in gen:
        if is_first:
            if prefix:  # whitespace is not a path
                return u(''), start_cursor
            is_first = False
        if last_type == tok_type == tokenize.NAME:
            string = ' ' + string
        if level:
            if tok_str in close_brackets:
                level += 1
            elif tok_str in open_brackets:
                level -= 1
        elif tok_str == '.':
            force_point = False
        elif force_point:
            # Reversed tokenizing, therefore a number is recognized as a
            # floating point number.
            # The same is true for string prefixes -> represented as a
            # Error leafs cannot be parsed, completion in strings is also
            # impossible.
            raise OnErrorLeaf(leaf)
        else:
            if leaf == ';':
                user_stmt = leaf.parent
            else:
                user_stmt = leaf.get_definition()
            if user_stmt.parent.type == 'simple_stmt':
                user_stmt = user_stmt.parent
        if is_after_newline:
            if user_stmt.start_pos[1] > position[1]:
                # This means that it's actually a dedent and that means that we
                # start without context (part of a suite).
                return u('')
        # This is basically getting the relevant lines.
        return _get_code(code_lines, user_stmt.get_start_pos_of_prefix(), position)
def _calc_path_until_cursor(self, start_pos):
    """
    Something like a reverse tokenizer that tokenizes the reversed strings.
    """
    # NOTE(review): duplicate copy — this same truncated definition appears
    # several times in this file. The `for` loop breaks off inside the
    # `if level:` branch; the rest of the body is missing. Kept byte-for-byte.
    open_brackets = ['(', '[', '{']
    close_brackets = [')', ']', '}']
    start_cursor = start_pos
    # Iterate tokens backwards from the cursor position.
    gen = PushBackIterator(self._get_backwards_tokenizer(start_pos))
    string = u('')
    level = 0           # bracket nesting depth seen while scanning backwards
    force_point = False
    last_type = None
    is_first = True
    for tok_type, tok_str, tok_start_pos, prefix in gen:
        if is_first:
            if prefix:  # whitespace is not a path
                return u(''), start_cursor
            is_first = False
        # Adjacent NAME tokens are kept apart when rebuilding the string.
        if last_type == tok_type == tokenize.NAME:
            string = ' ' + string
        if level:
            # Scanning in reverse: a closing bracket enters a nested region.
            if tok_str in close_brackets:
                level += 1
def get_line(self, line_nr):
    """Return the source line at 1-based *line_nr*.

    Line 0 yields an empty string (needed by the backwards parser);
    negative or out-of-range line numbers raise ``StopIteration``.
    """
    # Populate the line cache lazily on first use.
    if not self._line_cache:
        self._line_cache = common.splitlines(self.source)

    if line_nr < 0:
        raise StopIteration()
    if line_nr == 0:
        # Synthetic empty "zeroth" line for the backwards parser.
        return u('')

    index = line_nr - 1
    if index < len(self._line_cache):
        return self._line_cache[index]
    raise StopIteration()