# Tests for the lkml lexer and parser token streams.
def test_parse_pair_with_sql_block():
    """A key/value pair whose value is an expression block parses to {key: sql}."""
    sql_text = "SELECT * FROM schema.table"
    token_stream = (
        tokens.LiteralToken("sql", 1),
        tokens.ValueToken(1),
        tokens.LiteralToken(sql_text, 1),
        tokens.ExpressionBlockEndToken(1),
        tokens.StreamEndToken(1),
    )
    pair = lkml.parser.Parser(token_stream).parse_pair()
    assert pair == {"sql": sql_text}
def parser():
    """Build a Parser over a canned token stream describing a simple view.

    The stream encodes roughly:
        view: view_name {
          sql_table_name: schema.table_name ;;
          drill_fields: [view_name.field_one, view_name.field_two]
        }
    """
    view_stream = (
        tokens.StreamStartToken(1),
        tokens.LiteralToken("view", 1),
        tokens.ValueToken(1),
        tokens.LiteralToken("view_name", 1),
        tokens.BlockStartToken(1),
        tokens.LiteralToken("sql_table_name", 2),
        tokens.ValueToken(2),
        tokens.ExpressionBlockToken("schema.table_name", 2),
        tokens.ExpressionBlockEndToken(2),
        tokens.LiteralToken("drill_fields", 3),
        tokens.ValueToken(3),
        tokens.ListStartToken(3),
        tokens.LiteralToken("view_name.field_one", 3),
        tokens.CommaToken(3),
        tokens.LiteralToken("view_name.field_two", 3),
        tokens.ListEndToken(3),
        tokens.BlockEndToken(4),
        tokens.StreamEndToken(4),
    )
    return lkml.parser.Parser(view_stream)
# NOTE(review): the statements below look like the body of a lexer test whose
# `def` line was lost during extraction — they exercise scan_until_token()
# skipping a leading comment so peek() lands on the "S" of "Start here".
# Confirm against the original file before relying on this.
text = "# This is a comment\nStart here"
lexer = lkml.Lexer(text)
lexer.scan_until_token()
result = lexer.peek()
assert result == "S"
# (input text, expected token) pairs for test_scan_all_simple_tokens below.
# Each input lexes to exactly one token (after the prepended StreamStartToken).
params = [
    ("\0", tokens.StreamEndToken(1)),
    ("{", tokens.BlockStartToken(1)),
    ("}", tokens.BlockEndToken(1)),
    ("[", tokens.ListStartToken(1)),
    ("]", tokens.ListEndToken(1)),
    (",", tokens.CommaToken(1)),
    (":", tokens.ValueToken(1)),
    # ";;" is two characters but lexes to a single expression-block terminator.
    (";;", tokens.ExpressionBlockEndToken(1)),
]
@pytest.mark.parametrize("text,expected", params)
def test_scan_all_simple_tokens(text, expected):
    """Each simple input from `params` should scan to its matching token type."""
    scanned = lkml.Lexer(text).scan()
    # scanned[0] is the StreamStartToken the lexer prepends; the token under
    # test is the one immediately after it.
    assert scanned[1] == expected
def test_scan_quoted_literal():
    """scan_quoted_literal should capture the text between double quotes.

    Bug fix: the original test computed `token` but never asserted anything,
    so it could not fail. Assert the scanned token's content and line number.
    """
    text = '"This is quoted text."'
    lexer = lkml.Lexer(text)
    # Position the index just past the opening quote, as scan() would before
    # delegating to scan_quoted_literal().
    lexer.index = 1
    token = lexer.scan_quoted_literal()
    assert token == tokens.QuotedLiteralToken("This is quoted text.", 1)
def test_scan_with_complex_sql_block():
    """A sql_distinct_key parameter with nested ${...} refs should lex cleanly."""
    text = (
        "sql_distinct_key: concat(${orders.order_id}, '|', "
        "${orders__items.primary_key}) ;;"
    )
    expected = (
        tokens.StreamStartToken(1),
        tokens.LiteralToken("sql_distinct_key", 1),
        tokens.ValueToken(1),
        tokens.ExpressionBlockToken(
            "concat(${orders.order_id}, '|', ${orders__items.primary_key})", 1
        ),
        tokens.ExpressionBlockEndToken(1),
        tokens.StreamEndToken(1),
    )
    assert lkml.Lexer(text).scan() == expected
def test_parse_value_literal_with_sql_block():
    """parse_value should return the literal and consume the ;; terminator."""
    sql_literal = "SELECT * FROM tablename"
    token_stream = (
        tokens.LiteralToken(sql_literal, 1),
        tokens.ExpressionBlockEndToken(1),
        tokens.StreamEndToken(1),
    )
    value = lkml.parser.Parser(token_stream).parse_value()
    assert value == sql_literal
2) "${TABLE}.foo"
"""
# NOTE(review): this span is the tail of a parse_value method — its `def`
# line and the opening of its docstring are outside this view. Comments
# below describe only what is visible; confirm against the full file.
# Optionally log the grammar rule being attempted.
if self.log_debug:
grammar = "[value] = literal / quoted_literal / expression_block"
self.logger.debug("%sTry to parse %s", self.depth * DELIMITER, grammar)
# Plain or quoted literal: consume and return its value directly.
if self.check(tokens.QuotedLiteralToken, tokens.LiteralToken):
value = self.consume_token_value()
if self.log_debug:
self.logger.debug(
"%sSuccessfully parsed value.", self.depth * DELIMITER
)
return value
# Expression block: the value must be followed by its ";;" terminator,
# otherwise the parse fails (returns None, presumably so the caller can
# backtrack — confirm against Parser.parse_pair).
elif self.check(tokens.ExpressionBlockToken):
value = self.consume_token_value()
if self.check(tokens.ExpressionBlockEndToken):
self.advance()
else:
return None
if self.log_debug:
self.logger.debug(
"%sSuccessfully parsed value.", self.depth * DELIMITER
)
return value
# No value-producing token at the cursor: signal failure to the caller.
else:
return None
# LookML keys whose "name" attribute is a regular field rather than an
# identifying label — presumably consulted when (de)serializing parsed
# dictionaries; confirm against the consumer of this constant.
KEYS_WITH_NAME_FIELDS: Tuple[str, ...] = (
    "user_attribute_param",
    "param",
    "form_param",
    "option",
)
# Single characters that map directly to a token type during lexing.
# "\0" is the end-of-stream sentinel. ";" maps to ExpressionBlockEndToken
# because the full terminator is ";;" — the test params above pair ";;"
# with that token; presumably the lexer consumes the second ";" itself
# (confirm in Lexer.scan).
CHARACTER_TO_TOKEN: Dict[str, Type[tokens.Token]] = {
    "\0": tokens.StreamEndToken,
    "{": tokens.BlockStartToken,
    "}": tokens.BlockEndToken,
    "[": tokens.ListStartToken,
    "]": tokens.ListEndToken,
    ",": tokens.CommaToken,
    ":": tokens.ValueToken,
    ";": tokens.ExpressionBlockEndToken,
}