How to use the lkml.tokens.ListEndToken class in lkml

To help you get started, we’ve selected a few lkml examples based on popular ways the library is used in public projects.
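Before working through the examples, here is a minimal sketch of where ListEndToken appears in practice: lexing a LookML string that contains a bracketed list produces a ListStartToken/ListEndToken pair around the values. The sample input and the `from lkml import tokens` import are illustrative assumptions, not taken from the snippets below.

import lkml
from lkml import tokens

text = "drill_fields: [orders.id, orders.total]"
lexer = lkml.Lexer(text)
token_stream = lexer.scan()

# The closing "]" is scanned as a ListEndToken carrying its line number
assert any(isinstance(token, tokens.ListEndToken) for token in token_stream)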


joshtemple/lkml · tests/test_lexer.py
def test_scan_until_token_skips_comments():
    text = "# This is a comment\nStart here"
    lexer = lkml.Lexer(text)
    lexer.scan_until_token()
    result = lexer.peek()
    assert result == "S"


params = [
    ("\0", tokens.StreamEndToken(1)),
    ("{", tokens.BlockStartToken(1)),
    ("}", tokens.BlockEndToken(1)),
    ("[", tokens.ListStartToken(1)),
    ("]", tokens.ListEndToken(1)),
    (",", tokens.CommaToken(1)),
    (":", tokens.ValueToken(1)),
    (";;", tokens.ExpressionBlockEndToken(1)),
]


@pytest.mark.parametrize("text,expected", params)
def test_scan_all_simple_tokens(text, expected):
    lexer = lkml.Lexer(text)
    result = lexer.scan()
    # Skip the stream start token added at the beginning
    assert result[1] == expected


def test_scan_quoted_literal():
    text = '"This is quoted text."'
joshtemple/lkml · tests/test_parser.py
def test_parse_list_with_literals():
    stream = (
        tokens.LiteralToken("drill_fields", 1),
        tokens.ValueToken(1),
        tokens.ListStartToken(1),
        tokens.LiteralToken("view_name.field_one", 1),
        tokens.CommaToken(1),
        tokens.LiteralToken("view_name.field_two", 1),
        tokens.CommaToken(1),
        tokens.LiteralToken("view_name.field_three", 1),
        tokens.ListEndToken(1),
        tokens.StreamEndToken(1),
    )
    parser = lkml.parser.Parser(stream)
    result = parser.parse_list()
    assert result == {
        "drill_fields": [
            "view_name.field_one",
            "view_name.field_two",
            "view_name.field_three",
        ]
    }

joshtemple/lkml · tests/test_parser.py
def test_parse_list_with_no_contents():
    stream = (
        tokens.LiteralToken("drill_fields", 1),
        tokens.ValueToken(1),
        tokens.ListStartToken(1),
        tokens.ListEndToken(1),
        tokens.StreamEndToken(1),
    )
    parser = lkml.parser.Parser(stream)
    result = parser.parse_list()
    assert result == {"drill_fields": []}
joshtemple/lkml · tests/test_parser.py
def test_parse_list_with_only_commas():
    stream = (
        tokens.LiteralToken("drill_fields", 1),
        tokens.ValueToken(1),
        tokens.ListStartToken(1),
        tokens.CommaToken(1),
        tokens.CommaToken(1),
        tokens.CommaToken(1),
        tokens.ListEndToken(1),
        tokens.StreamEndToken(1),
    )
    parser = lkml.parser.Parser(stream)
    result = parser.parse_list()
    assert result is None
joshtemple/lkml · tests/test_parser.py
def test_parse_list_with_missing_comma():
    stream = (
        tokens.LiteralToken("drill_fields", 1),
        tokens.ValueToken(1),
        tokens.ListStartToken(1),
        tokens.LiteralToken("view_name.field_one", 1),
        tokens.CommaToken(1),
        tokens.LiteralToken("view_name.field_two", 1),
        tokens.LiteralToken("view_name.field_three", 1),
        tokens.ListEndToken(1),
        tokens.StreamEndToken(1),
    )
    parser = lkml.parser.Parser(stream)
    result = parser.parse_list()
    assert result is None
joshtemple/lkml · tests/test_parser.py
def test_parse_list_with_trailing_comma():
    stream = (
        tokens.LiteralToken("drill_fields", 1),
        tokens.ValueToken(1),
        tokens.ListStartToken(1),
        tokens.LiteralToken("view_name.field_one", 1),
        tokens.CommaToken(1),
        tokens.ListEndToken(1),
        tokens.StreamEndToken(1),
    )
    parser = lkml.parser.Parser(stream)
    result = parser.parse_list()
    assert result == {"drill_fields": ["view_name.field_one"]}
joshtemple/lkml · tests/test_parser.py
    stream = (
        tokens.StreamStartToken(1),
        tokens.LiteralToken("view", 1),
        tokens.ValueToken(1),
        tokens.LiteralToken("view_name", 1),
        tokens.BlockStartToken(1),
        tokens.LiteralToken("sql_table_name", 2),
        tokens.ValueToken(2),
        tokens.ExpressionBlockToken("schema.table_name", 2),
        tokens.ExpressionBlockEndToken(2),
        tokens.LiteralToken("drill_fields", 3),
        tokens.ValueToken(3),
        tokens.ListStartToken(3),
        tokens.LiteralToken("view_name.field_one", 3),
        tokens.CommaToken(3),
        tokens.LiteralToken("view_name.field_two", 3),
        tokens.ListEndToken(3),
        tokens.BlockEndToken(4),
        tokens.StreamEndToken(4),
    )
    return lkml.parser.Parser(stream)
joshtemple/lkml · lkml/parser.py
        if self.log_debug:
            grammar = "[list] = key '[' csv? ']'"
            self.logger.debug("%sTry to parse %s", self.depth * DELIMITER, grammar)

        key = self.parse_key()
        if key is None:
            return key

        if self.check(tokens.ListStartToken):
            self.advance()
        else:
            return None

        csv = self.parse_csv()
        csv = csv if csv else []

        if self.check(tokens.ListEndToken):
            self.advance()
            list = {key: csv}
            if self.log_debug:
                self.logger.debug(
                    "%sSuccessfully parsed a list.", self.depth * DELIMITER
                )
            return list
        else:
            return None
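The tests and parser internals above work with token streams directly. In everyday use you rarely construct tokens by hand; a minimal sketch, assuming the top-level lkml.load entry point (which runs the lexer and parser for you) accepts a LookML string:

import lkml

parsed = lkml.load("drill_fields: [orders.id, orders.total]")

# Matching ListStartToken/ListEndToken pairs come back as plain Python lists
assert parsed == {"drill_fields": ["orders.id", "orders.total"]}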
joshtemple/lkml · lkml/parser.py
-------
            1) ["date", "week"]
            2) ["foo", "bar"]

        """
        if self.log_debug:
            grammar = '[csv] = (literal / quoted_literal) ("," (literal / quoted_literal))* ","?'
            self.logger.debug("%sTry to parse %s", self.depth * DELIMITER, grammar)
        values = []

        if self.check(tokens.LiteralToken, tokens.QuotedLiteralToken):
            values.append(self.consume_token_value())
        else:
            return None

        while not self.check(tokens.ListEndToken):
            if self.check(tokens.CommaToken):
                self.advance()
            else:
                return None

            if self.check(tokens.LiteralToken, tokens.QuotedLiteralToken):
                values.append(self.consume_token_value())
            elif self.check(tokens.ListEndToken):
                break
            else:
                return None

        if self.log_debug:
            self.logger.debug(
                "%sSuccessfully parsed comma-separated values.", self.depth * DELIMITER
            )