How to use the lkml.tokens.ListStartToken class in lkml

To help you get started, we've selected a few lkml examples based on popular ways the library is used in public projects.
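All of the examples below come from joshtemple/lkml, where ListStartToken marks the opening "[" of a LookML list: the lexer emits it when it sees the bracket, and the parser consumes it while matching the list grammar. Before the token-level tests, here is a minimal sketch of the high-level flow; it assumes only the public lkml.load function and the lkml.Lexer class that the snippets themselves use. (The test snippets below additionally assume from lkml import tokens, import lkml.parser, and import pytest where parametrize appears; the first snippet shows the imports explicitly.)

import lkml

# Parsing a LookML string end to end; the lexer emits a ListStartToken
# for "[" and the parser turns the bracketed values into a Python list.
parsed = lkml.load("drill_fields: [view_name.field_one, view_name.field_two]")
assert parsed == {"drill_fields": ["view_name.field_one", "view_name.field_two"]}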

joshtemple/lkml · tests/test_parser.py
import lkml.parser
from lkml import tokens


def test_parse_list_with_trailing_comma():
    stream = (
        tokens.LiteralToken("drill_fields", 1),
        tokens.ValueToken(1),
        tokens.ListStartToken(1),
        tokens.LiteralToken("view_name.field_one", 1),
        tokens.CommaToken(1),
        tokens.ListEndToken(1),
        tokens.StreamEndToken(1),
    )
    parser = lkml.parser.Parser(stream)
    result = parser.parse_list()
    assert result == {"drill_fields": ["view_name.field_one"]}
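Hand-building the stream keeps the test focused on the parser, but the lexer produces an equivalent stream from text. A sketch (the membership checks assume the token classes' equality semantics implied by the lexer tests further down; the input is one line, so every token carries line number 1):

import lkml
from lkml import tokens

# Lexing the same fragment yields the hand-built stream above, plus a
# leading StreamStartToken that the parse_list tests omit.
scanned = lkml.Lexer("drill_fields: [view_name.field_one,]").scan()
assert tokens.ListStartToken(1) in scanned
assert tokens.CommaToken(1) in scanned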
joshtemple/lkml · tests/test_parser.py
def parser():
    stream = (
        tokens.StreamStartToken(1),
        tokens.LiteralToken("view", 1),
        tokens.ValueToken(1),
        tokens.LiteralToken("view_name", 1),
        tokens.BlockStartToken(1),
        tokens.LiteralToken("sql_table_name", 2),
        tokens.ValueToken(2),
        tokens.ExpressionBlockToken("schema.table_name", 2),
        tokens.ExpressionBlockEndToken(2),
        tokens.LiteralToken("drill_fields", 3),
        tokens.ValueToken(3),
        tokens.ListStartToken(3),
        tokens.LiteralToken("view_name.field_one", 3),
        tokens.CommaToken(3),
        tokens.LiteralToken("view_name.field_two", 3),
        tokens.ListEndToken(3),
        tokens.BlockEndToken(4),
        tokens.StreamEndToken(4),
    )
    return lkml.parser.Parser(stream)
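For orientation, this fixture's stream corresponds to LookML source along these lines (a reconstruction inferred from the tokens, not copied from the repository):

import lkml

text = """view: view_name {
  sql_table_name: schema.table_name ;;
  drill_fields: [view_name.field_one, view_name.field_two]
}"""

# Parsing with the public API; lkml groups repeated blocks under plural
# keys, so the result should look something like
# {"views": [{"sql_table_name": ..., "drill_fields": [...], "name": "view_name"}]}.
parsed = lkml.load(text)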
joshtemple/lkml · tests/test_parser.py
def test_parse_list_with_literals():
    stream = (
        tokens.LiteralToken("drill_fields", 1),
        tokens.ValueToken(1),
        tokens.ListStartToken(1),
        tokens.LiteralToken("view_name.field_one", 1),
        tokens.CommaToken(1),
        tokens.LiteralToken("view_name.field_two", 1),
        tokens.CommaToken(1),
        tokens.LiteralToken("view_name.field_three", 1),
        tokens.ListEndToken(1),
        tokens.StreamEndToken(1),
    )
    parser = lkml.parser.Parser(stream)
    result = parser.parse_list()
    assert result == {
        "drill_fields": [
            "view_name.field_one",
            "view_name.field_two",
            "view_name.field_three",
        ]
    }
joshtemple/lkml · tests/test_parser.py
def test_parse_list_with_bad_token():
    stream = (
        tokens.LiteralToken("drill_fields", 1),
        tokens.ValueToken(1),
        tokens.ListStartToken(1),
        tokens.LiteralToken("view_name.field_one", 1),
        tokens.CommaToken(1),
        tokens.LiteralToken("view_name.field_two", 1),
        tokens.CommaToken(1),
        tokens.ValueToken(1),
        tokens.ListEndToken(1),
        tokens.StreamEndToken(1),
    )
    parser = lkml.parser.Parser(stream)
    result = parser.parse_list()
    assert result is None
joshtemple/lkml · tests/test_parser.py
def test_parse_list_with_missing_comma():
    stream = (
        tokens.LiteralToken("drill_fields", 1),
        tokens.ValueToken(1),
        tokens.ListStartToken(1),
        tokens.LiteralToken("view_name.field_one", 1),
        tokens.CommaToken(1),
        tokens.LiteralToken("view_name.field_two", 1),
        tokens.LiteralToken("view_name.field_three", 1),
        tokens.ListEndToken(1),
        tokens.StreamEndToken(1),
    )
    parser = lkml.parser.Parser(stream)
    result = parser.parse_list()
    assert result is None
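Together, the two failing-stream tests pin down the parser's contract: on any grammar mismatch, parse_list returns None instead of raising, which is what lets the parser backtrack and try another rule. A minimal sketch relying on that contract directly (the token stream here is hypothetical):

import lkml.parser
from lkml import tokens

stream = (
    tokens.LiteralToken("drill_fields", 1),
    tokens.ValueToken(1),
    tokens.BlockStartToken(1),  # "{" where the grammar requires "[" ...
    tokens.StreamEndToken(1),
)
parser = lkml.parser.Parser(stream)
assert parser.parse_list() is None  # ... so the mismatch surfaces as None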
joshtemple/lkml · tests/test_lexer.py
def test_scan_until_token_skips_comments():
    text = "# This is a comment\nStart here"
    lexer = lkml.Lexer(text)
    lexer.scan_until_token()
    result = lexer.peek()
    assert result == "S"


params = [
    ("\0", tokens.StreamEndToken(1)),
    ("{", tokens.BlockStartToken(1)),
    ("}", tokens.BlockEndToken(1)),
    ("[", tokens.ListStartToken(1)),
    ("]", tokens.ListEndToken(1)),
    (",", tokens.CommaToken(1)),
    (":", tokens.ValueToken(1)),
    (";;", tokens.ExpressionBlockEndToken(1)),
]


@pytest.mark.parametrize("text,expected", params)
def test_scan_all_simple_tokens(text, expected):
    lexer = lkml.Lexer(text)
    result = lexer.scan()
    # Skip stream start token appended at the beginning
    assert result[1] == expected
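The table above exercises single-character tokens one at a time; as a quick sanity check, here is a sketch that scans a complete list expression and prints the sequence (the expected ordering is inferred from these tests, not from separate documentation):

import lkml

for token in lkml.Lexer("fields: [a, b]").scan():
    print(token)
# Expected kinds, in order: StreamStartToken, LiteralToken("fields"),
# ValueToken, ListStartToken, LiteralToken("a"), CommaToken,
# LiteralToken("b"), ListEndToken, StreamEndToken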


joshtemple/lkml · lkml/parser.py
"timeframes: [date, week]"

            Output (dictionary):
            -------
            {"timeframes": ["date", "week"]}

        """
        if self.log_debug:
            grammar = "[list] = key '[' csv? ']'"
            self.logger.debug("%sTry to parse %s", self.depth * DELIMITER, grammar)

        key = self.parse_key()
        if key is None:
            return key

        if self.check(tokens.ListStartToken):
            self.advance()
        else:
            return None

        csv = self.parse_csv()
        csv = csv if csv else []

        if self.check(tokens.ListEndToken):
            self.advance()
            list = {key: csv}
            if self.log_debug:
                self.logger.debug(
                    "%sSuccessfully parsed a list.", self.depth * DELIMITER
                )
            return list
        else:
            return None
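Note the csv? in the grammar: the comma-separated values are optional, so an empty pair of brackets still parses, with csv falling back to []. A sketch confirming this through the public API (assuming lkml.load accepts a raw string, as above):

import lkml

# "suggestions: []" produces a ListStartToken immediately followed by a
# ListEndToken, so parse_csv yields nothing and the value becomes [].
parsed = lkml.load("suggestions: []")
assert parsed == {"suggestions": []}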
joshtemple/lkml · lkml/keys.py
from typing import Dict, Tuple, Type

import lkml.tokens as tokens

# These are keys for fields in Looker that have a "name" attribute. lkml uses the
# key `name` to represent the name of the field: e.g. for `dimension: dimension_name {`,
# the `name` key would hold the value `dimension_name`.

KEYS_WITH_NAME_FIELDS: Tuple[str, ...] = (
    "user_attribute_param",
    "param",
    "form_param",
    "option",
)

CHARACTER_TO_TOKEN: Dict[str, Type[tokens.Token]] = {
    "\0": tokens.StreamEndToken,
    "{": tokens.BlockStartToken,
    "}": tokens.BlockEndToken,
    "[": tokens.ListStartToken,
    "]": tokens.ListEndToken,
    ",": tokens.CommaToken,
    ":": tokens.ValueToken,
    ";": tokens.ExpressionBlockEndToken,
}
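This mapping is how the lexer dispatches on single characters: each character keys a token class, which is instantiated with the current line number. A sketch of the lookup used directly, outside the lexer (assuming lkml.keys is importable as shown):

from lkml.keys import CHARACTER_TO_TOKEN

# "[" maps to the ListStartToken class; the constructor takes a line number.
token_class = CHARACTER_TO_TOKEN["["]
token = token_class(1)
print(token_class.__name__)  # ListStartToken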