How to use the lkml.tokens.BlockEndToken class in lkml

To help you get started, we’ve selected a few lkml examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

Example from the GitHub project joshtemple/lkml — tests/test_parser.py (view on GitHub, external link)
tokens.LiteralToken("view", 1),
        tokens.ValueToken(1),
        tokens.LiteralToken("view_name", 1),
        tokens.BlockStartToken(1),
        tokens.LiteralToken("sql_table_name", 2),
        tokens.ValueToken(2),
        tokens.ExpressionBlockToken("schema.table_name", 2),
        tokens.ExpressionBlockEndToken(2),
        tokens.LiteralToken("drill_fields", 3),
        tokens.ValueToken(3),
        tokens.ListStartToken(3),
        tokens.LiteralToken("view_name.field_one", 3),
        tokens.CommaToken(3),
        tokens.LiteralToken("view_name.field_two", 3),
        tokens.ListEndToken(3),
        tokens.BlockEndToken(4),
        tokens.StreamEndToken(4),
    )
    return lkml.parser.Parser(stream)
Example from the GitHub project joshtemple/lkml — tests/test_lexer.py (view on GitHub, external link)
result = lexer.peek()
    assert result == "S"


def test_scan_until_token_skips_comments():
    """scan_until_token should jump past a leading comment line to real content."""
    lexer = lkml.Lexer("# This is a comment\nStart here")
    lexer.scan_until_token()
    # The next visible character should be the 'S' of "Start here"
    assert lexer.peek() == "S"


# (input text, expected token) pairs: each simple lexeme and the token
# instance the lexer should produce for it (line number fixed at 1).
params = [
    ("\0", tokens.StreamEndToken(1)),  # NUL sentinel marks end of stream
    ("{", tokens.BlockStartToken(1)),
    ("}", tokens.BlockEndToken(1)),
    ("[", tokens.ListStartToken(1)),
    ("]", tokens.ListEndToken(1)),
    (",", tokens.CommaToken(1)),
    (":", tokens.ValueToken(1)),
    (";;", tokens.ExpressionBlockEndToken(1)),  # two-char expression terminator
]


@pytest.mark.parametrize("text,expected", params)
def test_scan_all_simple_tokens(text, expected):
    """Each single-token input should lex to its expected token."""
    scanned = lkml.Lexer(text).scan()
    # Index 0 is the stream start token appended at the beginning; the
    # token produced for `text` is the one right after it.
    assert scanned[1] == expected
Example from the GitHub project joshtemple/lkml — tests/test_parser.py (view on GitHub, external link)
def test_parse_block_with_no_expression():
    """A block with an empty body should parse to a dict holding only its name."""
    stream = (
        tokens.LiteralToken("dimension", 1),
        tokens.ValueToken(1),
        tokens.LiteralToken("dimension_name", 1),
        tokens.BlockStartToken(1),
        tokens.BlockEndToken(1),
        tokens.StreamEndToken(1),
    )
    parsed = lkml.parser.Parser(stream).parse_block()
    assert parsed == {"dimension": {"name": "dimension_name"}}
Example from the GitHub project joshtemple/lkml — lkml/parser.py (view on GitHub, external link)
"""Returns a parsed LookML dictionary from a sequence of tokens.

        Raises:
            SyntaxError: If unable to find a matching grammar rule for the stream

        Grammar:
            expression ← (block / pair / list)*

        """
        if self.log_debug:
            grammar = "[expression] = (block / pair / list)*"
            self.logger.debug("%sTry to parse %s", self.depth * DELIMITER, grammar)
        expression: dict = {}
        if self.check(tokens.StreamStartToken):
            self.advance()
        while not self.check(tokens.StreamEndToken, tokens.BlockEndToken):
            block = self.parse_block()
            if block is not None:
                self.update_tree(expression, block)
                continue

            pair = self.parse_pair()
            if pair is not None:
                self.update_tree(expression, pair)
                continue

            list = self.parse_list()
            if list is not None:
                expression.update(list)
                continue

            token = self.tokens[self.progress]
Example from the GitHub project joshtemple/lkml — lkml/keys.py (view on GitHub, external link)
# These are keys for fields in Looker that have a "name" attribute. lkml uses
# the key `name` to represent the name of the field (e.g. for
# `dimension: dimension_name {`, the `name` key would hold the value
# `dimension_name`).

# Looker parameter types whose trailing identifier is stored under a "name"
# key in the parsed dict (e.g. `param: param_name { ... }` ->
# {"param": {"name": "param_name", ...}}) — presumably consumed by the
# parser/serializer; confirm against lkml.parser.
KEYS_WITH_NAME_FIELDS: Tuple[str, ...] = (
    "user_attribute_param",
    "param",
    "form_param",
    "option",
)

# Maps each special character to the token class the lexer emits for it.
CHARACTER_TO_TOKEN: Dict[str, Type[tokens.Token]] = {
    "\0": tokens.StreamEndToken,  # NUL sentinel marking end of input
    "{": tokens.BlockStartToken,
    "}": tokens.BlockEndToken,
    "[": tokens.ListStartToken,
    "]": tokens.ListEndToken,
    ",": tokens.CommaToken,
    ":": tokens.ValueToken,
    # Keyed on the first ";" of the ";;" expression terminator — presumably
    # the lexer consumes the second ";" itself; confirm in lkml.Lexer.
    ";": tokens.ExpressionBlockEndToken,
}
Example from the GitHub project joshtemple/lkml — lkml/parser.py (view on GitHub, external link)
if key is None:
            return key

        if self.check(tokens.LiteralToken):
            literal = self.consume_token_value()
        else:
            literal = None

        if self.check(tokens.BlockStartToken):
            self.advance()
        else:
            return None

        expression = self.parse_expression()

        if self.check(tokens.BlockEndToken):
            self.advance()

            block = {key: expression}
            if literal:
                block[key]["name"] = literal

            if self.log_debug:
                self.logger.debug(
                    "%sSuccessfully parsed block.", self.depth * DELIMITER
                )
            return block
        else:
            return None