How to use the lkml.tokens.StreamStartToken class in lkml

To help you get started, we've selected a few lkml examples based on popular ways it is used in public projects.

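StreamStartToken is the sentinel token lkml's lexer emits at the very start of a token stream (its counterpart, StreamEndToken, closes it); its single argument is the line number it occurs on. A minimal sketch, along with the imports the examples below assume:

import lkml
from lkml import tokens

# The lexer brackets every scanned stream with these two sentinels.
start = tokens.StreamStartToken(1)
end = tokens.StreamEndToken(1)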

From joshtemple/lkml, tests/test_parser.py:
@pytest.fixture
def parser():
    # Token stream for a small LookML view; see the lexing sketch below.
    stream = (
        tokens.StreamStartToken(1),
        tokens.LiteralToken("view", 1),
        tokens.ValueToken(1),
        tokens.LiteralToken("view_name", 1),
        tokens.BlockStartToken(1),
        tokens.LiteralToken("sql_table_name", 2),
        tokens.ValueToken(2),
        tokens.ExpressionBlockToken("schema.table_name", 2),
        tokens.ExpressionBlockEndToken(2),
        tokens.LiteralToken("drill_fields", 3),
        tokens.ValueToken(3),
        tokens.ListStartToken(3),
        tokens.LiteralToken("view_name.field_one", 3),
        tokens.CommaToken(3),
        tokens.LiteralToken("view_name.field_two", 3),
        tokens.ListEndToken(3),
        tokens.BlockEndToken(4),
        tokens.StreamEndToken(4),
    )
    return lkml.parser.Parser(stream)
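For orientation, this fixture is the token stream the lexer would produce for a small LookML view. A sketch that lexes the equivalent text directly (the line numbers baked into the tokens should match):

import lkml

text = (
    "view: view_name {\n"
    "  sql_table_name: schema.table_name ;;\n"
    "  drill_fields: [view_name.field_one, view_name.field_two]\n"
    "}"
)
# Scanning should reproduce the fixture's tokens, StreamStartToken first.
output = lkml.Lexer(text).scan()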
From joshtemple/lkml, tests/test_parser.py:
def test_check_returns_true_for_mix_of_valid_and_invalid_types(parser):
    assert parser.check(tokens.ValueToken, tokens.StreamStartToken)
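The check helper peeks at the parser's current token without consuming it and returns True if that token is an instance of any of the passed types, which is why a mix of matching and non-matching types still passes. A small sketch against the fixture above:

def test_check_is_non_consuming(parser):
    # The parser has not advanced, so the current token is still the
    # StreamStartToken; ValueToken alone does not match it.
    assert parser.check(tokens.StreamStartToken)
    assert not parser.check(tokens.ValueToken)
    assert parser.check(tokens.StreamStartToken)  # position unchanged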
From joshtemple/lkml, tests/test_lexer.py:
def test_scan_with_complex_sql_block():
    text = (
        "sql_distinct_key: concat(${orders.order_id}, '|', "
        "${orders__items.primary_key}) ;;"
    )
    output = lkml.Lexer(text).scan()
    assert output == (
        tokens.StreamStartToken(1),
        tokens.LiteralToken("sql_distinct_key", 1),
        tokens.ValueToken(1),
        tokens.ExpressionBlockToken(
            "concat(${orders.order_id}, '|', ${orders__items.primary_key})", 1
        ),
        tokens.ExpressionBlockEndToken(1),
        tokens.StreamEndToken(1),
    )
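Note how everything between the colon and the terminating ";;" is captured as a single ExpressionBlockToken, while the ";;" itself is consumed and emitted as an ExpressionBlockEndToken. A minimal sketch, assuming plain "sql" is also one of the lexer's expression-block keys (as "sql_distinct_key" is above):

import lkml
from lkml import tokens

output = lkml.Lexer("sql: SELECT 1 ;;").scan()
# Expected shape: StreamStart, Literal("sql"), Value,
# ExpressionBlock("SELECT 1"), ExpressionBlockEnd, StreamEnd -- all on line 1.
assert isinstance(output[3], tokens.ExpressionBlockToken)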
From joshtemple/lkml, tests/test_lexer.py:
def test_scan_with_non_expression_block_starting_with_sql():
    text = "sql_not_reserved_field: yes"
    output = lkml.Lexer(text).scan()
    assert output == (
        tokens.StreamStartToken(1),
        tokens.LiteralToken("sql_not_reserved_field", 1),
        tokens.ValueToken(1),
        tokens.LiteralToken("yes", 1),
        tokens.StreamEndToken(1),
    )
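The contrast with the previous test: only keys the lexer recognizes as expression-block keys (such as "sql_table_name") trigger scanning up to a ";;" terminator; a key that merely starts with "sql" is lexed as an ordinary pair. A quick sketch:

import lkml

# "sql_table_name" is an expression-block key; "sql_not_reserved_field" is not.
expr = lkml.Lexer("sql_table_name: schema.table ;;").scan()
plain = lkml.Lexer("sql_not_reserved_field: yes").scan()
# expr contains an ExpressionBlockToken; plain contains LiteralToken("yes", 1).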
From joshtemple/lkml, lkml/parser.py:
def parse_expression(self) -> dict:
        """Returns a parsed LookML dictionary from a sequence of tokens.

        Raises:
            SyntaxError: If unable to find a matching grammar rule for the stream

        Grammar:
            expression ← (block / pair / list)*

        """
        if self.log_debug:
            grammar = "[expression] = (block / pair / list)*"
            self.logger.debug("%sTry to parse %s", self.depth * DELIMITER, grammar)
        expression: dict = {}
        if self.check(tokens.StreamStartToken):
            self.advance()
        while not self.check(tokens.StreamEndToken, tokens.BlockEndToken):
            block = self.parse_block()
            if block is not None:
                self.update_tree(expression, block)
                continue

            pair = self.parse_pair()
            if pair is not None:
                self.update_tree(expression, pair)
                continue

            list = self.parse_list()
            if list is not None:
                expression.update(list)
                continue

            # No grammar rule matched; fail as the docstring promises.
            token = self.peek()
            raise SyntaxError(
                f'Unable to find a matching expression for "{token.id}" '
                f"on line {token.line_number}"
            )

        return expression
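A hedged usage sketch tying this back to StreamStartToken: parse_expression consumes a leading StreamStartToken before trying the grammar rules, so a minimal pair stream parses to a one-key dict (assuming the dict-style API shown in these tests):

import lkml.parser
from lkml import tokens

stream = (
    tokens.StreamStartToken(1),
    tokens.LiteralToken("hidden", 1),
    tokens.ValueToken(1),
    tokens.LiteralToken("yes", 1),
    tokens.StreamEndToken(1),
)
result = lkml.parser.Parser(stream).parse_expression()
# Expected: {"hidden": "yes"}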
From joshtemple/lkml, lkml/lexer.py:
def scan(self) -> Tuple[tokens.Token, ...]:
        """Tokenizes LookML into a sequence of tokens.

        This method skips through the text being lexed until it finds a character that
        indicates the start of a new token. It consumes the relevant characters and adds
        the tokens to a sequence until it reaches the end of the text.
        """
        self.tokens.append(tokens.StreamStartToken(self.line_number))
        while True:
            self.scan_until_token()
            ch = self.peek()
            if ch == "\0":
                self.tokens.append(CHARACTER_TO_TOKEN[ch](self.line_number))
                break
            elif ch == ";":
                if self.peek_multiple(2) == ";;":
                    self.advance(2)
                    self.tokens.append(CHARACTER_TO_TOKEN[ch](self.line_number))
            elif ch == '"':
                self.advance()
                self.tokens.append(self.scan_quoted_literal())
            elif ch in CHARACTER_TO_TOKEN.keys():
                self.advance()
                self.tokens.append(CHARACTER_TO_TOKEN[ch](self.line_number))
            else:
                # Any other character begins an unquoted literal. (The full
                # source also detects expression blocks like "sql: ... ;;"
                # at this point.)
                self.tokens.append(self.scan_literal())

        return tuple(self.tokens)
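Taken together: every scan begins by appending a StreamStartToken, and parse_expression relies on the matching StreamEndToken to know where to stop. A quick end-to-end check:

import lkml
from lkml import tokens

output = lkml.Lexer("hidden: yes").scan()
assert isinstance(output[0], tokens.StreamStartToken)
assert isinstance(output[-1], tokens.StreamEndToken)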