How to use the routemaster.exit_conditions.tokenizer.TokenKind class in routemaster

To help you get started, we’ve selected a few routemaster examples, based on popular ways TokenKind is used in public projects.
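
Judging from the excerpts below, TokenKind is an enum of token categories (NUMBER, DURATION, LITERAL, and so on) and Token is a NamedTuple that pairs a kind with a parsed value and a source location. A minimal usage sketch based on those definitions (the token value and location are illustrative, not taken from the project):

from routemaster.exit_conditions.tokenizer import Token, TokenKind

# Build a digested token by hand, in the same shape the tokenizer produces.
token = Token(kind=TokenKind.NUMBER, value=42, location=(0, 2))

# Downstream code (the parser, for example) branches on the kind.
if token.kind == TokenKind.NUMBER:
    print(f"number {token.value} at {token.location}")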


github.com/thread/routemaster: routemaster/exit_conditions/tokenizer.py
        pass

    # Is this an integer value?
    try:
        return Token(
            kind=TokenKind.NUMBER,
            value=int(raw_token.value),
            location=raw_token.location,
        )
    except ValueError:
        pass

    # A float?
    try:
        return Token(
            kind=TokenKind.NUMBER,
            value=float(raw_token.value),
            location=raw_token.location,
        )
    except ValueError:
        pass

    # A duration?
    duration_match = RE_DURATION.match(raw_token.value)
    if duration_match is not None:
        total_length = (
            int(duration_match.group(1) or '0') * 24 * 60 * 60 +
            int(duration_match.group(2) or '0') * 60 * 60 +
            int(duration_match.group(3) or '0') * 60 +
            int(duration_match.group(4) or '0')
        )
        return Token(
            kind=TokenKind.DURATION,
            value=total_length,
            location=raw_token.location,
        )
github.com/thread/routemaster: routemaster/exit_conditions/parser.py
def _parse_and_expr(source):
    already_bool_converted = False

    yield from _parse_or_expr(source)

    while source.try_eat_next(TokenKind.AND):
        if not already_bool_converted:
            already_bool_converted = True
            yield Operation.TO_BOOL,
        yield from _parse_or_expr(source)
        yield Operation.TO_BOOL,
        yield Operation.AND,
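
A note on the trailing commas in _parse_and_expr above: a statement like "yield Operation.AND," (with the trailing comma) yields the one-element tuple (Operation.AND,), so every item in the generated instruction stream is a tuple, matching two-element instructions such as "yield Operation.LOOKUP, tuple(atom.value)" seen elsewhere in the parser. A standalone illustration (the Operation enum here is a stand-in, not imported from routemaster):

from enum import Enum

class Operation(Enum):
    TO_BOOL = 'to_bool'
    AND = 'and'
    LOOKUP = 'lookup'

def instructions():
    yield Operation.TO_BOOL,               # trailing comma: yields (Operation.TO_BOOL,)
    yield Operation.LOOKUP, ('metadata',)  # yields (Operation.LOOKUP, ('metadata',))

# Each yielded instruction comes out as a tuple.
print(list(instructions()))
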
github.com/thread/routemaster: routemaster/exit_conditions/parser.py
        negated = not negated

    yield from _parse_value(source)
    if source.try_eat_next(TokenKind.COPULA):
        # `is` or `has` expression
        if source.try_eat_next(TokenKind.NOT):
            negated = not negated

        try:
            adjective = tuple(source.eat_next(TokenKind.ATOM).value)
        except ParseError:
            # Must be a prepositional phrase
            adjective = ()

        prepositions = []
        while source.match_next(TokenKind.PREPOSITION):
            prepositions.append(source.head.value)
            source.eat_next(TokenKind.PREPOSITION)
            yield from _parse_value(source)

        if not adjective and not prepositions:
            if source.head is not None:
                raise ParseError(
                    "Expected an adjective or preposition",
                    source.head.location,
                )
            else:
                raise ParseError(
                    "Expected an adjective or preposition afterwards, "
                    "but got the EOF",
                    source.previous_location,
                )
github.com/thread/routemaster: routemaster/exit_conditions/parser.py
    if source.try_eat_next(TokenKind.LEFT_PAREN):
        yield from _parse_and_expr(source)
        source.eat_next(TokenKind.RIGHT_PAREN)
        return

    # Atomic lookup
    try:
        atom = source.eat_next(TokenKind.ATOM)
        yield Operation.LOOKUP, tuple(atom.value)
        return
    except ParseError:
        pass

    # Literals
    try:
        literal = source.eat_next(TokenKind.LITERAL)
        yield Operation.LITERAL, literal.value
        return
    except ParseError:
        pass

    # Durations
    try:
        duration = source.eat_next(TokenKind.DURATION)
        yield Operation.LITERAL, duration.value
        return
    except ParseError:
        pass

    # Numbers
    try:
        number = source.eat_next(TokenKind.NUMBER)
        yield Operation.LITERAL, number.value
        return
    except ParseError:
        pass
github.com/thread/routemaster: routemaster/exit_conditions/parser.py
def _parse_value(source):
    # Immediate special-case: parentheticals
    if source.try_eat_next(TokenKind.LEFT_PAREN):
        yield from _parse_and_expr(source)
        source.eat_next(TokenKind.RIGHT_PAREN)
        return

    # Atomic lookup
    try:
        atom = source.eat_next(TokenKind.ATOM)
        yield Operation.LOOKUP, tuple(atom.value)
        return
    except ParseError:
        pass

    # Literals
    try:
        literal = source.eat_next(TokenKind.LITERAL)
        yield Operation.LITERAL, literal.value
        return
    except ParseError:
        pass

    # Durations
    try:
        duration = source.eat_next(TokenKind.DURATION)
        yield Operation.LITERAL, duration.value
        return
    except ParseError:
        pass
github.com/thread/routemaster: routemaster/exit_conditions/tokenizer.py
    ATOM = 'atom'
    OPERATOR = 'operator'
    NUMBER = 'number'
    DURATION = 'duration'
    AND = '"and"'
    OR = '"or"'
    NOT = '"not"'
    LITERAL = 'constant'
    COPULA = 'is/has'
    PREPOSITION = 'preposition'


RAW_TOKEN_KIND_TO_TOKEN_KIND = {
    RawTokenKind.LEFT_PAREN: TokenKind.LEFT_PAREN,
    RawTokenKind.RIGHT_PAREN: TokenKind.RIGHT_PAREN,
    RawTokenKind.OPERATOR: TokenKind.OPERATOR,
}


class RawToken(NamedTuple):
    """A single raw (in-text, undigested) token."""

    kind: RawTokenKind
    value: str
    location: Tuple[int, int]


class Token(NamedTuple):
    """A single digested (usable) token."""

    kind: TokenKind
    value: Any
github.com/thread/routemaster: routemaster/exit_conditions/tokenizer.py
    location: Tuple[int, int]


class Token(NamedTuple):
    """A single digested (usable) token."""

    kind: TokenKind
    value: Any
    location: Tuple[int, int]


LITERALS = {
    'true': (TokenKind.LITERAL, True),
    'false': (TokenKind.LITERAL, False),
    'null': (TokenKind.LITERAL, None),
    'and': (TokenKind.AND, None),
    'or': (TokenKind.OR, None),
    'not': (TokenKind.NOT, None),
    'is': (TokenKind.COPULA, 'is'),
    'has': (TokenKind.COPULA, 'has'),
    'was': (TokenKind.COPULA, 'was'),
}

STATE_MACHINE = {
    (None, 'Ps('): RawTokenKind.LEFT_PAREN,
    (None, 'Pe)'): RawTokenKind.RIGHT_PAREN,
    (None, 'Z'): RawTokenKind.WHITESPACE,
    (None, 'Cc'): RawTokenKind.WHITESPACE,
    (None, 'Po#'): RawTokenKind.COMMENT,
    (None, 'L'): RawTokenKind.ATOM,
    (None, 'Pd'): RawTokenKind.ATOM,
    (None, 'Pc'): RawTokenKind.ATOM,