How to use the typesystem.tokenize.tokens.ScalarToken class in typesystem

To help you get started, we've selected a few typesystem examples that show popular ways ScalarToken is used in public projects.

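Before diving into the examples, here is a minimal sketch of the pattern they all share: the tokenizers turn each scalar in a source document into a ScalarToken that records both the parsed value and the character range it came from. The import paths below are an assumption based on the file layout shown in the examples:

from typesystem.tokenize.tokenize_json import tokenize_json
from typesystem.tokenize.tokens import ScalarToken

token = tokenize_json("[true, false, null]")

# Each scalar becomes a ScalarToken carrying the parsed value plus
# inclusive start/end character offsets into the source text.
assert token.lookup([0]) == ScalarToken(True, 1, 4)
assert token.lookup([0]).value is True
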

encode/typesystem: tests/tokenize/test_tokenize_json.py
def test_tokenize_list():
    token = tokenize_json("[true, false, null]")
    expected = ListToken(
        [ScalarToken(True, 1, 4), ScalarToken(False, 7, 11), ScalarToken(None, 14, 17)],
        0,
        18,
    )
    assert token == expected
    assert token.value == [True, False, None]
    assert token.lookup([0]).value is True
    assert token.lookup([0]).string == "true"
    assert token.lookup([0]).start.char_index == 1
    assert token.lookup([0]).end.char_index == 4
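
Note that ScalarToken positions are inclusive character offsets into the source text, so recovering the lexeme by slicing needs one past the end index. A small sketch on the same input:

text = "[true, false, null]"
token = tokenize_json(text)
first = token.lookup([0])

# end.char_index points at the last character of the scalar,
# so slice one character past it to recover the original lexeme.
assert text[first.start.char_index : first.end.char_index + 1] == "true"
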
encode/typesystem: tests/tokenize/test_tokenize_yaml.py
def test_tokenize_list():
    token = tokenize_yaml(YAML_LIST)
    expected = ListToken(
        [
            ScalarToken(True, 3, 6),
            ScalarToken(False, 10, 14),
            ScalarToken(None, 18, 21),
        ],
        1,
        22,
    )
    assert token == expected
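
The YAML_LIST fixture is not shown on this page. Judging from the expected offsets (the list token starts at index 1, and "true" spans indices 3-6), it is presumably defined in the test module along these lines:

YAML_LIST = """
- true
- false
- null
"""
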
encode/typesystem: tests/tokenize/test_tokenize_json.py
def test_tokenize_floats():
    token = tokenize_json("[100.0, 1.0E+2, 1E+2]")
    expected = ListToken(
        [
            ScalarToken(100.0, 1, 5),
            ScalarToken(100.0, 8, 13),
            ScalarToken(100.0, 16, 19),
        ],
        0,
        20,
    )
    assert token == expected
    assert token.value == [100.0, 1.0e2, 1e2]
    assert token.lookup([0]).value == 100.0
    assert token.lookup([0]).string == "100.0"
    assert token.lookup([0]).start.char_index == 1
    assert token.lookup([0]).end.char_index == 5
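
All three float spellings parse to the same value; each token's .string presumably still preserves the original lexical form from the source, e.g.:

token = tokenize_json("[100.0, 1.0E+2, 1E+2]")

# The scalars compare equal once parsed...
assert token.lookup([1]).value == token.lookup([2]).value == 100.0
# ...but the original spelling is still recoverable from each token.
assert token.lookup([1]).string == "1.0E+2"
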
encode/typesystem: tests/tokenize/test_tokenize_yaml.py
def test_tokenize_floats():
    token = tokenize_yaml(YAML_FLOATS)
    expected = ListToken([ScalarToken(100.0, 3, 7), ScalarToken(100.0, 11, 16)], 1, 17)
    assert token == expected
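
As with YAML_LIST, the YAML_FLOATS fixture is not shown here; from the expected offsets it is presumably:

YAML_FLOATS = """
- 100.0
- 1.0E+2
"""
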
encode/typesystem: typesystem/tokenize/tokenize_json.py
        if nextchar == '"':
            value, end = parse_string(string, idx + 1, strict)
            return ScalarToken(value, idx, end - 1, content), end
        elif nextchar == "{":
            value, end = parse_object(
                (string, idx + 1), strict, _scan_once, memo, content
            )
            return DictToken(value, idx, end - 1, content), end
        elif nextchar == "[":
            value, end = parse_array((string, idx + 1), _scan_once)
            return ListToken(value, idx, end - 1, content), end
        elif nextchar == "n" and string[idx : idx + 4] == "null":
            value, end = None, idx + 4
            return ScalarToken(value, idx, end - 1, content), end
        elif nextchar == "t" and string[idx : idx + 4] == "true":
            value, end = True, idx + 4
            return ScalarToken(value, idx, end - 1, content), end
        elif nextchar == "f" and string[idx : idx + 5] == "false":
            value, end = False, idx + 5
            return ScalarToken(value, idx, end - 1, content), end

        m = match_number(string, idx)
        if m is not None:
            integer, frac, exp = m.groups()
            if frac or exp:
                res = parse_float(integer + (frac or "") + (exp or ""))
            else:
                res = parse_int(integer)
            value, end = res, m.end()
            return ScalarToken(value, idx, end - 1, content), end
        else:  # pragma: no cover
            raise StopIteration(idx)
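
This is the literal-dispatch section of typesystem's internal JSON scanner: each branch recognises one token type and returns a (token, end) pair, storing the end offset inclusively (hence the end - 1). Seen from the public API, the effect is that a bare literal comes back as a single ScalarToken; a sketch, assuming top-level scalars are accepted:

token = tokenize_json("true")
assert token == ScalarToken(True, 0, 3)
assert token.value is True
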
encode/typesystem: typesystem/tokenize/tokenize_json.py
        if nextchar in _ws:
            end = _w(s, end).end()
            nextchar = s[end : end + 1]
        # Trivial empty object
        if nextchar == "}":
            return {}, end + 1
        elif nextchar != '"':
            raise JSONDecodeError(
                "Expecting property name enclosed in double quotes", s, end
            )
    end += 1
    while True:
        start = end - 1
        key, end = scanstring(s, end, strict)
        key = ScalarToken(memo_get(key, key), start, end - 1, content)
        # To skip some function call overhead we optimize the fast paths where
        # the JSON key separator is ": " or just ":".
        if s[end : end + 1] != ":":
            end = _w(s, end).end()
            if s[end : end + 1] != ":":
                raise JSONDecodeError("Expecting ':' delimiter", s, end)
        end += 1

        try:
            if s[end] in _ws:
                end += 1
                if s[end] in _ws:
                    end = _w(s, end + 1).end()
        except IndexError:
            pass
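
This fragment of the object parser gives dictionary keys the same treatment: each key is wrapped in a ScalarToken with its own source span. Presumably mirroring the list tests above, lookup on a dict token takes keys rather than indexes:

token = tokenize_json('{"a": 123}')

# .value unwraps recursively to plain Python data; the key tokens
# internally keep their source positions.
assert token.value == {"a": 123}
assert token.lookup(["a"]).value == 123
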
encode/typesystem: typesystem/tokenize/tokenize_yaml.py
def construct_scalar(loader: "yaml.Loader", node: "yaml.Node") -> ScalarToken:
    start = node.start_mark.index
    end = node.end_mark.index
    value = loader.construct_scalar(node)
    return ScalarToken(value, start, end - 1, content=str_content)
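
PyYAML's end_mark.index is exclusive (one past the last character), so the end - 1 here keeps ScalarToken on the same inclusive convention as the JSON tokenizer. A sketch of the resulting behaviour, assuming PyYAML is installed and integers resolve the way booleans and floats do in the YAML tests above:

from typesystem.tokenize.tokenize_yaml import tokenize_yaml

token = tokenize_yaml("- 123\n")

# "123" occupies characters 2..4 of the source, stored inclusively.
assert token.lookup([0]).value == 123
assert token.lookup([0]).start.char_index == 2
assert token.lookup([0]).end.char_index == 4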