How to use the mathy.Token class in mathy

To help you get started, we've selected a few mathy examples based on popular ways the library is used in public projects.


From justindujardin/mathy, libraries/mathy_mkdocs/mathy_mkdocs/plugin.py: renders a tokenized expression as an SVG diagram for the project docs. The snippet is truncated in the source; imports and the tail of the function are reconstructed and marked as assumed.
from typing import List

import svgwrite
from mathy import Token, Tokenizer

BORDER_WIDTH = 2  # module-level constant in the original plugin; value assumed here
tokenizer = Tokenizer()


def render_tokens_from_text(input_text: str):
    global tokenizer
    try:
        tokens: List[Token] = tokenizer.tokenize(input_text)
        length = len(tokens)
        values = [t.value for t in tokens]
        types = [t.type for t in tokens]
        assert len(types) == len(values)

        # Size a two-row SVG viewport with one box per token.
        box_size = 64
        view_x = 0
        view_y = 0
        view_w = box_size * length
        view_h = box_size * 2 + BORDER_WIDTH * 2

        tree = svgwrite.Drawing(size=(view_w, view_h))
        tree.viewbox(minx=view_x, miny=view_y, width=view_w, height=view_h)

        curr_x = BORDER_WIDTH
        for t, v in zip(types, values):
            ...  # per-token drawing is truncated in the source snippet
        return tree.tostring()  # return value assumed from context
    except BaseException as error:
        return f"Failed to tokenize: {input_text} ({error})"  # handler assumed
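
Assuming the reconstructed return value above, the function can be invoked directly with an expression string. This short usage sketch is illustrative, not part of the original plugin:

svg_markup = render_tokens_from_text("4x + 2")
print(svg_markup[:60])  # start of the generated SVG markup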
From justindujardin/mathy, libraries/website/docs/snippets/cas/tokenizer_manual.py: builds a token list by hand and verifies it matches the Tokenizer's output.
from typing import List
from mathy import Token, TokenConstant, TokenEOF, Tokenizer, TokenPlus, TokenVariable

# Build the token stream for "4x + 2" by hand, terminated by an EOF token.
manual_tokens: List[Token] = [
    Token("4", TokenConstant),
    Token("x", TokenVariable),
    Token("+", TokenPlus),
    Token("2", TokenConstant),
    Token("", TokenEOF),
]
# Tokenize the same expression automatically and check both streams agree.
auto_tokens: List[Token] = Tokenizer().tokenize("4x + 2")

for i, token in enumerate(manual_tokens):
    assert auto_tokens[i].value == token.value
    assert auto_tokens[i].type == token.type
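
When one of the assertions above fails, a tiny helper makes the mismatch visible. This is a hypothetical debugging sketch that continues the snippet above, not a mathy API:

def describe(tokens: List[Token]) -> str:
    # Pair each token's text with its type code for a one-line summary.
    return " ".join(f"{t.value}/{t.type}" for t in tokens)

print(describe(manual_tokens))
print(describe(auto_tokens))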
From justindujardin/mathy, libraries/website/docs/snippets/cas/tokenizer_tokenize.py: tokenizes an expression and prints each token's type and value.
from typing import List
from mathy import Tokenizer, Token

text = "4x + 2x^3 * 7x"
tokenizer = Tokenizer()
tokens: List[Token] = tokenizer.tokenize(text)

for token in tokens:
    print(f"type: {token.type}, value: {token.value}")