How to use the ply.yacc.yacc function in ply

To help you get started, we've selected a few examples that show popular ways ply.yacc.yacc is used in public projects.

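Before diving into the project excerpts below, it may help to see the whole workflow in one place. The following is a minimal, self-contained sketch (a pared-down version of the classic calculator example, not taken from any of the projects below): build a lexer with ply.lex, define grammar rules as p_* functions whose docstrings hold the productions, and call ply.yacc.yacc() to assemble the parser.

import ply.lex as lex
import ply.yacc as yacc

# --- lexer ---
tokens = ('NUMBER', 'PLUS', 'TIMES')

t_PLUS = r'\+'
t_TIMES = r'\*'
t_ignore = ' \t'

def t_NUMBER(t):
    r'\d+'
    t.value = int(t.value)
    return t

def t_error(t):
    print("Illegal character %r" % t.value[0])
    t.lexer.skip(1)

lexer = lex.lex()

# --- grammar: yacc.yacc() collects every p_* function defined in this module ---
def p_expression_plus(p):
    'expression : expression PLUS term'
    p[0] = p[1] + p[3]

def p_expression_term(p):
    'expression : term'
    p[0] = p[1]

def p_term_times(p):
    'term : term TIMES NUMBER'
    p[0] = p[1] * p[3]

def p_term_number(p):
    'term : NUMBER'
    p[0] = p[1]

def p_error(p):
    print("Syntax error at %r" % (p.value if p else 'end of input'))

parser = yacc.yacc(write_tables=False)  # build the LALR parser from the rules above
print(parser.parse("2 + 3 * 4"))        # -> 14

By default yacc.yacc() caches its tables in a parsetab.py file and writes a parser.out report; write_tables=False skips the cache, which suits small, throwaway grammars. Because the call inspects the calling module for p_* functions, it normally comes after all the rules are defined, as in the excerpts below.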

github dabeaz / ply / test / yacc_error5.py
def p_expression_number(t):
    'expression : NUMBER'
    t[0] = t[1]

def p_expression_name(t):
    'expression : NAME'
    try:
        t[0] = names[t[1]]
    except LookupError:
        print("Undefined name '%s'" % t[1])
        t[0] = 0

def p_error(t):
    print("Syntax error at '%s'" % t.value)

parser = yacc.yacc()
import calclex
calclex.lexer.lineno=1
parser.parse("""
a = 3 +
(4*5) +
(a b c) +
+ 6 + 7
""", tracking=True)

github dabeaz / ply / test / yacc_badargs.py
def p_expression_number(t):
    'expression : NUMBER'
    t[0] = t[1]

def p_expression_name(t):
    'expression : NAME'
    try:
        t[0] = names[t[1]]
    except LookupError:
        print("Undefined name '%s'" % t[1])
        t[0] = 0

def p_error(t):
    print("Syntax error at '%s'" % t.value)

yacc.yacc()

github armon / pypred / pypred / parser.py
def get_parser(lexer=None, debug=0):
    "Returns a new instance of the parser"
    p = yacc.yacc(debug=debug)
    p.errors = []
    if lexer:
        lexer.parser = p
        p.lexer = lexer
    return p
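A hypothetical way to use the factory above (the lexer helper and the predicate string are placeholders, not pypred's documented API):

# Illustrative only: build a parser, hand it a lexer, parse one expression,
# then look at the per-parser error list that get_parser() attached.
my_lexer = get_lexer()                     # placeholder for however pypred builds its lexer
p = get_parser(lexer=my_lexer)
result = p.parse("age > 21 and name is 'Bob'", lexer=p.lexer)
if p.errors:
    print(p.errors)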

github EricssonResearch / calvin-base / calvin / csparser / parser.py
def __init__(self, lexer=None):
        super(CalvinParser, self).__init__()
        if lexer:
            self.lexer = lexer
        else:
            self.lexer = lex.lex(module=calvin_rules, debug=False, optimize=False)
        # Since the parser may be called from other scripts, we want to have control
        # over where parse tables (and parser.out log) will be put if the tables
        # have to be recreated
        this_file = os.path.realpath(__file__)
        containing_dir = os.path.dirname(this_file)
        self.parser = yacc.yacc(module=self, debug=True, optimize=False, outputdir=containing_dir)
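The module=self argument is what allows the grammar to live as methods on the class: yacc.yacc() introspects that object for p_* rules instead of scanning the calling module. A condensed sketch of the same pattern follows; the class name and grammar are illustrative, not Calvin's.

import ply.lex as lex
import ply.yacc as yacc

class MiniParser(object):
    # Illustrative token list and grammar only.
    tokens = ('NUMBER', 'PLUS')

    t_PLUS = r'\+'
    t_ignore = ' \t'

    def t_NUMBER(self, t):
        r'\d+'
        t.value = int(t.value)
        return t

    def t_error(self, t):
        t.lexer.skip(1)

    def p_expr_plus(self, p):
        'expr : expr PLUS NUMBER'
        p[0] = p[1] + p[3]

    def p_expr_number(self, p):
        'expr : NUMBER'
        p[0] = p[1]

    def p_error(self, p):
        print("Syntax error at %r" % (p,))

    def __init__(self):
        self.lexer = lex.lex(module=self)
        # module=self makes yacc look for p_* methods on this instance
        self.parser = yacc.yacc(module=self, debug=False, write_tables=False)

    def parse(self, text):
        return self.parser.parse(text, lexer=self.lexer)

print(MiniParser().parse("1 + 2 + 3"))  # -> 6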

github ialbert / pyblue-central / pyblue / parser.py
def process(lines, fname="text"):
    lines = map(strip, lines)
    # Only process lines that are comments.
    lines = filter(lambda x: x.startswith("{#"), lines)
    lexer = DjagnoCommentLexer()
    lexer.fname=fname
    lexer.meta = {}
    parser = yacc.yacc(write_tables=0, debug=0)
    for line in lines:
        parser.parse(line, lexer=lexer)
    return lexer.meta

github GRASS-GIS / grass-ci / lib / python / temporal / temporal_raster3d_algebra.py
def parse(self, expression, basename = None, overwrite=False):
        # Check for space time dataset type definitions from temporal algebra
        l = TemporalRasterAlgebraLexer()
        l.build()
        l.lexer.input(expression)

        while True:
            tok = l.lexer.token()
            if not tok: break

            if tok.type == "STVDS" or tok.type == "STRDS" or tok.type == "STR3DS":
                raise SyntaxError("Syntax error near '%s'" %(tok.type))

        self.lexer = TemporalRasterAlgebraLexer()
        self.lexer.build()
        self.parser = yacc.yacc(module=self, debug=self.debug)

        self.overwrite = overwrite
        self.count = 0
        self.stdstype = "str3ds"
        self.maptype = "raster_3d"
        self.mapclass = Raster3DDataset
        self.basename = basename
        self.expression = expression
        self.parser.parse(expression)

        return self.process_chain_dict

github microsoft / ivy / ivy / ivy_logic_parser_gen.py
precedence = (
    # ... earlier (lower-precedence) entries elided in this excerpt ...
    ('left', 'MINUS'),
    ('left', 'TIMES'),
    ('left', 'DIV'),
    ('left', 'DOLLAR'),
)

from ivy_logic_parser import *

def p_error(token):
    raise LogicParseError(token,"syntax error")

import os
tabdir = os.path.dirname(os.path.abspath(__file__))
formula_parser = yacc.yacc(start='fmla', tabmodule='ivy_formulatab', errorlog=yacc.NullLogger(), outputdir=tabdir, debug=None)
#formula_parser = yacc.yacc(start = 'fmla', tabmodule='ivy_formulatab')
term_parser = yacc.yacc(start='term', tabmodule='ivy_termtab', errorlog=yacc.NullLogger(), outputdir=tabdir, debug=None)
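Calling yacc.yacc() twice with different start symbols, as ivy does above, yields two parsers that share one set of grammar rules; giving each call its own tabmodule keeps their generated tables from overwriting each other. A reduced sketch of the idea (the grammar here is illustrative, not ivy's):

import ply.lex as lex
import ply.yacc as yacc

tokens = ('NAME', 'COLON')
t_NAME = r'[a-zA-Z_][a-zA-Z0-9_]*'
t_COLON = r':'
t_ignore = ' '

def t_error(t):
    t.lexer.skip(1)

lexer = lex.lex()

def p_pair(p):
    'pair : NAME COLON NAME'
    p[0] = (p[1], p[3])

def p_name(p):
    'name : NAME'
    p[0] = p[1]

def p_error(p):
    print("syntax error")

# Two parsers over the same rule set, differing only in their start symbol.
# NullLogger() hides the "unreachable symbol" warnings each one-sided build produces.
pair_parser = yacc.yacc(start='pair', tabmodule='pairtab', errorlog=yacc.NullLogger(), write_tables=False)
name_parser = yacc.yacc(start='name', tabmodule='nametab', errorlog=yacc.NullLogger(), write_tables=False)

print(pair_parser.parse("a : b", lexer=lexer))  # -> ('a', 'b')
print(name_parser.parse("a", lexer=lexer))      # -> 'a'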

github openembedded / openembedded-core / bitbake / lib / bb / pysh / pyshyacc.py
        # (excerpted mid-function: the tail of the module's syntax-error reporter)
        if not n:
            break
        w('  %r\n' % n)
    raise sherrors.ShellSyntaxError(''.join(msg))

# Build the parser
try:
    import pyshtables
except ImportError:
    import os
    outputdir = os.path.dirname(__file__)
    if not os.access(outputdir, os.W_OK):
        outputdir = ''
    yacc.yacc(tabmodule = 'pyshtables', outputdir = outputdir, debug = 0)
else:
    yacc.yacc(tabmodule = 'pysh.pyshtables', write_tables = 0, debug = 0)


def parse(input, eof=False, debug=False):
    """Parse a whole script at once and return the generated AST and unconsumed
    data in a tuple.
    
    NOTE: eof is probably meaningless for now, the parser being unable to work
    in pull mode. It should be set to True.
    """
    lexer = pyshlex.PLYLexer()
    remaining = lexer.add(input, eof)
    if lexer.is_empty():
        return [], remaining
    if debug:
        debug = 2
    return yacc.parse(lexer=lexer, debug=debug), remaining
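The try/except ImportError block above is a common way to ship pregenerated parser tables: if the table module imports cleanly, reuse it and never write to the install location; otherwise regenerate it into a writable directory. A generic sketch of the same pattern, with mygrammar and mygrammar_tables standing in for real module names:

import os
import ply.yacc as yacc
import mygrammar  # hypothetical module holding the token list and p_* rules

try:
    import mygrammar_tables  # pregenerated tables shipped with the package (hypothetical)
except ImportError:
    # Tables are missing: regenerate them next to this file if it is writable.
    outputdir = os.path.dirname(os.path.abspath(__file__))
    if not os.access(outputdir, os.W_OK):
        outputdir = ''  # fall back to the current working directory
    parser = yacc.yacc(module=mygrammar, tabmodule='mygrammar_tables',
                       outputdir=outputdir, debug=0)
else:
    # Tables exist: reuse them and never write into the install location.
    parser = yacc.yacc(module=mygrammar, tabmodule='mygrammar_tables',
                       write_tables=0, debug=0)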

github kelp404 / Victory / application / static / jc / slimit / parser.py
def __init__(self, lex_optimize=True, lextab=lextab,
                 yacc_optimize=True, yacctab=yacctab, yacc_debug=False):
        self.lex_optimize = lex_optimize
        self.lextab = lextab
        self.yacc_optimize = yacc_optimize
        self.yacctab = yacctab
        self.yacc_debug = yacc_debug

        self.lexer = Lexer()
        self.lexer.build(optimize=lex_optimize, lextab=lextab)
        self.tokens = self.lexer.tokens

        self.parser = ply.yacc.yacc(
            module=self, optimize=yacc_optimize,
            debug=yacc_debug, tabmodule=yacctab, start='program')

        # https://github.com/rspivak/slimit/issues/29
        # lexer.auto_semi can cause a loop in a parser
        # when a parser error happens on a token right after
        # a newline.
        # We keep record of the tokens that caused p_error
        # and if the token has already been seen - we raise
        # a SyntaxError exception to avoid looping over and
        # over again.
        self._error_tokens = {}

github nucleic / enaml / enaml / core / parser / base_parser.py
def __init__(self):
        self.tokens = self.lexer().tokens
        # Get a save directory for the lex and parse tables
        parse_dir, parse_mod = self._tables_location()
        self.parser = yacc.yacc(
            method='LALR',
            module=self,
            start='enaml',
            tabmodule=parse_mod,
            outputdir=parse_dir,
            optimize=1,
            debug=0,
            errorlog=yacc.NullLogger())
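A note on the keyword arguments used here: method='LALR' names the default table-construction algorithm, optimize=1 skips most of yacc's grammar error checking for faster startup, debug=0 suppresses the parser.out report, and errorlog=yacc.NullLogger() discards grammar warnings. A tiny self-contained sketch with the same flags (the one-rule grammar exists only to make it runnable):

import ply.lex as lex
import ply.yacc as yacc

tokens = ('WORD',)
t_WORD = r'\w+'
t_ignore = ' '

def t_error(t):
    t.lexer.skip(1)

lexer = lex.lex()

def p_program(p):
    'program : WORD'
    p[0] = p[1]

def p_error(p):
    pass

parser = yacc.yacc(
    method='LALR',               # default table-construction algorithm
    start='program',             # explicit start symbol
    optimize=1,                  # skip most grammar error checking
    debug=0,                     # do not write parser.out
    errorlog=yacc.NullLogger(),  # discard grammar warnings
    write_tables=False,          # keep the sketch from leaving a parsetab.py behind
)
print(parser.parse("hello", lexer=lexer))  # -> 'hello'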