How to use the astor.to_source function in astor

To help you get started, we’ve selected a few astor examples based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github ebanner / pynt / codebook / ast_server.py View on Github external
...         pass
    ...     def quuux():
    ...         \"\"\"function\"\"\"
    ...         pass
    ... y
    ...
    ... '''
    >>>
    >>> namespace = 'ast_server.Qux.quux'
    >>> code = [s, namespace]

    """
    # `code` arrives as a two-element list: [source string, dotted namespace].
    code, namespace = code[0], code[1]
    tree = ast.parse(code)
    # Rewrite the parsed module for the given namespace (IPythonEmbedder is
    # defined elsewhere — presumably it embeds an IPython session at the
    # target function; TODO confirm) and regenerate source text with astor.
    embedded = IPythonEmbedder(namespace).visit(tree)
    c = astor.to_source(embedded)
    return c
github cornell-brg / pymtl3 / pymtl / old_passes / GenerateTickPass.py View on Github external
# in the form of:
        # >>> hostobj = s.reg # this is hostobj_stmt
        # >>> hostobj.out = hostobj.in_ # stmt

        # When the update block is not rooted at the top component "s",
        # prepend an assignment that binds `hostobj` inside the generated
        # tick function so the copied statements can reference it.
        if hostobj != "s":
          hostobj_stmt = ast.parse( "hostobj = " + hostobj ).body[0]
          newfunc.body.append( ast.fix_missing_locations( hostobj_stmt ) )

        # Copy the block's statements into the new function; `rewriter`
        # (defined elsewhere) presumably redirects name/attribute accesses
        # to `hostobj` — TODO confirm.
        for stmt in root.body[0].body:
          if hostobj != "s": rewriter.visit( stmt )
          newfunc.body.append( stmt )

      # Optionally keep the generated source text for debugging/dumping.
      if dump:
        import astor
        gen_tick_src = astor.to_source(newroot)

      # NOTE(review): Python 2-only syntax below (`exec ... in`,
      # `func_globals`); this fragment cannot run under Python 3 as written.
      exec compile( newroot, "", "exec") in locals()
      tick_hacky.func_globals.update( func_globals )
      ret = tick_hacky

    # Publish the generated source and the compiled tick on the model.
    m._tick_src = gen_tick_src
    m.tick = ret
github GoogleCloudPlatform / cloudml-samples / tools / to_ipynb.py View on Github external
lines = source.split('\n')

    # Accumulators for the notebook under construction: finished cells,
    # code lines of the in-progress cell, and slicing bookkeeping.
    cells = []
    cell_source = []
    prev_type = None
    start = 0

    # main processing loop
    for node in module.body:
        cur_type = type(node).__name__
        # Source-line boundaries of this AST node (semantics defined by
        # get_boundary, not visible here — presumably [top, bottom) indexes
        # into `lines`; TODO confirm).
        top, bottom = get_boundary(node)

        # special handling for dangling lines
        if cur_type in ['Import', 'ImportFrom', 'Expr']:
            # Regenerate canonical source from the AST instead of slicing
            # the raw lines for these node kinds.
            # print(astor.to_source(node))
            code_lines = astor.to_source(node).strip().split('\n')
            cur_group = process_node(code_lines, cur_type, remove)
        else:
            code_lines = lines[top:bottom]
            cur_group = process_node(code_lines, cur_type, remove)

        # group of lines between ast nodes
        between = process_between(lines[start:top])

        if between:
            # flush cell_source
            if cell_source:
                cells.append(code_cell(cell_source))

            # Text found between code nodes becomes a markdown cell.
            cells.append(markdown_cell(between))

            # get current node source, check later if need to concatenate
github coetaur0 / staticfg / staticfg / model.py View on Github external
def get_source(self):
    """
    Get a string containing the Python source code corresponding to the
    statements in the block.

    Compound statements (``if``/``for``/``while``) are abbreviated to their
    header line; function definitions to their header line plus ``...``.
    All other statements are emitted in full via ``astor.to_source``.

    Returns:
        A string containing the source code of the statements.
    """
    src = ""
    for statement in self.statements:
        # isinstance with a tuple replaces the original `type(x) in [...]`
        # / `type(x) == T` checks: idiomatic, O(1), and also correct should
        # a node subclass ever appear.
        if isinstance(statement, (ast.If, ast.For, ast.While)):
            # Keep only the header line (e.g. "if cond:") as a summary.
            src += astor.to_source(statement).split('\n')[0] + "\n"
        elif isinstance(statement, (ast.FunctionDef, ast.AsyncFunctionDef)):
            # Header line plus an ellipsis marker for the elided body.
            src += astor.to_source(statement).split('\n')[0] + "...\n"
        else:
            src += astor.to_source(statement)
    return src
github dantarion / bbtools / gg_rev_script_parser.py View on Github external
filesize = struct.unpack(MODE+"I",f.read(4))[0]
    f.seek(0x38)
    # Function count: one unsigned int read at file offset 0x38.
    FUNCTION_COUNT, = struct.unpack(MODE+"I",f.read(4))
    # Skip the function table (0x24 bytes per entry, relative seek) and
    # decode the remainder of the file as a single routine.
    f.seek(0x24*(FUNCTION_COUNT),1)
    parse_bbscript_routine(f, filesize + 0x38)
    '''
    for i in range(0,FUNCTION_COUNT):
        f.seek(BASE+4+0x24*i)
        FUNCTION_NAME = f.read(0x20).split("\x00")[0]
        if log: log.write("\n#---------------{0} {1}/{2}\n".format(FUNCTION_NAME,i,FUNCTION_COUNT))
        FUNCTION_OFFSET, = struct.unpack(MODE+"I",f.read(4))
        f.seek(BASE+4+0x24*FUNCTION_COUNT+FUNCTION_OFFSET)
        parse_bbscript_routine(f)
    '''
    # Write the accumulated module AST (astRoot, built elsewhere) out as a
    # .py file next to the input.
    py = open(os.path.join(dirname, basename) + ".py","w")
    py.write(astor.to_source(astRoot))
    py.close()
    return j
github microsoft / nni / tools / nni_annotation / specific_code_generator.py View on Github external
def parse_nni_variable(code):
    """Parse an `nni.variable` annotation.

    Validates that the annotation's value is a call of the form
    ``nni.<fn>(...)``, tags that call with a ``name=`` keyword holding the
    name argument's source text, and returns both pieces.

    code: annotation string
    Returns: (name AST node, annotated ``nni.<fn>`` call node)
    """
    name, outer_call = parse_annotation_function(code, 'variable')
    assert len(outer_call.args) == 1, 'nni.variable contains more than one arguments'

    value_call = outer_call.args[0]
    assert type(value_call) is ast.Call, 'Value of nni.variable is not a function call'
    assert type(value_call.func) is ast.Attribute, 'nni.variable value is not a NNI function'
    assert type(value_call.func.value) is ast.Name, 'nni.variable value is not a NNI function'
    assert value_call.func.value.id == 'nni', 'nni.variable value is not a NNI function'

    # Attach the variable's name (rendered back to source text) so the NNI
    # runtime can identify it: nni.<fn>(..., name='<expr>').
    value_call.keywords.append(
        ast.keyword(arg='name', value=ast.Str(s=astor.to_source(name).strip())))
    if value_call.func.attr == 'choice':
        convert_args_to_dict(value_call)

    return name, value_call
github bsc-wdc / compss / compss / programming_model / bindings / tmp / python / src / pycompss / util / translators / py2pycompss / components / calculator.py View on Github external
Creates the ISL Access objects given the loops information and the accesses information

        :param loops_info: Information about loop bounds and indexes
        :param subscript_accesses_info: Map between subscript names and all its access expressions
        :return: A map of the form Map)> containing the number
        of dimensions, the original access and the basic ISL sets of each access of each subscript
        Two maps of the form Map)> containing the global min/max ISL sets of each
        subscript
        """

        # Debug-only dump of the inputs, rendered back to Python text via
        # astor (keys are AST nodes, not strings).
        if __debug__:
            import astor
            logger.debug("- Loop information:")
            for k, v in loops_info.items():
                # logger.debug(str(astor.to_source(k)) + " -> " + str(astor.dump_tree(v)))
                logger.debug(str(astor.to_source(k)) + " -> " + str(astor.to_source(v)))
            logger.debug("- Subscripts Accesses:")
            for var_name, values in subscript_accesses_info.items():
                for a in values:
                    # logger.debug(str(var_name) + ": " + str([str(astor.dump_tree(dim)) for dim in a]))
                    logger.debug(str(var_name) + ": " + str([str(astor.to_source(dim)) for dim in a]))

        # Global loop information (defining the three spaces)
        # Build lower/upper bound constraints per loop index; the constraint
        # codes 2 and 4 are defined by _IslSetBuilder (not visible here).
        global_min_isl_builder = _IslSetBuilder()
        global_min_isl_builder.set_variables([loop_ind.id for loop_ind in loops_info.keys()])
        for loop_ind, loop_bounds in loops_info.items():
            loop_ind_varname = loop_ind.id
            global_min_isl_builder.add_constraint(2, loop_ind_varname, loop_bounds.args[0])
            global_min_isl_builder.add_constraint(4, loop_ind_varname, loop_bounds.args[1])
        # Clone the shared base builder: one copy per space (global min,
        # global max, per-access).
        import copy
        global_max_isl_builder = copy.deepcopy(global_min_isl_builder)
        specific_access_isl_builder = copy.deepcopy(global_min_isl_builder)
github dantarion / bbtools / gg_rev_script_parser.py View on Github external
astStack[-1].append(Expr(Call(Name(id=dbData["name"]),map(sanitizer(currentCMD),cmdData),[],None,None)))
            else:
                # Closing a block: Python requires a non-empty body, so pad
                # with `pass` before popping the AST stack.
                if len(astStack[-1]) == 0:
                    astStack[-1].append(Pass())
                if len(astStack) > 1:
                    astStack.pop()
                    # endState/endSubroutine also force-close any still-open
                    # `upon`/`if` blocks left on the stack.
                    if dbData['name'] == 'endState' or dbData['name'] == 'endSubroutine':
                        while(inUpon > 0):
                            astStack.pop()
                            inUpon -= 1
                        while(inIf > 0):
                            astStack.pop()
                            inIf -= 1
                    # Back at top level: record the finished function's
                    # regenerated source alongside its metadata.
                    if len(astStack) == 1:
                        lastFunc = j["Functions"][-1]
                        j["FunctionsPy"].append({"type":lastFunc["type"],"name":lastFunc["name"],"src":astor.to_source(astStack[-1][-1])})
                else:
                    # Stack underflow: report and fall back to emitting the
                    # command as a plain call expression.
                    print("\tasterror",currentIndicator)
                    astStack[-1].append(Expr(Call(Name(id=dbData["name"]),map(sanitizer(currentCMD),cmdData),[],None,None)))
                if dbData['name'] == 'endUpon':
                    inUpon -= 1
                if dbData['name'] == 'endIf':
                    inIf -= 1

        else:
            # Ordinary command: emit as a call expression.
            astStack[-1].append(Expr(Call(Name(id=dbData["name"]),map(sanitizer(currentCMD),cmdData),[],None,None)))
        if GAME == "gg_rev":
            if dbData['name'] == 'sprite':
                #comment = "Frame {0}->{1}".format(currentFrame,currentFrame+cmdData[1])
                currentFrame = currentFrame+cmdData[1]
            if dbData['name'] == "upon":
                param = 1
github tsdaemon / treelstm-code-generation / scripts / preprocess_utils.py View on Github external
raw_code = code
        raw_codes.append(raw_code)

        # Canonicalize, then substitute each original string literal with
        # its quoted placeholder representation from str_map.
        code = canonicalize_code(code)
        for str_literal, str_repr in str_map.items():
            code = code.replace(str_literal, '\'' + str_repr + '\'')

        codes.append(code)

        try:
            p_tree = parse_raw(code)
            # sanity check
            # Round-trip: custom tree -> Python AST -> source must equal
            # ast.parse -> source for the same code.
            pred_ast = parse_tree_to_python_ast(p_tree)
            pred_code = astor.to_source(pred_ast)
            ref_ast = ast.parse(code)
            ref_code = astor.to_source(ref_ast)

            if pred_code != ref_code:
                raise RuntimeError('code mismatch!')

            parse_trees.append(p_tree)
        # NOTE(review): bare `except` swallows everything (including
        # KeyboardInterrupt); failures are recorded as a None placeholder.
        except:
            parse_trees.append(None)

    serialize_to_file(codes, code_out_file)
    serialize_to_file(raw_codes, raw_code_out_file)
    return parse_trees
github tensorflow / docs / tools / tensorflow_docs / api_generator / parser.py View on Github external
# of the defaults.
      ast_defaults = [None] * len(argspec.defaults)
    except SyntaxError:
      # You may get a SyntaxError using pytype in python 2.
      ast_defaults = [None] * len(argspec.defaults)
    except IndexError:
      # Some python3 signatures fail in tf_inspect.getsource with IndexError
      ast_defaults = [None] * len(argspec.defaults)

    # Pair each defaulted argument with its runtime default value and (when
    # parsing succeeded) the AST node of the default expression.
    for arg, default, ast_default in zip(
        argspec.args[first_arg_with_default:], argspec.defaults, ast_defaults):
      if id(default) in reverse_index:
        # Known object: use its canonical documented name.
        default_text = reverse_index[id(default)]
      elif ast_default is not None:
        # Render the default expression back to source, escaping tabs and
        # newlines and collapsing triple quotes so the text embeds safely
        # in generated documentation.
        default_text = (
            astor.to_source(ast_default).rstrip('\n').replace('\t', '\\t')
            .replace('\n', '\\n').replace('"""', "'"))
        default_text = PAREN_NUMBER_RE.sub('\\1', default_text)

        if default_text != repr(default):
          # This may be an internal name. If so, handle the ones we know about.
          # TODO(wicke): This should be replaced with a lookup in the index.
          # TODO(wicke): (replace first ident with tf., check if in index)
          internal_names = {
              'ops.GraphKeys': 'tf.GraphKeys',
              '_ops.GraphKeys': 'tf.GraphKeys',
              'init_ops.zeros_initializer': 'tf.zeros_initializer',
              'init_ops.ones_initializer': 'tf.ones_initializer',
              'saver_pb2.SaverDef': 'tf.train.SaverDef',
          }
          full_name_re = '^%s(.%s)+' % (IDENTIFIER_RE, IDENTIFIER_RE)
          match = re.match(full_name_re, default_text)