How to use the petl.util.data function in petl

To help you get started, we’ve selected a few examples of petl.util.data, drawn from popular ways it is used in public projects.

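Before diving into the excerpts, it helps to know what data itself does: given a petl table (any iterable of rows whose first row is the header), it returns an iterable over the data rows only, with the header skipped. A minimal sketch, with an invented table literal for illustration:

import petl as etl

table = [['foo', 'bar'],
         ['a', 1],
         ['b', 2]]

# data() skips the header row and yields only the data rows
for row in etl.data(table):
    print(row)
# prints roughly ('a', 1) then ('b', 2); ('foo', 'bar') is not emitted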

github petl-developers / petl / src / petl / transform.py
def itercrossjoin(sources):

    # construct fields
    outflds = list()
    for s in sources:
        outflds.extend(fields(s))
    yield outflds

    datasrcs = [data(src) for src in sources]
    for prod in product(*datasrcs):
        outrow = list()
        for row in prod:
            outrow.extend(row)
        yield outrow
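
This generator builds the cross join by concatenating the source headers, then stripping each source's header with data() before taking the Cartesian product of the remaining rows. A hedged usage sketch of the public crossjoin wrapper built on this pattern, with invented table literals:

import petl as etl

left = [['id', 'colour'], [1, 'blue'], [2, 'red']]
right = [['shape'], ['circle'], ['square']]

# every data row of `left` paired with every data row of `right`
for row in etl.crossjoin(left, right):
    print(row)
# first the combined header ('id', 'colour', 'shape'), then 2 x 2 = 4 data rows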

github petl-developers / petl / src / petl / transform / reshape.py
def __iter__(self):
    for row in data(self.table):
        for value in row:
            yield value
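
This __iter__ walks every data row of the underlying table and yields each value in turn, which is how petl's flatten view produces a single row-major sequence of values. A hedged usage sketch:

import petl as etl

table = [['foo', 'bar'], ['a', 1], ['b', 2]]

# flatten() yields the values of the data rows one by one, header excluded
print(list(etl.flatten(table)))
# roughly: ['a', 1, 'b', 2]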

github petl-developers / petl / src / petl / io / csv_py3.py
def _writecsv(table, source, mode, write_header, encoding, **csvargs):
    rows = table if write_header else data(table)
    with source.open_(mode) as buf:
        # wrap buffer for text IO
        csvfile = io.TextIOWrapper(buf, encoding=encoding,
                                   newline='', write_through=True)
        try:
            writer = csv.writer(csvfile, **csvargs)
            for row in rows:
                writer.writerow(row)
        finally:
            csvfile.detach()
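
The key line is rows = table if write_header else data(table): iterating the table itself includes the header row, while data(table) skips it. From the caller's side this corresponds, if I read the current petl signature correctly, to the write_header flag on tocsv:

import petl as etl

table = [['foo', 'bar'], ['a', 1], ['b', 2]]

# default behaviour: header row plus data rows
etl.tocsv(table, 'with_header.csv')

# data rows only -- assumes tocsv accepts write_header, as recent petl releases do
etl.tocsv(table, 'no_header.csv', write_header=False)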

github petl-developers / petl / src / petl / io / csv_py2.py
def _writecsv(table, source, mode, write_header, **csvargs):
    rows = table if write_header else data(table)
    with source.open_(mode) as csvfile:
        writer = csv.writer(csvfile, **csvargs)
        for row in rows:
            writer.writerow(row)

github petl-developers / petl / src / petl / io.py
def toucsv(table, source=None, dialect=csv.excel, encoding='utf-8', write_header=True, **kwargs):
    """
    Write the table to a CSV file via the given encoding. Like :func:`tocsv` but accepts an additional ``encoding``
    argument which should be one of the Python supported encodings. See also :mod:`codecs`.

    .. versionadded:: 0.19
    """
    source = _write_source_from_arg(source)
    with source.open_('wb') as f:
        writer = ucsv.UnicodeWriter(f, dialect=dialect, encoding=encoding, **kwargs)
        # User specified no header
        if write_header == False:
            for row in data(table):
                writer.writerow(row)
        # Default behavior, write the header
        else:
            for row in table:
                writer.writerow(row)
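
toucsv is part of the legacy petl 0.x API; in petl 1.x the encoding handling has, as far as I recall, been folded into tocsv itself. A hedged sketch of the modern equivalent:

import petl as etl

table = [['name', 'city'], ['José', 'São Paulo']]

# petl >= 1.0: pass the encoding straight to tocsv
etl.tocsv(table, 'people.csv', encoding='utf-8')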

github petl-developers / petl / src / petl / io / json.py
...           ['c', 2]]
        >>> from petl import tojsonarrays
        >>> tojsonarrays(table1, 'example.json')
        >>> # check what it did
        ... print(open('example.json').read())
        [["a", 1], ["b", 2], ["c", 2]]

    Note that this is currently not streaming, all data is loaded into memory
    before being written to the file.

    """

    if output_header:
        obj = list(table)
    else:
        obj = list(data(table))
    _writejson(source, obj, prefix, suffix, *args, **kwargs)
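
Here output_header decides whether the header row is serialised; when it is false, data(table) drops the header before the rows are dumped as JSON. A hedged sketch mirroring the docstring example above:

import petl as etl

table = [['foo', 'bar'], ['a', 1], ['b', 2]]

# header omitted by default, matching the output shown in the docstring
etl.tojsonarrays(table, 'example.json')
print(open('example.json').read())
# roughly: [["a", 1], ["b", 2]]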

github petl-developers / petl / src / petl / io.py
The `filename` argument is the path of the delimited file, all other keyword
    arguments are passed to :func:`csv.writer`. So, e.g., to override the delimiter
    from the default CSV dialect, provide the `delimiter` keyword argument.
     
    Note that no attempt is made to check that the fields or row lengths are 
    consistent with the existing data, the data rows from the table are simply
    appended to the file. See also the :func:`cat` function.
    
    Supports transparent writing to ``.gz`` and ``.bz2`` files.
    
    """
    
    source = _write_source_from_arg(source)
    with source.open_('ab') as f:
        writer = csv.writer(f, dialect=dialect, **kwargs)
        for row in data(table):
            writer.writerow(row)
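
This writer always iterates data(table), which is why appendcsv never re-writes the header when adding rows to an existing file. A hedged usage sketch:

import petl as etl

existing = [['foo', 'bar'], ['a', 1]]
more = [['foo', 'bar'], ['b', 2]]

etl.tocsv(existing, 'example.csv')   # writes the header plus the first data row
etl.appendcsv(more, 'example.csv')   # appends data rows only, no second header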