How to use the fastparquet.writer.write_common_metadata function in fastparquet

To help you get started, we’ve selected a few fastparquet examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

Example 1 — from dask/dask, file dask/dataframe/io/parquet/fastparquet.py (view on GitHub):
def write_metadata(parts, fmd, fs, path, append=False, **kwargs):
    """Write the dataset-level ``_metadata`` and ``_common_metadata`` files.

    Parameters
    ----------
    parts : list or None
        Per-partition metadata; each entry is a row-group object, a list of
        row-group objects, or None (skipped).
    fmd : fastparquet thrift FileMetaData
        Base file metadata; not mutated by this function.
    fs : filesystem object
        Must provide ``sep`` and ``open`` (fsspec-like).
    path : str
        Root directory of the parquet dataset.
    append : bool, optional
        Unused here; kept for interface compatibility with callers.
    **kwargs
        Ignored; accepted for interface compatibility.
    """
    _meta = copy.copy(fmd)
    # copy.copy is shallow, so _meta.row_groups would otherwise be the
    # very same list as fmd.row_groups and the appends below would mutate
    # the caller's metadata. Rebind to a fresh list to keep fmd pristine.
    _meta.row_groups = list(fmd.row_groups)
    if parts:
        for rg in parts:
            if rg is None:
                continue  # partition produced no row groups
            if isinstance(rg, list):
                _meta.row_groups.extend(rg)
            else:
                _meta.row_groups.append(rg)
        # _metadata carries the full row-group listing for the dataset.
        fn = fs.sep.join([path, "_metadata"])
        fastparquet.writer.write_common_metadata(
            fn, _meta, open_with=fs.open, no_row_groups=False
        )

    # if appending, could skip this, but would need to check existence
    # _common_metadata holds only the schema (row groups stripped by default).
    fn = fs.sep.join([path, "_common_metadata"])
    fastparquet.writer.write_common_metadata(fn, _meta, open_with=fs.open)
Example 2 — from dask/dask, file dask/dataframe/io/parquet/fastparquet.py (view on GitHub):
# NOTE(review): fragment cut from the body of write_metadata above — the
# enclosing `def` line is not shown here, and `_meta`, `fs`, `path` come from
# that missing scope. Not runnable as displayed (indentation is mangled by
# the page scrape); kept byte-identical.
if parts:
            # Fold every partition's row group(s) into the combined metadata.
            for rg in parts:
                if rg is not None:
                    if isinstance(rg, list):
                        for r in rg:
                            _meta.row_groups.append(r)
                    else:
                        _meta.row_groups.append(rg)
            # _metadata keeps the full row-group listing (no_row_groups=False).
            fn = fs.sep.join([path, "_metadata"])
            fastparquet.writer.write_common_metadata(
                fn, _meta, open_with=fs.open, no_row_groups=False
            )

        # if appending, could skip this, but would need to check existence
        # _common_metadata is schema-only (row groups stripped by default).
        fn = fs.sep.join([path, "_common_metadata"])
        fastparquet.writer.write_common_metadata(fn, _meta, open_with=fs.open)
Example 3 — from holoviz/datashader, file datashader/spatial/points.py (view on GitHub):
# NOTE(review): fragment cut from a larger datashader function — `ddf`,
# `path`, `props`, and `compression` are defined in the missing enclosing
# scope; not runnable as displayed. Kept byte-identical.
dd.to_parquet(
        ddf, path, engine='fastparquet', compression=compression)

    # Open resulting parquet file
    pf = fp.ParquetFile(path)

    # Add a new property to the file metadata
    # (shallow copy: new_fmd shares sub-objects with pf.fmd — presumably
    # acceptable here since pf is discarded; TODO confirm)
    new_fmd = copy.copy(pf.fmd)
    new_kv = fp.parquet_thrift.KeyValue()
    new_kv.key = 'SpatialPointsFrame'
    new_kv.value = json.dumps(props)
    new_fmd.key_value_metadata.append(new_kv)

    # Overwrite file metadata
    # _metadata keeps row groups (no_row_groups=False) ...
    fn = os.path.join(path, '_metadata')
    fp.writer.write_common_metadata(fn, new_fmd, no_row_groups=False)

    # ... while _common_metadata is written schema-only (default).
    fn = os.path.join(path, '_common_metadata')
    fp.writer.write_common_metadata(fn, new_fmd)
Example 4 — from holoviz/datashader, file datashader/spatial/points.py (view on GitHub):
# NOTE(review): duplicate of the previous datashader fragment, again cut
# from a larger function — `path` and `props` come from the missing
# enclosing scope; not runnable as displayed. Kept byte-identical.
# Open resulting parquet file
    pf = fp.ParquetFile(path)

    # Add a new property to the file metadata
    # (shallow copy: new_fmd shares sub-objects with pf.fmd — presumably
    # acceptable here since pf is discarded; TODO confirm)
    new_fmd = copy.copy(pf.fmd)
    new_kv = fp.parquet_thrift.KeyValue()
    new_kv.key = 'SpatialPointsFrame'
    new_kv.value = json.dumps(props)
    new_fmd.key_value_metadata.append(new_kv)

    # Overwrite file metadata
    # _metadata keeps row groups (no_row_groups=False) ...
    fn = os.path.join(path, '_metadata')
    fp.writer.write_common_metadata(fn, new_fmd, no_row_groups=False)

    # ... while _common_metadata is written schema-only (default).
    fn = os.path.join(path, '_common_metadata')
    fp.writer.write_common_metadata(fn, new_fmd)