How to use the datacube.Datacube class in datacube

To help you get started, we've selected a few datacube.Datacube examples based on popular ways it is used in public projects.

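Before the project-specific examples, here is a minimal sketch of the basic pattern: construct a Datacube instance, then query it. The product name, measurement and coordinates below are illustrative assumptions, not values from the projects shown.

import datacube

# Connect using the default configuration; a datacube config file or
# environment variables tell the library which index database to use.
dc = datacube.Datacube(app='getting-started')

# list_products() returns a pandas DataFrame describing the indexed products.
print(dc.list_products())

# Load one measurement over a small area and time range
# (product and measurement names are assumptions for illustration).
data = dc.load(product='ls8_nbar_albers',
               measurements=['red'],
               x=(149.0, 149.1), y=(-35.3, -35.2),
               time=('2018-01-01', '2018-02-01'))
print(data)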

github opendatacube / datacube-core / datacube_apps / pixeldrill.py
                        type=float,
                        default=4000,
                        required=False)

    parser.add_argument('-verbose',
                        action='store_true',
                        help='verbose output')

    args = parser.parse_args()
    kwargs = vars(args)

    if not args.product:
        parser.print_help()
        print('\n\nValid choices for PRODUCT are:')
        dc = datacube.Datacube()
        prods = dc.list_products()['name']
        print(prods.to_string(index=False, header=False))
        parser.exit()

    if args.verbose:
        print(kwargs)

    run(**kwargs)
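
Here dc.list_products() returns a pandas DataFrame, so selecting its 'name' column yields a Series; to_string(index=False, header=False) then prints one bare product name per line as help output.
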
github opendatacube / odc-tools / apps / dc_tools / odc / apps / dc_tools / export_md.py
def cli(ctx, datacube_config):
    """ Specify datacube index to be used for the given datacube config"""
    ctx.obj = Datacube(config=datacube_config).index
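
Passing config to the constructor points the library at a specific datacube configuration file, and taking .index immediately is a common way to get a handle on the low-level index API. A small sketch of that pattern (the config path is an assumption):

from datacube import Datacube

# Hypothetical path; any datacube configuration file can be used here.
index = Datacube(config='/etc/datacube.conf').index

# The index exposes the low-level product and dataset APIs, e.g.:
for product in index.products.get_all():
    print(product.name)
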
github ceos-seo / data_cube_utilities / data_cube_utilities / dc_fractional_coverage_classifier.py
    Assumptions:
      The command-line tool assumes there is a measurement called cf_mask
    Inputs:
      platform (str)
      product_type (str)
      min_lon (str)
      max_lon (str)
      min_lat (str)
      max_lat (str)
      start_date (str)
      end_date (str)
      dc_config (str)
    """

    # Initialize data cube object
    dc = datacube.Datacube(config=dc_config, app='dc-frac-cov')

    products = dc.list_products()
    platform_names = set([product[6] for product in products.values])
    if platform not in platform_names:
        print('ERROR: Invalid platform.')
        print('Valid platforms are:')
        for name in platform_names:
            print(name)
        return

    product_names = [product[0] for product in products.values]
    if product_type not in product_names:
        print('ERROR: Invalid product type.')
        print('Valid product types are:')
        for name in product_names:
            print(name)
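
Because list_products() returns a pandas DataFrame, indexing rows positionally (product[6] for the platform, product[0] for the name) is brittle; selecting columns by label, e.g. products['name'] as in the pixeldrill example above, survives column reordering.
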
github opendatacube / datacube-core / datacube / execution / execution_engine2.py
    def _get_data(self, metadata, chunk=None):
        '''Retrieves data for the worker.'''
        use_threads = isinstance(self._datacube.index, S3AIOIndex)
        if chunk is None:
            return Datacube.load_data(metadata['grouped'], metadata['geobox'],
                                      metadata['measurements_values'].values(), use_threads=use_threads)
        else:
            return Datacube.load_data(metadata['grouped'][chunk[0]], metadata['geobox'][chunk[1:]],
                                      metadata['measurements_values'].values(), use_threads=use_threads)
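
Datacube.load_data is the low-level half of what dc.load() does internally: datasets are found, grouped along time, and then loaded against an output geobox. A sketch of that pattern, assuming datacube-core's 1.8-era API (product name, measurement and grid values are illustrative):

import datacube
from affine import Affine
from datacube.api.query import query_group_by
from datacube.utils.geometry import CRS, GeoBox

dc = datacube.Datacube(app='low-level-load')

# Find datasets matching a query (the product name is an assumption).
product = dc.index.products.get_by_name('ls8_nbar_albers')
datasets = dc.find_datasets(product='ls8_nbar_albers',
                            time=('2018-01-01', '2018-02-01'))

# Group the datasets along the time dimension.
grouped = dc.group_datasets(datasets, query_group_by(group_by='time'))

# Define the output grid by hand: 100x100 pixels of 25 m in EPSG:3577
# (the origin coordinates are arbitrary example values).
geobox = GeoBox(100, 100,
                Affine(25.0, 0.0, 1500000.0, 0.0, -25.0, -3900000.0),
                CRS('EPSG:3577'))

# Load the requested measurements for every grouped dataset.
measurements = product.lookup_measurements(['red'])
data = dc.load_data(grouped, geobox, measurements.values())
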
github GeoscienceAustralia / COG-Conversion / dea_cog_converter / cli.py
def get_dataset_values(product_name, product_config, time_range=None):
    """
    Extract the file list corresponding to a product for the given year and month using datacube API.
    """
    try:
        query = {**dict(product=product_name), **time_range}
    except TypeError:
        # Time range is None
        query = {**dict(product=product_name)}

    dc = Datacube(app='cog-worklist query')

    field_names = get_field_names(product_config)

    LOG.info(f"Perform a datacube dataset search returning only the specified fields, {field_names}.")
    ds_records = dc.index.datasets.search_returning(field_names=tuple(field_names), **query)

    search_results = False
    for ds_rec in ds_records:
        search_results = True
        yield check_prefix_from_query_result(ds_rec, product_config)

    if not search_results:
        LOG.warning(f"Datacube product query is empty for {product_name} product with time-range, {time_range}")
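
search_returning is a generator that lazily yields lightweight named tuples containing only the requested fields, which is why the loop above must track whether any result was seen before deciding the query was empty.
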
github opendatacube / datacube-core / docs / user / recipes / poly_drill.py
import fiona
import rasterio.features

import datacube
from datacube.utils import geometry


def geometry_mask(geoms, geobox, all_touched=False, invert=False):
    """Rasterise geometries onto the grid of a loaded dataset."""
    return rasterio.features.geometry_mask([geom.to_crs(geobox.crs) for geom in geoms],
                                           out_shape=geobox.shape,
                                           transform=geobox.affine,
                                           all_touched=all_touched,
                                           invert=invert)


def main():
    shape_file = 'my_shape_file.shp'
    with fiona.open(shape_file) as shapes:
        crs = geometry.CRS(shapes.crs_wkt)
        first_geometry = next(iter(shapes))['geometry']
        geom = geometry.Geometry(first_geometry, crs=crs)

    query = {
        'time': ('1990-01-01', '1991-01-01'),
        'geopolygon': geom
    }

    dc = datacube.Datacube(app='poly-drill-recipe')
    data = dc.load(product='ls5_nbar_albers', measurements=['red'], **query)

    mask = geometry_mask([geom], data.geobox, invert=True)
    data = data.where(mask)

    data.red.plot.imshow(col='time', col_wrap=5)
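
With invert=True the mask is True inside the polygon, so data.where(mask) keeps the polygon's interior and replaces everything outside it with NaN before plotting.
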
github opendatacube / datacube-stats / scripts / tile_check.py
import pickle
import time

import click


def main(products, year, month, save):
    from datacube_stats.utils.query import multi_product_list_cells
    import datacube
    from datacube.api import GridWorkflow

    query = {}
    if year is not None:
        if month is not None:
            # Roll over to January of the next year rather than month 13.
            if month == 12:
                end = '{}-01-01'.format(year + 1)
            else:
                end = '{}-{}-01'.format(year, month + 1)
            query['time'] = ('{}-{}-01'.format(year, month), end)
        else:
            query['time'] = ('{}-01-01'.format(year),
                             '{}-12-31'.format(year))

    dc = datacube.Datacube(app='dbg')
    gw = GridWorkflow(product=products[0],
                      index=dc.index)

    click.echo('## Starting to run query', err=True)
    t_start = time.time()
    co_common, co_unmatched = multi_product_list_cells(products, gw, **query)
    t_took = time.time() - t_start
    click.echo('## Completed in {} seconds'.format(t_took), err=True)

    if save is not None:
        click.echo('## Saving data to {}'.format(save), err=True)
        with open(save, 'wb') as f:
            pickle.dump(dict(co_common=co_common, co_unmatched=co_unmatched), f)
        click.echo(' done')
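
GridWorkflow provides a tiled, cell-based view over gridded products in the index, which is what lets multi_product_list_cells report the grid cells the given products have in common (co_common) and those where they diverge (co_unmatched).
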
github opendatacube / odc-tools / dea / apps / index_from_json.py
    def process_file(f, index):
        # Counters feeding the progress printout below.
        n_total = 0
        n_failed = 0

        for ds in from_json_lines(f, index, verify_lineage=False):
            n_total += 1
            try:
                index.datasets.add(ds, with_lineage=True)
            except Exception as e:
                n_failed += 1
                print(str(e))

            if (n_total % 10) == 0:
                print('.', end='', flush=True)

            if (n_total % 100) == 0:
                print(' T:{:d} F:{:d}'.format(n_total, n_failed))

    dc = datacube.Datacube(env=env)

    if len(input_fname) == 0:
        input_fname = ('-',)

    for filename in input_fname:
        if filename == '-':
            process_file(sys.stdin, dc.index)
        else:
            with open(filename, 'rt') as f:
                process_file(f, dc.index)
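
Treating a filename of '-' as stdin lets the indexer sit at the end of a shell pipeline; each parsed dataset document is added through index.datasets.add, with failures counted and printed rather than aborting the run.
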
github GeoscienceAustralia / digitalearthau / scripts / gaps.py
def main(products, output_file, start_date, end_date, time_divs):
    """ Entry point. """
    datacube = Datacube(app='find-those-gaps')

    summary = find_gaps(datacube, products,
                        time_query(start_date, end_date), time_divs)

    yaml.dump(summary, output_file, default_flow_style=False)
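
Note that this script names its instance datacube, shadowing the module name; that is safe here only because the class was imported directly as Datacube rather than accessed through the datacube module.
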
github opendatacube / datacube-core / docs / user / recipes / line_transect.py
import fiona

import datacube
from datacube.utils import geometry


def main():
    with fiona.open('line.shp') as shapes:
        crs = geometry.CRS(shapes.crs_wkt)
        first_geometry = next(shapes)['geometry']
        line = geometry.Geometry(first_geometry, crs=crs)

    query = {
        'time': ('1990-01-01', '1991-01-01'),
        'geopolygon': line
    }

    dc = datacube.Datacube(app='line-trans-recipe')
    data = dc.load(product='ls5_nbar_albers', measurements=['red'], **query)

    # transect() is a helper defined in the full recipe: it samples the loaded
    # data along the line, here at a step of one pixel width.
    trans = transect(data, line, abs(data.affine.a))
    trans.red.plot(x='distance', y='time')
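
Plotting with x='distance' and y='time' gives a Hovmöller-style view of the transect, assuming the transect helper exposes distance along the line as a coordinate, as the plot call implies.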