How to use the datacube.ui.click module in datacube

To help you get started, we’ve selected a few datacube.ui.click examples, based on popular ways the module is used in public projects.

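A minimal sketch of the typical shape of a datacube CLI command built with datacube.ui.click is shown below. The command name, app name and body are illustrative, not taken from the projects below:

import click
from datacube.ui import click as ui


@click.command(name='my-tool')
@ui.global_cli_options              # shared datacube options (verbosity, config, ...)
@ui.pass_index(app_name='my-tool')  # opens the index and passes it as the first argument
def cli(index):
    """Print the number of products defined in the index."""
    products = list(index.products.get_all())
    click.echo('%d products' % len(products))


if __name__ == '__main__':
    cli()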

Example from opendatacube/datacube-core, datacube/ui/click.py:
simply print the given error message.

    Include a '%s' in the message to print the single line message from the
    exception.

    :param e: caught Exception
    :param msg: Message to User with optional %s
    """
    ctx = click.get_current_context()
    if ctx.obj['verbosity'] >= 1:
        raise e
    else:
        if '%s' in msg:
            click.echo(msg % e)
        else:
            click.echo(msg)
        ctx.exit(1)
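
This helper re-raises the exception when verbosity is turned up and otherwise prints a one-line message and exits with status 1. The sketch below shows how a command might defer error reporting to it; it assumes the truncated helper above is datacube.ui.click's handle_exception(msg, e), which the docstring suggests:

import click
from datacube.ui import click as ui
from datacube.ui.click import handle_exception  # assumed name of the helper shown above


@click.command()
@ui.global_cli_options
@ui.pass_index()
def check(index):
    try:
        datasets = index.datasets.search_eager(product='ls8_nbar_albers')  # placeholder query
        click.echo('%d datasets' % len(datasets))
    except Exception as e:
        handle_exception('Search failed: %s', e)
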
Example from opendatacube/datacube-core, datacube/scripts/dataset.py:
@ui.pass_index()
def index_cmd(index, product_names,
              exclude_product_names,
              auto_match,
              auto_add_lineage,
              verify_lineage,
              dry_run,
              ignore_lineage,
              confirm_ignore_lineage,
              dataset_paths):
    if confirm_ignore_lineage is False and ignore_lineage is True:
        if sys.stdin.isatty():
            confirmed = click.confirm("Requested to skip lineage information, Are you sure?", default=False)
            if not confirmed:
                click.echo('OK aborting', err=True)
                sys.exit(1)
        else:
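
The truncated else branch handles the non-interactive case. A generic sketch of the same confirmation pattern, with illustrative names only, looks like this:

import sys
import click


def confirm_or_abort(prompt, already_confirmed):
    # Prompt when attached to a terminal; in scripted runs require an explicit flag instead.
    if already_confirmed:
        return
    if sys.stdin.isatty():
        if not click.confirm(prompt, default=False):
            click.echo('OK aborting', err=True)
            sys.exit(1)
    else:
        click.echo('Non-interactive run: pass the explicit confirmation flag', err=True)
        sys.exit(1)
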
Example from GeoscienceAustralia/digitalearthau, digitalearthau/stacker.py:
@ui.verbose_option
@ui.pass_index(app_name=APP_NAME)
def submit(index: Index,
           app_config: str,
           project: str,
           queue: str,
           no_qsub: bool,
           time_range: Tuple[datetime, datetime]):
    app_config_path = Path(app_config).resolve()
    app_config = paths.read_document(app_config_path)

    task_desc, task_path = init_task_app(
        job_type="stack",
        source_products=[app_config['output_type']],  # With stacker, source=output
        output_products=[app_config['output_type']],  # With stacker, source=output
        # TODO: Use @datacube.ui.click.parsed_search_expressions to allow params other than time from the cli?
        datacube_query_args=Query(index=index, time=time_range).search_terms,
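
The query arguments are built with datacube.api.query.Query, whose search_terms property turns keyword arguments into the dict form the index expects. A small hedged sketch with a placeholder product and date range:

from datetime import datetime
from datacube.api.query import Query

query = Query(product='ls8_nbar_albers',
              time=(datetime(2018, 1, 1), datetime(2018, 12, 31)))
print(query.search_terms)  # e.g. {'product': 'ls8_nbar_albers', 'time': Range(begin=..., end=...)}
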
Example from opendatacube/datacube-core, datacube_apps/movie_generator.py:
@ui.global_cli_options
@ui.executor_cli_options
def main(bounds, base_output_name, load_bounds_from, start_date, end_date, product, measurement, executor,
         step_size, stats_duration, time_incr, ffmpeg_path, crs):
    """
    Create an mp4 movie file based on datacube data

    Use only clear pixels, and mosaic over time to produce full frames.

    Can combine products, specify multiple --product

    """
    if load_bounds_from:
        crs, (left, bottom, right, top) = bounds_from_file(load_bounds_from)
    elif bounds:
        left, bottom, right, top = bounds
    else:
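
Here @ui.global_cli_options and @ui.executor_cli_options add the shared datacube options plus executor selection on the command line, and the chosen executor object is passed into main. A hedged sketch of how such an executor is typically driven; the method names follow datacube.executor's serial executor and are an assumption here:

def do_one_frame(task):
    # placeholder for the real per-task work
    return task


def process_all(executor, tasks):
    futures = [executor.submit(do_one_frame, task) for task in tasks]
    for future in executor.as_completed(futures):
        print(executor.result(future))
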
Example from opendatacube/datacube-core, datacube/scripts/dataset.py:
@ui.pass_index()
def info_cmd(index: Index, show_sources: bool, show_derived: bool,
             f: str,
             max_depth: int,
             ids: Iterable[str]) -> None:
    # Using an array wrapper to get around the lack of "nonlocal" in py2
    missing_datasets = [0]

    def get_datasets(ids):
        for id_ in ids:
            dataset = index.datasets.get(id_, include_sources=show_sources)
            if dataset:
                yield dataset
            else:
                click.echo('%s missing' % id_, err=True)
                missing_datasets[0] += 1
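
index.datasets.get() returns None for an unknown id, which is what the missing_datasets counter relies on, and include_sources=True also loads the lineage tree. A standalone hedged sketch (the UUID is a placeholder):

from datacube import Datacube

dc = Datacube(app='info-example')
dataset = dc.index.datasets.get('00000000-0000-0000-0000-000000000000',
                                include_sources=True)
if dataset is None:
    print('missing')
else:
    print(dataset.id, list(dataset.sources))
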
Example from GeoscienceAustralia/digitalearthau, digitalearthau/vpmapper/cli.py:
@ui.parsed_search_expressions
def run_many(config_file, expressions, environment=None, limit=None):
    # Load Configuration file
    d4 = Dataset2Dataset(config_file=config_file, dc_env=environment)

    tasks = d4.generate_tasks(expressions, limit=limit)

    execute_with_dask(tasks)
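
@ui.parsed_search_expressions collects the remaining command-line arguments, parses them as search expressions (for example product=ls8_nbar_albers or time in 2018) and passes the resulting dict to the command as expressions, which is why run_many takes that parameter. A minimal hedged sketch:

import click
from datacube.ui import click as ui


@click.command()
@ui.parsed_search_expressions
def show_query(expressions):
    # Echo the parsed search terms, e.g. for: show-query product=ls8_nbar_albers time in 2018
    click.echo(repr(expressions))
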
Example from GeoscienceAustralia/digitalearthau, digitalearthau/move.py:
@ui.pass_index('move')
def cli(index, dry_run, paths, destination, checksum):
    """
    Move the given folder of datasets into the given destination folder.

    This will checksum the data, copy it to the destination, and mark the original as archived in the DEA index.


    Notes:

    * An operator can later run dea-clean to trash the archived original locations.

    * Source datasets with failing checksums will be left as-is, with a warning logged.

    * Both the source(s) and destination paths are expected to be paths containing existing DEA collections.
    (See collections.py and paths.py)
    """
Example from opendatacube/datacube-core, datacube_apps/stacker/fixer.py:
@datacube.ui.click.pass_index(app_name=APP_NAME)
@datacube.ui.click.global_cli_options
@click.option('--cell-index', 'cell_index', help='Limit the process to a particular cell (e.g. 14,-11)',
              callback=task_app.validate_cell_index, default=None)
@click.option('--year', 'time', callback=task_app.validate_year, help='Limit the process to a particular year')
@click.option('--export-path', 'export_path',
              help='Write the stacked files to an external location without updating the index',
              default=None,
              type=click.Path(exists=True, writable=True, file_okay=False))
@task_app.queue_size_option
@task_app.task_app_options
@task_app.task_app(make_config=make_fixer_config, make_tasks=make_fixer_tasks)
def fixer(index, config, tasks, executor, queue_size, **kwargs):
    """This script rewrites unstacked dataset files to correct their NetCDF metadata."""
    click.echo('Starting fixer utility...')

    task_func = partial(do_fixer_task, config)
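
functools.partial binds the loaded config once, so the executor only has to hand each task to task_func. A trivial hedged illustration of that binding, with made-up task and config contents:

from functools import partial


def do_task(config, task):
    return {'cell': task['cell'], 'output': config['location']}


config = {'location': '/tmp/stacker-output'}
task_func = partial(do_task, config)
print(task_func({'cell': (14, -11)}))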