How to use the click.progressbar function in click

To help you get started, we’ve selected a few examples of click.progressbar, based on popular ways it is used in public open-source projects.

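Before looking at the project code, here is a minimal sketch of the two core calling patterns: wrapping an iterable, and driving the bar by hand with length= and update(). The names are illustrative.

import time
import click

# Pattern 1: wrap an iterable; the bar advances automatically as you iterate
# and hides itself when output is not attached to a terminal.
with click.progressbar(range(256), label='Processing') as bar:
    for item in bar:
        time.sleep(0.01)  # stand-in for real work

# Pattern 2: declare a total length up front and advance the bar manually;
# useful when progress is measured in bytes or other custom units.
with click.progressbar(length=1024, label='Downloading') as bar:
    for chunk_size in (256, 256, 512):
        bar.update(chunk_size)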

Example from GeoscienceAustralia/digitalearthau: digitalearthau/test_env.py (view on GitHub)
    # TODO copy the files on disk
    # for the URIs collected, copy them to a local file system
    # and change the URIs accordingly before indexing in the next section
    # the digitalearthau.move, datacube_apps/simple_replica.py modules seem relevant
    # it would be great if the user can provide a mapping of file systems
    # perhaps a .yaml file with multiple entries like
    # - source: /g/data/rs0/datacube/
    #   target: /g/data/u46/users/ia1511/data/datacube/
    # or something similar

    # TODO - Need to find a file where index_dataset_paths has been moved and import from that file
    index_dataset_paths = None
    # there are too many DuplicateRecordError warnings
    # can they be prevented?
    if sys.stdout.isatty():
        # Wrap the URIs in a progress bar and pass the wrapped iterable straight
        # to the indexing function; the bar advances as the callee iterates it.
        with click.progressbar(uris, label='Indexing datasets') as uri:
            index_dataset_paths('verify', False, test_index, rules, uri)
    else:
        # No terminal attached (e.g. logs or a pipe), so skip the progress bar.
        index_dataset_paths('verify', False, test_index, rules, uris)

Example from Diaoul/subliminal: subliminal/cli.py (view on GitHub)
    # NB: the snippet starts mid-statement; the click.style() calls below are
    # arguments to a click.echo() summary of collected/ignored/errored videos.
        click.style(str(len(videos)), bold=True, fg='green' if videos else None),
        's' if len(videos) > 1 else '',
        click.style(str(len(ignored_videos)), bold=True, fg='yellow' if ignored_videos else None),
        's' if len(ignored_videos) > 1 else '',
        click.style(str(len(errored_paths)), bold=True, fg='red' if errored_paths else None),
        's' if len(errored_paths) > 1 else '',
    ))

    # exit if no video collected
    if not videos:
        return

    # download best subtitles
    downloaded_subtitles = defaultdict(list)
    with AsyncProviderPool(max_workers=max_workers, providers=provider, provider_configs=obj['provider_configs']) as p:
        with click.progressbar(videos, label='Downloading subtitles',
                               item_show_func=lambda v: os.path.split(v.name)[1] if v is not None else '') as bar:
            for v in bar:
                scores = get_scores(v)
                subtitles = p.download_best_subtitles(p.list_subtitles(v, language - v.subtitle_languages),
                                                      v, language, min_score=scores['hash'] * min_score / 100,
                                                      hearing_impaired=hearing_impaired, only_one=single)
                downloaded_subtitles[v] = subtitles

        if p.discarded_providers:
            click.secho('Some providers have been discarded due to unexpected errors: %s' %
                        ', '.join(p.discarded_providers), fg='yellow')

    # save subtitles
    total_subtitles = 0
    for v, subtitles in downloaded_subtitles.items():
        saved_subtitles = save_subtitles(v, subtitles, single=single, directory=directory, encoding=encoding)
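
The None check in the item_show_func above is deliberate: click also calls the function when there is no current item (for example, for the final render after the loop finishes), passing None, so the callback must handle that case. A minimal sketch, with illustrative file paths:

import os
import click

files = ['/tmp/a.mkv', '/tmp/b.mkv']  # illustrative paths

# item_show_func receives the current item, or None when there is none.
with click.progressbar(files, label='Scanning',
                       item_show_func=lambda f: os.path.basename(f) if f else '') as bar:
    for f in bar:
        pass  # work on f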

Example from lyft/flytekit: flytekit/clis/flyte_cli/main.py (view on GitHub)
def _get_io(node_executions, wf_execution, show_io, verbose):
    # Fetch I/O if necessary
    uri_to_message_map = {}
    if show_io:
        uris = [ne.input_uri for ne in node_executions]
        uris.extend([ne.closure.output_uri for ne in node_executions if ne.closure.output_uri is not None])
        if wf_execution is not None and \
                wf_execution.closure.outputs is not None and \
                wf_execution.closure.outputs.uri is not None:
            uris.append(wf_execution.closure.outputs.uri)

        with _click.progressbar(uris, label="Downloading Inputs and Outputs") as progress_bar_uris:
            for uri in progress_bar_uris:
                uri_to_message_map[uri] = _fetch_and_stringify_literal_map(uri, verbose=verbose)
    return uri_to_message_map

Example from Syncano/syncano-cli: syncano_cli/parse_to_syncano/migrations/transfer.py (view on GitHub)
    def process_relations(self, instance):
        if self.relations:
            with click.progressbar(
                    self.relations,
                    label='Transferring relations for Classes',
                    show_pos=True,
                    item_show_func=ClassProcessor.show_class_name) as classes:
                for class_name, class_relations in classes:
                    RelationProcessor(
                        class_name=class_name,
                        class_relations=class_relations
                    ).process(instance=instance, config=self.config)
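
Here show_pos=True adds an absolute position counter next to the bar, which is useful when individual items (whole classes of relations, in this case) take uneven amounts of time, while item_show_func labels the bar with the class currently being transferred.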

Example from oracle/oci-cli: services/resource_manager/src/oci_cli_resource_manager/generated/resourcemanager_cli.py (view on GitHub)
    kwargs = {}
    kwargs['opc_request_id'] = cli_util.use_or_generate_request_id(ctx.obj['request_id'])
    client = cli_util.build_client('resource_manager', ctx)
    result = client.get_stack_tf_config(
        stack_id=stack_id,
        **kwargs
    )

    # If outputting to stdout we don't want to print a progress bar because it will get mixed up with the output
    # Also we need a non-zero Content-Length in order to display a meaningful progress bar
    bar = None
    if hasattr(file, 'name') and file.name != '' and 'Content-Length' in result.headers:
        content_length = int(result.headers['Content-Length'])
        if content_length > 0:
            bar = click.progressbar(length=content_length, label='Downloading file')

    try:
        if bar:
            bar.__enter__()

        # TODO: Make the download size a configurable option
        # use decode_content=True to automatically unzip service responses (this should be overridden for object storage)
        for chunk in result.data.raw.stream(cli_constants.MEBIBYTE, decode_content=True):
            if bar:
                bar.update(len(chunk))
            file.write(chunk)
    finally:
        if bar:
            bar.render_finish()
        file.close()
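
Calling bar.__enter__() and bar.render_finish() by hand works, but the same conditional bar can be written more idiomatically with contextlib.ExitStack, which enters the bar only when one is wanted and guarantees cleanup. A sketch under the same assumptions as the snippet (result is its response object, with a headers mapping and a raw byte stream):

import contextlib
import click

MEBIBYTE = 1024 * 1024  # mirrors cli_constants.MEBIBYTE above

def stream_to_file(result, file):
    # Show a bar only when the service reported a usable Content-Length.
    content_length = int(result.headers.get('Content-Length', 0))
    with contextlib.ExitStack() as stack:
        bar = None
        if content_length > 0:
            bar = stack.enter_context(
                click.progressbar(length=content_length, label='Downloading file'))
        for chunk in result.data.raw.stream(MEBIBYTE, decode_content=True):
            if bar:
                bar.update(len(chunk))
            file.write(chunk)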

Example from opendatacube/odc-tools: libs/dscache/odc/dscache/apps/slurpy.py (view on GitHub)
    db_thread = Thread(target=db_task, args=(products, conn, q))
    db_thread.start()

    dss = qmap(lambda ds: ds, q, eos_marker=EOS)
    dss = cache.tee(dss)

    cells = {}
    if grid is not None:
        gs = parse_gridspec(grid)
        # TODO for named gridspecs should we use the name as group_prefix?
        group_prefix = f"epsg{gs.crs.epsg:d}"
        cache.add_grid(gs, group_prefix)
        dss = bin_dataset_stream(gs, dss, cells)

    label = 'Processing ({:8,d})'.format(n_total)
    with click.progressbar(dss, label=label, length=n_total) as dss:
        for _ in dss:
            pass

    if grid is not None:
        click.echo('Total bins: {:d}'.format(len(cells)))

        with click.progressbar(cells.values(), length=len(cells), label='Saving') as groups:
            for group in groups:
                cache.add_grid_tile(group_prefix, group.idx, group.dss)

    db_thread.join()
    cache.close()
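
One detail worth noting in this example: dss is a generator, so click cannot discover its length on its own; passing length=n_total explicitly is what lets the bar show a percentage and an ETA. A minimal sketch of the same idea:

import click

def numbers():
    yield from range(1000)  # a plain generator has no len()

# Without length=, click falls back to an animated bar with no percentage
# or ETA; supplying it restores the full display.
with click.progressbar(numbers(), label='Processing', length=1000) as bar:
    for n in bar:
        pass  # work on n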

Example from RedisGraph/redisgraph-bulk-loader: bulk_insert/relation_type.py (view on GitHub)
    def process_entities(self, query_buffer):
        entities_created = 0
        with click.progressbar(self.reader, length=self.entities_count, label=self.entity_str) as reader:
            for row in reader:
                self.validate_row(row)
                try:
                    start_id = row[self.start_id]
                    if self.start_namespace:
                        start_id = self.start_namespace + '.' + str(start_id)
                    end_id = row[self.end_id]
                    if self.end_namespace:
                        end_id = self.end_namespace + '.' + str(end_id)

                    src = query_buffer.nodes[start_id]
                    dest = query_buffer.nodes[end_id]
                except KeyError as e:
                    print("Relationship specified a non-existent identifier. src: %s; dest: %s" % (row[self.start_id], row[self.end_id]))
                    if configs.skip_invalid_edges is False:
                        raise e

Example from mobiusklein/ms_deisotope: ms_deisotope/tools/indexing.py (view on GitHub)
    This extended index is saved in a separate JSON file that can be loaded with
    :class:`~.ExtendedScanIndex`. It includes the scan time of all scans, the precursor
    mass of MSn scans, the relationships between precursor and product ion
    scans, and other details. See :class:`~.ExtendedScanIndex` for more information.
    '''
    for path in paths:
        click.echo("Indexing %s" % (path, ))
        reader = MSFileLoader(path)
        try:
            fn = reader.prebuild_byte_offset_file
            if not reader.source._check_has_byte_offset_file():
                fn(path)
        except AttributeError:
            pass
        if processes > 1:
            progbar = click.progressbar(label='Building Index', length=100)
            acc = [0]

            def update_bar(x):
                '''Progress Bar update callback for :func:`~.quick_index.index`
                '''
                x = int(x * 100)
                x -= acc[0]  # pylint: disable=cell-var-from-loop
                progbar.update(x)  # pylint: disable=cell-var-from-loop
                acc[0] += x  # pylint: disable=cell-var-from-loop

            with progbar:
                update_bar(0.0)
                index, _ = quick_index.index(
                    reader, processes, progress_indicator=update_bar)
        else:
            index = quick_index.ExtendedScanIndex()
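
The acc accumulator exists because ProgressBar.update() advances the bar by an increment rather than setting an absolute position, while quick_index reports absolute completion fractions. The same conversion in isolation (quick_index and its callback protocol come from the snippet; the rest is illustrative):

import click

progress = click.progressbar(label='Building Index', length=100)
done = 0

def on_progress(fraction):
    """Translate an absolute completion fraction (0.0-1.0) into a delta."""
    global done
    target = int(fraction * 100)
    progress.update(target - done)  # update() takes an increment, not a position
    done = target

with progress:
    on_progress(0.25)
    on_progress(0.60)
    on_progress(1.0)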

Example from alerta/python-alerta-client: alertaclient/commands/cmd_tag.py (view on GitHub)
def cli(obj, ids, query, filters, tags):
    """Add tags to alerts."""
    client = obj['client']
    if ids:
        total = len(ids)
    else:
        if query:
            query = [('q', query)]
        else:
            query = build_query(filters)
        total, _, _ = client.get_count(query)
        ids = [a.id for a in client.get_alerts(query)]

    with click.progressbar(ids, label='Tagging {} alerts'.format(total)) as bar:
        for id in bar:
            client.tag_alert(id, tags)