How to use the argcomplete.FilesCompleter class in argcomplete

To help you get started, we’ve selected a few argcomplete examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github python-cmd2 / cmd2 / cmd2 / argcomplete_bridge.py View on Github external
# coding=utf-8
"""Hijack the ArgComplete's bash completion handler to return AutoCompleter results"""

try:
    # check if argcomplete is installed
    import argcomplete
except ImportError:  # pragma: no cover
    # not installed, skip the rest of the file
    DEFAULT_COMPLETER = None
else:
    # argcomplete is installed

    # Newer versions of argcomplete have FilesCompleter at top level, older versions only have it under completers
    # EAFP probe: try the top-level attribute first; older releases raise
    # AttributeError and we fall back to the completers submodule.
    try:
        DEFAULT_COMPLETER = argcomplete.FilesCompleter()
    except AttributeError:
        DEFAULT_COMPLETER = argcomplete.completers.FilesCompleter()

    # All remaining imports are deferred into this else-branch so the module
    # stays importable (with DEFAULT_COMPLETER = None) when argcomplete is absent.
    from cmd2.argparse_completer import ACTION_ARG_CHOICES, ACTION_SUPPRESS_HINT
    from contextlib import redirect_stdout
    import copy
    from io import StringIO
    import os
    import shlex
    import sys
    from typing import List, Tuple, Union

    from . import constants
    from . import utils

    def tokens_for_completion(line: str, endidx: int) -> Union[Tuple[List[str], List[str], int, int],
github Azure / azure-cli / src / azure-cli / azure / cli / command_modules / batchai / _params.py View on Github external
c.argument('workspace', options_list=['--workspace', '-w'], configured_default='default_workspace_name', id_part='name', help='Name or ARM ID of the workspace. You can configure default workspace using `az batchai workspace set-default`')
        c.argument('file_server_name', options_list=['--name', '-n'], id_part='child_name_1', help='Name of file server.')

    # `batchai file-server create`: general parameters (VM size, optional
    # JSON config file that command-line flags override).
    with self.argument_context('batchai file-server create') as c:
        c.argument('vm_size', options_list=['--vm-size', '-s'], help='VM size.', completer=get_vm_size_completion_list)
        c.argument('json_file', options_list=['--config-file', '-f'], help='A path to a json file containing file server create parameters (json representation of azure.mgmt.batchai.models.FileServerCreateParameters). Note, parameters given via command line will overwrite parameters specified in the configuration file.', arg_group='Advanced')

    # Storage-disk configuration for the NFS node.
    with self.argument_context('batchai file-server create', arg_group='Storage Disks') as c:
        c.argument('disk_count', help='Number of disks.', type=int)
        c.argument('disk_size', help='Disk size in Gb.', type=int)
        c.argument('caching_type', arg_type=get_enum_type(['none', 'readonly', 'readwrite']), help='Caching type for premium disks. If not provided via command line or in configuration file, no caching will be used.')
        c.argument('storage_sku', arg_type=get_enum_type(['Premium_LRS', 'Standard_LRS']), help='The sku of storage account to persist VM.')

    # Admin-account options for the NFS node: user name, SSH key (with file
    # completion), generated keys, or password.
    with self.argument_context('batchai file-server create', arg_group='Admin Account') as c:
        c.argument('user_name', options_list=['--user-name', '-u'], help='Name of admin user account to be created on NFS node. If the value is not provided and no user configuration is provided in the config file, current user\'s name will be used.')
        # Fixed typo in the user-facing help text: "ommited" -> "omitted".
        c.argument('ssh_key', options_list=['--ssh-key', '-k'], help='Optional SSH public key value or path. If omitted and no password specified, default SSH key (~/.ssh/id_rsa.pub) will be used.', completer=FilesCompleter())
        c.argument('generate_ssh_keys', action='store_true', help='Generate SSH public and private key files in ~/.ssh directory (if missing).')
        c.argument('password', options_list=['--password', '-p'], help='Optional password for the admin user created on the NFS node.')

    # Optional virtual-network placement for the file server.
    with self.argument_context('batchai file-server create', arg_group='Virtual Network') as c:
        c.argument('subnet', options_list=['--subnet'], help='ARM ID of a virtual network subnet to put the file server in. If not provided via command line or in the configuration file, Batch AI will create a new virtual network and subnet under your subscription.')

    # `batchai file-server list`: scoped to a workspace; internal SDK options
    # object is hidden from the CLI surface via c.ignore.
    with self.argument_context('batchai file-server list') as c:
        c.argument('workspace_name', options_list=['--workspace', '-w'], id_part=None, help='Name of workspace.')
        c.ignore('file_servers_list_by_workspace_options')
github k4cg / nichtparasoup / nichtparasoup / cli / parser.py View on Github external
from argcomplete import FilesCompleter  # type: ignore

from nichtparasoup import __version__
from nichtparasoup.imagecrawler import get_imagecrawlers


def _imagecrawler_completion(*args: Any, **kwargs: Any) -> Set[str]:  # pragma: no cover
    """Complete ImageCrawler names for argcomplete.

    The signature follows the completer protocol
    (https://kislyuk.github.io/argcomplete/#specifying-completers); the
    arguments argcomplete passes are not needed and are discarded.
    """
    del args, kwargs  # unused: the completion set does not depend on input
    return {crawler_name for crawler_name in get_imagecrawlers().names()}


# File completer limited to YAML config files; `directories=True` keeps
# directory completion available so users can navigate into folders.
_YAML_FILE_COMPLETION = FilesCompleter(allowednames=('yaml', 'yml'), directories=True)


def create_parser() -> ArgumentParser:  # pragma: no cover
    # used `__tmp_action`  several times, to omit type-checkers warning ala 'Action has no attribute "completer"'

    debug = ArgumentParser(add_help=False)
    debug.add_argument(
        '--debug',
        help='enable debug output',
        action='store_true', dest="debug",
    )

    parser = ArgumentParser(
        add_help=True,
        allow_abbrev=False,
    )
github Azure / azure-cli-extensions / src / synapse / azext_synapse / _params.py View on Github external
for scope in ['show', 'cancel', 'reset-timeout']:
        with self.argument_context('synapse spark session ' + scope) as c:
            c.argument('session_id', options_list=['--livy-id'], arg_group='Spark Session',
                       help='The id of the Spark session job.')

    # `synapse spark statement`: all statement subcommands need the parent
    # Spark session id.
    with self.argument_context('synapse spark statement') as c:
        c.argument('session_id', help='The id of Spark session.')

    # Shared --livy-id argument for the statement subcommands that target a
    # single statement.
    for scope in ['show', 'cancel']:
        with self.argument_context('synapse spark statement ' + scope) as c:
            c.argument('statement_id', options_list=['--livy-id'], arg_group="Spark statement",
                       help='The id of the statement.')

    with self.argument_context('synapse spark statement invoke') as c:
        # `@<path>` loads the statement code from a file, hence the file completer.
        c.argument('code', completer=FilesCompleter(),
                   help='The code of Spark statement. This is either the code contents or use `@` to load the content from a file')
        c.argument('language', arg_type=get_enum_type(SparkStatementLanguage), validator=validate_statement_language, help='The language of Spark statement.')

    # synapse workspace
    for scope in ['show', 'create', 'update', 'delete']:
        with self.argument_context('synapse workspace ' + scope) as c:
            c.argument('workspace_name', arg_type=name_type, id_part='name', help='The workspace name.')

    # Arguments common to workspace create and update.
    for scope in ['create', 'update']:
        with self.argument_context('synapse workspace ' + scope) as c:
            c.argument('sql_admin_login_password', options_list=['--sql-admin-login-password', '-p'],
                       help='The sql administrator login password.')
            c.argument('tags', arg_type=tags_type)

    with self.argument_context('synapse workspace create') as c:
        c.argument("storage_account", validator=validate_storage_account,