How to use the humanfriendly.Timer class in humanfriendly

To help you get started, we’ve selected a few humanfriendly.Timer examples based on popular ways it is used in public projects.

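Every example below follows the same basic pattern: construct a Timer (it starts counting immediately), let the work run, then read its elapsed_time property or interpolate the timer itself into a log message, since str(timer) renders the elapsed time in a human readable form. Here is a minimal sketch of that pattern, with time.sleep() standing in for real work:

from humanfriendly import Timer
import time

timer = Timer()                    # starts timing on construction
time.sleep(0.25)                   # placeholder for the actual work
print("Finished in %s." % timer)   # str(timer) -> a human readable duration, e.g. "0.25 seconds"
print(timer.elapsed_time)          # the raw elapsed time in seconds (a float)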

github xolox / python-executor / executor / tests.py
def check_termination(self, method):
        """Helper method for process termination tests."""
        with ExternalCommand('sleep', '60', check=False) as cmd:
            timer = Timer()
            # We use a positive but very low timeout so that all of the code
            # involved gets a chance to run, but without slowing us down.
            getattr(cmd, method)(timeout=0.1)
            # Gotcha: Call wait() so that the process (our own subprocess) is
            # reclaimed because until we do so proc.is_running will be True!
            cmd.wait()
            # Now we can verify our assertions.
            assert not cmd.is_running, "Child still running despite graceful termination request!"
            assert timer.elapsed_time < 10, "It took too long to terminate the child!"

github martin68 / apt-smart / apt_smart / backends / ubuntu.py
def discover_mirror_selection():
    """Discover "geographically suitable" Ubuntu mirrors."""
    timer = Timer()
    logger.info("Identifying fast Ubuntu mirrors using %s ..", MIRROR_SELECTION_URL)
    data = fetch_url(MIRROR_SELECTION_URL, timeout=3, retry=True, max_attempts=5)
    # shorter timeout with more retries is good for unstable connections to MIRROR_SELECTION_URL
    dammit = UnicodeDammit(data)
    mirrors = set(
        CandidateMirror(mirror_url=mirror_url.strip())
        for mirror_url in dammit.unicode_markup.splitlines()
        if mirror_url and not mirror_url.isspace() and mirror_url.startswith(('http://', 'https://'))
    )
    logger.debug("Found %s in %s.", pluralize(len(mirrors), "fast Ubuntu mirror"), timer)
    return mirrors

github xolox / python-executor / executor / process.py
:param timeout: The number of seconds to wait for the process to
                        terminate after we've asked it nicely (defaults
                        to zero which means we wait indefinitely).
        :param use_spinner: Whether or not to display an interactive spinner
                            on the terminal (using :class:`~humanfriendly.Spinner`)
                            to explain to the user what they are waiting for:

                            - :data:`True` enables the spinner,
                            - :data:`False` disables the spinner,
                            - :data:`None` (the default) means the spinner is
                              enabled when the program is connected to an
                              interactive terminal, otherwise it's disabled.
        :returns: A :class:`~humanfriendly.Timer` object telling you how long
                  it took to wait for the process.
        """
        with Timer(resumable=True) as timer:
            with Spinner(interactive=use_spinner, timer=timer) as spinner:
                while self.is_running:
                    if timeout and timer.elapsed_time >= timeout:
                        break
                    spinner.step(label="Waiting for process %i to terminate" % self.pid)
                    spinner.sleep()
            return timer
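
The resumable=True flag above is what makes it possible to use the Timer as a context manager: a resumable timer only accumulates time while it is active as a context manager, so the Timer returned by this method stops counting once the with block exits. A small illustrative sketch of that behavior, based on the resumable timer semantics documented by humanfriendly:

from humanfriendly import Timer
import time

timer = Timer(resumable=True)  # doesn't start counting until entered
with timer:
    time.sleep(0.1)            # this time is counted
time.sleep(0.1)                # timer is paused here, so this isn't counted
with timer:
    time.sleep(0.1)            # counted again
print(timer)                   # reports roughly 0.2 seconds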

github xolox / python-rsync-system-backup / rsync_system_backup / __init__.py
def execute_helper(self):
        """Helper for :func:`execute()`."""
        timer = Timer()
        actions = []
        if self.crypto_device and not self.crypto_device_available:
            msg = "Encrypted filesystem %s isn't available! (the device file %s doesn't exist)"
            raise MissingBackupDiskError(msg % (self.crypto_device, self.crypttab_entry.source_device))
        if self.backup_enabled:
            self.notify_starting()
        self.unlock_device()
        try:
            self.mount_filesystem()
            if self.backup_enabled:
                self.transfer_changes()
                actions.append('create backup')
            if self.snapshot_enabled:
                self.create_snapshot()
                actions.append('create snapshot')
            if self.rotate_enabled:

github paylogic / pip-accel / pip_accel / caches / s3.py
def put(self, filename, handle):
        """
        Upload a distribution archive to the configured Amazon S3 bucket.

        If the :attr:`~.Config.s3_cache_readonly` configuration option is
        enabled this method does nothing.

        :param filename: The filename of the distribution archive (a string).
        :param handle: A file-like object that provides access to the
                       distribution archive.
        :raises: :exc:`.CacheBackendError` when any underlying method fails.
        """
        if self.config.s3_cache_readonly:
            logger.info('Skipping upload to S3 bucket (using S3 in read only mode).')
        else:
            timer = Timer()
            self.check_prerequisites()
            with PatchedBotoConfig():
                from boto.s3.key import Key
                raw_key = self.get_cache_key(filename)
                logger.info("Uploading distribution archive to S3 bucket: %s", raw_key)
                key = Key(self.s3_bucket)
                key.key = raw_key
                try:
                    key.set_contents_from_file(handle)
                except Exception as e:
                    logger.info("Encountered error writing to S3 bucket, "
                                "falling back to read only mode (exception: %s)", e)
                    self.config.s3_cache_readonly = True
                else:
                    logger.info("Finished uploading distribution archive to S3 bucket in %s.", timer)

github paylogic / pip-accel / pip_accel / __init__.py
- I could of course switch to storing the new local source distribution
          index in a differently named directory (avoiding potential conflicts
          between multiple versions of pip-accel) but then I would have to
          introduce a new configuration option, otherwise everyone who has
          configured pip-accel to store its source index in a non-default
          location could still be bitten by compatibility issues.

        For now I've decided to keep using the same directory for the local
        source distribution index and to keep cleaning up broken symbolic
        links. This enables cooperation between old and new versions of
        pip-accel and avoids trashing users' local source distribution indexes.
        The main disadvantage is that pip-accel is still required to clean up
        broken symbolic links...
        """
        cleanup_timer = Timer()
        cleanup_counter = 0
        for entry in os.listdir(self.config.source_index):
            pathname = os.path.join(self.config.source_index, entry)
            if os.path.islink(pathname) and not os.path.exists(pathname):
                logger.warn("Cleaning up broken symbolic link: %s", pathname)
                os.unlink(pathname)
                cleanup_counter += 1
        logger.debug("Cleaned up %i broken symbolic links from source index in %s.", cleanup_counter, cleanup_timer)

github martin68 / apt-smart / apt_smart / __init__.py
def change_mirror(self, new_mirror=None, update=True):
        """
        Change the main mirror in use in :attr:`main_sources_list`.

        :param new_mirror: The URL of the new mirror (a string, defaults to
                           :attr:`best_mirror`).
        :param update: Whether an ``apt-get update`` should be run after
                       changing the mirror (a boolean, defaults to
                       :data:`True`).
        """
        timer = Timer()
        # Default to the best available mirror.
        if new_mirror:
            logger.info("Changing mirror of %s to %s ..", self.context, new_mirror)
        else:
            logger.info("Changing mirror of %s to best available mirror ..", self.context)
            new_mirror = self.best_mirror
            logger.info("Selected mirror: %s", new_mirror)
        # Parse /etc/apt/sources.list to replace the old mirror with the new one.
        sources_list = self.get_sources_list()
        mirrors_to_replace = [normalize_mirror_url(self.current_mirror)]
        if self.release_is_eol:
            # When a release goes EOL the security updates mirrors stop
            # serving that release as well, so we need to remove them.
            logger.debug("Replacing %s URLs as well ..", self.security_url)
            mirrors_to_replace.append(normalize_mirror_url(self.security_url))
        else:

github xolox / python-redock / redock / base.py
:param client: Connection to Docker (instance of :py:class:`docker.Client`)
    :returns: The unique id of the base image.

    .. _apt-get: http://manpages.ubuntu.com/manpages/precise/man8/apt-get.8.html
    .. _init: http://manpages.ubuntu.com/manpages/precise/man8/init.8.html
    .. _initscripts: http://packages.ubuntu.com/precise/initscripts
    .. _language-pack-en-base: http://packages.ubuntu.com/precise/language-pack-en-base
    .. _locale: http://en.wikipedia.org/wiki/Locale
    .. _openssh-server: http://packages.ubuntu.com/precise/openssh-server
    .. _ssh-keygen: http://manpages.ubuntu.com/manpages/precise/man1/ssh-keygen.1.html
    .. _supervisor: http://packages.ubuntu.com/precise/supervisor
    .. _ubuntu:precise: https://index.docker.io/_/ubuntu/
    .. _upstart: http://packages.ubuntu.com/precise/upstart
    """
    download_image(client, 'ubuntu', 'precise')
    creation_timer = Timer()
    logger.info("Initializing base image (this can take a few minutes but you only have to do it once) ..")
    command = ' && '.join([
        'echo %s > /etc/apt/apt.conf.d/90redock' % pipes.quote(APT_CONFIG.strip()),
        'echo %s > /etc/apt/sources.list' % pipes.quote(SOURCES_LIST.format(mirror=select_ubuntu_mirror()).strip()),
        'apt-get update',
        'DEBIAN_FRONTEND=noninteractive apt-get install -q -y language-pack-en-base openssh-server supervisor',
        'apt-get clean', # Don't keep the +/- 20 MB of *.deb archives after installation.
        # Make it possible to run `apt-get dist-upgrade'.
        # https://help.ubuntu.com/community/PinningHowto#Introduction_to_Holding_Packages
        'apt-mark hold initscripts upstart',
        # Install the generated SSH public key.
        'mkdir -p /root/.ssh',
        'echo %s > /root/.ssh/authorized_keys' % pipes.quote(get_ssh_public_key()),
        # Create the Supervisor configuration for the SSH server.
        'echo %s > /etc/supervisor/conf.d/ssh-server.conf' % pipes.quote(SUPERVISOR_CONFIG.strip())])
    logger.debug("Generated command line: %s", command)

github xolox / python-executor / executor / cli.py
def apply_fudge_factor(fudge_factor):
    """
    Apply the requested scheduling fudge factor.

    :param fudge_factor: The maximum number of seconds to sleep (a number).

    Previous implementations of the fudge factor interrupt used UNIX signals
    (specifically ``SIGUSR1``) but the use of this signal turned out to be
    sensitive to awkward race conditions and it wasn't very cross platform, so
    now the creation of a regular file is used to interrupt the fudge factor.
    """
    if fudge_factor:
        timer = Timer()
        logger.debug("Calculating fudge factor based on user defined maximum (%s) ..",
                     format_timespan(fudge_factor))
        fudged_sleep_time = fudge_factor * random.random()
        logger.info("Sleeping for %s because of user defined fudge factor ..",
                    format_timespan(fudged_sleep_time))
        interrupt_file = get_lock_path(INTERRUPT_FILE)
        while timer.elapsed_time < fudged_sleep_time:
            if os.path.isfile(interrupt_file):
                logger.info("Fudge factor sleep was interrupted! (%s exists)",
                            interrupt_file)
                break
            time_to_sleep = min(1, fudged_sleep_time - timer.elapsed_time)
            if time_to_sleep > 0:
                time.sleep(time_to_sleep)
        else:
            logger.info("Finished sleeping because of fudge factor (took %s).", timer)