How to use the dvc.logger.Logger class in dvc

To help you get started, we’ve selected a few dvc examples based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github iterative / dvc / dvc / command / target.py View on Github external
def unset_target(self, target_conf_file_path):
    """Empty the target config file and commit the change.

    Returns:
        1 on error (missing path or not a regular file), 0 if the file
        is already empty, otherwise the result of committing the reset.
    """
    if not os.path.exists(target_conf_file_path):
        Logger.error('Target conf file {} does not exist'.format(
            target_conf_file_path))
        return 1
    if not os.path.isfile(target_conf_file_path):
        Logger.error('Target conf file {} exists but it is not a regular file'.format(
            target_conf_file_path))
        return 1

    # Use a context manager so the handle is closed (the original
    # leaked it via a bare open(...).read()).
    with open(target_conf_file_path) as fobj:
        if fobj.read() == '':
            # Already unset -- nothing to do, nothing to commit.
            return 0

    # Truncate by re-creating the file, preserving the original
    # remove-then-touch behaviour.
    os.remove(target_conf_file_path)
    open(target_conf_file_path, 'a').close()

    return self.commit_if_needed('DVC target unset')
github iterative / dvc / dvc / command / merge.py View on Github external
def print_info(self, targets):
    """Log which data files were restored to their pre-merge state."""
    for merged_item in targets:
        Logger.info('Restored original data after merge:')
        Logger.info(' {}'.format(merged_item.data.relative))
github iterative / dvc / dvc / cloud / gcp.py View on Github external
def _pull_key(self, key, path, no_progress_bar=False):
        """Download one cache file from Google Cloud Storage into *path*.

        Skips the transfer when the local file already matches the remote
        *key*'s checksum (returns *path*); returns None on download
        failure. Downloads into a temp file first, then renames so a
        partial download never lands at the final path.
        """
        self._makedirs(path)

        # Progress-bar label: path relative to the local cache directory.
        name = os.path.relpath(path, self._cloud_settings.cache.local.cache_dir)
        tmp_file = self.tmp_file(path)

        if self._cmp_checksum(key, path):
            Logger.debug('File "{}" matches with "{}".'.format(path, key.name))
            return path

        Logger.debug('Downloading cache file from gc "{}/{}"'.format(key.bucket.name, key.name))

        if not no_progress_bar:
            # percent_cb is not available for download_to_filename, so
            # lets at least update progress at keypoints(start, finish)
            progress.update_target(name, 0, None)

        try:
            key.download_to_filename(tmp_file)
        except Exception as exc:
            # Best-effort: log and signal failure with None instead of raising.
            Logger.error('Failed to download "{}": {}'.format(key.name, exc))
            return None

        # Publish the file only once fully downloaded.
        # NOTE(review): excerpt may be truncated here -- no trailing
        # `return path` / progress-finish call is visible.
        os.rename(tmp_file, path)
github iterative / dvc / dvc / data_cloud.py View on Github external
# NOTE(review): fragment of an S3 upload helper -- the enclosing `def`
# is outside this excerpt, so the signature cannot be documented here.
key = bucket.get_key(aws_key)
        # If the object already exists remotely, skip re-upload when the
        # checksums match.
        if key:
            Logger.debug('File already uploaded to the cloud. Checksum validation...')

            if self._cmp_checksum(key, path):
                Logger.debug('File checksum matches. No uploading is needed.')
                return path

            Logger.debug('Checksum miss-match. Re-uploading is required.')

        # Create (or overwrite) the remote key and upload in parts.
        key = bucket.new_key(aws_key)

        try:
            self._push_multipart(key, path)
        except Exception as exc:
            # Best-effort: log and signal failure with None instead of raising.
            Logger.error('Failed to upload "{}": {}'.format(path, exc))
            return None

        progress.finish_target(os.path.basename(path))

        return path
github iterative / dvc / dvc / cloud / local.py View on Github external
def _pull_key(self, key, path, no_progress_bar=False):
    """Copy a cache file from the local "cloud" into *path*.

    Copies into a temp file first and renames, so a failed copy never
    leaves a partial file at the final location. Returns *path* on
    success, None on failure.
    """
    self._makedirs(path)

    staging = self.tmp_file(path)
    try:
        copyfile(key.path, staging, no_progress_bar=no_progress_bar)
    except Exception as exc:
        Logger.error('Failed to copy "{}": {}'.format(key.path, exc))
        return None

    os.rename(staging, path)

    return path
github iterative / dvc / dvc / cloud / base.py View on Github external
def storage_path(self):
    """ get storage path

    Precedence: Storage, then cloud specific
    """
    # Highest precedence: the global storage path, when set.
    global_path = self._cloud_settings.global_storage_path
    if global_path:
        return global_path

    # A configured URL means the cloud-specific path wins.
    if self.url:
        return self.path

    # Fall back to the legacy per-cloud StoragePath config key.
    legacy_path = self._cloud_settings.cloud_config.get(Config.SECTION_CORE_STORAGEPATH, None)
    if legacy_path:
        Logger.warn('Using obsoleted config format. Consider updating.')
    return legacy_path
github iterative / dvc / dvc / command / merge.py View on Github external
def print_info(self, targets):
    """Report each data item restored after the merge."""
    for entry in targets:
        Logger.info('Restored original data after merge:')
        Logger.info(' {}'.format(entry.data.relative))
github iterative / dvc / dvc / cloud / instance_aws.py View on Github external
def create_new_keypair(self):
    """Create a new AWS SSH keypair and store the private key locally.

    The key material is written to ``<keypair_dir>/<keypair_name>.pem``
    with 0600 permissions so it can be used for SSH logins.
    """
    Logger.info('AWS key {} does not exist: creating the key'.format(self._keypair_name))
    # Create an SSH key to use when logging into instances.
    key = self._conn.create_key_pair(self._keypair_name)
    Logger.info('AWS key was created: {}'.format(self._keypair_name))

    # Expand env vars once and use the expanded path consistently.
    # (The original checked/created the UNexpanded self._keypair_dir
    # but then saved the key under the expanded key_dir.)
    key_dir = os.path.expandvars(self._keypair_dir)
    if not os.path.isdir(key_dir):
        os.mkdir(key_dir, 0o700)

    # Private key has to be stored locally.
    key_file = os.path.join(key_dir, self._keypair_name + '.pem')
    # key.save(key_file) doesn't work in python3; write the material
    # manually, using a context manager so the handle is closed.
    with open(key_file, 'w') as fp:
        fp.write(key.material)
    os.chmod(key_file, 0o600)
    Logger.info('AWS private key file was saved: {}'.format(key_file))
github iterative / dvc / dvc / cloud / instance_aws.py View on Github external
def get_volume_name(self, inst, volumes):
    """Return the tag-derived name of the storage volume attached to *inst*.

    Scans the instance's block-device mapping for a device whose volume id
    appears in *volumes*; logs an error and returns '' when the matched
    volume carries no name tag (or when nothing matches).
    """
    name = ''
    for device in inst.block_device_mapping.values():
        # Only the first volume whose id matches this device is considered.
        match = next((vol for vol in volumes if vol.id == device.volume_id), None)
        if match is None:
            continue
        if self.VOLUME_TAG in match.tags:
            name = match.tags[self.VOLUME_TAG]
        else:
            Logger.error('Instance {} storage volume does not have tag'.format(inst.id))
    return name
github iterative / dvc / dvc / cloud / aws.py View on Github external
def _pull_key(self, key, fname, no_progress_bar=False):
        """Download one cache file from S3 into *fname*.

        Skips the download when the local file's checksum already matches
        the remote *key* (returns *fname*). Downloads into a temp file
        via boto3's download_file. NOTE(review): this excerpt ends inside
        the except clause -- the tail of the function is not visible here.
        """
        Logger.debug("Pulling key '{}' from bucket '{}' to file '{}'".format(key.name,
                                                                             key.bucket,
                                                                             fname))
        self._makedirs(fname)

        # Download into a temp file; 'name' is the progress-bar label,
        # relative to the cache directory.
        tmp_file = self.tmp_file(fname)
        name = os.path.relpath(fname, self._cloud_settings.cache.cache_dir)

        if self._cmp_checksum(key, fname):
            Logger.debug('File "{}" matches with "{}".'.format(fname, key.name))
            return fname

        Logger.debug('Downloading cache file from S3 "{}/{}" to "{}"'.format(key.bucket,
                                                                             key.name,
                                                                             fname))

        # Attach a progress callback only when a progress bar is wanted.
        if no_progress_bar:
            cb = None
        else:
            cb = self.create_cb_pull(name, key)


        try:
            self.s3.Object(key.bucket, key.name).download_file(tmp_file, Callback=cb)
        except Exception as exc:
            Logger.error('Failed to download "{}": {}'.format(key.name, exc))