def test_warning(self, caplog):
    # preparation
    writer = self._mock_writer()
    msg = 'test message'
    output.warning(msg)
    # logging test
    for record in caplog.records:
        assert record.levelname == 'WARNING'
        assert record.name == __name__
    assert msg in caplog.text
    # writer test
    assert not writer.error_occurred.called
    writer.warning.assert_called_once_with(msg)
    # global status test
    assert not output.error_occurred
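The test above relies on pytest's built-in caplog fixture, which captures records emitted through the standard logging module. Here is a minimal, self-contained sketch of the same pattern; the warning helper below is a hypothetical stand-in for barman's output.warning, not its actual writer-based implementation:

import logging

_logger = logging.getLogger(__name__)

def warning(msg, *args):
    # Stand-in: emulate a warning helper that logs at WARNING level
    # with lazy %-style arguments, as the snippet above suggests
    _logger.warning(msg, *args)

def test_warning_is_captured(caplog):
    with caplog.at_level(logging.WARNING):
        warning("disk usage at %d%%", 95)
    assert all(r.levelname == 'WARNING' for r in caplog.records)
    assert "disk usage at 95%" in caplog.text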
# If the parser raises an exception, print a warning and
# ignore the value.
# noinspection PyBroadException
if key in self.PARSERS:
    parser = self.PARSERS[key]
    try:
        # If the parser is a subclass of the CsvOption class
        # we need a different invocation, which passes not only
        # the value to the parser, but also the key name
        # and the section that contains the configuration
        if inspect.isclass(parser) \
                and issubclass(parser, CsvOption):
            value = parser(new_value, key, source)
        else:
            value = parser(new_value)
    except Exception as e:
        output.warning("Ignoring invalid configuration value '%s' "
                       "for key %s in %s: %s",
                       new_value, key, source, e)
else:
    value = new_value
return value
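The dispatch above distinguishes class parsers from plain callables. A self-contained sketch of that idea follows; CsvOption, PARSERS, and parse_option here are illustrative stand-ins, and unlike the original (where value keeps its earlier binding from the enclosing method on failure) this sketch simply returns None when parsing fails:

import inspect

class CsvOption(set):
    """Illustrative stand-in: parses a comma-separated value and is
    told which key and section it came from, for error reporting."""
    def __init__(self, value, key, source):
        super().__init__(item.strip() for item in value.split(','))

PARSERS = {
    'minimum_redundancy': int,    # plain callable parser
    'backup_options': CsvOption,  # class parser: also receives key and source
}

def parse_option(key, new_value, source):
    if key not in PARSERS:
        return new_value
    parser = PARSERS[key]
    try:
        # CsvOption subclasses need the key name and source section too
        if inspect.isclass(parser) and issubclass(parser, CsvOption):
            return parser(new_value, key, source)
        return parser(new_value)
    except Exception as e:
        print("Ignoring invalid configuration value '%s' "
              "for key %s in %s: %s" % (new_value, key, source, e))

print(parse_option('minimum_redundancy', '3', '[main]'))       # -> 3
print(parse_option('backup_options', 'concurrent_backup', '[main]'))
print(parse_option('minimum_redundancy', 'three', '[main]'))   # warns, None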
    'total_time': copy_time,
}
# Check for the presence of configuration files outside the PGDATA
external_config = backup_info.get_external_config_files()
if any(external_config):
    msg = ("pg_basebackup does not copy the PostgreSQL "
           "configuration files that reside outside PGDATA. "
           "Please manually backup the following files:\n"
           "\t%s\n" %
           "\n\t".join(ecf.path for ecf in external_config))
    # Show the warning only if the EXTERNAL_CONFIGURATION option
    # is not specified in the backup_options.
    if (BackupOptions.EXTERNAL_CONFIGURATION
            not in self.config.backup_options):
        output.warning(msg)
    else:
        _logger.debug(msg)
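The branch above routes one message to two severities: it surfaces as a user-facing warning unless the operator has explicitly opted in to external configuration handling, in which case it is demoted to the debug log. A reduced sketch of that routing, with a plain string set standing in for BackupOptions and print callables standing in for output.warning and _logger.debug (all names here are illustrative):

from collections import namedtuple

ConfigFile = namedtuple('ConfigFile', 'path')

def report_external_config(external_config, backup_options, warn, debug):
    if not any(external_config):
        return  # everything lives inside PGDATA; nothing to report
    msg = ("pg_basebackup does not copy the PostgreSQL configuration "
           "files that reside outside PGDATA. Please manually backup "
           "the following files:\n\t%s\n"
           % "\n\t".join(ecf.path for ecf in external_config))
    # Demote to debug once the operator has acknowledged the limitation
    if 'external_configuration' in backup_options:
        debug(msg)
    else:
        warn(msg)

report_external_config(
    [ConfigFile('/etc/postgresql/14/main/pg_hba.conf')],
    backup_options=set(), warn=print, debug=print)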
    'Missing or invalid ssh_command in barman configuration '
    'for server %s' % backup_manager.config.name)
# Apply the default backup strategy
backup_options = self.config.backup_options
concurrent_backup = (
    BackupOptions.CONCURRENT_BACKUP in backup_options)
exclusive_backup = (
    BackupOptions.EXCLUSIVE_BACKUP in backup_options)
if not concurrent_backup and not exclusive_backup:
    self.config.backup_options.add(BackupOptions.EXCLUSIVE_BACKUP)
    output.warning(
        "No backup strategy set for server '%s' "
        "(using default 'exclusive_backup').",
        self.config.name)
    output.warning(
        "The default backup strategy will change "
        "to 'concurrent_backup' in the future. "
        "Explicitly set 'backup_options' to silence this warning.")
# Depending on the backup options value, create the proper strategy
if BackupOptions.CONCURRENT_BACKUP in self.config.backup_options:
    # Concurrent backup strategy
    self.strategy = LocalConcurrentBackupStrategy(
        self.server.postgres, self.config.name)
else:
    # Exclusive backup strategy
    self.strategy = ExclusiveBackupStrategy(
        self.server.postgres, self.config.name)
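The fallback logic above is easy to isolate: when neither strategy is named, the executor defaults to exclusive backup and warns twice, once about the fallback and once about the planned change of default. A compact sketch using plain strings instead of the BackupOptions flags (function and variable names are illustrative):

CONCURRENT_BACKUP = 'concurrent_backup'
EXCLUSIVE_BACKUP = 'exclusive_backup'

def ensure_backup_strategy(backup_options, server_name, warn):
    # Default to exclusive backup when no strategy was configured
    if not backup_options & {CONCURRENT_BACKUP, EXCLUSIVE_BACKUP}:
        backup_options.add(EXCLUSIVE_BACKUP)
        warn("No backup strategy set for server '%s' "
             "(using default 'exclusive_backup')." % server_name)
        warn("The default backup strategy will change to "
             "'concurrent_backup' in the future. Explicitly set "
             "'backup_options' to silence this warning.")
    return CONCURRENT_BACKUP in backup_options

use_concurrent = ensure_backup_strategy(set(), 'pg01', print)  # both warnings fire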
def _validate_with_keys(config_items, allowed_keys, section):
    """
    Check every config parameter against a list of allowed keys

    :param config_items: list of tuples containing provided parameters
        along with their values
    :param allowed_keys: list of allowed keys
    :param section: source section (for error reporting)
    """
    for parameter in config_items:
        # if the parameter name is not in the list of allowed values,
        # then output a warning
        name = parameter[0]
        if name not in allowed_keys:
            output.warning('Invalid configuration option "%s" in [%s] '
                           'section.', name, section)
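Usage is straightforward: config_items has the (name, value) tuple shape that configparser's items() returns, so a misspelled option is flagged with a warning rather than aborting the load. A hypothetical invocation:

config_items = [('compression', 'gzip'),
                ('retention_polcy', 'REDUNDANCY 2')]  # deliberate typo
allowed_keys = ['compression', 'retention_policy']
_validate_with_keys(config_items, allowed_keys, 'main')
# -> Invalid configuration option "retention_polcy" in [main] section.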
def delete_backup(self, backup):
    """
    Delete a backup

    :param backup: the backup to delete
    :return bool: True if deleted, False if could not delete the backup
    """
    available_backups = self.get_available_backups(
        status_filter=(BackupInfo.DONE,))
    minimum_redundancy = self.server.config.minimum_redundancy
    # Honour minimum required redundancy
    if backup.status == BackupInfo.DONE and \
            minimum_redundancy >= len(available_backups):
        output.warning("Skipping delete of backup %s for server %s "
                       "due to minimum redundancy requirements "
                       "(minimum redundancy = %s, "
                       "current redundancy = %s)",
                       backup.backup_id,
                       self.config.name,
                       minimum_redundancy,
                       len(available_backups))
        return False
    # Keep track of when the delete operation started.
    delete_start_time = datetime.datetime.now()
    # Run the pre_delete_script if present.
    script = HookScriptRunner(self, 'delete_script', 'pre')
    script.env_from_backup_info(backup)
    script.run()
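Note the direction of the comparison in the redundancy guard: it checks the backup count before the delete, so >= is the correct operator; deleting when minimum_redundancy equals len(available_backups) would drop the server below its floor. A tiny worked check of that boundary (the helper name is illustrative):

def delete_allowed(is_done, available_backups, minimum_redundancy):
    # Mirrors the guard above: refuse when the delete would leave
    # fewer DONE backups than the configured minimum
    return not (is_done and minimum_redundancy >= available_backups)

assert delete_allowed(True, available_backups=4, minimum_redundancy=3)      # 3 remain
assert not delete_allowed(True, available_backups=3, minimum_redundancy=3)  # would leave 2
assert delete_allowed(False, available_backups=1, minimum_redundancy=3)     # non-DONE exempt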
            list(segment.name for segment in xlogs[prefix]),
            wal_decompression_dest, wal_dest)
    except CommandFailedException as e:
        msg = ("data transfer failure while copying WAL files "
               "to directory '%s'") % (wal_dest[1:],)
        raise DataTransferFailure.from_command_error(
            'rsync', e, msg)
    # Cleanup files after the transfer
    for segment in xlogs[prefix]:
        file_name = os.path.join(wal_decompression_dest,
                                 segment.name)
        try:
            os.unlink(file_name)
        except OSError as e:
            output.warning(
                "Error removing temporary file '%s': %s",
                file_name, e)
else:
    try:
        rsync.from_file_list(
            list(segment.name for segment in xlogs[prefix]),
            "%s/" % os.path.join(self.config.wals_directory,
                                 prefix),
            wal_dest)
    except CommandFailedException as e:
        msg = "data transfer failure while copying WAL files " \
              "to directory '%s'" % (wal_dest[1:],)
        raise DataTransferFailure.from_command_error(
            'rsync', e, msg)
_logger.info("Finished copying %s WAL files.", total_wals)
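The cleanup loop above deliberately downgrades unlink failures to warnings: by that point the WAL files have already been transferred, so a leftover temporary file is worth reporting but not worth aborting the operation. The pattern in isolation, as a sketch where warn stands in for output.warning:

import os

def cleanup_temporary_files(paths, warn):
    # Best-effort removal: report failures without raising
    for path in paths:
        try:
            os.unlink(path)
        except OSError as e:
            warn("Error removing temporary file '%s': %s" % (path, e))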