Restore backed up PostgreSQL databases via "borgmatic restore" sub-command (#229).

Commit 3006db0cae (parent ca23c3b8b3)
11 changed files with 547 additions and 130 deletions
NEWS (1 line added):

@@ -1,4 +1,5 @@
 1.4.1.dev0
+ * #229: Restore backed up PostgreSQL databases via "borgmatic restore" sub-command.
  * Documentation on how to develop borgmatic's documentation:
    https://torsion.org/borgmatic/docs/how-to/develop-on-borgmatic/#documentation-development
 
borgmatic/borg/extract.py:

@@ -1,4 +1,5 @@
 import logging
+import os
 
 from borgmatic.execute import execute_command, execute_command_without_capture
 
@@ -47,7 +48,7 @@ def extract_last_archive_dry_run(repository, lock_wait=None, local_path='borg',
         )
     )
 
-    execute_command(full_extract_command)
+    execute_command(full_extract_command, working_directory=None, error_on_warnings=True)
 
 
 def extract_archive(
@@ -59,12 +60,14 @@ def extract_archive(
     storage_config,
     local_path='borg',
     remote_path=None,
+    destination_path=None,
     progress=False,
 ):
     '''
     Given a dry-run flag, a local or remote repository path, an archive name, zero or more paths to
-    restore from the archive, and location/storage configuration dicts, extract the archive into the
-    current directory.
+    restore from the archive, location/storage configuration dicts, optional local and remote Borg
+    paths, and an optional destination path to extract to, extract the archive into the current
+    directory.
     '''
     umask = storage_config.get('umask', None)
     lock_wait = storage_config.get('lock_wait', None)
@@ -79,14 +82,18 @@ def extract_archive(
         + (('--debug', '--list', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ())
         + (('--dry-run',) if dry_run else ())
         + (('--progress',) if progress else ())
-        + ('::'.join((repository, archive)),)
+        + ('::'.join((os.path.abspath(repository), archive)),)
         + (tuple(restore_paths) if restore_paths else ())
     )
 
     # The progress output isn't compatible with captured and logged output, as progress messes with
     # the terminal directly.
    if progress:
-        execute_command_without_capture(full_command)
+        execute_command_without_capture(
+            full_command, working_directory=destination_path, error_on_warnings=True
+        )
        return
 
-    execute_command(full_command)
+    # Error on warnings, as Borg only gives a warning if the restore paths don't exist in the
+    # archive!
+    execute_command(full_command, working_directory=destination_path, error_on_warnings=True)
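A note on the os.path.abspath() change above: extraction can now run with the working directory set to destination_path, so a relative repository path would no longer resolve against the caller's original directory. A minimal sketch of the effect (not part of the commit; the paths are made up):

import os

repository = 'repo'   # hypothetical relative repository path
archive = 'archive'
archive_spec = '::'.join((os.path.abspath(repository), archive))
# e.g. '/home/user/repo::archive', which stays valid even when Borg runs
# with its cwd set to destination_path instead of the original directory.
print(archive_spec)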
Command-line argument parsing (SUBPARSER_ALIASES and parse_arguments):

@@ -9,6 +9,7 @@ SUBPARSER_ALIASES = {
     'create': ['--create', '-C'],
     'check': ['--check', '-k'],
     'extract': ['--extract', '-x'],
+    'restore': ['--restore', '-r'],
     'list': ['--list', '-l'],
     'info': ['--info', '-i'],
 }
@@ -263,7 +264,7 @@ def parse_arguments(*unparsed_arguments):
     extract_parser = subparsers.add_parser(
         'extract',
         aliases=SUBPARSER_ALIASES['extract'],
-        help='Extract a named archive to the current directory',
+        help='Extract files from a named archive to the current directory',
         description='Extract a named archive to the current directory',
         add_help=False,
     )
@@ -275,6 +276,7 @@ def parse_arguments(*unparsed_arguments):
     extract_group.add_argument('--archive', help='Name of archive to operate on', required=True)
     extract_group.add_argument(
         '--restore-path',
+        metavar='PATH',
         nargs='+',
         dest='restore_paths',
         help='Paths to restore from archive, defaults to the entire archive',
@@ -290,6 +292,37 @@ def parse_arguments(*unparsed_arguments):
         '-h', '--help', action='help', help='Show this help message and exit'
     )
 
+    restore_parser = subparsers.add_parser(
+        'restore',
+        aliases=SUBPARSER_ALIASES['restore'],
+        help='Restore database dumps from a named archive',
+        description='Restore database dumps from a named archive. (To extract files instead, use "borgmatic extract".)',
+        add_help=False,
+    )
+    restore_group = restore_parser.add_argument_group('restore arguments')
+    restore_group.add_argument(
+        '--repository',
+        help='Path of repository to restore from, defaults to the configured repository if there is only one',
+    )
+    restore_group.add_argument('--archive', help='Name of archive to operate on', required=True)
+    restore_group.add_argument(
+        '--database',
+        metavar='NAME',
+        nargs='+',
+        dest='databases',
+        help='Names of databases to restore from archive, defaults to all databases. Note that any databases to restore must be defined in borgmatic\'s configuration',
+    )
+    restore_group.add_argument(
+        '--progress',
+        dest='progress',
+        default=False,
+        action='store_true',
+        help='Display progress for each database dump file as it is extracted from archive',
+    )
+    restore_group.add_argument(
+        '-h', '--help', action='help', help='Show this help message and exit'
+    )
+
     list_parser = subparsers.add_parser(
         'list',
         aliases=SUBPARSER_ALIASES['list'],
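With the flags above, a user would type something like "borgmatic restore --archive test --database users". A small sketch of the parsed result (an assumption for illustration only; the config path and names are placeholders, and the exact import path of parse_arguments is not shown in this diff):

# parse_arguments is the function added to above; it returns a dict keyed by
# sub-command name, which is how run_actions accesses it (arguments['restore']).
parsed = parse_arguments(
    '--config', 'myconfig', 'restore', '--archive', 'test', '--database', 'users', 'orders'
)

assert parsed['restore'].archive == 'test'
assert parsed['restore'].databases == ['users', 'orders']
assert parsed['restore'].progress is False   # --progress defaults to False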
Command dispatch (run_configuration, run_actions, and make_error_log_records):

@@ -81,11 +81,12 @@ def run_configuration(config_filename, config, arguments):
                     storage=storage,
                     retention=retention,
                     consistency=consistency,
+                    hooks=hooks,
                     local_path=local_path,
                     remote_path=remote_path,
                     repository_path=repository_path,
                 )
-            except (OSError, CalledProcessError) as error:
+            except (OSError, CalledProcessError, ValueError) as error:
                 encountered_error = error
                 error_repository = repository_path
                 yield from make_error_log_records(
@@ -141,6 +142,7 @@ def run_actions(
     storage,
     retention,
     consistency,
+    hooks,
     local_path,
     remote_path,
     repository_path
@@ -151,6 +153,9 @@ def run_actions(
     from the command-line arguments on the given repository.
 
     Yield JSON output strings from executing any actions that produce JSON.
+
+    Raise OSError or subprocess.CalledProcessError if an error occurs running a command for an
+    action. Raise ValueError if the arguments or configuration passed to action are invalid.
     '''
     repository = os.path.expanduser(repository_path)
     global_arguments = arguments['global']
@@ -215,8 +220,47 @@ def run_actions(
                 storage,
                 local_path=local_path,
                 remote_path=remote_path,
+                destination_path=None,
                 progress=arguments['extract'].progress,
             )
+    if 'restore' in arguments:
+        if arguments['restore'].repository is None or repository == arguments['restore'].repository:
+            logger.info(
+                '{}: Restoring databases from archive {}'.format(
+                    repository, arguments['restore'].archive
+                )
+            )
+
+            restore_names = arguments['restore'].databases or []
+            if 'all' in restore_names:
+                restore_names = []
+
+            # Extract dumps for the named databases from the archive.
+            dump_patterns = postgresql.make_database_dump_patterns(restore_names)
+            borg_extract.extract_archive(
+                global_arguments.dry_run,
+                repository,
+                arguments['restore'].archive,
+                postgresql.convert_glob_patterns_to_borg_patterns(dump_patterns),
+                location,
+                storage,
+                local_path=local_path,
+                remote_path=remote_path,
+                destination_path='/',
+                progress=arguments['restore'].progress,
+            )
+
+            # Map the restore names to the corresponding database configurations.
+            databases = list(
+                postgresql.get_database_configurations(
+                    hooks.get('postgresql_databases'),
+                    restore_names or postgresql.get_database_names_from_dumps(dump_patterns),
+                )
+            )
+
+            # Finally, restore the databases and cleanup the dumps.
+            postgresql.restore_database_dumps(databases, repository, global_arguments.dry_run)
+            postgresql.remove_database_dumps(databases, repository, global_arguments.dry_run)
     if 'list' in arguments:
         if arguments['list'].repository is None or repository == arguments['list'].repository:
             logger.info('{}: Listing archives'.format(repository))
@@ -295,9 +339,10 @@ def make_error_log_records(message, error=None):
         yield logging.makeLogRecord(
             dict(levelno=logging.CRITICAL, levelname='CRITICAL', msg=message)
         )
-        yield logging.makeLogRecord(
-            dict(levelno=logging.CRITICAL, levelname='CRITICAL', msg=error.output)
-        )
+        if error.output:
+            yield logging.makeLogRecord(
+                dict(levelno=logging.CRITICAL, levelname='CRITICAL', msg=error.output)
+            )
         yield logging.makeLogRecord(dict(levelno=logging.CRITICAL, levelname='CRITICAL', msg=error))
     except (ValueError, OSError) as error:
         yield logging.makeLogRecord(
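Why destination_path='/' for the restore action: the dump files live at absolute paths under ~/.borgmatic, Borg matches archive-relative paths, and the glob-to-Borg pattern conversion in the hook strips the leading slash, so extracting with the working directory set to '/' lands each dump back at its original absolute path where pg_restore then finds it. A rough illustration, not from the commit, with a hypothetical home directory:

import os

dump_filename = os.path.expanduser('~/.borgmatic/postgresql_databases/localhost/users')
borg_pattern = 'sh:' + dump_filename.lstrip(os.path.sep)
# e.g. 'sh:home/user/.borgmatic/postgresql_databases/localhost/users' -- an
# archive-relative pattern, so extraction must run from '/' to recreate the
# original absolute path.
print(borg_pattern)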
borgmatic/execute.py:

@@ -9,17 +9,28 @@ ERROR_OUTPUT_MAX_LINE_COUNT = 25
 BORG_ERROR_EXIT_CODE = 2
 
 
-def borg_command(full_command):
+def exit_code_indicates_error(command, exit_code, error_on_warnings=False):
     '''
-    Return True if this is a Borg command, or False if it's some other command.
+    Return True if the given exit code from running the command corresponds to an error.
     '''
-    return 'borg' in full_command[0]
+    # If we're running something other than Borg, treat all non-zero exit codes as errors.
+    if 'borg' in command[0] and not error_on_warnings:
+        return bool(exit_code >= BORG_ERROR_EXIT_CODE)
+
+    return bool(exit_code != 0)
 
 
-def execute_and_log_output(full_command, output_log_level, shell, environment):
+def execute_and_log_output(
+    full_command, output_log_level, shell, environment, working_directory, error_on_warnings
+):
     last_lines = []
     process = subprocess.Popen(
-        full_command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=shell, env=environment
+        full_command,
+        stdout=subprocess.PIPE,
+        stderr=subprocess.STDOUT,
+        shell=shell,
+        env=environment,
+        cwd=working_directory,
     )
 
     while process.poll() is None:
@@ -41,13 +52,7 @@ def execute_and_log_output(full_command, output_log_level, shell, environment):
 
     exit_code = process.poll()
 
-    # If we're running something other than Borg, treat all non-zero exit codes as errors.
-    if borg_command(full_command):
-        error = bool(exit_code >= BORG_ERROR_EXIT_CODE)
-    else:
-        error = bool(exit_code != 0)
-
-    if error:
+    if exit_code_indicates_error(full_command, exit_code, error_on_warnings):
         # If an error occurs, include its output in the raised exception so that we don't
         # inadvertently hide error output.
         if len(last_lines) == ERROR_OUTPUT_MAX_LINE_COUNT:
@@ -59,13 +64,19 @@ def execute_and_log_output(full_command, output_log_level, shell, environment):
 
 
 def execute_command(
-    full_command, output_log_level=logging.INFO, shell=False, extra_environment=None
+    full_command,
+    output_log_level=logging.INFO,
+    shell=False,
+    extra_environment=None,
+    working_directory=None,
+    error_on_warnings=False,
 ):
     '''
     Execute the given command (a sequence of command/argument strings) and log its output at the
     given log level. If output log level is None, instead capture and return the output. If
     shell is True, execute the command within a shell. If an extra environment dict is given, then
-    use it to augment the current environment, and pass the result into the command.
+    use it to augment the current environment, and pass the result into the command. If a working
+    directory is given, use that as the present working directory when running the command.
 
     Raise subprocesses.CalledProcessError if an error occurs while running the command.
     '''
@@ -73,22 +84,34 @@ def execute_command(
     environment = {**os.environ, **extra_environment} if extra_environment else None
 
     if output_log_level is None:
-        output = subprocess.check_output(full_command, shell=shell, env=environment)
+        output = subprocess.check_output(
+            full_command, shell=shell, env=environment, cwd=working_directory
+        )
         return output.decode() if output is not None else None
     else:
-        execute_and_log_output(full_command, output_log_level, shell=shell, environment=environment)
+        execute_and_log_output(
+            full_command,
+            output_log_level,
+            shell=shell,
+            environment=environment,
+            working_directory=working_directory,
+            error_on_warnings=error_on_warnings,
+        )
 
 
-def execute_command_without_capture(full_command):
+def execute_command_without_capture(full_command, working_directory=None, error_on_warnings=False):
     '''
     Execute the given command (a sequence of command/argument strings), but don't capture or log its
     output in any way. This is necessary for commands that monkey with the terminal (e.g. progress
     display) or provide interactive prompts.
+
+    If a working directory is given, use that as the present working directory when running the
+    command.
     '''
     logger.debug(' '.join(full_command))
 
     try:
-        subprocess.check_call(full_command)
+        subprocess.check_call(full_command, cwd=working_directory)
     except subprocess.CalledProcessError as error:
-        if error.returncode >= BORG_ERROR_EXIT_CODE:
+        if exit_code_indicates_error(full_command, error.returncode, error_on_warnings):
             raise
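The new exit_code_indicates_error() centralizes the old borg_command() logic: Borg treats exit code 1 as a warning and 2 or higher as an error, any other program's non-zero exit is an error, and error_on_warnings=True makes Borg warnings fatal too. Its behavior, as pinned down by the tests added further below:

from borgmatic import execute

assert execute.exit_code_indicates_error(('borg', 'extract'), 2)      # Borg error
assert not execute.exit_code_indicates_error(('borg', 'extract'), 1)  # Borg warning tolerated by default
assert execute.exit_code_indicates_error(('borg', 'extract'), 1, error_on_warnings=True)
assert execute.exit_code_indicates_error(('grep', 'pattern'), 1)      # non-Borg: any non-zero exit is an error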
borgmatic/hooks/postgresql.py:

@@ -1,3 +1,4 @@
+import glob
 import logging
 import os
 
@@ -7,32 +8,39 @@ DUMP_PATH = '~/.borgmatic/postgresql_databases'
 logger = logging.getLogger(__name__)
 
 
-def dump_databases(databases, config_filename, dry_run):
+def make_database_dump_filename(name, hostname=None):
+    '''
+    Based on the given database name and hostname, return a filename to use for the database dump.
+
+    Raise ValueError if the database name is invalid.
+    '''
+    if os.path.sep in name:
+        raise ValueError('Invalid database name {}'.format(name))
+
+    return os.path.join(os.path.expanduser(DUMP_PATH), hostname or 'localhost', name)
+
+
+def dump_databases(databases, log_prefix, dry_run):
     '''
     Dump the given PostgreSQL databases to disk. The databases are supplied as a sequence of dicts,
-    one dict describing each database as per the configuration schema. Use the given configuration
-    filename in any log entries. If this is a dry run, then don't actually dump anything.
+    one dict describing each database as per the configuration schema. Use the given log prefix in
+    any log entries. If this is a dry run, then don't actually dump anything.
     '''
     if not databases:
-        logger.debug('{}: No PostgreSQL databases configured'.format(config_filename))
+        logger.debug('{}: No PostgreSQL databases configured'.format(log_prefix))
         return
 
     dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
 
-    logger.info('{}: Dumping PostgreSQL databases{}'.format(config_filename, dry_run_label))
+    logger.info('{}: Dumping PostgreSQL databases{}'.format(log_prefix, dry_run_label))
 
     for database in databases:
-        if os.path.sep in database['name']:
-            raise ValueError('Invalid database name {}'.format(database['name']))
-
-        dump_path = os.path.join(
-            os.path.expanduser(DUMP_PATH), database.get('hostname', 'localhost')
-        )
         name = database['name']
+        dump_filename = make_database_dump_filename(name, database.get('hostname'))
         all_databases = bool(name == 'all')
         command = (
             ('pg_dumpall' if all_databases else 'pg_dump', '--no-password', '--clean')
-            + ('--file', os.path.join(dump_path, name))
+            + ('--file', dump_filename)
             + (('--host', database['hostname']) if 'hostname' in database else ())
             + (('--port', str(database['port'])) if 'port' in database else ())
             + (('--username', database['username']) if 'username' in database else ())
@@ -42,47 +50,136 @@ def dump_databases(databases, config_filename, dry_run):
         )
         extra_environment = {'PGPASSWORD': database['password']} if 'password' in database else None
 
-        logger.debug(
-            '{}: Dumping PostgreSQL database {}{}'.format(config_filename, name, dry_run_label)
-        )
+        logger.debug('{}: Dumping PostgreSQL database {}{}'.format(log_prefix, name, dry_run_label))
         if not dry_run:
-            os.makedirs(dump_path, mode=0o700, exist_ok=True)
+            os.makedirs(os.path.dirname(dump_filename), mode=0o700, exist_ok=True)
             execute_command(command, extra_environment=extra_environment)
 
 
-def remove_database_dumps(databases, config_filename, dry_run):
+def remove_database_dumps(databases, log_prefix, dry_run):
     '''
     Remove the database dumps for the given databases. The databases are supplied as a sequence of
-    dicts, one dict describing each database as per the configuration schema. Use the given
-    configuration filename in any log entries. If this is a dry run, then don't actually remove
-    anything.
+    dicts, one dict describing each database as per the configuration schema. Use the log prefix in
+    any log entries. If this is a dry run, then don't actually remove anything.
     '''
     if not databases:
-        logger.debug('{}: No PostgreSQL databases configured'.format(config_filename))
+        logger.debug('{}: No PostgreSQL databases configured'.format(log_prefix))
         return
 
     dry_run_label = ' (dry run; not actually removing anything)' if dry_run else ''
 
-    logger.info('{}: Removing PostgreSQL database dumps{}'.format(config_filename, dry_run_label))
+    logger.info('{}: Removing PostgreSQL database dumps{}'.format(log_prefix, dry_run_label))
 
     for database in databases:
-        if os.path.sep in database['name']:
-            raise ValueError('Invalid database name {}'.format(database['name']))
-
-        name = database['name']
-        dump_path = os.path.join(
-            os.path.expanduser(DUMP_PATH), database.get('hostname', 'localhost')
-        )
-        dump_filename = os.path.join(dump_path, name)
+        dump_filename = make_database_dump_filename(database['name'], database.get('hostname'))
 
         logger.debug(
-            '{}: Remove PostgreSQL database dump {} from {}{}'.format(
-                config_filename, name, dump_filename, dry_run_label
+            '{}: Removing PostgreSQL database dump {} from {}{}'.format(
+                log_prefix, database['name'], dump_filename, dry_run_label
             )
         )
         if dry_run:
             continue
 
         os.remove(dump_filename)
+        dump_path = os.path.dirname(dump_filename)
 
         if len(os.listdir(dump_path)) == 0:
             os.rmdir(dump_path)
+
+
+def make_database_dump_patterns(names):
+    '''
+    Given a sequence of database names, return the corresponding glob patterns to match the database
+    dumps in an archive. An empty sequence of names indicates that the patterns should match all
+    dumps.
+    '''
+    return [make_database_dump_filename(name, hostname='*') for name in (names or ['*'])]
+
+
+def convert_glob_patterns_to_borg_patterns(patterns):
+    '''
+    Convert a sequence of shell glob patterns like "/etc/*" to the corresponding Borg archive
+    patterns like "sh:etc/*".
+    '''
+    return ['sh:{}'.format(pattern.lstrip(os.path.sep)) for pattern in patterns]
+
+
+def get_database_names_from_dumps(patterns):
+    '''
+    Given a sequence of database dump patterns, find the corresponding database dumps on disk and
+    return the database names from their filenames.
+    '''
+    return [os.path.basename(dump_path) for pattern in patterns for dump_path in glob.glob(pattern)]
+
+
+def get_database_configurations(databases, names):
+    '''
+    Given the full database configuration dicts as per the configuration schema, and a sequence of
+    database names, filter down and yield the configuration for just the named databases.
+    Additionally, if a database configuration is named "all", project out that configuration for
+    each named database.
+
+    Raise ValueError if one of the database names cannot be matched to a database in borgmatic's
+    database configuration.
+    '''
+    named_databases = {database['name']: database for database in databases}
+
+    for name in names:
+        database = named_databases.get(name)
+        if database:
+            yield database
+            continue
+
+        if 'all' in named_databases:
+            yield {**named_databases['all'], **{'name': name}}
+            continue
+
+        raise ValueError(
+            'Cannot restore database "{}", as it is not defined in borgmatic\'s configuration'.format(
+                name
+            )
+        )
+
+
+def restore_database_dumps(databases, log_prefix, dry_run):
+    '''
+    Restore the given PostgreSQL databases from disk. The databases are supplied as a sequence of
+    dicts, one dict describing each database as per the configuration schema. Use the given log
+    prefix in any log entries. If this is a dry run, then don't actually restore anything.
+    '''
+    if not databases:
+        logger.debug('{}: No PostgreSQL databases configured'.format(log_prefix))
+        return
+
+    dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else ''
+
+    for database in databases:
+        dump_filename = make_database_dump_filename(database['name'], database.get('hostname'))
+        restore_command = (
+            ('pg_restore', '--no-password', '--clean', '--if-exists', '--exit-on-error')
+            + (('--host', database['hostname']) if 'hostname' in database else ())
+            + (('--port', str(database['port'])) if 'port' in database else ())
+            + (('--username', database['username']) if 'username' in database else ())
+            + ('--dbname', database['name'])
+            + (dump_filename,)
+        )
+        extra_environment = {'PGPASSWORD': database['password']} if 'password' in database else None
+        analyze_command = (
+            ('psql', '--no-password')
+            + (('--host', database['hostname']) if 'hostname' in database else ())
+            + (('--port', str(database['port'])) if 'port' in database else ())
+            + (('--username', database['username']) if 'username' in database else ())
+            + ('--dbname', database['name'])
+            + ('--quiet',)
+            + ('--command', 'ANALYZE')
+        )
+
+        logger.debug(
+            '{}: Restoring PostgreSQL database {}{}'.format(
+                log_prefix, database['name'], dry_run_label
+            )
+        )
+        if not dry_run:
+            execute_command(restore_command, extra_environment=extra_environment)
+            execute_command(analyze_command, extra_environment=extra_environment)
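Putting the new helper functions together, a sketch of how restore names get resolved (the database names, hostname, and results shown in comments are hypothetical, not from the commit):

from borgmatic.hooks import postgresql

# No --database given: patterns match every dump under ~/.borgmatic/postgresql_databases.
dump_patterns = postgresql.make_database_dump_patterns([])
borg_patterns = postgresql.convert_glob_patterns_to_borg_patterns(dump_patterns)

# After extraction, recover the names from the dump files that exist on disk and
# match each one back to a configured database; an "all" entry acts as a wildcard.
names = postgresql.get_database_names_from_dumps(dump_patterns)   # e.g. ['users', 'orders']
configured = [{'name': 'all', 'hostname': 'db.example.org'}]      # hypothetical configuration
databases = list(postgresql.get_database_configurations(configured, names))
# One dict per dump, each inheriting the "all" entry's settings with its own name.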
Tests for parse_arguments:

@@ -260,7 +260,7 @@ def test_parse_arguments_allows_repository_with_list():
     module.parse_arguments('--config', 'myconfig', 'list', '--repository', 'test.borg')
 
 
-def test_parse_arguments_disallows_archive_without_extract_or_list():
+def test_parse_arguments_disallows_archive_without_extract_restore_or_list():
     flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
 
     with pytest.raises(SystemExit):
@@ -286,6 +286,18 @@ def test_parse_arguments_allows_archive_with_dashed_extract():
     module.parse_arguments('--config', 'myconfig', '--extract', '--archive', 'test')
 
 
+def test_parse_arguments_allows_archive_with_restore():
+    flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
+
+    module.parse_arguments('--config', 'myconfig', 'restore', '--archive', 'test')
+
+
+def test_parse_arguments_allows_archive_with_dashed_restore():
+    flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
+
+    module.parse_arguments('--config', 'myconfig', '--restore', '--archive', 'test')
+
+
 def test_parse_arguments_allows_archive_with_list():
     flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
 
@@ -299,6 +311,13 @@ def test_parse_arguments_requires_archive_with_extract():
         module.parse_arguments('--config', 'myconfig', 'extract')
 
 
+def test_parse_arguments_requires_archive_with_restore():
+    flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
+
+    with pytest.raises(SystemExit):
+        module.parse_arguments('--config', 'myconfig', 'restore')
+
+
 def test_parse_arguments_allows_progress_before_create():
     flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
 
@@ -317,6 +336,12 @@ def test_parse_arguments_allows_progress_and_extract():
     module.parse_arguments('--progress', 'extract', '--archive', 'test', 'list')
 
 
+def test_parse_arguments_allows_progress_and_restore():
+    flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
+
+    module.parse_arguments('--progress', 'restore', '--archive', 'test', 'list')
+
+
 def test_parse_arguments_disallows_progress_without_create():
     flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
 
Tests for execute_and_log_output:

@@ -7,69 +7,60 @@ from flexmock import flexmock
 from borgmatic import execute as module
 
 
-def test_borg_command_identifies_borg_command():
-    assert module.borg_command(['/usr/bin/borg1', 'info'])
-
-
-def test_borg_command_does_not_identify_other_command():
-    assert not module.borg_command(['grep', 'stuff'])
-
-
 def test_execute_and_log_output_logs_each_line_separately():
     flexmock(module.logger).should_receive('log').with_args(logging.INFO, 'hi').once()
     flexmock(module.logger).should_receive('log').with_args(logging.INFO, 'there').once()
-    flexmock(module).should_receive('borg_command').and_return(False)
+    flexmock(module).should_receive('exit_code_indicates_error').and_return(False)
 
     module.execute_and_log_output(
-        ['echo', 'hi'], output_log_level=logging.INFO, shell=False, environment=None
+        ['echo', 'hi'],
+        output_log_level=logging.INFO,
+        shell=False,
+        environment=None,
+        working_directory=None,
+        error_on_warnings=False,
     )
     module.execute_and_log_output(
-        ['echo', 'there'], output_log_level=logging.INFO, shell=False, environment=None
+        ['echo', 'there'],
+        output_log_level=logging.INFO,
+        shell=False,
+        environment=None,
+        working_directory=None,
+        error_on_warnings=False,
     )
 
 
-def test_execute_and_log_output_with_borg_warning_does_not_raise():
+def test_execute_and_log_output_includes_error_output_in_exception():
     flexmock(module.logger).should_receive('log')
-    flexmock(module).should_receive('borg_command').and_return(True)
-
-    module.execute_and_log_output(
-        ['false'], output_log_level=logging.INFO, shell=False, environment=None
-    )
-
-
-def test_execute_and_log_output_includes_borg_error_output_in_exception():
-    flexmock(module.logger).should_receive('log')
-    flexmock(module).should_receive('borg_command').and_return(True)
+    flexmock(module).should_receive('exit_code_indicates_error').and_return(True)
 
     with pytest.raises(subprocess.CalledProcessError) as error:
         module.execute_and_log_output(
-            ['grep'], output_log_level=logging.INFO, shell=False, environment=None
+            ['grep'],
+            output_log_level=logging.INFO,
+            shell=False,
+            environment=None,
+            working_directory=None,
+            error_on_warnings=False,
         )
 
     assert error.value.returncode == 2
     assert error.value.output
 
 
-def test_execute_and_log_output_with_non_borg_error_raises():
-    flexmock(module.logger).should_receive('log')
-    flexmock(module).should_receive('borg_command').and_return(False)
-
-    with pytest.raises(subprocess.CalledProcessError) as error:
-        module.execute_and_log_output(
-            ['false'], output_log_level=logging.INFO, shell=False, environment=None
-        )
-
-    assert error.value.returncode == 1
-
-
-def test_execute_and_log_output_truncates_long_borg_error_output():
+def test_execute_and_log_output_truncates_long_error_output():
     flexmock(module).ERROR_OUTPUT_MAX_LINE_COUNT = 0
     flexmock(module.logger).should_receive('log')
-    flexmock(module).should_receive('borg_command').and_return(False)
+    flexmock(module).should_receive('exit_code_indicates_error').and_return(True)
 
     with pytest.raises(subprocess.CalledProcessError) as error:
         module.execute_and_log_output(
-            ['grep'], output_log_level=logging.INFO, shell=False, environment=None
+            ['grep'],
+            output_log_level=logging.INFO,
+            shell=False,
+            environment=None,
+            working_directory=None,
+            error_on_warnings=False,
        )
 
     assert error.value.returncode == 2
@@ -78,18 +69,13 @@ def test_execute_and_log_output_truncates_long_borg_error_output():
 
 
 def test_execute_and_log_output_with_no_output_logs_nothing():
     flexmock(module.logger).should_receive('log').never()
-    flexmock(module).should_receive('borg_command').and_return(False)
+    flexmock(module).should_receive('exit_code_indicates_error').and_return(False)
 
     module.execute_and_log_output(
-        ['true'], output_log_level=logging.INFO, shell=False, environment=None
+        ['true'],
+        output_log_level=logging.INFO,
+        shell=False,
+        environment=None,
+        working_directory=None,
+        error_on_warnings=False,
     )
 
 
-def test_execute_and_log_output_with_error_exit_status_raises():
-    flexmock(module.logger).should_receive('log')
-    flexmock(module).should_receive('borg_command').and_return(False)
-
-    with pytest.raises(subprocess.CalledProcessError):
-        module.execute_and_log_output(
-            ['grep'], output_log_level=logging.INFO, shell=False, environment=None
-        )
Tests for borg extract:

@@ -7,8 +7,10 @@ from borgmatic.borg import extract as module
 from ..test_verbosity import insert_logging_mock
 
 
-def insert_execute_command_mock(command):
-    flexmock(module).should_receive('execute_command').with_args(command).once()
+def insert_execute_command_mock(command, working_directory=None, error_on_warnings=True):
+    flexmock(module).should_receive('execute_command').with_args(
+        command, working_directory=working_directory, error_on_warnings=error_on_warnings
+    ).once()
 
 
 def insert_execute_command_output_mock(command, result):
@@ -86,6 +88,7 @@ def test_extract_last_archive_dry_run_calls_borg_with_lock_wait_parameters():
 
 
 def test_extract_archive_calls_borg_with_restore_path_parameters():
+    flexmock(module.os.path).should_receive('abspath').and_return('repo')
     insert_execute_command_mock(('borg', 'extract', 'repo::archive', 'path1', 'path2'))
 
     module.extract_archive(
@@ -99,6 +102,7 @@ def test_extract_archive_calls_borg_with_restore_path_parameters():
 
 
 def test_extract_archive_calls_borg_with_remote_path_parameters():
+    flexmock(module.os.path).should_receive('abspath').and_return('repo')
     insert_execute_command_mock(('borg', 'extract', '--remote-path', 'borg1', 'repo::archive'))
 
     module.extract_archive(
@@ -113,6 +117,7 @@ def test_extract_archive_calls_borg_with_remote_path_parameters():
 
 
 def test_extract_archive_calls_borg_with_numeric_owner_parameter():
+    flexmock(module.os.path).should_receive('abspath').and_return('repo')
     insert_execute_command_mock(('borg', 'extract', '--numeric-owner', 'repo::archive'))
 
     module.extract_archive(
@@ -126,6 +131,7 @@ def test_extract_archive_calls_borg_with_numeric_owner_parameter():
 
 
 def test_extract_archive_calls_borg_with_umask_parameters():
+    flexmock(module.os.path).should_receive('abspath').and_return('repo')
     insert_execute_command_mock(('borg', 'extract', '--umask', '0770', 'repo::archive'))
 
     module.extract_archive(
@@ -139,6 +145,7 @@ def test_extract_archive_calls_borg_with_umask_parameters():
 
 
 def test_extract_archive_calls_borg_with_lock_wait_parameters():
+    flexmock(module.os.path).should_receive('abspath').and_return('repo')
     insert_execute_command_mock(('borg', 'extract', '--lock-wait', '5', 'repo::archive'))
 
     module.extract_archive(
@@ -152,6 +159,7 @@ def test_extract_archive_calls_borg_with_lock_wait_parameters():
 
 
 def test_extract_archive_with_log_info_calls_borg_with_info_parameter():
+    flexmock(module.os.path).should_receive('abspath').and_return('repo')
     insert_execute_command_mock(('borg', 'extract', '--info', 'repo::archive'))
     insert_logging_mock(logging.INFO)
 
@@ -166,6 +174,7 @@ def test_extract_archive_with_log_info_calls_borg_with_info_parameter():
 
 
 def test_extract_archive_with_log_debug_calls_borg_with_debug_parameters():
+    flexmock(module.os.path).should_receive('abspath').and_return('repo')
     insert_execute_command_mock(
         ('borg', 'extract', '--debug', '--list', '--show-rc', 'repo::archive')
     )
@@ -182,6 +191,7 @@ def test_extract_archive_with_log_debug_calls_borg_with_debug_parameters():
 
 
 def test_extract_archive_calls_borg_with_dry_run_parameter():
+    flexmock(module.os.path).should_receive('abspath').and_return('repo')
     insert_execute_command_mock(('borg', 'extract', '--dry-run', 'repo::archive'))
 
     module.extract_archive(
@@ -194,9 +204,27 @@ def test_extract_archive_calls_borg_with_dry_run_parameter():
     )
 
 
+def test_extract_archive_calls_borg_with_destination_path():
+    flexmock(module.os.path).should_receive('abspath').and_return('repo')
+    insert_execute_command_mock(('borg', 'extract', 'repo::archive'), working_directory='/dest')
+
+    module.extract_archive(
+        dry_run=False,
+        repository='repo',
+        archive='archive',
+        restore_paths=None,
+        location_config={},
+        storage_config={},
+        destination_path='/dest',
+    )
+
+
 def test_extract_archive_calls_borg_with_progress_parameter():
+    flexmock(module.os.path).should_receive('abspath').and_return('repo')
     flexmock(module).should_receive('execute_command_without_capture').with_args(
-        ('borg', 'extract', '--progress', 'repo::archive')
+        ('borg', 'extract', '--progress', 'repo::archive'),
+        working_directory=None,
+        error_on_warnings=True,
     ).once()
 
     module.extract_archive(
Tests for the PostgreSQL hook:

@@ -4,9 +4,30 @@
 from borgmatic.hooks import postgresql as module
 
 
+def test_make_database_dump_filename_uses_name_and_hostname():
+    flexmock(module.os.path).should_receive('expanduser').and_return('databases')
+
+    assert module.make_database_dump_filename('test', 'hostname') == 'databases/hostname/test'
+
+
+def test_make_database_dump_filename_without_hostname_defaults_to_localhost():
+    flexmock(module.os.path).should_receive('expanduser').and_return('databases')
+
+    assert module.make_database_dump_filename('test') == 'databases/localhost/test'
+
+
+def test_make_database_dump_filename_with_invalid_name_raises():
+    flexmock(module.os.path).should_receive('expanduser').and_return('databases')
+
+    with pytest.raises(ValueError):
+        module.make_database_dump_filename('invalid/name')
+
+
 def test_dump_databases_runs_pg_dump_for_each_database():
     databases = [{'name': 'foo'}, {'name': 'bar'}]
-    flexmock(module.os.path).should_receive('expanduser').and_return('databases')
+    flexmock(module).should_receive('make_database_dump_filename').and_return(
+        'databases/localhost/foo'
+    ).and_return('databases/localhost/bar')
     flexmock(module.os).should_receive('makedirs')
 
     for name in ('foo', 'bar'):
@@ -29,7 +50,9 @@ def test_dump_databases_runs_pg_dump_for_each_database():
 
 def test_dump_databases_with_dry_run_skips_pg_dump():
     databases = [{'name': 'foo'}, {'name': 'bar'}]
-    flexmock(module.os.path).should_receive('expanduser').and_return('databases')
+    flexmock(module).should_receive('make_database_dump_filename').and_return(
+        'databases/localhost/foo'
+    ).and_return('databases/localhost/bar')
     flexmock(module.os).should_receive('makedirs')
     flexmock(module).should_receive('execute_command').never()
 
@@ -49,7 +72,9 @@ def test_dump_databases_with_invalid_database_name_raises():
 
 def test_dump_databases_runs_pg_dump_with_hostname_and_port():
     databases = [{'name': 'foo', 'hostname': 'database.example.org', 'port': 5433}]
-    flexmock(module.os.path).should_receive('expanduser').and_return('databases')
+    flexmock(module).should_receive('make_database_dump_filename').and_return(
+        'databases/database.example.org/foo'
+    )
     flexmock(module.os).should_receive('makedirs')
 
     flexmock(module).should_receive('execute_command').with_args(
@@ -75,7 +100,9 @@ def test_dump_databases_runs_pg_dump_with_hostname_and_port():
 
 def test_dump_databases_runs_pg_dump_with_username_and_password():
     databases = [{'name': 'foo', 'username': 'postgres', 'password': 'trustsome1'}]
-    flexmock(module.os.path).should_receive('expanduser').and_return('databases')
+    flexmock(module).should_receive('make_database_dump_filename').and_return(
+        'databases/localhost/foo'
+    )
     flexmock(module.os).should_receive('makedirs')
 
     flexmock(module).should_receive('execute_command').with_args(
@@ -99,7 +126,9 @@ def test_dump_databases_runs_pg_dump_with_username_and_password():
 
 def test_dump_databases_runs_pg_dump_with_format():
     databases = [{'name': 'foo', 'format': 'tar'}]
-    flexmock(module.os.path).should_receive('expanduser').and_return('databases')
+    flexmock(module).should_receive('make_database_dump_filename').and_return(
+        'databases/localhost/foo'
+    )
     flexmock(module.os).should_receive('makedirs')
 
     flexmock(module).should_receive('execute_command').with_args(
@@ -121,7 +150,9 @@ def test_dump_databases_runs_pg_dump_with_format():
 
 def test_dump_databases_runs_pg_dump_with_options():
     databases = [{'name': 'foo', 'options': '--stuff=such'}]
-    flexmock(module.os.path).should_receive('expanduser').and_return('databases')
+    flexmock(module).should_receive('make_database_dump_filename').and_return(
+        'databases/localhost/foo'
+    )
     flexmock(module.os).should_receive('makedirs')
 
     flexmock(module).should_receive('execute_command').with_args(
@@ -144,7 +175,9 @@ def test_dump_databases_runs_pg_dump_with_options():
 
 def test_dump_databases_runs_pg_dumpall_for_all_databases():
     databases = [{'name': 'all'}]
-    flexmock(module.os.path).should_receive('expanduser').and_return('databases')
+    flexmock(module).should_receive('make_database_dump_filename').and_return(
+        'databases/localhost/all'
+    )
     flexmock(module.os).should_receive('makedirs')
 
     flexmock(module).should_receive('execute_command').with_args(
@@ -157,7 +190,9 @@ def test_dump_databases_runs_pg_dumpall_for_all_databases():
 
 def test_remove_database_dumps_removes_dump_for_each_database():
     databases = [{'name': 'foo'}, {'name': 'bar'}]
-    flexmock(module.os.path).should_receive('expanduser').and_return('databases')
+    flexmock(module).should_receive('make_database_dump_filename').and_return(
+        'databases/localhost/foo'
+    ).and_return('databases/localhost/bar')
     flexmock(module.os).should_receive('listdir').and_return([])
     flexmock(module.os).should_receive('rmdir')
 
@@ -185,3 +220,36 @@ def test_remove_database_dumps_with_invalid_database_name_raises():
 
     with pytest.raises(ValueError):
         module.remove_database_dumps(databases, 'test.yaml', dry_run=True)
+
+
+def test_make_database_dump_patterns_converts_names_to_glob_paths():
+    flexmock(module).should_receive('make_database_dump_filename').and_return(
+        'databases/*/foo'
+    ).and_return('databases/*/bar')
+
+    assert module.make_database_dump_patterns(('foo', 'bar')) == [
+        'databases/*/foo',
+        'databases/*/bar',
+    ]
+
+
+def test_make_database_dump_patterns_treats_empty_names_as_matching_all_databases():
+    flexmock(module).should_receive('make_database_dump_filename').with_args('*', '*').and_return(
+        'databases/*/*'
+    )
+
+    assert module.make_database_dump_patterns(()) == ['databases/*/*']
+
+
+def test_convert_glob_patterns_to_borg_patterns_removes_leading_slash():
+    assert module.convert_glob_patterns_to_borg_patterns(('/etc/foo/bar',)) == ['sh:etc/foo/bar']
+
+
+def test_get_database_names_from_dumps_gets_names_from_filenames_matching_globs():
+    flexmock(module.glob).should_receive('glob').and_return(
+        ('databases/localhost/foo',)
+    ).and_return(('databases/localhost/bar',)).and_return(())
+
+    assert module.get_database_names_from_dumps(
+        ('databases/*/foo', 'databases/*/bar', 'databases/*/baz')
+    ) == ['foo', 'bar']
@ -6,11 +6,54 @@ from flexmock import flexmock
|
||||||
 from borgmatic import execute as module


+def test_exit_code_indicates_error_with_borg_error_is_true():
+    assert module.exit_code_indicates_error(('/usr/bin/borg1', 'init'), 2)
+
+
+def test_exit_code_indicates_error_with_borg_warning_is_false():
+    assert not module.exit_code_indicates_error(('/usr/bin/borg1', 'init'), 1)
+
+
+def test_exit_code_indicates_error_with_borg_success_is_false():
+    assert not module.exit_code_indicates_error(('/usr/bin/borg1', 'init'), 0)
+
+
+def test_exit_code_indicates_error_with_borg_error_and_error_on_warnings_is_true():
+    assert module.exit_code_indicates_error(('/usr/bin/borg1', 'init'), 2, error_on_warnings=True)
+
+
+def test_exit_code_indicates_error_with_borg_warning_and_error_on_warnings_is_true():
+    assert module.exit_code_indicates_error(('/usr/bin/borg1', 'init'), 1, error_on_warnings=True)
+
+
+def test_exit_code_indicates_error_with_borg_success_and_error_on_warnings_is_false():
+    assert not module.exit_code_indicates_error(
+        ('/usr/bin/borg1', 'init'), 0, error_on_warnings=True
+    )
+
+
+def test_exit_code_indicates_error_with_non_borg_error_is_true():
+    assert module.exit_code_indicates_error(('/usr/bin/command',), 2)
+
+
+def test_exit_code_indicates_error_with_non_borg_warning_is_true():
+    assert module.exit_code_indicates_error(('/usr/bin/command',), 1)
+
+
+def test_exit_code_indicates_error_with_non_borg_success_is_false():
+    assert not module.exit_code_indicates_error(('/usr/bin/command',), 0)
+
+
 def test_execute_command_calls_full_command():
     full_command = ['foo', 'bar']
     flexmock(module.os, environ={'a': 'b'})
     flexmock(module).should_receive('execute_and_log_output').with_args(
-        full_command, output_log_level=logging.INFO, shell=False, environment=None
+        full_command,
+        output_log_level=logging.INFO,
+        shell=False,
+        environment=None,
+        working_directory=None,
+        error_on_warnings=False,
     ).once()

     output = module.execute_command(full_command)
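Note: the tests above fix the contract of exit_code_indicates_error: for Borg commands, exit code 1 is a warning and only counts as an error when error_on_warnings is set, while for any other command every non-zero exit code is an error. A minimal sketch consistent with those cases follows; the way a "Borg" command is detected here is an assumption.

    import os

    def exit_code_indicates_error(command, exit_code, error_on_warnings=False):
        # Hedged sketch, not necessarily borgmatic's exact implementation.
        # Assumption: a command counts as Borg if its executable name contains "borg",
        # which also matches wrappers like "/usr/bin/borg1".
        if 'borg' in os.path.basename(command[0]):
            if error_on_warnings:
                return bool(exit_code)
            # Borg uses 1 for warnings and 2+ for errors.
            return bool(exit_code >= 2)

        # For everything else, any non-zero exit code is an error.
        return bool(exit_code)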
@@ -22,7 +65,12 @@ def test_execute_command_calls_full_command_with_shell():
     full_command = ['foo', 'bar']
     flexmock(module.os, environ={'a': 'b'})
     flexmock(module).should_receive('execute_and_log_output').with_args(
-        full_command, output_log_level=logging.INFO, shell=True, environment=None
+        full_command,
+        output_log_level=logging.INFO,
+        shell=True,
+        environment=None,
+        working_directory=None,
+        error_on_warnings=False,
     ).once()

     output = module.execute_command(full_command, shell=True)
@@ -34,7 +82,12 @@ def test_execute_command_calls_full_command_with_extra_environment():
     full_command = ['foo', 'bar']
     flexmock(module.os, environ={'a': 'b'})
     flexmock(module).should_receive('execute_and_log_output').with_args(
-        full_command, output_log_level=logging.INFO, shell=False, environment={'a': 'b', 'c': 'd'}
+        full_command,
+        output_log_level=logging.INFO,
+        shell=False,
+        environment={'a': 'b', 'c': 'd'},
+        working_directory=None,
+        error_on_warnings=False,
     ).once()

     output = module.execute_command(full_command, extra_environment={'c': 'd'})
@@ -42,12 +95,46 @@ def test_execute_command_calls_full_command_with_extra_environment():
     assert output is None


+def test_execute_command_calls_full_command_with_working_directory():
+    full_command = ['foo', 'bar']
+    flexmock(module.os, environ={'a': 'b'})
+    flexmock(module).should_receive('execute_and_log_output').with_args(
+        full_command,
+        output_log_level=logging.INFO,
+        shell=False,
+        environment=None,
+        working_directory='/working',
+        error_on_warnings=False,
+    ).once()
+
+    output = module.execute_command(full_command, working_directory='/working')
+
+    assert output is None
+
+
+def test_execute_command_calls_full_command_with_error_on_warnings():
+    full_command = ['foo', 'bar']
+    flexmock(module.os, environ={'a': 'b'})
+    flexmock(module).should_receive('execute_and_log_output').with_args(
+        full_command,
+        output_log_level=logging.INFO,
+        shell=False,
+        environment=None,
+        working_directory=None,
+        error_on_warnings=True,
+    ).once()
+
+    output = module.execute_command(full_command, error_on_warnings=True)
+
+    assert output is None
+
+
 def test_execute_command_captures_output():
     full_command = ['foo', 'bar']
     expected_output = '[]'
     flexmock(module.os, environ={'a': 'b'})
     flexmock(module.subprocess).should_receive('check_output').with_args(
-        full_command, shell=False, env=None
+        full_command, shell=False, env=None, cwd=None
     ).and_return(flexmock(decode=lambda: expected_output)).once()

     output = module.execute_command(full_command, output_log_level=None)
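Note: the mock expectations above describe how execute_command is expected to thread its new working_directory and error_on_warnings arguments through: forwarded to the internal execute_and_log_output helper when output is logged, and passed as cwd to subprocess.check_output when output is captured. A rough sketch consistent with those expectations follows; it is not borgmatic's literal implementation, and execute_and_log_output is only stubbed here.

    import logging
    import os
    import subprocess

    def execute_and_log_output(
        full_command, output_log_level, shell, environment, working_directory, error_on_warnings
    ):
        # Stand-in for borgmatic's internal helper (mocked in the tests above), which runs the
        # command, streams its output to the logger, and raises if the exit code indicates an
        # error.
        raise NotImplementedError()

    def execute_command(
        full_command,
        output_log_level=logging.INFO,
        shell=False,
        extra_environment=None,
        working_directory=None,
        error_on_warnings=False,
    ):
        # Merge any extra environment variables into a copy of the current environment.
        environment = {**os.environ, **extra_environment} if extra_environment else None

        if output_log_level is None:
            # Captured output: the working directory goes straight to subprocess as cwd.
            output = subprocess.check_output(
                full_command, shell=shell, env=environment, cwd=working_directory
            )
            return output.decode() if output is not None else None

        # Logged output: forward everything, including the new arguments, to the helper.
        execute_and_log_output(
            full_command,
            output_log_level=output_log_level,
            shell=shell,
            environment=environment,
            working_directory=working_directory,
            error_on_warnings=error_on_warnings,
        )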
@@ -60,7 +147,7 @@ def test_execute_command_captures_output_with_shell():
     expected_output = '[]'
     flexmock(module.os, environ={'a': 'b'})
     flexmock(module.subprocess).should_receive('check_output').with_args(
-        full_command, shell=True, env=None
+        full_command, shell=True, env=None, cwd=None
     ).and_return(flexmock(decode=lambda: expected_output)).once()

     output = module.execute_command(full_command, output_log_level=None, shell=True)
@@ -73,7 +160,7 @@ def test_execute_command_captures_output_with_extra_environment():
     expected_output = '[]'
     flexmock(module.os, environ={'a': 'b'})
     flexmock(module.subprocess).should_receive('check_output').with_args(
-        full_command, shell=False, env={'a': 'b', 'c': 'd'}
+        full_command, shell=False, env={'a': 'b', 'c': 'd'}, cwd=None
     ).and_return(flexmock(decode=lambda: expected_output)).once()

     output = module.execute_command(
@@ -83,6 +170,21 @@ def test_execute_command_captures_output_with_extra_environment():
     assert output == expected_output


+def test_execute_command_captures_output_with_working_directory():
+    full_command = ['foo', 'bar']
+    expected_output = '[]'
+    flexmock(module.os, environ={'a': 'b'})
+    flexmock(module.subprocess).should_receive('check_output').with_args(
+        full_command, shell=False, env=None, cwd='/working'
+    ).and_return(flexmock(decode=lambda: expected_output)).once()
+
+    output = module.execute_command(
+        full_command, output_log_level=None, shell=False, working_directory='/working'
+    )
+
+    assert output == expected_output
+
+
 def test_execute_command_without_capture_does_not_raise_on_success():
     flexmock(module.subprocess).should_receive('check_call').and_raise(
         module.subprocess.CalledProcessError(0, 'borg init')
@@ -92,6 +194,7 @@ def test_execute_command_without_capture_does_not_raise_on_success():


 def test_execute_command_without_capture_does_not_raise_on_warning():
+    flexmock(module).should_receive('exit_code_indicates_error').and_return(False)
     flexmock(module.subprocess).should_receive('check_call').and_raise(
         module.subprocess.CalledProcessError(1, 'borg init')
     )
@@ -100,6 +203,7 @@ def test_execute_command_without_capture_does_not_raise_on_warning():


 def test_execute_command_without_capture_raises_on_error():
+    flexmock(module).should_receive('exit_code_indicates_error').and_return(True)
     flexmock(module.subprocess).should_receive('check_call').and_raise(
         module.subprocess.CalledProcessError(2, 'borg init')
     )
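Note: the mocked exit_code_indicates_error expectations above suggest how execute_command_without_capture decides whether to swallow a CalledProcessError. A hedged sketch of that behavior follows; passing the working directory through as cwd is an assumption made to mirror execute_command, not something these tests check.

    import subprocess

    from borgmatic.execute import exit_code_indicates_error

    def execute_command_without_capture(full_command, working_directory=None, error_on_warnings=False):
        # Run the command without capturing output, letting it write to the terminal directly.
        try:
            subprocess.check_call(full_command, cwd=working_directory)
        except subprocess.CalledProcessError as error:
            # Re-raise only if the exit code really indicates an error; Borg warnings
            # (exit code 1) are tolerated unless error_on_warnings is set.
            if exit_code_indicates_error(full_command, error.returncode, error_on_warnings):
                raise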