2016-06-10 11:21:53 -07:00
|
|
|
from argparse import ArgumentParser
|
2018-07-28 21:21:38 +00:00
|
|
|
import json
|
2017-10-25 21:47:33 -07:00
|
|
|
import logging
|
2016-06-10 11:21:53 -07:00
|
|
|
import os
|
|
|
|
from subprocess import CalledProcessError
|
|
|
|
import sys
|
|
|
|
|
2018-05-26 16:09:08 -07:00
|
|
|
from borgmatic.borg import check as borg_check, create as borg_create, prune as borg_prune, \
|
2018-05-26 16:19:05 -07:00
|
|
|
list as borg_list, info as borg_info
|
2017-10-26 06:38:27 +02:00
|
|
|
from borgmatic.commands import hook
|
2017-07-25 21:18:51 -07:00
|
|
|
from borgmatic.config import collect, convert, validate
|
2017-10-31 21:58:35 -07:00
|
|
|
from borgmatic.signals import configure_signals
|
2018-09-08 20:53:37 +00:00
|
|
|
from borgmatic.verbosity import verbosity_to_log_level
|
2017-10-25 21:47:33 -07:00
|
|
|
|
|
|
|
|
|
|
|
# Module-level logger, named after this module so log output is attributable.
logger = logging.getLogger(__name__)


# Path where legacy (pre-YAML) borgmatic configuration was historically installed;
# used to detect and guard against un-upgraded configuration.
LEGACY_CONFIG_PATH = '/etc/borgmatic/config'
|
2016-06-10 11:21:53 -07:00
|
|
|
|
|
|
|
|
|
|
|
def parse_arguments(*arguments):
    '''
    Given command-line arguments with which this script was invoked, parse the arguments and return
    them as an argparse.Namespace of parsed values.

    Raise ValueError if the combination of given arguments is invalid: --json without --list or
    --info, or --json together with both --list and --info.
    '''
    config_paths = collect.get_default_config_paths()

    parser = ArgumentParser(
        description=
            '''
            A simple wrapper script for the Borg backup software that creates and prunes backups.
            If none of the --prune, --create, or --check options are given, then borgmatic defaults
            to all three: prune, create, and check archives.
            '''
    )
    parser.add_argument(
        '-c', '--config',
        nargs='+',
        dest='config_paths',
        default=config_paths,
        help='Configuration filenames or directories, defaults to: {}'.format(
            ' '.join(config_paths)
        ),
    )
    parser.add_argument(
        '--excludes',
        dest='excludes_filename',
        help='Deprecated in favor of exclude_patterns within configuration',
    )
    parser.add_argument(
        '-p', '--prune',
        dest='prune',
        action='store_true',
        help='Prune archives according to the retention policy',
    )
    parser.add_argument(
        '-C', '--create',
        dest='create',
        action='store_true',
        help='Create archives (actually perform backups)',
    )
    parser.add_argument(
        '-k', '--check',
        dest='check',
        action='store_true',
        help='Check archives for consistency',
    )
    parser.add_argument(
        '-l', '--list',
        dest='list',
        action='store_true',
        help='List archives',
    )
    parser.add_argument(
        '-i', '--info',
        dest='info',
        action='store_true',
        help='Display summary information on archives',
    )
    parser.add_argument(
        '--json',
        dest='json',
        default=False,
        action='store_true',
        help='Output results from the --list or --info options as json',
    )
    parser.add_argument(
        '-n', '--dry-run',
        dest='dry_run',
        action='store_true',
        help='Go through the motions, but do not actually write to any repositories',
    )
    parser.add_argument(
        '-v', '--verbosity',
        type=int,
        choices=range(0, 3),
        default=0,
        help='Display verbose progress (1 for some, 2 for lots)',
    )

    args = parser.parse_args(arguments)

    # --json only makes sense for actions that can produce JSON output.
    if args.json and not (args.list or args.info):
        raise ValueError('The --json option can only be used with the --list or --info options')

    # Two JSON-producing actions at once would interleave their output unusably.
    if args.json and args.list and args.info:
        raise ValueError(
            'With the --json option, options --list and --info cannot be used together'
        )

    # If any of the action flags are explicitly requested, leave them as-is. Otherwise, assume
    # defaults: Mutate the given arguments to enable the default actions.
    if args.prune or args.create or args.check or args.list or args.info:
        return args

    args.prune = True
    args.create = True
    args.check = True
    return args
|
2016-06-10 11:21:53 -07:00
|
|
|
|
|
|
|
|
2017-10-29 16:44:15 -07:00
|
|
|
def run_configuration(config_filename, args):  # pragma: no cover
    '''
    Load and validate a single configuration file, then run the actions selected by the parsed
    command-line arguments (pruning, backups, and/or consistency checks) against it, invoking any
    configured hooks around the backup and on error.
    '''
    logger.info('{}: Parsing configuration file'.format(config_filename))
    config = validate.parse_configuration(config_filename, validate.schema_filename())

    section_names = ('location', 'storage', 'retention', 'consistency', 'hooks')
    location, storage, retention, consistency, hooks = [
        config.get(section_name, {}) for section_name in section_names
    ]

    try:
        borg_path = location.get('local_path', 'borg')
        remote_borg_path = location.get('remote_path')
        borg_create.initialize_environment(storage)

        if args.create:
            hook.execute_hook(hooks.get('before_backup'), config_filename, 'pre-backup')

        _run_commands(args, consistency, borg_path, location, remote_borg_path, retention, storage)

        if args.create:
            hook.execute_hook(hooks.get('after_backup'), config_filename, 'post-backup')
    except (OSError, CalledProcessError):
        # Give the error hook a chance to fire, then let the caller report the failure.
        hook.execute_hook(hooks.get('on_error'), config_filename, 'on-error')
        raise
|
|
|
|
|
|
|
|
|
2018-07-28 21:21:38 +00:00
|
|
|
def _run_commands(args, consistency, local_path, location, remote_path, retention, storage):
    '''
    Run the requested actions against every repository listed in the location configuration.
    Collect any JSON output produced along the way and, if --json was given, write it to stdout
    as a single JSON list.
    '''
    collected_json = []

    for repository_path in location['repositories']:
        _run_commands_on_repository(
            args, consistency, collected_json, local_path, location, remote_path, retention,
            storage, repository_path,
        )

    if args.json:
        sys.stdout.write(json.dumps(collected_json))
|
|
|
|
|
|
|
|
|
2018-07-28 15:02:17 -07:00
|
|
|
def _run_commands_on_repository(
    args, consistency, json_results, local_path, location, remote_path,
    retention, storage, unexpanded_repository,
):  # pragma: no cover
    '''
    Run each action selected by the parsed arguments (prune, create, check, list, info) against a
    single repository. Any JSON output from --list or --info is appended to the given json_results
    list; non-JSON output is written directly to stdout.
    '''
    repo = os.path.expanduser(unexpanded_repository)
    dry_run_note = ' (dry run; not making any changes)' if args.dry_run else ''

    if args.prune:
        logger.info('{}: Pruning archives{}'.format(repo, dry_run_note))
        borg_prune.prune_archives(
            args.dry_run, repo, storage, retention,
            local_path=local_path, remote_path=remote_path,
        )

    if args.create:
        logger.info('{}: Creating archive{}'.format(repo, dry_run_note))
        borg_create.create_archive(
            args.dry_run, repo, location, storage,
            local_path=local_path, remote_path=remote_path,
        )

    if args.check:
        logger.info('{}: Running consistency checks'.format(repo))
        borg_check.check_archives(
            repo, storage, consistency,
            local_path=local_path, remote_path=remote_path,
        )

    if args.list:
        logger.info('{}: Listing archives'.format(repo))
        listing = borg_list.list_archives(
            repo, storage,
            local_path=local_path, remote_path=remote_path, json=args.json,
        )
        if args.json:
            json_results.append(json.loads(listing))
        else:
            sys.stdout.write(listing)

    if args.info:
        logger.info('{}: Displaying summary info for archives'.format(repo))
        summary = borg_info.display_archives_info(
            repo, storage,
            local_path=local_path, remote_path=remote_path, json=args.json,
        )
        if args.json:
            json_results.append(json.loads(summary))
        else:
            sys.stdout.write(summary)
|
2018-07-28 21:21:38 +00:00
|
|
|
|
|
|
|
|
2017-07-09 17:03:45 -07:00
|
|
|
def main():  # pragma: no cover
    '''
    Command-line entry point: parse arguments, configure signals and logging, collect and upgrade
    configuration files, and run the selected actions for each configuration. On any expected
    error, print the message to stderr and exit with status 1.
    '''
    try:
        configure_signals()
        args = parse_arguments(*sys.argv[1:])
        logging.basicConfig(level=verbosity_to_log_level(args.verbosity), format='%(message)s')

        config_filenames = tuple(collect.collect_config_filenames(args.config_paths))
        logger.debug('Ensuring legacy configuration is upgraded')
        convert.guard_configuration_upgraded(LEGACY_CONFIG_PATH, config_filenames)

        if not config_filenames:
            raise ValueError(
                'Error: No configuration files found in: {}'.format(' '.join(args.config_paths))
            )

        for config_filename in config_filenames:
            run_configuration(config_filename, args)
    except (ValueError, OSError, CalledProcessError) as error:
        print(error, file=sys.stderr)
        sys.exit(1)