import collections
import copy
import json
import logging
import os
import sys
import time
from queue import Queue
from subprocess import CalledProcessError

import colorama
import pkg_resources

from borgmatic.borg import borg as borg_borg
from borgmatic.borg import check as borg_check
from borgmatic.borg import compact as borg_compact
from borgmatic.borg import create as borg_create
from borgmatic.borg import environment as borg_environment
from borgmatic.borg import export_tar as borg_export_tar
from borgmatic.borg import extract as borg_extract
from borgmatic.borg import feature as borg_feature
from borgmatic.borg import info as borg_info
from borgmatic.borg import init as borg_init
from borgmatic.borg import list as borg_list
from borgmatic.borg import mount as borg_mount
from borgmatic.borg import prune as borg_prune
from borgmatic.borg import umount as borg_umount
from borgmatic.borg import version as borg_version
from borgmatic.commands.arguments import parse_arguments
from borgmatic.config import checks, collect, convert, validate
from borgmatic.hooks import command, dispatch, dump, monitor
from borgmatic.logger import configure_logging, should_do_markup
from borgmatic.signals import configure_signals
from borgmatic.verbosity import verbosity_to_log_level

logger = logging.getLogger(__name__)

LEGACY_CONFIG_PATH = '/etc/borgmatic/config'

def run_configuration(config_filename, config, arguments):
    '''
    Given a config filename, the corresponding parsed config dict, and command-line arguments as a
    dict from subparser name to a namespace of parsed arguments, execute the defined prune, compact,
    create, check, and/or other actions.

    Yield a combination of:

      * JSON output strings from successfully executing any actions that produce JSON
      * logging.LogRecord instances containing errors from any actions or backup hooks that fail
    '''
    (location, storage, retention, consistency, hooks) = (
        config.get(section_name, {})
        for section_name in ('location', 'storage', 'retention', 'consistency', 'hooks')
    )
    global_arguments = arguments['global']

    local_path = location.get('local_path', 'borg')
    remote_path = location.get('remote_path')
    retries = storage.get('retries', 0)
    retry_wait = storage.get('retry_wait', 0)
    borg_environment.initialize(storage)
    encountered_error = None
    error_repository = ''
    using_primary_action = {'prune', 'compact', 'create', 'check'}.intersection(arguments)
    monitoring_log_level = verbosity_to_log_level(global_arguments.monitoring_verbosity)

    hook_context = {
        'repositories': ','.join(location['repositories']),
    }

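    # Probe the local Borg version up front; it's passed through to run_actions() so
    # version-dependent actions (like compact) can check feature availability.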
    try:
        local_borg_version = borg_version.local_borg_version(local_path)
    except (OSError, CalledProcessError, ValueError) as error:
        yield from make_error_log_records(
            '{}: Error getting local Borg version'.format(config_filename), error
        )
        return

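    # Run any configured pre-action command hooks and start the monitoring hooks before the
    # actions themselves.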
    try:
        if using_primary_action:
            dispatch.call_hooks(
                'initialize_monitor',
                hooks,
                config_filename,
                monitor.MONITOR_HOOK_NAMES,
                monitoring_log_level,
                global_arguments.dry_run,
            )
        if 'prune' in arguments:
            command.execute_hook(
                hooks.get('before_prune'),
                hooks.get('umask'),
                config_filename,
                'pre-prune',
                global_arguments.dry_run,
                **hook_context,
            )
        if 'compact' in arguments:
            command.execute_hook(
                hooks.get('before_compact'),
                hooks.get('umask'),
                config_filename,
                'pre-compact',
                global_arguments.dry_run,
            )
        if 'create' in arguments:
            command.execute_hook(
                hooks.get('before_backup'),
                hooks.get('umask'),
                config_filename,
                'pre-backup',
                global_arguments.dry_run,
                **hook_context,
            )
        if 'check' in arguments:
            command.execute_hook(
                hooks.get('before_check'),
                hooks.get('umask'),
                config_filename,
                'pre-check',
                global_arguments.dry_run,
                **hook_context,
            )
        if 'extract' in arguments:
            command.execute_hook(
                hooks.get('before_extract'),
                hooks.get('umask'),
                config_filename,
                'pre-extract',
                global_arguments.dry_run,
                **hook_context,
            )
        if using_primary_action:
            dispatch.call_hooks(
                'ping_monitor',
                hooks,
                config_filename,
                monitor.MONITOR_HOOK_NAMES,
                monitor.State.START,
                monitoring_log_level,
                global_arguments.dry_run,
            )
    except (OSError, CalledProcessError) as error:
        if command.considered_soft_failure(config_filename, error):
            return

        encountered_error = error
        yield from make_error_log_records(
            '{}: Error running pre hook'.format(config_filename), error
        )

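    # Fan out to each configured repository, retrying failures up to the configured "retries"
    # count and waiting "retry_wait" seconds (times the attempt number) between attempts.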
    if not encountered_error:
        repo_queue = Queue()
        for repo in location['repositories']:
            repo_queue.put((repo, 0),)

        while not repo_queue.empty():
            repository_path, retry_num = repo_queue.get()
            timeout = retry_num * retry_wait
            if timeout:
                logger.warning(f'{config_filename}: Sleeping {timeout}s before next retry')
                time.sleep(timeout)
            try:
                yield from run_actions(
                    arguments=arguments,
                    location=location,
                    storage=storage,
                    retention=retention,
                    consistency=consistency,
                    hooks=hooks,
                    local_path=local_path,
                    remote_path=remote_path,
                    local_borg_version=local_borg_version,
                    repository_path=repository_path,
                )
            except (OSError, CalledProcessError, ValueError) as error:
                yield from make_error_log_records(
                    '{}: Error running actions for repository'.format(repository_path), error
                )
                if retry_num < retries:
                    repo_queue.put((repository_path, retry_num + 1),)
                    logger.warning(
                        f'{config_filename}: Retrying... attempt {retry_num + 1}/{retries}'
                    )
                    continue
                encountered_error = error
                error_repository = repository_path

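    # When all actions succeeded, run any configured post-action command hooks and signal the
    # monitoring hooks that the run finished.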
    if not encountered_error:
        try:
            if 'prune' in arguments:
                command.execute_hook(
                    hooks.get('after_prune'),
                    hooks.get('umask'),
                    config_filename,
                    'post-prune',
                    global_arguments.dry_run,
                    **hook_context,
                )
            if 'compact' in arguments:
                command.execute_hook(
                    hooks.get('after_compact'),
                    hooks.get('umask'),
                    config_filename,
                    'post-compact',
                    global_arguments.dry_run,
                )
            if 'create' in arguments:
                dispatch.call_hooks(
                    'remove_database_dumps',
                    hooks,
                    config_filename,
                    dump.DATABASE_HOOK_NAMES,
                    location,
                    global_arguments.dry_run,
                )
                command.execute_hook(
                    hooks.get('after_backup'),
                    hooks.get('umask'),
                    config_filename,
                    'post-backup',
                    global_arguments.dry_run,
                    **hook_context,
                )
            if 'check' in arguments:
                command.execute_hook(
                    hooks.get('after_check'),
                    hooks.get('umask'),
                    config_filename,
                    'post-check',
                    global_arguments.dry_run,
                    **hook_context,
                )
            if 'extract' in arguments:
                command.execute_hook(
                    hooks.get('after_extract'),
                    hooks.get('umask'),
                    config_filename,
                    'post-extract',
                    global_arguments.dry_run,
                    **hook_context,
                )
            if using_primary_action:
                dispatch.call_hooks(
                    'ping_monitor',
                    hooks,
                    config_filename,
                    monitor.MONITOR_HOOK_NAMES,
                    monitor.State.FINISH,
                    monitoring_log_level,
                    global_arguments.dry_run,
                )
                dispatch.call_hooks(
                    'destroy_monitor',
                    hooks,
                    config_filename,
                    monitor.MONITOR_HOOK_NAMES,
                    monitoring_log_level,
                    global_arguments.dry_run,
                )
        except (OSError, CalledProcessError) as error:
            if command.considered_soft_failure(config_filename, error):
                return

            encountered_error = error
            yield from make_error_log_records(
                '{}: Error running post hook'.format(config_filename), error
            )

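    # If anything went wrong, give the on_error hook and the monitoring hooks a chance to react.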
    if encountered_error and using_primary_action:
        try:
            command.execute_hook(
                hooks.get('on_error'),
                hooks.get('umask'),
                config_filename,
                'on-error',
                global_arguments.dry_run,
                repository=error_repository,
                error=encountered_error,
                output=getattr(encountered_error, 'output', ''),
            )
            dispatch.call_hooks(
                'ping_monitor',
                hooks,
                config_filename,
                monitor.MONITOR_HOOK_NAMES,
                monitor.State.FAIL,
                monitoring_log_level,
                global_arguments.dry_run,
            )
            dispatch.call_hooks(
                'destroy_monitor',
                hooks,
                config_filename,
                monitor.MONITOR_HOOK_NAMES,
                monitoring_log_level,
                global_arguments.dry_run,
            )
        except (OSError, CalledProcessError) as error:
            if command.considered_soft_failure(config_filename, error):
                return

            yield from make_error_log_records(
                '{}: Error running on-error hook'.format(config_filename), error
            )


def run_actions(
    *,
    arguments,
    location,
    storage,
    retention,
    consistency,
    hooks,
    local_path,
    remote_path,
    local_borg_version,
    repository_path,
):  # pragma: no cover
    '''
    Given command-line arguments as a dict from subparser name to a namespace of parsed arguments,
    several different configuration dicts, local and remote paths to Borg, a local Borg version
    string, and a repository name, run all actions from the command-line arguments on the given
    repository.

    Yield JSON output strings from executing any actions that produce JSON.

    Raise OSError or subprocess.CalledProcessError if an error occurs running a command for an
    action. Raise ValueError if the arguments or configuration passed to an action are invalid.
    '''
    repository = os.path.expanduser(repository_path)
    global_arguments = arguments['global']
    dry_run_label = ' (dry run; not making any changes)' if global_arguments.dry_run else ''
    if 'init' in arguments:
        logger.info('{}: Initializing repository'.format(repository))
        borg_init.initialize_repository(
            repository,
            storage,
            arguments['init'].encryption_mode,
            arguments['init'].append_only,
            arguments['init'].storage_quota,
            local_path=local_path,
            remote_path=remote_path,
        )
    if 'prune' in arguments:
        logger.info('{}: Pruning archives{}'.format(repository, dry_run_label))
        borg_prune.prune_archives(
            global_arguments.dry_run,
            repository,
            storage,
            retention,
            local_path=local_path,
            remote_path=remote_path,
            stats=arguments['prune'].stats,
            files=arguments['prune'].files,
        )
    if 'compact' in arguments:
        if borg_feature.available(borg_feature.Feature.COMPACT, local_borg_version):
            logger.info('{}: Compacting segments{}'.format(repository, dry_run_label))
            borg_compact.compact_segments(
                global_arguments.dry_run,
                repository,
                storage,
                local_path=local_path,
                remote_path=remote_path,
                progress=arguments['compact'].progress,
                cleanup_commits=arguments['compact'].cleanup_commits,
                threshold=arguments['compact'].threshold,
            )
        else:
            logger.info(
                '{}: Skipping compact (only available/needed in Borg 1.2+)'.format(repository)
            )
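    # For the create action below, remove any stale database dumps, run the configured database
    # dump hooks, and pass their streaming processes along to the archive creation.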
    if 'create' in arguments:
        logger.info('{}: Creating archive{}'.format(repository, dry_run_label))
        dispatch.call_hooks(
            'remove_database_dumps',
            hooks,
            repository,
            dump.DATABASE_HOOK_NAMES,
            location,
            global_arguments.dry_run,
        )
        active_dumps = dispatch.call_hooks(
            'dump_databases',
            hooks,
            repository,
            dump.DATABASE_HOOK_NAMES,
            location,
            global_arguments.dry_run,
        )
        stream_processes = [
            process for processes in active_dumps.values() for process in processes
        ]

        json_output = borg_create.create_archive(
            global_arguments.dry_run,
            repository,
            location,
            storage,
            local_borg_version,
            local_path=local_path,
            remote_path=remote_path,
            progress=arguments['create'].progress,
            stats=arguments['create'].stats,
            json=arguments['create'].json,
            files=arguments['create'].files,
            stream_processes=stream_processes,
        )
        if json_output:
            yield json.loads(json_output)

    if 'check' in arguments and checks.repository_enabled_for_checks(repository, consistency):
        logger.info('{}: Running consistency checks'.format(repository))
        borg_check.check_archives(
            repository,
            storage,
            consistency,
            local_path=local_path,
            remote_path=remote_path,
            progress=arguments['check'].progress,
            repair=arguments['check'].repair,
            only_checks=arguments['check'].only,
        )
    if 'extract' in arguments:
        if arguments['extract'].repository is None or validate.repositories_match(
            repository, arguments['extract'].repository
        ):
            logger.info(
                '{}: Extracting archive {}'.format(repository, arguments['extract'].archive)
            )
            borg_extract.extract_archive(
                global_arguments.dry_run,
                repository,
                borg_list.resolve_archive_name(
                    repository, arguments['extract'].archive, storage, local_path, remote_path
                ),
                arguments['extract'].paths,
                location,
                storage,
                local_borg_version,
                local_path=local_path,
                remote_path=remote_path,
                destination_path=arguments['extract'].destination,
                strip_components=arguments['extract'].strip_components,
                progress=arguments['extract'].progress,
            )
    if 'export-tar' in arguments:
        if arguments['export-tar'].repository is None or validate.repositories_match(
            repository, arguments['export-tar'].repository
        ):
            logger.info(
                '{}: Exporting archive {} as tar file'.format(
                    repository, arguments['export-tar'].archive
                )
            )
            borg_export_tar.export_tar_archive(
                global_arguments.dry_run,
                repository,
                borg_list.resolve_archive_name(
                    repository, arguments['export-tar'].archive, storage, local_path, remote_path
                ),
                arguments['export-tar'].paths,
                arguments['export-tar'].destination,
                storage,
                local_path=local_path,
                remote_path=remote_path,
                tar_filter=arguments['export-tar'].tar_filter,
                files=arguments['export-tar'].files,
                strip_components=arguments['export-tar'].strip_components,
            )
    if 'mount' in arguments:
        if arguments['mount'].repository is None or validate.repositories_match(
            repository, arguments['mount'].repository
        ):
            if arguments['mount'].archive:
                logger.info(
                    '{}: Mounting archive {}'.format(repository, arguments['mount'].archive)
                )
            else:
                logger.info('{}: Mounting repository'.format(repository))

            borg_mount.mount_archive(
                repository,
                borg_list.resolve_archive_name(
                    repository, arguments['mount'].archive, storage, local_path, remote_path
                ),
                arguments['mount'].mount_point,
                arguments['mount'].paths,
                arguments['mount'].foreground,
                arguments['mount'].options,
                storage,
                local_path=local_path,
                remote_path=remote_path,
            )
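    # For the restore action below, each selected database is restored by extracting its dump
    # from the archive and feeding it to the corresponding database hook.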
    if 'restore' in arguments:
        if arguments['restore'].repository is None or validate.repositories_match(
            repository, arguments['restore'].repository
        ):
            logger.info(
                '{}: Restoring databases from archive {}'.format(
                    repository, arguments['restore'].archive
                )
            )
            dispatch.call_hooks(
                'remove_database_dumps',
                hooks,
                repository,
                dump.DATABASE_HOOK_NAMES,
                location,
                global_arguments.dry_run,
            )

            restore_names = arguments['restore'].databases or []
            if 'all' in restore_names:
                restore_names = []

            archive_name = borg_list.resolve_archive_name(
                repository, arguments['restore'].archive, storage, local_path, remote_path
            )
            found_names = set()

            for hook_name, per_hook_restore_databases in hooks.items():
                if hook_name not in dump.DATABASE_HOOK_NAMES:
                    continue

                for restore_database in per_hook_restore_databases:
                    database_name = restore_database['name']
                    if restore_names and database_name not in restore_names:
                        continue

                    found_names.add(database_name)
                    dump_pattern = dispatch.call_hooks(
                        'make_database_dump_pattern',
                        hooks,
                        repository,
                        dump.DATABASE_HOOK_NAMES,
                        location,
                        database_name,
                    )[hook_name]

                    # Kick off a single database extract to stdout.
                    extract_process = borg_extract.extract_archive(
                        dry_run=global_arguments.dry_run,
                        repository=repository,
                        archive=archive_name,
                        paths=dump.convert_glob_patterns_to_borg_patterns([dump_pattern]),
                        location_config=location,
                        storage_config=storage,
                        local_borg_version=local_borg_version,
                        local_path=local_path,
                        remote_path=remote_path,
                        destination_path='/',
                        # A directory format dump isn't a single file, and therefore can't extract
                        # to stdout. In this case, the extract_process return value is None.
                        extract_to_stdout=bool(restore_database.get('format') != 'directory'),
                    )

                    # Run a single database restore, consuming the extract stdout (if any).
                    dispatch.call_hooks(
                        'restore_database_dump',
                        {hook_name: [restore_database]},
                        repository,
                        dump.DATABASE_HOOK_NAMES,
                        location,
                        global_arguments.dry_run,
                        extract_process,
                    )

            dispatch.call_hooks(
                'remove_database_dumps',
                hooks,
                repository,
                dump.DATABASE_HOOK_NAMES,
                location,
                global_arguments.dry_run,
            )

            if not restore_names and not found_names:
                raise ValueError('No databases were found to restore')

            missing_names = sorted(set(restore_names) - found_names)
            if missing_names:
                raise ValueError(
                    'Cannot restore database(s) {} missing from borgmatic\'s configuration'.format(
                        ', '.join(missing_names)
                    )
                )

    if 'list' in arguments:
        if arguments['list'].repository is None or validate.repositories_match(
            repository, arguments['list'].repository
        ):
            list_arguments = copy.copy(arguments['list'])
            if not list_arguments.json:
                logger.warning('{}: Listing archives'.format(repository))
            list_arguments.archive = borg_list.resolve_archive_name(
                repository, list_arguments.archive, storage, local_path, remote_path
            )
            json_output = borg_list.list_archives(
                repository,
                storage,
                list_arguments=list_arguments,
                local_path=local_path,
                remote_path=remote_path,
            )
            if json_output:
                yield json.loads(json_output)
    if 'info' in arguments:
        if arguments['info'].repository is None or validate.repositories_match(
            repository, arguments['info'].repository
        ):
            info_arguments = copy.copy(arguments['info'])
            if not info_arguments.json:
                logger.warning('{}: Displaying summary info for archives'.format(repository))
            info_arguments.archive = borg_list.resolve_archive_name(
                repository, info_arguments.archive, storage, local_path, remote_path
            )
            json_output = borg_info.display_archives_info(
                repository,
                storage,
                info_arguments=info_arguments,
                local_path=local_path,
                remote_path=remote_path,
            )
            if json_output:
                yield json.loads(json_output)
    if 'borg' in arguments:
        if arguments['borg'].repository is None or validate.repositories_match(
            repository, arguments['borg'].repository
        ):
            logger.warning('{}: Running arbitrary Borg command'.format(repository))
            archive_name = borg_list.resolve_archive_name(
                repository, arguments['borg'].archive, storage, local_path, remote_path
            )
            borg_borg.run_arbitrary_borg(
                repository,
                storage,
                options=arguments['borg'].options,
                archive=archive_name,
                local_path=local_path,
                remote_path=remote_path,
            )


def load_configurations(config_filenames, overrides=None):
    '''
    Given a sequence of configuration filenames, load and validate each configuration file. Return
    the results as a tuple of: dict of configuration filename to corresponding parsed configuration,
    and sequence of logging.LogRecord instances containing any parse errors.
    '''
    # Dict mapping from config filename to corresponding parsed config dict.
    configs = collections.OrderedDict()
    logs = []

    # Parse and load each configuration file.
    for config_filename in config_filenames:
        try:
            configs[config_filename] = validate.parse_configuration(
                config_filename, validate.schema_filename(), overrides
            )
        except PermissionError:
            logs.extend(
                [
                    logging.makeLogRecord(
                        dict(
                            levelno=logging.WARNING,
                            levelname='WARNING',
                            msg='{}: Insufficient permissions to read configuration file'.format(
                                config_filename
                            ),
                        )
                    ),
                ]
            )
        except (ValueError, OSError, validate.Validation_error) as error:
            logs.extend(
                [
                    logging.makeLogRecord(
                        dict(
                            levelno=logging.CRITICAL,
                            levelname='CRITICAL',
                            msg='{}: Error parsing configuration file'.format(config_filename),
                        )
                    ),
                    logging.makeLogRecord(
                        dict(levelno=logging.CRITICAL, levelname='CRITICAL', msg=error)
                    ),
                ]
            )

    return (configs, logs)


def log_record(suppress_log=False, **kwargs):
    '''
    Create a log record based on the given makeLogRecord() arguments, one of which must be
    named "levelno". Log the record (unless suppress log is set) and return it.
    '''
    record = logging.makeLogRecord(kwargs)
    if suppress_log:
        return record

    logger.handle(record)
    return record


def make_error_log_records(message, error=None):
    '''
    Given error message text and an optional exception object, yield a series of logging.LogRecord
    instances with error summary information. As a side effect, log each record.
    '''
    if not error:
        yield log_record(levelno=logging.CRITICAL, levelname='CRITICAL', msg=message)
        return

    try:
        raise error
    except CalledProcessError as error:
        yield log_record(levelno=logging.CRITICAL, levelname='CRITICAL', msg=message)
        if error.output:
            # Suppress these logs for now and save full error output for the log summary at the end.
            yield log_record(
                levelno=logging.CRITICAL, levelname='CRITICAL', msg=error.output, suppress_log=True
            )
        yield log_record(levelno=logging.CRITICAL, levelname='CRITICAL', msg=error)
    except (ValueError, OSError) as error:
        yield log_record(levelno=logging.CRITICAL, levelname='CRITICAL', msg=message)
        yield log_record(levelno=logging.CRITICAL, levelname='CRITICAL', msg=error)
    except:  # noqa: E722
        # Raising above only as a means of determining the error type. Swallow the exception here
        # because we don't want the exception to propagate out of this function.
        pass


def get_local_path(configs):
    '''
    Arbitrarily return the local path from the first configuration dict. Default to "borg" if not
    set.
    '''
    return next(iter(configs.values())).get('location', {}).get('local_path', 'borg')


def collect_configuration_run_summary_logs(configs, arguments):
    '''
    Given a dict of configuration filename to corresponding parsed configuration, and parsed
    command-line arguments as a dict from subparser name to a parsed namespace of arguments, run
    each configuration file and yield a series of logging.LogRecord instances containing summary
    information about each run.

    As a side effect of running through these configuration files, output their JSON results, if
    any, to stdout.
    '''
    # Run cross-file validation checks.
    if 'extract' in arguments:
        repository = arguments['extract'].repository
    elif 'list' in arguments and arguments['list'].archive:
        repository = arguments['list'].repository
    elif 'mount' in arguments:
        repository = arguments['mount'].repository
    else:
        repository = None

    if repository:
        try:
            validate.guard_configuration_contains_repository(repository, configs)
        except ValueError as error:
            yield from make_error_log_records(str(error))
            return

    if not configs:
        yield from make_error_log_records(
            '{}: No valid configuration files found'.format(
                ' '.join(arguments['global'].config_paths)
            )
        )
        return

    if 'create' in arguments:
        try:
            for config_filename, config in configs.items():
                hooks = config.get('hooks', {})
                command.execute_hook(
                    hooks.get('before_everything'),
                    hooks.get('umask'),
                    config_filename,
                    'pre-everything',
                    arguments['global'].dry_run,
                )
        except (CalledProcessError, ValueError, OSError) as error:
            yield from make_error_log_records('Error running pre-everything hook', error)
            return

    # Execute the actions corresponding to each configuration file.
    json_results = []
    for config_filename, config in configs.items():
        results = list(run_configuration(config_filename, config, arguments))
        error_logs = tuple(result for result in results if isinstance(result, logging.LogRecord))

        if error_logs:
            yield from make_error_log_records(
                '{}: Error running configuration file'.format(config_filename)
            )
            yield from error_logs
        else:
            yield logging.makeLogRecord(
                dict(
                    levelno=logging.INFO,
                    levelname='INFO',
                    msg='{}: Successfully ran configuration file'.format(config_filename),
                )
            )
            if results:
                json_results.extend(results)

    if 'umount' in arguments:
        logger.info('Unmounting mount point {}'.format(arguments['umount'].mount_point))
        try:
            borg_umount.unmount_archive(
                mount_point=arguments['umount'].mount_point, local_path=get_local_path(configs)
            )
        except (CalledProcessError, OSError) as error:
            yield from make_error_log_records('Error unmounting mount point', error)

    if json_results:
        sys.stdout.write(json.dumps(json_results))

    if 'create' in arguments:
        try:
            for config_filename, config in configs.items():
                hooks = config.get('hooks', {})
                command.execute_hook(
                    hooks.get('after_everything'),
                    hooks.get('umask'),
                    config_filename,
                    'post-everything',
                    arguments['global'].dry_run,
                )
        except (CalledProcessError, ValueError, OSError) as error:
            yield from make_error_log_records('Error running post-everything hook', error)


def exit_with_help_link():  # pragma: no cover
    '''
    Display a link to get help and exit with an error code.
    '''
    logger.critical('')
    logger.critical('Need some help? https://torsion.org/borgmatic/#issues')
    sys.exit(1)


def main():  # pragma: no cover
    configure_signals()

    try:
        arguments = parse_arguments(*sys.argv[1:])
    except ValueError as error:
        configure_logging(logging.CRITICAL)
        logger.critical(error)
        exit_with_help_link()
    except SystemExit as error:
        if error.code == 0:
            raise error
        configure_logging(logging.CRITICAL)
        logger.critical('Error parsing arguments: {}'.format(' '.join(sys.argv)))
        exit_with_help_link()

    global_arguments = arguments['global']
    if global_arguments.version:
        print(pkg_resources.require('borgmatic')[0].version)
        sys.exit(0)

    config_filenames = tuple(collect.collect_config_filenames(global_arguments.config_paths))
    configs, parse_logs = load_configurations(config_filenames, global_arguments.overrides)

    any_json_flags = any(
        getattr(sub_arguments, 'json', False) for sub_arguments in arguments.values()
    )
    colorama.init(
        autoreset=True,
        strip=not should_do_markup(global_arguments.no_color or any_json_flags, configs),
    )
    try:
        configure_logging(
            verbosity_to_log_level(global_arguments.verbosity),
            verbosity_to_log_level(global_arguments.syslog_verbosity),
            verbosity_to_log_level(global_arguments.log_file_verbosity),
            verbosity_to_log_level(global_arguments.monitoring_verbosity),
            global_arguments.log_file,
        )
    except (FileNotFoundError, PermissionError) as error:
        configure_logging(logging.CRITICAL)
        logger.critical('Error configuring logging: {}'.format(error))
        exit_with_help_link()

    logger.debug('Ensuring legacy configuration is upgraded')
    convert.guard_configuration_upgraded(LEGACY_CONFIG_PATH, config_filenames)

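    # Run each configuration file, then log the collected summary records (plus any config parse
    # errors) at the end of the run.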
    summary_logs = parse_logs + list(collect_configuration_run_summary_logs(configs, arguments))
    summary_logs_max_level = max(log.levelno for log in summary_logs)

    for message in ('', 'summary:'):
        log_record(
            levelno=summary_logs_max_level,
            levelname=logging.getLevelName(summary_logs_max_level),
            msg=message,
        )

    for log in summary_logs:
        logger.handle(log)

    if summary_logs_max_level >= logging.CRITICAL:
        exit_with_help_link()