Remove configuration sections (#723).

Reviewed-on: https://projects.torsion.org/borgmatic-collective/borgmatic/pulls/723
Commit e913cddcd1 by Dan Helfman, 2023-07-14 03:10:51 +00:00
128 changed files with 3644 additions and 3778 deletions

NEWS (12 lines changed)

@@ -10,11 +10,23 @@
   "check --repair".
 * When merging two configuration files, error gracefully if the two files do not adhere to the same
   format.
+* #721: Remove configuration sections ("location:", "storage:", "hooks:" etc.), while still keeping
+  deprecated support for them. Now, all options are at the same level, and you don't need to worry
+  about commenting/uncommenting section headers when you change an option.
+* #721: BREAKING: The retention prefix and the consistency prefix can no longer have different
+  values (unless one is not set).
+* #721: BREAKING: The storage umask and the hooks umask can no longer have different values (unless
+  one is not set).
+* BREAKING: Flags like "--config" that previously took multiple values now need to be given once
+  per value, e.g. "--config first.yaml --config second.yaml" instead of "--config first.yaml
+  second.yaml". This prevents argument parsing errors on ambiguous commands.
 * BREAKING: Remove the deprecated (and silently ignored) "--successful" flag on the "list" action,
   as newer versions of Borg list successful (non-checkpoint) archives by default.
 * All deprecated configuration option values now generate warning logs.
 * Remove the deprecated (and non-functional) "--excludes" flag in favor of excludes within
   configuration.
+* Fix an error when logging too-long command output during error handling. Now, long command output
+  is truncated before logging.

 1.7.15
 * #326: Add configuration options and command-line flags for backing up a database from one
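The "--config" change in the NEWS entry above maps onto a standard argparse disambiguation. A
minimal sketch of the two flag styles (illustrative only, not borgmatic's actual parser code):

```python
import argparse

# Old style: nargs='+' greedily consumes values, so a following positional
# argument (such as an action name) can be swallowed by the flag.
old_style = argparse.ArgumentParser()
old_style.add_argument('--config', nargs='+')

# New style: each occurrence takes exactly one value and appends to a list, so
# "--config first.yaml --config second.yaml" parses unambiguously.
new_style = argparse.ArgumentParser()
new_style.add_argument('--config', action='append')

args = new_style.parse_args(['--config', 'first.yaml', '--config', 'second.yaml'])
print(args.config)  # ['first.yaml', 'second.yaml']
```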

README.md

@@ -16,50 +16,41 @@ The canonical home of borgmatic is at <a href="https://torsion.org/borgmatic">ht

 Here's an example configuration file:

 ```yaml
-location:
-    # List of source directories to backup.
-    source_directories:
-        - /home
-        - /etc
+# List of source directories to backup.
+source_directories:
+    - /home
+    - /etc

 # Paths of local or remote repositories to backup to.
 repositories:
     - path: ssh://k8pDxu32@k8pDxu32.repo.borgbase.com/./repo
       label: borgbase
     - path: /var/lib/backups/local.borg
       label: local

-retention:
-    # Retention policy for how many backups to keep.
-    keep_daily: 7
-    keep_weekly: 4
-    keep_monthly: 6
+# Retention policy for how many backups to keep.
+keep_daily: 7
+keep_weekly: 4
+keep_monthly: 6

-consistency:
-    # List of checks to run to validate your backups.
-    checks:
-        - name: repository
-        - name: archives
-          frequency: 2 weeks
+# List of checks to run to validate your backups.
+checks:
+    - name: repository
+    - name: archives
+      frequency: 2 weeks

-hooks:
-    # Custom preparation scripts to run.
-    before_backup:
-        - prepare-for-backup.sh
+# Custom preparation scripts to run.
+before_backup:
+    - prepare-for-backup.sh

 # Databases to dump and include in backups.
 postgresql_databases:
     - name: users

 # Third-party services to notify you if backups aren't happening.
 healthchecks: https://hc-ping.com/be067061-cf96-4412-8eae-62b0c50d6a8c
 ```

-Want to see borgmatic in action? Check out the <a
-href="https://asciinema.org/a/203761?autoplay=1" target="_blank">screencast</a>.
-
-<a href="https://asciinema.org/a/203761?autoplay=1" target="_blank"><img src="https://asciinema.org/a/203761.png" width="480"></a>
-
 borgmatic is powered by [Borg Backup](https://www.borgbackup.org/).

 ## Integrations
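Once loaded, the flat layout means every option lives at the top level of a single dict. A quick
sketch of what that looks like from Python (PyYAML used purely for illustration; borgmatic has its
own configuration loader and validation):

```python
import yaml  # illustration only; not how borgmatic loads config

with open('/etc/borgmatic/config.yaml') as config_file:
    config = yaml.safe_load(config_file)

# No more config['retention']['keep_daily'] nesting: options are top level.
print(config.get('keep_daily'))          # 7, per the example above
print(config.get('source_directories'))  # ['/home', '/etc']
```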

borgmatic/actions/borg.py

@@ -9,7 +9,7 @@ logger = logging.getLogger(__name__)

 def run_borg(
     repository,
-    storage,
+    config,
     local_borg_version,
     borg_arguments,
     global_arguments,
@@ -28,7 +28,7 @@ def run_borg(
     archive_name = borgmatic.borg.rlist.resolve_archive_name(
         repository['path'],
         borg_arguments.archive,
-        storage,
+        config,
         local_borg_version,
         global_arguments,
         local_path,
@@ -36,7 +36,7 @@ def run_borg(
     )
     borgmatic.borg.borg.run_arbitrary_borg(
         repository['path'],
-        storage,
+        config,
         local_borg_version,
         options=borg_arguments.options,
         archive=archive_name,

borgmatic/actions/break_lock.py

@@ -8,7 +8,7 @@ logger = logging.getLogger(__name__)

 def run_break_lock(
     repository,
-    storage,
+    config,
     local_borg_version,
     break_lock_arguments,
     global_arguments,
@@ -26,7 +26,7 @@ def run_break_lock(
     )
     borgmatic.borg.break_lock.break_lock(
         repository['path'],
-        storage,
+        config,
         local_borg_version,
         global_arguments,
         local_path=local_path,

borgmatic/actions/check.py

@@ -10,10 +10,7 @@ logger = logging.getLogger(__name__)
 def run_check(
     config_filename,
     repository,
-    location,
-    storage,
-    consistency,
-    hooks,
+    config,
     hook_context,
     local_borg_version,
     check_arguments,
@@ -30,8 +27,8 @@ def run_check(
         return

     borgmatic.hooks.command.execute_hook(
-        hooks.get('before_check'),
-        hooks.get('umask'),
+        config.get('before_check'),
+        config.get('umask'),
         config_filename,
         'pre-check',
         global_arguments.dry_run,
@@ -40,9 +37,7 @@ def run_check(
     logger.info(f'{repository.get("label", repository["path"])}: Running consistency checks')
     borgmatic.borg.check.check_archives(
         repository['path'],
-        location,
-        storage,
-        consistency,
+        config,
         local_borg_version,
         global_arguments,
         local_path=local_path,
@@ -53,8 +48,8 @@ def run_check(
         force=check_arguments.force,
     )
     borgmatic.hooks.command.execute_hook(
-        hooks.get('after_check'),
-        hooks.get('umask'),
+        config.get('after_check'),
+        config.get('umask'),
         config_filename,
         'post-check',
         global_arguments.dry_run,
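The same mechanical substitution recurs throughout the action modules in this commit: the separate
location/storage/consistency/hooks dicts collapse into one config dict, so each lookup changes only
its receiver. A toy before/after (option values invented):

```python
# Before: options were nested under their section.
sectioned = {
    'storage': {'umask': '0077'},
    'hooks': {'before_check': ['echo pre-check']},
}

# After: the same options sit at the top level of a single dict.
flat = {
    'umask': '0077',
    'before_check': ['echo pre-check'],
}

assert sectioned['hooks'].get('before_check') == flat.get('before_check')
assert sectioned['storage'].get('umask') == flat.get('umask')
```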

borgmatic/actions/compact.py

@@ -11,9 +11,7 @@ logger = logging.getLogger(__name__)
 def run_compact(
     config_filename,
     repository,
-    storage,
-    retention,
-    hooks,
+    config,
     hook_context,
     local_borg_version,
     compact_arguments,
@@ -31,8 +29,8 @@ def run_compact(
         return

     borgmatic.hooks.command.execute_hook(
-        hooks.get('before_compact'),
-        hooks.get('umask'),
+        config.get('before_compact'),
+        config.get('umask'),
         config_filename,
         'pre-compact',
         global_arguments.dry_run,
@@ -45,7 +43,7 @@ def run_compact(
         borgmatic.borg.compact.compact_segments(
             global_arguments.dry_run,
             repository['path'],
-            storage,
+            config,
             local_borg_version,
             global_arguments,
             local_path=local_path,
@@ -59,8 +57,8 @@ def run_compact(
             f'{repository.get("label", repository["path"])}: Skipping compact (only available/needed in Borg 1.2+)'
         )
     borgmatic.hooks.command.execute_hook(
-        hooks.get('after_compact'),
-        hooks.get('umask'),
+        config.get('after_compact'),
+        config.get('umask'),
         config_filename,
         'post-compact',
         global_arguments.dry_run,

borgmatic/actions/config/bootstrap.py

@@ -43,7 +43,6 @@ def get_config_paths(bootstrap_arguments, global_arguments, local_borg_version):
         ),
         [borgmatic_manifest_path],
         {},
-        {},
         local_borg_version,
         global_arguments,
         extract_to_stdout=True,
@@ -95,7 +94,6 @@ def run_bootstrap(bootstrap_arguments, global_arguments, local_borg_version):
         ),
         [config_path.lstrip(os.path.sep) for config_path in manifest_config_paths],
         {},
-        {},
         local_borg_version,
         global_arguments,
         extract_to_stdout=False,

borgmatic/actions/config/generate.py

@@ -2,6 +2,7 @@ import logging

 import borgmatic.config.generate
 import borgmatic.config.validate
+import borgmatic.logger

 logger = logging.getLogger(__name__)

@@ -14,6 +15,7 @@ def run_generate(generate_arguments, global_arguments):
     Raise FileExistsError if a file already exists at the destination path and the generate
     arguments do not have overwrite set.
     '''
+    borgmatic.logger.add_custom_log_levels()
     dry_run_label = ' (dry run; not actually writing anything)' if global_arguments.dry_run else ''

     logger.answer(

borgmatic/actions/config/validate.py

@@ -1,6 +1,7 @@
 import logging

 import borgmatic.config.generate
+import borgmatic.logger

 logger = logging.getLogger(__name__)

@@ -14,6 +15,8 @@ def run_validate(validate_arguments, configs):
     loading machinery prior to here, so this function mainly exists to support additional validate
     flags like "--show".
     '''
+    borgmatic.logger.add_custom_log_levels()
+
     if validate_arguments.show:
         for config_path, config in configs.items():
             if len(configs) > 1:
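Both config actions now call borgmatic.logger.add_custom_log_levels() before using logger.answer(),
which is not a standard logging method. For readers unfamiliar with the mechanism, Python loggers
can grow custom levels at runtime; a standalone sketch (the ANSWER level number and name here are
assumptions for illustration, not necessarily borgmatic's values):

```python
import logging

ANSWER = logging.WARNING + 5  # assumed: high enough to be shown by default


def add_custom_log_levels():
    # Register the level name and attach a matching Logger.answer() method,
    # which is what a helper like this must do before logger.answer() works.
    logging.addLevelName(ANSWER, 'ANSWER')

    def answer(self, message, *args, **kwargs):
        if self.isEnabledFor(ANSWER):
            self._log(ANSWER, message, args, **kwargs)

    logging.Logger.answer = answer


add_custom_log_levels()
logging.basicConfig(level=logging.INFO)
logging.getLogger(__name__).answer('surfaced like a warning-level message')
```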

borgmatic/actions/create.py

@@ -17,7 +17,7 @@ import borgmatic.hooks.dump
 logger = logging.getLogger(__name__)


-def create_borgmatic_manifest(location, config_paths, dry_run):
+def create_borgmatic_manifest(config, config_paths, dry_run):
     '''
     Create a borgmatic manifest file to store the paths to the configuration files used to create
     the archive.
@@ -25,7 +25,7 @@ def create_borgmatic_manifest(config, config_paths, dry_run):
     if dry_run:
         return

-    borgmatic_source_directory = location.get(
+    borgmatic_source_directory = config.get(
         'borgmatic_source_directory', borgmatic.borg.state.DEFAULT_BORGMATIC_SOURCE_DIRECTORY
     )

@@ -49,9 +49,7 @@ def create_borgmatic_manifest(config, config_paths, dry_run):
 def run_create(
     config_filename,
     repository,
-    location,
-    storage,
-    hooks,
+    config,
     hook_context,
     local_borg_version,
     create_arguments,
@@ -71,8 +69,8 @@ def run_create(
         return

     borgmatic.hooks.command.execute_hook(
-        hooks.get('before_backup'),
-        hooks.get('umask'),
+        config.get('before_backup'),
+        config.get('umask'),
         config_filename,
         'pre-backup',
         global_arguments.dry_run,
@@ -81,30 +79,25 @@ def run_create(
     logger.info(f'{repository.get("label", repository["path"])}: Creating archive{dry_run_label}')
     borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
         'remove_database_dumps',
-        hooks,
+        config,
         repository['path'],
         borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
-        location,
         global_arguments.dry_run,
     )
     active_dumps = borgmatic.hooks.dispatch.call_hooks(
         'dump_databases',
-        hooks,
+        config,
         repository['path'],
         borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
-        location,
         global_arguments.dry_run,
     )
-    create_borgmatic_manifest(
-        location, global_arguments.used_config_paths, global_arguments.dry_run
-    )
+    create_borgmatic_manifest(config, global_arguments.used_config_paths, global_arguments.dry_run)
     stream_processes = [process for processes in active_dumps.values() for process in processes]

     json_output = borgmatic.borg.create.create_archive(
         global_arguments.dry_run,
         repository['path'],
-        location,
-        storage,
+        config,
         local_borg_version,
         global_arguments,
         local_path=local_path,
@@ -120,15 +113,14 @@ def run_create(
     borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
         'remove_database_dumps',
-        hooks,
+        config,
         config_filename,
         borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
-        location,
         global_arguments.dry_run,
     )
     borgmatic.hooks.command.execute_hook(
-        hooks.get('after_backup'),
-        hooks.get('umask'),
+        config.get('after_backup'),
+        config.get('umask'),
         config_filename,
         'post-backup',
         global_arguments.dry_run,

borgmatic/actions/export_tar.py

@@ -9,7 +9,7 @@ logger = logging.getLogger(__name__)

 def run_export_tar(
     repository,
-    storage,
+    config,
     local_borg_version,
     export_tar_arguments,
     global_arguments,
@@ -31,7 +31,7 @@ def run_export_tar(
         borgmatic.borg.rlist.resolve_archive_name(
             repository['path'],
             export_tar_arguments.archive,
-            storage,
+            config,
             local_borg_version,
             global_arguments,
             local_path,
@@ -39,7 +39,7 @@ def run_export_tar(
         ),
         export_tar_arguments.paths,
         export_tar_arguments.destination,
-        storage,
+        config,
         local_borg_version,
         global_arguments,
         local_path=local_path,

borgmatic/actions/extract.py

@@ -11,9 +11,7 @@ logger = logging.getLogger(__name__)
 def run_extract(
     config_filename,
     repository,
-    location,
-    storage,
-    hooks,
+    config,
     hook_context,
     local_borg_version,
     extract_arguments,
@@ -25,8 +23,8 @@ def run_extract(
     Run the "extract" action for the given repository.
     '''
     borgmatic.hooks.command.execute_hook(
-        hooks.get('before_extract'),
-        hooks.get('umask'),
+        config.get('before_extract'),
+        config.get('umask'),
         config_filename,
         'pre-extract',
         global_arguments.dry_run,
@@ -44,15 +42,14 @@ def run_extract(
         borgmatic.borg.rlist.resolve_archive_name(
             repository['path'],
             extract_arguments.archive,
-            storage,
+            config,
             local_borg_version,
             global_arguments,
             local_path,
             remote_path,
         ),
         extract_arguments.paths,
-        location,
-        storage,
+        config,
         local_borg_version,
         global_arguments,
         local_path=local_path,
@@ -62,8 +59,8 @@ def run_extract(
         progress=extract_arguments.progress,
     )
     borgmatic.hooks.command.execute_hook(
-        hooks.get('after_extract'),
-        hooks.get('umask'),
+        config.get('after_extract'),
+        config.get('umask'),
         config_filename,
         'post-extract',
         global_arguments.dry_run,

borgmatic/actions/info.py

@@ -11,7 +11,7 @@ logger = logging.getLogger(__name__)

 def run_info(
     repository,
-    storage,
+    config,
     local_borg_version,
     info_arguments,
     global_arguments,
@@ -33,7 +33,7 @@ def run_info(
         archive_name = borgmatic.borg.rlist.resolve_archive_name(
             repository['path'],
             info_arguments.archive,
-            storage,
+            config,
             local_borg_version,
             global_arguments,
             local_path,
@@ -41,7 +41,7 @@ def run_info(
         )
     json_output = borgmatic.borg.info.display_archives_info(
         repository['path'],
-        storage,
+        config,
         local_borg_version,
         borgmatic.actions.arguments.update_arguments(info_arguments, archive=archive_name),
         global_arguments,

borgmatic/actions/list.py

@@ -10,7 +10,7 @@ logger = logging.getLogger(__name__)

 def run_list(
     repository,
-    storage,
+    config,
     local_borg_version,
     list_arguments,
     global_arguments,
@@ -34,7 +34,7 @@ def run_list(
         archive_name = borgmatic.borg.rlist.resolve_archive_name(
             repository['path'],
             list_arguments.archive,
-            storage,
+            config,
             local_borg_version,
             global_arguments,
             local_path,
@@ -42,7 +42,7 @@ def run_list(
         )
     json_output = borgmatic.borg.list.list_archive(
         repository['path'],
-        storage,
+        config,
         local_borg_version,
         borgmatic.actions.arguments.update_arguments(list_arguments, archive=archive_name),
         global_arguments,

borgmatic/actions/mount.py

@@ -9,7 +9,7 @@ logger = logging.getLogger(__name__)

 def run_mount(
     repository,
-    storage,
+    config,
     local_borg_version,
     mount_arguments,
     global_arguments,
@@ -34,14 +34,14 @@ def run_mount(
         borgmatic.borg.rlist.resolve_archive_name(
             repository['path'],
             mount_arguments.archive,
-            storage,
+            config,
             local_borg_version,
             global_arguments,
             local_path,
             remote_path,
         ),
         mount_arguments,
-        storage,
+        config,
         local_borg_version,
         global_arguments,
         local_path=local_path,

borgmatic/actions/prune.py

@@ -10,9 +10,7 @@ logger = logging.getLogger(__name__)
 def run_prune(
     config_filename,
     repository,
-    storage,
-    retention,
-    hooks,
+    config,
     hook_context,
     local_borg_version,
     prune_arguments,
@@ -30,8 +28,8 @@ def run_prune(
         return

     borgmatic.hooks.command.execute_hook(
-        hooks.get('before_prune'),
-        hooks.get('umask'),
+        config.get('before_prune'),
+        config.get('umask'),
         config_filename,
         'pre-prune',
         global_arguments.dry_run,
@@ -41,8 +39,7 @@ def run_prune(
     borgmatic.borg.prune.prune_archives(
         global_arguments.dry_run,
         repository['path'],
-        storage,
-        retention,
+        config,
         local_borg_version,
         prune_arguments,
         global_arguments,
@@ -50,8 +47,8 @@ def run_prune(
         remote_path=remote_path,
     )
     borgmatic.hooks.command.execute_hook(
-        hooks.get('after_prune'),
-        hooks.get('umask'),
+        config.get('after_prune'),
+        config.get('umask'),
         config_filename,
         'post-prune',
         global_arguments.dry_run,

borgmatic/actions/rcreate.py

@@ -8,7 +8,7 @@ logger = logging.getLogger(__name__)

 def run_rcreate(
     repository,
-    storage,
+    config,
     local_borg_version,
     rcreate_arguments,
     global_arguments,
@@ -27,7 +27,7 @@ def run_rcreate(
     borgmatic.borg.rcreate.create_repository(
         global_arguments.dry_run,
         repository['path'],
-        storage,
+        config,
         local_borg_version,
         global_arguments,
         rcreate_arguments.encryption_mode,

borgmatic/actions/restore.py

@@ -18,12 +18,12 @@ UNSPECIFIED_HOOK = object()

 def get_configured_database(
-    hooks, archive_database_names, hook_name, database_name, configuration_database_name=None
+    config, archive_database_names, hook_name, database_name, configuration_database_name=None
 ):
     '''
-    Find the first database with the given hook name and database name in the configured hooks
-    dict and the given archive database names dict (from hook name to database names contained in
-    a particular backup archive). If UNSPECIFIED_HOOK is given as the hook name, search all database
+    Find the first database with the given hook name and database name in the configuration dict and
+    the given archive database names dict (from hook name to database names contained in a
+    particular backup archive). If UNSPECIFIED_HOOK is given as the hook name, search all database
     hooks for the named database. If a configuration database name is given, use that instead of the
     database name to lookup the database in the given hooks configuration.
@@ -33,9 +33,13 @@ def get_configured_database(
         configuration_database_name = database_name

     if hook_name == UNSPECIFIED_HOOK:
-        hooks_to_search = hooks
+        hooks_to_search = {
+            hook_name: value
+            for (hook_name, value) in config.items()
+            if hook_name in borgmatic.hooks.dump.DATABASE_HOOK_NAMES
+        }
     else:
-        hooks_to_search = {hook_name: hooks[hook_name]}
+        hooks_to_search = {hook_name: config[hook_name]}

     return next(
         (
@@ -58,9 +62,7 @@ def get_configured_hook_name_and_database(hooks, database_name):

 def restore_single_database(
     repository,
-    location,
-    storage,
-    hooks,
+    config,
     local_borg_version,
     global_arguments,
     local_path,
@@ -81,10 +83,9 @@ def restore_single_database(

     dump_pattern = borgmatic.hooks.dispatch.call_hooks(
         'make_database_dump_pattern',
-        hooks,
+        config,
         repository['path'],
         borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
-        location,
         database['name'],
     )[hook_name]

@@ -94,8 +95,7 @@ def restore_single_database(
         repository=repository['path'],
         archive=archive_name,
         paths=borgmatic.hooks.dump.convert_glob_patterns_to_borg_patterns([dump_pattern]),
-        location_config=location,
-        storage_config=storage,
+        config=config,
         local_borg_version=local_borg_version,
         global_arguments=global_arguments,
         local_path=local_path,
@@ -109,10 +109,10 @@ def restore_single_database(
     # Run a single database restore, consuming the extract stdout (if any).
     borgmatic.hooks.dispatch.call_hooks(
         'restore_database_dump',
-        {hook_name: [database]},
+        config,
         repository['path'],
+        database['name'],
         borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
-        location,
         global_arguments.dry_run,
         extract_process,
         connection_params,
@@ -122,21 +122,20 @@ def restore_single_database(
 def collect_archive_database_names(
     repository,
     archive,
-    location,
-    storage,
+    config,
     local_borg_version,
     global_arguments,
     local_path,
     remote_path,
 ):
     '''
-    Given a local or remote repository path, a resolved archive name, a location configuration dict,
-    a storage configuration dict, the local Borg version, global_arguments an argparse.Namespace,
-    and local and remote Borg paths, query the archive for the names of databases it contains and
-    return them as a dict from hook name to a sequence of database names.
+    Given a local or remote repository path, a resolved archive name, a configuration dict, the
+    local Borg version, global_arguments an argparse.Namespace, and local and remote Borg paths,
+    query the archive for the names of databases it contains and return them as a dict from hook
+    name to a sequence of database names.
     '''
     borgmatic_source_directory = os.path.expanduser(
-        location.get(
+        config.get(
             'borgmatic_source_directory', borgmatic.borg.state.DEFAULT_BORGMATIC_SOURCE_DIRECTORY
         )
     ).lstrip('/')
@@ -146,7 +145,7 @@ def collect_archive_database_names(
     dump_paths = borgmatic.borg.list.capture_archive_listing(
         repository,
         archive,
-        storage,
+        config,
         local_borg_version,
         global_arguments,
         list_path=parent_dump_path,
@@ -249,9 +248,7 @@ def ensure_databases_found(restore_names, remaining_restore_names, found_names):

 def run_restore(
     repository,
-    location,
-    storage,
-    hooks,
+    config,
     local_borg_version,
     restore_arguments,
     global_arguments,
@@ -275,17 +272,16 @@ def run_restore(
     borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
         'remove_database_dumps',
-        hooks,
+        config,
         repository['path'],
         borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
-        location,
         global_arguments.dry_run,
     )

     archive_name = borgmatic.borg.rlist.resolve_archive_name(
         repository['path'],
         restore_arguments.archive,
-        storage,
+        config,
         local_borg_version,
         global_arguments,
         local_path,
@@ -294,8 +290,7 @@ def run_restore(
     archive_database_names = collect_archive_database_names(
         repository['path'],
         archive_name,
-        location,
-        storage,
+        config,
         local_borg_version,
         global_arguments,
         local_path,
@@ -315,7 +310,7 @@ def run_restore(
     for hook_name, database_names in restore_names.items():
         for database_name in database_names:
             found_hook_name, found_database = get_configured_database(
-                hooks, archive_database_names, hook_name, database_name
+                config, archive_database_names, hook_name, database_name
             )

             if not found_database:
@@ -327,9 +322,7 @@ def run_restore(
             found_names.add(database_name)
             restore_single_database(
                 repository,
-                location,
-                storage,
-                hooks,
+                config,
                 local_borg_version,
                 global_arguments,
                 local_path,
@@ -340,12 +333,12 @@ def run_restore(
                 connection_params,
             )

-    # For any database that weren't found via exact matches in the hooks configuration, try to
-    # fallback to "all" entries.
+    # For any database that weren't found via exact matches in the configuration, try to fallback
+    # to "all" entries.
     for hook_name, database_names in remaining_restore_names.items():
         for database_name in database_names:
             found_hook_name, found_database = get_configured_database(
-                hooks, archive_database_names, hook_name, database_name, 'all'
+                config, archive_database_names, hook_name, database_name, 'all'
             )

             if not found_database:
@@ -357,9 +350,7 @@ def run_restore(

             restore_single_database(
                 repository,
-                location,
-                storage,
-                hooks,
+                config,
                 local_borg_version,
                 global_arguments,
                 local_path,
@@ -372,10 +363,9 @@ def run_restore(

     borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
         'remove_database_dumps',
-        hooks,
+        config,
         repository['path'],
         borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
-        location,
         global_arguments.dry_run,
     )
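One substantive change hides among the signature updates above: with no "hooks:" section left to
pass around, get_configured_database() filters the flat config itself. The comprehension behaves
like this standalone (the hook names shown are an illustrative subset):

```python
DATABASE_HOOK_NAMES = {'postgresql_databases', 'mysql_databases', 'sqlite_databases'}

config = {
    'source_directories': ['/home'],
    'keep_daily': 7,
    'postgresql_databases': [{'name': 'users'}],
}

# Non-hook options like keep_daily fall away; only database hooks remain.
hooks_to_search = {
    hook_name: value
    for (hook_name, value) in config.items()
    if hook_name in DATABASE_HOOK_NAMES
}
print(hooks_to_search)  # {'postgresql_databases': [{'name': 'users'}]}
```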

borgmatic/actions/rinfo.py

@@ -9,7 +9,7 @@ logger = logging.getLogger(__name__)

 def run_rinfo(
     repository,
-    storage,
+    config,
     local_borg_version,
     rinfo_arguments,
     global_arguments,
@@ -31,7 +31,7 @@ def run_rinfo(

     json_output = borgmatic.borg.rinfo.display_repository_info(
         repository['path'],
-        storage,
+        config,
         local_borg_version,
         rinfo_arguments=rinfo_arguments,
         global_arguments=global_arguments,

borgmatic/actions/rlist.py

@@ -9,7 +9,7 @@ logger = logging.getLogger(__name__)

 def run_rlist(
     repository,
-    storage,
+    config,
     local_borg_version,
     rlist_arguments,
     global_arguments,
@@ -29,7 +29,7 @@ def run_rlist(

     json_output = borgmatic.borg.rlist.list_repository(
         repository['path'],
-        storage,
+        config,
         local_borg_version,
         rlist_arguments=rlist_arguments,
         global_arguments=global_arguments,

borgmatic/actions/transfer.py

@@ -7,7 +7,7 @@ logger = logging.getLogger(__name__)

 def run_transfer(
     repository,
-    storage,
+    config,
     local_borg_version,
     transfer_arguments,
     global_arguments,
@@ -23,7 +23,7 @@ def run_transfer(
     borgmatic.borg.transfer.transfer_archives(
         global_arguments.dry_run,
         repository['path'],
-        storage,
+        config,
         local_borg_version,
         transfer_arguments,
         global_arguments,

borgmatic/borg/borg.py

@@ -13,7 +13,7 @@ BORG_SUBCOMMANDS_WITH_SUBCOMMANDS = {'key', 'debug'}

 def run_arbitrary_borg(
     repository_path,
-    storage_config,
+    config,
     local_borg_version,
     options,
     archive=None,
@@ -21,13 +21,13 @@ def run_arbitrary_borg(
     remote_path=None,
 ):
     '''
-    Given a local or remote repository path, a storage config dict, the local Borg version, a
+    Given a local or remote repository path, a configuration dict, the local Borg version, a
     sequence of arbitrary command-line Borg options, and an optional archive name, run an arbitrary
     Borg command, passing in REPOSITORY and ARCHIVE environment variables for optional use in the
     command.
     '''
     borgmatic.logger.add_custom_log_levels()
-    lock_wait = storage_config.get('lock_wait', None)
+    lock_wait = config.get('lock_wait', None)

     try:
         options = options[1:] if options[0] == '--' else options
@@ -61,7 +61,7 @@ def run_arbitrary_borg(
         borg_local_path=local_path,
         shell=True,
         extra_environment=dict(
-            (environment.make_environment(storage_config) or {}),
+            (environment.make_environment(config) or {}),
             **{
                 'BORG_REPO': repository_path,
                 'ARCHIVE': archive if archive else '',

borgmatic/borg/break_lock.py

@@ -8,19 +8,19 @@ logger = logging.getLogger(__name__)

 def break_lock(
     repository_path,
-    storage_config,
+    config,
     local_borg_version,
     global_arguments,
     local_path='borg',
     remote_path=None,
 ):
     '''
-    Given a local or remote repository path, a storage configuration dict, the local Borg version,
-    an argparse.Namespace of global arguments, and optional local and remote Borg paths, break any
+    Given a local or remote repository path, a configuration dict, the local Borg version, an
+    argparse.Namespace of global arguments, and optional local and remote Borg paths, break any
     repository and cache locks leftover from Borg aborting.
     '''
-    umask = storage_config.get('umask', None)
-    lock_wait = storage_config.get('lock_wait', None)
+    umask = config.get('umask', None)
+    lock_wait = config.get('lock_wait', None)

     full_command = (
         (local_path, 'break-lock')
@@ -33,5 +33,5 @@ def break_lock(
         + flags.make_repository_flags(repository_path, local_borg_version)
     )

-    borg_environment = environment.make_environment(storage_config)
+    borg_environment = environment.make_environment(config)
     execute_command(full_command, borg_local_path=local_path, extra_environment=borg_environment)

borgmatic/borg/check.py

@@ -19,12 +19,12 @@ DEFAULT_CHECKS = (
 logger = logging.getLogger(__name__)


-def parse_checks(consistency_config, only_checks=None):
+def parse_checks(config, only_checks=None):
     '''
-    Given a consistency config with a "checks" sequence of dicts and an optional list of override
+    Given a configuration dict with a "checks" sequence of dicts and an optional list of override
     checks, return a tuple of named checks to run.

-    For example, given a retention config of:
+    For example, given a config of:

         {'checks': ({'name': 'repository'}, {'name': 'archives'})}
@@ -36,8 +36,7 @@ def parse_checks(config, only_checks=None):
     has a name of "disabled", return an empty tuple, meaning that no checks should be run.
     '''
     checks = only_checks or tuple(
-        check_config['name']
-        for check_config in (consistency_config.get('checks', None) or DEFAULT_CHECKS)
+        check_config['name'] for check_config in (config.get('checks', None) or DEFAULT_CHECKS)
     )
     checks = tuple(check.lower() for check in checks)
     if 'disabled' in checks:
@@ -90,23 +89,22 @@ def parse_frequency(frequency):


 def filter_checks_on_frequency(
-    location_config,
-    consistency_config,
+    config,
     borg_repository_id,
     checks,
     force,
     archives_check_id=None,
 ):
     '''
-    Given a location config, a consistency config with a "checks" sequence of dicts, a Borg
-    repository ID, a sequence of checks, whether to force checks to run, and an ID for the archives
-    check potentially being run (if any), filter down those checks based on the configured
-    "frequency" for each check as compared to its check time file.
+    Given a configuration dict with a "checks" sequence of dicts, a Borg repository ID, a sequence
+    of checks, whether to force checks to run, and an ID for the archives check potentially being
+    run (if any), filter down those checks based on the configured "frequency" for each check as
+    compared to its check time file.

     In other words, a check whose check time file's timestamp is too new (based on the configured
     frequency) will get cut from the returned sequence of checks. Example:

-    consistency_config = {
+    config = {
         'checks': [
             {
                 'name': 'archives',
@@ -115,9 +113,9 @@ def filter_checks_on_frequency(
         ]
     }

-    When this function is called with that consistency_config and "archives" in checks, "archives"
-    will get filtered out of the returned result if its check time file is newer than 2 weeks old,
-    indicating that it's not yet time to run that check again.
+    When this function is called with that config and "archives" in checks, "archives" will get
+    filtered out of the returned result if its check time file is newer than 2 weeks old, indicating
+    that it's not yet time to run that check again.

     Raise ValueError if a frequency cannot be parsed.
     '''
@@ -126,7 +124,7 @@ def filter_checks_on_frequency(
     if force:
         return tuple(filtered_checks)

-    for check_config in consistency_config.get('checks', DEFAULT_CHECKS):
+    for check_config in config.get('checks', DEFAULT_CHECKS):
         check = check_config['name']
         if checks and check not in checks:
             continue
@@ -135,9 +133,7 @@ def filter_checks_on_frequency(
         if not frequency_delta:
             continue

-        check_time = probe_for_check_time(
-            location_config, borg_repository_id, check, archives_check_id
-        )
+        check_time = probe_for_check_time(config, borg_repository_id, check, archives_check_id)
         if not check_time:
             continue
@@ -153,13 +149,11 @@ def filter_checks_on_frequency(
     return tuple(filtered_checks)


-def make_archive_filter_flags(
-    local_borg_version, storage_config, checks, check_last=None, prefix=None
-):
+def make_archive_filter_flags(local_borg_version, config, checks, check_last=None, prefix=None):
     '''
-    Given the local Borg version, a storage configuration dict, a parsed sequence of checks, the
-    check last value, and a consistency check prefix, transform the checks into tuple of
-    command-line flags for filtering archives in a check command.
+    Given the local Borg version, a configuration dict, a parsed sequence of checks, the check last
+    value, and a consistency check prefix, transform the checks into tuple of command-line flags for
+    filtering archives in a check command.

     If a check_last value is given and "archives" is in checks, then include a "--last" flag. And if
     a prefix value is given and "archives" is in checks, then include a "--match-archives" flag.
@@ -174,8 +168,8 @@ def make_archive_filter_flags(
             if prefix
             else (
                 flags.make_match_archives_flags(
-                    storage_config.get('match_archives'),
-                    storage_config.get('archive_name_format'),
+                    config.get('match_archives'),
+                    config.get('archive_name_format'),
                     local_borg_version,
                 )
             )
@@ -237,14 +231,14 @@ def make_check_flags(checks, archive_filter_flags):
     )


-def make_check_time_path(location_config, borg_repository_id, check_type, archives_check_id=None):
+def make_check_time_path(config, borg_repository_id, check_type, archives_check_id=None):
     '''
-    Given a location configuration dict, a Borg repository ID, the name of a check type
-    ("repository", "archives", etc.), and a unique hash of the archives filter flags, return a
-    path for recording that check's time (the time of that check last occurring).
+    Given a configuration dict, a Borg repository ID, the name of a check type ("repository",
+    "archives", etc.), and a unique hash of the archives filter flags, return a path for recording
+    that check's time (the time of that check last occurring).
     '''
     borgmatic_source_directory = os.path.expanduser(
-        location_config.get('borgmatic_source_directory', state.DEFAULT_BORGMATIC_SOURCE_DIRECTORY)
+        config.get('borgmatic_source_directory', state.DEFAULT_BORGMATIC_SOURCE_DIRECTORY)
     )

     if check_type in ('archives', 'data'):
@@ -287,11 +281,11 @@ def read_check_time(path):
         return None


-def probe_for_check_time(location_config, borg_repository_id, check, archives_check_id):
+def probe_for_check_time(config, borg_repository_id, check, archives_check_id):
     '''
-    Given a location configuration dict, a Borg repository ID, the name of a check type
-    ("repository", "archives", etc.), and a unique hash of the archives filter flags, return a
-    the corresponding check time or None if such a check time does not exist.
+    Given a configuration dict, a Borg repository ID, the name of a check type ("repository",
+    "archives", etc.), and a unique hash of the archives filter flags, return a the corresponding
+    check time or None if such a check time does not exist.

     When the check type is "archives" or "data", this function probes two different paths to find
     the check time, e.g.:
@@ -311,8 +305,8 @@ def probe_for_check_time(config, borg_repository_id, check, archives_check_id):
         read_check_time(group[0])
         for group in itertools.groupby(
             (
-                make_check_time_path(location_config, borg_repository_id, check, archives_check_id),
-                make_check_time_path(location_config, borg_repository_id, check),
+                make_check_time_path(config, borg_repository_id, check, archives_check_id),
+                make_check_time_path(config, borg_repository_id, check),
             )
         )
     )
@@ -323,10 +317,10 @@ def probe_for_check_time(config, borg_repository_id, check, archives_check_id):
     return None


-def upgrade_check_times(location_config, borg_repository_id):
+def upgrade_check_times(config, borg_repository_id):
     '''
-    Given a location configuration dict and a Borg repository ID, upgrade any corresponding check
-    times on disk from old-style paths to new-style paths.
+    Given a configuration dict and a Borg repository ID, upgrade any corresponding check times on
+    disk from old-style paths to new-style paths.

     Currently, the only upgrade performed is renaming an archive or data check path that looks like:
@@ -337,7 +331,7 @@ def upgrade_check_times(config, borg_repository_id):
         ~/.borgmatic/checks/1234567890/archives/all
     '''
     for check_type in ('archives', 'data'):
-        new_path = make_check_time_path(location_config, borg_repository_id, check_type, 'all')
+        new_path = make_check_time_path(config, borg_repository_id, check_type, 'all')
         old_path = os.path.dirname(new_path)
         temporary_path = f'{old_path}.temp'
@@ -357,9 +351,7 @@ def upgrade_check_times(config, borg_repository_id):

 def check_archives(
     repository_path,
-    location_config,
-    storage_config,
-    consistency_config,
+    config,
     local_borg_version,
     global_arguments,
     local_path='borg',
@@ -370,10 +362,9 @@ def check_archives(
     force=None,
 ):
     '''
-    Given a local or remote repository path, a storage config dict, a consistency config dict,
-    local/remote commands to run, whether to include progress information, whether to attempt a
-    repair, and an optional list of checks to use instead of configured checks, check the contained
-    Borg archives for consistency.
+    Given a local or remote repository path, a configuration dict, local/remote commands to run,
+    whether to include progress information, whether to attempt a repair, and an optional list of
+    checks to use instead of configured checks, check the contained Borg archives for consistency.

     If there are no consistency checks to run, skip running them.
@@ -383,7 +374,7 @@ def check_archives(
     borg_repository_id = json.loads(
         rinfo.display_repository_info(
             repository_path,
-            storage_config,
+            config,
             local_borg_version,
             argparse.Namespace(json=True),
             global_arguments,
@@ -394,21 +385,20 @@ def check_archives(
     except (json.JSONDecodeError, KeyError):
         raise ValueError(f'Cannot determine Borg repository ID for {repository_path}')

-    upgrade_check_times(location_config, borg_repository_id)
+    upgrade_check_times(config, borg_repository_id)

-    check_last = consistency_config.get('check_last', None)
-    prefix = consistency_config.get('prefix')
-    configured_checks = parse_checks(consistency_config, only_checks)
+    check_last = config.get('check_last', None)
+    prefix = config.get('prefix')
+    configured_checks = parse_checks(config, only_checks)
     lock_wait = None
-    extra_borg_options = storage_config.get('extra_borg_options', {}).get('check', '')
+    extra_borg_options = config.get('extra_borg_options', {}).get('check', '')
     archive_filter_flags = make_archive_filter_flags(
-        local_borg_version, storage_config, configured_checks, check_last, prefix
+        local_borg_version, config, configured_checks, check_last, prefix
     )
     archives_check_id = make_archives_check_id(archive_filter_flags)

     checks = filter_checks_on_frequency(
-        location_config,
-        consistency_config,
+        config,
         borg_repository_id,
         configured_checks,
         force,
@@ -416,7 +406,7 @@ def check_archives(
     )

     if set(checks).intersection({'repository', 'archives', 'data'}):
-        lock_wait = storage_config.get('lock_wait')
+        lock_wait = config.get('lock_wait')

         verbosity_flags = ()
         if logger.isEnabledFor(logging.INFO):
@@ -437,7 +427,7 @@ def check_archives(
             + flags.make_repository_flags(repository_path, local_borg_version)
         )

-        borg_environment = environment.make_environment(storage_config)
+        borg_environment = environment.make_environment(config)

         # The Borg repair option triggers an interactive prompt, which won't work when output is
         # captured. And progress messes with the terminal directly.
@@ -450,12 +440,12 @@ def check_archives(

         for check in checks:
             write_check_time(
-                make_check_time_path(location_config, borg_repository_id, check, archives_check_id)
+                make_check_time_path(config, borg_repository_id, check, archives_check_id)
             )

     if 'extract' in checks:
         extract.extract_last_archive_dry_run(
-            storage_config,
+            config,
             local_borg_version,
             global_arguments,
             repository_path,
@@ -463,4 +453,4 @@ def check_archives(
             local_path,
             remote_path,
         )
-        write_check_time(make_check_time_path(location_config, borg_repository_id, 'extract'))
+        write_check_time(make_check_time_path(config, borg_repository_id, 'extract'))
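To make the parse_checks() docstring example above concrete, here is the post-change logic lifted
into a runnable snippet (DEFAULT_CHECKS trimmed to two entries, and the "disabled" handling reduced
to its return value):

```python
DEFAULT_CHECKS = ({'name': 'repository'}, {'name': 'archives'})


def parse_checks(config, only_checks=None):
    checks = only_checks or tuple(
        check_config['name'] for check_config in (config.get('checks', None) or DEFAULT_CHECKS)
    )
    checks = tuple(check.lower() for check in checks)
    return () if 'disabled' in checks else checks


print(parse_checks({'checks': [{'name': 'repository'}, {'name': 'archives'}]}))
# ('repository', 'archives')
print(parse_checks({}))  # defaults: ('repository', 'archives')
print(parse_checks({'checks': [{'name': 'disabled'}]}))  # ()
```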

borgmatic/borg/compact.py

@@ -9,7 +9,7 @@ logger = logging.getLogger(__name__)
 def compact_segments(
     dry_run,
     repository_path,
-    storage_config,
+    config,
     local_borg_version,
     global_arguments,
     local_path='borg',
@@ -19,12 +19,12 @@ def compact_segments(
     threshold=None,
 ):
     '''
-    Given dry-run flag, a local or remote repository path, a storage config dict, and the local
-    Borg version, compact the segments in a repository.
+    Given dry-run flag, a local or remote repository path, a configuration dict, and the local Borg
+    version, compact the segments in a repository.
     '''
-    umask = storage_config.get('umask', None)
-    lock_wait = storage_config.get('lock_wait', None)
-    extra_borg_options = storage_config.get('extra_borg_options', {}).get('compact', '')
+    umask = config.get('umask', None)
+    lock_wait = config.get('lock_wait', None)
+    extra_borg_options = config.get('extra_borg_options', {}).get('compact', '')

     full_command = (
         (local_path, 'compact')
@@ -49,5 +49,5 @@ def compact_segments(
         full_command,
         output_log_level=logging.INFO,
         borg_local_path=local_path,
-        extra_environment=environment.make_environment(storage_config),
+        extra_environment=environment.make_environment(config),
     )
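compact_segments() also shows the flag-assembly idiom used throughout these modules: every optional
flag contributes either a tuple or (), so unset options simply vanish from the concatenated command.
Illustration with made-up config values:

```python
config = {'umask': '0077', 'lock_wait': 5}

umask = config.get('umask', None)
lock_wait = config.get('lock_wait', None)

full_command = (
    ('borg', 'compact')
    + (('--umask', str(umask)) if umask else ())
    + (('--lock-wait', str(lock_wait)) if lock_wait else ())
)
print(full_command)
# ('borg', 'compact', '--umask', '0077', '--lock-wait', '5')
```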

borgmatic/borg/create.py

@ -146,12 +146,12 @@ def ensure_files_readable(*filename_lists):
open(file_object).close() open(file_object).close()
def make_pattern_flags(location_config, pattern_filename=None): def make_pattern_flags(config, pattern_filename=None):
''' '''
Given a location config dict with a potential patterns_from option, and a filename containing Given a configuration dict with a potential patterns_from option, and a filename containing any
any additional patterns, return the corresponding Borg flags for those files as a tuple. additional patterns, return the corresponding Borg flags for those files as a tuple.
''' '''
pattern_filenames = tuple(location_config.get('patterns_from') or ()) + ( pattern_filenames = tuple(config.get('patterns_from') or ()) + (
(pattern_filename,) if pattern_filename else () (pattern_filename,) if pattern_filename else ()
) )
@ -162,12 +162,12 @@ def make_pattern_flags(location_config, pattern_filename=None):
) )
def make_exclude_flags(location_config, exclude_filename=None): def make_exclude_flags(config, exclude_filename=None):
''' '''
Given a location config dict with various exclude options, and a filename containing any exclude Given a configuration dict with various exclude options, and a filename containing any exclude
patterns, return the corresponding Borg flags as a tuple. patterns, return the corresponding Borg flags as a tuple.
''' '''
exclude_filenames = tuple(location_config.get('exclude_from') or ()) + ( exclude_filenames = tuple(config.get('exclude_from') or ()) + (
(exclude_filename,) if exclude_filename else () (exclude_filename,) if exclude_filename else ()
) )
exclude_from_flags = tuple( exclude_from_flags = tuple(
@ -175,17 +175,15 @@ def make_exclude_flags(location_config, exclude_filename=None):
('--exclude-from', exclude_filename) for exclude_filename in exclude_filenames ('--exclude-from', exclude_filename) for exclude_filename in exclude_filenames
) )
) )
caches_flag = ('--exclude-caches',) if location_config.get('exclude_caches') else () caches_flag = ('--exclude-caches',) if config.get('exclude_caches') else ()
if_present_flags = tuple( if_present_flags = tuple(
itertools.chain.from_iterable( itertools.chain.from_iterable(
('--exclude-if-present', if_present) ('--exclude-if-present', if_present)
for if_present in location_config.get('exclude_if_present', ()) for if_present in config.get('exclude_if_present', ())
) )
) )
keep_exclude_tags_flags = ( keep_exclude_tags_flags = ('--keep-exclude-tags',) if config.get('keep_exclude_tags') else ()
('--keep-exclude-tags',) if location_config.get('keep_exclude_tags') else () exclude_nodump_flags = ('--exclude-nodump',) if config.get('exclude_nodump') else ()
)
exclude_nodump_flags = ('--exclude-nodump',) if location_config.get('exclude_nodump') else ()
return ( return (
exclude_from_flags exclude_from_flags
@ -294,6 +292,7 @@ def collect_special_file_paths(
capture_stderr=True, capture_stderr=True,
working_directory=working_directory, working_directory=working_directory,
extra_environment=borg_environment, extra_environment=borg_environment,
borg_local_path=local_path,
) )
paths = tuple( paths = tuple(
@ -326,8 +325,7 @@ def check_all_source_directories_exist(source_directories):
def create_archive( def create_archive(
dry_run, dry_run,
repository_path, repository_path,
location_config, config,
storage_config,
local_borg_version, local_borg_version,
global_arguments, global_arguments,
local_path='borg', local_path='borg',
@@ -339,72 +337,70 @@ def create_archive(
     stream_processes=None,
 ):
     '''
-    Given vebosity/dry-run flags, a local or remote repository path, a location config dict, and a
-    storage config dict, create a Borg archive and return Borg's JSON output (if any).
+    Given verbosity/dry-run flags, a local or remote repository path, and a configuration dict,
+    create a Borg archive and return Borg's JSON output (if any).
     If a sequence of stream processes is given (instances of subprocess.Popen), then execute the
     create command while also triggering the given processes to produce output.
     '''
     borgmatic.logger.add_custom_log_levels()
     borgmatic_source_directories = expand_directories(
-        collect_borgmatic_source_directories(location_config.get('borgmatic_source_directory'))
+        collect_borgmatic_source_directories(config.get('borgmatic_source_directory'))
     )
-    if location_config.get('source_directories_must_exist', False):
-        check_all_source_directories_exist(location_config.get('source_directories'))
+    if config.get('source_directories_must_exist', False):
+        check_all_source_directories_exist(config.get('source_directories'))
     sources = deduplicate_directories(
         map_directories_to_devices(
             expand_directories(
-                tuple(location_config.get('source_directories', ()))
+                tuple(config.get('source_directories', ()))
                 + borgmatic_source_directories
                 + tuple(global_arguments.used_config_paths)
             )
         ),
         additional_directory_devices=map_directories_to_devices(
-            expand_directories(pattern_root_directories(location_config.get('patterns')))
+            expand_directories(pattern_root_directories(config.get('patterns')))
         ),
     )
-    ensure_files_readable(location_config.get('patterns_from'), location_config.get('exclude_from'))
+    ensure_files_readable(config.get('patterns_from'), config.get('exclude_from'))
     try:
-        working_directory = os.path.expanduser(location_config.get('working_directory'))
+        working_directory = os.path.expanduser(config.get('working_directory'))
     except TypeError:
         working_directory = None
     pattern_file = (
-        write_pattern_file(location_config.get('patterns'), sources)
-        if location_config.get('patterns') or location_config.get('patterns_from')
+        write_pattern_file(config.get('patterns'), sources)
+        if config.get('patterns') or config.get('patterns_from')
         else None
     )
-    exclude_file = write_pattern_file(
-        expand_home_directories(location_config.get('exclude_patterns'))
-    )
-    checkpoint_interval = storage_config.get('checkpoint_interval', None)
-    checkpoint_volume = storage_config.get('checkpoint_volume', None)
-    chunker_params = storage_config.get('chunker_params', None)
-    compression = storage_config.get('compression', None)
-    upload_rate_limit = storage_config.get('upload_rate_limit', None)
-    umask = storage_config.get('umask', None)
-    lock_wait = storage_config.get('lock_wait', None)
+    exclude_file = write_pattern_file(expand_home_directories(config.get('exclude_patterns')))
+    checkpoint_interval = config.get('checkpoint_interval', None)
+    checkpoint_volume = config.get('checkpoint_volume', None)
+    chunker_params = config.get('chunker_params', None)
+    compression = config.get('compression', None)
+    upload_rate_limit = config.get('upload_rate_limit', None)
+    umask = config.get('umask', None)
+    lock_wait = config.get('lock_wait', None)
     list_filter_flags = make_list_filter_flags(local_borg_version, dry_run)
-    files_cache = location_config.get('files_cache')
-    archive_name_format = storage_config.get('archive_name_format', DEFAULT_ARCHIVE_NAME_FORMAT)
-    extra_borg_options = storage_config.get('extra_borg_options', {}).get('create', '')
+    files_cache = config.get('files_cache')
+    archive_name_format = config.get('archive_name_format', DEFAULT_ARCHIVE_NAME_FORMAT)
+    extra_borg_options = config.get('extra_borg_options', {}).get('create', '')
     if feature.available(feature.Feature.ATIME, local_borg_version):
-        atime_flags = ('--atime',) if location_config.get('atime') is True else ()
+        atime_flags = ('--atime',) if config.get('atime') is True else ()
     else:
-        atime_flags = ('--noatime',) if location_config.get('atime') is False else ()
+        atime_flags = ('--noatime',) if config.get('atime') is False else ()
     if feature.available(feature.Feature.NOFLAGS, local_borg_version):
-        noflags_flags = ('--noflags',) if location_config.get('flags') is False else ()
+        noflags_flags = ('--noflags',) if config.get('flags') is False else ()
     else:
-        noflags_flags = ('--nobsdflags',) if location_config.get('flags') is False else ()
+        noflags_flags = ('--nobsdflags',) if config.get('flags') is False else ()
     if feature.available(feature.Feature.NUMERIC_IDS, local_borg_version):
-        numeric_ids_flags = ('--numeric-ids',) if location_config.get('numeric_ids') else ()
+        numeric_ids_flags = ('--numeric-ids',) if config.get('numeric_ids') else ()
     else:
-        numeric_ids_flags = ('--numeric-owner',) if location_config.get('numeric_ids') else ()
+        numeric_ids_flags = ('--numeric-owner',) if config.get('numeric_ids') else ()
     if feature.available(feature.Feature.UPLOAD_RATELIMIT, local_borg_version):
         upload_ratelimit_flags = (
@@ -415,7 +411,7 @@ def create_archive(
         ('--remote-ratelimit', str(upload_rate_limit)) if upload_rate_limit else ()
     )
-    if stream_processes and location_config.get('read_special') is False:
+    if stream_processes and config.get('read_special') is False:
         logger.warning(
             f'{repository_path}: Ignoring configured "read_special" value of false, as true is needed for database hooks.'
         )
@@ -423,23 +419,19 @@ def create_archive(
     create_command = (
         tuple(local_path.split(' '))
         + ('create',)
-        + make_pattern_flags(location_config, pattern_file.name if pattern_file else None)
-        + make_exclude_flags(location_config, exclude_file.name if exclude_file else None)
+        + make_pattern_flags(config, pattern_file.name if pattern_file else None)
+        + make_exclude_flags(config, exclude_file.name if exclude_file else None)
         + (('--checkpoint-interval', str(checkpoint_interval)) if checkpoint_interval else ())
         + (('--checkpoint-volume', str(checkpoint_volume)) if checkpoint_volume else ())
         + (('--chunker-params', chunker_params) if chunker_params else ())
         + (('--compression', compression) if compression else ())
         + upload_ratelimit_flags
-        + (
-            ('--one-file-system',)
-            if location_config.get('one_file_system') or stream_processes
-            else ()
-        )
+        + (('--one-file-system',) if config.get('one_file_system') or stream_processes else ())
         + numeric_ids_flags
         + atime_flags
-        + (('--noctime',) if location_config.get('ctime') is False else ())
-        + (('--nobirthtime',) if location_config.get('birthtime') is False else ())
-        + (('--read-special',) if location_config.get('read_special') or stream_processes else ())
+        + (('--noctime',) if config.get('ctime') is False else ())
+        + (('--nobirthtime',) if config.get('birthtime') is False else ())
+        + (('--read-special',) if config.get('read_special') or stream_processes else ())
         + noflags_flags
         + (('--files-cache', files_cache) if files_cache else ())
         + (('--remote-path', remote_path) if remote_path else ())
@@ -470,11 +462,11 @@ def create_archive(
     # the terminal directly.
     output_file = DO_NOT_CAPTURE if progress else None
-    borg_environment = environment.make_environment(storage_config)
+    borg_environment = environment.make_environment(config)
     # If database hooks are enabled (as indicated by streaming processes), exclude files that might
     # cause Borg to hang. But skip this if the user has explicitly set the "read_special" to True.
-    if stream_processes and not location_config.get('read_special'):
+    if stream_processes and not config.get('read_special'):
         logger.debug(f'{repository_path}: Collecting special file paths')
         special_file_paths = collect_special_file_paths(
             create_command,
@@ -490,11 +482,11 @@ def create_archive(
         )
         exclude_file = write_pattern_file(
             expand_home_directories(
-                tuple(location_config.get('exclude_patterns') or ()) + special_file_paths
+                tuple(config.get('exclude_patterns') or ()) + special_file_paths
             ),
             pattern_file=exclude_file,
         )
-        create_command += make_exclude_flags(location_config, exclude_file.name)
+        create_command += make_exclude_flags(config, exclude_file.name)
     create_command += (
         (('--info',) if logger.getEffectiveLevel() == logging.INFO and not json else ())
@@ -519,6 +511,7 @@ def create_archive(
             create_command,
             working_directory=working_directory,
             extra_environment=borg_environment,
+            borg_local_path=local_path,
         )
     else:
         execute_command(
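Since create_archive now takes one configuration dict, options that previously lived in separate location and storage sections sit side by side. A hedged illustration, with the keys taken from the lookups above and the values invented:

```python
config = {
    # Formerly under "location:":
    'source_directories': ['/home', '/etc'],
    'one_file_system': True,
    'read_special': False,
    # Formerly under "storage:":
    'compression': 'lz4',
    'checkpoint_interval': 1800,
    'umask': '0077',
}

# Every option in create_archive() is now a single-level lookup:
compression = config.get('compression', None)  # 'lz4'
```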
@@ -21,15 +21,15 @@ DEFAULT_BOOL_OPTION_TO_UPPERCASE_ENVIRONMENT_VARIABLE = {
 }
-def make_environment(storage_config):
+def make_environment(config):
     '''
-    Given a borgmatic storage configuration dict, return its options converted to a Borg environment
+    Given a borgmatic configuration dict, return its options converted to a Borg environment
     variable dict.
     '''
     environment = {}
     for option_name, environment_variable_name in OPTION_TO_ENVIRONMENT_VARIABLE.items():
-        value = storage_config.get(option_name)
+        value = config.get(option_name)
         if value:
             environment[environment_variable_name] = str(value)
@@ -38,14 +38,14 @@ def make_environment(storage_config):
         option_name,
         environment_variable_name,
     ) in DEFAULT_BOOL_OPTION_TO_DOWNCASE_ENVIRONMENT_VARIABLE.items():
-        value = storage_config.get(option_name, False)
+        value = config.get(option_name, False)
         environment[environment_variable_name] = 'yes' if value else 'no'
     for (
         option_name,
         environment_variable_name,
     ) in DEFAULT_BOOL_OPTION_TO_UPPERCASE_ENVIRONMENT_VARIABLE.items():
-        value = storage_config.get(option_name, False)
+        value = config.get(option_name, False)
         environment[environment_variable_name] = 'YES' if value else 'NO'
     return environment
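A minimal sketch of the renamed make_environment in use. The option-to-variable mappings in the comments are assumptions about the module's mapping tables rather than something shown in this diff:

```python
from borgmatic.borg.environment import make_environment  # assumed module path

config = {
    'encryption_passphrase': 'trustsome1',  # assumed to map to BORG_PASSPHRASE
    'relocated_repo_access_is_ok': True,    # assumed to map to a yes/no boolean variable
}

environment = make_environment(config)
# Expected shape (unverified):
# {'BORG_PASSPHRASE': 'trustsome1', 'BORG_RELOCATED_REPO_ACCESS_IS_OK': 'yes', ...}
```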
@@ -13,7 +13,7 @@ def export_tar_archive(
     archive,
     paths,
     destination_path,
-    storage_config,
+    config,
     local_borg_version,
     global_arguments,
     local_path='borg',
@@ -24,16 +24,16 @@ def export_tar_archive(
 ):
     '''
     Given a dry-run flag, a local or remote repository path, an archive name, zero or more paths to
-    export from the archive, a destination path to export to, a storage configuration dict, the
-    local Borg version, optional local and remote Borg paths, an optional filter program, whether to
-    include per-file details, and an optional number of path components to strip, export the archive
-    into the given destination path as a tar-formatted file.
+    export from the archive, a destination path to export to, a configuration dict, the local Borg
+    version, optional local and remote Borg paths, an optional filter program, whether to include
+    per-file details, and an optional number of path components to strip, export the archive into
+    the given destination path as a tar-formatted file.
     If the destination path is "-", then stream the output to stdout instead of to a file.
     '''
     borgmatic.logger.add_custom_log_levels()
-    umask = storage_config.get('umask', None)
-    lock_wait = storage_config.get('lock_wait', None)
+    umask = config.get('umask', None)
+    lock_wait = config.get('lock_wait', None)
     full_command = (
         (local_path, 'export-tar')
@@ -70,5 +70,5 @@ def export_tar_archive(
         output_file=DO_NOT_CAPTURE if destination_path == '-' else None,
         output_log_level=output_log_level,
         borg_local_path=local_path,
-        extra_environment=environment.make_environment(storage_config),
+        extra_environment=environment.make_environment(config),
     )
@@ -10,7 +10,7 @@ logger = logging.getLogger(__name__)
 def extract_last_archive_dry_run(
-    storage_config,
+    config,
     local_borg_version,
     global_arguments,
     repository_path,
@@ -32,7 +32,7 @@ def extract_last_archive_dry_run(
     last_archive_name = rlist.resolve_archive_name(
         repository_path,
         'latest',
-        storage_config,
+        config,
         local_borg_version,
         global_arguments,
         local_path,
@@ -43,7 +43,7 @@ def extract_last_archive_dry_run(
         return
     list_flag = ('--list',) if logger.isEnabledFor(logging.DEBUG) else ()
-    borg_environment = environment.make_environment(storage_config)
+    borg_environment = environment.make_environment(config)
     full_extract_command = (
         (local_path, 'extract', '--dry-run')
         + (('--remote-path', remote_path) if remote_path else ())
@@ -66,8 +66,7 @@ def extract_archive(
     repository,
     archive,
     paths,
-    location_config,
-    storage_config,
+    config,
     local_borg_version,
     global_arguments,
     local_path='borg',
@@ -80,22 +79,22 @@ def extract_archive(
     '''
     Given a dry-run flag, a local or remote repository path, an archive name, zero or more paths to
     restore from the archive, the local Borg version string, an argparse.Namespace of global
-    arguments, location/storage configuration dicts, optional local and remote Borg paths, and an
-    optional destination path to extract to, extract the archive into the current directory.
+    arguments, a configuration dict, optional local and remote Borg paths, and an optional
+    destination path to extract to, extract the archive into the current directory.
     If extract to stdout is True, then start the extraction streaming to stdout, and return that
     extract process as an instance of subprocess.Popen.
     '''
-    umask = storage_config.get('umask', None)
-    lock_wait = storage_config.get('lock_wait', None)
+    umask = config.get('umask', None)
+    lock_wait = config.get('lock_wait', None)
     if progress and extract_to_stdout:
         raise ValueError('progress and extract_to_stdout cannot both be set')
     if feature.available(feature.Feature.NUMERIC_IDS, local_borg_version):
-        numeric_ids_flags = ('--numeric-ids',) if location_config.get('numeric_ids') else ()
+        numeric_ids_flags = ('--numeric-ids',) if config.get('numeric_ids') else ()
     else:
-        numeric_ids_flags = ('--numeric-owner',) if location_config.get('numeric_ids') else ()
+        numeric_ids_flags = ('--numeric-owner',) if config.get('numeric_ids') else ()
     if strip_components == 'all':
         if not paths:
@@ -127,7 +126,7 @@ def extract_archive(
         + (tuple(paths) if paths else ())
     )
-    borg_environment = environment.make_environment(storage_config)
+    borg_environment = environment.make_environment(config)
     # The progress output isn't compatible with captured and logged output, as progress messes with
     # the terminal directly.
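The numeric_ids handling above follows the same version-gating pattern as create_archive; distilled into a hypothetical helper for clarity (the helper name is invented, but feature.available and the Feature enum appear throughout this diff):

```python
from borgmatic.borg import feature

def make_numeric_ids_flags(config, local_borg_version):
    '''
    Hypothetical helper: newer Borg spells the flag --numeric-ids, while older
    Borg uses --numeric-owner, so emit whichever the local Borg understands.
    '''
    if feature.available(feature.Feature.NUMERIC_IDS, local_borg_version):
        return ('--numeric-ids',) if config.get('numeric_ids') else ()

    return ('--numeric-owner',) if config.get('numeric_ids') else ()
```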
@@ -9,7 +9,7 @@ logger = logging.getLogger(__name__)
 def display_archives_info(
     repository_path,
-    storage_config,
+    config,
     local_borg_version,
     info_arguments,
     global_arguments,
@@ -17,12 +17,12 @@ def display_archives_info(
     remote_path=None,
 ):
     '''
-    Given a local or remote repository path, a storage config dict, the local Borg version, global
+    Given a local or remote repository path, a configuration dict, the local Borg version, global
     arguments as an argparse.Namespace, and the arguments to the info action, display summary
     information for Borg archives in the repository or return JSON summary information.
     '''
     borgmatic.logger.add_custom_log_levels()
-    lock_wait = storage_config.get('lock_wait', None)
+    lock_wait = config.get('lock_wait', None)
     full_command = (
         (local_path, 'info')
@@ -50,8 +50,8 @@ def display_archives_info(
             flags.make_match_archives_flags(
                 info_arguments.match_archives
                 or info_arguments.archive
-                or storage_config.get('match_archives'),
-                storage_config.get('archive_name_format'),
+                or config.get('match_archives'),
+                config.get('archive_name_format'),
                 local_borg_version,
             )
         )
@@ -65,12 +65,13 @@ def display_archives_info(
     if info_arguments.json:
         return execute_command_and_capture_output(
             full_command,
-            extra_environment=environment.make_environment(storage_config),
+            extra_environment=environment.make_environment(config),
+            borg_local_path=local_path,
         )
     else:
         execute_command(
             full_command,
             output_log_level=logging.ANSWER,
             borg_local_path=local_path,
-            extra_environment=environment.make_environment(storage_config),
+            extra_environment=environment.make_environment(config),
         )
@@ -21,7 +21,7 @@ MAKE_FLAGS_EXCLUDES = (
 def make_list_command(
     repository_path,
-    storage_config,
+    config,
     local_borg_version,
     list_arguments,
     global_arguments,
@@ -29,11 +29,11 @@ def make_list_command(
     remote_path=None,
 ):
     '''
-    Given a local or remote repository path, a storage config dict, the arguments to the list
-    action, and local and remote Borg paths, return a command as a tuple to list archives or paths
-    within an archive.
+    Given a local or remote repository path, a configuration dict, the arguments to the list action,
+    and local and remote Borg paths, return a command as a tuple to list archives or paths within an
+    archive.
     '''
-    lock_wait = storage_config.get('lock_wait', None)
+    lock_wait = config.get('lock_wait', None)
     return (
         (local_path, 'list')
@@ -89,7 +89,7 @@ def make_find_paths(find_paths):
 def capture_archive_listing(
     repository_path,
     archive,
-    storage_config,
+    config,
     local_borg_version,
     global_arguments,
     list_path=None,
@@ -97,18 +97,18 @@ def capture_archive_listing(
     remote_path=None,
 ):
     '''
-    Given a local or remote repository path, an archive name, a storage config dict, the local Borg
+    Given a local or remote repository path, an archive name, a configuration dict, the local Borg
     version, global arguments as an argparse.Namespace, the archive path in which to list files, and
     local and remote Borg paths, capture the output of listing that archive and return it as a list
     of file paths.
     '''
-    borg_environment = environment.make_environment(storage_config)
+    borg_environment = environment.make_environment(config)
     return tuple(
         execute_command_and_capture_output(
             make_list_command(
                 repository_path,
-                storage_config,
+                config,
                 local_borg_version,
                 argparse.Namespace(
                     repository=repository_path,
@@ -123,6 +123,7 @@ def capture_archive_listing(
                 remote_path,
             ),
             extra_environment=borg_environment,
+            borg_local_path=local_path,
         )
         .strip('\n')
         .split('\n')
@@ -131,7 +132,7 @@ def capture_archive_listing(
 def list_archive(
     repository_path,
-    storage_config,
+    config,
     local_borg_version,
     list_arguments,
     global_arguments,
@@ -139,7 +140,7 @@ def list_archive(
     remote_path=None,
 ):
     '''
-    Given a local or remote repository path, a storage config dict, the local Borg version, global
+    Given a local or remote repository path, a configuration dict, the local Borg version, global
     arguments as an argparse.Namespace, the arguments to the list action as an argparse.Namespace,
     and local and remote Borg paths, display the output of listing the files of a Borg archive (or
     return JSON output). If list_arguments.find_paths are given, list the files by searching across
@@ -167,7 +168,7 @@ def list_archive(
         )
         return rlist.list_repository(
             repository_path,
-            storage_config,
+            config,
             local_borg_version,
             rlist_arguments,
             global_arguments,
@@ -187,7 +188,7 @@ def list_archive(
             'The --json flag on the list action is not supported when using the --archive/--find flags.'
         )
-    borg_environment = environment.make_environment(storage_config)
+    borg_environment = environment.make_environment(config)
     # If there are any paths to find (and there's not a single archive already selected), start by
     # getting a list of archives to search.
@@ -209,7 +210,7 @@ def list_archive(
             execute_command_and_capture_output(
                 rlist.make_rlist_command(
                     repository_path,
-                    storage_config,
+                    config,
                     local_borg_version,
                     rlist_arguments,
                     global_arguments,
@@ -217,6 +218,7 @@ def list_archive(
                     remote_path,
                 ),
                 extra_environment=borg_environment,
+                borg_local_path=local_path,
             )
             .strip('\n')
             .split('\n')
@@ -238,7 +240,7 @@ def list_archive(
     main_command = make_list_command(
         repository_path,
-        storage_config,
+        config,
         local_borg_version,
         archive_arguments,
         global_arguments,
@@ -10,7 +10,7 @@ def mount_archive(
     repository_path,
     archive,
     mount_arguments,
-    storage_config,
+    config,
     local_borg_version,
     global_arguments,
     local_path='borg',
@@ -22,8 +22,8 @@ def mount_archive(
     dict, the local Borg version, global arguments as an argparse.Namespace instance, and optional
     local and remote Borg paths, mount the archive onto the mount point.
     '''
-    umask = storage_config.get('umask', None)
-    lock_wait = storage_config.get('lock_wait', None)
+    umask = config.get('umask', None)
+    lock_wait = config.get('lock_wait', None)
     full_command = (
         (local_path, 'mount')
@@ -58,7 +58,7 @@ def mount_archive(
         + (tuple(mount_arguments.paths) if mount_arguments.paths else ())
     )
-    borg_environment = environment.make_environment(storage_config)
+    borg_environment = environment.make_environment(config)
     # Don't capture the output when foreground mode is used so that ctrl-C can work properly.
     if mount_arguments.foreground:
@@ -7,9 +7,9 @@ from borgmatic.execute import execute_command
 logger = logging.getLogger(__name__)
-def make_prune_flags(storage_config, retention_config, local_borg_version):
+def make_prune_flags(config, local_borg_version):
     '''
-    Given a retention config dict mapping from option name to value, transform it into an sequence of
+    Given a configuration dict mapping from option name to value, transform it into a sequence of
     command-line flags.
     For example, given a retention config of:
@@ -23,12 +23,12 @@ def make_prune_flags(storage_config, retention_config, local_borg_version):
         ('--keep-monthly', '6'),
     )
     '''
-    config = retention_config.copy()
-    prefix = config.pop('prefix', None)
     flag_pairs = (
-        ('--' + option_name.replace('_', '-'), str(value)) for option_name, value in config.items()
+        ('--' + option_name.replace('_', '-'), str(value))
+        for option_name, value in config.items()
+        if option_name.startswith('keep_')
     )
+    prefix = config.get('prefix')
     return tuple(element for pair in flag_pairs for element in pair) + (
         (
@@ -39,8 +39,8 @@ def make_prune_flags(storage_config, retention_config, local_borg_version):
             if prefix
             else (
                 flags.make_match_archives_flags(
-                    storage_config.get('match_archives'),
-                    storage_config.get('archive_name_format'),
+                    config.get('match_archives'),
+                    config.get('archive_name_format'),
                     local_borg_version,
                 )
             )
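With the retention section flattened away, make_prune_flags now has to pick the keep_* options out of the whole configuration dict itself, which is what the startswith('keep_') filter above does. A hedged sketch of the expected behavior:

```python
from borgmatic.borg.prune import make_prune_flags  # assumed module path

config = {
    'keep_daily': 7,
    'keep_weekly': 4,
    'keep_monthly': 6,
    'compression': 'lz4',  # ignored: not a keep_* option
}

# Ignoring the match-archives flags appended above, this should yield (unverified):
# ('--keep-daily', '7', '--keep-weekly', '4', '--keep-monthly', '6')
flags = make_prune_flags(config, local_borg_version='1.2.4')
```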
@@ -50,8 +50,7 @@ def make_prune_flags(storage_config, retention_config, local_borg_version):
 def prune_archives(
     dry_run,
     repository_path,
-    storage_config,
-    retention_config,
+    config,
     local_borg_version,
     prune_arguments,
     global_arguments,
@@ -59,18 +58,17 @@ def prune_archives(
     remote_path=None,
 ):
     '''
-    Given dry-run flag, a local or remote repository path, a storage config dict, and a
-    retention config dict, prune Borg archives according to the retention policy specified in that
-    configuration.
+    Given a dry-run flag, a local or remote repository path, and a configuration dict, prune Borg
+    archives according to the retention policy specified in that configuration.
     '''
     borgmatic.logger.add_custom_log_levels()
-    umask = storage_config.get('umask', None)
-    lock_wait = storage_config.get('lock_wait', None)
-    extra_borg_options = storage_config.get('extra_borg_options', {}).get('prune', '')
+    umask = config.get('umask', None)
+    lock_wait = config.get('lock_wait', None)
+    extra_borg_options = config.get('extra_borg_options', {}).get('prune', '')
     full_command = (
         (local_path, 'prune')
-        + make_prune_flags(storage_config, retention_config, local_borg_version)
+        + make_prune_flags(config, local_borg_version)
         + (('--remote-path', remote_path) if remote_path else ())
         + (('--umask', str(umask)) if umask else ())
         + (('--log-json',) if global_arguments.log_json else ())
@@ -97,5 +95,5 @@ def prune_archives(
         full_command,
         output_log_level=output_log_level,
         borg_local_path=local_path,
-        extra_environment=environment.make_environment(storage_config),
+        extra_environment=environment.make_environment(config),
     )
@@ -14,7 +14,7 @@ RINFO_REPOSITORY_NOT_FOUND_EXIT_CODE = 2
 def create_repository(
     dry_run,
     repository_path,
-    storage_config,
+    config,
     local_borg_version,
     global_arguments,
     encryption_mode,
@@ -27,15 +27,15 @@ def create_repository(
     remote_path=None,
 ):
     '''
-    Given a dry-run flag, a local or remote repository path, a storage configuration dict, the local
-    Borg version, a Borg encryption mode, the path to another repo whose key material should be
-    reused, whether the repository should be append-only, and the storage quota to use, create the
+    Given a dry-run flag, a local or remote repository path, a configuration dict, the local Borg
+    version, a Borg encryption mode, the path to another repo whose key material should be reused,
+    whether the repository should be append-only, and the storage quota to use, create the
     repository. If the repository already exists, then log and skip creation.
     '''
     try:
         rinfo.display_repository_info(
             repository_path,
-            storage_config,
+            config,
             local_borg_version,
             argparse.Namespace(json=True),
             global_arguments,
@@ -48,8 +48,8 @@ def create_repository(
         if error.returncode != RINFO_REPOSITORY_NOT_FOUND_EXIT_CODE:
             raise
-    lock_wait = storage_config.get('lock_wait')
-    extra_borg_options = storage_config.get('extra_borg_options', {}).get('rcreate', '')
+    lock_wait = config.get('lock_wait')
+    extra_borg_options = config.get('extra_borg_options', {}).get('rcreate', '')
     rcreate_command = (
         (local_path,)
@@ -82,5 +82,5 @@ def create_repository(
         rcreate_command,
         output_file=DO_NOT_CAPTURE,
         borg_local_path=local_path,
-        extra_environment=environment.make_environment(storage_config),
+        extra_environment=environment.make_environment(config),
     )
@@ -9,7 +9,7 @@ logger = logging.getLogger(__name__)
 def display_repository_info(
     repository_path,
-    storage_config,
+    config,
     local_borg_version,
     rinfo_arguments,
     global_arguments,
@@ -17,12 +17,12 @@ def display_repository_info(
     remote_path=None,
 ):
     '''
-    Given a local or remote repository path, a storage config dict, the local Borg version, the
+    Given a local or remote repository path, a configuration dict, the local Borg version, the
     arguments to the rinfo action, and global arguments as an argparse.Namespace, display summary
     information for the Borg repository or return JSON summary information.
     '''
     borgmatic.logger.add_custom_log_levels()
-    lock_wait = storage_config.get('lock_wait', None)
+    lock_wait = config.get('lock_wait', None)
     full_command = (
         (local_path,)
@@ -48,12 +48,13 @@ def display_repository_info(
         + flags.make_repository_flags(repository_path, local_borg_version)
     )
-    extra_environment = environment.make_environment(storage_config)
+    extra_environment = environment.make_environment(config)
     if rinfo_arguments.json:
         return execute_command_and_capture_output(
             full_command,
             extra_environment=extra_environment,
+            borg_local_path=local_path,
         )
     else:
         execute_command(
@@ -10,14 +10,14 @@ logger = logging.getLogger(__name__)
 def resolve_archive_name(
     repository_path,
     archive,
-    storage_config,
+    config,
     local_borg_version,
     global_arguments,
     local_path='borg',
     remote_path=None,
 ):
     '''
-    Given a local or remote repository path, an archive name, a storage config dict, the local Borg
+    Given a local or remote repository path, an archive name, a configuration dict, the local Borg
     version, global arguments as an argparse.Namespace, a local Borg path, and a remote Borg path,
     return the archive name. But if the archive name is "latest", then instead introspect the
     repository for the latest archive and return its name.
@@ -34,7 +34,7 @@ def resolve_archive_name(
         )
         + flags.make_flags('remote-path', remote_path)
         + flags.make_flags('log-json', global_arguments.log_json)
-        + flags.make_flags('lock-wait', storage_config.get('lock_wait'))
+        + flags.make_flags('lock-wait', config.get('lock_wait'))
         + flags.make_flags('last', 1)
         + ('--short',)
         + flags.make_repository_flags(repository_path, local_borg_version)
@@ -42,7 +42,8 @@ def resolve_archive_name(
     output = execute_command_and_capture_output(
         full_command,
-        extra_environment=environment.make_environment(storage_config),
+        extra_environment=environment.make_environment(config),
+        borg_local_path=local_path,
     )
     try:
         latest_archive = output.strip().splitlines()[-1]
@@ -59,7 +60,7 @@ MAKE_FLAGS_EXCLUDES = ('repository', 'prefix', 'match_archives')
 def make_rlist_command(
     repository_path,
-    storage_config,
+    config,
     local_borg_version,
     rlist_arguments,
     global_arguments,
@@ -67,7 +68,7 @@ def make_rlist_command(
     remote_path=None,
 ):
     '''
-    Given a local or remote repository path, a storage config dict, the local Borg version, the
+    Given a local or remote repository path, a configuration dict, the local Borg version, the
     arguments to the rlist action, global arguments as an argparse.Namespace instance, and local and
     remote Borg paths, return a command as a tuple to list archives with a repository.
     '''
@@ -88,7 +89,7 @@ def make_rlist_command(
         )
         + flags.make_flags('remote-path', remote_path)
         + flags.make_flags('log-json', global_arguments.log_json)
-        + flags.make_flags('lock-wait', storage_config.get('lock_wait'))
+        + flags.make_flags('lock-wait', config.get('lock_wait'))
         + (
             (
                 flags.make_flags('match-archives', f'sh:{rlist_arguments.prefix}*')
@@ -98,8 +99,8 @@ def make_rlist_command(
             if rlist_arguments.prefix
             else (
                 flags.make_match_archives_flags(
-                    rlist_arguments.match_archives or storage_config.get('match_archives'),
-                    storage_config.get('archive_name_format'),
+                    rlist_arguments.match_archives or config.get('match_archives'),
+                    config.get('archive_name_format'),
                     local_borg_version,
                 )
             )
@@ -111,7 +112,7 @@ def make_rlist_command(
 def list_repository(
     repository_path,
-    storage_config,
+    config,
     local_borg_version,
     rlist_arguments,
     global_arguments,
@@ -119,17 +120,17 @@ def list_repository(
     remote_path=None,
 ):
     '''
-    Given a local or remote repository path, a storage config dict, the local Borg version, the
+    Given a local or remote repository path, a configuration dict, the local Borg version, the
     arguments to the list action, global arguments as an argparse.Namespace instance, and local and
     remote Borg paths, display the output of listing Borg archives in the given repository (or
     return JSON output).
     '''
     borgmatic.logger.add_custom_log_levels()
-    borg_environment = environment.make_environment(storage_config)
+    borg_environment = environment.make_environment(config)
     main_command = make_rlist_command(
         repository_path,
-        storage_config,
+        config,
         local_borg_version,
         rlist_arguments,
         global_arguments,
@@ -138,7 +139,9 @@ def list_repository(
     )
     if rlist_arguments.json:
-        return execute_command_and_capture_output(main_command, extra_environment=borg_environment)
+        return execute_command_and_capture_output(
+            main_command, extra_environment=borg_environment, borg_local_path=local_path
+        )
     else:
         execute_command(
             main_command,
@@ -10,7 +10,7 @@ logger = logging.getLogger(__name__)
 def transfer_archives(
     dry_run,
     repository_path,
-    storage_config,
+    config,
     local_borg_version,
     transfer_arguments,
     global_arguments,
@@ -18,7 +18,7 @@ def transfer_archives(
     remote_path=None,
 ):
     '''
-    Given a dry-run flag, a local or remote repository path, a storage config dict, the local Borg
+    Given a dry-run flag, a local or remote repository path, a configuration dict, the local Borg
     version, the arguments to the transfer action, and global arguments as an argparse.Namespace
     instance, transfer archives to the given repository.
     '''
@@ -30,7 +30,7 @@ def transfer_archives(
         + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ())
         + flags.make_flags('remote-path', remote_path)
         + flags.make_flags('log-json', global_arguments.log_json)
-        + flags.make_flags('lock-wait', storage_config.get('lock_wait', None))
+        + flags.make_flags('lock-wait', config.get('lock_wait', None))
         + (
             flags.make_flags_from_arguments(
                 transfer_arguments,
@@ -40,8 +40,8 @@ def transfer_archives(
             flags.make_match_archives_flags(
                 transfer_arguments.match_archives
                 or transfer_arguments.archive
-                or storage_config.get('match_archives'),
-                storage_config.get('archive_name_format'),
+                or config.get('match_archives'),
+                config.get('archive_name_format'),
                 local_borg_version,
             )
         )
@@ -56,5 +56,5 @@ def transfer_archives(
         output_log_level=logging.ANSWER,
         output_file=DO_NOT_CAPTURE if transfer_arguments.progress else None,
         borg_local_path=local_path,
-        extra_environment=environment.make_environment(storage_config),
+        extra_environment=environment.make_environment(config),
     )
@@ -6,9 +6,9 @@ from borgmatic.execute import execute_command_and_capture_output
 logger = logging.getLogger(__name__)
-def local_borg_version(storage_config, local_path='borg'):
+def local_borg_version(config, local_path='borg'):
     '''
-    Given a storage configuration dict and a local Borg binary path, return a version string for it.
+    Given a configuration dict and a local Borg binary path, return a version string for it.
     Raise OSError or CalledProcessError if there is a problem running Borg.
     Raise ValueError if the version cannot be parsed.
@@ -20,7 +20,8 @@ def local_borg_version(storage_config, local_path='borg'):
     )
     output = execute_command_and_capture_output(
         full_command,
-        extra_environment=environment.make_environment(storage_config),
+        extra_environment=environment.make_environment(config),
+        borg_local_path=local_path,
     )
     try:
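For context, a sketch of the parsing this function presumably does with the captured output; the output shape and the split shown are assumptions consistent with the ValueError mentioned in the docstring, not code from this diff:

```python
output = 'borg 1.2.4'  # assumed shape of `borg --version` output

try:
    version = output.split(' ')[1].strip()  # '1.2.4'
except IndexError:
    raise ValueError('Could not parse Borg version string')
```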
@@ -1,7 +1,7 @@
 import collections
 import itertools
 import sys
-from argparse import Action, ArgumentParser
+from argparse import ArgumentParser
 from borgmatic.config import collect
@@ -216,42 +216,12 @@ def parse_arguments_for_actions(unparsed_arguments, action_parsers, global_parser):
     arguments['global'], remaining = global_parser.parse_known_args(unparsed_arguments)
     remaining_action_arguments.append(remaining)
-    # Prevent action names and arguments that follow "--config" paths from being considered as
-    # additional paths.
-    for argument_name in arguments.keys():
-        if argument_name == 'global':
-            continue
-        for action_name in [argument_name] + ACTION_ALIASES.get(argument_name, []):
-            try:
-                action_name_index = arguments['global'].config_paths.index(action_name)
-                arguments['global'].config_paths = arguments['global'].config_paths[
-                    :action_name_index
-                ]
-                break
-            except ValueError:
-                pass
     return (
         arguments,
         tuple(remaining_action_arguments) if arguments else unparsed_arguments,
     )
-class Extend_action(Action):
-    '''
-    An argparse action to support Python 3.8's "extend" action in older versions of Python.
-    '''
-    def __call__(self, parser, namespace, values, option_string=None):
-        items = getattr(namespace, self.dest, None)
-        if items:
-            items.extend(values)  # pragma: no cover
-        else:
-            setattr(namespace, self.dest, list(values))
 def make_parsers():
     '''
     Build a global arguments parser, individual action parsers, and a combined parser containing
@@ -263,16 +233,14 @@ def make_parsers():
     unexpanded_config_paths = collect.get_default_config_paths(expand_home=False)
     global_parser = ArgumentParser(add_help=False)
-    global_parser.register('action', 'extend', Extend_action)
     global_group = global_parser.add_argument_group('global arguments')
     global_group.add_argument(
         '-c',
         '--config',
-        nargs='*',
         dest='config_paths',
-        default=config_paths,
-        help=f"Configuration filenames or directories, defaults to: {' '.join(unexpanded_config_paths)}",
+        action='append',
+        help=f"Configuration filename or directory, can specify flag multiple times, defaults to: {' '.join(unexpanded_config_paths)}",
     )
     global_group.add_argument(
         '-n',
@@ -330,11 +298,10 @@ def make_parsers():
     )
     global_group.add_argument(
         '--override',
-        metavar='SECTION.OPTION=VALUE',
-        nargs='+',
+        metavar='OPTION.SUBOPTION=VALUE',
         dest='overrides',
-        action='extend',
-        help='One or more configuration file options to override with specified values',
+        action='append',
+        help='Configuration file option to override with specified value, can specify flag multiple times',
     )
     global_group.add_argument(
         '--no-environment-interpolation',
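Both --config and --override now lean on argparse's stock append action instead of nargs plus the custom extend action removed above. A self-contained sketch of the resulting one-flag-per-value semantics:

```python
from argparse import ArgumentParser

parser = ArgumentParser()
parser.add_argument('-c', '--config', dest='config_paths', action='append')
parser.add_argument('--override', dest='overrides', action='append')

args = parser.parse_args(
    ['--config', 'first.yaml', '--config', 'second.yaml', '--override', 'compression=zstd']
)
print(args.config_paths)  # ['first.yaml', 'second.yaml']
print(args.overrides)     # ['compression=zstd']
```

With append and no default, config_paths is None when the flag is absent, which is why parse_arguments below falls back to collect.get_default_config_paths() instead of baking the default into the parser.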
@@ -672,9 +639,9 @@ def make_parsers():
         '--path',
         '--restore-path',
         metavar='PATH',
-        nargs='+',
         dest='paths',
-        help='Paths to extract from archive, defaults to the entire archive',
+        action='append',
+        help='Path to extract from archive, can specify flag multiple times, defaults to the entire archive',
     )
     extract_group.add_argument(
         '--destination',
@@ -826,9 +793,9 @@ def make_parsers():
     export_tar_group.add_argument(
         '--path',
         metavar='PATH',
-        nargs='+',
         dest='paths',
-        help='Paths to export from archive, defaults to the entire archive',
+        action='append',
+        help='Path to export from archive, can specify flag multiple times, defaults to the entire archive',
     )
     export_tar_group.add_argument(
         '--destination',
@@ -877,9 +844,9 @@ def make_parsers():
     mount_group.add_argument(
         '--path',
         metavar='PATH',
-        nargs='+',
         dest='paths',
-        help='Paths to mount from archive, defaults to the entire archive',
+        action='append',
+        help='Path to mount from archive, can specify multiple times, defaults to the entire archive',
     )
     mount_group.add_argument(
         '--foreground',
@@ -954,16 +921,16 @@ def make_parsers():
     restore_group.add_argument(
         '--database',
         metavar='NAME',
-        nargs='+',
         dest='databases',
-        help="Names of databases to restore from archive, defaults to all databases. Note that any databases to restore must be defined in borgmatic's configuration",
+        action='append',
+        help="Name of database to restore from archive, must be defined in borgmatic's configuration, can specify flag multiple times, defaults to all databases",
     )
     restore_group.add_argument(
         '--schema',
         metavar='NAME',
-        nargs='+',
         dest='schemas',
-        help='Names of schemas to restore from the database, defaults to all schemas. Schemas are only supported for PostgreSQL and MongoDB databases',
+        action='append',
+        help='Name of schema to restore from the database, can specify flag multiple times, defaults to all schemas. Schemas are only supported for PostgreSQL and MongoDB databases',
     )
     restore_group.add_argument(
         '--hostname',
@@ -1065,16 +1032,16 @@ def make_parsers():
     list_group.add_argument(
         '--path',
         metavar='PATH',
-        nargs='+',
         dest='paths',
-        help='Paths or patterns to list from a single selected archive (via "--archive"), defaults to listing the entire archive',
+        action='append',
+        help='Path or pattern to list from a single selected archive (via "--archive"), can specify flag multiple times, defaults to listing the entire archive',
    )
     list_group.add_argument(
         '--find',
         metavar='PATH',
-        nargs='+',
         dest='find_paths',
-        help='Partial paths or patterns to search for and list across multiple archives',
+        action='append',
+        help='Partial path or pattern to search for and list across multiple archives, can specify flag multiple times',
     )
     list_group.add_argument(
         '--short', default=False, action='store_true', help='Output only path names'
@@ -1248,6 +1215,9 @@ def parse_arguments(*unparsed_arguments):
         unparsed_arguments, action_parsers.choices, global_parser
     )
+    if not arguments['global'].config_paths:
+        arguments['global'].config_paths = collect.get_default_config_paths(expand_home=True)
     for action_name in ('bootstrap', 'generate', 'validate'):
         if (
             action_name in arguments.keys() and len(arguments.keys()) > 2
@@ -58,16 +58,12 @@ def run_configuration(config_filename, config, arguments):
       * JSON output strings from successfully executing any actions that produce JSON
       * logging.LogRecord instances containing errors from any actions or backup hooks that fail
     '''
-    (location, storage, retention, consistency, hooks) = (
-        config.get(section_name, {})
-        for section_name in ('location', 'storage', 'retention', 'consistency', 'hooks')
-    )
     global_arguments = arguments['global']
-    local_path = location.get('local_path', 'borg')
-    remote_path = location.get('remote_path')
-    retries = storage.get('retries', 0)
-    retry_wait = storage.get('retry_wait', 0)
+    local_path = config.get('local_path', 'borg')
+    remote_path = config.get('remote_path')
+    retries = config.get('retries', 0)
+    retry_wait = config.get('retry_wait', 0)
     encountered_error = None
     error_repository = ''
     using_primary_action = {'create', 'prune', 'compact', 'check'}.intersection(arguments)
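The effect of dropping the section split, in miniature (both dicts invented for illustration):

```python
# Before: options were fetched from per-section sub-dicts.
sectioned_config = {'storage': {'retries': 2}, 'location': {'local_path': 'borg1'}}
retries = sectioned_config.get('storage', {}).get('retries', 0)

# After: every option lives at the top level of one configuration dict.
flat_config = {'retries': 2, 'local_path': 'borg1'}
retries = flat_config.get('retries', 0)
```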
@@ -75,7 +71,7 @@ def run_configuration(config_filename, config, arguments):
     monitoring_hooks_are_activated = using_primary_action and monitoring_log_level != DISABLED
     try:
-        local_borg_version = borg_version.local_borg_version(storage, local_path)
+        local_borg_version = borg_version.local_borg_version(config, local_path)
     except (OSError, CalledProcessError, ValueError) as error:
         yield from log_error_records(f'{config_filename}: Error getting local Borg version', error)
         return
@@ -84,7 +80,7 @@ def run_configuration(config_filename, config, arguments):
         if monitoring_hooks_are_activated:
             dispatch.call_hooks(
                 'initialize_monitor',
-                hooks,
+                config,
                 config_filename,
                 monitor.MONITOR_HOOK_NAMES,
                 monitoring_log_level,
@@ -93,7 +89,7 @@ def run_configuration(config_filename, config, arguments):
             dispatch.call_hooks(
                 'ping_monitor',
-                hooks,
+                config,
                 config_filename,
                 monitor.MONITOR_HOOK_NAMES,
                 monitor.State.START,
@@ -109,7 +105,7 @@ def run_configuration(config_filename, config, arguments):
     if not encountered_error:
         repo_queue = Queue()
-        for repo in location['repositories']:
+        for repo in config['repositories']:
            repo_queue.put(
                (repo, 0),
            )
@@ -129,11 +125,7 @@ def run_configuration(config_filename, config, arguments):
                yield from run_actions(
                    arguments=arguments,
                    config_filename=config_filename,
-                    location=location,
-                    storage=storage,
-                    retention=retention,
-                    consistency=consistency,
-                    hooks=hooks,
+                    config=config,
                    local_path=local_path,
                    remote_path=remote_path,
                    local_borg_version=local_borg_version,
@@ -172,7 +164,7 @@ def run_configuration(config_filename, config, arguments):
        # send logs irrespective of error
        dispatch.call_hooks(
            'ping_monitor',
-            hooks,
+            config,
            config_filename,
            monitor.MONITOR_HOOK_NAMES,
            monitor.State.LOG,
@@ -191,7 +183,7 @@ def run_configuration(config_filename, config, arguments):
        if monitoring_hooks_are_activated:
            dispatch.call_hooks(
                'ping_monitor',
-                hooks,
+                config,
                config_filename,
                monitor.MONITOR_HOOK_NAMES,
                monitor.State.FINISH,
@@ -200,7 +192,7 @@ def run_configuration(config_filename, config, arguments):
            )
            dispatch.call_hooks(
                'destroy_monitor',
-                hooks,
+                config,
                config_filename,
                monitor.MONITOR_HOOK_NAMES,
                monitoring_log_level,
@@ -216,8 +208,8 @@ def run_configuration(config_filename, config, arguments):
    if encountered_error and using_primary_action:
        try:
            command.execute_hook(
-                hooks.get('on_error'),
-                hooks.get('umask'),
+                config.get('on_error'),
+                config.get('umask'),
                config_filename,
                'on-error',
                global_arguments.dry_run,
@@ -227,7 +219,7 @@ def run_configuration(config_filename, config, arguments):
            )
            dispatch.call_hooks(
                'ping_monitor',
-                hooks,
+                config,
                config_filename,
                monitor.MONITOR_HOOK_NAMES,
                monitor.State.FAIL,
@@ -236,7 +228,7 @@ def run_configuration(config_filename, config, arguments):
            )
            dispatch.call_hooks(
                'destroy_monitor',
-                hooks,
+                config,
                config_filename,
                monitor.MONITOR_HOOK_NAMES,
                monitoring_log_level,
@@ -253,11 +245,7 @@ def run_actions(
    *,
    arguments,
    config_filename,
-    location,
-    storage,
-    retention,
-    consistency,
-    hooks,
+    config,
    local_path,
    remote_path,
    local_borg_version,
@@ -282,13 +270,13 @@ def run_actions(
    hook_context = {
        'repository': repository_path,
        # Deprecated: For backwards compatibility with borgmatic < 1.6.0.
-        'repositories': ','.join([repo['path'] for repo in location['repositories']]),
+        'repositories': ','.join([repo['path'] for repo in config['repositories']]),
        'log_file': global_arguments.log_file if global_arguments.log_file else '',
    }
    command.execute_hook(
-        hooks.get('before_actions'),
-        hooks.get('umask'),
+        config.get('before_actions'),
+        config.get('umask'),
        config_filename,
        'pre-actions',
        global_arguments.dry_run,
@@ -299,7 +287,7 @@ def run_actions(
        if action_name == 'rcreate':
            borgmatic.actions.rcreate.run_rcreate(
                repository,
-                storage,
+                config,
                local_borg_version,
                action_arguments,
                global_arguments,
@@ -309,7 +297,7 @@ def run_actions(
        elif action_name == 'transfer':
            borgmatic.actions.transfer.run_transfer(
                repository,
-                storage,
+                config,
                local_borg_version,
                action_arguments,
                global_arguments,
@@ -320,9 +308,7 @@ def run_actions(
            yield from borgmatic.actions.create.run_create(
                config_filename,
                repository,
-                location,
-                storage,
-                hooks,
+                config,
                hook_context,
                local_borg_version,
                action_arguments,
@@ -335,9 +321,7 @@ def run_actions(
            borgmatic.actions.prune.run_prune(
                config_filename,
                repository,
-                storage,
-                retention,
-                hooks,
+                config,
                hook_context,
                local_borg_version,
                action_arguments,
@@ -350,9 +334,7 @@ def run_actions(
            borgmatic.actions.compact.run_compact(
                config_filename,
                repository,
-                storage,
-                retention,
-                hooks,
+                config,
                hook_context,
                local_borg_version,
                action_arguments,
@@ -362,14 +344,11 @@ def run_actions(
                remote_path,
            )
        elif action_name == 'check':
-            if checks.repository_enabled_for_checks(repository, consistency):
+            if checks.repository_enabled_for_checks(repository, config):
                borgmatic.actions.check.run_check(
                    config_filename,
                    repository,
-                    location,
-                    storage,
-                    consistency,
-                    hooks,
+                    config,
                    hook_context,
                    local_borg_version,
                    action_arguments,
@@ -381,9 +360,7 @@ def run_actions(
            borgmatic.actions.extract.run_extract(
                config_filename,
                repository,
-                location,
-                storage,
-                hooks,
+                config,
                hook_context,
                local_borg_version,
                action_arguments,
@@ -394,7 +371,7 @@ def run_actions(
        elif action_name == 'export-tar':
            borgmatic.actions.export_tar.run_export_tar(
                repository,
-                storage,
+                config,
                local_borg_version,
                action_arguments,
                global_arguments,
@@ -404,7 +381,7 @@ def run_actions(
        elif action_name == 'mount':
            borgmatic.actions.mount.run_mount(
                repository,
-                storage,
+                config,
                local_borg_version,
                action_arguments,
                global_arguments,
@@ -414,9 +391,7 @@ def run_actions(
elif action_name == 'restore': elif action_name == 'restore':
borgmatic.actions.restore.run_restore( borgmatic.actions.restore.run_restore(
repository, repository,
location, config,
storage,
hooks,
local_borg_version, local_borg_version,
action_arguments, action_arguments,
global_arguments, global_arguments,
@ -426,7 +401,7 @@ def run_actions(
elif action_name == 'rlist': elif action_name == 'rlist':
yield from borgmatic.actions.rlist.run_rlist( yield from borgmatic.actions.rlist.run_rlist(
repository, repository,
storage, config,
local_borg_version, local_borg_version,
action_arguments, action_arguments,
global_arguments, global_arguments,
@ -436,7 +411,7 @@ def run_actions(
elif action_name == 'list': elif action_name == 'list':
yield from borgmatic.actions.list.run_list( yield from borgmatic.actions.list.run_list(
repository, repository,
storage, config,
local_borg_version, local_borg_version,
action_arguments, action_arguments,
global_arguments, global_arguments,
@ -446,7 +421,7 @@ def run_actions(
elif action_name == 'rinfo': elif action_name == 'rinfo':
yield from borgmatic.actions.rinfo.run_rinfo( yield from borgmatic.actions.rinfo.run_rinfo(
repository, repository,
storage, config,
local_borg_version, local_borg_version,
action_arguments, action_arguments,
global_arguments, global_arguments,
@ -456,7 +431,7 @@ def run_actions(
elif action_name == 'info': elif action_name == 'info':
yield from borgmatic.actions.info.run_info( yield from borgmatic.actions.info.run_info(
repository, repository,
storage, config,
local_borg_version, local_borg_version,
action_arguments, action_arguments,
global_arguments, global_arguments,
@ -466,7 +441,7 @@ def run_actions(
elif action_name == 'break-lock': elif action_name == 'break-lock':
borgmatic.actions.break_lock.run_break_lock( borgmatic.actions.break_lock.run_break_lock(
repository, repository,
storage, config,
local_borg_version, local_borg_version,
action_arguments, action_arguments,
global_arguments, global_arguments,
@ -476,7 +451,7 @@ def run_actions(
elif action_name == 'borg': elif action_name == 'borg':
borgmatic.actions.borg.run_borg( borgmatic.actions.borg.run_borg(
repository, repository,
storage, config,
local_borg_version, local_borg_version,
action_arguments, action_arguments,
global_arguments, global_arguments,
@ -485,8 +460,8 @@ def run_actions(
) )
command.execute_hook( command.execute_hook(
hooks.get('after_actions'), config.get('after_actions'),
hooks.get('umask'), config.get('umask'),
config_filename, config_filename,
'post-actions', 'post-actions',
global_arguments.dry_run, global_arguments.dry_run,
@ -569,6 +544,9 @@ def log_record(suppress_log=False, **kwargs):
return record return record
MAX_CAPTURED_OUTPUT_LENGTH = 1000
def log_error_records( def log_error_records(
message, error=None, levelno=logging.CRITICAL, log_command_error_output=False message, error=None, levelno=logging.CRITICAL, log_command_error_output=False
): ):
@ -591,12 +569,18 @@ def log_error_records(
except CalledProcessError as error: except CalledProcessError as error:
yield log_record(levelno=levelno, levelname=level_name, msg=message) yield log_record(levelno=levelno, levelname=level_name, msg=message)
if error.output: if error.output:
try:
output = error.output.decode('utf-8')
except (UnicodeDecodeError, AttributeError):
output = error.output
# Suppress these logs for now and save full error output for the log summary at the end. # Suppress these logs for now and save full error output for the log summary at the end.
yield log_record( yield log_record(
levelno=levelno, levelno=levelno,
levelname=level_name, levelname=level_name,
msg=error.output, msg=output[:MAX_CAPTURED_OUTPUT_LENGTH]
suppress_log=not log_command_error_output, + ' ...' * (len(output) > MAX_CAPTURED_OUTPUT_LENGTH),
suppress_log=True,
) )
yield log_record(levelno=levelno, levelname=level_name, msg=error) yield log_record(levelno=levelno, levelname=level_name, msg=error)
except (ValueError, OSError) as error: except (ValueError, OSError) as error:
@@ -613,7 +597,7 @@ def get_local_path(configs):
     Arbitrarily return the local path from the first configuration dict. Default to "borg" if not
     set.
     '''
-    return next(iter(configs.values())).get('location', {}).get('local_path', 'borg')
+    return next(iter(configs.values())).get('local_path', 'borg')
 
 
 def collect_highlander_action_summary_logs(configs, arguments, configuration_parse_errors):
@@ -627,6 +611,8 @@ def collect_highlander_action_summary_logs(configs, arguments, configuration_parse_errors):
     A highlander action is an action that cannot coexist with other actions on the borgmatic
     command-line, and borgmatic exits after processing such an action.
     '''
+    add_custom_log_levels()
+
     if 'bootstrap' in arguments:
         try:
             # No configuration file is needed for bootstrap.
@@ -744,10 +730,9 @@ def collect_configuration_run_summary_logs(configs, arguments):
     if 'create' in arguments:
         try:
             for config_filename, config in configs.items():
-                hooks = config.get('hooks', {})
                 command.execute_hook(
-                    hooks.get('before_everything'),
-                    hooks.get('umask'),
+                    config.get('before_everything'),
+                    config.get('umask'),
                     config_filename,
                     'pre-everything',
                     arguments['global'].dry_run,
@@ -792,10 +777,9 @@ def collect_configuration_run_summary_logs(configs, arguments):
     if 'create' in arguments:
         try:
             for config_filename, config in configs.items():
-                hooks = config.get('hooks', {})
                 command.execute_hook(
-                    hooks.get('after_everything'),
-                    hooks.get('umask'),
+                    config.get('after_everything'),
+                    config.get('umask'),
                     config_filename,
                     'post-everything',
                     arguments['global'].dry_run,
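The practical upshot of this file's changes: everywhere a per-section dict (`location`, `storage`, `retention`, `consistency`, `hooks`) was consulted, the single `config` dict is consulted instead. A rough before/after lookup, sketched with hypothetical values:

```python
# Before: options were fetched from per-section dicts, e.g.
# local_path = config.get('location', {}).get('local_path', 'borg')

# After: the same options live at the top level of the one config dict.
config = {'local_path': 'borg1', 'umask': '0077', 'repositories': [{'path': 'repo.borg'}]}
local_path = config.get('local_path', 'borg')
umask = config.get('umask')  # formerly storage.get('umask') or hooks.get('umask')
```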


@@ -11,7 +11,7 @@ INDENT = 4
 SEQUENCE_INDENT = 2
 
 
-def _insert_newline_before_comment(config, field_name):
+def insert_newline_before_comment(config, field_name):
     '''
     Using some ruamel.yaml black magic, insert a blank line in the config right before the given
     field and its comments.
@@ -21,10 +21,10 @@ def _insert_newline_before_comment(config, field_name):
     )
 
 
-def _schema_to_sample_configuration(schema, level=0, parent_is_sequence=False):
+def schema_to_sample_configuration(schema, level=0, parent_is_sequence=False):
     '''
     Given a loaded configuration schema, generate and return sample config for it. Include comments
-    for each section based on the schema "description".
+    for each option based on the schema "description".
     '''
     schema_type = schema.get('type')
     example = schema.get('example')
@@ -33,13 +33,13 @@ def _schema_to_sample_configuration(schema, level=0, parent_is_sequence=False):
     if schema_type == 'array':
         config = yaml.comments.CommentedSeq(
-            [_schema_to_sample_configuration(schema['items'], level, parent_is_sequence=True)]
+            [schema_to_sample_configuration(schema['items'], level, parent_is_sequence=True)]
         )
         add_comments_to_configuration_sequence(config, schema, indent=(level * INDENT))
     elif schema_type == 'object':
         config = yaml.comments.CommentedMap(
             [
-                (field_name, _schema_to_sample_configuration(sub_schema, level + 1))
+                (field_name, schema_to_sample_configuration(sub_schema, level + 1))
                 for field_name, sub_schema in schema['properties'].items()
             ]
         )
@@ -53,13 +53,13 @@ def _schema_to_sample_configuration(schema, level=0, parent_is_sequence=False):
     return config
 
 
-def _comment_out_line(line):
+def comment_out_line(line):
     # If it's already is commented out (or empty), there's nothing further to do!
     stripped_line = line.lstrip()
     if not stripped_line or stripped_line.startswith('#'):
         return line
 
-    # Comment out the names of optional sections, inserting the '#' after any indent for aesthetics.
+    # Comment out the names of optional options, inserting the '#' after any indent for aesthetics.
     matches = re.match(r'(\s*)', line)
     indent_spaces = matches.group(0) if matches else ''
     count_indent_spaces = len(indent_spaces)
@@ -67,7 +67,7 @@ def _comment_out_line(line):
     return '# '.join((indent_spaces, line[count_indent_spaces:]))
 
 
-def _comment_out_optional_configuration(rendered_config):
+def comment_out_optional_configuration(rendered_config):
     '''
     Post-process a rendered configuration string to comment out optional key/values, as determined
     by a sentinel in the comment before each key.
@@ -92,7 +92,7 @@ def _comment_out_optional_configuration(rendered_config):
         if not line.strip():
             optional = False
 
-        lines.append(_comment_out_line(line) if optional else line)
+        lines.append(comment_out_line(line) if optional else line)
 
     return '\n'.join(lines)
@@ -165,7 +164,6 @@ def add_comments_to_configuration_sequence(config, schema, indent=0):
         return
 
 
-REQUIRED_SECTION_NAMES = {'location', 'retention'}
 REQUIRED_KEYS = {'source_directories', 'repositories', 'keep_daily'}
 COMMENTED_OUT_SENTINEL = 'COMMENT_OUT'
@@ -185,7 +184,7 @@ def add_comments_to_configuration_object(config, schema, indent=0, skip_first=False):
         # If this is an optional key, add an indicator to the comment flagging it to be commented
         # out from the sample configuration. This sentinel is consumed by downstream processing that
         # does the actual commenting out.
-        if field_name not in REQUIRED_SECTION_NAMES and field_name not in REQUIRED_KEYS:
+        if field_name not in REQUIRED_KEYS:
             description = (
                 '\n'.join((description, COMMENTED_OUT_SENTINEL))
                 if description
@@ -199,7 +198,7 @@ def add_comments_to_configuration_object(config, schema, indent=0, skip_first=False):
         config.yaml_set_comment_before_after_key(key=field_name, before=description, indent=indent)
         if index > 0:
-            _insert_newline_before_comment(config, field_name)
+            insert_newline_before_comment(config, field_name)
 
 
 RUAMEL_YAML_COMMENTS_INDEX = 1
@@ -284,7 +283,7 @@ def generate_sample_configuration(
     normalize.normalize(source_filename, source_config)
 
     destination_config = merge_source_configuration_into_destination(
-        _schema_to_sample_configuration(schema), source_config
+        schema_to_sample_configuration(schema), source_config
     )
 
     if dry_run:
@@ -292,6 +291,6 @@
     write_configuration(
         destination_filename,
-        _comment_out_optional_configuration(render_configuration(destination_config)),
+        comment_out_optional_configuration(render_configuration(destination_config)),
         overwrite=overwrite,
     )
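Since `comment_out_line` (renamed from `_comment_out_line` above) drives how optional options are rendered in the sample configuration, here's roughly what it does to a line, reproduced standalone from the diff:

```python
import re


def comment_out_line(line):
    stripped_line = line.lstrip()
    if not stripped_line or stripped_line.startswith('#'):
        return line

    # Insert the '#' after any indent for aesthetics.
    matches = re.match(r'(\s*)', line)
    indent_spaces = matches.group(0) if matches else ''
    count_indent_spaces = len(indent_spaces)

    return '# '.join((indent_spaces, line[count_indent_spaces:]))


assert comment_out_line('    keep_hourly: 24') == '    # keep_hourly: 24'
assert comment_out_line('# already commented') == '# already commented'
```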


@@ -97,8 +97,8 @@ class Include_constructor(ruamel.yaml.SafeConstructor):
     ```
 
     These includes are deep merged into the current configuration file. For instance, in this
-    example, any "retention" options in common.yaml will get merged into the "retention" section
-    in the example configuration file.
+    example, any "option" with sub-options in common.yaml will get merged into the corresponding
+    "option" with sub-options in the example configuration file.
     '''
     representer = ruamel.yaml.representer.SafeRepresenter()
@@ -116,7 +116,7 @@ def load_configuration(filename):
     '''
     Load the given configuration file and return its contents as a data structure of nested dicts
     and lists. Also, replace any "{constant}" strings with the value of the "constant" key in the
-    "constants" section of the configuration file.
+    "constants" option of the configuration file.
 
     Raise ruamel.yaml.error.YAMLError if something goes wrong parsing the YAML, or RecursionError
     if there are too many recursive includes.
@@ -223,8 +223,8 @@ def deep_merge_nodes(nodes):
     If a mapping or sequence node has a YAML "!retain" tag, then that node is not merged.
 
     The purpose of deep merging like this is to support, for instance, merging one borgmatic
-    configuration file into another for reuse, such that a configuration section ("retention",
-    etc.) does not completely replace the corresponding section in a merged file.
+    configuration file into another for reuse, such that a configuration option with sub-options
+    does not completely replace the corresponding option in a merged file.
 
     Raise ValueError if a merge is implied using two incompatible types.
     '''
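A rough dict-based sketch of the merge semantics this docstring describes; the real `deep_merge_nodes` operates on ruamel.yaml node trees and honors the `!retain` tag, which this toy version ignores:

```python
def deep_merge(destination, source):
    # Values from source win, except that nested dicts are merged
    # recursively instead of replaced wholesale.
    for key, value in source.items():
        if isinstance(value, dict) and isinstance(destination.get(key), dict):
            deep_merge(destination[key], value)
        else:
            destination[key] = value
    return destination


# An included file's "healthchecks" sub-options merge into the including
# file's "healthchecks" option rather than replacing it:
config = {'healthchecks': {'verify_tls': False}}
included = {'keep_daily': 7, 'healthchecks': {'ping_url': 'https://hc.example.com'}}
assert deep_merge(config, included) == {
    'keep_daily': 7,
    'healthchecks': {'verify_tls': False, 'ping_url': 'https://hc.example.com'},
}
```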


@@ -2,21 +2,70 @@ import logging
 import os
 
 
+def normalize_sections(config_filename, config):
+    '''
+    Given a configuration filename and a configuration dict of its loaded contents, airlift any
+    options out of sections ("location:", etc.) to the global scope and delete those sections.
+    Return any log message warnings produced based on the normalization performed.
+
+    Raise ValueError if the "prefix" option is set in both "location" and "consistency" sections.
+    '''
+    location = config.get('location') or {}
+    storage = config.get('storage') or {}
+    consistency = config.get('consistency') or {}
+    hooks = config.get('hooks') or {}
+
+    if (
+        location.get('prefix')
+        and consistency.get('prefix')
+        and location.get('prefix') != consistency.get('prefix')
+    ):
+        raise ValueError(
+            'The retention prefix and the consistency prefix cannot have different values (unless one is not set).'
+        )
+
+    if storage.get('umask') and hooks.get('umask') and storage.get('umask') != hooks.get('umask'):
+        raise ValueError(
+            'The storage umask and the hooks umask cannot have different values (unless one is not set).'
+        )
+
+    any_section_upgraded = False
+
+    # Move any options from deprecated sections into the global scope.
+    for section_name in ('location', 'storage', 'retention', 'consistency', 'output', 'hooks'):
+        section_config = config.get(section_name)
+
+        if section_config:
+            any_section_upgraded = True
+            del config[section_name]
+            config.update(section_config)
+
+    if any_section_upgraded:
+        return [
+            logging.makeLogRecord(
+                dict(
+                    levelno=logging.WARNING,
+                    levelname='WARNING',
+                    msg=f'{config_filename}: Configuration sections like location: and storage: are deprecated and support will be removed from a future release. To prepare for this, move your options out of sections to the global scope.',
+                )
+            )
+        ]
+
+    return []
+
+
 def normalize(config_filename, config):
     '''
     Given a configuration filename and a configuration dict of its loaded contents, apply particular
     hard-coded rules to normalize the configuration to adhere to the current schema. Return any log
     message warnings produced based on the normalization performed.
+
+    Raise ValueError the configuration cannot be normalized.
     '''
-    logs = []
-    location = config.get('location') or {}
-    storage = config.get('storage') or {}
-    consistency = config.get('consistency') or {}
-    retention = config.get('retention') or {}
-    hooks = config.get('hooks') or {}
+    logs = normalize_sections(config_filename, config)
 
     # Upgrade exclude_if_present from a string to a list.
-    exclude_if_present = location.get('exclude_if_present')
+    exclude_if_present = config.get('exclude_if_present')
     if isinstance(exclude_if_present, str):
         logs.append(
             logging.makeLogRecord(
@@ -27,10 +76,10 @@ def normalize(config_filename, config):
                 )
             )
         )
-        config['location']['exclude_if_present'] = [exclude_if_present]
+        config['exclude_if_present'] = [exclude_if_present]
 
     # Upgrade various monitoring hooks from a string to a dict.
-    healthchecks = hooks.get('healthchecks')
+    healthchecks = config.get('healthchecks')
     if isinstance(healthchecks, str):
         logs.append(
             logging.makeLogRecord(
@@ -41,9 +90,9 @@ def normalize(config_filename, config):
                 )
             )
         )
-        config['hooks']['healthchecks'] = {'ping_url': healthchecks}
+        config['healthchecks'] = {'ping_url': healthchecks}
 
-    cronitor = hooks.get('cronitor')
+    cronitor = config.get('cronitor')
     if isinstance(cronitor, str):
         logs.append(
             logging.makeLogRecord(
@@ -54,9 +103,9 @@ def normalize(config_filename, config):
                 )
             )
         )
-        config['hooks']['cronitor'] = {'ping_url': cronitor}
+        config['cronitor'] = {'ping_url': cronitor}
 
-    pagerduty = hooks.get('pagerduty')
+    pagerduty = config.get('pagerduty')
     if isinstance(pagerduty, str):
         logs.append(
             logging.makeLogRecord(
@@ -67,9 +116,9 @@ def normalize(config_filename, config):
                 )
             )
         )
-        config['hooks']['pagerduty'] = {'integration_key': pagerduty}
+        config['pagerduty'] = {'integration_key': pagerduty}
 
-    cronhub = hooks.get('cronhub')
+    cronhub = config.get('cronhub')
     if isinstance(cronhub, str):
         logs.append(
             logging.makeLogRecord(
@@ -80,10 +129,10 @@ def normalize(config_filename, config):
                 )
             )
         )
-        config['hooks']['cronhub'] = {'ping_url': cronhub}
+        config['cronhub'] = {'ping_url': cronhub}
 
     # Upgrade consistency checks from a list of strings to a list of dicts.
-    checks = consistency.get('checks')
+    checks = config.get('checks')
     if isinstance(checks, list) and len(checks) and isinstance(checks[0], str):
         logs.append(
             logging.makeLogRecord(
@@ -94,10 +143,10 @@ def normalize(config_filename, config):
                 )
             )
         )
-        config['consistency']['checks'] = [{'name': check_type} for check_type in checks]
+        config['checks'] = [{'name': check_type} for check_type in checks]
 
     # Rename various configuration options.
-    numeric_owner = location.pop('numeric_owner', None)
+    numeric_owner = config.pop('numeric_owner', None)
     if numeric_owner is not None:
         logs.append(
             logging.makeLogRecord(
@@ -108,9 +157,9 @@ def normalize(config_filename, config):
                 )
             )
         )
-        config['location']['numeric_ids'] = numeric_owner
+        config['numeric_ids'] = numeric_owner
 
-    bsd_flags = location.pop('bsd_flags', None)
+    bsd_flags = config.pop('bsd_flags', None)
     if bsd_flags is not None:
         logs.append(
             logging.makeLogRecord(
@@ -121,9 +170,9 @@ def normalize(config_filename, config):
                 )
             )
         )
-        config['location']['flags'] = bsd_flags
+        config['flags'] = bsd_flags
 
-    remote_rate_limit = storage.pop('remote_rate_limit', None)
+    remote_rate_limit = config.pop('remote_rate_limit', None)
     if remote_rate_limit is not None:
         logs.append(
             logging.makeLogRecord(
@@ -134,10 +183,10 @@ def normalize(config_filename, config):
                 )
             )
         )
-        config['storage']['upload_rate_limit'] = remote_rate_limit
+        config['upload_rate_limit'] = remote_rate_limit
 
     # Upgrade remote repositories to ssh:// syntax, required in Borg 2.
-    repositories = location.get('repositories')
+    repositories = config.get('repositories')
     if repositories:
         if isinstance(repositories[0], str):
             logs.append(
@@ -149,11 +198,11 @@ def normalize(config_filename, config):
                     )
                 )
             )
-            config['location']['repositories'] = [
-                {'path': repository} for repository in repositories
-            ]
-            repositories = config['location']['repositories']
-    config['location']['repositories'] = []
+            config['repositories'] = [{'path': repository} for repository in repositories]
+            repositories = config['repositories']
+
+    config['repositories'] = []
+
     for repository_dict in repositories:
         repository_path = repository_dict['path']
         if '~' in repository_path:
@@ -171,14 +220,14 @@ def normalize(config_filename, config):
                 updated_repository_path = os.path.abspath(
                     repository_path.partition('file://')[-1]
                 )
-                config['location']['repositories'].append(
+                config['repositories'].append(
                     dict(
                         repository_dict,
                         path=updated_repository_path,
                     )
                 )
             elif repository_path.startswith('ssh://'):
-                config['location']['repositories'].append(repository_dict)
+                config['repositories'].append(repository_dict)
             else:
                 rewritten_repository_path = f"ssh://{repository_path.replace(':~', '/~').replace(':/', '/').replace(':', '/./')}"
                 logs.append(
@@ -190,16 +239,16 @@ def normalize(config_filename, config):
                     )
                 )
             )
-                config['location']['repositories'].append(
+                config['repositories'].append(
                     dict(
                         repository_dict,
                         path=rewritten_repository_path,
                     )
                 )
         else:
-            config['location']['repositories'].append(repository_dict)
+            config['repositories'].append(repository_dict)
 
-    if consistency.get('prefix') or retention.get('prefix'):
+    if config.get('prefix'):
         logs.append(
             logging.makeLogRecord(
                 dict(
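To make the section airlift concrete, here's a minimal re-implementation of just the flattening loop (`flatten_sections` is a hypothetical name; the real `normalize_sections` above also validates the prefix and umask conflicts and emits a deprecation warning):

```python
def flatten_sections(config):
    # Pop each deprecated section and promote its options to the top level.
    for section_name in ('location', 'storage', 'retention', 'consistency', 'output', 'hooks'):
        section_config = config.pop(section_name, None)

        if section_config:
            config.update(section_config)

    return config


assert flatten_sections(
    {'location': {'source_directories': ['/home']}, 'retention': {'keep_daily': 7}}
) == {'source_directories': ['/home'], 'keep_daily': 7}
```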


@@ -32,19 +32,33 @@ def convert_value_type(value):
     return ruamel.yaml.YAML(typ='safe').load(io.StringIO(value))
 
 
+LEGACY_SECTION_NAMES = {'location', 'storage', 'retention', 'consistency', 'output', 'hooks'}
+
+
+def strip_section_names(parsed_override_key):
+    '''
+    Given a parsed override key as a tuple of option and suboption names, strip out any initial
+    legacy section names, since configuration file normalization also strips them out.
+    '''
+    if parsed_override_key[0] in LEGACY_SECTION_NAMES:
+        return parsed_override_key[1:]
+
+    return parsed_override_key
+
+
 def parse_overrides(raw_overrides):
     '''
-    Given a sequence of configuration file override strings in the form of "section.option=value",
+    Given a sequence of configuration file override strings in the form of "option.suboption=value",
     parse and return a sequence of tuples (keys, values), where keys is a sequence of strings. For
     instance, given the following raw overrides:
 
-        ['section.my_option=value1', 'section.other_option=value2']
+        ['my_option.suboption=value1', 'other_option=value2']
 
     ... return this:
 
         (
-            (('section', 'my_option'), 'value1'),
-            (('section', 'other_option'), 'value2'),
+            (('my_option', 'suboption'), 'value1'),
+            (('other_option'), 'value2'),
         )
 
     Raise ValueError if an override can't be parsed.
@@ -59,13 +73,13 @@ def parse_overrides(raw_overrides):
             raw_keys, value = raw_override.split('=', 1)
             parsed_overrides.append(
                 (
-                    tuple(raw_keys.split('.')),
+                    strip_section_names(tuple(raw_keys.split('.'))),
                     convert_value_type(value),
                 )
             )
         except ValueError:
             raise ValueError(
-                f"Invalid override '{raw_override}'. Make sure you use the form: SECTION.OPTION=VALUE"
+                f"Invalid override '{raw_override}'. Make sure you use the form: OPTION=VALUE or OPTION.SUBOPTION=VALUE"
            )
         except ruamel.yaml.error.YAMLError as error:
             raise ValueError(f"Invalid override '{raw_override}': {error.problem}")
@@ -76,7 +90,7 @@ def parse_overrides(raw_overrides):
 def apply_overrides(config, raw_overrides):
     '''
     Given a configuration dict and a sequence of configuration file override strings in the form of
-    "section.option=value", parse each override and set it the configuration dict.
+    "option.suboption=value", parse each override and set it the configuration dict.
     '''
     overrides = parse_overrides(raw_overrides)
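The effect of `strip_section_names` on override keys, reproduced standalone from the diff so old-style and new-style overrides can be compared:

```python
LEGACY_SECTION_NAMES = {'location', 'storage', 'retention', 'consistency', 'output', 'hooks'}


def strip_section_names(parsed_override_key):
    if parsed_override_key[0] in LEGACY_SECTION_NAMES:
        return parsed_override_key[1:]

    return parsed_override_key


# "storage.compression=lz4" and "compression=lz4" now address the same option:
assert strip_section_names(('storage', 'compression')) == ('compression',)
assert strip_section_names(('compression',)) == ('compression',)
```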

File diff suppressed because it is too large


@@ -71,18 +71,15 @@ def apply_logical_validation(config_filename, parsed_configuration):
     below), run through any additional logical validation checks. If there are any such validation
     problems, raise a Validation_error.
     '''
-    location_repositories = parsed_configuration.get('location', {}).get('repositories')
-    check_repositories = parsed_configuration.get('consistency', {}).get('check_repositories', [])
+    repositories = parsed_configuration.get('repositories')
+    check_repositories = parsed_configuration.get('check_repositories', [])
     for repository in check_repositories:
         if not any(
-            repositories_match(repository, config_repository)
-            for config_repository in location_repositories
+            repositories_match(repository, config_repository) for config_repository in repositories
         ):
             raise Validation_error(
                 config_filename,
-                (
-                    f'Unknown repository in the "consistency" section\'s "check_repositories": {repository}',
-                ),
+                (f'Unknown repository in "check_repositories": {repository}',),
             )
@@ -90,11 +87,15 @@ def parse_configuration(config_filename, schema_filename, overrides=None, resolve_env=True):
     '''
     Given the path to a config filename in YAML format, the path to a schema filename in a YAML
     rendition of JSON Schema format, a sequence of configuration file override strings in the form
-    of "section.option=value", return the parsed configuration as a data structure of nested dicts
+    of "option.suboption=value", return the parsed configuration as a data structure of nested dicts
     and lists corresponding to the schema. Example return value:
 
-        {'location': {'source_directories': ['/home', '/etc'], 'repository': 'hostname.borg'},
-         'retention': {'keep_daily': 7}, 'consistency': {'checks': ['repository', 'archives']}}
+        {
+            'source_directories': ['/home', '/etc'],
+            'repository': 'hostname.borg',
+            'keep_daily': 7,
+            'checks': ['repository', 'archives'],
+        }
 
     Also return a sequence of logging.LogRecord instances containing any warnings about the
     configuration.
@@ -174,7 +175,7 @@ def guard_configuration_contains_repository(repository, configurations):
         tuple(
             config_repository
             for config in configurations.values()
-            for config_repository in config['location']['repositories']
+            for config_repository in config['repositories']
             if repositories_match(config_repository, repository)
         )
     )
@@ -198,7 +199,7 @@ def guard_single_repository_selected(repository, configurations):
         tuple(
             config_repository
             for config in configurations.values()
-            for config_repository in config['location']['repositories']
+            for config_repository in config['repositories']
         )
     )


@@ -241,13 +241,16 @@ def execute_command_and_capture_output(
     shell=False,
     extra_environment=None,
     working_directory=None,
+    borg_local_path=None,
 ):
     '''
     Execute the given command (a sequence of command/argument strings), capturing and returning its
     output (stdout). If capture stderr is True, then capture and return stderr in addition to
     stdout. If shell is True, execute the command within a shell. If an extra environment dict is
     given, then use it to augment the current environment, and pass the result into the command. If
-    a working directory is given, use that as the present working directory when running the command.
+    a working directory is given, use that as the present working directory when running the
+    command. If a Borg local path is given, and the command matches it (regardless of arguments),
+    treat exit code 1 as a warning instead of an error.
 
     Raise subprocesses.CalledProcessError if an error occurs while running the command.
     '''
@@ -264,7 +267,7 @@ def execute_command_and_capture_output(
             cwd=working_directory,
         )
     except subprocess.CalledProcessError as error:
-        if exit_code_indicates_error(command, error.returncode):
+        if exit_code_indicates_error(command, error.returncode, borg_local_path):
            raise
 
         output = error.output
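A sketch of how `exit_code_indicates_error` plausibly uses the new `borg_local_path` argument, assuming it compares the command name against it as the docstring above describes (the function itself isn't shown in this hunk, so treat the details as illustrative):

```python
BORG_ERROR_EXIT_CODE = 2


def exit_code_indicates_error(command, exit_code, borg_local_path=None):
    command_name = command[0] if isinstance(command, (list, tuple)) else command.split(' ')[0]

    if borg_local_path and command_name == borg_local_path:
        # Borg reserves exit code 1 for warnings, so only 2 and up (or a
        # negative signal exit) counts as an error.
        return bool(exit_code < 0 or exit_code >= BORG_ERROR_EXIT_CODE)

    return bool(exit_code != 0)


assert not exit_code_indicates_error(('borg', 'list'), 1, borg_local_path='borg')
assert exit_code_indicates_error(('grep', 'pattern', 'file'), 1)
```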


@@ -14,7 +14,7 @@ MONITOR_STATE_TO_CRONHUB = {
 def initialize_monitor(
-    ping_url, config_filename, monitoring_log_level, dry_run
+    ping_url, config, config_filename, monitoring_log_level, dry_run
 ):  # pragma: no cover
     '''
     No initialization is necessary for this monitor.
@@ -22,7 +22,7 @@ def initialize_monitor(
     pass
 
 
-def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_run):
+def ping_monitor(hook_config, config, config_filename, state, monitoring_log_level, dry_run):
     '''
     Ping the configured Cronhub URL, modified with the monitor.State. Use the given configuration
     filename in any log entries. If this is a dry run, then don't actually ping anything.
@@ -55,7 +55,7 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_run):
 def destroy_monitor(
-    ping_url_or_uuid, config_filename, monitoring_log_level, dry_run
+    ping_url_or_uuid, config, config_filename, monitoring_log_level, dry_run
 ):  # pragma: no cover
     '''
     No destruction is necessary for this monitor.


@@ -14,7 +14,7 @@ MONITOR_STATE_TO_CRONITOR = {
 def initialize_monitor(
-    ping_url, config_filename, monitoring_log_level, dry_run
+    ping_url, config, config_filename, monitoring_log_level, dry_run
 ):  # pragma: no cover
     '''
     No initialization is necessary for this monitor.
@@ -22,7 +22,7 @@ def initialize_monitor(
     pass
 
 
-def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_run):
+def ping_monitor(hook_config, config, config_filename, state, monitoring_log_level, dry_run):
     '''
     Ping the configured Cronitor URL, modified with the monitor.State. Use the given configuration
     filename in any log entries. If this is a dry run, then don't actually ping anything.
@@ -50,7 +50,7 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_run):
 def destroy_monitor(
-    ping_url_or_uuid, config_filename, monitoring_log_level, dry_run
+    ping_url_or_uuid, config, config_filename, monitoring_log_level, dry_run
 ):  # pragma: no cover
     '''
     No destruction is necessary for this monitor.


@@ -27,18 +27,17 @@ HOOK_NAME_TO_MODULE = {
 }
 
 
-def call_hook(function_name, hooks, log_prefix, hook_name, *args, **kwargs):
+def call_hook(function_name, config, log_prefix, hook_name, *args, **kwargs):
     '''
-    Given the hooks configuration dict and a prefix to use in log entries, call the requested
-    function of the Python module corresponding to the given hook name. Supply that call with the
-    configuration for this hook (if any), the log prefix, and any given args and kwargs. Return any
-    return value.
+    Given a configuration dict and a prefix to use in log entries, call the requested function of
+    the Python module corresponding to the given hook name. Supply that call with the configuration
+    for this hook (if any), the log prefix, and any given args and kwargs. Return any return value.
 
     Raise ValueError if the hook name is unknown.
     Raise AttributeError if the function name is not found in the module.
     Raise anything else that the called function raises.
     '''
-    config = hooks.get(hook_name, {})
+    hook_config = config.get(hook_name, {})
 
     try:
         module = HOOK_NAME_TO_MODULE[hook_name]
@@ -46,15 +45,15 @@ def call_hook(function_name, hooks, log_prefix, hook_name, *args, **kwargs):
         raise ValueError(f'Unknown hook name: {hook_name}')
 
     logger.debug(f'{log_prefix}: Calling {hook_name} hook function {function_name}')
-    return getattr(module, function_name)(config, log_prefix, *args, **kwargs)
+    return getattr(module, function_name)(hook_config, config, log_prefix, *args, **kwargs)
 
 
-def call_hooks(function_name, hooks, log_prefix, hook_names, *args, **kwargs):
+def call_hooks(function_name, config, log_prefix, hook_names, *args, **kwargs):
     '''
-    Given the hooks configuration dict and a prefix to use in log entries, call the requested
-    function of the Python module corresponding to each given hook name. Supply each call with the
-    configuration for that hook, the log prefix, and any given args and kwargs. Collect any return
-    values into a dict from hook name to return value.
+    Given a configuration dict and a prefix to use in log entries, call the requested function of
+    the Python module corresponding to each given hook name. Supply each call with the configuration
+    for that hook, the log prefix, and any given args and kwargs. Collect any return values into a
+    dict from hook name to return value.
 
     If the hook name is not present in the hooks configuration, then don't call the function for it
     and omit it from the return values.
@@ -64,23 +63,23 @@ def call_hooks(function_name, hooks, log_prefix, hook_names, *args, **kwargs):
     Raise anything else that a called function raises. An error stops calls to subsequent functions.
     '''
     return {
-        hook_name: call_hook(function_name, hooks, log_prefix, hook_name, *args, **kwargs)
+        hook_name: call_hook(function_name, config, log_prefix, hook_name, *args, **kwargs)
         for hook_name in hook_names
-        if hooks.get(hook_name)
+        if config.get(hook_name)
     }
 
 
-def call_hooks_even_if_unconfigured(function_name, hooks, log_prefix, hook_names, *args, **kwargs):
+def call_hooks_even_if_unconfigured(function_name, config, log_prefix, hook_names, *args, **kwargs):
     '''
-    Given the hooks configuration dict and a prefix to use in log entries, call the requested
-    function of the Python module corresponding to each given hook name. Supply each call with the
-    configuration for that hook, the log prefix, and any given args and kwargs. Collect any return
-    values into a dict from hook name to return value.
+    Given a configuration dict and a prefix to use in log entries, call the requested function of
+    the Python module corresponding to each given hook name. Supply each call with the configuration
+    for that hook, the log prefix, and any given args and kwargs. Collect any return values into a
+    dict from hook name to return value.
 
     Raise AttributeError if the function name is not found in the module.
     Raise anything else that a called function raises. An error stops calls to subsequent functions.
     '''
     return {
-        hook_name: call_hook(function_name, hooks, log_prefix, hook_name, *args, **kwargs)
+        hook_name: call_hook(function_name, config, log_prefix, hook_name, *args, **kwargs)
         for hook_name in hook_names
     }
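A tiny runnable mirror of the new dispatch convention: `call_hook` now looks up the per-hook configuration itself and passes the full config dict along as a second argument. The fake module below is a stand-in for a real hook module such as `healthchecks`:

```python
class FakeHookModule:
    @staticmethod
    def ping_monitor(hook_config, config, log_prefix):
        # Receives both its own config and the full config dict.
        return (hook_config['ping_url'], config.get('umask'))


HOOK_NAME_TO_MODULE = {'healthchecks': FakeHookModule}


def call_hook(function_name, config, log_prefix, hook_name, *args, **kwargs):
    hook_config = config.get(hook_name, {})
    module = HOOK_NAME_TO_MODULE[hook_name]

    return getattr(module, function_name)(hook_config, config, log_prefix, *args, **kwargs)


config = {'healthchecks': {'ping_url': 'https://hc.example.com'}, 'umask': '0077'}
assert call_hook('ping_monitor', config, 'test.yaml', 'healthchecks') == (
    'https://hc.example.com',
    '0077',
)
```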


@@ -70,7 +70,7 @@ def format_buffered_logs_for_payload():
     return payload
 
 
-def initialize_monitor(hook_config, config_filename, monitoring_log_level, dry_run):
+def initialize_monitor(hook_config, config, config_filename, monitoring_log_level, dry_run):
     '''
     Add a handler to the root logger that stores in memory the most recent logs emitted. That way,
     we can send them all to Healthchecks upon a finish or failure state. But skip this if the
@@ -90,7 +90,7 @@ def initialize_monitor(hook_config, config_filename, monitoring_log_level, dry_run):
     )
 
 
-def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_run):
+def ping_monitor(hook_config, config, config_filename, state, monitoring_log_level, dry_run):
     '''
     Ping the configured Healthchecks URL or UUID, modified with the monitor.State. Use the given
     configuration filename in any log entries, and log to Healthchecks with the giving log level.
@@ -133,7 +133,7 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_run):
         logger.warning(f'{config_filename}: Healthchecks error: {error}')
 
 
-def destroy_monitor(hook_config, config_filename, monitoring_log_level, dry_run):
+def destroy_monitor(hook_config, config, config_filename, monitoring_log_level, dry_run):
     '''
     Remove the monitor handler that was added to the root logger. This prevents the handler from
     getting reused by other instances of this monitor.


@@ -6,21 +6,20 @@ from borgmatic.hooks import dump
 logger = logging.getLogger(__name__)
 
 
-def make_dump_path(location_config):  # pragma: no cover
+def make_dump_path(config):  # pragma: no cover
     '''
-    Make the dump path from the given location configuration and the name of this hook.
+    Make the dump path from the given configuration dict and the name of this hook.
     '''
     return dump.make_database_dump_path(
-        location_config.get('borgmatic_source_directory'), 'mongodb_databases'
+        config.get('borgmatic_source_directory'), 'mongodb_databases'
     )
 
 
-def dump_databases(databases, log_prefix, location_config, dry_run):
+def dump_databases(databases, config, log_prefix, dry_run):
     '''
     Dump the given MongoDB databases to a named pipe. The databases are supplied as a sequence of
-    dicts, one dict describing each database as per the configuration schema. Use the given log
-    prefix in any log entries. Use the given location configuration dict to construct the
-    destination path.
+    dicts, one dict describing each database as per the configuration schema. Use the configuration
+    dict to construct the destination path and the given log prefix in any log entries.
 
     Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
     pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
@@ -33,7 +32,7 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
     for database in databases:
         name = database['name']
         dump_filename = dump.make_database_dump_filename(
-            make_dump_path(location_config), name, database.get('hostname')
+            make_dump_path(config), name, database.get('hostname')
         )
         dump_format = database.get('format', 'archive')
@@ -82,47 +81,53 @@ def build_dump_command(database, dump_filename, dump_format):
     return command
 
 
-def remove_database_dumps(databases, log_prefix, location_config, dry_run):  # pragma: no cover
+def remove_database_dumps(databases, config, log_prefix, dry_run):  # pragma: no cover
     '''
     Remove all database dump files for this hook regardless of the given databases. Use the log
-    prefix in any log entries. Use the given location configuration dict to construct the
-    destination path. If this is a dry run, then don't actually remove anything.
+    prefix in any log entries. Use the given configuration dict to construct the destination path.
+    If this is a dry run, then don't actually remove anything.
     '''
-    dump.remove_database_dumps(make_dump_path(location_config), 'MongoDB', log_prefix, dry_run)
+    dump.remove_database_dumps(make_dump_path(config), 'MongoDB', log_prefix, dry_run)
 
 
-def make_database_dump_pattern(
-    databases, log_prefix, location_config, name=None
-):  # pragma: no cover
+def make_database_dump_pattern(databases, config, log_prefix, name=None):  # pragma: no cover
     '''
-    Given a sequence of configurations dicts, a prefix to log with, a location configuration dict,
+    Given a sequence of database configurations dicts, a configuration dict, a prefix to log with,
     and a database name to match, return the corresponding glob patterns to match the database dump
     in an archive.
     '''
-    return dump.make_database_dump_filename(make_dump_path(location_config), name, hostname='*')
+    return dump.make_database_dump_filename(make_dump_path(config), name, hostname='*')
 
 
 def restore_database_dump(
-    database_config, log_prefix, location_config, dry_run, extract_process, connection_params
+    databases_config, config, log_prefix, database_name, dry_run, extract_process, connection_params
 ):
     '''
-    Restore the given MongoDB database from an extract stream. The database is supplied as a
-    one-element sequence containing a dict describing the database, as per the configuration schema.
-    Use the given log prefix in any log entries. If this is a dry run, then don't actually restore
-    anything. Trigger the given active extract process (an instance of subprocess.Popen) to produce
-    output to consume.
+    Restore the given MongoDB database from an extract stream. The databases are supplied as a
+    sequence containing one dict describing each database (as per the configuration schema), but
+    only the database corresponding to the given database name is restored. Use the configuration
+    dict to construct the destination path and the given log prefix in any log entries. If this is a
+    dry run, then don't actually restore anything. Trigger the given active extract process (an
+    instance of subprocess.Popen) to produce output to consume.
 
     If the extract process is None, then restore the dump from the filesystem rather than from an
     extract stream.
     '''
     dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else ''
 
-    if len(database_config) != 1:
-        raise ValueError('The database configuration value is invalid')
+    try:
+        database = next(
+            database_config
+            for database_config in databases_config
+            if database_config.get('name') == database_name
+        )
+    except StopIteration:
+        raise ValueError(
+            f'A database named "{database_name}" could not be found in the configuration'
+        )
 
-    database = database_config[0]
     dump_filename = dump.make_database_dump_filename(
-        make_dump_path(location_config), database['name'], database.get('hostname')
+        make_dump_path(config), database['name'], database.get('hostname')
     )
 
     restore_command = build_restore_command(
         extract_process, database, dump_filename, connection_params
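The new by-name lookup in isolation (`find_database` is a hypothetical name; in the diff the generator expression is inlined into `restore_database_dump`):

```python
def find_database(databases_config, database_name):
    try:
        # Return the first configured database whose name matches.
        return next(
            database_config
            for database_config in databases_config
            if database_config.get('name') == database_name
        )
    except StopIteration:
        raise ValueError(
            f'A database named "{database_name}" could not be found in the configuration'
        )


assert find_database([{'name': 'users'}, {'name': 'posts'}], 'posts') == {'name': 'posts'}
```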


@ -12,13 +12,11 @@ from borgmatic.hooks import dump
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
def make_dump_path(location_config): # pragma: no cover def make_dump_path(config): # pragma: no cover
''' '''
Make the dump path from the given location configuration and the name of this hook. Make the dump path from the given configuration dict and the name of this hook.
''' '''
return dump.make_database_dump_path( return dump.make_database_dump_path(config.get('borgmatic_source_directory'), 'mysql_databases')
location_config.get('borgmatic_source_directory'), 'mysql_databases'
)
SYSTEM_DATABASE_NAMES = ('information_schema', 'mysql', 'performance_schema', 'sys') SYSTEM_DATABASE_NAMES = ('information_schema', 'mysql', 'performance_schema', 'sys')
@ -106,12 +104,11 @@ def execute_dump_command(
) )
def dump_databases(databases, log_prefix, location_config, dry_run): def dump_databases(databases, config, log_prefix, dry_run):
''' '''
Dump the given MySQL/MariaDB databases to a named pipe. The databases are supplied as a sequence Dump the given MySQL/MariaDB databases to a named pipe. The databases are supplied as a sequence
-    of dicts, one dict describing each database as per the configuration schema. Use the given log
-    prefix in any log entries. Use the given location configuration dict to construct the
-    destination path.
+    of dicts, one dict describing each database as per the configuration schema. Use the given
+    configuration dict to construct the destination path and the given log prefix in any log entries.

     Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
     pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
@@ -122,7 +119,7 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
     logger.info(f'{log_prefix}: Dumping MySQL databases{dry_run_label}')

     for database in databases:
-        dump_path = make_dump_path(location_config)
+        dump_path = make_dump_path(config)
         extra_environment = {'MYSQL_PWD': database['password']} if 'password' in database else None
         dump_database_names = database_names_to_dump(
             database, extra_environment, log_prefix, dry_run
@@ -165,42 +162,46 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
     return [process for process in processes if process]

-def remove_database_dumps(databases, log_prefix, location_config, dry_run):  # pragma: no cover
+def remove_database_dumps(databases, config, log_prefix, dry_run):  # pragma: no cover
     '''
-    Remove all database dump files for this hook regardless of the given databases. Use the log
-    prefix in any log entries. Use the given location configuration dict to construct the
-    destination path. If this is a dry run, then don't actually remove anything.
+    Remove all database dump files for this hook regardless of the given databases. Use the given
+    configuration dict to construct the destination path and the log prefix in any log entries. If
+    this is a dry run, then don't actually remove anything.
     '''
-    dump.remove_database_dumps(make_dump_path(location_config), 'MySQL', log_prefix, dry_run)
+    dump.remove_database_dumps(make_dump_path(config), 'MySQL', log_prefix, dry_run)

-def make_database_dump_pattern(
-    databases, log_prefix, location_config, name=None
-):  # pragma: no cover
+def make_database_dump_pattern(databases, config, log_prefix, name=None):  # pragma: no cover
     '''
-    Given a sequence of configurations dicts, a prefix to log with, a location configuration dict,
-    and a database name to match, return the corresponding glob patterns to match the database dump
-    in an archive.
+    Given a sequence of configurations dicts, a configuration dict, a prefix to log with, and a
+    database name to match, return the corresponding glob patterns to match the database dump in an
+    archive.
     '''
-    return dump.make_database_dump_filename(make_dump_path(location_config), name, hostname='*')
+    return dump.make_database_dump_filename(make_dump_path(config), name, hostname='*')

 def restore_database_dump(
-    database_config, log_prefix, location_config, dry_run, extract_process, connection_params
+    databases_config, config, log_prefix, database_name, dry_run, extract_process, connection_params
 ):
     '''
-    Restore the given MySQL/MariaDB database from an extract stream. The database is supplied as a
-    one-element sequence containing a dict describing the database, as per the configuration schema.
-    Use the given log prefix in any log entries. If this is a dry run, then don't actually restore
-    anything. Trigger the given active extract process (an instance of subprocess.Popen) to produce
-    output to consume.
+    Restore the given MySQL/MariaDB database from an extract stream. The databases are supplied as a
+    sequence containing one dict describing each database (as per the configuration schema), but
+    only the database corresponding to the given database name is restored. Use the given log
+    prefix in any log entries. If this is a dry run, then don't actually restore anything. Trigger
+    the given active extract process (an instance of subprocess.Popen) to produce output to consume.
     '''
     dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else ''
-    if len(database_config) != 1:
-        raise ValueError('The database configuration value is invalid')
-
-    database = database_config[0]
+    try:
+        database = next(
+            database_config
+            for database_config in databases_config
+            if database_config.get('name') == database_name
+        )
+    except StopIteration:
+        raise ValueError(
+            f'A database named "{database_name}" could not be found in the configuration'
+        )

     hostname = connection_params['hostname'] or database.get(
         'restore_hostname', database.get('hostname')
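The new lookup replaces the old one-element-sequence contract with a search by name. Here's a minimal, self-contained sketch of that pattern outside borgmatic; the sample database entries and the requested name are made up for illustration:

```python
# Sketch of the restore-time lookup used above: find the one configured
# database matching the requested name, or fail loudly. Sample data only.
databases_config = [
    {'name': 'users', 'hostname': 'db1.example.org'},
    {'name': 'orders', 'hostname': 'db2.example.org'},
]
database_name = 'orders'

try:
    database = next(
        database_config
        for database_config in databases_config
        if database_config.get('name') == database_name
    )
except StopIteration:
    raise ValueError(
        f'A database named "{database_name}" could not be found in the configuration'
    )

print(database)  # {'name': 'orders', 'hostname': 'db2.example.org'}
```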
View file
@@ -6,7 +6,7 @@ logger = logging.getLogger(__name__)
 def initialize_monitor(
-    ping_url, config_filename, monitoring_log_level, dry_run
+    ping_url, config, config_filename, monitoring_log_level, dry_run
 ):  # pragma: no cover
     '''
     No initialization is necessary for this monitor.
@@ -14,7 +14,7 @@ def initialize_monitor(
     pass

-def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_run):
+def ping_monitor(hook_config, config, config_filename, state, monitoring_log_level, dry_run):
     '''
     Ping the configured Ntfy topic. Use the given configuration filename in any log entries.
     If this is a dry run, then don't actually ping anything.
@@ -75,7 +75,7 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_
 def destroy_monitor(
-    ping_url_or_uuid, config_filename, monitoring_log_level, dry_run
+    ping_url_or_uuid, config, config_filename, monitoring_log_level, dry_run
 ):  # pragma: no cover
     '''
     No destruction is necessary for this monitor.
View file
@@ -13,7 +13,7 @@ EVENTS_API_URL = 'https://events.pagerduty.com/v2/enqueue'
 def initialize_monitor(
-    integration_key, config_filename, monitoring_log_level, dry_run
+    integration_key, config, config_filename, monitoring_log_level, dry_run
 ):  # pragma: no cover
     '''
     No initialization is necessary for this monitor.
@@ -21,7 +21,7 @@ def initialize_monitor(
     pass

-def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_run):
+def ping_monitor(hook_config, config, config_filename, state, monitoring_log_level, dry_run):
     '''
     If this is an error state, create a PagerDuty event with the configured integration key. Use
     the given configuration filename in any log entries. If this is a dry run, then don't actually
@@ -75,7 +75,7 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_
 def destroy_monitor(
-    ping_url_or_uuid, config_filename, monitoring_log_level, dry_run
+    ping_url_or_uuid, config, config_filename, monitoring_log_level, dry_run
 ):  # pragma: no cover
     '''
     No destruction is necessary for this monitor.
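The same signature change lands in each monitoring hook, which keeps dispatch uniform: every hook now receives the whole configuration dict alongside its own hook-specific options. Purely as an illustration — this is not borgmatic's actual dispatch code — a caller could now look like:

```python
# Hypothetical dispatcher sketch: call each configured monitoring hook with
# the full configuration dict in addition to its own hook-specific options.
def call_monitor_hooks(monitor_modules, config, config_filename, state, log_level, dry_run):
    for name, module in monitor_modules.items():
        hook_config = config.get(name)  # e.g. config['ntfy'], config['pagerduty']
        if hook_config is None:
            continue
        module.ping_monitor(hook_config, config, config_filename, state, log_level, dry_run)
```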
View file
@@ -14,12 +14,12 @@ from borgmatic.hooks import dump
 logger = logging.getLogger(__name__)

-def make_dump_path(location_config):  # pragma: no cover
+def make_dump_path(config):  # pragma: no cover
     '''
-    Make the dump path from the given location configuration and the name of this hook.
+    Make the dump path from the given configuration dict and the name of this hook.
     '''
     return dump.make_database_dump_path(
-        location_config.get('borgmatic_source_directory'), 'postgresql_databases'
+        config.get('borgmatic_source_directory'), 'postgresql_databases'
     )
@@ -92,12 +92,12 @@ def database_names_to_dump(database, extra_environment, log_prefix, dry_run):
     )

-def dump_databases(databases, log_prefix, location_config, dry_run):
+def dump_databases(databases, config, log_prefix, dry_run):
     '''
     Dump the given PostgreSQL databases to a named pipe. The databases are supplied as a sequence of
-    dicts, one dict describing each database as per the configuration schema. Use the given log
-    prefix in any log entries. Use the given location configuration dict to construct the
-    destination path.
+    dicts, one dict describing each database as per the configuration schema. Use the given
+    configuration dict to construct the destination path and the given log prefix in any log
+    entries.

     Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
     pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
@@ -111,7 +111,7 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
     for database in databases:
         extra_environment = make_extra_environment(database)
-        dump_path = make_dump_path(location_config)
+        dump_path = make_dump_path(config)
         dump_database_names = database_names_to_dump(
             database, extra_environment, log_prefix, dry_run
         )
@@ -183,35 +183,34 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
     return processes

-def remove_database_dumps(databases, log_prefix, location_config, dry_run):  # pragma: no cover
+def remove_database_dumps(databases, config, log_prefix, dry_run):  # pragma: no cover
     '''
-    Remove all database dump files for this hook regardless of the given databases. Use the log
-    prefix in any log entries. Use the given location configuration dict to construct the
-    destination path. If this is a dry run, then don't actually remove anything.
+    Remove all database dump files for this hook regardless of the given databases. Use the given
+    configuration dict to construct the destination path and the log prefix in any log entries. If
+    this is a dry run, then don't actually remove anything.
     '''
-    dump.remove_database_dumps(make_dump_path(location_config), 'PostgreSQL', log_prefix, dry_run)
+    dump.remove_database_dumps(make_dump_path(config), 'PostgreSQL', log_prefix, dry_run)

-def make_database_dump_pattern(
-    databases, log_prefix, location_config, name=None
-):  # pragma: no cover
+def make_database_dump_pattern(databases, config, log_prefix, name=None):  # pragma: no cover
     '''
-    Given a sequence of configurations dicts, a prefix to log with, a location configuration dict,
-    and a database name to match, return the corresponding glob patterns to match the database dump
-    in an archive.
+    Given a sequence of configurations dicts, a configuration dict, a prefix to log with, and a
+    database name to match, return the corresponding glob patterns to match the database dump in an
+    archive.
     '''
-    return dump.make_database_dump_filename(make_dump_path(location_config), name, hostname='*')
+    return dump.make_database_dump_filename(make_dump_path(config), name, hostname='*')

 def restore_database_dump(
-    database_config, log_prefix, location_config, dry_run, extract_process, connection_params
+    databases_config, config, log_prefix, database_name, dry_run, extract_process, connection_params
 ):
     '''
-    Restore the given PostgreSQL database from an extract stream. The database is supplied as a
-    one-element sequence containing a dict describing the database, as per the configuration schema.
-    Use the given log prefix in any log entries. If this is a dry run, then don't actually restore
-    anything. Trigger the given active extract process (an instance of subprocess.Popen) to produce
-    output to consume.
+    Restore the given PostgreSQL database from an extract stream. The databases are supplied as a
+    sequence containing one dict describing each database (as per the configuration schema), but
+    only the database corresponding to the given database name is restored. Use the given
+    configuration dict to construct the destination path and the given log prefix in any log
+    entries. If this is a dry run, then don't actually restore anything. Trigger the given active
+    extract process (an instance of subprocess.Popen) to produce output to consume.

     If the extract process is None, then restore the dump from the filesystem rather than from an
     extract stream.
@@ -221,10 +220,16 @@ def restore_database_dump(
     '''
     dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else ''
-    if len(database_config) != 1:
-        raise ValueError('The database configuration value is invalid')
-
-    database = database_config[0]
+    try:
+        database = next(
+            database_config
+            for database_config in databases_config
+            if database_config.get('name') == database_name
+        )
+    except StopIteration:
+        raise ValueError(
+            f'A database named "{database_name}" could not be found in the configuration'
+        )

     hostname = connection_params['hostname'] or database.get(
         'restore_hostname', database.get('hostname')
@@ -236,7 +241,7 @@ def restore_database_dump(
     all_databases = bool(database['name'] == 'all')
     dump_filename = dump.make_database_dump_filename(
-        make_dump_path(location_config), database['name'], database.get('hostname')
+        make_dump_path(config), database['name'], database.get('hostname')
     )
     psql_command = shlex.split(database.get('psql_command') or 'psql')
     analyze_command = (
@@ -264,7 +269,7 @@ def restore_database_dump(
         + (() if extract_process else (dump_filename,))
         + tuple(
             itertools.chain.from_iterable(('--schema', schema) for schema in database['schemas'])
-            if database['schemas']
+            if database.get('schemas')
             else ()
         )
     )
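The `database.get('schemas')` guard avoids a `KeyError` when no `schemas` option is configured. For reference, here's a standalone sketch of how that `itertools.chain.from_iterable` expression expands a list of schemas into repeated `--schema` flags; the sample values are illustrative:

```python
import itertools

# Illustrative only: expand ['public', 'audit'] into repeated --schema flags.
database = {'name': 'users', 'schemas': ['public', 'audit']}

schema_flags = tuple(
    itertools.chain.from_iterable(('--schema', schema) for schema in database['schemas'])
    if database.get('schemas')
    else ()
)

print(schema_flags)  # ('--schema', 'public', '--schema', 'audit')
```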
View file
@@ -7,21 +7,21 @@ from borgmatic.hooks import dump
 logger = logging.getLogger(__name__)

-def make_dump_path(location_config):  # pragma: no cover
+def make_dump_path(config):  # pragma: no cover
     '''
-    Make the dump path from the given location configuration and the name of this hook.
+    Make the dump path from the given configuration dict and the name of this hook.
     '''
     return dump.make_database_dump_path(
-        location_config.get('borgmatic_source_directory'), 'sqlite_databases'
+        config.get('borgmatic_source_directory'), 'sqlite_databases'
     )

-def dump_databases(databases, log_prefix, location_config, dry_run):
+def dump_databases(databases, config, log_prefix, dry_run):
     '''
     Dump the given SQLite3 databases to a file. The databases are supplied as a sequence of
-    configuration dicts, as per the configuration schema. Use the given log prefix in any log
-    entries. Use the given location configuration dict to construct the destination path. If this
-    is a dry run, then don't actually dump anything.
+    configuration dicts, as per the configuration schema. Use the given configuration dict to
+    construct the destination path and the given log prefix in any log entries. If this is a dry
+    run, then don't actually dump anything.
     '''
     dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
     processes = []
@@ -38,7 +38,7 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
                 f'{log_prefix}: No SQLite database at {database_path}; An empty database will be created and dumped'
             )

-        dump_path = make_dump_path(location_config)
+        dump_path = make_dump_path(config)
         dump_filename = dump.make_database_dump_filename(dump_path, database['name'])
         if os.path.exists(dump_filename):
             logger.warning(
@@ -65,43 +65,49 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
     return processes

-def remove_database_dumps(databases, log_prefix, location_config, dry_run):  # pragma: no cover
+def remove_database_dumps(databases, config, log_prefix, dry_run):  # pragma: no cover
     '''
     Remove the given SQLite3 database dumps from the filesystem. The databases are supplied as a
-    sequence of configuration dicts, as per the configuration schema. Use the given log prefix in
-    any log entries. Use the given location configuration dict to construct the destination path.
-    If this is a dry run, then don't actually remove anything.
+    sequence of configuration dicts, as per the configuration schema. Use the given configuration
+    dict to construct the destination path and the given log prefix in any log entries. If this is a
+    dry run, then don't actually remove anything.
     '''
-    dump.remove_database_dumps(make_dump_path(location_config), 'SQLite', log_prefix, dry_run)
+    dump.remove_database_dumps(make_dump_path(config), 'SQLite', log_prefix, dry_run)

-def make_database_dump_pattern(
-    databases, log_prefix, location_config, name=None
-):  # pragma: no cover
+def make_database_dump_pattern(databases, config, log_prefix, name=None):  # pragma: no cover
     '''
     Make a pattern that matches the given SQLite3 databases. The databases are supplied as a
     sequence of configuration dicts, as per the configuration schema.
     '''
-    return dump.make_database_dump_filename(make_dump_path(location_config), name)
+    return dump.make_database_dump_filename(make_dump_path(config), name)

 def restore_database_dump(
-    database_config, log_prefix, location_config, dry_run, extract_process, connection_params
+    databases_config, config, log_prefix, database_name, dry_run, extract_process, connection_params
 ):
     '''
-    Restore the given SQLite3 database from an extract stream. The database is supplied as a
-    one-element sequence containing a dict describing the database, as per the configuration schema.
-    Use the given log prefix in any log entries. If this is a dry run, then don't actually restore
-    anything. Trigger the given active extract process (an instance of subprocess.Popen) to produce
-    output to consume.
+    Restore the given SQLite3 database from an extract stream. The databases are supplied as a
+    sequence containing one dict describing each database (as per the configuration schema), but
+    only the database corresponding to the given database name is restored. Use the given log prefix
+    in any log entries. If this is a dry run, then don't actually restore anything. Trigger the
+    given active extract process (an instance of subprocess.Popen) to produce output to consume.
     '''
     dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else ''
-    if len(database_config) != 1:
-        raise ValueError('The database configuration value is invalid')
+    try:
+        database = next(
+            database_config
+            for database_config in databases_config
+            if database_config.get('name') == database_name
+        )
+    except StopIteration:
+        raise ValueError(
+            f'A database named "{database_name}" could not be found in the configuration'
+        )

-    database_path = connection_params['restore_path'] or database_config[0].get(
-        'restore_path', database_config[0].get('path')
-    )
+    database_path = connection_params['restore_path'] or database.get(
+        'restore_path', database.get('path')
+    )

     logger.debug(f'{log_prefix}: Restoring SQLite database at {database_path}{dry_run_label}')
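The restore path resolution above is a chain of fallbacks. A tiny illustrative sketch with made-up values:

```python
# Illustrative fallback chain: a CLI restore path wins, then the configured
# restore_path, then the database's original path.
connection_params = {'restore_path': None}  # e.g. no command-line override given
database = {'name': 'mydb', 'path': '/var/lib/sqlite3/mydb.sqlite'}

database_path = connection_params['restore_path'] or database.get(
    'restore_path', database.get('path')
)

print(database_path)  # /var/lib/sqlite3/mydb.sqlite
```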
View file
@@ -1,5 +1,5 @@
 <h2>Improve this documentation</h2>

 <p>Have an idea on how to make this documentation even better? Use our <a
-href="https://projects.torsion.org/borgmatic-collective/borgmatic/issues">issue tracker</a> to send your
-feedback!</p>
+href="https://torsion.org/borgmatic/#support-and-contributing">issue
+tracker</a> to send your feedback!</p>
View file
@@ -21,11 +21,10 @@ running backups, and specify `after_backup` hooks to perform cleanup steps
 afterwards. Here's an example:

 ```yaml
-hooks:
-    before_backup:
-        - mount /some/filesystem
-    after_backup:
-        - umount /some/filesystem
+before_backup:
+    - mount /some/filesystem
+after_backup:
+    - umount /some/filesystem
 ```

 If your command contains a special YAML character such as a colon, you may
@@ -33,11 +32,23 @@ need to quote the entire string (or use a [multiline
 string](https://yaml-multiline.info/)) to avoid an error:

 ```yaml
-hooks:
-    before_backup:
-        - "echo Backup: start"
+before_backup:
+    - "echo Backup: start"
 ```
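Why the quoting matters: without quotes, YAML treats everything after the colon as a nested mapping rather than part of the command string. A quick demonstration using PyYAML (an assumption here — borgmatic's own parsing stack may differ):

```python
import yaml  # PyYAML, used only to show the parse difference

unquoted = yaml.safe_load('before_backup:\n    - echo Backup: start\n')
quoted = yaml.safe_load('before_backup:\n    - "echo Backup: start"\n')

print(unquoted)  # {'before_backup': [{'echo Backup': 'start'}]} -- a mapping, not a command!
print(quoted)    # {'before_backup': ['echo Backup: start']} -- the intended command string
```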
+There are additional hooks that run before/after other actions as well. For
+instance, `before_prune` runs before a `prune` action for a repository, while
+`after_prune` runs after it.
+
+<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
+these options in the `hooks:` section of your configuration.
+
+<span class="minilink minilink-addedin">New in version 1.7.0</span> The
+`before_actions` and `after_actions` hooks run before/after all the actions
+(like `create`, `prune`, etc.) for each repository. These hooks are a good
+place to run per-repository steps like mounting/unmounting a remote
+filesystem.

 <span class="minilink minilink-addedin">New in version 1.6.0</span> The
 `before_backup` and `after_backup` hooks each run once per repository in a
 configuration file. `before_backup` hooks run right before the `create`
@@ -46,16 +57,6 @@ but not if an error occurs in a previous hook or in the backups themselves.
 (Prior to borgmatic 1.6.0, these hooks instead ran once per configuration file
 rather than once per repository.)

-There are additional hooks that run before/after other actions as well. For
-instance, `before_prune` runs before a `prune` action for a repository, while
-`after_prune` runs after it.
-
-<span class="minilink minilink-addedin">New in version 1.7.0</span> The
-`before_actions` and `after_actions` hooks run before/after all the actions
-(like `create`, `prune`, etc.) for each repository. These hooks are a good
-place to run per-repository steps like mounting/unmounting a remote
-filesystem.

 ## Variable interpolation
@@ -64,11 +65,13 @@ variables into the hook command. Here's an example that assumes you provide a
 separate shell script:

 ```yaml
-hooks:
-    after_prune:
-        - record-prune.sh "{configuration_filename}" "{repository}"
+after_prune:
+    - record-prune.sh "{configuration_filename}" "{repository}"
 ```

+<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
+this option in the `hooks:` section of your configuration.
+
 In this example, when the hook is triggered, borgmatic interpolates runtime
 values into the hook command: the borgmatic configuration filename and the
 paths of the current Borg repository. Here's the full set of supported
@@ -92,13 +95,15 @@ You can also use `before_everything` and `after_everything` hooks to perform
 global setup or cleanup:

 ```yaml
-hooks:
-    before_everything:
-        - set-up-stuff-globally
-    after_everything:
-        - clean-up-stuff-globally
+before_everything:
+    - set-up-stuff-globally
+after_everything:
+    - clean-up-stuff-globally
 ```

+<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
+these options in the `hooks:` section of your configuration.
+
 `before_everything` hooks collected from all borgmatic configuration files run
 once before all configuration files (prior to all actions), but only if there
 is a `create` action. An error encountered during a `before_everything` hook
@@ -109,6 +114,7 @@ but only if there is a `create` action. It runs even if an error occurs during
 a backup or a backup hook, but not if an error occurs during a
 `before_everything` hook.

 ## Error hooks

 borgmatic also runs `on_error` hooks if an error occurs, either when creating
@@ -116,6 +122,7 @@ a backup or running a backup hook. See the [monitoring and alerting
 documentation](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/)
 for more information.

 ## Hook output

 Any output produced by your hooks shows up both at the console and in syslog
@@ -123,6 +130,7 @@ Any output produced by your hooks shows up both at the console and in syslog
 href="https://torsion.org/borgmatic/docs/how-to/inspect-your-backups/">inspecting
 your backups</a>.

 ## Security

 An important security note about hooks: borgmatic executes all hook commands
View file
@@ -44,14 +44,16 @@ file](https://torsion.org/borgmatic/docs/how-to/make-per-application-backups/),
 say at `/etc/borgmatic.d/removable.yaml`:

 ```yaml
-location:
-    source_directories:
-        - /home
+source_directories:
+    - /home

-    repositories:
-        - path: /mnt/removable/backup.borg
+repositories:
+    - path: /mnt/removable/backup.borg
 ```

+<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
+these options in the `location:` section of your configuration.
+
 <span class="minilink minilink-addedin">Prior to version 1.7.10</span> Omit
 the `path:` portion of the `repositories` list.
@@ -60,11 +62,13 @@ the external `findmnt` utility to see whether the drive is mounted before
 proceeding.

 ```yaml
-hooks:
-    before_backup:
-        - findmnt /mnt/removable > /dev/null || exit 75
+before_backup:
+    - findmnt /mnt/removable > /dev/null || exit 75
 ```

+<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put this
+option in the `hooks:` section of your configuration.
+
 What this does is check if the `findmnt` command errors when probing for a
 particular mount point. If it does error, then it returns exit code 75 to
 borgmatic. borgmatic logs the soft failure, skips all further actions in that
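To make the soft-failure contract concrete, here's a rough Python sketch of how a runner might treat exit code 75 — an illustration only, not borgmatic's actual implementation:

```python
import subprocess

SOFT_FAIL_EXIT_CODE = 75  # BSD's EX_TEMPFAIL, treated as "skip, don't error"

# Sketch only: run a before_backup hook command and decide whether to proceed.
result = subprocess.run('findmnt /mnt/removable > /dev/null || exit 75', shell=True)

if result.returncode == SOFT_FAIL_EXIT_CODE:
    print('Soft failure: skipping remaining actions for this configuration file')
elif result.returncode != 0:
    raise RuntimeError(f'Hook failed with exit code {result.returncode}')
```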
@@ -77,27 +81,21 @@ optionally using `before_actions` instead.
 You can imagine a similar check for the sometimes-online server case:

 ```yaml
-location:
-    source_directories:
-        - /home
+source_directories:
+    - /home

-    repositories:
-        - path: ssh://me@buddys-server.org/./backup.borg
+repositories:
+    - path: ssh://me@buddys-server.org/./backup.borg

-hooks:
-    before_backup:
-        - ping -q -c 1 buddys-server.org > /dev/null || exit 75
+before_backup:
+    - ping -q -c 1 buddys-server.org > /dev/null || exit 75
 ```

-<span class="minilink minilink-addedin">Prior to version 1.7.10</span> Omit
-the `path:` portion of the `repositories` list.

 Or to only run backups if the battery level is high enough:

 ```yaml
-hooks:
-    before_backup:
-        - is_battery_percent_at_least.sh 25
+before_backup:
+    - is_battery_percent_at_least.sh 25
 ```

 (Writing the battery script is left as an exercise to the reader.)
View file
@@ -18,31 +18,32 @@ prior to running backups. For example, here is everything you need to dump and
 backup a couple of local PostgreSQL databases and a MySQL/MariaDB database.

 ```yaml
-hooks:
-    postgresql_databases:
-        - name: users
-        - name: orders
-    mysql_databases:
-        - name: posts
+postgresql_databases:
+    - name: users
+    - name: orders
+mysql_databases:
+    - name: posts
 ```

+<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
+these and other database options in the `hooks:` section of your
+configuration.
+
 <span class="minilink minilink-addedin">New in version 1.5.22</span> You can
 also dump MongoDB databases. For example:

 ```yaml
-hooks:
-    mongodb_databases:
-        - name: messages
+mongodb_databases:
+    - name: messages
 ```

 <span class="minilink minilink-addedin">New in version 1.7.9</span>
 Additionally, you can dump SQLite databases. For example:

 ```yaml
-hooks:
-    sqlite_databases:
-        - name: mydb
-          path: /var/lib/sqlite3/mydb.sqlite
+sqlite_databases:
+    - name: mydb
+      path: /var/lib/sqlite3/mydb.sqlite
 ```

 As part of each backup, borgmatic streams a database dump for each configured
@@ -54,7 +55,7 @@ temporary disk space.)

 To support this, borgmatic creates temporary named pipes in `~/.borgmatic` by
 default. To customize this path, set the `borgmatic_source_directory` option
-in the `location` section of borgmatic's configuration.
+in borgmatic's configuration.
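For intuition about how dump streaming over a named pipe works, here's a tiny self-contained sketch — not borgmatic's code; the pipe path and payload are illustrative:

```python
import os
import subprocess
import tempfile

# Illustrative only: create a named pipe, have a writer stream "dump" data
# into it, and read it back as a consumer (Borg plays the consumer role).
pipe_path = os.path.join(tempfile.mkdtemp(), 'mydb')
os.mkfifo(pipe_path)

writer = subprocess.Popen(f'echo "fake database dump" > {pipe_path}', shell=True)

with open(pipe_path) as pipe:
    print(pipe.read())  # fake database dump

writer.wait()
os.remove(pipe_path)
```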
 Also note that using a database hook implicitly enables both the
 `read_special` and `one_file_system` configuration settings (even if they're
@@ -64,35 +65,34 @@ See Limitations below for more on this.
 Here's a more involved example that connects to remote databases:

 ```yaml
-hooks:
-    postgresql_databases:
-        - name: users
-          hostname: database1.example.org
-        - name: orders
-          hostname: database2.example.org
-          port: 5433
-          username: postgres
-          password: trustsome1
-          format: tar
-          options: "--role=someone"
-    mysql_databases:
-        - name: posts
-          hostname: database3.example.org
-          port: 3307
-          username: root
-          password: trustsome1
-          options: "--skip-comments"
-    mongodb_databases:
-        - name: messages
-          hostname: database4.example.org
-          port: 27018
-          username: dbuser
-          password: trustsome1
-          authentication_database: mongousers
-          options: "--ssl"
-    sqlite_databases:
-        - name: mydb
-          path: /var/lib/sqlite3/mydb.sqlite
+postgresql_databases:
+    - name: users
+      hostname: database1.example.org
+    - name: orders
+      hostname: database2.example.org
+      port: 5433
+      username: postgres
+      password: trustsome1
+      format: tar
+      options: "--role=someone"
+mysql_databases:
+    - name: posts
+      hostname: database3.example.org
+      port: 3307
+      username: root
+      password: trustsome1
+      options: "--skip-comments"
+mongodb_databases:
+    - name: messages
+      hostname: database4.example.org
+      port: 27018
+      username: dbuser
+      password: trustsome1
+      authentication_database: mongousers
+      options: "--ssl"
+sqlite_databases:
+    - name: mydb
+      path: /var/lib/sqlite3/mydb.sqlite
 ```

 See your [borgmatic configuration
@@ -106,13 +106,12 @@ listing databases, restoring databases, etc.).
 If you want to dump all databases on a host, use `all` for the database name:

 ```yaml
-hooks:
-    postgresql_databases:
-        - name: all
-    mysql_databases:
-        - name: all
-    mongodb_databases:
-        - name: all
+postgresql_databases:
+    - name: all
+mysql_databases:
+    - name: all
+mongodb_databases:
+    - name: all
 ```

 Note that you may need to use a `username` of the `postgres` superuser for
@@ -120,6 +119,9 @@ this to work with PostgreSQL.

 The SQLite hook in particular does not consider "all" a special database name.

+<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
+these options in the `hooks:` section of your configuration.
+
 <span class="minilink minilink-addedin">New in version 1.7.6</span> With
 PostgreSQL and MySQL, you can optionally dump "all" databases to separate
 files instead of one combined dump file, allowing more convenient restores of
@@ -127,13 +129,12 @@ individual databases. Enable this by specifying your desired database dump
 `format`:

 ```yaml
-hooks:
-    postgresql_databases:
-        - name: all
-          format: custom
-    mysql_databases:
-        - name: all
-          format: sql
+postgresql_databases:
+    - name: all
+      format: custom
+mysql_databases:
+    - name: all
+      format: sql
 ```

 ### Containers

@@ -143,15 +144,17 @@ problem—configure borgmatic to connect to the container's name on its exposed
 port. For instance:

 ```yaml
-hooks:
-    postgresql_databases:
-        - name: users
-          hostname: your-database-container-name
-          port: 5433
-          username: postgres
-          password: trustsome1
+postgresql_databases:
+    - name: users
+      hostname: your-database-container-name
+      port: 5433
+      username: postgres
+      password: trustsome1
 ```

+<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
+these options in the `hooks:` section of your configuration.
+
 But what if borgmatic is running on the host? You can still connect to a
 database container if its ports are properly exposed to the host. For
 instance, when running the database container, you can specify `--publish
@@ -179,8 +182,7 @@ hooks:
           password: trustsome1
 ```

-You can alter the ports in these examples to suit your particular database
-system.
+Alter the ports in these examples to suit your particular database system.

 ### No source directories

@@ -196,6 +198,7 @@ it is a mandatory option there:

 ```yaml
 location:
     source_directories: []
 hooks:
     mysql_databases:
         - name: all
@@ -292,7 +295,7 @@ restore one of them, use the `--database` flag to select one or more
 databases. For instance:

 ```bash
-borgmatic restore --archive host-2023-... --database users
+borgmatic restore --archive host-2023-... --database users --database orders
 ```

 <span class="minilink minilink-addedin">New in version 1.7.6</span> You can
@@ -427,10 +430,9 @@ You can add any additional flags to the `options:` in your database
 configuration. Here's an example:

 ```yaml
-hooks:
-    mysql_databases:
-        - name: posts
-          options: "--single-transaction --quick"
+mysql_databases:
+    - name: posts
+      options: "--single-transaction --quick"
 ```

 ### borgmatic hangs during backup
View file
@@ -65,19 +65,20 @@ configure borgmatic to run repository checks only. Configure this in the
 `consistency` section of borgmatic configuration:

 ```yaml
-consistency:
-    checks:
-        - name: repository
+checks:
+    - name: repository
 ```

+<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
+this option in the `consistency:` section of your configuration.
+
 <span class="minilink minilink-addedin">Prior to version 1.6.2</span> The
 `checks` option was a plain list of strings without the `name:` part, and
 borgmatic ran each configured check every time checks were run. For example:

 ```yaml
-consistency:
-    checks:
-        - repository
+checks:
+    - repository
 ```

@@ -103,14 +104,16 @@ optionally configure checks to run on a periodic basis rather than every time
 borgmatic runs checks. For instance:

 ```yaml
-consistency:
-    checks:
-        - name: repository
-          frequency: 2 weeks
-        - name: archives
-          frequency: 1 month
+checks:
+    - name: repository
+      frequency: 2 weeks
+    - name: archives
+      frequency: 1 month
 ```

+<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
+this option in the `consistency:` section of your configuration.
+
 This tells borgmatic to run the `repository` consistency check at most once
 every two weeks for a given repository and the `archives` check at most once a
 month. The `frequency` value is a number followed by a unit of time, e.g. "3
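As an aside, here's one way such "number plus unit" frequency values could be parsed in Python — a hedged sketch for intuition only, not borgmatic's actual parser, which may treat months and years differently:

```python
import datetime

# Hypothetical parser for values like "2 weeks" or "1 month".
UNITS = {
    'hour': datetime.timedelta(hours=1),
    'day': datetime.timedelta(days=1),
    'week': datetime.timedelta(weeks=1),
    'month': datetime.timedelta(days=30),  # approximation for illustration
    'year': datetime.timedelta(days=365),
}

def parse_frequency(value):
    number, _, unit = value.strip().partition(' ')
    return int(number) * UNITS[unit.rstrip('s')]

print(parse_frequency('2 weeks'))  # 14 days, 0:00:00
```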
@@ -162,18 +165,19 @@ either for a single repository or for all repositories.

 Disabling all consistency checks looks like this:

 ```yaml
-consistency:
-    checks:
-        - name: disabled
+checks:
+    - name: disabled
 ```

+<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
+this option in the `consistency:` section of your configuration.
+
 <span class="minilink minilink-addedin">Prior to version 1.6.2</span> `checks`
 was a plain list of strings without the `name:` part. For instance:

 ```yaml
-consistency:
-    checks:
-        - disabled
+checks:
+    - disabled
 ```

 If you have multiple repositories in your borgmatic configuration file,
@@ -181,9 +185,8 @@ you can keep running consistency checks, but only against a subset of the
 repositories:

 ```yaml
-consistency:
-    check_repositories:
-        - path/of/repository_to_check.borg
+check_repositories:
+    - path/of/repository_to_check.borg
 ```

 Finally, you can override your configuration file's consistency checks, and
View file
@@ -7,7 +7,7 @@ eleventyNavigation:
 ---
 ## Source code

-To get set up to hack on borgmatic, first clone it via HTTPS or SSH:
+To get set up to develop on borgmatic, first clone it via HTTPS or SSH:

 ```bash
 git clone https://projects.torsion.org/borgmatic-collective/borgmatic.git
@@ -21,8 +21,8 @@ git clone ssh://git@projects.torsion.org:3022/borgmatic-collective/borgmatic.git
 Then, install borgmatic
 "[editable](https://pip.pypa.io/en/stable/cli/pip_install/#editable-installs)"
-so that you can run borgmatic commands while you're hacking on them to
-make sure your changes work.
+so that you can run borgmatic actions during development to make sure your
+changes work.

 ```bash
 cd borgmatic
View file
@@ -65,7 +65,7 @@ everything from an archive. To do that, tack on one or more `--path` values.
 For instance:

 ```bash
-borgmatic extract --archive latest --path path/1 path/2
+borgmatic extract --archive latest --path path/1 --path path/2
 ```

 Note that the specified restore paths should not have a leading slash. Like a
View file
@@ -60,7 +60,7 @@ with `--format`. Refer to the [borg list --format
 documentation](https://borgbackup.readthedocs.io/en/stable/usage/list.html#the-format-specifier-syntax)
 for available values.

-*(No borgmatic `list` or `info` actions? Upgrade borgmatic!)*
+(No borgmatic `list` or `info` actions? Upgrade borgmatic!)

 <span class="minilink minilink-addedin">New in borgmatic version 1.7.0</span>
 There are also `rlist` and `rinfo` actions for displaying repository
View file
@@ -12,18 +12,20 @@ it. borgmatic supports this in its configuration by specifying multiple backup
 repositories. Here's an example:

 ```yaml
-location:
-    # List of source directories to backup.
-    source_directories:
-        - /home
-        - /etc
+# List of source directories to backup.
+source_directories:
+    - /home
+    - /etc

-    # Paths of local or remote repositories to backup to.
-    repositories:
-        - path: ssh://k8pDxu32@k8pDxu32.repo.borgbase.com/./repo
-        - path: /var/lib/backups/local.borg
+# Paths of local or remote repositories to backup to.
+repositories:
+    - path: ssh://k8pDxu32@k8pDxu32.repo.borgbase.com/./repo
+    - path: /var/lib/backups/local.borg
 ```

+<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
+these options in the `location:` section of your configuration.
+
 <span class="minilink minilink-addedin">Prior to version 1.7.10</span> Omit
 the `path:` portion of the `repositories` list.
View file
@@ -74,14 +74,15 @@ and borgmatic uses that format to name any new archive it creates. For
 instance:

 ```yaml
-storage:
-    ...
-    archive_name_format: home-directories-{now}
+archive_name_format: home-directories-{now}
 ```

-This means that when borgmatic creates an archive, its name will start with
-the string `home-directories-` and end with a timestamp for its creation time.
-If `archive_name_format` is unspecified, the default is
+<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
+this option in the `storage:` section of your configuration.
+
+This example means that when borgmatic creates an archive, its name will start
+with the string `home-directories-` and end with a timestamp for its creation
+time. If `archive_name_format` is unspecified, the default is
 `{hostname}-{now:%Y-%m-%dT%H:%M:%S.%f}`, meaning your system hostname plus a
 timestamp in a particular format.
@@ -103,11 +104,12 @@ to filter archives when running supported actions.
 For instance, let's say that you have this in your configuration:

 ```yaml
-storage:
-    ...
-    archive_name_format: {hostname}-user-data-{now}
+archive_name_format: {hostname}-user-data-{now}
 ```

+<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
+this option in the `storage:` section of your configuration.
+
 borgmatic considers `{now}` an ephemeral data placeholder that will probably
 change per archive, while `{hostname}` won't. So it turns the example value
 into `{hostname}-user-data-*` and applies it to filter down the set of
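A rough sketch of that derivation — replacing placeholders that vary per archive with a glob wildcard — might look like this in Python; the placeholder list here is illustrative, not borgmatic's exact set:

```python
import re

# Placeholders assumed ephemeral for illustration only.
EPHEMERAL_PLACEHOLDERS = ('now', 'utcnow', 'pid')

def derive_match_archives(archive_name_format):
    # Replace each ephemeral placeholder like {now} or {now:%Y} with "*",
    # leaving stable placeholders like {hostname} for later expansion.
    pattern = '|'.join(re.escape(name) for name in EPHEMERAL_PLACEHOLDERS)
    return re.sub(r'\{(%s)(:[^}]*)?\}' % pattern, '*', archive_name_format)

print(derive_match_archives('{hostname}-user-data-{now}'))  # {hostname}-user-data-*
```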
@@ -123,10 +125,8 @@ If this behavior isn't quite smart enough for your needs, you can use the
 filtering archives. For example:

 ```yaml
-storage:
-    ...
-    archive_name_format: {hostname}-user-data-{now}
-    match_archives: sh:myhost-user-data-*
+archive_name_format: {hostname}-user-data-{now}
+match_archives: sh:myhost-user-data-*
 ```

 For Borg 1.x, use a shell pattern for the `match_archives` value and see the
@@ -156,23 +156,28 @@ them. To achieve this, you can put fragments of common configuration options
 into a file, and then include or inline that file into one or more borgmatic
 configuration files.

-Let's say that you want to include common retention configuration across all
+Let's say that you want to include common consistency check configuration across all
 of your configuration files. You could do that in each configuration file with
 the following:

 ```yaml
-location:
-    ...
-
-retention:
-    !include /etc/borgmatic/common_retention.yaml
+repositories:
+    - path: repo.borg
+
+checks:
+    !include /etc/borgmatic/common_checks.yaml
 ```

-And then the contents of `common_retention.yaml` could be:
+<span class="minilink minilink-addedin">Prior to version 1.8.0</span> These
+options were organized into sections like `location:` and `consistency:`.
+
+The contents of `common_checks.yaml` could be:

 ```yaml
-keep_hourly: 24
-keep_daily: 7
+- name: repository
+  frequency: 3 weeks
+- name: archives
+  frequency: 2 weeks
 ```

 To prevent borgmatic from trying to load these configuration fragments by
@@ -184,18 +189,18 @@ When a configuration include is a relative path, borgmatic loads it from either
 the current working directory or from the directory containing the file doing
 the including.

-Note that this form of include must be a YAML value rather than a key. For
+Note that this form of include must be a value rather than an option name. For
 example, this will not work:

 ```yaml
-location:
-    ...
-
-    # Don't do this. It won't work!
-    !include /etc/borgmatic/common_retention.yaml
+repositories:
+    - path: repo.borg
+
+# Don't do this. It won't work!
+!include /etc/borgmatic/common_checks.yaml
 ```

-But if you do want to merge in a YAML key *and* its values, keep reading!
+But if you do want to merge in an option name *and* its values, keep reading!

 ## Include merging

@@ -203,45 +208,43 @@ But if you do want to merge in an option name *and* its values, keep reading!
 If you need to get even fancier and merge in common configuration options, you
 can perform a YAML merge of included configuration using the YAML `<<` key.
 For instance, here's an example of a main configuration file that pulls in
-retention and consistency options via a single include:
+retention and consistency check options via a single include:

 ```yaml
-<<: !include /etc/borgmatic/common.yaml
-
-location:
-    ...
+repositories:
+    - path: repo.borg
+
+<<: !include /etc/borgmatic/common.yaml
 ```

 This is what `common.yaml` might look like:

 ```yaml
-retention:
-    keep_hourly: 24
-    keep_daily: 7
-
-consistency:
-    checks:
-        - name: repository
+keep_hourly: 24
+keep_daily: 7
+
+checks:
+    - name: repository
+      frequency: 3 weeks
+    - name: archives
+      frequency: 2 weeks
 ```

-Once this include gets merged in, the resulting configuration would have all
-of the `location` options from the original configuration file *and* the
-`retention` and `consistency` options from the include.
+<span class="minilink minilink-addedin">Prior to version 1.8.0</span> These
+options were organized into sections like `retention:` and `consistency:`.

-Prior to borgmatic version 1.6.0, when there's a section collision between the
-local file and the merged include, the local file's section takes precedence.
-So if the `retention` section appears in both the local file and the include
-file, the included `retention` is ignored in favor of the local `retention`.
-But see below about deep merge in version 1.6.0+.
+Once this include gets merged in, the resulting configuration would have all
+of the options from the original configuration file *and* the options from the
+include.

 Note that this `<<` include merging syntax is only for merging in mappings
 (configuration options and their values). But if you'd like to include a
-single value directly, please see the section above about standard includes.
+single value directly, please see above about standard includes.

-Additionally, there is a limitation preventing multiple `<<` include merges
-per section. So for instance, that means you can do one `<<` merge at the
-global level, another `<<` within each configuration section, etc. (This is a
-YAML limitation.)
+Additionally, there is a limitation preventing multiple `<<` include merges
+per file or option value. So for instance, that means you can do one `<<`
+merge at the global level, another `<<` within each nested option value, etc.
+(This is a YAML limitation.)

 ### Deep merge

@@ -252,29 +255,30 @@ at all levels in the two configuration files. This allows you to include
 common configuration—up to full borgmatic configuration files—while overriding
 only the parts you want to customize.

-For instance, here's an example of a main configuration file that pulls in two
-retention options via an include and then overrides one of them locally:
+For instance, here's an example of a main configuration file that pulls in
+options via an include and then overrides one of them locally:

 ```yaml
 <<: !include /etc/borgmatic/common.yaml

-location:
-    ...
-
-retention:
-    keep_daily: 5
+constants:
+    hostname: myhostname
+
+repositories:
+    - path: repo.borg
 ```

 This is what `common.yaml` might look like:

 ```yaml
-retention:
-    keep_hourly: 24
-    keep_daily: 7
+constants:
+    prefix: myprefix
+    hostname: otherhost
 ```

-Once this include gets merged in, the resulting configuration would have a
-`keep_hourly` value of `24` and an overridden `keep_daily` value of `5`.
+Once this include gets merged in, the resulting configuration would have a
+`prefix` value of `myprefix` and an overridden `hostname` value of
+`myhostname`.

 When there's an option collision between the local file and the merged
 include, the local file's option takes precedence.
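Deep merging of this sort is easy to picture as a recursive dict merge where the local side wins on collisions. A hedged Python sketch — not borgmatic's actual merge code, which also handles lists and tags like `!retain`:

```python
def deep_merge(included, local):
    # Recursively merge two configuration dicts; on collisions, the local
    # value takes precedence, matching the behavior described above.
    merged = dict(included)
    for key, local_value in local.items():
        if key in merged and isinstance(merged[key], dict) and isinstance(local_value, dict):
            merged[key] = deep_merge(merged[key], local_value)
        else:
            merged[key] = local_value
    return merged

common = {'constants': {'prefix': 'myprefix', 'hostname': 'otherhost'}}
local = {'constants': {'hostname': 'myhostname'}, 'repositories': [{'path': 'repo.borg'}]}

print(deep_merge(common, local))
# {'constants': {'prefix': 'myprefix', 'hostname': 'myhostname'},
#  'repositories': [{'path': 'repo.borg'}]}
```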
@ -292,21 +296,22 @@ configuration file, you can omit it with an `!omit` tag. For instance:
```yaml ```yaml
<<: !include /etc/borgmatic/common.yaml <<: !include /etc/borgmatic/common.yaml
location: source_directories:
source_directories: - !omit /home
- !omit /home - /var
- /var
``` ```
And `common.yaml` like this: And `common.yaml` like this:
```yaml ```yaml
location: source_directories:
source_directories: - /home
- /home - /etc
- /etc
``` ```
<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
this option in the `location:` section of your configuration.
Once this include gets merged in, the resulting configuration will have a Once this include gets merged in, the resulting configuration will have a
`source_directories` value of `/etc` and `/var`—with `/home` omitted. `source_directories` value of `/etc` and `/var`—with `/home` omitted.
@ -319,16 +324,15 @@ an example of some things not to do:
```yaml ```yaml
<<: !include /etc/borgmatic/common.yaml <<: !include /etc/borgmatic/common.yaml
location: source_directories:
source_directories: # Do not do this! It will not work. "!omit" belongs before "/home".
# Do not do this! It will not work. "!omit" belongs before "/home". - /home !omit
- /home !omit
# Do not do this either! "!omit" only works on scalar list items. # Do not do this either! "!omit" only works on scalar list items.
repositories: !omit repositories: !omit
# Also do not do this for the same reason! This is a list item, but it's # Also do not do this for the same reason! This is a list item, but it's
# not a scalar. # not a scalar.
- !omit path: repo.borg - !omit path: repo.borg
``` ```
Additionally, the `!omit` tag only works in a configuration file that also Additionally, the `!omit` tag only works in a configuration file that also
@ -342,8 +346,8 @@ includes.
### Shallow merge ### Shallow merge
Even though deep merging is generally pretty handy for included files, Even though deep merging is generally pretty handy for included files,
sometimes you want specific sections in the local file to take precedence over sometimes you want specific options in the local file to take precedence over
included sections—without any merging occurring for them. included options—without any merging occurring for them.
<span class="minilink minilink-addedin">New in version 1.7.12</span> That's <span class="minilink minilink-addedin">New in version 1.7.12</span> That's
where the `!retain` tag comes in. Whenever you're merging an included file where the `!retain` tag comes in. Whenever you're merging an included file
@ -357,37 +361,38 @@ on the `retention` mapping:
```yaml ```yaml
<<: !include /etc/borgmatic/common.yaml <<: !include /etc/borgmatic/common.yaml
location: repositories:
repositories: - path: repo.borg
- path: repo.borg
retention: !retain checks: !retain
keep_daily: 5 - name: repository
``` ```
And `common.yaml` like this: And `common.yaml` like this:
```yaml ```yaml
location: repositories:
repositories: - path: common.borg
- path: common.borg
retention: checks:
keep_hourly: 24 - name: archives
keep_daily: 7
``` ```
Once this include gets merged in, the resulting configuration will have a <span class="minilink minilink-addedin">Prior to version 1.8.0</span> These
`keep_daily` value of `5` and nothing else in the `retention` section. That's options were organized into sections like `location:` and `consistency:`.
because the `!retain` tag says to retain the local version of `retention` and
ignore any values coming in from the include. But because the `repositories`
list doesn't have a `!retain` tag, it still gets merged together to contain
both `common.borg` and `repo.borg`.
The `!retain` tag can only be placed on mappings and lists, and it goes right Once this include gets merged in, the resulting configuration will have a
after the name of the option (and its colon) on the same line. The effects of `checks` value with a name of `repository` and no other values. That's because
`!retain` are recursive, meaning that if you place a `!retain` tag on a the `!retain` tag says to retain the local version of `checks` and ignore any
top-level mapping, even deeply nested values within it will not be merged. values coming in from the include. But because the `repositories` list doesn't
have a `!retain` tag, it still gets merged together to contain both
`common.borg` and `repo.borg`.
The `!retain` tag can only be placed on mappings (keys/values) and lists, and
it goes right after the name of the option (and its colon) on the same line.
The effects of `!retain` are recursive, meaning that if you place a `!retain`
tag on a top-level mapping, even deeply nested values within it will not be
merged.
Additionally, the `!retain` tag only works in a configuration file that also Additionally, the `!retain` tag only works in a configuration file that also
performs a merge include with `<<: !include`. It doesn't make sense within, performs a merge include with `<<: !include`. It doesn't make sense within,
@ -434,43 +439,41 @@ Whatever the reason, you can override borgmatic configuration options at the
command-line via the `--override` flag. Here's an example: command-line via the `--override` flag. Here's an example:
```bash ```bash
borgmatic create --override location.remote_path=/usr/local/bin/borg1 borgmatic create --override remote_path=/usr/local/bin/borg1
``` ```
What this does is load your configuration files, and for each one, disregard What this does is load your configuration files, and for each one, disregard
the configured value for the `remote_path` option in the `location` section, the configured value for the `remote_path` option, and use the value of
and use the value of `/usr/local/bin/borg1` instead. `/usr/local/bin/borg1` instead.
You can even override nested values or multiple values at once. For instance:

```bash
borgmatic create --override parent_option.option1=value1 --override parent_option.option2=value2
```

<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Don't
forget to specify the section that an option is in. That looks like a prefix
on the option name, e.g. `location.repositories`.
Note that each value is parsed as an actual YAML string, so you can even set
list values by using brackets. For instance:

```bash
borgmatic create --override repositories=[test1.borg,test2.borg]
```

Or even a single list element:

```bash
borgmatic create --override repositories=[/root/test.borg]
```

If your override value contains special YAML characters like colons, then
you'll need quotes for it to parse correctly:

```bash
borgmatic create --override repositories="['user@server:test.borg']"
```
There is not currently a way to override a single element of a list without

@@ -486,7 +489,9 @@ indentation and a leading dash.)
Be sure to quote your overrides if they contain spaces or other characters
that your shell may interpret.
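
For instance, an override whose value contains a space needs quoting so your
shell passes it through as a single argument. The option value here is just
illustrative:

```bash
borgmatic create --override "archive_name_format={hostname} backup-{now}"
```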
An alternate to command-line overrides is passing in your values via
[environment variables](https://torsion.org/borgmatic/docs/how-to/provide-your-passwords/).
## Constant interpolation

@@ -506,16 +511,19 @@ constants:
    user: foo
    archive_prefix: bar

source_directories:
    - /home/{user}/.config
    - /home/{user}/.ssh

...

archive_name_format: '{archive_prefix}-{now}'
```

<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Don't
forget to specify the section (like `location:` or `storage:`) that any option
is in.
In this example, when borgmatic runs, all instances of `{user}` get replaced
with `foo` and all instances of `{archive_prefix}` get replaced with `bar`.
(And in this particular example, `{now}` doesn't get replaced with anything,

@@ -523,14 +531,13 @@ but gets passed directly to Borg.) After substitution, the logical result
looks something like this:

```yaml
source_directories:
    - /home/foo/.config
    - /home/foo/.ssh

...

archive_name_format: 'bar-{now}'
```
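
Constants aren't limited to these particular options: any value containing
`{user}` gets the same substitution. An illustrative sketch:

```yaml
constants:
    user: foo

repositories:
    - path: /var/lib/backups/{user}.borg
```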
An alternate to constants is passing in your values via [environment
@@ -89,19 +89,20 @@ notifications or take other actions,
something goes wrong. Here's a not-so-useful example:

```yaml
on_error:
    - echo "Error while creating a backup or running a backup hook."
```
<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
this option in the `hooks:` section of your configuration.

The `on_error` hook supports interpolating particular runtime variables into
the hook command. Here's an example that assumes you provide a separate shell
script to handle the alerting:

```yaml
on_error:
    - send-text-message.sh "{configuration_filename}" "{repository}"
```
In this example, when the error occurs, borgmatic interpolates runtime values
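
The `send-text-message.sh` script is something you'd supply yourself;
borgmatic doesn't ship one. A minimal sketch, with a placeholder alerting
command:

```bash
#!/bin/sh
# Arguments arrive via borgmatic's on_error interpolation.
configuration_filename="$1"
repository="$2"

# Swap this echo for your real alerting mechanism (SMS gateway, chat webhook, etc.).
echo "borgmatic error: configuration=${configuration_filename} repository=${repository}"
```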
@@ -135,11 +136,13 @@ URL" for your project. Here's an example:

```yaml
healthchecks:
    ping_url: https://hc-ping.com/addffa72-da17-40ae-be9c-ff591afb942a
```

<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
this option in the `hooks:` section of your configuration.
With this hook in place, borgmatic pings your Healthchecks project when a
backup begins, ends, or errors. Specifically, after the <a
href="https://torsion.org/borgmatic/docs/how-to/add-preparation-and-cleanup-steps-to-backups/">`before_backup`
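
Under the hood, each of those pings is just an HTTP request to the ping URL.
A rough curl equivalent of the start and success signals, purely illustrative
since borgmatic makes these requests for you:

```bash
# Signal that a backup run has started...
curl -fsS --retry 3 https://hc-ping.com/addffa72-da17-40ae-be9c-ff591afb942a/start

# ...and signal success when it completes.
curl -fsS --retry 3 https://hc-ping.com/addffa72-da17-40ae-be9c-ff591afb942a
```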
@@ -179,11 +182,13 @@ API URL" for your monitor. Here's an example:

```yaml
cronitor:
    ping_url: https://cronitor.link/d3x0c1
```

<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
this option in the `hooks:` section of your configuration.
With this hook in place, borgmatic pings your Cronitor monitor when a backup
begins, ends, or errors. Specifically, after the <a
href="https://torsion.org/borgmatic/docs/how-to/add-preparation-and-cleanup-steps-to-backups/">`before_backup`
@@ -208,11 +213,13 @@ URL" for your monitor. Here's an example:

```yaml
cronhub:
    ping_url: https://cronhub.io/start/1f5e3410-254c-11e8-b61d-55875966d031
```

<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
this option in the `hooks:` section of your configuration.
With this hook in place, borgmatic pings your Cronhub monitor when a backup
begins, ends, or errors. Specifically, after the <a
href="https://torsion.org/borgmatic/docs/how-to/add-preparation-and-cleanup-steps-to-backups/">`before_backup`
@@ -251,11 +258,13 @@ Here's an example:

```yaml
pagerduty:
    integration_key: a177cad45bd374409f78906a810a3074
```

<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
this option in the `hooks:` section of your configuration.
With this hook in place, borgmatic creates a PagerDuty event for your service
whenever backups fail. Specifically, if an error occurs during a `create`,
`prune`, `compact`, or `check` action, borgmatic sends an event to PagerDuty
@@ -291,31 +300,34 @@ An example configuration is shown here, with all the available options, including
[tags](https://ntfy.sh/docs/publish/#tags-emojis):

```yaml
ntfy:
    topic: my-unique-topic
    server: https://ntfy.my-domain.com
    start:
        title: A Borgmatic backup started
        message: Watch this space...
        tags: borgmatic
        priority: min
    finish:
        title: A Borgmatic backup completed successfully
        message: Nice!
        tags: borgmatic,+1
        priority: min
    fail:
        title: A Borgmatic backup failed
        message: You should probably fix it
        tags: borgmatic,-1,skull
        priority: max
    states:
        - start
        - finish
        - fail
```

<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
the `ntfy:` option in the `hooks:` section of your configuration.
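
To actually receive these notifications, subscribe to the topic with the ntfy
apps or command-line client. Assuming the self-hosted server above and an
installed `ntfy` client, something like:

```bash
ntfy subscribe ntfy.my-domain.com/my-unique-topic
```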
## Scripting borgmatic

To consume the output of borgmatic in other software, you can include an
@@ -20,10 +20,12 @@ pull your repository passphrase, your database passwords, or any other option
values from environment variables. For instance:

```yaml
encryption_passphrase: ${MY_PASSPHRASE}
```

<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
this option in the `storage:` section of your configuration.
This uses the `MY_PASSPHRASE` environment variable as your encryption
passphrase. Note that the `{` `}` brackets are required. `$MY_PASSPHRASE` by
itself will not work.
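
For example, for a one-off run you might export the variable in the invoking
shell, with a placeholder value here:

```bash
export MY_PASSPHRASE='example passphrase'
borgmatic create
```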
@@ -38,12 +40,14 @@ configuration](https://torsion.org/borgmatic/docs/how-to/backup-your-databases/)
the same approach applies. For example:

```yaml
postgresql_databases:
    - name: users
      password: ${MY_DATABASE_PASSWORD}
```

<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
this option in the `hooks:` section of your configuration.
This uses the `MY_DATABASE_PASSWORD` environment variable as your database
password.
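
As with the passphrase, the variable just needs to exist in borgmatic's
environment at runtime, whether exported from a shell, set by cron, or, for a
one-off run, supplied inline (placeholder value):

```bash
MY_DATABASE_PASSWORD='example' borgmatic create
```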
@@ -53,8 +57,7 @@ password.
If you'd like to set a default for your environment variables, you can do so
with the following syntax:

```yaml
encryption_passphrase: ${MY_PASSPHRASE:-defaultpass}
```
Here, "`defaultpass`" is the default passphrase if the `MY_PASSPHRASE`

@@ -72,8 +75,7 @@ can escape it with a backslash. For instance, if your password is literally
`${A}@!`:

```yaml
encryption_passphrase: \${A}@!
```
### Related features
@@ -140,13 +140,14 @@ use the `--destination` flag, for instance: `--destination
You should edit the configuration file to suit your needs, as the generated
values are only representative. All options are optional except where
indicated, so feel free to ignore anything you don't need. Be sure to use
spaces rather than tabs for indentation; YAML does not allow tabs.

<span class="minilink minilink-addedin">Prior to version 1.8.0</span> The
configuration file was organized into distinct sections, each with a section
name like `location:` or `storage:`. So in older versions of borgmatic, take
care that if you uncomment a particular option, also uncomment its containing
section name—or else borgmatic won't recognize the option.
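
For reference, generating the sample file in the first place looks something
like this in borgmatic 1.7.x; the destination path is illustrative:

```bash
generate-borgmatic-config --destination /etc/borgmatic/config.yaml
```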
You can get the same sample configuration file from the [configuration
reference](https://torsion.org/borgmatic/docs/reference/configuration/), the
@@ -131,22 +131,23 @@ Let's say your original borgmatic repository configuration file looks something
like this:

```yaml
repositories:
    - path: original.borg
```

<span class="minilink minilink-addedin">Prior to version 1.7.10</span> Omit
the `path:` portion of the `repositories` list.
Change it to a new (not yet created) repository path:

```yaml
repositories:
    - path: upgraded.borg
```

<span class="minilink minilink-addedin">Prior to version 1.8.0</span> This
option was found in the `location:` section of your configuration.

<span class="minilink minilink-addedin">Prior to version 1.7.10</span> Omit
the `path:` portion of the `repositories` list.
Then, run the `rcreate` action (formerly `init`) to create that new Borg 2
repository:
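
A representative invocation might look like the following; the exact flags
depend on your borgmatic version, so treat this as a sketch and consult
`borgmatic rcreate --help`:

```bash
borgmatic rcreate --verbosity 1 --encryption repokey-blake2-aes-ocb \
    --source-repository original.borg --repository upgraded.borg
```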
@@ -15,7 +15,7 @@ listed here do not have equivalents in borgmatic's [configuration
file](https://torsion.org/borgmatic/docs/reference/configuration/).

If you're using an older version of borgmatic, some of these flags may not be
present in that version and you should instead use `borgmatic --help` or
`borgmatic [action name] --help` (where `[action name]` is the name of an
action like `list`, `create`, etc.).
@@ -22,7 +22,7 @@ def generate_configuration(config_path, repository_path):
        .replace('- /home', f'- {config_path}')
        .replace('- /etc', '')
        .replace('- /var/log/syslog*', '')
        + 'encryption_passphrase: "test"'
    )
    config_file = open(config_path, 'w')
    config_file.write(config)
@@ -22,60 +22,57 @@ def write_configuration(
    storing database dumps, dump format (for PostgreSQL), and encryption passphrase.
    '''
    config = f'''
source_directories:
    - {source_directory}
repositories:
    - path: {repository_path}
borgmatic_source_directory: {borgmatic_source_directory}

encryption_passphrase: "test"

postgresql_databases:
    - name: test
      hostname: postgresql
      username: postgres
      password: test
      format: {postgresql_dump_format}
    - name: all
      hostname: postgresql
      username: postgres
      password: test
    - name: all
      format: custom
      hostname: postgresql
      username: postgres
      password: test
mysql_databases:
    - name: test
      hostname: mysql
      username: root
      password: test
    - name: all
      hostname: mysql
      username: root
      password: test
    - name: all
      format: sql
      hostname: mysql
      username: root
      password: test
mongodb_databases:
    - name: test
      hostname: mongodb
      username: root
      password: test
      authentication_database: admin
      format: {mongodb_dump_format}
    - name: all
      hostname: mongodb
      username: root
      password: test
sqlite_databases:
    - name: sqlite_test
      path: /tmp/sqlite_test.db
'''

    with open(config_path, 'w') as config_file:
@@ -96,51 +93,48 @@ def write_custom_restore_configuration(
    restore_username, restore_password and restore_path.
    '''
    config = f'''
source_directories:
    - {source_directory}
repositories:
    - path: {repository_path}
borgmatic_source_directory: {borgmatic_source_directory}

encryption_passphrase: "test"

postgresql_databases:
    - name: test
      hostname: postgresql
      username: postgres
      password: test
      format: {postgresql_dump_format}
      restore_hostname: postgresql2
      restore_port: 5433
      restore_username: postgres2
      restore_password: test2
mysql_databases:
    - name: test
      hostname: mysql
      username: root
      password: test
      restore_hostname: mysql2
      restore_port: 3307
      restore_username: root
      restore_password: test2
mongodb_databases:
    - name: test
      hostname: mongodb
      username: root
      password: test
      authentication_database: admin
      format: {mongodb_dump_format}
      restore_hostname: mongodb2
      restore_port: 27018
      restore_username: root2
      restore_password: test2
sqlite_databases:
    - name: sqlite_test
      path: /tmp/sqlite_test.db
      restore_path: /tmp/sqlite_test2.db
'''

    with open(config_path, 'w') as config_file:
@@ -161,23 +155,20 @@ def write_simple_custom_restore_configuration(
    these options for PostgreSQL.
    '''
    config = f'''
source_directories:
    - {source_directory}
repositories:
    - path: {repository_path}
borgmatic_source_directory: {borgmatic_source_directory}

encryption_passphrase: "test"

postgresql_databases:
    - name: test
      hostname: postgresql
      username: postgres
      password: test
      format: {postgresql_dump_format}
'''

    with open(config_path, 'w') as config_file:
@@ -21,7 +21,7 @@ def generate_configuration(config_path, repository_path):
        .replace('- /home', f'- {config_path}')
        .replace('- /etc', '')
        .replace('- /var/log/syslog*', '')
        + 'encryption_passphrase: "test"'
    )
    config_file = open(config_path, 'w')
    config_file.write(config)
@@ -38,5 +38,4 @@ def test_validate_config_command_with_show_flag_displays_configuration():
        f'validate-borgmatic-config --config {config_path} --show'.split(' ')
    ).decode(sys.stdout.encoding)

    assert 'repositories:' in output
@@ -84,7 +84,6 @@ def test_prune_archives_command_does_not_duplicate_flags_or_raise():
        False,
        'repo',
        {},
        '2.3.4',
        fuzz_argument(arguments, argument_name),
        argparse.Namespace(log_json=False),
@@ -17,10 +17,10 @@ def test_parse_arguments_with_no_arguments_uses_defaults():
    assert global_arguments.log_file_verbosity == 0


def test_parse_arguments_with_multiple_config_flags_parses_as_list():
    flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])

    arguments = module.parse_arguments('--config', 'myconfig', '--config', 'otherconfig')
    global_arguments = arguments['global']

    assert global_arguments.config_paths == ['myconfig', 'otherconfig']
@@ -109,20 +109,11 @@ def test_parse_arguments_with_single_override_parses():
    assert global_arguments.overrides == ['foo.bar=baz']


def test_parse_arguments_with_multiple_overrides_flags_parses():
    flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])

    arguments = module.parse_arguments(
        '--override', 'foo.bar=baz', '--override', 'foo.quux=7', '--override', 'this.that=8'
    )
    global_arguments = arguments['global']
@@ -13,43 +13,43 @@ def test_insert_newline_before_comment_does_not_raise():
    config = module.yaml.comments.CommentedMap([(field_name, 33)])
    config.yaml_set_comment_before_after_key(key=field_name, before='Comment')

    module.insert_newline_before_comment(config, field_name)


def test_comment_out_line_skips_blank_line():
    line = '    \n'

    assert module.comment_out_line(line) == line


def test_comment_out_line_skips_already_commented_out_line():
    line = '    # foo'

    assert module.comment_out_line(line) == line


def test_comment_out_line_comments_section_name():
    line = 'figgy-pudding:'

    assert module.comment_out_line(line) == '# ' + line


def test_comment_out_line_comments_indented_option():
    line = '    enabled: true'

    assert module.comment_out_line(line) == '    # enabled: true'


def test_comment_out_line_comments_twice_indented_option():
    line = '        - item'

    assert module.comment_out_line(line) == '        # - item'


def test_comment_out_optional_configuration_comments_optional_config_only():
    # The "# COMMENT_OUT" comment is a sentinel used to express that the following key is optional.
    # It's stripped out of the final output.
    flexmock(module).comment_out_line = lambda line: '# ' + line
    config = '''
# COMMENT_OUT
foo:
@@ -58,14 +58,13 @@ foo:
        - baz
        - quux

repositories:
    - one
    - two

# This comment should be kept.
# COMMENT_OUT
other: thing
'''

    # flake8: noqa

@@ -75,16 +74,15 @@
#         - baz
#         - quux

repositories:
    - one
    - two

# This comment should be kept.
# other: thing
'''

    assert module.comment_out_optional_configuration(config.strip()) == expected_config.strip()
def test_render_configuration_converts_configuration_to_yaml_string():

@@ -204,10 +202,10 @@ def test_generate_sample_configuration_does_not_raise():
    builtins = flexmock(sys.modules['builtins'])
    builtins.should_receive('open').with_args('schema.yaml').and_return('')
    flexmock(module.yaml).should_receive('round_trip_load')
    flexmock(module).should_receive('schema_to_sample_configuration')
    flexmock(module).should_receive('merge_source_configuration_into_destination')
    flexmock(module).should_receive('render_configuration')
    flexmock(module).should_receive('comment_out_optional_configuration')
    flexmock(module).should_receive('write_configuration')

    module.generate_sample_configuration(False, None, 'dest.yaml', 'schema.yaml')

@@ -219,10 +217,10 @@ def test_generate_sample_configuration_with_source_filename_does_not_raise():
    flexmock(module.yaml).should_receive('round_trip_load')
    flexmock(module.load).should_receive('load_configuration')
    flexmock(module.normalize).should_receive('normalize')
    flexmock(module).should_receive('schema_to_sample_configuration')
    flexmock(module).should_receive('merge_source_configuration_into_destination')
    flexmock(module).should_receive('render_configuration')
    flexmock(module).should_receive('comment_out_optional_configuration')
    flexmock(module).should_receive('write_configuration')

    module.generate_sample_configuration(False, 'source.yaml', 'dest.yaml', 'schema.yaml')

@@ -232,10 +230,10 @@ def test_generate_sample_configuration_with_dry_run_does_not_write_file():
    builtins = flexmock(sys.modules['builtins'])
    builtins.should_receive('open').with_args('schema.yaml').and_return('')
    flexmock(module.yaml).should_receive('round_trip_load')
    flexmock(module).should_receive('schema_to_sample_configuration')
    flexmock(module).should_receive('merge_source_configuration_into_destination')
    flexmock(module).should_receive('render_configuration')
    flexmock(module).should_receive('comment_out_optional_configuration')
    flexmock(module).should_receive('write_configuration').never()

    module.generate_sample_configuration(True, None, 'dest.yaml', 'schema.yaml')
@@ -40,35 +40,32 @@ def mock_config_and_schema(config_yaml, schema_yaml=None):
def test_parse_configuration_transforms_file_into_mapping():
    mock_config_and_schema(
        '''
        source_directories:
            - /home
            - /etc

        repositories:
            - path: hostname.borg

        keep_minutely: 60
        keep_hourly: 24
        keep_daily: 7

        checks:
            - name: repository
            - name: archives
        '''
    )
    config, logs = module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml')

    assert config == {
        'source_directories': ['/home', '/etc'],
        'repositories': [{'path': 'hostname.borg'}],
        'keep_daily': 7,
        'keep_hourly': 24,
        'keep_minutely': 60,
        'checks': [{'name': 'repository'}, {'name': 'archives'}],
    }
    assert logs == []
@@ -78,22 +75,19 @@ def test_parse_configuration_passes_through_quoted_punctuation():
    mock_config_and_schema(
        f'''
        source_directories:
            - "/home/{escaped_punctuation}"

        repositories:
            - path: test.borg
        '''
    )
    config, logs = module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml')

    assert config == {
        'source_directories': [f'/home/{string.punctuation}'],
        'repositories': [{'path': 'test.borg'}],
    }
    assert logs == []
@@ -101,26 +95,22 @@ def test_parse_configuration_passes_through_quoted_punctuation():
def test_parse_configuration_with_schema_lacking_examples_does_not_raise():
    mock_config_and_schema(
        '''
        source_directories:
            - /home

        repositories:
            - path: hostname.borg
        ''',
        '''
        map:
            source_directories:
                required: true
                seq:
                    - type: scalar
            repositories:
                required: true
                seq:
                    - type: scalar
        ''',
    )
@@ -130,12 +120,11 @@ def test_parse_configuration_with_schema_lacking_examples_does_not_raise():
def test_parse_configuration_inlines_include():
    mock_config_and_schema(
        '''
        source_directories:
            - /home

        repositories:
            - path: hostname.borg

        retention:
            !include include.yaml
        '''
    )

@@ -154,25 +143,25 @@ def test_parse_configuration_inlines_include():
    config, logs = module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml')

    assert config == {
        'source_directories': ['/home'],
        'repositories': [{'path': 'hostname.borg'}],
        'keep_daily': 7,
        'keep_hourly': 24,
    }
    assert len(logs) == 1
def test_parse_configuration_merges_include():
    mock_config_and_schema(
        '''
        source_directories:
            - /home

        repositories:
            - path: hostname.borg

        keep_daily: 1
        <<: !include include.yaml
        '''
    )
    builtins = flexmock(sys.modules['builtins'])

@@ -188,8 +177,10 @@ def test_parse_configuration_merges_include():
    config, logs = module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml')

    assert config == {
        'source_directories': ['/home'],
        'repositories': [{'path': 'hostname.borg'}],
        'keep_daily': 1,
        'keep_hourly': 24,
    }
    assert logs == []
@@ -218,10 +209,9 @@ def test_parse_configuration_raises_for_syntax_error():
def test_parse_configuration_raises_for_validation_error():
    mock_config_and_schema(
        '''
        source_directories: yes

        repositories:
            - path: hostname.borg
        '''
    )
@@ -232,14 +222,13 @@ def test_parse_configuration_raises_for_validation_error():
def test_parse_configuration_applies_overrides():
    mock_config_and_schema(
        '''
        source_directories:
            - /home

        repositories:
            - path: hostname.borg

        local_path: borg1
        '''
    )

@@ -248,11 +237,9 @@ def test_parse_configuration_applies_overrides():
    )

    assert config == {
        'source_directories': ['/home'],
        'repositories': [{'path': 'hostname.borg'}],
        'local_path': 'borg2',
    }
    assert logs == []
@@ -274,10 +261,8 @@ def test_parse_configuration_applies_normalization():
    config, logs = module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml')

    assert config == {
        'source_directories': ['/home'],
        'repositories': [{'path': 'hostname.borg'}],
        'exclude_if_present': ['.nobackup'],
    }
    assert logs
@@ -10,7 +10,7 @@ def test_destroy_monitor_removes_healthchecks_handler():
    original_handlers = list(logger.handlers)
    logger.addHandler(module.Forgetful_buffering_handler(byte_capacity=100, log_level=1))

    module.destroy_monitor(flexmock(), flexmock(), flexmock(), flexmock(), flexmock())

    assert logger.handlers == original_handlers

@@ -19,6 +19,6 @@ def test_destroy_monitor_without_healthchecks_handler_does_not_raise():
    logger = logging.getLogger()
    original_handlers = list(logger.handlers)

    module.destroy_monitor(flexmock(), flexmock(), flexmock(), flexmock(), flexmock())

    assert logger.handlers == original_handlers
@@ -14,7 +14,7 @@ def test_run_borg_does_not_raise():
    module.run_borg(
        repository={'path': 'repos'},
        config={},
        local_borg_version=None,
        global_arguments=flexmock(log_json=False),
        borg_arguments=borg_arguments,
@@ -11,7 +11,7 @@ def test_run_break_lock_does_not_raise():
    module.run_break_lock(
        repository={'path': 'repo'},
        config={},
        local_borg_version=None,
        break_lock_arguments=break_lock_arguments,
        global_arguments=flexmock(),
@@ -5,9 +5,6 @@ from borgmatic.actions import check as module
def test_run_check_calls_hooks_for_configured_repository():
    flexmock(module.logger).answer = lambda message: None
    flexmock(module.borgmatic.config.validate).should_receive('repositories_match').never()
    flexmock(module.borgmatic.borg.check).should_receive('check_archives').once()
    flexmock(module.borgmatic.hooks.command).should_receive('execute_hook').times(2)

@@ -23,10 +20,7 @@ def test_run_check_calls_hooks_for_configured_repository():
    module.run_check(
        config_filename='test.yaml',
        repository={'path': 'repo'},
        config={'repositories': ['repo']},
        hook_context={},
        local_borg_version=None,
        check_arguments=check_arguments,

@@ -54,10 +48,7 @@ def test_run_check_runs_with_selected_repository():
    module.run_check(
        config_filename='test.yaml',
        repository={'path': 'repo'},
        config={'repositories': ['repo']},
        hook_context={},
        local_borg_version=None,
        check_arguments=check_arguments,

@@ -85,10 +76,7 @@ def test_run_check_bails_if_repository_does_not_match():
    module.run_check(
        config_filename='test.yaml',
        repository={'path': 'repo'},
        config={'repositories': ['repo']},
        hook_context={},
        local_borg_version=None,
        check_arguments=check_arguments,
@@ -17,9 +17,7 @@ def test_compact_actions_calls_hooks_for_configured_repository():
    module.run_compact(
        config_filename='test.yaml',
        repository={'path': 'repo'},
        config={},
        hook_context={},
        local_borg_version=None,
        compact_arguments=compact_arguments,

@@ -45,9 +43,7 @@ def test_compact_runs_with_selected_repository():
    module.run_compact(
        config_filename='test.yaml',
        repository={'path': 'repo'},
        config={},
        hook_context={},
        local_borg_version=None,
        compact_arguments=compact_arguments,

@@ -73,9 +69,7 @@ def test_compact_bails_if_repository_does_not_match():
    module.run_compact(
        config_filename='test.yaml',
        repository={'path': 'repo'},
        config={},
        hook_context={},
        local_borg_version=None,
        compact_arguments=compact_arguments,
@@ -28,9 +28,7 @@ def test_run_create_executes_and_calls_hooks_for_configured_repository():
    module.run_create(
        config_filename='test.yaml',
        repository={'path': 'repo'},
        config={},
        hook_context={},
        local_borg_version=None,
        create_arguments=create_arguments,

@@ -49,6 +47,11 @@ def test_run_create_runs_with_selected_repository():
    ).once().and_return(True)
    flexmock(module.borgmatic.borg.create).should_receive('create_archive').once()
    flexmock(module).should_receive('create_borgmatic_manifest').once()
    flexmock(module.borgmatic.hooks.command).should_receive('execute_hook').times(2)
    flexmock(module.borgmatic.hooks.dispatch).should_receive('call_hooks').and_return({})
    flexmock(module.borgmatic.hooks.dispatch).should_receive(
        'call_hooks_even_if_unconfigured'
    ).and_return({})
    create_arguments = flexmock(
        repository=flexmock(),
        progress=flexmock(),

@@ -62,9 +65,7 @@ def test_run_create_runs_with_selected_repository():
    module.run_create(
        config_filename='test.yaml',
        repository={'path': 'repo'},
        config={},
        hook_context={},
        local_borg_version=None,
        create_arguments=create_arguments,

@@ -96,9 +97,7 @@ def test_run_create_bails_if_repository_does_not_match():
    module.run_create(
        config_filename='test.yaml',
        repository='repo',
        config={},
        hook_context={},
        local_borg_version=None,
        create_arguments=create_arguments,
@@ -20,7 +20,7 @@ def test_run_export_tar_does_not_raise():
    module.run_export_tar(
        repository={'path': 'repo'},
        config={},
        local_borg_version=None,
        export_tar_arguments=export_tar_arguments,
        global_arguments=global_arguments,
@@ -21,9 +21,7 @@ def test_run_extract_calls_hooks():
    module.run_extract(
        config_filename='test.yaml',
        repository={'path': 'repo'},
        config={'repositories': ['repo']},
        hook_context={},
        local_borg_version=None,
        extract_arguments=extract_arguments,
@@ -18,7 +18,7 @@ def test_run_info_does_not_raise():
    list(
        module.run_info(
            repository={'path': 'repo'},
            config={},
            local_borg_version=None,
            info_arguments=info_arguments,
            global_arguments=flexmock(log_json=False),
@@ -18,7 +18,7 @@ def test_run_list_does_not_raise():
    list(
        module.run_list(
            repository={'path': 'repo'},
            config={},
            local_borg_version=None,
            list_arguments=list_arguments,
            global_arguments=flexmock(log_json=False),
@@ -18,7 +18,7 @@ def test_run_mount_does_not_raise():
    module.run_mount(
        repository={'path': 'repo'},
        config={},
        local_borg_version=None,
        mount_arguments=mount_arguments,
        global_arguments=flexmock(log_json=False),
@@ -14,9 +14,7 @@ def test_run_prune_calls_hooks_for_configured_repository():
    module.run_prune(
        config_filename='test.yaml',
        repository={'path': 'repo'},
        config={},
        hook_context={},
        local_borg_version=None,
        prune_arguments=prune_arguments,

@@ -39,9 +37,7 @@ def test_run_prune_runs_with_selected_repository():
    module.run_prune(
        config_filename='test.yaml',
        repository={'path': 'repo'},
        config={},
        hook_context={},
        local_borg_version=None,
        prune_arguments=prune_arguments,

@@ -64,9 +60,7 @@ def test_run_prune_bails_if_repository_does_not_match():
    module.run_prune(
        config_filename='test.yaml',
        repository='repo',
        config={},
        hook_context={},
        local_borg_version=None,
        prune_arguments=prune_arguments,
@@ -19,7 +19,7 @@ def test_run_rcreate_does_not_raise():
    module.run_rcreate(
        repository={'path': 'repo'},
        config={},
        local_borg_version=None,
        rcreate_arguments=arguments,
        global_arguments=flexmock(dry_run=False),

@@ -46,7 +46,7 @@ def test_run_rcreate_bails_if_repository_does_not_match():
    module.run_rcreate(
        repository={'path': 'repo'},
        config={},
        local_borg_version=None,
        rcreate_arguments=arguments,
        global_arguments=flexmock(dry_run=False),
@@ -6,7 +6,7 @@ import borgmatic.actions.restore as module
def test_get_configured_database_matches_database_by_name():
    assert module.get_configured_database(
        config={
            'other_databases': [{'name': 'other'}],
            'postgresql_databases': [{'name': 'foo'}, {'name': 'bar'}],
        },

@@ -18,7 +18,7 @@ def test_get_configured_database_matches_database_by_name():
def test_get_configured_database_matches_nothing_when_database_name_not_configured():
    assert module.get_configured_database(
        config={'postgresql_databases': [{'name': 'foo'}, {'name': 'bar'}]},
        archive_database_names={'postgresql_databases': ['foo']},
        hook_name='postgresql_databases',
        database_name='quux',

@@ -27,7 +27,7 @@ def test_get_configured_database_matches_nothing_when_database_name_not_configured():
def test_get_configured_database_matches_nothing_when_database_name_not_in_archive():
    assert module.get_configured_database(
        config={'postgresql_databases': [{'name': 'foo'}, {'name': 'bar'}]},
        archive_database_names={'postgresql_databases': ['bar']},
        hook_name='postgresql_databases',
        database_name='foo',

@@ -36,7 +36,7 @@ def test_get_configured_database_matches_nothing_when_database_name_not_in_archive():
def test_get_configured_database_matches_database_by_configuration_database_name():
    assert module.get_configured_database(
        config={'postgresql_databases': [{'name': 'all'}, {'name': 'bar'}]},
        archive_database_names={'postgresql_databases': ['foo']},
        hook_name='postgresql_databases',
        database_name='foo',

@@ -46,7 +46,7 @@ def test_get_configured_database_matches_database_by_configuration_database_name():
def test_get_configured_database_with_unspecified_hook_matches_database_by_name():
    assert module.get_configured_database(
        config={
            'other_databases': [{'name': 'other'}],
            'postgresql_databases': [{'name': 'foo'}, {'name': 'bar'}],
        },

@@ -69,8 +69,7 @@ def test_collect_archive_database_names_parses_archive_paths():
    archive_database_names = module.collect_archive_database_names(
        repository={'path': 'repo'},
        archive='archive',
        config={'borgmatic_source_directory': '.borgmatic'},
        local_borg_version=flexmock(),
        global_arguments=flexmock(log_json=False),
        local_path=flexmock(),

@@ -95,8 +94,7 @@ def test_collect_archive_database_names_parses_directory_format_archive_paths():
    archive_database_names = module.collect_archive_database_names(
        repository={'path': 'repo'},
        archive='archive',
        config={'borgmatic_source_directory': '.borgmatic'},
        local_borg_version=flexmock(),
        global_arguments=flexmock(log_json=False),
        local_path=flexmock(),

@@ -117,8 +115,7 @@ def test_collect_archive_database_names_skips_bad_archive_paths():
    archive_database_names = module.collect_archive_database_names(
        repository={'path': 'repo'},
        archive='archive',
        config={'borgmatic_source_directory': '.borgmatic'},
        local_borg_version=flexmock(),
        global_arguments=flexmock(log_json=False),
        local_path=flexmock(),

@@ -231,9 +228,7 @@ def test_run_restore_restores_each_database():
    ).and_return(('postgresql_databases', {'name': 'bar'}))
    flexmock(module).should_receive('restore_single_database').with_args(
        repository=object,
        config=object,
        local_borg_version=object,
        global_arguments=object,
        local_path=object,

@@ -245,9 +240,7 @@ def test_run_restore_restores_each_database():
    ).once()
    flexmock(module).should_receive('restore_single_database').with_args(
        repository=object,
        config=object,
        local_borg_version=object,
        global_arguments=object,
        local_path=object,

@@ -261,9 +254,7 @@ def test_run_restore_restores_each_database():
    module.run_restore(
        repository={'path': 'repo'},
        config=flexmock(),
        local_borg_version=flexmock(),
        restore_arguments=flexmock(
            repository='repo',

@@ -293,9 +284,7 @@ def test_run_restore_bails_for_non_matching_repository():
    module.run_restore(
        repository={'path': 'repo'},
        config=flexmock(),
        local_borg_version=flexmock(),
        restore_arguments=flexmock(repository='repo', archive='archive', databases=flexmock()),
        global_arguments=flexmock(dry_run=False),

@@ -317,19 +306,19 @@ def test_run_restore_restores_database_configured_with_all_name():
    flexmock(module).should_receive('collect_archive_database_names').and_return(flexmock())
    flexmock(module).should_receive('find_databases_to_restore').and_return(restore_names)
    flexmock(module).should_receive('get_configured_database').with_args(
        config=object,
        archive_database_names=object,
        hook_name='postgresql_databases',
        database_name='foo',
    ).and_return(('postgresql_databases', {'name': 'foo'}))
    flexmock(module).should_receive('get_configured_database').with_args(
        config=object,
        archive_database_names=object,
        hook_name='postgresql_databases',
        database_name='bar',
    ).and_return((None, None))
    flexmock(module).should_receive('get_configured_database').with_args(
        config=object,
        archive_database_names=object,
        hook_name='postgresql_databases',
        database_name='bar',

@@ -337,9 +326,7 @@ def test_run_restore_restores_database_configured_with_all_name():
    ).and_return(('postgresql_databases', {'name': 'bar'}))
    flexmock(module).should_receive('restore_single_database').with_args(
        repository=object,
        config=object,
        local_borg_version=object,
        global_arguments=object,
        local_path=object,

@@ -351,9 +338,7 @@ def test_run_restore_restores_database_configured_with_all_name():
    ).once()
    flexmock(module).should_receive('restore_single_database').with_args(
        repository=object,
        config=object,
        local_borg_version=object,
        global_arguments=object,
        local_path=object,

@@ -367,9 +352,7 @@ def test_run_restore_restores_database_configured_with_all_name():
    module.run_restore(
        repository={'path': 'repo'},
        config=flexmock(),
        local_borg_version=flexmock(),
        restore_arguments=flexmock(
            repository='repo',

@@ -401,19 +384,19 @@ def test_run_restore_skips_missing_database():
    flexmock(module).should_receive('collect_archive_database_names').and_return(flexmock())
    flexmock(module).should_receive('find_databases_to_restore').and_return(restore_names)
    flexmock(module).should_receive('get_configured_database').with_args(
        config=object,
        archive_database_names=object,
        hook_name='postgresql_databases',
        database_name='foo',
    ).and_return(('postgresql_databases', {'name': 'foo'}))
    flexmock(module).should_receive('get_configured_database').with_args(
        config=object,
        archive_database_names=object,
        hook_name='postgresql_databases',
        database_name='bar',
    ).and_return((None, None))
    flexmock(module).should_receive('get_configured_database').with_args(
hooks=object, config=object,
archive_database_names=object, archive_database_names=object,
hook_name='postgresql_databases', hook_name='postgresql_databases',
database_name='bar', database_name='bar',
@ -421,9 +404,7 @@ def test_run_restore_skips_missing_database():
).and_return((None, None)) ).and_return((None, None))
flexmock(module).should_receive('restore_single_database').with_args( flexmock(module).should_receive('restore_single_database').with_args(
repository=object, repository=object,
location=object, config=object,
storage=object,
hooks=object,
local_borg_version=object, local_borg_version=object,
global_arguments=object, global_arguments=object,
local_path=object, local_path=object,
@ -435,9 +416,7 @@ def test_run_restore_skips_missing_database():
).once() ).once()
flexmock(module).should_receive('restore_single_database').with_args( flexmock(module).should_receive('restore_single_database').with_args(
repository=object, repository=object,
location=object, config=object,
storage=object,
hooks=object,
local_borg_version=object, local_borg_version=object,
global_arguments=object, global_arguments=object,
local_path=object, local_path=object,
@ -451,9 +430,7 @@ def test_run_restore_skips_missing_database():
module.run_restore( module.run_restore(
repository={'path': 'repo'}, repository={'path': 'repo'},
location=flexmock(), config=flexmock(),
storage=flexmock(),
hooks=flexmock(),
local_borg_version=flexmock(), local_borg_version=flexmock(),
restore_arguments=flexmock( restore_arguments=flexmock(
repository='repo', repository='repo',
@ -486,22 +463,20 @@ def test_run_restore_restores_databases_from_different_hooks():
flexmock(module).should_receive('collect_archive_database_names').and_return(flexmock()) flexmock(module).should_receive('collect_archive_database_names').and_return(flexmock())
flexmock(module).should_receive('find_databases_to_restore').and_return(restore_names) flexmock(module).should_receive('find_databases_to_restore').and_return(restore_names)
flexmock(module).should_receive('get_configured_database').with_args( flexmock(module).should_receive('get_configured_database').with_args(
hooks=object, config=object,
archive_database_names=object, archive_database_names=object,
hook_name='postgresql_databases', hook_name='postgresql_databases',
database_name='foo', database_name='foo',
).and_return(('postgresql_databases', {'name': 'foo'})) ).and_return(('postgresql_databases', {'name': 'foo'}))
flexmock(module).should_receive('get_configured_database').with_args( flexmock(module).should_receive('get_configured_database').with_args(
hooks=object, config=object,
archive_database_names=object, archive_database_names=object,
hook_name='mysql_databases', hook_name='mysql_databases',
database_name='bar', database_name='bar',
).and_return(('mysql_databases', {'name': 'bar'})) ).and_return(('mysql_databases', {'name': 'bar'}))
flexmock(module).should_receive('restore_single_database').with_args( flexmock(module).should_receive('restore_single_database').with_args(
repository=object, repository=object,
location=object, config=object,
storage=object,
hooks=object,
local_borg_version=object, local_borg_version=object,
global_arguments=object, global_arguments=object,
local_path=object, local_path=object,
@ -513,9 +488,7 @@ def test_run_restore_restores_databases_from_different_hooks():
).once() ).once()
flexmock(module).should_receive('restore_single_database').with_args( flexmock(module).should_receive('restore_single_database').with_args(
repository=object, repository=object,
location=object, config=object,
storage=object,
hooks=object,
local_borg_version=object, local_borg_version=object,
global_arguments=object, global_arguments=object,
local_path=object, local_path=object,
@ -529,9 +502,7 @@ def test_run_restore_restores_databases_from_different_hooks():
module.run_restore( module.run_restore(
repository={'path': 'repo'}, repository={'path': 'repo'},
location=flexmock(), config=flexmock(),
storage=flexmock(),
hooks=flexmock(),
local_borg_version=flexmock(), local_borg_version=flexmock(),
restore_arguments=flexmock( restore_arguments=flexmock(
repository='repo', repository='repo',
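
The hunks above all make the same mechanical change: callers that used to thread separate `location`/`storage`/`hooks` section dicts now pass one flat `config` dict. A minimal sketch of that flattening, with hypothetical option values for illustration (not borgmatic's actual merge code):

```python
# Old-style parsed configuration, split into sections (hypothetical values).
old_style = {
    'location': {'source_directories': ['/home'], 'repositories': ['repo']},
    'storage': {'lock_wait': 5},
    'hooks': {'postgresql_databases': [{'name': 'foo'}]},
}

# Flatten the sections into the single config dict that functions like
# run_restore() now receive via their config= parameter.
config = {
    option: value
    for section in old_style.values()
    for option, value in section.items()
}

assert config['lock_wait'] == 5
assert config['postgresql_databases'] == [{'name': 'foo'}]
```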


@@ -12,7 +12,7 @@ def test_run_rinfo_does_not_raise():
     list(
         module.run_rinfo(
             repository={'path': 'repo'},
-            storage={},
+            config={},
             local_borg_version=None,
             rinfo_arguments=rinfo_arguments,
             global_arguments=flexmock(log_json=False),


@@ -12,7 +12,7 @@ def test_run_rlist_does_not_raise():
     list(
         module.run_rlist(
             repository={'path': 'repo'},
-            storage={},
+            config={},
             local_borg_version=None,
             rlist_arguments=rlist_arguments,
             global_arguments=flexmock(),


@@ -11,7 +11,7 @@ def test_run_transfer_does_not_raise():
     module.run_transfer(
         repository={'path': 'repo'},
-        storage={},
+        config={},
         local_borg_version=None,
         transfer_arguments=transfer_arguments,
         global_arguments=global_arguments,


@@ -22,7 +22,7 @@ def test_run_arbitrary_borg_calls_borg_with_flags():
     module.run_arbitrary_borg(
         repository_path='repo',
-        storage_config={},
+        config={},
         local_borg_version='1.2.3',
         options=['break-lock', '::'],
     )
@@ -44,7 +44,7 @@ def test_run_arbitrary_borg_with_log_info_calls_borg_with_info_flag():
     module.run_arbitrary_borg(
         repository_path='repo',
-        storage_config={},
+        config={},
         local_borg_version='1.2.3',
         options=['break-lock', '::'],
     )
@@ -66,7 +66,7 @@ def test_run_arbitrary_borg_with_log_debug_calls_borg_with_debug_flag():
     module.run_arbitrary_borg(
         repository_path='repo',
-        storage_config={},
+        config={},
         local_borg_version='1.2.3',
         options=['break-lock', '::'],
     )
@@ -75,7 +75,7 @@ def test_run_arbitrary_borg_with_log_debug_calls_borg_with_debug_flag():
 def test_run_arbitrary_borg_with_lock_wait_calls_borg_with_lock_wait_flags():
     flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
     flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
-    storage_config = {'lock_wait': 5}
+    config = {'lock_wait': 5}
     flexmock(module.flags).should_receive('make_flags').and_return(()).and_return(
         ('--lock-wait', '5')
     )
@@ -90,7 +90,7 @@ def test_run_arbitrary_borg_with_lock_wait_calls_borg_with_lock_wait_flags():
     module.run_arbitrary_borg(
         repository_path='repo',
-        storage_config=storage_config,
+        config=config,
         local_borg_version='1.2.3',
         options=['break-lock', '::'],
     )
@@ -111,7 +111,7 @@ def test_run_arbitrary_borg_with_archive_calls_borg_with_archive_flag():
     module.run_arbitrary_borg(
         repository_path='repo',
-        storage_config={},
+        config={},
         local_borg_version='1.2.3',
         options=['break-lock', '::$ARCHIVE'],
         archive='archive',
@@ -133,7 +133,7 @@ def test_run_arbitrary_borg_with_local_path_calls_borg_via_local_path():
     module.run_arbitrary_borg(
         repository_path='repo',
-        storage_config={},
+        config={},
         local_borg_version='1.2.3',
         options=['break-lock', '::'],
         local_path='borg1',
@@ -157,7 +157,7 @@ def test_run_arbitrary_borg_with_remote_path_calls_borg_with_remote_path_flags()
     module.run_arbitrary_borg(
         repository_path='repo',
-        storage_config={},
+        config={},
         local_borg_version='1.2.3',
         options=['break-lock', '::'],
         remote_path='borg1',
@@ -179,7 +179,7 @@ def test_run_arbitrary_borg_passes_borg_specific_flags_to_borg():
     module.run_arbitrary_borg(
         repository_path='repo',
-        storage_config={},
+        config={},
         local_borg_version='1.2.3',
         options=['list', '--progress', '::'],
     )
@@ -200,7 +200,7 @@ def test_run_arbitrary_borg_omits_dash_dash_in_flags_passed_to_borg():
     module.run_arbitrary_borg(
         repository_path='repo',
-        storage_config={},
+        config={},
         local_borg_version='1.2.3',
         options=['--', 'break-lock', '::'],
     )
@@ -221,7 +221,7 @@ def test_run_arbitrary_borg_without_borg_specific_flags_does_not_raise():
     module.run_arbitrary_borg(
         repository_path='repo',
-        storage_config={},
+        config={},
         local_borg_version='1.2.3',
         options=[],
     )
@@ -243,7 +243,7 @@ def test_run_arbitrary_borg_passes_key_sub_command_to_borg_before_injected_flags
     module.run_arbitrary_borg(
         repository_path='repo',
-        storage_config={},
+        config={},
         local_borg_version='1.2.3',
         options=['key', 'export', '::'],
     )
@@ -265,7 +265,7 @@ def test_run_arbitrary_borg_passes_debug_sub_command_to_borg_before_injected_fla
     module.run_arbitrary_borg(
         repository_path='repo',
-        storage_config={},
+        config={},
         local_borg_version='1.2.3',
         options=['debug', 'dump-manifest', '::', 'path'],
     )
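
The lock-wait test above expects `config = {'lock_wait': 5}` to surface as `('--lock-wait', '5')`. A sketch of that mapping under the new single-dict signature (a stand-in for the mocked `make_flags` call, not borgmatic's real helper):

```python
def make_lock_wait_flags(config):
    # Translate the config's lock_wait option into Borg CLI flags; a missing
    # or falsy option yields no flags, mirroring the test's expectations.
    lock_wait = config.get('lock_wait')
    return ('--lock-wait', str(lock_wait)) if lock_wait else ()

assert make_lock_wait_flags({'lock_wait': 5}) == ('--lock-wait', '5')
assert make_lock_wait_flags({}) == ()
```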


@@ -22,7 +22,7 @@ def test_break_lock_calls_borg_with_required_flags():
     module.break_lock(
         repository_path='repo',
-        storage_config={},
+        config={},
         local_borg_version='1.2.3',
         global_arguments=flexmock(log_json=False),
     )
@@ -34,7 +34,7 @@ def test_break_lock_calls_borg_with_remote_path_flags():
     module.break_lock(
         repository_path='repo',
-        storage_config={},
+        config={},
         local_borg_version='1.2.3',
         global_arguments=flexmock(log_json=False),
         remote_path='borg1',
@@ -47,7 +47,7 @@ def test_break_lock_calls_borg_with_umask_flags():
     module.break_lock(
         repository_path='repo',
-        storage_config={'umask': '0770'},
+        config={'umask': '0770'},
         local_borg_version='1.2.3',
         global_arguments=flexmock(log_json=False),
     )
@@ -59,7 +59,7 @@ def test_break_lock_calls_borg_with_log_json_flags():
     module.break_lock(
         repository_path='repo',
-        storage_config={},
+        config={},
         local_borg_version='1.2.3',
         global_arguments=flexmock(log_json=True),
     )
@@ -71,7 +71,7 @@ def test_break_lock_calls_borg_with_lock_wait_flags():
     module.break_lock(
         repository_path='repo',
-        storage_config={'lock_wait': '5'},
+        config={'lock_wait': '5'},
         local_borg_version='1.2.3',
         global_arguments=flexmock(log_json=False),
     )
@@ -84,7 +84,7 @@ def test_break_lock_with_log_info_calls_borg_with_info_parameter():
     module.break_lock(
         repository_path='repo',
-        storage_config={},
+        config={},
         local_borg_version='1.2.3',
         global_arguments=flexmock(log_json=False),
     )
@@ -97,7 +97,7 @@ def test_break_lock_with_log_debug_calls_borg_with_debug_flags():
     module.break_lock(
         repository_path='repo',
-        storage_config={},
+        config={},
         local_borg_version='1.2.3',
         global_arguments=flexmock(log_json=False),
     )
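
Taken together, the break_lock tests exercise `config` keys like `umask` and `lock_wait`. A rough sketch of how such options could be assembled into a `borg break-lock` command line (an assumed layout for illustration; the real flag construction is behind the mocks above):

```python
def make_break_lock_command(repository_path, config, local_path='borg'):
    # Build a borg break-lock invocation, appending flags only for the
    # config options that are actually set (umask, lock_wait).
    flags = ()
    if config.get('umask'):
        flags += ('--umask', str(config['umask']))
    if config.get('lock_wait'):
        flags += ('--lock-wait', str(config['lock_wait']))
    return (local_path, 'break-lock', *flags, repository_path)

assert make_break_lock_command('repo', {'umask': '0770'}) == (
    'borg', 'break-lock', '--umask', '0770', 'repo'
)
```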


@@ -99,8 +99,7 @@ def test_filter_checks_on_frequency_without_config_uses_default_checks():
     flexmock(module).should_receive('probe_for_check_time').and_return(None)
     assert module.filter_checks_on_frequency(
-        location_config={},
-        consistency_config={},
+        config={},
         borg_repository_id='repo',
         checks=('repository', 'archives'),
         force=False,
@@ -110,8 +109,7 @@ def test_filter_checks_on_frequency_without_config_uses_default_checks():
 def test_filter_checks_on_frequency_retains_unconfigured_check():
     assert module.filter_checks_on_frequency(
-        location_config={},
-        consistency_config={},
+        config={},
         borg_repository_id='repo',
         checks=('data',),
         force=False,
@@ -122,8 +120,7 @@ def test_filter_checks_on_frequency_retains_check_without_frequency():
     flexmock(module).should_receive('parse_frequency').and_return(None)
     assert module.filter_checks_on_frequency(
-        location_config={},
-        consistency_config={'checks': [{'name': 'archives'}]},
+        config={'checks': [{'name': 'archives'}]},
         borg_repository_id='repo',
         checks=('archives',),
         force=False,
@@ -141,8 +138,7 @@ def test_filter_checks_on_frequency_retains_check_with_elapsed_frequency():
     )
     assert module.filter_checks_on_frequency(
-        location_config={},
-        consistency_config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]},
+        config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]},
         borg_repository_id='repo',
         checks=('archives',),
         force=False,
@@ -158,8 +154,7 @@ def test_filter_checks_on_frequency_retains_check_with_missing_check_time_file()
     flexmock(module).should_receive('probe_for_check_time').and_return(None)
     assert module.filter_checks_on_frequency(
-        location_config={},
-        consistency_config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]},
+        config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]},
        borg_repository_id='repo',
         checks=('archives',),
         force=False,
@@ -178,8 +173,7 @@ def test_filter_checks_on_frequency_skips_check_with_unelapsed_frequency():
     assert (
         module.filter_checks_on_frequency(
-            location_config={},
-            consistency_config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]},
+            config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]},
            borg_repository_id='repo',
             checks=('archives',),
             force=False,
@@ -191,8 +185,7 @@ def test_filter_checks_on_frequency_skips_check_with_unelapsed_frequency():
 def test_filter_checks_on_frequency_restains_check_with_unelapsed_frequency_and_force():
     assert module.filter_checks_on_frequency(
-        location_config={},
-        consistency_config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]},
+        config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]},
         borg_repository_id='repo',
         checks=('archives',),
         force=True,
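
The `filter_checks_on_frequency` tests above pin down the behavior: a check is kept when it has no configured frequency, when its frequency has elapsed since the last recorded check time, or when `force` is set; otherwise it is skipped. A condensed sketch of that rule, using a plain dict of last-check times in place of the mocked `probe_for_check_time`/`parse_frequency` helpers (illustrative, not borgmatic's implementation):

```python
import datetime

def filter_checks_on_frequency(config, checks, force, last_check_times):
    # Map each configured check name to its (already parsed) frequency.
    configured = {
        check.get('name'): check.get('frequency')
        for check in config.get('checks', [])
    }
    now = datetime.datetime.now()
    kept = []
    for check in checks:
        frequency = configured.get(check)
        last_check = last_check_times.get(check)
        # Keep the check unless its frequency hasn't elapsed yet (and the
        # caller isn't forcing the check to run anyway).
        if force or not frequency or not last_check or now - last_check >= frequency:
            kept.append(check)
    return tuple(kept)

an_hour_ago = datetime.datetime.now() - datetime.timedelta(hours=1)
assert filter_checks_on_frequency(
    config={'checks': [{'name': 'archives', 'frequency': datetime.timedelta(hours=2)}]},
    checks=('archives',),
    force=False,
    last_check_times={'archives': an_hour_ago},
) == ()
```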
@@ -616,7 +609,7 @@ def test_upgrade_check_times_renames_stale_temporary_check_path():
 def test_check_archives_with_progress_calls_borg_with_progress_parameter():
     checks = ('repository',)
-    consistency_config = {'check_last': None}
+    config = {'check_last': None}
     flexmock(module.rinfo).should_receive('display_repository_info').and_return(
         '{"repository": {"id": "repo"}}'
     )
@@ -639,9 +632,7 @@ def test_check_archives_with_progress_calls_borg_with_progress_parameter():
     module.check_archives(
         repository_path='repo',
-        location_config={},
-        storage_config={},
-        consistency_config=consistency_config,
+        config=config,
         local_borg_version='1.2.3',
         global_arguments=flexmock(log_json=False),
         progress=True,
@@ -650,7 +641,7 @@ def test_check_archives_with_progress_calls_borg_with_progress_parameter():
 def test_check_archives_with_repair_calls_borg_with_repair_parameter():
     checks = ('repository',)
-    consistency_config = {'check_last': None}
+    config = {'check_last': None}
     flexmock(module.rinfo).should_receive('display_repository_info').and_return(
         '{"repository": {"id": "repo"}}'
     )
@@ -673,9 +664,7 @@ def test_check_archives_with_repair_calls_borg_with_repair_parameter():
     module.check_archives(
         repository_path='repo',
-        location_config={},
-        storage_config={},
-        consistency_config=consistency_config,
+        config=config,
         local_borg_version='1.2.3',
         global_arguments=flexmock(log_json=False),
         repair=True,
@@ -693,7 +682,7 @@ def test_check_archives_with_repair_calls_borg_with_repair_parameter():
 )
 def test_check_archives_calls_borg_with_parameters(checks):
     check_last = flexmock()
-    consistency_config = {'check_last': check_last}
+    config = {'check_last': check_last}
     flexmock(module.rinfo).should_receive('display_repository_info').and_return(
         '{"repository": {"id": "repo"}}'
     )
@@ -710,9 +699,7 @@ def test_check_archives_calls_borg_with_parameters(checks):
     module.check_archives(
         repository_path='repo',
-        location_config={},
-        storage_config={},
-        consistency_config=consistency_config,
+        config=config,
         local_borg_version='1.2.3',
         global_arguments=flexmock(log_json=False),
     )
@@ -721,7 +708,7 @@ def test_check_archives_calls_borg_with_parameters(checks):
 def test_check_archives_with_json_error_raises():
     checks = ('archives',)
     check_last = flexmock()
-    consistency_config = {'check_last': check_last}
+    config = {'check_last': check_last}
     flexmock(module.rinfo).should_receive('display_repository_info').and_return(
         '{"unexpected": {"id": "repo"}}'
     )
@@ -734,9 +721,7 @@ def test_check_archives_with_json_error_raises():
     with pytest.raises(ValueError):
         module.check_archives(
             repository_path='repo',
-            location_config={},
-            storage_config={},
-            consistency_config=consistency_config,
+            config=config,
             local_borg_version='1.2.3',
             global_arguments=flexmock(log_json=False),
         )
@@ -745,7 +730,7 @@ def test_check_archives_with_json_error_raises():
 def test_check_archives_with_missing_json_keys_raises():
     checks = ('archives',)
     check_last = flexmock()
-    consistency_config = {'check_last': check_last}
+    config = {'check_last': check_last}
     flexmock(module.rinfo).should_receive('display_repository_info').and_return('{invalid JSON')
     flexmock(module).should_receive('upgrade_check_times')
     flexmock(module).should_receive('parse_checks')
@@ -756,9 +741,7 @@ def test_check_archives_with_missing_json_keys_raises():
     with pytest.raises(ValueError):
         module.check_archives(
             repository_path='repo',
-            location_config={},
-            storage_config={},
-            consistency_config=consistency_config,
+            config=config,
             local_borg_version='1.2.3',
             global_arguments=flexmock(log_json=False),
         )
@@ -767,7 +750,7 @@ def test_check_archives_with_missing_json_keys_raises():
 def test_check_archives_with_extract_check_calls_extract_only():
     checks = ('extract',)
     check_last = flexmock()
-    consistency_config = {'check_last': check_last}
+    config = {'check_last': check_last}
     flexmock(module.rinfo).should_receive('display_repository_info').and_return(
         '{"repository": {"id": "repo"}}'
     )
@@ -784,9 +767,7 @@ def test_check_archives_with_extract_check_calls_extract_only():
     module.check_archives(
         repository_path='repo',
-        location_config={},
-        storage_config={},
-        consistency_config=consistency_config,
+        config=config,
         local_borg_version='1.2.3',
         global_arguments=flexmock(log_json=False),
     )
@@ -794,7 +775,7 @@ def test_check_archives_with_extract_check_calls_extract_only():
 def test_check_archives_with_log_info_calls_borg_with_info_parameter():
     checks = ('repository',)
-    consistency_config = {'check_last': None}
+    config = {'check_last': None}
     flexmock(module.rinfo).should_receive('display_repository_info').and_return(
         '{"repository": {"id": "repo"}}'
     )
@@ -812,9 +793,7 @@ def test_check_archives_with_log_info_calls_borg_with_info_parameter():
     module.check_archives(
         repository_path='repo',
-        location_config={},
-        storage_config={},
-        consistency_config=consistency_config,
+        config=config,
         local_borg_version='1.2.3',
         global_arguments=flexmock(log_json=False),
     )
@@ -822,7 +801,7 @@ def test_check_archives_with_log_info_calls_borg_with_info_parameter():
 def test_check_archives_with_log_debug_calls_borg_with_debug_parameter():
     checks = ('repository',)
-    consistency_config = {'check_last': None}
+    config = {'check_last': None}
     flexmock(module.rinfo).should_receive('display_repository_info').and_return(
         '{"repository": {"id": "repo"}}'
     )
@@ -840,16 +819,14 @@ def test_check_archives_with_log_debug_calls_borg_with_debug_parameter():
     module.check_archives(
         repository_path='repo',
-        location_config={},
-        storage_config={},
-        consistency_config=consistency_config,
+        config=config,
         local_borg_version='1.2.3',
         global_arguments=flexmock(log_json=False),
     )


 def test_check_archives_without_any_checks_bails():
-    consistency_config = {'check_last': None}
+    config = {'check_last': None}
     flexmock(module.rinfo).should_receive('display_repository_info').and_return(
         '{"repository": {"id": "repo"}}'
     )
@@ -862,9 +839,7 @@ def test_check_archives_without_any_checks_bails():
     module.check_archives(
         repository_path='repo',
-        location_config={},
-        storage_config={},
-        consistency_config=consistency_config,
+        config=config,
         local_borg_version='1.2.3',
         global_arguments=flexmock(log_json=False),
     )
@@ -873,7 +848,7 @@ def test_check_archives_without_any_checks_bails():
 def test_check_archives_with_local_path_calls_borg_via_local_path():
     checks = ('repository',)
     check_last = flexmock()
-    consistency_config = {'check_last': check_last}
+    config = {'check_last': check_last}
     flexmock(module.rinfo).should_receive('display_repository_info').and_return(
         '{"repository": {"id": "repo"}}'
     )
@@ -890,9 +865,7 @@ def test_check_archives_with_local_path_calls_borg_via_local_path():
     module.check_archives(
         repository_path='repo',
-        location_config={},
-        storage_config={},
-        consistency_config=consistency_config,
+        config=config,
         local_borg_version='1.2.3',
         global_arguments=flexmock(log_json=False),
         local_path='borg1',
@@ -902,7 +875,7 @@ def test_check_archives_with_local_path_calls_borg_via_local_path():
 def test_check_archives_with_remote_path_calls_borg_with_remote_path_parameters():
     checks = ('repository',)
     check_last = flexmock()
-    consistency_config = {'check_last': check_last}
+    config = {'check_last': check_last}
     flexmock(module.rinfo).should_receive('display_repository_info').and_return(
         '{"repository": {"id": "repo"}}'
     )
@@ -919,9 +892,7 @@ def test_check_archives_with_remote_path_calls_borg_with_remote_path_parameters(
     module.check_archives(
         repository_path='repo',
-        location_config={},
-        storage_config={},
-        consistency_config=consistency_config,
+        config=config,
         local_borg_version='1.2.3',
         global_arguments=flexmock(log_json=False),
         remote_path='borg1',
@@ -931,8 +902,7 @@ def test_check_archives_with_remote_path_calls_borg_with_remote_path_parameters(
 def test_check_archives_with_log_json_calls_borg_with_log_json_parameters():
     checks = ('repository',)
     check_last = flexmock()
-    storage_config = {}
-    consistency_config = {'check_last': check_last}
+    config = {'check_last': check_last}
     flexmock(module.rinfo).should_receive('display_repository_info').and_return(
         '{"repository": {"id": "repo"}}'
     )
@@ -949,9 +919,7 @@ def test_check_archives_with_log_json_calls_borg_with_log_json_parameters():
     module.check_archives(
         repository_path='repo',
-        location_config={},
-        storage_config=storage_config,
-        consistency_config=consistency_config,
+        config=config,
         local_borg_version='1.2.3',
         global_arguments=flexmock(log_json=True),
     )
@@ -960,8 +928,7 @@ def test_check_archives_with_log_json_calls_borg_with_log_json_parameters():
 def test_check_archives_with_lock_wait_calls_borg_with_lock_wait_parameters():
     checks = ('repository',)
     check_last = flexmock()
-    storage_config = {'lock_wait': 5}
-    consistency_config = {'check_last': check_last}
+    config = {'lock_wait': 5, 'check_last': check_last}
     flexmock(module.rinfo).should_receive('display_repository_info').and_return(
         '{"repository": {"id": "repo"}}'
     )
@@ -978,9 +945,7 @@ def test_check_archives_with_lock_wait_calls_borg_with_lock_wait_parameters():
     module.check_archives(
         repository_path='repo',
-        location_config={},
-        storage_config=storage_config,
-        consistency_config=consistency_config,
+        config=config,
         local_borg_version='1.2.3',
         global_arguments=flexmock(log_json=False),
     )
@@ -990,7 +955,7 @@ def test_check_archives_with_retention_prefix():
     checks = ('repository',)
     check_last = flexmock()
     prefix = 'foo-'
-    consistency_config = {'check_last': check_last, 'prefix': prefix}
+    config = {'check_last': check_last, 'prefix': prefix}
     flexmock(module.rinfo).should_receive('display_repository_info').and_return(
         '{"repository": {"id": "repo"}}'
     )
@@ -1007,9 +972,7 @@ def test_check_archives_with_retention_prefix():
     module.check_archives(
         repository_path='repo',
-        location_config={},
-        storage_config={},
-        consistency_config=consistency_config,
+        config=config,
         local_borg_version='1.2.3',
         global_arguments=flexmock(log_json=False),
     )
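
The retention-prefix and check_last tests pass those options through the same flat `config` dict. A sketch of how they might translate into Borg archive-filtering flags (the exact flag names vary by Borg version, so this mapping is an assumption, not borgmatic's code):

```python
def make_archive_filter_flags(config):
    # Limit checking to the most recent archives and/or to archives whose
    # names start with the configured prefix (assumed flag spellings).
    flags = ()
    if config.get('check_last'):
        flags += ('--last', str(config['check_last']))
    if config.get('prefix'):
        flags += ('--match-archives', f"sh:{config['prefix']}*")
    return flags

assert make_archive_filter_flags({'check_last': 3, 'prefix': 'foo-'}) == (
    '--last', '3', '--match-archives', 'sh:foo-*'
)
```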
@@ -1017,7 +980,7 @@ def test_check_archives_with_retention_prefix():
 def test_check_archives_with_extra_borg_options_calls_borg_with_extra_options():
     checks = ('repository',)
-    consistency_config = {'check_last': None}
+    config = {'check_last': None, 'extra_borg_options': {'check': '--extra --options'}}
     flexmock(module.rinfo).should_receive('display_repository_info').and_return(
         '{"repository": {"id": "repo"}}'
     )
@@ -1034,9 +997,7 @@ def test_check_archives_with_extra_borg_options_calls_borg_with_extra_options():
     module.check_archives(
         repository_path='repo',
-        location_config={},
-        storage_config={'extra_borg_options': {'check': '--extra --options'}},
-        consistency_config=consistency_config,
+        config=config,
         local_borg_version='1.2.3',
         global_arguments=flexmock(log_json=False),
     )
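
Finally, the extra-options test moves `extra_borg_options` into the same flat `config`. A sketch of splicing those user-supplied options into the check command with `shlex` (illustrative only; the surrounding flag handling is mocked out above):

```python
import shlex

def make_check_command(repository_path, config, local_path='borg'):
    # Pull any user-supplied extra options for the check action out of config
    # and split them shell-style before handing them to borg.
    extra = config.get('extra_borg_options', {}).get('check', '')
    return (local_path, 'check', *shlex.split(extra), repository_path)

assert make_check_command('repo', {'extra_borg_options': {'check': '--extra --options'}}) == (
    'borg', 'check', '--extra', '--options', 'repo'
)
```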

Some files were not shown because too many files have changed in this diff.