Merge remote-tracking branch 'upstream/master' into borg2-archive-flags
commit 1ee56805f1
148 changed files with 3875 additions and 1253 deletions
@@ -24,6 +24,8 @@ clone:
 steps:
   - name: build
     image: alpine:3.13
+    environment:
+      TEST_CONTAINER: true
     pull: always
     commands:
       - scripts/run-full-tests

@@ -1,4 +1,5 @@
 const pluginSyntaxHighlight = require("@11ty/eleventy-plugin-syntaxhighlight");
+const codeClipboard = require("eleventy-plugin-code-clipboard");
 const inclusiveLangPlugin = require("@11ty/eleventy-plugin-inclusive-language");
 const navigationPlugin = require("@11ty/eleventy-navigation");
 
@@ -6,6 +7,7 @@ module.exports = function(eleventyConfig) {
   eleventyConfig.addPlugin(pluginSyntaxHighlight);
   eleventyConfig.addPlugin(inclusiveLangPlugin);
   eleventyConfig.addPlugin(navigationPlugin);
+  eleventyConfig.addPlugin(codeClipboard);
 
   let markdownIt = require("markdown-it");
   let markdownItAnchor = require("markdown-it-anchor");
@@ -31,6 +33,7 @@ module.exports = function(eleventyConfig) {
     markdownIt(markdownItOptions)
       .use(markdownItAnchor, markdownItAnchorOptions)
       .use(markdownItReplaceLink)
+      .use(codeClipboard.markdownItCopyButton)
   );
 
   eleventyConfig.addPassthroughCopy({"docs/static": "static"});

NEWS (64 changed lines)
@@ -1,10 +1,66 @@
-1.7.10.dev0
+1.7.13.dev0
+ * #375: Restore particular PostgreSQL schemas from a database dump via "borgmatic restore --schema"
+   flag. See the documentation for more information:
+   https://torsion.org/borgmatic/docs/how-to/backup-your-databases/#restore-particular-schemas
+
+1.7.12
+ * #413: Add "log_file" context to command hooks so your scripts can consume the borgmatic log file.
+   See the documentation for more information:
+   https://torsion.org/borgmatic/docs/how-to/add-preparation-and-cleanup-steps-to-backups/
+ * #666, #670: Fix error when running the "info" action with the "--match-archives" or "--archive"
+   flags. Also fix the "--match-archives"/"--archive" flags to correctly override the
+   "match_archives" configuration option for the "transfer", "list", "rlist", and "info" actions.
+ * #668: Fix error when running the "prune" action with both "archive_name_format" and "prefix"
+   options set.
+ * #672: Selectively shallow merge certain mappings or sequences when including configuration files.
+   See the documentation for more information:
+   https://torsion.org/borgmatic/docs/how-to/make-per-application-backups/#shallow-merge
+ * #672: Selectively omit list values when including configuration files. See the documentation for
+   more information:
+   https://torsion.org/borgmatic/docs/how-to/make-per-application-backups/#list-merge
+ * #673: View the results of configuration file merging via "validate-borgmatic-config --show" flag.
+   See the documentation for more information:
+   https://torsion.org/borgmatic/docs/how-to/make-per-application-backups/#debugging-includes
+ * Add optional support for running end-to-end tests and building documentation with rootless Podman
+   instead of Docker.
+
+1.7.11
+ * #479, #588: BREAKING: Automatically use the "archive_name_format" option to filter which archives
+   get used for borgmatic actions that operate on multiple archives. Override this behavior with the
+   new "match_archives" option in the storage section. This change is "breaking" in that it silently
+   changes which archives get considered for "rlist", "prune", "check", etc. See the documentation
+   for more information:
+   https://torsion.org/borgmatic/docs/how-to/make-per-application-backups/#archive-naming
+ * #479, #588: The "prefix" options have been deprecated in favor of the new "archive_name_format"
+   auto-matching behavior and the "match_archives" option.
+ * #658: Add "--log-file-format" flag for customizing the log message format. See the documentation
+   for more information:
+   https://torsion.org/borgmatic/docs/how-to/inspect-your-backups/#logging-to-file
+ * #662: Fix regression in which the "check_repositories" option failed to match repositories.
+ * #663: Fix regression in which the "transfer" action produced a traceback.
+ * Add spellchecking of source code during test runs.
+
+1.7.10
+ * #396: When a database command errors, display and log the error message instead of swallowing it.
  * #501: Optionally error if a source directory does not exist via "source_directories_must_exist"
    option in borgmatic's location configuration.
  * #576: Add support for "file://" paths within "repositories" option.
+ * #612: Define and use custom constants in borgmatic configuration files. See the documentation for
+   more information:
+   https://torsion.org/borgmatic/docs/how-to/make-per-application-backups/#constant-interpolation
  * #618: Add support for BORG_FILES_CACHE_TTL environment variable via "borg_files_cache_ttl" option
    in borgmatic's storage configuration.
  * #623: Fix confusing message when an error occurs running actions for a configuration file.
+ * #635: Add optional repository labels so you can select a repository via "--repository yourlabel"
+   at the command-line. See the configuration reference for more information:
+   https://torsion.org/borgmatic/docs/reference/configuration/
+ * #649: Add documentation on backing up a database running in a container:
+   https://torsion.org/borgmatic/docs/how-to/backup-your-databases/#containers
+ * #655: Fix error when databases are configured and a source directory doesn't exist.
+ * Add code style plugins to enforce use of Python f-strings and prevent single-letter variables.
+   To join in the pedantry, refresh your test environment with "tox --recreate".
+ * Rename scripts/run-full-dev-tests to scripts/run-end-to-end-dev-tests and make it run end-to-end
+   tests only. Continue using tox to run unit and integration tests.
+
 1.7.9
  * #295: Add a SQLite database dump/restore hook.
@@ -374,7 +430,7 @@
    configuration schema descriptions.
 
 1.5.6
- * #292: Allow before_backup and similiar hooks to exit with a soft failure without altering the
+ * #292: Allow before_backup and similar hooks to exit with a soft failure without altering the
    monitoring status on Healthchecks or other providers. Support this by waiting to ping monitoring
    services with a "start" status until after before_* hooks finish. Failures in before_* hooks
    still trigger a monitoring "fail" status.
@@ -443,7 +499,7 @@
 * For "list" and "info" actions, show repository names even at verbosity 0.
 
 1.4.22
- * #276, #285: Disable colored output when "--json" flag is used, so as to produce valid JSON ouput.
+ * #276, #285: Disable colored output when "--json" flag is used, so as to produce valid JSON output.
 * After a backup of a database dump in directory format, properly remove the dump directory.
 * In "borgmatic --help", don't expand $HOME in listing of default "--config" paths.
 
@@ -815,7 +871,7 @@
 * #77: Skip non-"*.yaml" config filenames in /etc/borgmatic.d/ so as not to parse backup files,
   editor swap files, etc.
 * #81: Document user-defined hooks run before/after backup, or on error.
- * Add code style guidelines to the documention.
+ * Add code style guidelines to the documentation.
 
 1.2.0
 * #61: Support for Borg --list option via borgmatic command-line to list all archives.
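
The #672 entries above distinguish deep from shallow merging of included configuration. As a rough illustration of the difference (a plain-Python toy, not borgmatic's actual merge implementation, which operates on YAML includes):

```python
# Toy example only: the option names are real borgmatic options, but the
# values and the merge code itself are made up for illustration.
include = {'location': {'source_directories': ['/etc'], 'one_file_system': True}}
override = {'location': {'source_directories': ['/home']}}

# Deep merge: nested mappings are combined key by key.
deep = {'location': {**include['location'], **override['location']}}
assert deep['location']['one_file_system'] is True

# Shallow merge: the overriding file's mapping replaces the include's wholesale.
shallow = {**include, **override}
assert 'one_file_system' not in shallow['location']
```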

@@ -24,9 +24,10 @@ location:
 
     # Paths of local or remote repositories to backup to.
     repositories:
-        - ssh://1234@usw-s001.rsync.net/./backups.borg
-        - ssh://k8pDxu32@k8pDxu32.repo.borgbase.com/./repo
-        - /var/lib/backups/local.borg
+        - path: ssh://k8pDxu32@k8pDxu32.repo.borgbase.com/./repo
+          label: borgbase
+        - path: /var/lib/backups/local.borg
+          label: local
 
 retention:
     # Retention policy for how many backups to keep.
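
The sample configuration above switches the `repositories` list from bare paths to mappings with `path` and `label` keys, matching the #635 changelog entry: a label can stand in for the path in `--repository` arguments. A hypothetical sketch of that matching rule (borgmatic's actual logic lives in `borgmatic.config.validate.repositories_match`, which this diff doesn't show):

```python
# Hypothetical helper, for illustration only.
def repository_matches(repository, requested):
    # 'repository' is a configured dict like {'path': ..., 'label': ...};
    # 'requested' is whatever the user passed via --repository.
    return requested in (repository.get('label'), repository['path'])


repo = {'path': 'ssh://k8pDxu32@k8pDxu32.repo.borgbase.com/./repo', 'label': 'borgbase'}
assert repository_matches(repo, 'borgbase')
assert repository_matches(repo, repo['path'])
```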
@@ -8,7 +8,12 @@ logger = logging.getLogger(__name__)
 
 
 def run_borg(
-    repository, storage, local_borg_version, borg_arguments, local_path, remote_path,
+    repository,
+    storage,
+    local_borg_version,
+    borg_arguments,
+    local_path,
+    remote_path,
 ):
     '''
     Run the "borg" action for the given repository.
@@ -16,9 +21,9 @@ def run_borg(
     if borg_arguments.repository is None or borgmatic.config.validate.repositories_match(
         repository, borg_arguments.repository
     ):
-        logger.info('{}: Running arbitrary Borg command'.format(repository))
+        logger.info(f'{repository["path"]}: Running arbitrary Borg command')
         archive_name = borgmatic.borg.rlist.resolve_archive_name(
-            repository,
+            repository['path'],
             borg_arguments.archive,
             storage,
             local_borg_version,
@@ -26,7 +31,7 @@ def run_borg(
             remote_path,
         )
         borgmatic.borg.borg.run_arbitrary_borg(
-            repository,
+            repository['path'],
             storage,
             local_borg_version,
             options=borg_arguments.options,
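
A pattern repeats through all of the action modules below: configured repositories are now dicts with a `path` key (per the sample configuration above) rather than bare strings, so log messages and Borg calls pick out `repository["path"]` instead of formatting the whole value. In miniature:

```python
# The dict shape is assumed from the sample configuration above.
repository = {'path': '/var/lib/backups/local.borg', 'label': 'local'}

# The old '{}'.format(repository) would now render the entire dict:
print('{}: Running arbitrary Borg command'.format(repository))
# The new f-strings select just the path:
print(f'{repository["path"]}: Running arbitrary Borg command')
```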
@@ -7,7 +7,12 @@ logger = logging.getLogger(__name__)
 
 
 def run_break_lock(
-    repository, storage, local_borg_version, break_lock_arguments, local_path, remote_path,
+    repository,
+    storage,
+    local_borg_version,
+    break_lock_arguments,
+    local_path,
+    remote_path,
 ):
     '''
     Run the "break-lock" action for the given repository.
@@ -15,7 +20,11 @@ def run_break_lock(
     if break_lock_arguments.repository is None or borgmatic.config.validate.repositories_match(
         repository, break_lock_arguments.repository
     ):
-        logger.info(f'{repository}: Breaking repository and cache locks')
+        logger.info(f'{repository["path"]}: Breaking repository and cache locks')
         borgmatic.borg.break_lock.break_lock(
-            repository, storage, local_borg_version, local_path=local_path, remote_path=remote_path,
+            repository['path'],
+            storage,
+            local_borg_version,
+            local_path=local_path,
+            remote_path=remote_path,
         )

@@ -37,9 +37,9 @@ def run_check(
         global_arguments.dry_run,
         **hook_context,
     )
-    logger.info('{}: Running consistency checks'.format(repository))
+    logger.info(f'{repository["path"]}: Running consistency checks')
     borgmatic.borg.check.check_archives(
-        repository,
+        repository['path'],
         location,
         storage,
         consistency,

@@ -39,10 +39,10 @@ def run_compact(
         **hook_context,
     )
     if borgmatic.borg.feature.available(borgmatic.borg.feature.Feature.COMPACT, local_borg_version):
-        logger.info('{}: Compacting segments{}'.format(repository, dry_run_label))
+        logger.info(f'{repository["path"]}: Compacting segments{dry_run_label}')
         borgmatic.borg.compact.compact_segments(
             global_arguments.dry_run,
-            repository,
+            repository['path'],
             storage,
             local_borg_version,
             local_path=local_path,
@@ -52,7 +52,7 @@ def run_compact(
             threshold=compact_arguments.threshold,
         )
     else:  # pragma: nocover
-        logger.info('{}: Skipping compact (only available/needed in Borg 1.2+)'.format(repository))
+        logger.info(f'{repository["path"]}: Skipping compact (only available/needed in Borg 1.2+)')
     borgmatic.hooks.command.execute_hook(
         hooks.get('after_compact'),
         hooks.get('umask'),

@@ -42,11 +42,11 @@ def run_create(
         global_arguments.dry_run,
         **hook_context,
     )
-    logger.info('{}: Creating archive{}'.format(repository, dry_run_label))
+    logger.info(f'{repository["path"]}: Creating archive{dry_run_label}')
     borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
         'remove_database_dumps',
         hooks,
-        repository,
+        repository['path'],
         borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
         location,
         global_arguments.dry_run,
@@ -54,7 +54,7 @@ def run_create(
     active_dumps = borgmatic.hooks.dispatch.call_hooks(
         'dump_databases',
         hooks,
-        repository,
+        repository['path'],
         borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
         location,
         global_arguments.dry_run,
@@ -63,7 +63,7 @@ def run_create(
 
     json_output = borgmatic.borg.create.create_archive(
         global_arguments.dry_run,
-        repository,
+        repository['path'],
         location,
         storage,
         local_borg_version,

@@ -23,13 +23,13 @@ def run_export_tar(
         repository, export_tar_arguments.repository
     ):
         logger.info(
-            '{}: Exporting archive {} as tar file'.format(repository, export_tar_arguments.archive)
+            f'{repository["path"]}: Exporting archive {export_tar_arguments.archive} as tar file'
         )
         borgmatic.borg.export_tar.export_tar_archive(
             global_arguments.dry_run,
-            repository,
+            repository['path'],
             borgmatic.borg.rlist.resolve_archive_name(
-                repository,
+                repository['path'],
                 export_tar_arguments.archive,
                 storage,
                 local_borg_version,

@@ -35,12 +35,12 @@ def run_extract(
     if extract_arguments.repository is None or borgmatic.config.validate.repositories_match(
         repository, extract_arguments.repository
     ):
-        logger.info('{}: Extracting archive {}'.format(repository, extract_arguments.archive))
+        logger.info(f'{repository["path"]}: Extracting archive {extract_arguments.archive}')
         borgmatic.borg.extract.extract_archive(
             global_arguments.dry_run,
-            repository,
+            repository['path'],
             borgmatic.borg.rlist.resolve_archive_name(
-                repository,
+                repository['path'],
                 extract_arguments.archive,
                 storage,
                 local_borg_version,

@@ -9,7 +9,12 @@ logger = logging.getLogger(__name__)
 
 
 def run_info(
-    repository, storage, local_borg_version, info_arguments, local_path, remote_path,
+    repository,
+    storage,
+    local_borg_version,
+    info_arguments,
+    local_path,
+    remote_path,
 ):
     '''
     Run the "info" action for the given repository and archive.
@@ -20,9 +25,9 @@ def run_info(
         repository, info_arguments.repository
     ):
         if not info_arguments.json:  # pragma: nocover
-            logger.answer(f'{repository}: Displaying archive summary information')
+            logger.answer(f'{repository["path"]}: Displaying archive summary information')
         info_arguments.archive = borgmatic.borg.rlist.resolve_archive_name(
-            repository,
+            repository['path'],
             info_arguments.archive,
             storage,
             local_borg_version,
@@ -30,7 +35,7 @@ def run_info(
             remote_path,
         )
         json_output = borgmatic.borg.info.display_archives_info(
-            repository,
+            repository['path'],
             storage,
             local_borg_version,
             info_arguments=info_arguments,

@@ -8,7 +8,12 @@ logger = logging.getLogger(__name__)
 
 
 def run_list(
-    repository, storage, local_borg_version, list_arguments, local_path, remote_path,
+    repository,
+    storage,
+    local_borg_version,
+    list_arguments,
+    local_path,
+    remote_path,
 ):
     '''
     Run the "list" action for the given repository and archive.
@@ -20,11 +25,11 @@ def run_list(
     ):
         if not list_arguments.json:  # pragma: nocover
             if list_arguments.find_paths:
-                logger.answer(f'{repository}: Searching archives')
+                logger.answer(f'{repository["path"]}: Searching archives')
             elif not list_arguments.archive:
-                logger.answer(f'{repository}: Listing archives')
+                logger.answer(f'{repository["path"]}: Listing archives')
         list_arguments.archive = borgmatic.borg.rlist.resolve_archive_name(
-            repository,
+            repository['path'],
             list_arguments.archive,
             storage,
             local_borg_version,
@@ -32,7 +37,7 @@ def run_list(
             remote_path,
         )
         json_output = borgmatic.borg.list.list_archive(
-            repository,
+            repository['path'],
             storage,
             local_borg_version,
             list_arguments=list_arguments,

@@ -8,7 +8,12 @@ logger = logging.getLogger(__name__)
 
 
 def run_mount(
-    repository, storage, local_borg_version, mount_arguments, local_path, remote_path,
+    repository,
+    storage,
+    local_borg_version,
+    mount_arguments,
+    local_path,
+    remote_path,
 ):
     '''
     Run the "mount" action for the given repository.
@@ -17,14 +22,14 @@ def run_mount(
         repository, mount_arguments.repository
     ):
         if mount_arguments.archive:
-            logger.info('{}: Mounting archive {}'.format(repository, mount_arguments.archive))
+            logger.info(f'{repository["path"]}: Mounting archive {mount_arguments.archive}')
         else:  # pragma: nocover
-            logger.info('{}: Mounting repository'.format(repository))
+            logger.info(f'{repository["path"]}: Mounting repository')
 
         borgmatic.borg.mount.mount_archive(
-            repository,
+            repository['path'],
             borgmatic.borg.rlist.resolve_archive_name(
-                repository,
+                repository['path'],
                 mount_arguments.archive,
                 storage,
                 local_borg_version,

@@ -37,10 +37,10 @@ def run_prune(
         global_arguments.dry_run,
         **hook_context,
     )
-    logger.info('{}: Pruning archives{}'.format(repository, dry_run_label))
+    logger.info(f'{repository["path"]}: Pruning archives{dry_run_label}')
     borgmatic.borg.prune.prune_archives(
         global_arguments.dry_run,
-        repository,
+        repository['path'],
         storage,
         retention,
         local_borg_version,

@@ -23,10 +23,10 @@ def run_rcreate(
     ):
         return
 
-    logger.info('{}: Creating repository'.format(repository))
+    logger.info(f'{repository["path"]}: Creating repository')
    borgmatic.borg.rcreate.create_repository(
        global_arguments.dry_run,
-        repository,
+        repository['path'],
        storage,
        local_borg_version,
        rcreate_arguments.encryption_mode,

@@ -114,7 +114,13 @@ def restore_single_database(
 
 
 def collect_archive_database_names(
-    repository, archive, location, storage, local_borg_version, local_path, remote_path,
+    repository,
+    archive,
+    location,
+    storage,
+    local_borg_version,
+    local_path,
+    remote_path,
 ):
     '''
     Given a local or remote repository path, a resolved archive name, a location configuration dict,
@@ -180,7 +186,7 @@ def find_databases_to_restore(requested_database_names, archive_database_names):
     if 'all' in restore_names[UNSPECIFIED_HOOK]:
         restore_names[UNSPECIFIED_HOOK].remove('all')
 
-        for (hook_name, database_names) in archive_database_names.items():
+        for hook_name, database_names in archive_database_names.items():
             restore_names.setdefault(hook_name, []).extend(database_names)
 
             # If a database is to be restored as part of "all", then remove it from restore names so
@@ -256,22 +262,34 @@ def run_restore(
         return
 
     logger.info(
-        '{}: Restoring databases from archive {}'.format(repository, restore_arguments.archive)
+        f'{repository["path"]}: Restoring databases from archive {restore_arguments.archive}'
     )
 
     borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
         'remove_database_dumps',
         hooks,
-        repository,
+        repository['path'],
         borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
         location,
         global_arguments.dry_run,
     )
 
     archive_name = borgmatic.borg.rlist.resolve_archive_name(
-        repository, restore_arguments.archive, storage, local_borg_version, local_path, remote_path,
+        repository['path'],
+        restore_arguments.archive,
+        storage,
+        local_borg_version,
+        local_path,
+        remote_path,
     )
     archive_database_names = collect_archive_database_names(
-        repository, archive_name, location, storage, local_borg_version, local_path, remote_path,
+        repository['path'],
+        archive_name,
+        location,
+        storage,
+        local_borg_version,
+        local_path,
+        remote_path,
     )
     restore_names = find_databases_to_restore(restore_arguments.databases, archive_database_names)
     found_names = set()
@@ -291,7 +309,7 @@ def run_restore(
 
             found_names.add(database_name)
             restore_single_database(
-                repository,
+                repository['path'],
                 location,
                 storage,
                 hooks,
@@ -301,7 +319,7 @@ def run_restore(
                 remote_path,
                 archive_name,
                 found_hook_name or hook_name,
-                found_database,
+                dict(found_database, **{'schemas': restore_arguments.schemas}),
             )
 
     # For any database that weren't found via exact matches in the hooks configuration, try to
@@ -320,7 +338,7 @@ def run_restore(
             database['name'] = database_name
 
             restore_single_database(
-                repository,
+                repository['path'],
                 location,
                 storage,
                 hooks,
@@ -330,13 +348,13 @@ def run_restore(
                 remote_path,
                 archive_name,
                 found_hook_name or hook_name,
-                database,
+                dict(database, **{'schemas': restore_arguments.schemas}),
             )
 
     borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
         'remove_database_dumps',
         hooks,
-        repository,
+        repository['path'],
         borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
         location,
         global_arguments.dry_run,
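
The restore changes also thread the new `--schema` selections into each database dict with `dict(database, **{'schemas': restore_arguments.schemas})`, which builds an extended copy rather than mutating the configured dict. A standalone demonstration (the values here are invented):

```python
found_database = {'name': 'users'}
schemas = ['public', 'reporting']

merged = dict(found_database, **{'schemas': schemas})

assert merged == {'name': 'users', 'schemas': schemas}
assert 'schemas' not in found_database  # the original dict is untouched
```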

@@ -8,7 +8,12 @@ logger = logging.getLogger(__name__)
 
 
 def run_rinfo(
-    repository, storage, local_borg_version, rinfo_arguments, local_path, remote_path,
+    repository,
+    storage,
+    local_borg_version,
+    rinfo_arguments,
+    local_path,
+    remote_path,
 ):
     '''
     Run the "rinfo" action for the given repository.
@@ -19,9 +24,10 @@ def run_rinfo(
         repository, rinfo_arguments.repository
     ):
         if not rinfo_arguments.json:  # pragma: nocover
-            logger.answer('{}: Displaying repository summary information'.format(repository))
+            logger.answer(f'{repository["path"]}: Displaying repository summary information')
 
         json_output = borgmatic.borg.rinfo.display_repository_info(
-            repository,
+            repository['path'],
             storage,
             local_borg_version,
             rinfo_arguments=rinfo_arguments,

@@ -8,7 +8,12 @@ logger = logging.getLogger(__name__)
 
 
 def run_rlist(
-    repository, storage, local_borg_version, rlist_arguments, local_path, remote_path,
+    repository,
+    storage,
+    local_borg_version,
+    rlist_arguments,
+    local_path,
+    remote_path,
 ):
     '''
     Run the "rlist" action for the given repository.
@@ -19,9 +24,10 @@ def run_rlist(
         repository, rlist_arguments.repository
     ):
         if not rlist_arguments.json:  # pragma: nocover
-            logger.answer('{}: Listing repository'.format(repository))
+            logger.answer(f'{repository["path"]}: Listing repository')
 
         json_output = borgmatic.borg.rlist.list_repository(
-            repository,
+            repository['path'],
             storage,
             local_borg_version,
             rlist_arguments=rlist_arguments,

@@ -17,10 +17,10 @@ def run_transfer(
     '''
     Run the "transfer" action for the given repository.
     '''
-    logger.info(f'{repository}: Transferring archives to repository')
+    logger.info(f'{repository["path"]}: Transferring archives to repository')
     borgmatic.borg.transfer.transfer_archives(
         global_arguments.dry_run,
-        repository,
+        repository['path'],
         storage,
         local_borg_version,
         transfer_arguments,

@@ -13,7 +13,7 @@ BORG_SUBCOMMANDS_WITHOUT_REPOSITORY = (('debug', 'info'), ('debug', 'convert-pro
 
 
 def run_arbitrary_borg(
-    repository,
+    repository_path,
     storage_config,
     local_borg_version,
     options,
@@ -44,10 +44,10 @@ def run_arbitrary_borg(
         repository_archive_flags = ()
     elif archive:
         repository_archive_flags = flags.make_repository_archive_flags(
-            repository, archive, local_borg_version
+            repository_path, archive, local_borg_version
         )
     else:
-        repository_archive_flags = flags.make_repository_flags(repository, local_borg_version)
+        repository_archive_flags = flags.make_repository_flags(repository_path, local_borg_version)
 
     full_command = (
         (local_path,)

@@ -7,7 +7,11 @@ logger = logging.getLogger(__name__)
 
 
 def break_lock(
-    repository, storage_config, local_borg_version, local_path='borg', remote_path=None,
+    repository_path,
+    storage_config,
+    local_borg_version,
+    local_path='borg',
+    remote_path=None,
 ):
     '''
     Given a local or remote repository path, a storage configuration dict, the local Borg version,
@@ -24,7 +28,7 @@ def break_lock(
         + (('--lock-wait', str(lock_wait)) if lock_wait else ())
         + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ())
         + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ())
-        + flags.make_repository_flags(repository, local_borg_version)
+        + flags.make_repository_flags(repository_path, local_borg_version)
     )
 
     borg_environment = environment.make_environment(storage_config)

@@ -12,7 +12,6 @@ DEFAULT_CHECKS = (
     {'name': 'repository', 'frequency': '1 month'},
     {'name': 'archives', 'frequency': '1 month'},
 )
-DEFAULT_PREFIX = '{hostname}-'
 
 
 logger = logging.getLogger(__name__)
@@ -146,9 +145,10 @@ def filter_checks_on_frequency(
     return tuple(filtered_checks)
 
 
-def make_check_flags(local_borg_version, checks, check_last=None, prefix=None):
+def make_check_flags(local_borg_version, storage_config, checks, check_last=None, prefix=None):
     '''
-    Given the local Borg version and a parsed sequence of checks, transform the checks into tuple of
+    Given the local Borg version, a storage configuration dict, a parsed sequence of checks, the
+    check last value, and a consistency check prefix, transform the checks into tuple of
     command-line flags.
 
     For example, given parsed checks of:
@@ -174,10 +174,21 @@ def make_check_flags(local_borg_version, checks, check_last=None, prefix=None):
 
     if 'archives' in checks:
         last_flags = ('--last', str(check_last)) if check_last else ()
-        if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version):
-            match_archives_flags = ('--match-archives', f'sh:{prefix}*') if prefix else ()
-        else:
-            match_archives_flags = ('--glob-archives', f'{prefix}*') if prefix else ()
+        match_archives_flags = (
+            (
+                ('--match-archives', f'sh:{prefix}*')
+                if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version)
+                else ('--glob-archives', f'{prefix}*')
+            )
+            if prefix
+            else (
+                flags.make_match_archives_flags(
+                    storage_config.get('match_archives'),
+                    storage_config.get('archive_name_format'),
+                    local_borg_version,
+                )
+            )
+        )
     else:
         last_flags = ()
         match_archives_flags = ()
@@ -196,7 +207,7 @@ def make_check_flags(local_borg_version, checks, check_last=None, prefix=None):
         return common_flags
 
     return (
-        tuple('--{}-only'.format(check) for check in checks if check in ('repository', 'archives'))
+        tuple(f'--{check}-only' for check in checks if check in ('repository', 'archives'))
         + common_flags
     )
 
@@ -243,7 +254,7 @@ def read_check_time(path):
 
 
 def check_archives(
-    repository,
+    repository_path,
     location_config,
     storage_config,
     consistency_config,
@@ -268,7 +279,7 @@ def check_archives(
     try:
         borg_repository_id = json.loads(
             rinfo.display_repository_info(
-                repository,
+                repository_path,
                 storage_config,
                 local_borg_version,
                 argparse.Namespace(json=True),
@@ -277,7 +288,7 @@ def check_archives(
             )
         )['repository']['id']
     except (json.JSONDecodeError, KeyError):
-        raise ValueError(f'Cannot determine Borg repository ID for {repository}')
+        raise ValueError(f'Cannot determine Borg repository ID for {repository_path}')
 
     checks = filter_checks_on_frequency(
         location_config,
@@ -291,7 +302,7 @@ def check_archives(
     extra_borg_options = storage_config.get('extra_borg_options', {}).get('check', '')
 
     if set(checks).intersection({'repository', 'archives', 'data'}):
-        lock_wait = storage_config.get('lock_wait', None)
+        lock_wait = storage_config.get('lock_wait')
 
         verbosity_flags = ()
         if logger.isEnabledFor(logging.INFO):
@@ -299,18 +310,18 @@ def check_archives(
         if logger.isEnabledFor(logging.DEBUG):
             verbosity_flags = ('--debug', '--show-rc')
 
-        prefix = consistency_config.get('prefix', DEFAULT_PREFIX)
+        prefix = consistency_config.get('prefix')
 
         full_command = (
             (local_path, 'check')
            + (('--repair',) if repair else ())
-            + make_check_flags(local_borg_version, checks, check_last, prefix)
+            + make_check_flags(local_borg_version, storage_config, checks, check_last, prefix)
            + (('--remote-path', remote_path) if remote_path else ())
            + (('--lock-wait', str(lock_wait)) if lock_wait else ())
            + verbosity_flags
            + (('--progress',) if progress else ())
            + (tuple(extra_borg_options.split(' ')) if extra_borg_options else ())
-            + flags.make_repository_flags(repository, local_borg_version)
+            + flags.make_repository_flags(repository_path, local_borg_version)
        )
 
        borg_environment = environment.make_environment(storage_config)
@@ -329,6 +340,6 @@ def check_archives(
 
     if 'extract' in checks:
         extract.extract_last_archive_dry_run(
-            storage_config, local_borg_version, repository, lock_wait, local_path, remote_path
+            storage_config, local_borg_version, repository_path, lock_wait, local_path, remote_path
         )
         write_check_time(make_check_time_path(location_config, borg_repository_id, 'extract'))
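
The new `match_archives_flags` conditional above is dense; its expected outcomes, as inferred from the diff (not verified against a live borgmatic install, and "myhost-" is just an example prefix):

```python
# With a configured consistency "prefix" (the deprecated path):
#   Borg >= 2.0.0b3: ('--match-archives', 'sh:myhost-*')
#   older Borg:      ('--glob-archives', 'myhost-*')
#
# With no prefix, the flags fall through to the storage configuration:
#   flags.make_match_archives_flags(
#       storage_config.get('match_archives'),
#       storage_config.get('archive_name_format'),
#       local_borg_version,
#   )
# which derives a pattern from archive_name_format when match_archives is
# unset (see the flags.py section below).
```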

@@ -8,7 +8,7 @@ logger = logging.getLogger(__name__)
 
 def compact_segments(
     dry_run,
-    repository,
+    repository_path,
     storage_config,
     local_borg_version,
     local_path='borg',
@@ -36,11 +36,11 @@ def compact_segments(
         + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ())
         + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ())
         + (tuple(extra_borg_options.split(' ')) if extra_borg_options else ())
-        + flags.make_repository_flags(repository, local_borg_version)
+        + flags.make_repository_flags(repository_path, local_borg_version)
     )
 
     if dry_run:
-        logging.info(f'{repository}: Skipping compact (dry run)')
+        logging.info(f'{repository_path}: Skipping compact (dry run)')
         return
 
     execute_command(

@@ -217,7 +217,7 @@ def make_list_filter_flags(local_borg_version, dry_run):
     return f'{base_flags}-'
 
 
-DEFAULT_ARCHIVE_NAME_FORMAT = '{hostname}-{now:%Y-%m-%dT%H:%M:%S.%f}'
+DEFAULT_ARCHIVE_NAME_FORMAT = '{hostname}-{now:%Y-%m-%dT%H:%M:%S.%f}'  # noqa: FS003
 
 
 def collect_borgmatic_source_directories(borgmatic_source_directory):
@@ -322,7 +322,7 @@ def check_all_source_directories_exist(source_directories):
 
 def create_archive(
     dry_run,
-    repository,
+    repository_path,
     location_config,
     storage_config,
     local_borg_version,
@@ -411,7 +411,7 @@ def create_archive(
 
     if stream_processes and location_config.get('read_special') is False:
         logger.warning(
-            f'{repository}: Ignoring configured "read_special" value of false, as true is needed for database hooks.'
+            f'{repository_path}: Ignoring configured "read_special" value of false, as true is needed for database hooks.'
        )
 
     create_command = (
@@ -446,7 +446,9 @@ def create_archive(
        )
        + (('--dry-run',) if dry_run else ())
        + (tuple(extra_borg_options.split(' ')) if extra_borg_options else ())
-        + flags.make_repository_archive_flags(repository, archive_name_format, local_borg_version)
+        + flags.make_repository_archive_flags(
+            repository_path, archive_name_format, local_borg_version
+        )
        + (sources if not pattern_file else ())
    )
 
@@ -466,7 +468,7 @@ def create_archive(
     # If database hooks are enabled (as indicated by streaming processes), exclude files that might
     # cause Borg to hang. But skip this if the user has explicitly set the "read_special" to True.
     if stream_processes and not location_config.get('read_special'):
-        logger.debug(f'{repository}: Collecting special file paths')
+        logger.debug(f'{repository_path}: Collecting special file paths')
         special_file_paths = collect_special_file_paths(
             create_command,
             local_path,
@@ -477,7 +479,7 @@ def create_archive(
 
         if special_file_paths:
             logger.warning(
-                f'{repository}: Excluding special files to prevent Borg from hanging: {", ".join(special_file_paths)}'
+                f'{repository_path}: Excluding special files to prevent Borg from hanging: {", ".join(special_file_paths)}'
            )
            exclude_file = write_pattern_file(
                expand_home_directories(
@@ -507,7 +509,9 @@ def create_archive(
        )
     elif output_log_level is None:
         return execute_command_and_capture_output(
-            create_command, working_directory=working_directory, extra_environment=borg_environment,
+            create_command,
+            working_directory=working_directory,
+            extra_environment=borg_environment,
        )
     else:
         execute_command(
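
The hunks above call `collect_special_file_paths()`, which this diff doesn't show; the intent, per the inline comment, is to find files like FIFOs and device nodes that can make Borg hang when `read_special` is involved. A rough standalone sketch of that kind of detection (an assumption about the approach, not borgmatic's actual implementation):

```python
import os
import stat


def is_special_file(path):
    # FIFOs and character/block devices can block reads indefinitely,
    # which is why such paths get excluded from the create command.
    mode = os.lstat(path).st_mode
    return stat.S_ISFIFO(mode) or stat.S_ISCHR(mode) or stat.S_ISBLK(mode)
```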

@@ -9,7 +9,7 @@ logger = logging.getLogger(__name__)
 
 def export_tar_archive(
     dry_run,
-    repository,
+    repository_path,
     archive,
     paths,
     destination_path,
@@ -45,7 +45,11 @@ def export_tar_archive(
         + (('--dry-run',) if dry_run else ())
         + (('--tar-filter', tar_filter) if tar_filter else ())
         + (('--strip-components', str(strip_components)) if strip_components else ())
-        + flags.make_repository_archive_flags(repository, archive, local_borg_version,)
+        + flags.make_repository_archive_flags(
+            repository_path,
+            archive,
+            local_borg_version,
+        )
         + (destination_path,)
         + (tuple(paths) if paths else ())
     )
@@ -56,7 +60,7 @@ def export_tar_archive(
         output_log_level = logging.INFO
 
     if dry_run:
-        logging.info('{}: Skipping export to tar file (dry run)'.format(repository))
+        logging.info(f'{repository_path}: Skipping export to tar file (dry run)')
         return
 
     execute_command(

@@ -11,7 +11,7 @@ logger = logging.getLogger(__name__)
 def extract_last_archive_dry_run(
     storage_config,
     local_borg_version,
-    repository,
+    repository_path,
     lock_wait=None,
     local_path='borg',
     remote_path=None,
@@ -30,7 +30,7 @@ def extract_last_archive_dry_run(
 
     try:
         last_archive_name = rlist.resolve_archive_name(
-            repository, 'latest', storage_config, local_borg_version, local_path, remote_path
+            repository_path, 'latest', storage_config, local_borg_version, local_path, remote_path
        )
     except ValueError:
         logger.warning('No archives found. Skipping extract consistency check.')
@@ -44,7 +44,9 @@ def extract_last_archive_dry_run(
        + lock_wait_flags
        + verbosity_flags
        + list_flag
-        + flags.make_repository_archive_flags(repository, last_archive_name, local_borg_version)
+        + flags.make_repository_archive_flags(
+            repository_path, last_archive_name, local_borg_version
+        )
    )
 
     execute_command(
@@ -106,7 +108,11 @@ def extract_archive(
        + (('--strip-components', str(strip_components)) if strip_components else ())
        + (('--progress',) if progress else ())
        + (('--stdout',) if extract_to_stdout else ())
-        + flags.make_repository_archive_flags(repository, archive, local_borg_version,)
+        + flags.make_repository_archive_flags(
+            repository,
+            archive,
+            local_borg_version,
+        )
        + (tuple(paths) if paths else ())
    )

@@ -1,6 +1,6 @@
 from enum import Enum
 
-from pkg_resources import parse_version
+from packaging.version import parse
 
 
 class Feature(Enum):
@@ -18,17 +18,17 @@ class Feature(Enum):
 
 
 FEATURE_TO_MINIMUM_BORG_VERSION = {
-    Feature.COMPACT: parse_version('1.2.0a2'),  # borg compact
-    Feature.ATIME: parse_version('1.2.0a7'),  # borg create --atime
-    Feature.NOFLAGS: parse_version('1.2.0a8'),  # borg create --noflags
-    Feature.NUMERIC_IDS: parse_version('1.2.0b3'),  # borg create/extract/mount --numeric-ids
-    Feature.UPLOAD_RATELIMIT: parse_version('1.2.0b3'),  # borg create --upload-ratelimit
-    Feature.SEPARATE_REPOSITORY_ARCHIVE: parse_version('2.0.0a2'),  # --repo with separate archive
-    Feature.RCREATE: parse_version('2.0.0a2'),  # borg rcreate
-    Feature.RLIST: parse_version('2.0.0a2'),  # borg rlist
-    Feature.RINFO: parse_version('2.0.0a2'),  # borg rinfo
-    Feature.MATCH_ARCHIVES: parse_version('2.0.0b3'),  # borg --match-archives
-    Feature.EXCLUDED_FILES_MINUS: parse_version('2.0.0b5'),  # --list --filter uses "-" for excludes
+    Feature.COMPACT: parse('1.2.0a2'),  # borg compact
+    Feature.ATIME: parse('1.2.0a7'),  # borg create --atime
+    Feature.NOFLAGS: parse('1.2.0a8'),  # borg create --noflags
+    Feature.NUMERIC_IDS: parse('1.2.0b3'),  # borg create/extract/mount --numeric-ids
+    Feature.UPLOAD_RATELIMIT: parse('1.2.0b3'),  # borg create --upload-ratelimit
+    Feature.SEPARATE_REPOSITORY_ARCHIVE: parse('2.0.0a2'),  # --repo with separate archive
+    Feature.RCREATE: parse('2.0.0a2'),  # borg rcreate
+    Feature.RLIST: parse('2.0.0a2'),  # borg rlist
+    Feature.RINFO: parse('2.0.0a2'),  # borg rinfo
+    Feature.MATCH_ARCHIVES: parse('2.0.0b3'),  # borg --match-archives
+    Feature.EXCLUDED_FILES_MINUS: parse('2.0.0b5'),  # --list --filter uses "-" for excludes
 }
 
 
@@ -37,4 +37,4 @@ def available(feature, borg_version):
     Given a Borg Feature constant and a Borg version string, return whether that feature is
     available in that version of Borg.
     '''
-    return FEATURE_TO_MINIMUM_BORG_VERSION[feature] <= parse_version(borg_version)
+    return FEATURE_TO_MINIMUM_BORG_VERSION[feature] <= parse(borg_version)
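
Swapping `pkg_resources.parse_version` for `packaging.version.parse` keeps the pre-release ordering these feature gates depend on while dropping the deprecated `pkg_resources` API. A quick check of the semantics that matter here:

```python
from packaging.version import parse

assert parse('1.2.0a2') <= parse('1.2.0')    # alphas precede the final release
assert parse('2.0.0b3') <= parse('2.0.0b5')  # betas order numerically
assert not ('1.2.0a2' <= '1.2.0')            # naive string comparison gets this wrong
```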
@@ -1,4 +1,5 @@
 import itertools
+import re

 from borgmatic.borg import feature

@@ -10,7 +11,7 @@ def make_flags(name, value):
     if not value:
         return ()

-    flag = '--{}'.format(name.replace('_', '-'))
+    flag = f"--{name.replace('_', '-')}"

     if value is True:
         return (flag,)
@@ -33,7 +34,7 @@ def make_flags_from_arguments(arguments, excludes=()):
     )


-def make_repository_flags(repository, local_borg_version):
+def make_repository_flags(repository_path, local_borg_version):
     '''
     Given the path of a Borg repository and the local Borg version, return Borg-version-appropriate
     command-line flags (as a tuple) for selecting that repository.
@@ -42,17 +43,41 @@ def make_repository_flags(repository, local_borg_version):
         ('--repo',)
         if feature.available(feature.Feature.SEPARATE_REPOSITORY_ARCHIVE, local_borg_version)
         else ()
-    ) + (repository,)
+    ) + (repository_path,)


-def make_repository_archive_flags(repository, archive, local_borg_version):
+def make_repository_archive_flags(repository_path, archive, local_borg_version):
     '''
     Given the path of a Borg repository, an archive name or pattern, and the local Borg version,
     return Borg-version-appropriate command-line flags (as a tuple) for selecting that repository
     and archive.
     '''
     return (
-        ('--repo', repository, archive)
+        ('--repo', repository_path, archive)
         if feature.available(feature.Feature.SEPARATE_REPOSITORY_ARCHIVE, local_borg_version)
-        else (f'{repository}::{archive}',)
+        else (f'{repository_path}::{archive}',)
     )


+def make_match_archives_flags(match_archives, archive_name_format, local_borg_version):
+    '''
+    Return match archives flags based on the given match archives value, if any. If it isn't set,
+    return match archives flags to match archives created with the given archive name format, if
+    any. This is done by replacing certain archive name format placeholders for ephemeral data (like
+    "{now}") with globs.
+    '''
+    if match_archives:
+        if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version):
+            return ('--match-archives', match_archives)
+        else:
+            return ('--glob-archives', re.sub(r'^sh:', '', match_archives))
+
+    if not archive_name_format:
+        return ()
+
+    derived_match_archives = re.sub(r'\{(now|utcnow|pid)([:%\w\.-]*)\}', '*', archive_name_format)
+
+    if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version):
+        return ('--match-archives', f'sh:{derived_match_archives}')
+    else:
+        return ('--glob-archives', f'{derived_match_archives}')
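A hedged sketch of what the new make_match_archives_flags() derives when only archive_name_format is configured: per its docstring, ephemeral placeholders such as "{now}" become globs, and the result is passed to --match-archives (or --glob-archives on Borg 1.x). The format string below is just an example value:

    import re

    archive_name_format = '{hostname}-{now:%Y-%m-%dT%H:%M:%S.%f}'

    # Replace ephemeral placeholders ({now}, {utcnow}, {pid}) with globs;
    # stable placeholders like {hostname} survive untouched.
    derived_match_archives = re.sub(
        r'\{(now|utcnow|pid)([:%\w\.-]*)\}', '*', archive_name_format
    )

    assert derived_match_archives == '{hostname}-*'
    # On Borg 2.x this becomes: ('--match-archives', 'sh:{hostname}-*')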
@@ -8,7 +8,7 @@ logger = logging.getLogger(__name__)


 def display_archives_info(
-    repository,
+    repository_path,
     storage_config,
     local_borg_version,
     info_arguments,
@@ -44,22 +44,26 @@ def display_archives_info(
             else flags.make_flags('glob-archives', f'{info_arguments.prefix}*')
         )
         if info_arguments.prefix
-        else ()
+        else (
+            flags.make_match_archives_flags(
+                info_arguments.match_archives
+                or info_arguments.archive
+                or storage_config.get('match_archives'),
+                storage_config.get('archive_name_format'),
+                local_borg_version,
+            )
+        )
     )
     + flags.make_flags_from_arguments(
-        info_arguments, excludes=('repository', 'archive', 'prefix')
-    )
-    + flags.make_repository_flags(repository, local_borg_version)
-    + (
-        flags.make_flags('match-archives', info_arguments.archive)
-        if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version)
-        else flags.make_flags('glob-archives', info_arguments.archive)
+        info_arguments, excludes=('repository', 'archive', 'prefix', 'match_archives')
     )
+    + flags.make_repository_flags(repository_path, local_borg_version)
 )

     if info_arguments.json:
         return execute_command_and_capture_output(
-            full_command, extra_environment=environment.make_environment(storage_config),
+            full_command,
+            extra_environment=environment.make_environment(storage_config),
         )
     else:
         execute_command(
@@ -21,7 +21,7 @@ MAKE_FLAGS_EXCLUDES = (


 def make_list_command(
-    repository,
+    repository_path,
     storage_config,
     local_borg_version,
     list_arguments,
@@ -52,10 +52,10 @@ def make_list_command(
         + flags.make_flags_from_arguments(list_arguments, excludes=MAKE_FLAGS_EXCLUDES)
         + (
             flags.make_repository_archive_flags(
-                repository, list_arguments.archive, local_borg_version
+                repository_path, list_arguments.archive, local_borg_version
             )
             if list_arguments.archive
-            else flags.make_repository_flags(repository, local_borg_version)
+            else flags.make_repository_flags(repository_path, local_borg_version)
         )
         + (tuple(list_arguments.paths) if list_arguments.paths else ())
     )
@@ -86,7 +86,7 @@ def make_find_paths(find_paths):


 def capture_archive_listing(
-    repository,
+    repository_path,
     archive,
     storage_config,
     local_borg_version,
@@ -104,16 +104,16 @@ def capture_archive_listing(
     return tuple(
         execute_command_and_capture_output(
             make_list_command(
-                repository,
+                repository_path,
                 storage_config,
                 local_borg_version,
                 argparse.Namespace(
-                    repository=repository,
+                    repository=repository_path,
                     archive=archive,
                     paths=[f'sh:{list_path}'],
                     find_paths=None,
                     json=None,
-                    format='{path}{NL}',
+                    format='{path}{NL}',  # noqa: FS003
                 ),
                 local_path,
                 remote_path,
@@ -126,7 +126,7 @@ def capture_archive_listing(


 def list_archive(
-    repository,
+    repository_path,
     storage_config,
     local_borg_version,
     list_arguments,
@@ -149,7 +149,7 @@ def list_archive(
         )

         rlist_arguments = argparse.Namespace(
-            repository=repository,
+            repository=repository_path,
            short=list_arguments.short,
            format=list_arguments.format,
            json=list_arguments.json,
@@ -160,7 +160,12 @@ def list_archive(
            last=list_arguments.last,
        )
        return rlist.list_repository(
-            repository, storage_config, local_borg_version, rlist_arguments, local_path, remote_path
+            repository_path,
+            storage_config,
+            local_borg_version,
+            rlist_arguments,
+            local_path,
+            remote_path,
        )

     if list_arguments.archive:
@@ -181,7 +186,7 @@ def list_archive(
     # getting a list of archives to search.
     if list_arguments.find_paths and not list_arguments.archive:
         rlist_arguments = argparse.Namespace(
-            repository=repository,
+            repository=repository_path,
            short=True,
            format=None,
            json=None,
@@ -196,7 +201,7 @@ def list_archive(
        archive_lines = tuple(
            execute_command_and_capture_output(
                rlist.make_rlist_command(
-                    repository,
+                    repository_path,
                    storage_config,
                    local_borg_version,
                    rlist_arguments,
@@ -213,7 +218,7 @@ def list_archive(

     # For each archive listed by Borg, run list on the contents of that archive.
     for archive in archive_lines:
-        logger.answer(f'{repository}: Listing archive {archive}')
+        logger.answer(f'{repository_path}: Listing archive {archive}')

         archive_arguments = copy.copy(list_arguments)
         archive_arguments.archive = archive
@@ -224,7 +229,7 @@ def list_archive(
            setattr(archive_arguments, name, None)

        main_command = make_list_command(
-            repository,
+            repository_path,
            storage_config,
            local_borg_version,
            archive_arguments,
@@ -7,7 +7,7 @@ logger = logging.getLogger(__name__)


 def mount_archive(
-    repository,
+    repository_path,
     archive,
     mount_arguments,
     storage_config,
@@ -40,7 +40,7 @@ def mount_archive(
         + (('-o', mount_arguments.options) if mount_arguments.options else ())
         + (
             (
-                flags.make_repository_flags(repository, local_borg_version)
+                flags.make_repository_flags(repository_path, local_borg_version)
                 + (
                     ('--match-archives', archive)
                     if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version)
@@ -49,9 +49,9 @@ def mount_archive(
             )
             if feature.available(feature.Feature.SEPARATE_REPOSITORY_ARCHIVE, local_borg_version)
             else (
-                flags.make_repository_archive_flags(repository, archive, local_borg_version)
+                flags.make_repository_archive_flags(repository_path, archive, local_borg_version)
                 if archive
-                else flags.make_repository_flags(repository, local_borg_version)
+                else flags.make_repository_flags(repository_path, local_borg_version)
             )
         )
         + (mount_arguments.mount_point,)
@@ -7,10 +7,10 @@ from borgmatic.execute import execute_command
 logger = logging.getLogger(__name__)


-def make_prune_flags(retention_config, local_borg_version):
+def make_prune_flags(storage_config, retention_config, local_borg_version):
     '''
-    Given a retention config dict mapping from option name to value, tranform it into an iterable of
-    command-line name-value flag pairs.
+    Given a retention config dict mapping from option name to value, transform it into a sequence of
+    command-line flags.

     For example, given a retention config of:

@@ -24,22 +24,32 @@ def make_prune_flags(retention_config, local_borg_version):
     )
     '''
     config = retention_config.copy()
-    prefix = config.pop('prefix', '{hostname}-')
+    prefix = config.pop('prefix', None)

-    if prefix:
-        if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version):
-            config['match_archives'] = f'sh:{prefix}*'
-        else:
-            config['glob_archives'] = f'{prefix}*'
-
-    return (
+    flag_pairs = (
         ('--' + option_name.replace('_', '-'), str(value)) for option_name, value in config.items()
     )

+    return tuple(element for pair in flag_pairs for element in pair) + (
+        (
+            ('--match-archives', f'sh:{prefix}*')
+            if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version)
+            else ('--glob-archives', f'{prefix}*')
+        )
+        if prefix
+        else (
+            flags.make_match_archives_flags(
+                storage_config.get('match_archives'),
+                storage_config.get('archive_name_format'),
+                local_borg_version,
+            )
+        )
+    )


 def prune_archives(
     dry_run,
-    repository,
+    repository_path,
     storage_config,
     retention_config,
     local_borg_version,
@@ -59,11 +69,7 @@ def prune_archives(

     full_command = (
         (local_path, 'prune')
-        + tuple(
-            element
-            for pair in make_prune_flags(retention_config, local_borg_version)
-            for element in pair
-        )
+        + make_prune_flags(storage_config, retention_config, local_borg_version)
         + (('--remote-path', remote_path) if remote_path else ())
         + (('--umask', str(umask)) if umask else ())
         + (('--lock-wait', str(lock_wait)) if lock_wait else ())
@@ -78,7 +84,7 @@ def prune_archives(
         + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ())
         + (tuple(extra_borg_options.split(' ')) if extra_borg_options else ())
-        + flags.make_repository_flags(repository, local_borg_version)
+        + flags.make_repository_flags(repository_path, local_borg_version)
     )

     if prune_arguments.stats or prune_arguments.list_archives:
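A small sketch of the flattening the reworked make_prune_flags() performs, using a retention config shaped like the one in its docstring:

    retention_config = {'keep_daily': 7, 'keep_hourly': 24}

    flag_pairs = (
        ('--' + option_name.replace('_', '-'), str(value))
        for option_name, value in retention_config.items()
    )

    # Flatten the name-value pairs into one tuple, ready to splice into the
    # borg prune command line ahead of any --match-archives/--glob-archives flag.
    flags = tuple(element for pair in flag_pairs for element in pair)

    assert flags == ('--keep-daily', '7', '--keep-hourly', '24')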
@@ -13,7 +13,7 @@ RINFO_REPOSITORY_NOT_FOUND_EXIT_CODE = 2

 def create_repository(
     dry_run,
-    repository,
+    repository_path,
     storage_config,
     local_borg_version,
     encryption_mode,
@@ -33,14 +33,14 @@ def create_repository(
     '''
     try:
         rinfo.display_repository_info(
-            repository,
+            repository_path,
             storage_config,
             local_borg_version,
             argparse.Namespace(json=True),
             local_path,
             remote_path,
         )
-        logger.info(f'{repository}: Repository already exists. Skipping creation.')
+        logger.info(f'{repository_path}: Repository already exists. Skipping creation.')
         return
     except subprocess.CalledProcessError as error:
         if error.returncode != RINFO_REPOSITORY_NOT_FOUND_EXIT_CODE:
@@ -65,11 +65,11 @@ def create_repository(
         + (('--debug',) if logger.isEnabledFor(logging.DEBUG) else ())
         + (('--remote-path', remote_path) if remote_path else ())
         + (tuple(extra_borg_options.split(' ')) if extra_borg_options else ())
-        + flags.make_repository_flags(repository, local_borg_version)
+        + flags.make_repository_flags(repository_path, local_borg_version)
     )

     if dry_run:
-        logging.info(f'{repository}: Skipping repository creation (dry run)')
+        logging.info(f'{repository_path}: Skipping repository creation (dry run)')
         return

     # Do not capture output here, so as to support interactive prompts.
@@ -8,7 +8,7 @@ logger = logging.getLogger(__name__)


 def display_repository_info(
-    repository,
+    repository_path,
     storage_config,
     local_borg_version,
     rinfo_arguments,
@@ -43,14 +43,15 @@ def display_repository_info(
         + flags.make_flags('remote-path', remote_path)
         + flags.make_flags('lock-wait', lock_wait)
         + (('--json',) if rinfo_arguments.json else ())
-        + flags.make_repository_flags(repository, local_borg_version)
+        + flags.make_repository_flags(repository_path, local_borg_version)
     )

     extra_environment = environment.make_environment(storage_config)

     if rinfo_arguments.json:
         return execute_command_and_capture_output(
-            full_command, extra_environment=extra_environment,
+            full_command,
+            extra_environment=extra_environment,
         )
     else:
         execute_command(
@@ -8,7 +8,12 @@ logger = logging.getLogger(__name__)


 def resolve_archive_name(
-    repository, archive, storage_config, local_borg_version, local_path='borg', remote_path=None
+    repository_path,
+    archive,
+    storage_config,
+    local_borg_version,
+    local_path='borg',
+    remote_path=None,
 ):
     '''
     Given a local or remote repository path, an archive name, a storage config dict, a local Borg
@@ -31,27 +36,28 @@ def resolve_archive_name(
         + flags.make_flags('lock-wait', lock_wait)
         + flags.make_flags('last', 1)
         + ('--short',)
-        + flags.make_repository_flags(repository, local_borg_version)
+        + flags.make_repository_flags(repository_path, local_borg_version)
     )

     output = execute_command_and_capture_output(
-        full_command, extra_environment=environment.make_environment(storage_config),
+        full_command,
+        extra_environment=environment.make_environment(storage_config),
     )
     try:
         latest_archive = output.strip().splitlines()[-1]
     except IndexError:
         raise ValueError('No archives found in the repository')

-    logger.debug('{}: Latest archive is {}'.format(repository, latest_archive))
+    logger.debug(f'{repository_path}: Latest archive is {latest_archive}')

     return latest_archive


-MAKE_FLAGS_EXCLUDES = ('repository', 'prefix')
+MAKE_FLAGS_EXCLUDES = ('repository', 'prefix', 'match_archives')


 def make_rlist_command(
-    repository,
+    repository_path,
     storage_config,
     local_borg_version,
     rlist_arguments,
@@ -89,15 +95,21 @@ def make_rlist_command(
             else flags.make_flags('glob-archives', f'{rlist_arguments.prefix}*')
         )
         if rlist_arguments.prefix
-        else ()
+        else (
+            flags.make_match_archives_flags(
+                rlist_arguments.match_archives or storage_config.get('match_archives'),
+                storage_config.get('archive_name_format'),
+                local_borg_version,
+            )
+        )
     )
     + flags.make_flags_from_arguments(rlist_arguments, excludes=MAKE_FLAGS_EXCLUDES)
-    + flags.make_repository_flags(repository, local_borg_version)
+    + flags.make_repository_flags(repository_path, local_borg_version)
 )


 def list_repository(
-    repository,
+    repository_path,
     storage_config,
     local_borg_version,
     rlist_arguments,
@@ -113,11 +125,16 @@ def list_repository(
     borg_environment = environment.make_environment(storage_config)

     main_command = make_rlist_command(
-        repository, storage_config, local_borg_version, rlist_arguments, local_path, remote_path
+        repository_path,
+        storage_config,
+        local_borg_version,
+        rlist_arguments,
+        local_path,
+        remote_path,
     )

     if rlist_arguments.json:
-        return execute_command_and_capture_output(main_command, extra_environment=borg_environment,)
+        return execute_command_and_capture_output(main_command, extra_environment=borg_environment)
     else:
         execute_command(
             main_command,
@@ -9,7 +9,7 @@ logger = logging.getLogger(__name__)

 def transfer_archives(
     dry_run,
-    repository,
+    repository_path,
     storage_config,
     local_borg_version,
     transfer_arguments,
@@ -28,17 +28,22 @@ def transfer_archives(
         + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ())
         + flags.make_flags('remote-path', remote_path)
         + flags.make_flags('lock-wait', storage_config.get('lock_wait', None))
-        + (('--progress',) if transfer_arguments.progress else ())
         + (
-            flags.make_flags(
-                'match-archives', transfer_arguments.match_archives or transfer_arguments.archive
+            flags.make_flags_from_arguments(
+                transfer_arguments,
+                excludes=('repository', 'source_repository', 'archive', 'match_archives'),
+            )
+            or (
+                flags.make_match_archives_flags(
+                    transfer_arguments.match_archives
+                    or transfer_arguments.archive
+                    or storage_config.get('match_archives'),
+                    storage_config.get('archive_name_format'),
+                    local_borg_version,
+                )
             )
         )
-        + flags.make_flags_from_arguments(
-            transfer_arguments,
-            excludes=('repository', 'source_repository', 'archive', 'match_archives'),
-        )
-        + flags.make_repository_flags(repository, local_borg_version)
+        + flags.make_repository_flags(repository_path, local_borg_version)
         + flags.make_flags('other-repo', transfer_arguments.source_repository)
         + flags.make_flags('dry-run', dry_run)
     )
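A sketch of the fallback idiom the transfer hunk leans on: make_flags_from_arguments() returns an empty tuple when no relevant command-line flags were given, and an empty tuple is falsy, so the configuration-derived match flags only kick in as a fallback. The two helpers below are illustrative stand-ins, not the real borgmatic functions:

    def flags_from_arguments():
        # Pretend the user passed no --match-archives or similar flags.
        return ()

    def flags_from_config():
        return ('--match-archives', 'sh:{hostname}-*')

    chosen = flags_from_arguments() or flags_from_config()
    assert chosen == ('--match-archives', 'sh:{hostname}-*')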
@@ -19,7 +19,8 @@ def local_borg_version(storage_config, local_path='borg'):
         + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ())
     )
     output = execute_command_and_capture_output(
-        full_command, extra_environment=environment.make_environment(storage_config),
+        full_command,
+        extra_environment=environment.make_environment(storage_config),
     )

     try:
@@ -131,9 +131,7 @@ def make_parsers():
         nargs='*',
         dest='config_paths',
         default=config_paths,
-        help='Configuration filenames or directories, defaults to: {}'.format(
-            ' '.join(unexpanded_config_paths)
-        ),
+        help=f"Configuration filenames or directories, defaults to: {' '.join(unexpanded_config_paths)}",
     )
     global_group.add_argument(
         '--excludes',
@@ -182,9 +180,13 @@ def make_parsers():
     global_group.add_argument(
         '--log-file',
         type=str,
-        default=None,
         help='Write log messages to this file instead of syslog',
     )
+    global_group.add_argument(
+        '--log-file-format',
+        type=str,
+        help='Log format string used for log messages written to the log file',
+    )
     global_group.add_argument(
         '--override',
         metavar='SECTION.OPTION=VALUE',
@@ -225,7 +227,7 @@ def make_parsers():
     subparsers = top_level_parser.add_subparsers(
         title='actions',
         metavar='',
-        help='Specify zero or more actions. Defaults to creat, prune, compact, and check. Use --help with action for details:',
+        help='Specify zero or more actions. Defaults to create, prune, compact, and check. Use --help with action for details:',
     )
     rcreate_parser = subparsers.add_parser(
         'rcreate',
@@ -258,10 +260,13 @@ def make_parsers():
         help='Copy the crypt key used for authenticated encryption from the source repository, defaults to a new random key [Borg 2.x+ only]',
     )
     rcreate_group.add_argument(
-        '--append-only', action='store_true', help='Create an append-only repository',
+        '--append-only',
+        action='store_true',
+        help='Create an append-only repository',
     )
     rcreate_group.add_argument(
-        '--storage-quota', help='Create a repository with a fixed storage quota',
+        '--storage-quota',
+        help='Create a repository with a fixed storage quota',
     )
     rcreate_group.add_argument(
         '--make-parent-dirs',
@@ -295,7 +300,7 @@ def make_parsers():
     )
     transfer_group.add_argument(
         '--upgrader',
-        help='Upgrader type used to convert the transfered data, e.g. "From12To20" to upgrade data from Borg 1.2 to 2.0 format, defaults to no conversion',
+        help='Upgrader type used to convert the transferred data, e.g. "From12To20" to upgrade data from Borg 1.2 to 2.0 format, defaults to no conversion',
     )
     transfer_group.add_argument(
         '--progress',
@@ -673,6 +678,13 @@ def make_parsers():
         dest='databases',
         help="Names of databases to restore from archive, defaults to all databases. Note that any databases to restore must be defined in borgmatic's configuration",
     )
+    restore_group.add_argument(
+        '--schema',
+        metavar='NAME',
+        nargs='+',
+        dest='schemas',
+        help='Names of schemas to restore from the database, defaults to all schemas. Schemas are only supported for PostgreSQL and MongoDB databases',
+    )
     restore_group.add_argument(
         '-h', '--help', action='help', help='Show this help message and exit'
     )
@@ -686,7 +698,8 @@ def make_parsers():
     )
     rlist_group = rlist_parser.add_argument_group('rlist arguments')
     rlist_group.add_argument(
-        '--repository', help='Path of repository to list, defaults to the configured repositories',
+        '--repository',
+        help='Path of repository to list, defaults to the configured repositories',
     )
     rlist_group.add_argument(
         '--short', default=False, action='store_true', help='Output only archive names'
@@ -696,7 +709,7 @@ def make_parsers():
         '--json', default=False, action='store_true', help='Output results as JSON'
     )
     rlist_group.add_argument(
-        '-P', '--prefix', help='Only list archive names starting with this prefix'
+        '-P', '--prefix', help='Deprecated. Only list archive names starting with this prefix'
     )
     rlist_group.add_argument(
         '-a',
@@ -763,7 +776,7 @@ def make_parsers():
         '--json', default=False, action='store_true', help='Output results as JSON'
     )
     list_group.add_argument(
-        '-P', '--prefix', help='Only list archive names starting with this prefix'
+        '-P', '--prefix', help='Deprecated. Only list archive names starting with this prefix'
     )
     list_group.add_argument(
         '-a',
@@ -835,7 +848,9 @@ def make_parsers():
         '--json', dest='json', default=False, action='store_true', help='Output results as JSON'
     )
     info_group.add_argument(
-        '-P', '--prefix', help='Only show info for archive names starting with this prefix'
+        '-P',
+        '--prefix',
+        help='Deprecated. Only show info for archive names starting with this prefix',
     )
     info_group.add_argument(
         '-a',
@@ -945,7 +960,17 @@ def parse_arguments(*unparsed_arguments):
         and arguments['transfer'].match_archives
     ):
         raise ValueError(
-            'With the transfer action, only one of --archive and --glob-archives flags can be used.'
+            'With the transfer action, only one of --archive and --match-archives flags can be used.'
+        )
+
+    if 'list' in arguments and (arguments['list'].prefix and arguments['list'].match_archives):
+        raise ValueError(
+            'With the list action, only one of --prefix or --match-archives flags can be used.'
+        )
+
+    if 'rlist' in arguments and (arguments['rlist'].prefix and arguments['rlist'].match_archives):
+        raise ValueError(
+            'With the rlist action, only one of --prefix or --match-archives flags can be used.'
         )

     if 'info' in arguments and (
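A minimal sketch of the new mutual-exclusion checks added to parse_arguments(): the deprecated --prefix cannot be combined with --match-archives for the same action. The Namespace here stands in for real parsed arguments:

    import argparse

    list_arguments = argparse.Namespace(prefix='home-', match_archives='sh:home-*')

    try:
        if list_arguments.prefix and list_arguments.match_archives:
            raise ValueError(
                'With the list action, only one of --prefix or --match-archives flags can be used.'
            )
    except ValueError as error:
        print(error)  # Both flags were given, so the conflict is reported.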
@@ -8,7 +8,11 @@ from queue import Queue
 from subprocess import CalledProcessError

 import colorama
-import pkg_resources
+
+try:
+    import importlib_metadata
+except ModuleNotFoundError:  # pragma: nocover
+    import importlib.metadata as importlib_metadata

 import borgmatic.actions.borg
 import borgmatic.actions.break_lock
@@ -70,9 +74,7 @@ def run_configuration(config_filename, config, arguments):
     try:
         local_borg_version = borg_version.local_borg_version(storage, local_path)
     except (OSError, CalledProcessError, ValueError) as error:
-        yield from log_error_records(
-            '{}: Error getting local Borg version'.format(config_filename), error
-        )
+        yield from log_error_records(f'{config_filename}: Error getting local Borg version', error)
         return

     try:
@@ -100,15 +102,18 @@ def run_configuration(config_filename, config, arguments):
             return

         encountered_error = error
-        yield from log_error_records('{}: Error pinging monitor'.format(config_filename), error)
+        yield from log_error_records(f'{config_filename}: Error pinging monitor', error)

     if not encountered_error:
         repo_queue = Queue()
         for repo in location['repositories']:
-            repo_queue.put((repo, 0),)
+            repo_queue.put(
+                (repo, 0),
+            )

         while not repo_queue.empty():
-            repository_path, retry_num = repo_queue.get()
+            repository, retry_num = repo_queue.get()
+            logger.debug(f'{repository["path"]}: Running actions for repository')
             timeout = retry_num * retry_wait
             if timeout:
                 logger.warning(f'{config_filename}: Sleeping {timeout}s before next retry')
@@ -125,14 +130,16 @@ def run_configuration(config_filename, config, arguments):
                     local_path=local_path,
                     remote_path=remote_path,
                     local_borg_version=local_borg_version,
-                    repository_path=repository_path,
+                    repository=repository,
                 )
             except (OSError, CalledProcessError, ValueError) as error:
                 if retry_num < retries:
-                    repo_queue.put((repository_path, retry_num + 1),)
+                    repo_queue.put(
+                        (repository, retry_num + 1),
+                    )
                     tuple(  # Consume the generator so as to trigger logging.
                         log_error_records(
-                            '{}: Error running actions for repository'.format(repository_path),
+                            f'{repository["path"]}: Error running actions for repository',
                             error,
                             levelno=logging.WARNING,
                             log_command_error_output=True,
@@ -147,10 +154,10 @@ def run_configuration(config_filename, config, arguments):
                     return

                 yield from log_error_records(
-                    '{}: Error running actions for repository'.format(repository_path), error
+                    f'{repository["path"]}: Error running actions for repository', error
                 )
                 encountered_error = error
-                error_repository = repository_path
+                error_repository = repository['path']

     try:
         if using_primary_action:
@@ -169,7 +176,7 @@ def run_configuration(config_filename, config, arguments):
             return

         encountered_error = error
-        yield from log_error_records('{}: Error pinging monitor'.format(config_filename), error)
+        yield from log_error_records(f'{repository["path"]}: Error pinging monitor', error)

     if not encountered_error:
         try:
@@ -196,7 +203,7 @@ def run_configuration(config_filename, config, arguments):
             return

         encountered_error = error
-        yield from log_error_records('{}: Error pinging monitor'.format(config_filename), error)
+        yield from log_error_records(f'{config_filename}: Error pinging monitor', error)

     if encountered_error and using_primary_action:
         try:
@@ -231,9 +238,7 @@ def run_configuration(config_filename, config, arguments):
             if command.considered_soft_failure(config_filename, error):
                 return

-            yield from log_error_records(
-                '{}: Error running on-error hook'.format(config_filename), error
-            )
+            yield from log_error_records(f'{config_filename}: Error running on-error hook', error)


 def run_actions(
@@ -248,7 +253,7 @@ def run_actions(
     local_path,
     remote_path,
     local_borg_version,
-    repository_path,
+    repository,
 ):
     '''
     Given parsed command-line arguments as an argparse.ArgumentParser instance, the configuration
@@ -263,13 +268,14 @@ def run_actions(
     invalid.
     '''
     add_custom_log_levels()
-    repository = os.path.expanduser(repository_path)
+    repository_path = os.path.expanduser(repository['path'])
     global_arguments = arguments['global']
     dry_run_label = ' (dry run; not making any changes)' if global_arguments.dry_run else ''
     hook_context = {
         'repository': repository_path,
         # Deprecated: For backwards compatibility with borgmatic < 1.6.0.
-        'repositories': ','.join(location['repositories']),
+        'repositories': ','.join([repo['path'] for repo in location['repositories']]),
+        'log_file': global_arguments.log_file if global_arguments.log_file else '',
     }

     command.execute_hook(
@@ -281,7 +287,7 @@ def run_actions(
         **hook_context,
     )

-    for (action_name, action_arguments) in arguments.items():
+    for action_name, action_arguments in arguments.items():
         if action_name == 'rcreate':
             borgmatic.actions.rcreate.run_rcreate(
                 repository,
@@ -410,19 +416,39 @@ def run_actions(
             )
         elif action_name == 'rlist':
             yield from borgmatic.actions.rlist.run_rlist(
-                repository, storage, local_borg_version, action_arguments, local_path, remote_path,
+                repository,
+                storage,
+                local_borg_version,
+                action_arguments,
+                local_path,
+                remote_path,
             )
         elif action_name == 'list':
             yield from borgmatic.actions.list.run_list(
-                repository, storage, local_borg_version, action_arguments, local_path, remote_path,
+                repository,
+                storage,
+                local_borg_version,
+                action_arguments,
+                local_path,
+                remote_path,
             )
         elif action_name == 'rinfo':
             yield from borgmatic.actions.rinfo.run_rinfo(
-                repository, storage, local_borg_version, action_arguments, local_path, remote_path,
+                repository,
+                storage,
+                local_borg_version,
+                action_arguments,
+                local_path,
+                remote_path,
            )
         elif action_name == 'info':
             yield from borgmatic.actions.info.run_info(
-                repository, storage, local_borg_version, action_arguments, local_path, remote_path,
+                repository,
+                storage,
+                local_borg_version,
+                action_arguments,
+                local_path,
+                remote_path,
             )
         elif action_name == 'break-lock':
             borgmatic.actions.break_lock.run_break_lock(
@@ -435,7 +461,12 @@ def run_actions(
             )
         elif action_name == 'borg':
             borgmatic.actions.borg.run_borg(
-                repository, storage, local_borg_version, action_arguments, local_path, remote_path,
+                repository,
+                storage,
+                local_borg_version,
+                action_arguments,
+                local_path,
+                remote_path,
             )

     command.execute_hook(
@@ -472,9 +503,7 @@ def load_configurations(config_filenames, overrides=None, resolve_env=True):
                 dict(
                     levelno=logging.WARNING,
                     levelname='WARNING',
-                    msg='{}: Insufficient permissions to read configuration file'.format(
-                        config_filename
-                    ),
+                    msg=f'{config_filename}: Insufficient permissions to read configuration file',
                 )
             ),
         ]
@@ -486,7 +515,7 @@ def load_configurations(config_filenames, overrides=None, resolve_env=True):
                 dict(
                     levelno=logging.CRITICAL,
                     levelname='CRITICAL',
-                    msg='{}: Error parsing configuration file'.format(config_filename),
+                    msg=f'{config_filename}: Error parsing configuration file',
                 )
             ),
             logging.makeLogRecord(
@@ -587,9 +616,7 @@ def collect_configuration_run_summary_logs(configs, arguments):

     if not configs:
         yield from log_error_records(
-            '{}: No valid configuration files found'.format(
-                ' '.join(arguments['global'].config_paths)
-            )
+            f"{' '.join(arguments['global'].config_paths)}: No valid configuration files found",
         )
         return

@@ -615,24 +642,25 @@ def collect_configuration_run_summary_logs(configs, arguments):
         error_logs = tuple(result for result in results if isinstance(result, logging.LogRecord))

         if error_logs:
-            yield from log_error_records('{}: An error occurred'.format(config_filename))
+            yield from log_error_records(f'{config_filename}: An error occurred')
             yield from error_logs
         else:
             yield logging.makeLogRecord(
                 dict(
                     levelno=logging.INFO,
                     levelname='INFO',
-                    msg='{}: Successfully ran configuration file'.format(config_filename),
+                    msg=f'{config_filename}: Successfully ran configuration file',
                 )
             )
             if results:
                 json_results.extend(results)

     if 'umount' in arguments:
-        logger.info('Unmounting mount point {}'.format(arguments['umount'].mount_point))
+        logger.info(f"Unmounting mount point {arguments['umount'].mount_point}")
         try:
             borg_umount.unmount_archive(
-                mount_point=arguments['umount'].mount_point, local_path=get_local_path(configs),
+                mount_point=arguments['umount'].mount_point,
+                local_path=get_local_path(configs),
             )
         except (CalledProcessError, OSError) as error:
             yield from log_error_records('Error unmounting mount point', error)
@@ -677,12 +705,12 @@ def main():  # pragma: no cover
         if error.code == 0:
             raise error
         configure_logging(logging.CRITICAL)
-        logger.critical('Error parsing arguments: {}'.format(' '.join(sys.argv)))
+        logger.critical(f"Error parsing arguments: {' '.join(sys.argv)}")
         exit_with_help_link()

     global_arguments = arguments['global']
     if global_arguments.version:
-        print(pkg_resources.require('borgmatic')[0].version)
+        print(importlib_metadata.version('borgmatic'))
         sys.exit(0)
     if global_arguments.bash_completion:
         print(borgmatic.commands.completion.bash_completion())
@@ -707,10 +735,11 @@ def main():  # pragma: no cover
             verbosity_to_log_level(global_arguments.log_file_verbosity),
             verbosity_to_log_level(global_arguments.monitoring_verbosity),
             global_arguments.log_file,
+            global_arguments.log_file_format,
         )
     except (FileNotFoundError, PermissionError) as error:
         configure_logging(logging.CRITICAL)
-        logger.critical('Error configuring logging: {}'.format(error))
+        logger.critical(f'Error configuring logging: {error}')
         exit_with_help_link()

     logger.debug('Ensuring legacy configuration is upgraded')
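A minimal sketch of the pkg_resources replacement made above: prefer the importlib_metadata backport when installed, and fall back to the standard library's importlib.metadata (Python 3.8+); the version lookup assumes borgmatic is installed in the current environment:

    try:
        import importlib_metadata
    except ModuleNotFoundError:
        import importlib.metadata as importlib_metadata

    print(importlib_metadata.version('borgmatic'))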
@@ -34,7 +34,7 @@ def bash_completion():
         ' local this_script="$(cat "$BASH_SOURCE" 2> /dev/null)"',
         ' local installed_script="$(borgmatic --bash-completion 2> /dev/null)"',
         ' if [ "$this_script" != "$installed_script" ] && [ "$installed_script" != "" ];'
-        ' then cat << EOF\n%s\nEOF' % UPGRADE_MESSAGE,
+        f' then cat << EOF\n{UPGRADE_MESSAGE}\nEOF',
         ' fi',
         '}',
         'complete_borgmatic() {',
@@ -48,7 +48,7 @@ def bash_completion():
             for action, subparser in subparsers.choices.items()
         )
         + (
-            ' COMPREPLY=($(compgen -W "%s %s" -- "${COMP_WORDS[COMP_CWORD]}"))'
+            ' COMPREPLY=($(compgen -W "%s %s" -- "${COMP_WORDS[COMP_CWORD]}"))'  # noqa: FS003
             % (actions, global_flags),
             ' (check_version &)',
             '}',
@@ -28,9 +28,7 @@ def parse_arguments(*arguments):
         '--source-config',
         dest='source_config_filename',
         default=DEFAULT_SOURCE_CONFIG_FILENAME,
-        help='Source INI-style configuration filename. Default: {}'.format(
-            DEFAULT_SOURCE_CONFIG_FILENAME
-        ),
+        help=f'Source INI-style configuration filename. Default: {DEFAULT_SOURCE_CONFIG_FILENAME}',
     )
     parser.add_argument(
         '-e',
@@ -46,9 +44,7 @@ def parse_arguments(*arguments):
         '--destination-config',
         dest='destination_config_filename',
         default=DEFAULT_DESTINATION_CONFIG_FILENAME,
-        help='Destination YAML configuration filename. Default: {}'.format(
-            DEFAULT_DESTINATION_CONFIG_FILENAME
-        ),
+        help=f'Destination YAML configuration filename. Default: {DEFAULT_DESTINATION_CONFIG_FILENAME}',
     )

     return parser.parse_args(arguments)
@@ -59,19 +55,15 @@ TEXT_WRAP_CHARACTERS = 80

 def display_result(args):  # pragma: no cover
     result_lines = textwrap.wrap(
-        'Your borgmatic configuration has been upgraded. Please review the result in {}.'.format(
-            args.destination_config_filename
-        ),
+        f'Your borgmatic configuration has been upgraded. Please review the result in {args.destination_config_filename}.',
         TEXT_WRAP_CHARACTERS,
     )

+    excludes_phrase = (
+        f' and {args.source_excludes_filename}' if args.source_excludes_filename else ''
+    )
     delete_lines = textwrap.wrap(
-        'Once you are satisfied, you can safely delete {}{}.'.format(
-            args.source_config_filename,
-            ' and {}'.format(args.source_excludes_filename)
-            if args.source_excludes_filename
-            else '',
-        ),
+        f'Once you are satisfied, you can safely delete {args.source_config_filename}{excludes_phrase}.',
         TEXT_WRAP_CHARACTERS,
     )
@@ -23,9 +23,7 @@ def parse_arguments(*arguments):
         '--destination',
         dest='destination_filename',
         default=DEFAULT_DESTINATION_CONFIG_FILENAME,
-        help='Destination YAML configuration file, default: {}'.format(
-            DEFAULT_DESTINATION_CONFIG_FILENAME
-        ),
+        help=f'Destination YAML configuration file, default: {DEFAULT_DESTINATION_CONFIG_FILENAME}',
     )
     parser.add_argument(
         '--overwrite',
@@ -48,17 +46,13 @@ def main():  # pragma: no cover
         overwrite=args.overwrite,
     )

-    print('Generated a sample configuration file at {}.'.format(args.destination_filename))
+    print(f'Generated a sample configuration file at {args.destination_filename}.')
     print()
     if args.source_filename:
-        print(
-            'Merged in the contents of configuration file at {}.'.format(args.source_filename)
-        )
+        print(f'Merged in the contents of configuration file at {args.source_filename}.')
         print('To review the changes made, run:')
         print()
-        print(
-            ' diff --unified {} {}'.format(args.source_filename, args.destination_filename)
-        )
+        print(f' diff --unified {args.source_filename} {args.destination_filename}')
         print()
     print('This includes all available configuration options with example values. The few')
     print('required options are indicated. Please edit the file to suit your needs.')
@@ -2,6 +2,7 @@ import logging
 import sys
 from argparse import ArgumentParser

+import borgmatic.config.generate
 from borgmatic.config import collect, validate

 logger = logging.getLogger(__name__)
@@ -21,20 +22,24 @@ def parse_arguments(*arguments):
         nargs='+',
         dest='config_paths',
         default=config_paths,
-        help='Configuration filenames or directories, defaults to: {}'.format(
-            ' '.join(config_paths)
-        ),
+        help=f'Configuration filenames or directories, defaults to: {config_paths}',
+    )
+    parser.add_argument(
+        '-s',
+        '--show',
+        action='store_true',
+        help='Show the validated configuration after all include merging has occurred',
     )

     return parser.parse_args(arguments)


 def main():  # pragma: no cover
-    args = parse_arguments(*sys.argv[1:])
+    arguments = parse_arguments(*sys.argv[1:])

     logging.basicConfig(level=logging.INFO, format='%(message)s')

-    config_filenames = tuple(collect.collect_config_filenames(args.config_paths))
+    config_filenames = tuple(collect.collect_config_filenames(arguments.config_paths))
     if len(config_filenames) == 0:
         logger.critical('No files to validate found')
         sys.exit(1)
@@ -42,15 +47,22 @@ def main():  # pragma: no cover
     found_issues = False
     for config_filename in config_filenames:
         try:
-            validate.parse_configuration(config_filename, validate.schema_filename())
+            config, parse_logs = validate.parse_configuration(
+                config_filename, validate.schema_filename()
+            )
         except (ValueError, OSError, validate.Validation_error) as error:
-            logging.critical('{}: Error parsing configuration file'.format(config_filename))
+            logging.critical(f'{config_filename}: Error parsing configuration file')
             logging.critical(error)
             found_issues = True
+        else:
+            for log in parse_logs:
+                logger.handle(log)
+
+            if arguments.show:
+                print('---')
+                print(borgmatic.config.generate.render_configuration(config))

     if found_issues:
         sys.exit(1)
-    else:
-        logger.info(
-            'All given configuration files are valid: {}'.format(', '.join(config_filenames))
-        )
+
+    logger.info(f"All given configuration files are valid: {', '.join(config_filenames)}")
@@ -16,8 +16,8 @@ def get_default_config_paths(expand_home=True):
     return [
         '/etc/borgmatic/config.yaml',
         '/etc/borgmatic.d',
-        '%s/borgmatic/config.yaml' % user_config_directory,
-        '%s/borgmatic.d' % user_config_directory,
+        os.path.join(user_config_directory, 'borgmatic/config.yaml'),
+        os.path.join(user_config_directory, 'borgmatic.d'),
     ]
@@ -43,7 +43,7 @@ def convert_legacy_parsed_config(source_config, source_excludes, schema):
         ]
     )

-    # Split space-seperated values into actual lists, make "repository" into a list, and merge in
+    # Split space-separated values into actual lists, make "repository" into a list, and merge in
     # excludes.
     location = destination_config['location']
     location['source_directories'] = source_config.location['source_directories'].split(' ')
@@ -14,11 +14,14 @@ def _resolve_string(matcher):
     if matcher.group('escape') is not None:
         # in case of escaped envvar, unescape it
         return matcher.group('variable')

     # resolve the env var
     name, default = matcher.group('name'), matcher.group('default')
     out = os.getenv(name, default=default)

     if out is None:
-        raise ValueError('Cannot find variable ${name} in environment'.format(name=name))
+        raise ValueError(f'Cannot find variable {name} in environment')

     return out
@@ -48,7 +48,7 @@ def _schema_to_sample_configuration(schema, level=0, parent_is_sequence=False):
             config, schema, indent=indent, skip_first=parent_is_sequence
         )
     else:
-        raise ValueError('Schema at level {} is unsupported: {}'.format(level, schema))
+        raise ValueError(f'Schema at level {level} is unsupported: {schema}')

     return config

@@ -84,7 +84,7 @@ def _comment_out_optional_configuration(rendered_config):
     for line in rendered_config.split('\n'):
         # Upon encountering an optional configuration option, comment out lines until the next blank
         # line.
-        if line.strip().startswith('# {}'.format(COMMENTED_OUT_SENTINEL)):
+        if line.strip().startswith(f'# {COMMENTED_OUT_SENTINEL}'):
             optional = True
             continue

@@ -117,9 +117,7 @@ def write_configuration(config_filename, rendered_config, mode=0o600, overwrite=
     '''
     if not overwrite and os.path.exists(config_filename):
         raise FileExistsError(
-            '{} already exists. Aborting. Use --overwrite to replace the file.'.format(
-                config_filename
-            )
+            f'{config_filename} already exists. Aborting. Use --overwrite to replace the file.'
         )

     try:
@@ -218,7 +216,7 @@ def remove_commented_out_sentinel(config, field_name):
     except KeyError:
         return

-    if last_comment_value == '# {}\n'.format(COMMENTED_OUT_SENTINEL):
+    if last_comment_value == f'# {COMMENTED_OUT_SENTINEL}\n':
         config.ca.items[field_name][RUAMEL_YAML_COMMENTS_INDEX].pop()
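
The sentinel logic above operates on the rendered sample configuration one line at a time: once
a COMMENTED_OUT sentinel comment is seen, following lines are commented out until the next blank
line. A rough standalone sketch of that pass, simplified from the real implementation:

    COMMENTED_OUT_SENTINEL = 'COMMENTED_OUT'

    def comment_out_optional_lines(rendered_config):
        lines = []
        optional = False

        for line in rendered_config.split('\n'):
            # A sentinel comment marks the start of an optional option.
            if line.strip().startswith(f'# {COMMENTED_OUT_SENTINEL}'):
                optional = True
                continue

            # A blank line ends the optional run.
            if not line.strip():
                optional = False

            lines.append(f'# {line}' if optional else line)

        return '\n'.join(lines)

    print(comment_out_optional_lines('required: yes\n# COMMENTED_OUT\noptional: maybe\n\nnext: value'))
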
@@ -70,13 +70,11 @@ def validate_configuration_format(parser, config_format):
         section_format.name for section_format in config_format
     )
     if unknown_section_names:
-        raise ValueError(
-            'Unknown config sections found: {}'.format(', '.join(unknown_section_names))
-        )
+        raise ValueError(f"Unknown config sections found: {', '.join(unknown_section_names)}")

     missing_section_names = set(required_section_names) - section_names
     if missing_section_names:
-        raise ValueError('Missing config sections: {}'.format(', '.join(missing_section_names)))
+        raise ValueError(f"Missing config sections: {', '.join(missing_section_names)}")

     for section_format in config_format:
         if section_format.name not in section_names:
@@ -91,9 +89,7 @@ def validate_configuration_format(parser, config_format):

         if unexpected_option_names:
             raise ValueError(
-                'Unexpected options found in config section {}: {}'.format(
-                    section_format.name, ', '.join(sorted(unexpected_option_names))
-                )
+                f"Unexpected options found in config section {section_format.name}: {', '.join(sorted(unexpected_option_names))}",
             )

         missing_option_names = tuple(
@@ -105,9 +101,7 @@ def validate_configuration_format(parser, config_format):

         if missing_option_names:
             raise ValueError(
-                'Required options missing from config section {}: {}'.format(
-                    section_format.name, ', '.join(missing_option_names)
-                )
+                f"Required options missing from config section {section_format.name}: {', '.join(missing_option_names)}",
             )

@@ -137,7 +131,7 @@ def parse_configuration(config_filename, config_format):
     '''
     parser = RawConfigParser()
     if not parser.read(config_filename):
-        raise ValueError('Configuration file cannot be opened: {}'.format(config_filename))
+        raise ValueError(f'Configuration file cannot be opened: {config_filename}')

     validate_configuration_format(parser, config_format)
@@ -1,4 +1,5 @@
 import functools
+import json
 import logging
 import os

@@ -37,6 +38,37 @@ def include_configuration(loader, filename_node, include_directory):
     return load_configuration(include_filename)


+def raise_retain_node_error(loader, node):
+    '''
+    Given a ruamel.yaml.loader.Loader and a YAML node, raise an error about "!retain" usage.
+
+    Raise ValueError if a mapping or sequence node is given, as that indicates that "!retain" was
+    used in a configuration file without a merge. In configuration files with a merge, mapping and
+    sequence nodes with "!retain" tags are handled by deep_merge_nodes() below.
+
+    Also raise ValueError if a scalar node is given, as "!retain" is not supported on scalar nodes.
+    '''
+    if isinstance(node, (ruamel.yaml.nodes.MappingNode, ruamel.yaml.nodes.SequenceNode)):
+        raise ValueError(
+            'The !retain tag may only be used within a configuration file containing a merged !include tag.'
+        )
+
+    raise ValueError('The !retain tag may only be used on a YAML mapping or sequence.')
+
+
+def raise_omit_node_error(loader, node):
+    '''
+    Given a ruamel.yaml.loader.Loader and a YAML node, raise an error about "!omit" usage.
+
+    Raise ValueError unconditionally, as an "!omit" node here indicates it was used in a
+    configuration file without a merge. In configuration files with a merge, nodes with "!omit"
+    tags are handled by deep_merge_nodes() below.
+    '''
+    raise ValueError(
+        'The !omit tag may only be used on a scalar (e.g., string) list element within a configuration file containing a merged !include tag.'
+    )
+
+
 class Include_constructor(ruamel.yaml.SafeConstructor):
     '''
     A YAML "constructor" (a ruamel.yaml concept) that supports a custom "!include" tag for including
@@ -49,6 +81,8 @@ class Include_constructor(ruamel.yaml.SafeConstructor):
             '!include',
             functools.partial(include_configuration, include_directory=include_directory),
         )
+        self.add_constructor('!retain', raise_retain_node_error)
+        self.add_constructor('!omit', raise_omit_node_error)

     def flatten_mapping(self, node):
         '''
@@ -81,11 +115,13 @@ class Include_constructor(ruamel.yaml.SafeConstructor):
 def load_configuration(filename):
     '''
     Load the given configuration file and return its contents as a data structure of nested dicts
-    and lists.
+    and lists. Also, replace any "{constant}" strings with the value of the "constant" key in the
+    "constants" section of the configuration file.

     Raise ruamel.yaml.error.YAMLError if something goes wrong parsing the YAML, or RecursionError
     if there are too many recursive includes.
     '''

     # Use an embedded derived class for the include constructor so as to capture the filename
     # value. (functools.partial doesn't work for this use case because yaml.Constructor has to be
     # an actual class.)
@@ -98,7 +134,29 @@ def load_configuration(filename):
     yaml = ruamel.yaml.YAML(typ='safe')
     yaml.Constructor = Include_constructor_with_include_directory

-    return yaml.load(open(filename))
+    with open(filename) as file:
+        file_contents = file.read()
+        config = yaml.load(file_contents)
+
+        if config and 'constants' in config:
+            for key, value in config['constants'].items():
+                value = json.dumps(value)
+                file_contents = file_contents.replace(f'{{{key}}}', value.strip('"'))
+
+            config = yaml.load(file_contents)
+            del config['constants']
+
+        return config
+
+
+def filter_omitted_nodes(nodes):
+    '''
+    Given a list of nodes, return a filtered list omitting any nodes with an "!omit" tag or with a
+    value matching such nodes.
+    '''
+    omitted_values = tuple(node.value for node in nodes if node.tag == '!omit')
+
+    return [node for node in nodes if node.value not in omitted_values]
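
The constants support added to load_configuration() is plain text substitution followed by a
second YAML parse: each "{key}" occurrence is replaced with the JSON-rendered constant value,
stripped of surrounding quotes so strings don't pick up stray double quotes. Just the
replacement step, in isolation:

    import json

    constants = {'hostname': 'myhostname', 'retention_days': 7}
    file_contents = 'archive_name_format: "{hostname}-{retention_days}"'

    for key, value in constants.items():
        # json.dumps() renders numbers and booleans as YAML-compatible text;
        # strip('"') removes the quotes that string values pick up.
        file_contents = file_contents.replace(f'{{{key}}}', json.dumps(value).strip('"'))

    print(file_contents)  # archive_name_format: "myhostname-7"
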
 DELETED_NODE = object()

@@ -162,6 +220,8 @@ def deep_merge_nodes(nodes):
             ),
         ]

+    If a mapping or sequence node has a YAML "!retain" tag, then that node is not merged.
+
     The purpose of deep merging like this is to support, for instance, merging one borgmatic
     configuration file into another for reuse, such that a configuration section ("retention",
     etc.) does not completely replace the corresponding section in a merged file.
@@ -184,32 +244,42 @@ def deep_merge_nodes(nodes):

             # If we're dealing with MappingNodes, recurse and merge its values as well.
             if isinstance(b_value, ruamel.yaml.nodes.MappingNode):
-                replaced_nodes[(b_key, b_value)] = (
-                    b_key,
-                    ruamel.yaml.nodes.MappingNode(
-                        tag=b_value.tag,
-                        value=deep_merge_nodes(a_value.value + b_value.value),
-                        start_mark=b_value.start_mark,
-                        end_mark=b_value.end_mark,
-                        flow_style=b_value.flow_style,
-                        comment=b_value.comment,
-                        anchor=b_value.anchor,
-                    ),
-                )
+                # A "!retain" tag says to skip deep merging for this node. Replace the tag so
+                # downstream schema validation doesn't break on our application-specific tag.
+                if b_value.tag == '!retain':
+                    b_value.tag = 'tag:yaml.org,2002:map'
+                else:
+                    replaced_nodes[(b_key, b_value)] = (
+                        b_key,
+                        ruamel.yaml.nodes.MappingNode(
+                            tag=b_value.tag,
+                            value=deep_merge_nodes(a_value.value + b_value.value),
+                            start_mark=b_value.start_mark,
+                            end_mark=b_value.end_mark,
+                            flow_style=b_value.flow_style,
+                            comment=b_value.comment,
+                            anchor=b_value.anchor,
+                        ),
+                    )
             # If we're dealing with SequenceNodes, merge by appending one sequence to the other.
             elif isinstance(b_value, ruamel.yaml.nodes.SequenceNode):
-                replaced_nodes[(b_key, b_value)] = (
-                    b_key,
-                    ruamel.yaml.nodes.SequenceNode(
-                        tag=b_value.tag,
-                        value=a_value.value + b_value.value,
-                        start_mark=b_value.start_mark,
-                        end_mark=b_value.end_mark,
-                        flow_style=b_value.flow_style,
-                        comment=b_value.comment,
-                        anchor=b_value.anchor,
-                    ),
-                )
+                # A "!retain" tag says to skip deep merging for this node. Replace the tag so
+                # downstream schema validation doesn't break on our application-specific tag.
+                if b_value.tag == '!retain':
+                    b_value.tag = 'tag:yaml.org,2002:seq'
+                else:
+                    replaced_nodes[(b_key, b_value)] = (
+                        b_key,
+                        ruamel.yaml.nodes.SequenceNode(
+                            tag=b_value.tag,
+                            value=filter_omitted_nodes(a_value.value + b_value.value),
+                            start_mark=b_value.start_mark,
+                            end_mark=b_value.end_mark,
+                            flow_style=b_value.flow_style,
+                            comment=b_value.comment,
+                            anchor=b_value.anchor,
+                        ),
+                    )

     return [
         replaced_nodes.get(node, node) for node in nodes if replaced_nodes.get(node) != DELETED_NODE
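
At the node level the code above is involved, but the semantics it implements are simple:
merged includes deep-merge mappings and concatenate sequences by default, "!retain" keeps the
overriding value wholesale, and "!omit" drops matching elements from a merged list. A
plain-dict sketch of those semantics, not borgmatic's actual node-based implementation:

    def merge(base, override, retain_keys=frozenset(), omit_values=frozenset()):
        # Deep-merge the way merged includes behave: nested dicts merge
        # recursively, lists concatenate (minus omitted values), and keys
        # marked as retained take the override value wholesale.
        merged = dict(base)

        for key, value in override.items():
            if key in retain_keys or key not in base:
                merged[key] = value
            elif isinstance(value, dict):
                merged[key] = merge(base[key], value, retain_keys, omit_values)
            elif isinstance(value, list):
                merged[key] = [item for item in base[key] + value if item not in omit_values]
            else:
                merged[key] = value

        return merged

    base = {'location': {'source_directories': ['/etc'], 'repositories': [{'path': 'first.borg'}]}}
    override = {'location': {'source_directories': ['/home']}}
    print(merge(base, override))
    # {'location': {'source_directories': ['/etc', '/home'], 'repositories': [{'path': 'first.borg'}]}}
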
@@ -57,9 +57,15 @@ def normalize(config_filename, config):
     # Upgrade remote repositories to ssh:// syntax, required in Borg 2.
     repositories = location.get('repositories')
     if repositories:
+        if isinstance(repositories[0], str):
+            config['location']['repositories'] = [
+                {'path': repository} for repository in repositories
+            ]
+            repositories = config['location']['repositories']
         config['location']['repositories'] = []
-        for repository in repositories:
-            if '~' in repository:
+        for repository_dict in repositories:
+            repository_path = repository_dict['path']
+            if '~' in repository_path:
                 logs.append(
                     logging.makeLogRecord(
                         dict(
@@ -69,26 +75,37 @@ def normalize(config_filename, config):
                         )
                     )
                 )
-            if ':' in repository:
-                if repository.startswith('file://'):
-                    config['location']['repositories'].append(
-                        os.path.abspath(repository.partition('file://')[-1])
-                    )
-                elif repository.startswith('ssh://'):
-                    config['location']['repositories'].append(repository)
+            if ':' in repository_path:
+                if repository_path.startswith('file://'):
+                    updated_repository_path = os.path.abspath(
+                        repository_path.partition('file://')[-1]
+                    )
+                    config['location']['repositories'].append(
+                        dict(
+                            repository_dict,
+                            path=updated_repository_path,
+                        )
+                    )
+                elif repository_path.startswith('ssh://'):
+                    config['location']['repositories'].append(repository_dict)
                 else:
-                    rewritten_repository = f"ssh://{repository.replace(':~', '/~').replace(':/', '/').replace(':', '/./')}"
+                    rewritten_repository_path = f"ssh://{repository_path.replace(':~', '/~').replace(':/', '/').replace(':', '/./')}"
                     logs.append(
                         logging.makeLogRecord(
                             dict(
                                 levelno=logging.WARNING,
                                 levelname='WARNING',
-                                msg=f'{config_filename}: Remote repository paths without ssh:// syntax are deprecated. Interpreting "{repository}" as "{rewritten_repository}"',
+                                msg=f'{config_filename}: Remote repository paths without ssh:// syntax are deprecated. Interpreting "{repository_path}" as "{rewritten_repository_path}"',
                             )
                         )
                     )
-                    config['location']['repositories'].append(rewritten_repository)
+                    config['location']['repositories'].append(
+                        dict(
+                            repository_dict,
+                            path=rewritten_repository_path,
+                        )
+                    )
             else:
-                config['location']['repositories'].append(repository)
+                config['location']['repositories'].append(repository_dict)

     return logs
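
After this normalization, both the pre-1.7.10 plain-string form and the bare "host:path" SSH
syntax converge on the same dict shape. The rewrite rule is exactly the chained replace shown
in the diff:

    # The same rewrite expression as in the diff, applied to a bare remote path.
    repository_path = 'user@backupserver:sourcehostname.borg'
    rewritten = f"ssh://{repository_path.replace(':~', '/~').replace(':/', '/').replace(':', '/./')}"
    print(rewritten)  # ssh://user@backupserver/./sourcehostname.borg

    # And a plain-string repository list upgrades to the new dict form first:
    repositories = ['first.borg', 'user@backupserver:second.borg']
    if isinstance(repositories[0], str):
        repositories = [{'path': repository} for repository in repositories]
    print(repositories)  # [{'path': 'first.borg'}, {'path': 'user@backupserver:second.borg'}]
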
@@ -57,7 +57,12 @@ def parse_overrides(raw_overrides):
     for raw_override in raw_overrides:
         try:
             raw_keys, value = raw_override.split('=', 1)
-            parsed_overrides.append((tuple(raw_keys.split('.')), convert_value_type(value),))
+            parsed_overrides.append(
+                (
+                    tuple(raw_keys.split('.')),
+                    convert_value_type(value),
+                )
+            )
         except ValueError:
             raise ValueError(
                 f"Invalid override '{raw_override}'. Make sure you use the form: SECTION.OPTION=VALUE"
@@ -75,5 +80,5 @@ def apply_overrides(config, raw_overrides):
     '''
     overrides = parse_overrides(raw_overrides)

-    for (keys, value) in overrides:
+    for keys, value in overrides:
         set_values(config, keys, value)
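
Each override is split once on "=" and its dotted key path becomes a tuple, so a flag like
"--override section.option=value" maps directly onto the nested configuration. A quick
illustration of the parsed shape, with a stand-in for convert_value_type() that parses the
value as YAML so numbers and booleans come out typed:

    import io

    import ruamel.yaml

    def convert_value_type(value):
        # Stand-in converter: parse the raw string as YAML.
        return ruamel.yaml.YAML(typ='safe').load(io.StringIO(value))

    def parse_overrides(raw_overrides):
        parsed_overrides = []

        for raw_override in raw_overrides:
            raw_keys, value = raw_override.split('=', 1)
            parsed_overrides.append(
                (
                    tuple(raw_keys.split('.')),
                    convert_value_type(value),
                )
            )

        return parsed_overrides

    print(parse_overrides(['retention.keep_daily=5', 'location.one_file_system=true']))
    # [(('retention', 'keep_daily'), 5), (('location', 'one_file_system'), True)]
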
@@ -3,6 +3,17 @@ required:
     - location
 additionalProperties: false
 properties:
+    constants:
+        type: object
+        description: |
+            Constants to use in the configuration file. All occurrences of the
+            constant name within curly braces will be replaced with the value.
+            For example, if you have a constant named "hostname" with the value
+            "myhostname", then the string "{hostname}" will be replaced with
+            "myhostname" in the configuration file.
+        example:
+            hostname: myhostname
+            prefix: myprefix
     location:
         type: object
         description: |
@@ -29,19 +40,32 @@ properties:
             repositories:
                 type: array
                 items:
-                    type: string
+                    type: object
+                    required:
+                        - path
+                    properties:
+                        path:
+                            type: string
+                            example: ssh://user@backupserver/./{fqdn}
+                        label:
+                            type: string
+                            example: backupserver
                 description: |
-                    Paths to local or remote repositories (required). Tildes are
-                    expanded. Multiple repositories are backed up to in
-                    sequence. Borg placeholders can be used. See the output of
-                    "borg help placeholders" for details. See ssh_command for
-                    SSH options like identity file or port. If systemd service
-                    is used, then add local repository paths in the systemd
-                    service file to the ReadWritePaths list.
+                    A required list of local or remote repositories with paths
+                    and optional labels (which can be used with the --repository
+                    flag to select a repository). Tildes are expanded. Multiple
+                    repositories are backed up to in sequence. Borg placeholders
+                    can be used. See the output of "borg help placeholders" for
+                    details. See ssh_command for SSH options like identity file
+                    or port. If systemd service is used, then add local
+                    repository paths in the systemd service file to the
+                    ReadWritePaths list. Prior to borgmatic 1.7.10, repositories
+                    was just a list of plain path strings.
                 example:
-                    - ssh://user@backupserver/./sourcehostname.borg
-                    - ssh://user@backupserver/./{fqdn}
-                    - /var/local/backups/local.borg
+                    - path: ssh://user@backupserver/./sourcehostname.borg
+                      label: backupserver
+                    - path: /mnt/backup
+                      label: local
             working_directory:
                 type: string
                 description: |
@@ -354,12 +378,21 @@ properties:
                 description: |
                     Name of the archive. Borg placeholders can be used. See the
                     output of "borg help placeholders" for details. Defaults to
-                    "{hostname}-{now:%Y-%m-%dT%H:%M:%S.%f}". If you specify this
-                    option, consider also specifying a prefix in the retention
-                    and consistency sections to avoid accidental
-                    pruning/checking of archives with different archive name
-                    formats.
+                    "{hostname}-{now:%Y-%m-%dT%H:%M:%S.%f}". When running
+                    actions like rlist, info, or check, borgmatic automatically
+                    tries to match only archives created with this name format.
                 example: "{hostname}-documents-{now}"
+            match_archives:
+                type: string
+                description: |
+                    A Borg pattern for filtering down the archives used by
+                    borgmatic actions that operate on multiple archives. For
+                    Borg 1.x, use a shell pattern here and see the output of
+                    "borg help placeholders" for details. For Borg 2.x, see the
+                    output of "borg help match-archives". If match_archives is
+                    not specified, borgmatic defaults to deriving the
+                    match_archives value from archive_name_format.
+                example: "sh:{hostname}-*"
             relocated_repo_access_is_ok:
                 type: boolean
                 description: |
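
The "defaults to deriving the match_archives value from archive_name_format" behavior can be
pictured as replacing each Borg placeholder in the name format with a wildcard. The sketch
below is an assumption for illustration only, not the exact derivation borgmatic performs:

    import re

    def derive_match_archives(archive_name_format):
        # Hypothetical derivation: turn each "{placeholder}" (with or without a
        # format spec) into "*" and prefix "sh:" to form a Borg 1.x shell pattern.
        return 'sh:' + re.sub(r'\{\w+([:^].*?)?\}', '*', archive_name_format)

    print(derive_match_archives('{hostname}-documents-{now}'))  # sh:*-documents-*
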
@@ -453,10 +486,12 @@ properties:
             prefix:
                 type: string
                 description: |
-                    When pruning, only consider archive names starting with this
-                    prefix. Borg placeholders can be used. See the output of
-                    "borg help placeholders" for details. Defaults to
-                    "{hostname}-". Use an empty value to disable the default.
+                    Deprecated. When pruning, only consider archive names
+                    starting with this prefix. Borg placeholders can be used.
+                    See the output of "borg help placeholders" for details.
+                    If a prefix is not specified, borgmatic defaults to
+                    matching archives based on the archive_name_format (see
+                    above).
                 example: sourcehostname
     consistency:
         type: object
@@ -514,12 +549,12 @@ properties:
                 items:
                     type: string
                 description: |
-                    Paths to a subset of the repositories in the location
-                    section on which to run consistency checks. Handy in case
-                    some of your repositories are very large, and so running
-                    consistency checks on them would take too long. Defaults to
-                    running consistency checks on all repositories configured in
-                    the location section.
+                    Paths or labels for a subset of the repositories in the
+                    location section on which to run consistency checks. Handy
+                    in case some of your repositories are very large, and so
+                    running consistency checks on them would take too long.
+                    Defaults to running consistency checks on all repositories
+                    configured in the location section.
                 example:
                     - user@backupserver:sourcehostname.borg
             check_last:
@@ -532,11 +567,12 @@ properties:
             prefix:
                 type: string
                 description: |
-                    When performing the "archives" check, only consider archive
-                    names starting with this prefix. Borg placeholders can be
-                    used. See the output of "borg help placeholders" for
-                    details. Defaults to "{hostname}-". Use an empty value to
-                    disable the default.
+                    Deprecated. When performing the "archives" check, only
+                    consider archive names starting with this prefix. Borg
+                    placeholders can be used. See the output of "borg help
+                    placeholders" for details. If a prefix is not specified,
+                    borgmatic defaults to matching archives based on the
+                    archive_name_format (see above).
                 example: sourcehostname
     output:
         type: object
@@ -905,14 +941,14 @@ properties:
                             type: string
                            enum: ['sql']
                            description: |
-                                Database dump output format. Currenly only "sql"
-                                is supported. Defaults to "sql" for a single
-                                database. Or, when database name is "all" and
-                                format is blank, dumps all databases to a single
-                                file. But if a format is specified with an "all"
-                                database name, dumps each database to a separate
-                                file of that format, allowing more convenient
-                                restores of individual databases.
+                                Database dump output format. Currently only
+                                "sql" is supported. Defaults to "sql" for a
+                                single database. Or, when database name is "all"
+                                and format is blank, dumps all databases to a
+                                single file. But if a format is specified with
+                                an "all" database name, dumps each database to a
+                                separate file of that format, allowing more
+                                convenient restores of individual databases.
                            example: directory
                        add_drop_database:
                            type: boolean
@@ -1,9 +1,13 @@
 import os

 import jsonschema
-import pkg_resources
 import ruamel.yaml

+try:
+    import importlib_metadata
+except ModuleNotFoundError:  # pragma: nocover
+    import importlib.metadata as importlib_metadata
+
 from borgmatic.config import environment, load, normalize, override


@@ -11,8 +15,17 @@ def schema_filename():
     '''
     Path to the installed YAML configuration schema file, used to validate and parse the
     configuration.
+
+    Raise FileNotFoundError when the schema path does not exist.
     '''
-    return pkg_resources.resource_filename('borgmatic', 'config/schema.yaml')
+    try:
+        return next(
+            str(path.locate())
+            for path in importlib_metadata.files('borgmatic')
+            if path.match('config/schema.yaml')
+        )
+    except StopIteration:
+        raise FileNotFoundError('Configuration file schema could not be found')


 def format_json_error_path_element(path_element):
@@ -20,9 +33,9 @@ def format_json_error_path_element(path_element):
     Given a path element into a JSON data structure, format it for display as a string.
     '''
     if isinstance(path_element, int):
-        return str('[{}]'.format(path_element))
+        return str(f'[{path_element}]')

-    return str('.{}'.format(path_element))
+    return str(f'.{path_element}')


 def format_json_error(error):
@@ -30,10 +43,10 @@ def format_json_error(error):
     Given an instance of jsonschema.exceptions.ValidationError, format it for display as a string.
     '''
     if not error.path:
-        return 'At the top level: {}'.format(error.message)
+        return f'At the top level: {error.message}'

     formatted_path = ''.join(format_json_error_path_element(element) for element in error.path)
-    return "At '{}': {}".format(formatted_path.lstrip('.'), error.message)
+    return f"At '{formatted_path.lstrip('.')}': {error.message}"


 class Validation_error(ValueError):
@@ -54,9 +67,10 @@ class Validation_error(ValueError):
         '''
         Render a validation error as a user-facing string.
         '''
-        return 'An error occurred while parsing a configuration file at {}:\n'.format(
-            self.config_filename
-        ) + '\n'.join(error for error in self.errors)
+        return (
+            f'An error occurred while parsing a configuration file at {self.config_filename}:\n'
+            + '\n'.join(error for error in self.errors)
+        )


 def apply_logical_validation(config_filename, parsed_configuration):
@@ -68,13 +82,14 @@ def apply_logical_validation(config_filename, parsed_configuration):
     location_repositories = parsed_configuration.get('location', {}).get('repositories')
     check_repositories = parsed_configuration.get('consistency', {}).get('check_repositories', [])
     for repository in check_repositories:
-        if repository not in location_repositories:
+        if not any(
+            repositories_match(repository, config_repository)
+            for config_repository in location_repositories
+        ):
             raise Validation_error(
                 config_filename,
                 (
-                    'Unknown repository in the "consistency" section\'s "check_repositories": {}'.format(
-                        repository
-                    ),
+                    f'Unknown repository in the "consistency" section\'s "check_repositories": {repository}',
                 ),
             )

@@ -138,9 +153,17 @@ def normalize_repository_path(repository):

 def repositories_match(first, second):
     '''
-    Given two repository paths (relative and/or absolute), return whether they match.
+    Given two repository dicts with keys 'path' (relative and/or absolute),
+    and 'label', or two repository paths, return whether they match.
     '''
-    return normalize_repository_path(first) == normalize_repository_path(second)
+    if isinstance(first, str):
+        first = {'path': first, 'label': first}
+    if isinstance(second, str):
+        second = {'path': second, 'label': second}
+    return (first.get('label') == second.get('label')) or (
+        normalize_repository_path(first.get('path'))
+        == normalize_repository_path(second.get('path'))
+    )


 def guard_configuration_contains_repository(repository, configurations):
@@ -160,14 +183,14 @@ def guard_configuration_contains_repository(repository, configurations):
             config_repository
             for config in configurations.values()
             for config_repository in config['location']['repositories']
-            if repositories_match(repository, config_repository)
+            if repositories_match(config_repository, repository)
         )
     )

     if count == 0:
-        raise ValueError('Repository {} not found in configuration files'.format(repository))
+        raise ValueError(f'Repository {repository} not found in configuration files')
     if count > 1:
-        raise ValueError('Repository {} found in multiple configuration files'.format(repository))
+        raise ValueError(f'Repository {repository} found in multiple configuration files')


 def guard_single_repository_selected(repository, configurations):
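
With the new dict form, repositories_match() accepts either strings or dicts and treats a label
match as sufficient before falling back to normalized path comparison. Assuming borgmatic
1.7.10 or later is installed, a "--repository backupserver" style selection can therefore match
by label alone:

    from borgmatic.config.validate import repositories_match

    config_repository = {
        'path': 'ssh://user@backupserver/./sourcehostname.borg',
        'label': 'backupserver',
    }

    # A bare string is coerced to {'path': value, 'label': value}, so this
    # matches the configured repository by label...
    print(repositories_match(config_repository, 'backupserver'))  # True

    # ...and this matches by (normalized) path.
    print(repositories_match(config_repository, 'ssh://user@backupserver/./sourcehostname.borg'))  # True
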
@@ -11,7 +11,7 @@ ERROR_OUTPUT_MAX_LINE_COUNT = 25
 BORG_ERROR_EXIT_CODE = 2


-def exit_code_indicates_error(process, exit_code, borg_local_path=None):
+def exit_code_indicates_error(command, exit_code, borg_local_path=None):
     '''
     Return True if the given exit code from running a command corresponds to an error. If a Borg
     local path is given and matches the process' command, then treat exit code 1 as a warning
@@ -20,8 +20,6 @@ def exit_code_indicates_error(process, exit_code, borg_local_path=None):
     if exit_code is None:
         return False

-    command = process.args.split(' ') if isinstance(process.args, str) else process.args
-
     if borg_local_path and command[0] == borg_local_path:
         return bool(exit_code < 0 or exit_code >= BORG_ERROR_EXIT_CODE)
@@ -45,6 +43,23 @@ def output_buffer_for_process(process, exclude_stdouts):
     return process.stderr if process.stdout in exclude_stdouts else process.stdout


+def append_last_lines(last_lines, captured_output, line, output_log_level):
+    '''
+    Given a rolling list of last lines, a list of captured output, a line to append, and an output
+    log level, append the line to the last lines and (if necessary) the captured output. Then log
+    the line at the requested output log level.
+    '''
+    last_lines.append(line)
+
+    if len(last_lines) > ERROR_OUTPUT_MAX_LINE_COUNT:
+        last_lines.pop(0)
+
+    if output_log_level is None:
+        captured_output.append(line)
+    else:
+        logger.log(output_log_level, line)
+
+
 def log_outputs(processes, exclude_stdouts, output_log_level, borg_local_path):
     '''
     Given a sequence of subprocess.Popen() instances for multiple processes, log the output for each
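
The new append_last_lines() helper centralizes the rolling buffer that previously lived inline
in log_outputs(): it keeps at most ERROR_OUTPUT_MAX_LINE_COUNT lines and either captures or
logs each one. Its behavior in isolation, with the buffer shrunk for the demo:

    import logging

    logging.basicConfig(level=logging.INFO, format='%(message)s')
    logger = logging.getLogger(__name__)

    ERROR_OUTPUT_MAX_LINE_COUNT = 3  # 25 in borgmatic; shrunk here for the demo

    def append_last_lines(last_lines, captured_output, line, output_log_level):
        # Keep a bounded window of recent lines for error reporting, and either
        # capture the line silently (log level None) or emit it at the given level.
        last_lines.append(line)

        if len(last_lines) > ERROR_OUTPUT_MAX_LINE_COUNT:
            last_lines.pop(0)

        if output_log_level is None:
            captured_output.append(line)
        else:
            logger.log(output_log_level, line)

    last_lines, captured = [], []
    for line in ('one', 'two', 'three', 'four'):
        append_last_lines(last_lines, captured, line, output_log_level=None)

    print(last_lines)  # ['two', 'three', 'four'] -- oldest line rolled off
    print(captured)    # ['one', 'two', 'three', 'four'] -- everything captured
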
@@ -100,15 +115,12 @@ def log_outputs(processes, exclude_stdouts, output_log_level, borg_local_path):

                 # Keep the last few lines of output in case the process errors, and we need the output for
                 # the exception below.
-                last_lines = buffer_last_lines[ready_buffer]
-                last_lines.append(line)
-                if len(last_lines) > ERROR_OUTPUT_MAX_LINE_COUNT:
-                    last_lines.pop(0)
-
-                if output_log_level is None:
-                    captured_outputs[ready_process].append(line)
-                else:
-                    logger.log(output_log_level, line)
+                append_last_lines(
+                    buffer_last_lines[ready_buffer],
+                    captured_outputs[ready_process],
+                    line,
+                    output_log_level,
+                )

         if not still_running:
             break
@@ -121,13 +133,24 @@ def log_outputs(processes, exclude_stdouts, output_log_level, borg_local_path):
         if exit_code is None:
             still_running = True

+        command = process.args.split(' ') if isinstance(process.args, str) else process.args
         # If any process errors, then raise accordingly.
-        if exit_code_indicates_error(process, exit_code, borg_local_path):
+        if exit_code_indicates_error(command, exit_code, borg_local_path):
             # If an error occurs, include its output in the raised exception so that we don't
             # inadvertently hide error output.
             output_buffer = output_buffer_for_process(process, exclude_stdouts)

             last_lines = buffer_last_lines[output_buffer] if output_buffer else []

+            # Collect any straggling output lines that came in since we last gathered output.
+            while output_buffer:  # pragma: no cover
+                line = output_buffer.readline().rstrip().decode()
+                if not line:
+                    break
+
+                append_last_lines(
+                    last_lines, captured_outputs[process], line, output_log_level=logging.ERROR
+                )
+
             if len(last_lines) == ERROR_OUTPUT_MAX_LINE_COUNT:
                 last_lines.insert(0, '...')
@@ -155,8 +178,8 @@ def log_command(full_command, input_file=None, output_file=None):
     '''
     logger.debug(
         ' '.join(full_command)
-        + (' < {}'.format(getattr(input_file, 'name', '')) if input_file else '')
-        + (' > {}'.format(getattr(output_file, 'name', '')) if output_file else '')
+        + (f" < {getattr(input_file, 'name', '')}" if input_file else '')
+        + (f" > {getattr(output_file, 'name', '')}" if output_file else '')
     )
@@ -213,7 +236,11 @@ def execute_command(


 def execute_command_and_capture_output(
-    full_command, capture_stderr=False, shell=False, extra_environment=None, working_directory=None,
+    full_command,
+    capture_stderr=False,
+    shell=False,
+    extra_environment=None,
+    working_directory=None,
 ):
     '''
     Execute the given command (a sequence of command/argument strings), capturing and returning its
@@ -228,13 +255,18 @@ def execute_command_and_capture_output(
     environment = {**os.environ, **extra_environment} if extra_environment else None
     command = ' '.join(full_command) if shell else full_command

-    output = subprocess.check_output(
-        command,
-        stderr=subprocess.STDOUT if capture_stderr else None,
-        shell=shell,
-        env=environment,
-        cwd=working_directory,
-    )
+    try:
+        output = subprocess.check_output(
+            command,
+            stderr=subprocess.STDOUT if capture_stderr else None,
+            shell=shell,
+            env=environment,
+            cwd=working_directory,
+        )
+    except subprocess.CalledProcessError as error:
+        if exit_code_indicates_error(command, error.returncode):
+            raise
+        output = error.output

     return output.decode() if output is not None else None
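
The try/except added here matters because Borg reserves exit code 1 for warnings: by consulting
exit_code_indicates_error() on CalledProcessError, a warning-level exit no longer throws away
the output the command produced. The same pattern against a plain subprocess call, with a
simplified warning-tolerant predicate (an assumption standing in for borgmatic's version):

    import subprocess

    def exit_code_indicates_error(command, exit_code, borg_local_path='borg'):
        # Simplified: for a borg command, exit code 1 is a warning, not an error.
        if command and command[0] == borg_local_path:
            return exit_code < 0 or exit_code >= 2

        return exit_code != 0

    command = ['borg', 'list', '/path/to/repo']  # placeholder command for illustration

    try:
        output = subprocess.check_output(command, stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as error:
        if exit_code_indicates_error(command, error.returncode):
            raise

        # Exit code 1: keep the output Borg produced despite the warning.
        output = error.output
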
|
@ -16,7 +16,7 @@ def interpolate_context(config_filename, hook_description, command, context):
|
||||||
names/values, interpolate the values by "{name}" into the command and return the result.
|
names/values, interpolate the values by "{name}" into the command and return the result.
|
||||||
'''
|
'''
|
||||||
for name, value in context.items():
|
for name, value in context.items():
|
||||||
command = command.replace('{%s}' % name, str(value))
|
command = command.replace(f'{{{name}}}', str(value))
|
||||||
|
|
||||||
for unsupported_variable in re.findall(r'{\w+}', command):
|
for unsupported_variable in re.findall(r'{\w+}', command):
|
||||||
logger.warning(
|
logger.warning(
|
||||||
|
@ -38,7 +38,7 @@ def execute_hook(commands, umask, config_filename, description, dry_run, **conte
|
||||||
Raise subprocesses.CalledProcessError if an error occurs in a hook.
|
Raise subprocesses.CalledProcessError if an error occurs in a hook.
|
||||||
'''
|
'''
|
||||||
if not commands:
|
if not commands:
|
||||||
logger.debug('{}: No commands to run for {} hook'.format(config_filename, description))
|
logger.debug(f'{config_filename}: No commands to run for {description} hook')
|
||||||
return
|
return
|
||||||
|
|
||||||
dry_run_label = ' (dry run; not actually running hooks)' if dry_run else ''
|
dry_run_label = ' (dry run; not actually running hooks)' if dry_run else ''
|
||||||
|
@ -49,19 +49,15 @@ def execute_hook(commands, umask, config_filename, description, dry_run, **conte
|
||||||
]
|
]
|
||||||
|
|
||||||
if len(commands) == 1:
|
if len(commands) == 1:
|
||||||
logger.info(
|
logger.info(f'{config_filename}: Running command for {description} hook{dry_run_label}')
|
||||||
'{}: Running command for {} hook{}'.format(config_filename, description, dry_run_label)
|
|
||||||
)
|
|
||||||
else:
|
else:
|
||||||
logger.info(
|
logger.info(
|
||||||
'{}: Running {} commands for {} hook{}'.format(
|
f'{config_filename}: Running {len(commands)} commands for {description} hook{dry_run_label}',
|
||||||
config_filename, len(commands), description, dry_run_label
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
|
|
||||||
if umask:
|
if umask:
|
||||||
parsed_umask = int(str(umask), 8)
|
parsed_umask = int(str(umask), 8)
|
||||||
logger.debug('{}: Set hook umask to {}'.format(config_filename, oct(parsed_umask)))
|
logger.debug(f'{config_filename}: Set hook umask to {oct(parsed_umask)}')
|
||||||
original_umask = os.umask(parsed_umask)
|
original_umask = os.umask(parsed_umask)
|
||||||
else:
|
else:
|
||||||
original_umask = None
|
original_umask = None
|
||||||
|
@ -93,9 +89,7 @@ def considered_soft_failure(config_filename, error):
|
||||||
|
|
||||||
if exit_code == SOFT_FAIL_EXIT_CODE:
|
if exit_code == SOFT_FAIL_EXIT_CODE:
|
||||||
logger.info(
|
logger.info(
|
||||||
'{}: Command hook exited with soft failure exit code ({}); skipping remaining actions'.format(
|
f'{config_filename}: Command hook exited with soft failure exit code ({SOFT_FAIL_EXIT_CODE}); skipping remaining actions',
|
||||||
config_filename, SOFT_FAIL_EXIT_CODE
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
|
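
Hook command interpolation is a straight string replacement of "{name}" tokens; unknown tokens
are left in place and warned about rather than raising. A trimmed-down version of the function
above, with logging swapped for print:

    import re

    def interpolate_context(command, context):
        # Replace each known {name} token; leave unknown tokens intact.
        for name, value in context.items():
            command = command.replace(f'{{{name}}}', str(value))

        for unsupported_variable in re.findall(r'{\w+}', command):
            print(f'Warning: unsupported variable {unsupported_variable}')

        return command

    print(interpolate_context('echo "backed up {repository}"', {'repository': '/mnt/backup'}))
    # echo "backed up /mnt/backup"
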
|
@ -34,17 +34,15 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_
|
||||||
return
|
return
|
||||||
|
|
||||||
dry_run_label = ' (dry run; not actually pinging)' if dry_run else ''
|
dry_run_label = ' (dry run; not actually pinging)' if dry_run else ''
|
||||||
formatted_state = '/{}/'.format(MONITOR_STATE_TO_CRONHUB[state])
|
formatted_state = f'/{MONITOR_STATE_TO_CRONHUB[state]}/'
|
||||||
ping_url = (
|
ping_url = (
|
||||||
hook_config['ping_url']
|
hook_config['ping_url']
|
||||||
.replace('/start/', formatted_state)
|
.replace('/start/', formatted_state)
|
||||||
.replace('/ping/', formatted_state)
|
.replace('/ping/', formatted_state)
|
||||||
)
|
)
|
||||||
|
|
||||||
logger.info(
|
logger.info(f'{config_filename}: Pinging Cronhub {state.name.lower()}{dry_run_label}')
|
||||||
'{}: Pinging Cronhub {}{}'.format(config_filename, state.name.lower(), dry_run_label)
|
logger.debug(f'{config_filename}: Using Cronhub ping URL {ping_url}')
|
||||||
)
|
|
||||||
logger.debug('{}: Using Cronhub ping URL {}'.format(config_filename, ping_url))
|
|
||||||
|
|
||||||
if not dry_run:
|
if not dry_run:
|
||||||
logging.getLogger('urllib3').setLevel(logging.ERROR)
|
logging.getLogger('urllib3').setLevel(logging.ERROR)
|
||||||
|
|
|
@ -34,12 +34,10 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_
|
||||||
return
|
return
|
||||||
|
|
||||||
dry_run_label = ' (dry run; not actually pinging)' if dry_run else ''
|
dry_run_label = ' (dry run; not actually pinging)' if dry_run else ''
|
||||||
ping_url = '{}/{}'.format(hook_config['ping_url'], MONITOR_STATE_TO_CRONITOR[state])
|
ping_url = f"{hook_config['ping_url']}/{MONITOR_STATE_TO_CRONITOR[state]}"
|
||||||
|
|
||||||
logger.info(
|
logger.info(f'{config_filename}: Pinging Cronitor {state.name.lower()}{dry_run_label}')
|
||||||
'{}: Pinging Cronitor {}{}'.format(config_filename, state.name.lower(), dry_run_label)
|
logger.debug(f'{config_filename}: Using Cronitor ping URL {ping_url}')
|
||||||
)
|
|
||||||
logger.debug('{}: Using Cronitor ping URL {}'.format(config_filename, ping_url))
|
|
||||||
|
|
||||||
if not dry_run:
|
if not dry_run:
|
||||||
logging.getLogger('urllib3').setLevel(logging.ERROR)
|
logging.getLogger('urllib3').setLevel(logging.ERROR)
|
||||||
|
|
|
@ -43,9 +43,9 @@ def call_hook(function_name, hooks, log_prefix, hook_name, *args, **kwargs):
|
||||||
try:
|
try:
|
||||||
module = HOOK_NAME_TO_MODULE[hook_name]
|
module = HOOK_NAME_TO_MODULE[hook_name]
|
||||||
except KeyError:
|
except KeyError:
|
||||||
raise ValueError('Unknown hook name: {}'.format(hook_name))
|
raise ValueError(f'Unknown hook name: {hook_name}')
|
||||||
|
|
||||||
logger.debug('{}: Calling {} hook function {}'.format(log_prefix, hook_name, function_name))
|
logger.debug(f'{log_prefix}: Calling {hook_name} hook function {function_name}')
|
||||||
return getattr(module, function_name)(config, log_prefix, *args, **kwargs)
|
return getattr(module, function_name)(config, log_prefix, *args, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -33,7 +33,7 @@ def make_database_dump_filename(dump_path, name, hostname=None):
|
||||||
Raise ValueError if the database name is invalid.
|
Raise ValueError if the database name is invalid.
|
||||||
'''
|
'''
|
||||||
if os.path.sep in name:
|
if os.path.sep in name:
|
||||||
raise ValueError('Invalid database name {}'.format(name))
|
raise ValueError(f'Invalid database name {name}')
|
||||||
|
|
||||||
return os.path.join(os.path.expanduser(dump_path), hostname or 'localhost', name)
|
return os.path.join(os.path.expanduser(dump_path), hostname or 'localhost', name)
|
||||||
|
|
||||||
|
@ -60,9 +60,7 @@ def remove_database_dumps(dump_path, database_type_name, log_prefix, dry_run):
|
||||||
'''
|
'''
|
||||||
dry_run_label = ' (dry run; not actually removing anything)' if dry_run else ''
|
dry_run_label = ' (dry run; not actually removing anything)' if dry_run else ''
|
||||||
|
|
||||||
logger.debug(
|
logger.debug(f'{log_prefix}: Removing {database_type_name} database dumps{dry_run_label}')
|
||||||
'{}: Removing {} database dumps{}'.format(log_prefix, database_type_name, dry_run_label)
|
|
||||||
)
|
|
||||||
|
|
||||||
expanded_path = os.path.expanduser(dump_path)
|
expanded_path = os.path.expanduser(dump_path)
|
||||||
|
|
||||||
|
@ -78,4 +76,4 @@ def convert_glob_patterns_to_borg_patterns(patterns):
|
||||||
Convert a sequence of shell glob patterns like "/etc/*" to the corresponding Borg archive
|
Convert a sequence of shell glob patterns like "/etc/*" to the corresponding Borg archive
|
||||||
patterns like "sh:etc/*".
|
patterns like "sh:etc/*".
|
||||||
'''
|
'''
|
||||||
return ['sh:{}'.format(pattern.lstrip(os.path.sep)) for pattern in patterns]
|
return [f'sh:{pattern.lstrip(os.path.sep)}' for pattern in patterns]
|
||||||
|
|
|
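
The glob-to-Borg conversion only strips the leading path separator and prefixes "sh:", since
Borg archive patterns are shell patterns relative to the archive root:

    import os

    def convert_glob_patterns_to_borg_patterns(patterns):
        # "/etc/*" becomes "sh:etc/*": archive paths have no leading slash.
        return [f'sh:{pattern.lstrip(os.path.sep)}' for pattern in patterns]

    print(convert_glob_patterns_to_borg_patterns(['/etc/*', '/home/user/.config']))
    # ['sh:etc/*', 'sh:home/user/.config']
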
@@ -99,7 +99,7 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_
     ping_url = (
         hook_config['ping_url']
         if hook_config['ping_url'].startswith('http')
-        else 'https://hc-ping.com/{}'.format(hook_config['ping_url'])
+        else f"https://hc-ping.com/{hook_config['ping_url']}"
     )
     dry_run_label = ' (dry run; not actually pinging)' if dry_run else ''

@@ -111,12 +111,10 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_

     healthchecks_state = MONITOR_STATE_TO_HEALTHCHECKS.get(state)
     if healthchecks_state:
-        ping_url = '{}/{}'.format(ping_url, healthchecks_state)
+        ping_url = f'{ping_url}/{healthchecks_state}'

-    logger.info(
-        '{}: Pinging Healthchecks {}{}'.format(config_filename, state.name.lower(), dry_run_label)
-    )
-    logger.debug('{}: Using Healthchecks ping URL {}'.format(config_filename, ping_url))
+    logger.info(f'{config_filename}: Pinging Healthchecks {state.name.lower()}{dry_run_label}')
+    logger.debug(f'{config_filename}: Using Healthchecks ping URL {ping_url}')

     if state in (monitor.State.FINISH, monitor.State.FAIL, monitor.State.LOG):
         payload = format_buffered_logs_for_payload()
|
@ -27,7 +27,7 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
|
||||||
'''
|
'''
|
||||||
dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
|
dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
|
||||||
|
|
||||||
logger.info('{}: Dumping MongoDB databases{}'.format(log_prefix, dry_run_label))
|
logger.info(f'{log_prefix}: Dumping MongoDB databases{dry_run_label}')
|
||||||
|
|
||||||
processes = []
|
processes = []
|
||||||
for database in databases:
|
for database in databases:
|
||||||
|
@ -38,9 +38,7 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
|
||||||
dump_format = database.get('format', 'archive')
|
dump_format = database.get('format', 'archive')
|
||||||
|
|
||||||
logger.debug(
|
logger.debug(
|
||||||
'{}: Dumping MongoDB database {} to {}{}'.format(
|
f'{log_prefix}: Dumping MongoDB database {name} to {dump_filename}{dry_run_label}',
|
||||||
log_prefix, name, dump_filename, dry_run_label
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
if dry_run:
|
if dry_run:
|
||||||
continue
|
continue
|
||||||
|
@ -126,9 +124,7 @@ def restore_database_dump(database_config, log_prefix, location_config, dry_run,
|
||||||
)
|
)
|
||||||
restore_command = build_restore_command(extract_process, database, dump_filename)
|
restore_command = build_restore_command(extract_process, database, dump_filename)
|
||||||
|
|
||||||
logger.debug(
|
logger.debug(f"{log_prefix}: Restoring MongoDB database {database['name']}{dry_run_label}")
|
||||||
'{}: Restoring MongoDB database {}{}'.format(log_prefix, database['name'], dry_run_label)
|
|
||||||
)
|
|
||||||
if dry_run:
|
if dry_run:
|
||||||
return
|
return
|
||||||
|
|
||||||
|
@ -165,4 +161,7 @@ def build_restore_command(extract_process, database, dump_filename):
|
||||||
command.extend(('--authenticationDatabase', database['authentication_database']))
|
command.extend(('--authenticationDatabase', database['authentication_database']))
|
||||||
if 'restore_options' in database:
|
if 'restore_options' in database:
|
||||||
command.extend(database['restore_options'].split(' '))
|
command.extend(database['restore_options'].split(' '))
|
||||||
|
if database['schemas']:
|
||||||
|
for schema in database['schemas']:
|
||||||
|
command.extend(('--nsInclude', schema))
|
||||||
return command
|
return command
|
||||||
|
|
|
@ -88,9 +88,7 @@ def execute_dump_command(
|
||||||
+ (('--user', database['username']) if 'username' in database else ())
|
+ (('--user', database['username']) if 'username' in database else ())
|
||||||
+ ('--databases',)
|
+ ('--databases',)
|
||||||
+ database_names
|
+ database_names
|
||||||
# Use shell redirection rather than execute_command(output_file=open(...)) to prevent
|
+ ('--result-file', dump_filename)
|
||||||
# the open() call on a named pipe from hanging the main borgmatic process.
|
|
||||||
+ ('>', dump_filename)
|
|
||||||
)
|
)
|
||||||
|
|
||||||
logger.debug(
|
logger.debug(
|
||||||
|
@ -102,7 +100,9 @@ def execute_dump_command(
|
||||||
dump.create_named_pipe_for_dump(dump_filename)
|
dump.create_named_pipe_for_dump(dump_filename)
|
||||||
|
|
||||||
return execute_command(
|
return execute_command(
|
||||||
dump_command, shell=True, extra_environment=extra_environment, run_to_completion=False,
|
dump_command,
|
||||||
|
extra_environment=extra_environment,
|
||||||
|
run_to_completion=False,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
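As context for the change above from shell redirection to `--result-file`: once `mysqldump` writes the dump file itself, no shell is needed, which is why `shell=True` disappears from the `execute_command()` call. A minimal standalone sketch of the same idea, with an assumed database name and output path rather than borgmatic's real values:

```python
import subprocess

# mysqldump writes its own output via --result-file, so there's no '>'
# redirection and therefore no shell involved. Example values only.
subprocess.run(
    ('mysqldump', '--databases', 'users', '--result-file', '/tmp/users.sql'),
    check=True,
)
```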
|
@ -119,7 +119,7 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
|
||||||
dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
|
dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
|
||||||
processes = []
|
processes = []
|
||||||
|
|
||||||
logger.info('{}: Dumping MySQL databases{}'.format(log_prefix, dry_run_label))
|
logger.info(f'{log_prefix}: Dumping MySQL databases{dry_run_label}')
|
||||||
|
|
||||||
for database in databases:
|
for database in databases:
|
||||||
dump_path = make_dump_path(location_config)
|
dump_path = make_dump_path(location_config)
|
||||||
|
@ -209,9 +209,7 @@ def restore_database_dump(database_config, log_prefix, location_config, dry_run,
|
||||||
)
|
)
|
||||||
extra_environment = {'MYSQL_PWD': database['password']} if 'password' in database else None
|
extra_environment = {'MYSQL_PWD': database['password']} if 'password' in database else None
|
||||||
|
|
||||||
logger.debug(
|
logger.debug(f"{log_prefix}: Restoring MySQL database {database['name']}{dry_run_label}")
|
||||||
'{}: Restoring MySQL database {}{}'.format(log_prefix, database['name'], dry_run_label)
|
|
||||||
)
|
|
||||||
if dry_run:
|
if dry_run:
|
||||||
return
|
return
|
||||||
|
|
||||||
|
|
|
@ -29,14 +29,12 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_
|
||||||
'''
|
'''
|
||||||
if state != monitor.State.FAIL:
|
if state != monitor.State.FAIL:
|
||||||
logger.debug(
|
logger.debug(
|
||||||
'{}: Ignoring unsupported monitoring {} in PagerDuty hook'.format(
|
f'{config_filename}: Ignoring unsupported monitoring {state.name.lower()} in PagerDuty hook',
|
||||||
config_filename, state.name.lower()
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
return
|
return
|
||||||
|
|
||||||
dry_run_label = ' (dry run; not actually sending)' if dry_run else ''
|
dry_run_label = ' (dry run; not actually sending)' if dry_run else ''
|
||||||
logger.info('{}: Sending failure event to PagerDuty {}'.format(config_filename, dry_run_label))
|
logger.info(f'{config_filename}: Sending failure event to PagerDuty {dry_run_label}')
|
||||||
|
|
||||||
if dry_run:
|
if dry_run:
|
||||||
return
|
return
|
||||||
|
@ -50,7 +48,7 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_
|
||||||
'routing_key': hook_config['integration_key'],
|
'routing_key': hook_config['integration_key'],
|
||||||
'event_action': 'trigger',
|
'event_action': 'trigger',
|
||||||
'payload': {
|
'payload': {
|
||||||
'summary': 'backup failed on {}'.format(hostname),
|
'summary': f'backup failed on {hostname}',
|
||||||
'severity': 'error',
|
'severity': 'error',
|
||||||
'source': hostname,
|
'source': hostname,
|
||||||
'timestamp': local_timestamp,
|
'timestamp': local_timestamp,
|
||||||
|
@ -65,7 +63,7 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
logger.debug('{}: Using PagerDuty payload: {}'.format(config_filename, payload))
|
logger.debug(f'{config_filename}: Using PagerDuty payload: {payload}')
|
||||||
|
|
||||||
logging.getLogger('urllib3').setLevel(logging.ERROR)
|
logging.getLogger('urllib3').setLevel(logging.ERROR)
|
||||||
try:
|
try:
|
||||||
|
|
|
@ -1,4 +1,5 @@
|
||||||
import csv
|
import csv
|
||||||
|
import itertools
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
|
|
||||||
|
@ -93,7 +94,7 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
|
||||||
dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
|
dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
|
||||||
processes = []
|
processes = []
|
||||||
|
|
||||||
logger.info('{}: Dumping PostgreSQL databases{}'.format(log_prefix, dry_run_label))
|
logger.info(f'{log_prefix}: Dumping PostgreSQL databases{dry_run_label}')
|
||||||
|
|
||||||
for database in databases:
|
for database in databases:
|
||||||
extra_environment = make_extra_environment(database)
|
extra_environment = make_extra_environment(database)
|
||||||
|
@ -122,7 +123,12 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
command = (
|
command = (
|
||||||
(dump_command, '--no-password', '--clean', '--if-exists',)
|
(
|
||||||
|
dump_command,
|
||||||
|
'--no-password',
|
||||||
|
'--clean',
|
||||||
|
'--if-exists',
|
||||||
|
)
|
||||||
+ (('--host', database['hostname']) if 'hostname' in database else ())
|
+ (('--host', database['hostname']) if 'hostname' in database else ())
|
||||||
+ (('--port', str(database['port'])) if 'port' in database else ())
|
+ (('--port', str(database['port'])) if 'port' in database else ())
|
||||||
+ (('--username', database['username']) if 'username' in database else ())
|
+ (('--username', database['username']) if 'username' in database else ())
|
||||||
|
@ -145,7 +151,9 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
|
||||||
if dump_format == 'directory':
|
if dump_format == 'directory':
|
||||||
dump.create_parent_directory_for_dump(dump_filename)
|
dump.create_parent_directory_for_dump(dump_filename)
|
||||||
execute_command(
|
execute_command(
|
||||||
command, shell=True, extra_environment=extra_environment,
|
command,
|
||||||
|
shell=True,
|
||||||
|
extra_environment=extra_environment,
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
dump.create_named_pipe_for_dump(dump_filename)
|
dump.create_named_pipe_for_dump(dump_filename)
|
||||||
|
@ -225,12 +233,16 @@ def restore_database_dump(database_config, log_prefix, location_config, dry_run,
|
||||||
+ (('--username', database['username']) if 'username' in database else ())
|
+ (('--username', database['username']) if 'username' in database else ())
|
||||||
+ (tuple(database['restore_options'].split(' ')) if 'restore_options' in database else ())
|
+ (tuple(database['restore_options'].split(' ')) if 'restore_options' in database else ())
|
||||||
+ (() if extract_process else (dump_filename,))
|
+ (() if extract_process else (dump_filename,))
|
||||||
|
+ tuple(
|
||||||
|
itertools.chain.from_iterable(('--schema', schema) for schema in database['schemas'])
|
||||||
|
if database['schemas']
|
||||||
|
else ()
|
||||||
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
extra_environment = make_extra_environment(database)
|
extra_environment = make_extra_environment(database)
|
||||||
|
|
||||||
logger.debug(
|
logger.debug(f"{log_prefix}: Restoring PostgreSQL database {database['name']}{dry_run_label}")
|
||||||
'{}: Restoring PostgreSQL database {}{}'.format(log_prefix, database['name'], dry_run_label)
|
|
||||||
)
|
|
||||||
if dry_run:
|
if dry_run:
|
||||||
return
|
return
|
||||||
|
|
||||||
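As a standalone illustration of the `itertools.chain.from_iterable` idiom used above to build the PostgreSQL `--schema` flags (the schema names here are assumed example values):

```python
import itertools

schemas = ('tenant1', 'tenant2')

# Flatten one ('--schema', name) pair per schema into a single flat sequence.
flags = tuple(itertools.chain.from_iterable(('--schema', schema) for schema in schemas))

assert flags == ('--schema', 'tenant1', '--schema', 'tenant2')
```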
|
|
|
@ -26,7 +26,7 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
|
||||||
dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
|
dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
|
||||||
processes = []
|
processes = []
|
||||||
|
|
||||||
logger.info('{}: Dumping SQLite databases{}'.format(log_prefix, dry_run_label))
|
logger.info(f'{log_prefix}: Dumping SQLite databases{dry_run_label}')
|
||||||
|
|
||||||
for database in databases:
|
for database in databases:
|
||||||
database_path = database['path']
|
database_path = database['path']
|
||||||
|
|
|
@ -68,7 +68,7 @@ class Multi_stream_handler(logging.Handler):
|
||||||
|
|
||||||
def emit(self, record):
|
def emit(self, record):
|
||||||
'''
|
'''
|
||||||
Dispatch the log record to the approriate stream handler for the record's log level.
|
Dispatch the log record to the appropriate stream handler for the record's log level.
|
||||||
'''
|
'''
|
||||||
self.log_level_to_handler[record.levelno].emit(record)
|
self.log_level_to_handler[record.levelno].emit(record)
|
||||||
|
|
||||||
|
@ -108,7 +108,7 @@ def color_text(color, message):
|
||||||
if not color:
|
if not color:
|
||||||
return message
|
return message
|
||||||
|
|
||||||
return '{}{}{}'.format(color, message, colorama.Style.RESET_ALL)
|
return f'{color}{message}{colorama.Style.RESET_ALL}'
|
||||||
|
|
||||||
|
|
||||||
def add_logging_level(level_name, level_number):
|
def add_logging_level(level_name, level_number):
|
||||||
|
@ -156,6 +156,7 @@ def configure_logging(
|
||||||
log_file_log_level=None,
|
log_file_log_level=None,
|
||||||
monitoring_log_level=None,
|
monitoring_log_level=None,
|
||||||
log_file=None,
|
log_file=None,
|
||||||
|
log_file_format=None,
|
||||||
):
|
):
|
||||||
'''
|
'''
|
||||||
Configure logging to go to both the console and (syslog or log file). Use the given log levels,
|
Configure logging to go to both the console and (syslog or log file). Use the given log levels,
|
||||||
|
@ -200,12 +201,18 @@ def configure_logging(
|
||||||
|
|
||||||
if syslog_path and not interactive_console():
|
if syslog_path and not interactive_console():
|
||||||
syslog_handler = logging.handlers.SysLogHandler(address=syslog_path)
|
syslog_handler = logging.handlers.SysLogHandler(address=syslog_path)
|
||||||
syslog_handler.setFormatter(logging.Formatter('borgmatic: %(levelname)s %(message)s'))
|
syslog_handler.setFormatter(
|
||||||
|
logging.Formatter('borgmatic: {levelname} {message}', style='{') # noqa: FS003
|
||||||
|
)
|
||||||
syslog_handler.setLevel(syslog_log_level)
|
syslog_handler.setLevel(syslog_log_level)
|
||||||
handlers = (console_handler, syslog_handler)
|
handlers = (console_handler, syslog_handler)
|
||||||
elif log_file:
|
elif log_file:
|
||||||
file_handler = logging.handlers.WatchedFileHandler(log_file)
|
file_handler = logging.handlers.WatchedFileHandler(log_file)
|
||||||
file_handler.setFormatter(logging.Formatter('[%(asctime)s] %(levelname)s: %(message)s'))
|
file_handler.setFormatter(
|
||||||
|
logging.Formatter(
|
||||||
|
log_file_format or '[{asctime}] {levelname}: {message}', style='{' # noqa: FS003
|
||||||
|
)
|
||||||
|
)
|
||||||
file_handler.setLevel(log_file_log_level)
|
file_handler.setLevel(log_file_log_level)
|
||||||
handlers = (console_handler, file_handler)
|
handlers = (console_handler, file_handler)
|
||||||
else:
|
else:
|
||||||
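For context on the `style='{'` arguments above: Python's `logging.Formatter` defaults to `%`-style placeholders, and brace style is what allows a user-supplied `log_file_format` such as `[{asctime}] {levelname}: {message}` to work. A minimal standalone demonstration, not borgmatic code:

```python
import logging

handler = logging.StreamHandler()
# style='{' switches the formatter from %-style to str.format-style fields.
handler.setFormatter(logging.Formatter('[{asctime}] {levelname}: {message}', style='{'))

logger = logging.getLogger('demo')
logger.addHandler(handler)
logger.warning('hello')  # e.g.: [2023-01-01 00:00:00,000] WARNING: hello
```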
|
|
|
@ -1,4 +1,4 @@
|
||||||
FROM alpine:3.17.1 as borgmatic
|
FROM docker.io/alpine:3.17.1 as borgmatic
|
||||||
|
|
||||||
COPY . /app
|
COPY . /app
|
||||||
RUN apk add --no-cache py3-pip py3-ruamel.yaml py3-ruamel.yaml.clib
|
RUN apk add --no-cache py3-pip py3-ruamel.yaml py3-ruamel.yaml.clib
|
||||||
|
@ -8,7 +8,7 @@ RUN borgmatic --help > /command-line.txt \
|
||||||
echo -e "\n--------------------------------------------------------------------------------\n" >> /command-line.txt \
|
echo -e "\n--------------------------------------------------------------------------------\n" >> /command-line.txt \
|
||||||
&& borgmatic "$action" --help >> /command-line.txt; done
|
&& borgmatic "$action" --help >> /command-line.txt; done
|
||||||
|
|
||||||
FROM node:19.5.0-alpine as html
|
FROM docker.io/node:19.5.0-alpine as html
|
||||||
|
|
||||||
ARG ENVIRONMENT=production
|
ARG ENVIRONMENT=production
|
||||||
|
|
||||||
|
@ -18,6 +18,7 @@ RUN npm install @11ty/eleventy \
|
||||||
@11ty/eleventy-plugin-syntaxhighlight \
|
@11ty/eleventy-plugin-syntaxhighlight \
|
||||||
@11ty/eleventy-plugin-inclusive-language \
|
@11ty/eleventy-plugin-inclusive-language \
|
||||||
@11ty/eleventy-navigation \
|
@11ty/eleventy-navigation \
|
||||||
|
eleventy-plugin-code-clipboard \
|
||||||
markdown-it \
|
markdown-it \
|
||||||
markdown-it-anchor \
|
markdown-it-anchor \
|
||||||
markdown-it-replace-link
|
markdown-it-replace-link
|
||||||
|
@ -27,7 +28,7 @@ COPY . /source
|
||||||
RUN NODE_ENV=${ENVIRONMENT} npx eleventy --input=/source/docs --output=/output/docs \
|
RUN NODE_ENV=${ENVIRONMENT} npx eleventy --input=/source/docs --output=/output/docs \
|
||||||
&& mv /output/docs/index.html /output/index.html
|
&& mv /output/docs/index.html /output/index.html
|
||||||
|
|
||||||
FROM nginx:1.22.1-alpine
|
FROM docker.io/nginx:1.22.1-alpine
|
||||||
|
|
||||||
COPY --from=html /output /usr/share/nginx/html
|
COPY --from=html /output /usr/share/nginx/html
|
||||||
COPY --from=borgmatic /etc/borgmatic/config.yaml /usr/share/nginx/html/docs/reference/config.yaml
|
COPY --from=borgmatic /etc/borgmatic/config.yaml /usr/share/nginx/html/docs/reference/config.yaml
|
||||||
|
|
|
@ -94,7 +94,7 @@
|
||||||
display: block;
|
display: block;
|
||||||
}
|
}
|
||||||
|
|
||||||
/* Footer catgory navigation */
|
/* Footer category navigation */
|
||||||
.elv-cat-list-active {
|
.elv-cat-list-active {
|
||||||
font-weight: 600;
|
font-weight: 600;
|
||||||
}
|
}
|
||||||
|
|
|
@ -533,3 +533,18 @@ main .elv-toc + h1 .direct-link {
|
||||||
.header-anchor:hover::after {
|
.header-anchor:hover::after {
|
||||||
content: " 🔗";
|
content: " 🔗";
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.mdi {
|
||||||
|
display: inline-block;
|
||||||
|
width: 1em;
|
||||||
|
height: 1em;
|
||||||
|
background-color: currentColor;
|
||||||
|
-webkit-mask: no-repeat center / 100%;
|
||||||
|
mask: no-repeat center / 100%;
|
||||||
|
-webkit-mask-image: var(--svg);
|
||||||
|
mask-image: var(--svg);
|
||||||
|
}
|
||||||
|
|
||||||
|
.mdi.mdi-content-copy {
|
||||||
|
--svg: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24' width='24' height='24'%3E%3Cpath fill='black' d='M19 21H8V7h11m0-2H8a2 2 0 0 0-2 2v14a2 2 0 0 0 2 2h11a2 2 0 0 0 2-2V7a2 2 0 0 0-2-2m-3-4H4a2 2 0 0 0-2 2v14h2V3h12V1Z'/%3E%3C/svg%3E");
|
||||||
|
}
|
||||||
|
|
|
@ -3,6 +3,7 @@
|
||||||
<head>
|
<head>
|
||||||
<meta charset="utf-8">
|
<meta charset="utf-8">
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||||
|
<link rel="icon" href="docs/static/borgmatic.png" type="image/x-icon">
|
||||||
<title>{{ subtitle + ' - ' if subtitle}}{{ title }}</title>
|
<title>{{ subtitle + ' - ' if subtitle}}{{ title }}</title>
|
||||||
{%- set css %}
|
{%- set css %}
|
||||||
{% include 'index.css' %}
|
{% include 'index.css' %}
|
||||||
|
@ -22,6 +23,6 @@
|
||||||
<body>
|
<body>
|
||||||
|
|
||||||
{{ content | safe }}
|
{{ content | safe }}
|
||||||
|
{% initClipboardJS %}
|
||||||
</body>
|
</body>
|
||||||
</html>
|
</html>
|
||||||
|
|
|
@ -66,6 +66,9 @@ variables you can use here:
|
||||||
|
|
||||||
* `configuration_filename`: borgmatic configuration filename in which the
|
* `configuration_filename`: borgmatic configuration filename in which the
|
||||||
hook was defined
|
hook was defined
|
||||||
|
* `log_file`
|
||||||
|
<span class="minilink minilink-addedin">New in version 1.7.12</span>:
|
||||||
|
path of the borgmatic log file, only set when the `--log-file` flag is used (see the example below)
|
||||||
* `repository`: path of the current repository as configured in the current
|
* `repository`: path of the current repository as configured in the current
|
||||||
borgmatic configuration file
|
borgmatic configuration file
|
||||||
|
|
||||||
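For instance, here's a minimal sketch of consuming `{log_file}` from a command hook. The `cp` command and destination path are illustrative assumptions, and this presumes borgmatic was invoked with `--log-file`:

```yaml
hooks:
    after_backup:
        - cp "{log_file}" /var/log/borgmatic/last-backup.log
```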
|
|
|
@ -49,9 +49,12 @@ location:
|
||||||
- /home
|
- /home
|
||||||
|
|
||||||
repositories:
|
repositories:
|
||||||
- /mnt/removable/backup.borg
|
- path: /mnt/removable/backup.borg
|
||||||
```
|
```
|
||||||
|
|
||||||
|
<span class="minilink minilink-addedin">Prior to version 1.7.10</span> Omit
|
||||||
|
the `path:` portion of the `repositories` list.
|
||||||
|
|
||||||
Then, write a `before_backup` hook in that same configuration file that uses
|
Then, write a `before_backup` hook in that same configuration file that uses
|
||||||
the external `findmnt` utility to see whether the drive is mounted before
|
the external `findmnt` utility to see whether the drive is mounted before
|
||||||
proceeding.
|
proceeding.
|
||||||
|
@ -79,13 +82,16 @@ location:
|
||||||
- /home
|
- /home
|
||||||
|
|
||||||
repositories:
|
repositories:
|
||||||
- ssh://me@buddys-server.org/./backup.borg
|
- path: ssh://me@buddys-server.org/./backup.borg
|
||||||
|
|
||||||
hooks:
|
hooks:
|
||||||
before_backup:
|
before_backup:
|
||||||
- ping -q -c 1 buddys-server.org > /dev/null || exit 75
|
- ping -q -c 1 buddys-server.org > /dev/null || exit 75
|
||||||
```
|
```
|
||||||
|
|
||||||
|
<span class="minilink minilink-addedin">Prior to version 1.7.10</span> Omit
|
||||||
|
the `path:` portion of the `repositories` list.
|
||||||
|
|
||||||
Or to only run backups if the battery level is high enough:
|
Or to only run backups if the battery level is high enough:
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
|
@ -110,8 +116,8 @@ There are some caveats you should be aware of with this feature.
|
||||||
* You'll generally want to put a soft failure command in the `before_backup`
|
* You'll generally want to put a soft failure command in the `before_backup`
|
||||||
hook, so as to gate whether the backup action occurs. While a soft failure is
|
hook, so as to gate whether the backup action occurs. While a soft failure is
|
||||||
also supported in the `after_backup` hook, returning a soft failure there
|
also supported in the `after_backup` hook, returning a soft failure there
|
||||||
won't prevent any actions from occuring, because they've already occurred!
|
won't prevent any actions from occurring, because they've already occurred!
|
||||||
Similiarly, you can return a soft failure from an `on_error` hook, but at
|
Similarly, you can return a soft failure from an `on_error` hook, but at
|
||||||
that point it's too late to prevent the error.
|
that point it's too late to prevent the error.
|
||||||
* Returning a soft failure does prevent further commands in the same hook from
|
* Returning a soft failure does prevent further commands in the same hook from
|
||||||
executing. So, like a standard error, it is an "early out". Unlike a standard
|
executing. So, like a standard error, it is an "early out". Unlike a standard
|
||||||
|
|
|
@ -136,6 +136,53 @@ hooks:
|
||||||
format: sql
|
format: sql
|
||||||
```
|
```
|
||||||
|
|
||||||
|
### Containers
|
||||||
|
|
||||||
|
If your database is running within a Docker container and borgmatic is too, no
|
||||||
|
problem—simply configure borgmatic to connect to the container's name on its
|
||||||
|
exposed port. For instance:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
hooks:
|
||||||
|
postgresql_databases:
|
||||||
|
- name: users
|
||||||
|
hostname: your-database-container-name
|
||||||
|
port: 5433
|
||||||
|
username: postgres
|
||||||
|
password: trustsome1
|
||||||
|
```
|
||||||
|
|
||||||
|
But what if borgmatic is running on the host? You can still connect to a
|
||||||
|
database container if its ports are properly exposed to the host. For
|
||||||
|
instance, when running the database container with Docker, you can specify
|
||||||
|
`--publish 127.0.0.1:5433:5432` so that it exposes the container's port 5432
|
||||||
|
to port 5433 on the host (only reachable on localhost, in this case). Or the
|
||||||
|
same thing with Docker Compose:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
services:
|
||||||
|
your-database-container-name:
|
||||||
|
image: postgres
|
||||||
|
ports:
|
||||||
|
- 127.0.0.1:5433:5432
|
||||||
|
```
|
||||||
|
|
||||||
|
And then you can connect to the database from borgmatic running on the host:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
hooks:
|
||||||
|
postgresql_databases:
|
||||||
|
- name: users
|
||||||
|
hostname: 127.0.0.1
|
||||||
|
port: 5433
|
||||||
|
username: postgres
|
||||||
|
password: trustsome1
|
||||||
|
```
|
||||||
|
|
||||||
|
Of course, alter the ports in these examples to suit your particular database
|
||||||
|
system.
|
||||||
|
|
||||||
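If you run the database with plain Docker rather than Compose, a hedged equivalent of the Compose file above looks like this; the image and password are just the example values from earlier:

```bash
docker run --detach --name your-database-container-name \
    --publish 127.0.0.1:5433:5432 \
    --env POSTGRES_PASSWORD=trustsome1 \
    postgres
```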
|
|
||||||
### No source directories
|
### No source directories
|
||||||
|
|
||||||
<span class="minilink minilink-addedin">New in version 1.7.1</span> If you
|
<span class="minilink minilink-addedin">New in version 1.7.1</span> If you
|
||||||
|
@ -154,7 +201,6 @@ hooks:
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
### External passwords
|
### External passwords
|
||||||
|
|
||||||
If you don't want to keep your database passwords in your borgmatic
|
If you don't want to keep your database passwords in your borgmatic
|
||||||
|
@ -231,7 +277,8 @@ If you have a single repository in your borgmatic configuration file(s), no
|
||||||
problem: the `restore` action figures out which repository to use.
|
problem: the `restore` action figures out which repository to use.
|
||||||
|
|
||||||
But if you have multiple repositories configured, then you'll need to specify
|
But if you have multiple repositories configured, then you'll need to specify
|
||||||
the repository path containing the archive to restore. Here's an example:
|
the repository to use via the `--repository` flag. This can be done either
|
||||||
|
with the repository's path or its label as configured in your borgmatic configuration file.
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
borgmatic restore --repository repo.borg --archive host-2023-...
|
borgmatic restore --repository repo.borg --archive host-2023-...
|
||||||
|
@ -277,6 +324,17 @@ includes any combined dump file named "all" and any other individual database
|
||||||
dumps found in the archive.
|
dumps found in the archive.
|
||||||
|
|
||||||
|
|
||||||
|
### Restore particular schemas
|
||||||
|
|
||||||
|
<span class="minilink minilink-addedin">New in version 1.7.13</span> With
|
||||||
|
PostgreSQL and MongoDB, you can limit the restore to a single schema found
|
||||||
|
within the database dump:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
borgmatic restore --archive latest --database users --schema tenant1
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
### Limitations
|
### Limitations
|
||||||
|
|
||||||
There are a few important limitations with borgmatic's current database
|
There are a few important limitations with borgmatic's current database
|
||||||
|
@ -334,6 +392,23 @@ dumps with any database system.
|
||||||
|
|
||||||
## Troubleshooting
|
## Troubleshooting
|
||||||
|
|
||||||
|
### PostgreSQL/MySQL authentication errors
|
||||||
|
|
||||||
|
With PostgreSQL and MySQL/MariaDB, if you're getting authentication errors
|
||||||
|
when borgmatic tries to connect to your database, a natural reaction is to
|
||||||
|
increase your borgmatic verbosity with `--verbosity 2` and go looking in the
|
||||||
|
logs. You'll notice, however, that your database password does not show up
|
||||||
|
in the logs. That's deliberate: borgmatic passes your password to the
|
||||||
|
database via an environment variable that never appears in the logs. So
|
||||||
|
unless you mistyped your password, the password itself is likely not the problem.
|
||||||
|
|
||||||
|
The cause of an authentication error is often on the database side—in the
|
||||||
|
configuration of which users are allowed to connect and how they are
|
||||||
|
authenticated. For instance, with PostgreSQL, check your
|
||||||
|
[pg_hba.conf](https://www.postgresql.org/docs/current/auth-pg-hba-conf.html)
|
||||||
|
file for that configuration.
|
||||||
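For instance, here's an illustrative `pg_hba.conf` entry that lets the `postgres` user from the earlier examples connect from localhost with a password challenge; adjust it to your own security requirements:

```
# TYPE  DATABASE  USER      ADDRESS       METHOD
host    all       postgres  127.0.0.1/32  md5
```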
|
|
||||||
|
|
||||||
### MySQL table lock errors
|
### MySQL table lock errors
|
||||||
|
|
||||||
If you encounter table lock errors during a database dump with MySQL/MariaDB,
|
If you encounter table lock errors during a database dump with MySQL/MariaDB,
|
||||||
|
|
|
@ -25,7 +25,7 @@ so that you can run borgmatic commands while you're hacking on them to
|
||||||
make sure your changes work.
|
make sure your changes work.
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
cd borgmatic/
|
cd borgmatic
|
||||||
pip3 install --user --editable .
|
pip3 install --user --editable .
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -51,7 +51,6 @@ pip3 install --user tox
|
||||||
Finally, to actually run tests, run:
|
Finally, to actually run tests, run:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
cd borgmatic
|
|
||||||
tox
|
tox
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -74,6 +73,15 @@ can ask isort to order your imports for you:
|
||||||
tox -e isort
|
tox -e isort
|
||||||
```
|
```
|
||||||
|
|
||||||
|
Similarly, if you get errors about spelling mistakes in source code, you can
|
||||||
|
ask [codespell](https://github.com/codespell-project/codespell) to correct
|
||||||
|
them:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
tox -e codespell
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
### End-to-end tests
|
### End-to-end tests
|
||||||
|
|
||||||
borgmatic additionally includes some end-to-end tests that integration test
|
borgmatic additionally includes some end-to-end tests that integration test
|
||||||
|
@ -87,12 +95,36 @@ If you would like to run the full test suite, first install Docker and [Docker
|
||||||
Compose](https://docs.docker.com/compose/install/). Then run:
|
Compose](https://docs.docker.com/compose/install/). Then run:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
scripts/run-full-dev-tests
|
scripts/run-end-to-end-dev-tests
|
||||||
```
|
```
|
||||||
|
|
||||||
Note that this script assumes you have permission to run Docker. If you
|
Note that this script assumes you have permission to run Docker. If you
|
||||||
don't, then you may need to run with `sudo`.
|
don't, then you may need to run with `sudo`.
|
||||||
|
|
||||||
|
|
||||||
|
#### Podman
|
||||||
|
|
||||||
|
<span class="minilink minilink-addedin">New in version 1.7.12</span>
|
||||||
|
borgmatic's end-to-end tests optionally support using
|
||||||
|
[rootless](https://github.com/containers/podman/blob/main/docs/tutorials/rootless_tutorial.md)
|
||||||
|
[Podman](https://podman.io/) instead of Docker.
|
||||||
|
|
||||||
|
Setting up Podman is outside the scope of this documentation, but here are
|
||||||
|
some key points to double-check:
|
||||||
|
|
||||||
|
* Install Podman along with `podman-docker` and your desired networking
|
||||||
|
support.
|
||||||
|
* Configure `/etc/subuid` and `/etc/subgid` to map users/groups for the
|
||||||
|
non-root user who will run tests (see the example after this list).
|
||||||
|
* Create a non-root Podman socket for that user:
|
||||||
|
```bash
|
||||||
|
systemctl --user enable --now podman.socket
|
||||||
|
```
|
||||||
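As for the `/etc/subuid` and `/etc/subgid` mapping mentioned in the list above, an entry in each file generally looks like the following; the username and ID range are assumptions, so pick a range unused on your system:

```
yourusername:100000:65536
```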
|
|
||||||
|
Then you'll be able to run end-to-end tests as per normal, and the test script
|
||||||
|
will automatically use your non-root Podman socket instead of a Docker socket.
|
||||||
|
|
||||||
|
|
||||||
## Code style
|
## Code style
|
||||||
|
|
||||||
Start with [PEP 8](https://www.python.org/dev/peps/pep-0008/). But then, apply
|
Start with [PEP 8](https://www.python.org/dev/peps/pep-0008/). But then, apply
|
||||||
|
@ -101,10 +133,10 @@ the following deviations from it:
|
||||||
* For strings, prefer single quotes over double quotes.
|
* For strings, prefer single quotes over double quotes.
|
||||||
* Limit all lines to a maximum of 100 characters.
|
* Limit all lines to a maximum of 100 characters.
|
||||||
* Use trailing commas within multiline values or argument lists.
|
* Use trailing commas within multiline values or argument lists.
|
||||||
* For multiline constructs, put opening and closing delimeters on lines
|
* For multiline constructs, put opening and closing delimiters on lines
|
||||||
separate from their contents.
|
separate from their contents.
|
||||||
* Within multiline constructs, use standard four-space indentation. Don't align
|
* Within multiline constructs, use standard four-space indentation. Don't align
|
||||||
indentation with an opening delimeter.
|
indentation with an opening delimiter.
|
||||||
|
|
||||||
borgmatic code uses the [Black](https://black.readthedocs.io/en/stable/) code
|
borgmatic code uses the [Black](https://black.readthedocs.io/en/stable/) code
|
||||||
formatter, the [Flake8](http://flake8.pycqa.org/en/latest/) code checker, and
|
formatter, the [Flake8](http://flake8.pycqa.org/en/latest/) code checker, and
|
||||||
|
@ -141,3 +173,15 @@ http://localhost:8080 to view the documentation with your changes.
|
||||||
To close the documentation server, ctrl-C the script. Note that it does not
|
To close the documentation server, ctrl-C the script. Note that it does not
|
||||||
currently auto-reload, so you'll need to stop it and re-run it for any
|
currently auto-reload, so you'll need to stop it and re-run it for any
|
||||||
additional documentation changes to take effect.
|
additional documentation changes to take effect.
|
||||||
|
|
||||||
|
|
||||||
|
#### Podman
|
||||||
|
|
||||||
|
<span class="minilink minilink-addedin">New in version 1.7.12</span>
|
||||||
|
borgmatic's developer build for documentation optionally supports using
|
||||||
|
[rootless](https://github.com/containers/podman/blob/main/docs/tutorials/rootless_tutorial.md)
|
||||||
|
[Podman](https://podman.io/) instead of Docker.
|
||||||
|
|
||||||
|
Setting up Podman is outside the scope of this documentation. But once you
|
||||||
|
install `podman-docker`, then `scripts/dev-docs` should automatically use
|
||||||
|
Podman instead of Docker.
|
||||||
|
|
|
@ -51,7 +51,8 @@ If you have a single repository in your borgmatic configuration file(s), no
|
||||||
problem: the `extract` action figures out which repository to use.
|
problem: the `extract` action figures out which repository to use.
|
||||||
|
|
||||||
But if you have multiple repositories configured, then you'll need to specify
|
But if you have multiple repositories configured, then you'll need to specify
|
||||||
the repository path containing the archive to extract. Here's an example:
|
the repository to use via the `--repository` flag. This can be done either
|
||||||
|
with the repository's path or its label as configured in your borgmatic configuration file.
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
borgmatic extract --repository repo.borg --archive host-2023-...
|
borgmatic extract --repository repo.borg --archive host-2023-...
|
||||||
|
|
|
@ -111,7 +111,7 @@ By default, borgmatic logs to a local syslog-compatible daemon if one is
|
||||||
present and borgmatic is running in a non-interactive console. Where those
|
present and borgmatic is running in a non-interactive console. Where those
|
||||||
logs show up depends on your particular system. If you're using systemd, try
|
logs show up depends on your particular system. If you're using systemd, try
|
||||||
running `journalctl -xe`. Otherwise, try viewing `/var/log/syslog` or
|
running `journalctl -xe`. Otherwise, try viewing `/var/log/syslog` or
|
||||||
similiar.
|
similar.
|
||||||
|
|
||||||
You can customize the log level used for syslog logging with the
|
You can customize the log level used for syslog logging with the
|
||||||
`--syslog-verbosity` flag, and this is independent from the console logging
|
`--syslog-verbosity` flag, and this is independent from the console logging
|
||||||
|
@ -154,5 +154,39 @@ borgmatic --log-file /path/to/file.log
|
||||||
|
|
||||||
Note that if you use the `--log-file` flag, you are responsible for rotating
|
Note that if you use the `--log-file` flag, you are responsible for rotating
|
||||||
the log file so it doesn't grow too large, for example with
|
the log file so it doesn't grow too large, for example with
|
||||||
[logrotate](https://wiki.archlinux.org/index.php/Logrotate). Also, there is a
|
[logrotate](https://wiki.archlinux.org/index.php/Logrotate).
|
||||||
`--log-file-verbosity` flag to customize the log file's log level.
|
|
||||||
|
You can use the `--log-file-verbosity` flag to customize the log file's log level:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
borgmatic --log-file /path/to/file.log --log-file-verbosity 2
|
||||||
|
```
|
||||||
|
|
||||||
|
<span class="minilink minilink-addedin">New in version 1.7.11</span> Use the
|
||||||
|
`--log-file-format` flag to override the default log message format. This
|
||||||
|
format string can contain a series of named placeholders wrapped in curly
|
||||||
|
brackets. For instance, the default log format is: `[{asctime}] {levelname}:
|
||||||
|
{message}`. This means each log message is recorded as the log time (in square
|
||||||
|
brackets), a logging level name, a colon, and the actual log message.
|
||||||
|
|
||||||
|
So if you just want each log message to get logged *without* a timestamp or a
|
||||||
|
logging level name:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
borgmatic --log-file /path/to/file.log --log-file-format "{message}"
|
||||||
|
```
|
||||||
|
|
||||||
|
Here is a list of available placeholders:
|
||||||
|
|
||||||
|
* `{asctime}`: time the log message was created
|
||||||
|
* `{levelname}`: level of the log message (`INFO`, `DEBUG`, etc.)
|
||||||
|
* `{lineno}`: line number in the source file where the log message originated
|
||||||
|
* `{message}`: actual log message
|
||||||
|
* `{pathname}`: path of the source file where the log message originated
|
||||||
|
|
||||||
|
See the [Python logging
|
||||||
|
documentation](https://docs.python.org/3/library/logging.html#logrecord-attributes)
|
||||||
|
for additional placeholders.
|
||||||
|
|
||||||
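Putting a few of these placeholders together, for example to also record where each message originated:

```bash
borgmatic --log-file /path/to/file.log \
    --log-file-format "[{asctime}] {levelname} {pathname}:{lineno} {message}"
```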
|
Note that this `--log-file-format` flag only applies to the specified
|
||||||
|
`--log-file` and not to syslog or other logging.
|
||||||
|
|
|
@ -20,11 +20,13 @@ location:
|
||||||
|
|
||||||
# Paths of local or remote repositories to backup to.
|
# Paths of local or remote repositories to backup to.
|
||||||
repositories:
|
repositories:
|
||||||
- ssh://1234@usw-s001.rsync.net/./backups.borg
|
- path: ssh://k8pDxu32@k8pDxu32.repo.borgbase.com/./repo
|
||||||
- ssh://k8pDxu32@k8pDxu32.repo.borgbase.com/./repo
|
- path: /var/lib/backups/local.borg
|
||||||
- /var/lib/backups/local.borg
|
|
||||||
```
|
```
|
||||||
|
|
||||||
|
<span class="minilink minilink-addedin">Prior to version 1.7.10</span> Omit
|
||||||
|
the `path:` portion of the `repositories` list.
|
||||||
|
|
||||||
When you run borgmatic with this configuration, it invokes Borg once for each
|
When you run borgmatic with this configuration, it invokes Borg once for each
|
||||||
configured repository in sequence. (So, not in parallel.) That means—in each
|
configured repository in sequence. (So, not in parallel.) That means—in each
|
||||||
repository—borgmatic creates a single new backup archive containing all of
|
repository—borgmatic creates a single new backup archive containing all of
|
||||||
|
@ -32,9 +34,8 @@ your source directories.
|
||||||
|
|
||||||
Here's a way of visualizing what borgmatic does with the above configuration:
|
Here's a way of visualizing what borgmatic does with the above configuration:
|
||||||
|
|
||||||
1. Backup `/home` and `/etc` to `1234@usw-s001.rsync.net:backups.borg`
|
1. Backup `/home` and `/etc` to `k8pDxu32@k8pDxu32.repo.borgbase.com:repo`
|
||||||
2. Backup `/home` and `/etc` to `k8pDxu32@k8pDxu32.repo.borgbase.com:repo`
|
2. Backup `/home` and `/etc` to `/var/lib/backups/local.borg`
|
||||||
3. Backup `/home` and `/etc` to `/var/lib/backups/local.borg`
|
|
||||||
|
|
||||||
This gives you redundancy of your data across repositories and even
|
This gives you redundancy of your data across repositories and even
|
||||||
potentially across providers.
|
potentially across providers.
|
||||||
|
|
|
@ -54,6 +54,93 @@ choice](https://torsion.org/borgmatic/docs/how-to/set-up-backups/#autopilot),
|
||||||
each entry using borgmatic's `--config` flag instead of relying on
|
each entry using borgmatic's `--config` flag instead of relying on
|
||||||
`/etc/borgmatic.d`.
|
`/etc/borgmatic.d`.
|
||||||
|
|
||||||
|
|
||||||
|
## Archive naming
|
||||||
|
|
||||||
|
If you've got multiple borgmatic configuration files, you might want to create
|
||||||
|
archives with different naming schemes for each one. This is especially handy
|
||||||
|
if each configuration file is backing up to the same Borg repository but you
|
||||||
|
still want to be able to distinguish backup archives for one application from
|
||||||
|
another.
|
||||||
|
|
||||||
|
borgmatic supports this use case with an `archive_name_format` option. The
|
||||||
|
idea is that you define a string format containing a number of [Borg
|
||||||
|
placeholders](https://borgbackup.readthedocs.io/en/stable/usage/help.html#borg-placeholders),
|
||||||
|
and borgmatic uses that format to name any new archive it creates. For
|
||||||
|
instance:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
storage:
|
||||||
|
...
|
||||||
|
archive_name_format: home-directories-{now}
|
||||||
|
```
|
||||||
|
|
||||||
|
This means that when borgmatic creates an archive, its name will start with
|
||||||
|
the string `home-directories-` and end with a timestamp for its creation time.
|
||||||
|
If `archive_name_format` is unspecified, the default is
|
||||||
|
`{hostname}-{now:%Y-%m-%dT%H:%M:%S.%f}`, meaning your system hostname plus a
|
||||||
|
timestamp in a particular format.
|
||||||
|
|
||||||
|
<span class="minilink minilink-addedin">New in version 1.7.11</span> borgmatic
|
||||||
|
uses the `archive_name_format` option to automatically limit which archives
|
||||||
|
get used for actions operating on multiple archives. This prevents, for
|
||||||
|
instance, duplicate archives from showing up in `rlist` or `info` results—even
|
||||||
|
if the same repository appears in multiple borgmatic configuration files. To
|
||||||
|
take advantage of this feature, simply use a different `archive_name_format`
|
||||||
|
in each configuration file.
|
||||||
|
|
||||||
|
Under the hood, borgmatic accomplishes this by substituting globs for certain
|
||||||
|
ephemeral data placeholders in your `archive_name_format`—and using the result
|
||||||
|
to filter archives when running supported actions.
|
||||||
|
|
||||||
|
For instance, let's say that you have this in your configuration:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
storage:
|
||||||
|
...
|
||||||
|
archive_name_format: {hostname}-user-data-{now}
|
||||||
|
```
|
||||||
|
|
||||||
|
borgmatic considers `{now}` an ephemeral data placeholder that will probably
|
||||||
|
change per archive, while `{hostname}` won't. So it turns the example value
|
||||||
|
into `{hostname}-user-data-*` and applies it to filter down the set of
|
||||||
|
archives used for actions like `rlist`, `info`, `prune`, `check`, etc.
|
||||||
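To make that substitution concrete, here's an illustrative sketch of the idea rather than borgmatic's actual implementation; the placeholder list and function name are assumptions:

```python
import re

# Placeholders whose values change from archive to archive. Assumed set,
# for illustration only.
EPHEMERAL_PLACEHOLDERS = r'\{(?:now|utcnow|pid)[^}]*\}'

def archive_name_format_to_glob(archive_name_format):
    # Each ephemeral placeholder (with any format spec, like {now:%Y-%m-%d})
    # becomes a "*" glob; stable placeholders like {hostname} pass through.
    return re.sub(EPHEMERAL_PLACEHOLDERS, '*', archive_name_format)

assert archive_name_format_to_glob('{hostname}-user-data-{now}') == '{hostname}-user-data-*'
```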
|
|
||||||
|
The end result is that when borgmatic runs the actions for a particular
|
||||||
|
application-specific configuration file, it only operates on the archives
|
||||||
|
created for that application. Of course, this doesn't apply to actions like
|
||||||
|
`compact` that operate on an entire repository.
|
||||||
|
|
||||||
|
If this behavior isn't quite smart enough for your needs, you can use the
|
||||||
|
`match_archives` option to override the pattern that borgmatic uses for
|
||||||
|
filtering archives. For example:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
storage:
|
||||||
|
...
|
||||||
|
archive_name_format: {hostname}-user-data-{now}
|
||||||
|
match_archives: sh:myhost-user-data-*
|
||||||
|
```
|
||||||
|
|
||||||
|
For Borg 1.x, use a shell pattern for the `match_archives` value and see the
|
||||||
|
[Borg patterns
|
||||||
|
documentation](https://borgbackup.readthedocs.io/en/stable/usage/help.html#borg-help-patterns)
|
||||||
|
for more information. For Borg 2.x, see the [match archives
|
||||||
|
documentation](https://borgbackup.readthedocs.io/en/2.0.0b5/usage/help.html#borg-help-match-archives).
|
||||||
|
|
||||||
|
Some borgmatic command-line actions also have a `--match-archives` flag that
|
||||||
|
overrides both the auto-matching behavior and the `match_archives`
|
||||||
|
configuration option.
|
||||||
|
|
||||||
|
<span class="minilink minilink-addedin">Prior to 1.7.11</span> The way to
|
||||||
|
limit the archives used for the `prune` action was a `prefix` option in the
|
||||||
|
`retention` section for matching against the start of archive names. And the
|
||||||
|
option for limiting the archives used for the `check` action was a separate
|
||||||
|
`prefix` in the `consistency` section. Both of these options are deprecated in
|
||||||
|
favor of the auto-matching behavior (or `match_archives`/`--match-archives`)
|
||||||
|
in newer versions of borgmatic.
|
||||||
|
|
||||||
|
|
||||||
## Configuration includes
|
## Configuration includes
|
||||||
|
|
||||||
Once you have multiple different configuration files, you might want to share
|
Once you have multiple different configuration files, you might want to share
|
||||||
|
@ -185,9 +272,140 @@ Once this include gets merged in, the resulting configuration would have a
|
||||||
When there's an option collision between the local file and the merged
|
When there's an option collision between the local file and the merged
|
||||||
include, the local file's option takes precedence.
|
include, the local file's option takes precedence.
|
||||||
|
|
||||||
|
|
||||||
|
#### List merge
|
||||||
|
|
||||||
<span class="minilink minilink-addedin">New in version 1.6.1</span> Colliding
|
<span class="minilink minilink-addedin">New in version 1.6.1</span> Colliding
|
||||||
list values are appended together.
|
list values are appended together.
|
||||||
|
|
||||||
|
<span class="minilink minilink-addedin">New in version 1.7.12</span> If there
|
||||||
|
is a list value from an include that you *don't* want in your local
|
||||||
|
configuration file, you can omit it with an `!omit` tag. For instance:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
<<: !include /etc/borgmatic/common.yaml
|
||||||
|
|
||||||
|
location:
|
||||||
|
source_directories:
|
||||||
|
- !omit /home
|
||||||
|
- /var
|
||||||
|
```
|
||||||
|
|
||||||
|
And `common.yaml` like this:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
location:
|
||||||
|
source_directories:
|
||||||
|
- /home
|
||||||
|
- /etc
|
||||||
|
```
|
||||||
|
|
||||||
|
Once this include gets merged in, the resulting configuration will have a
|
||||||
|
`source_directories` value of `/etc` and `/var`—with `/home` omitted.
|
||||||
|
|
||||||
|
This feature currently only works on scalar (e.g. string or number) list items
|
||||||
|
and will not work elsewhere in a configuration file. Be sure to put the
|
||||||
|
`!omit` tag *before* the list item (after the dash). Putting `!omit` after the
|
||||||
|
list item will not work, as it gets interpreted as part of the string. Here's
|
||||||
|
an example of some things not to do:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
<<: !include /etc/borgmatic/common.yaml
|
||||||
|
|
||||||
|
location:
|
||||||
|
source_directories:
|
||||||
|
# Do not do this! It will not work. "!omit" belongs before "/home".
|
||||||
|
- /home !omit
|
||||||
|
|
||||||
|
# Do not do this either! "!omit" only works on scalar list items.
|
||||||
|
repositories: !omit
|
||||||
|
# Also do not do this for the same reason! This is a list item, but it's
|
||||||
|
# not a scalar.
|
||||||
|
- !omit path: repo.borg
|
||||||
|
```
|
||||||
|
|
||||||
|
Additionally, the `!omit` tag only works in a configuration file that also
|
||||||
|
performs a merge include with `<<: !include`. It doesn't make sense within,
|
||||||
|
for instance, an included configuration file itself (unless it in turn
|
||||||
|
performs its own merge include). That's because `!omit` only applies to the
|
||||||
|
file doing the include; it doesn't work in reverse or propagate through
|
||||||
|
includes.
|
||||||
|
|
||||||
|
|
||||||
|
### Shallow merge
|
||||||
|
|
||||||
|
Even though deep merging is generally pretty handy for included files,
|
||||||
|
sometimes you want specific sections in the local file to take precedence over
|
||||||
|
included sections—without any merging occurring for them.
|
||||||
|
|
||||||
|
<span class="minilink minilink-addedin">New in version 1.7.12</span> That's
|
||||||
|
where the `!retain` tag comes in. Whenever you're merging an included file
|
||||||
|
into your configuration file, you can optionally add the `!retain` tag to
|
||||||
|
particular local mappings or lists to retain the local values and ignore
|
||||||
|
included values.
|
||||||
|
|
||||||
|
For instance, start with this configuration file containing the `!retain` tag
|
||||||
|
on the `retention` mapping:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
<<: !include /etc/borgmatic/common.yaml
|
||||||
|
|
||||||
|
location:
|
||||||
|
repositories:
|
||||||
|
- path: repo.borg
|
||||||
|
|
||||||
|
retention: !retain
|
||||||
|
keep_daily: 5
|
||||||
|
```
|
||||||
|
|
||||||
|
And `common.yaml` like this:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
location:
|
||||||
|
repositories:
|
||||||
|
- path: common.borg
|
||||||
|
|
||||||
|
retention:
|
||||||
|
keep_hourly: 24
|
||||||
|
keep_daily: 7
|
||||||
|
```
|
||||||
|
|
||||||
|
Once this include gets merged in, the resulting configuration will have a
|
||||||
|
`keep_daily` value of `5` and nothing else in the `retention` section. That's
|
||||||
|
because the `!retain` tag says to retain the local version of `retention` and
|
||||||
|
ignore any values coming in from the include. But because the `repositories`
|
||||||
|
list doesn't have a `!retain` tag, it still gets merged together to contain
|
||||||
|
both `common.borg` and `repo.borg`.
|
||||||
|
|
||||||
|
The `!retain` tag can only be placed on mappings and lists, and it goes right
|
||||||
|
after the name of the option (and its colon) on the same line. The effects of
|
||||||
|
`!retain` are recursive, meaning that if you place a `!retain` tag on a
|
||||||
|
top-level mapping, even deeply nested values within it will not be merged.
|
||||||
|
|
||||||
|
Additionally, the `!retain` tag only works in a configuration file that also
|
||||||
|
performs a merge include with `<<: !include`. It doesn't make sense within,
|
||||||
|
for instance, an included configuration file itself (unless it in turn
|
||||||
|
performs its own merge include). That's because `!retain` only applies to the
|
||||||
|
file doing the include; it doesn't work in reverse or propagate through
|
||||||
|
includes.
|
||||||
|
|
||||||
|
|
||||||
|
## Debugging includes
|
||||||
|
|
||||||
|
<span class="minilink minilink-addedin">New in version 1.7.12</span> If you'd
|
||||||
|
like to see what the loaded configuration looks like after includes get merged
|
||||||
|
in, run `validate-borgmatic-config` on your configuration file:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
sudo validate-borgmatic-config --show
|
||||||
|
```
|
||||||
|
|
||||||
|
You'll need to specify your configuration file with `--config` if it's not in
|
||||||
|
a default location.
|
||||||
|
|
||||||
|
This will output the merged configuration as borgmatic sees it, which can be
|
||||||
|
helpful for understanding how your includes work in practice.
|
||||||
|
|
||||||
|
|
||||||
## Configuration overrides
|
## Configuration overrides
|
||||||
|
|
||||||
|
@ -255,3 +473,51 @@ Be sure to quote your overrides if they contain spaces or other characters
|
||||||
that your shell may interpret.
|
that your shell may interpret.
|
||||||
|
|
||||||
An alternative to command-line overrides is passing in your values via [environment variables](https://torsion.org/borgmatic/docs/how-to/provide-your-passwords/).
|
An alternative to command-line overrides is passing in your values via [environment variables](https://torsion.org/borgmatic/docs/how-to/provide-your-passwords/).
|
||||||
|
|
||||||
|
|
||||||
|
## Constant interpolation
|
||||||
|
|
||||||
|
<span class="minilink minilink-addedin">New in version 1.7.10</span> Another
|
||||||
|
tool is borgmatic's support for defining custom constants. This is similar to
|
||||||
|
the [variable interpolation
|
||||||
|
feature](https://torsion.org/borgmatic/docs/how-to/add-preparation-and-cleanup-steps-to-backups/#variable-interpolation)
|
||||||
|
for command hooks, but the constants feature lets you substitute your own
|
||||||
|
custom values into anywhere in the entire configuration file. (Constants don't
|
||||||
|
work across includes or separate configuration files though.)
|
||||||
|
|
||||||
|
Here's an example usage:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
constants:
|
||||||
|
user: foo
|
||||||
|
archive_prefix: bar
|
||||||
|
|
||||||
|
location:
|
||||||
|
source_directories:
|
||||||
|
- /home/{user}/.config
|
||||||
|
- /home/{user}/.ssh
|
||||||
|
...
|
||||||
|
|
||||||
|
storage:
|
||||||
|
archive_name_format: '{archive_prefix}-{now}'
|
||||||
|
```
|
||||||
|
|
||||||
|
In this example, when borgmatic runs, all instances of `{user}` get replaced
|
||||||
|
with `foo` and all instances of `{archive_prefix}` get replaced with `bar`.
|
||||||
|
(And in this particular example, `{now}` doesn't get replaced with anything,
|
||||||
|
but gets passed directly to Borg.) After substitution, the logical result
|
||||||
|
looks something like this:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
location:
|
||||||
|
source_directories:
|
||||||
|
- /home/foo/.config
|
||||||
|
- /home/foo/.ssh
|
||||||
|
...
|
||||||
|
|
||||||
|
storage:
|
||||||
|
archive_name_format: 'bar-{now}'
|
||||||
|
```
|
||||||
|
|
||||||
|
An alternative to constants is passing in your values via [environment
|
||||||
|
variables](https://torsion.org/borgmatic/docs/how-to/provide-your-passwords/).
|
||||||
|
|
|
@ -53,7 +53,8 @@ This runs Borg's `rlist` command once on each configured borgmatic repository.
|
||||||
(The native `borgmatic rlist` action should be preferred for most use.)
|
(The native `borgmatic rlist` action should be preferred for most use.)
|
||||||
|
|
||||||
What if you only want to run Borg on a single configured borgmatic repository
|
What if you only want to run Borg on a single configured borgmatic repository
|
||||||
when you've got several configured? Not a problem.
|
when you've got several configured? Not a problem. The `--repository` argument
|
||||||
|
lets you specify the repository to use, either by its path or its label:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
borgmatic borg --repository repo.borg break-lock
|
borgmatic borg --repository repo.borg break-lock
|
||||||
|
|
|
@ -90,7 +90,7 @@ installing borgmatic:
|
||||||
* [Fedora unofficial](https://copr.fedorainfracloud.org/coprs/heffer/borgmatic/)
|
* [Fedora unofficial](https://copr.fedorainfracloud.org/coprs/heffer/borgmatic/)
|
||||||
* [Arch Linux](https://www.archlinux.org/packages/community/any/borgmatic/)
|
* [Arch Linux](https://www.archlinux.org/packages/community/any/borgmatic/)
|
||||||
* [Alpine Linux](https://pkgs.alpinelinux.org/packages?name=borgmatic)
|
* [Alpine Linux](https://pkgs.alpinelinux.org/packages?name=borgmatic)
|
||||||
* [OpenBSD](http://ports.su/sysutils/borgmatic)
|
* [OpenBSD](https://openports.pl/path/sysutils/borgmatic)
|
||||||
* [openSUSE](https://software.opensuse.org/package/borgmatic)
|
* [openSUSE](https://software.opensuse.org/package/borgmatic)
|
||||||
* [macOS (via Homebrew)](https://formulae.brew.sh/formula/borgmatic)
|
* [macOS (via Homebrew)](https://formulae.brew.sh/formula/borgmatic)
|
||||||
* [macOS (via MacPorts)](https://ports.macports.org/port/borgmatic/)
|
* [macOS (via MacPorts)](https://ports.macports.org/port/borgmatic/)
|
||||||
|
@ -157,7 +157,7 @@ variable or set the `BORG_PASSPHRASE` environment variable. See the
|
||||||
section](https://borgbackup.readthedocs.io/en/stable/quickstart.html#repository-encryption)
|
section](https://borgbackup.readthedocs.io/en/stable/quickstart.html#repository-encryption)
|
||||||
of the Borg Quick Start for more info.
|
of the Borg Quick Start for more info.
|
||||||
|
|
||||||
Alternatively, you can specify the passphrase programatically by setting
|
Alternatively, you can specify the passphrase programmatically by setting
|
||||||
either the borgmatic `encryption_passcommand` configuration variable or the
|
either the borgmatic `encryption_passcommand` configuration variable or the
|
||||||
`BORG_PASSCOMMAND` environment variable. See the [Borg Security
|
`BORG_PASSCOMMAND` environment variable. See the [Borg Security
|
||||||
FAQ](http://borgbackup.readthedocs.io/en/stable/faq.html#how-can-i-specify-the-encryption-passphrase-programmatically)
|
FAQ](http://borgbackup.readthedocs.io/en/stable/faq.html#how-can-i-specify-the-encryption-passphrase-programmatically)
|
||||||
|
@ -180,6 +180,9 @@ following command is available for that:
|
||||||
sudo validate-borgmatic-config
|
sudo validate-borgmatic-config
|
||||||
```
|
```
|
||||||
|
|
||||||
|
You'll need to specify your configuration file with `--config` if it's not in
|
||||||
|
a default location.
|
||||||
|
|
||||||
This command's exit status (`$?` in Bash) is zero when configuration is valid
|
This command's exit status (`$?` in Bash) is zero when configuration is valid
|
||||||
and non-zero otherwise.
|
and non-zero otherwise.
|
||||||
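So a hedged way to gate another command on valid configuration, for instance in a deployment script:

```bash
if sudo validate-borgmatic-config; then
    echo "borgmatic configuration is valid"
else
    echo "borgmatic configuration is invalid" >&2
    exit 1
fi
```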
|
|
||||||
|
|
|
@ -145,15 +145,18 @@ like this:
|
||||||
```yaml
|
```yaml
|
||||||
location:
|
location:
|
||||||
repositories:
|
repositories:
|
||||||
- original.borg
|
- path: original.borg
|
||||||
```
|
```
|
||||||
|
|
||||||
|
<span class="minilink minilink-addedin">Prior to version 1.7.10</span> Omit
|
||||||
|
the `path:` portion of the `repositories` list.
|
||||||
|
|
||||||
Change it to a new (not yet created) repository path:
|
Change it to a new (not yet created) repository path:
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
location:
|
location:
|
||||||
repositories:
|
repositories:
|
||||||
- upgraded.borg
|
- path: upgraded.borg
|
||||||
```
|
```
|
||||||
|
|
||||||
Then, run the `rcreate` action (formerly `init`) to create that new Borg 2
|
Then, run the `rcreate` action (formerly `init`) to create that new Borg 2
|
||||||
|
|
|
@ -7,8 +7,10 @@ eleventyNavigation:
|
||||||
---
|
---
|
||||||
## borgmatic options
|
## borgmatic options
|
||||||
|
|
||||||
Here are all of the available borgmatic command-line options. This includes the separate options for
|
Here are all of the available borgmatic command-line options, including the
|
||||||
each action sub-command:
|
separate options for each action sub-command. Note that most of the
|
||||||
|
flags listed here do not have equivalents in borgmatic's [configuration
|
||||||
|
file](https://torsion.org/borgmatic/docs/reference/configuration/).
|
||||||
|
|
||||||
```
|
```
|
||||||
{% include borgmatic/command-line.txt %}
|
{% include borgmatic/command-line.txt %}
|
||||||
|
|
20
scripts/run-end-to-end-dev-tests
Executable file
|
@ -0,0 +1,20 @@
|
||||||
|
#!/bin/sh
|
||||||
|
|
||||||
|
# This script is for running end-to-end tests on a developer machine. It sets up database containers
|
||||||
|
# to run tests against, runs the tests, and then tears down the containers.
|
||||||
|
#
|
||||||
|
# Run this script from the root directory of the borgmatic source.
|
||||||
|
#
|
||||||
|
# For more information, see:
|
||||||
|
# https://torsion.org/borgmatic/docs/how-to/develop-on-borgmatic/
|
||||||
|
|
||||||
|
set -e
|
||||||
|
|
||||||
|
USER_PODMAN_SOCKET_PATH=/run/user/$UID/podman/podman.sock
|
||||||
|
|
||||||
|
if [ -e "$USER_PODMAN_SOCKET_PATH" ]; then
|
||||||
|
export DOCKER_HOST="unix://$USER_PODMAN_SOCKET_PATH"
|
||||||
|
fi
|
||||||
|
|
||||||
|
docker-compose --file tests/end-to-end/docker-compose.yaml up --force-recreate \
|
||||||
|
--renew-anon-volumes --abort-on-container-exit
|
|
scripts/run-full-dev-tests (deleted file, 14 lines)
@@ -1,14 +0,0 @@
-#!/bin/sh
-
-# This script is for running all tests, including end-to-end tests, on a developer machine. It sets
-# up database containers to run tests against, runs the tests, and then tears down the containers.
-#
-# Run this script from the root directory of the borgmatic source.
-#
-# For more information, see:
-# https://torsion.org/borgmatic/docs/how-to/develop-on-borgmatic/
-
-set -e
-
-docker-compose --file tests/end-to-end/docker-compose.yaml up --force-recreate \
-    --renew-anon-volumes --abort-on-container-exit
@@ -3,13 +3,20 @@
 # This script installs test dependencies and runs all tests, including end-to-end tests. It
 # is designed to run inside a test container, and presumes that other test infrastructure like
 # databases are already running. Therefore, on a developer machine, you should not run this script
-# directly. Instead, run scripts/run-full-dev-tests
+# directly. Instead, run scripts/run-end-to-end-dev-tests
 #
 # For more information, see:
 # https://torsion.org/borgmatic/docs/how-to/develop-on-borgmatic/

 set -e

+if [ -z "$TEST_CONTAINER" ] ; then
+    echo "This script is designed to work inside a test container and is not intended to"
+    echo "be run manually. If you're trying to run borgmatic's end-to-end tests, execute"
+    echo "scripts/run-end-to-end-dev-tests instead."
+    exit 1
+fi
+
 apk add --no-cache python3 py3-pip borgbackup postgresql-client mariadb-client mongodb-tools \
     py3-ruamel.yaml py3-ruamel.yaml.clib bash sqlite
 # If certain dependencies of black are available in this version of Alpine, install them.
@@ -17,5 +24,9 @@ apk add --no-cache py3-typed-ast py3-regex || true
 python3 -m pip install --no-cache --upgrade pip==22.2.2 setuptools==64.0.1
 pip3 install --ignore-installed tox==3.25.1
 export COVERAGE_FILE=/tmp/.coverage
-tox --workdir /tmp/.tox --sitepackages
+
+if [ "$1" != "--end-to-end-only" ] ; then
+    tox --workdir /tmp/.tox --sitepackages
+fi
+
 tox --workdir /tmp/.tox --sitepackages -e end-to-end
setup.cfg (16 lines changed)
@@ -4,19 +4,23 @@ description_file=README.md
 [tool:pytest]
 testpaths = tests
 addopts = --cov-report term-missing:skip-covered --cov=borgmatic --ignore=tests/end-to-end
-filterwarnings =
-    ignore:Coverage disabled.*:pytest.PytestWarning

 [flake8]
-ignore = E501,W503
+max-line-length = 100
+extend-ignore = E203,E501,W503
 exclude = *.*/*
 multiline-quotes = '''
 docstring-quotes = '''

 [tool:isort]
-force_single_line = False
-include_trailing_comma = True
+profile=black
 known_first_party = borgmatic
 line_length = 100
-multi_line_output = 3
 skip = .tox
+
+[codespell]
+skip = .git,.tox,build
+
+[pycodestyle]
+ignore = E203
+max_line_length = 100
setup.py (3 lines changed)
@@ -1,6 +1,6 @@
 from setuptools import find_packages, setup

-VERSION = '1.7.10.dev0'
+VERSION = '1.7.13.dev0'


 setup(
@@ -32,6 +32,7 @@ setup(
     install_requires=(
         'colorama>=0.4.1,<0.5',
         'jsonschema',
+        'packaging',
         'requests',
         'ruamel.yaml>0.15.0,<0.18.0',
         'setuptools',
@@ -1,24 +1,33 @@
 appdirs==1.4.4; python_version >= '3.8'
-attrs==20.3.0; python_version >= '3.8'
-black==19.10b0; python_version >= '3.8'
-click==7.1.2; python_version >= '3.8'
-colorama==0.4.4
-coverage==5.3
-flake8==4.0.1
+attrs==22.2.0; python_version >= '3.8'
+black==23.3.0; python_version >= '3.8'
+chardet==5.1.0
+click==8.1.3; python_version >= '3.8'
+codespell==2.2.4
+colorama==0.4.6
+coverage==7.2.3
+flake8==6.0.0
 flake8-quotes==3.3.2
-flexmock==0.10.4
-isort==5.9.1
-mccabe==0.6.1
-pluggy==0.13.1
-pathspec==0.8.1; python_version >= '3.8'
-py==1.10.0
-pycodestyle==2.8.0
-pyflakes==2.4.0
-jsonschema==3.2.0
-pytest==7.2.0
+flake8-use-fstring==1.4
+flake8-variables-names==0.0.5
+flexmock==0.11.3
+idna==3.4
+importlib_metadata==6.3.0; python_version < '3.8'
+isort==5.12.0
+mccabe==0.7.0
+packaging==23.1
+pluggy==1.0.0
+pathspec==0.11.1; python_version >= '3.8'
+py==1.11.0
+pycodestyle==2.10.0
+pyflakes==3.0.1
+jsonschema==4.17.3
+pytest==7.3.0
 pytest-cov==4.0.0
 regex; python_version >= '3.8'
-requests==2.25.0
+requests==2.28.2
 ruamel.yaml>0.15.0,<0.18.0
 toml==0.10.2; python_version >= '3.8'
 typed-ast; python_version >= '3.8'
+typing-extensions==4.5.0; python_version < '3.8'
+zipp==3.15.0; python_version < '3.8'
@@ -1,30 +1,34 @@
 version: '3'
 services:
   postgresql:
-    image: postgres:13.1-alpine
+    image: docker.io/postgres:13.1-alpine
     environment:
       POSTGRES_PASSWORD: test
       POSTGRES_DB: test
   mysql:
-    image: mariadb:10.5
+    image: docker.io/mariadb:10.5
     environment:
       MYSQL_ROOT_PASSWORD: test
       MYSQL_DATABASE: test
   mongodb:
-    image: mongo:5.0.5
+    image: docker.io/mongo:5.0.5
     environment:
       MONGO_INITDB_ROOT_USERNAME: root
       MONGO_INITDB_ROOT_PASSWORD: test
   tests:
-    image: alpine:3.13
+    image: docker.io/alpine:3.13
+    environment:
+      TEST_CONTAINER: true
     volumes:
       - "../..:/app:ro"
     tmpfs:
       - "/app/borgmatic.egg-info"
+      - "/app/build"
     tty: true
     working_dir: /app
-    command:
-      - /app/scripts/run-full-tests
+    entrypoint: /app/scripts/run-full-tests
+    command: --end-to-end-only
    depends_on:
      - postgresql
      - mysql
+      - mongodb
@@ -12,17 +12,14 @@ def generate_configuration(config_path, repository_path):
     to work for testing (including injecting the given repository path and tacking on an encryption
     passphrase).
     '''
-    subprocess.check_call(
-        'generate-borgmatic-config --destination {}'.format(config_path).split(' ')
-    )
+    subprocess.check_call(f'generate-borgmatic-config --destination {config_path}'.split(' '))
     config = (
         open(config_path)
         .read()
         .replace('ssh://user@backupserver/./sourcehostname.borg', repository_path)
-        .replace('- ssh://user@backupserver/./{fqdn}', '')
-        .replace('- /var/local/backups/local.borg', '')
-        .replace('- /home/user/path with spaces', '')
-        .replace('- /home', '- {}'.format(config_path))
+        .replace('- path: /mnt/backup', '')
+        .replace('label: local', '')
+        .replace('- /home', f'- {config_path}')
         .replace('- /etc', '')
         .replace('- /var/log/syslog*', '')
         + 'storage:\n    encryption_passphrase: "test"'
@@ -47,13 +44,13 @@ def test_borgmatic_command():
     generate_configuration(config_path, repository_path)

     subprocess.check_call(
-        'borgmatic -v 2 --config {} init --encryption repokey'.format(config_path).split(' ')
+        f'borgmatic -v 2 --config {config_path} init --encryption repokey'.split(' ')
     )

     # Run borgmatic to generate a backup archive, and then list it to make sure it exists.
-    subprocess.check_call('borgmatic --config {}'.format(config_path).split(' '))
+    subprocess.check_call(f'borgmatic --config {config_path}'.split(' '))
     output = subprocess.check_output(
-        'borgmatic --config {} list --json'.format(config_path).split(' ')
+        f'borgmatic --config {config_path} list --json'.split(' ')
     ).decode(sys.stdout.encoding)
     parsed_output = json.loads(output)
@@ -64,16 +61,14 @@ def test_borgmatic_command():
     # Extract the created archive into the current (temporary) directory, and confirm that the
     # extracted file looks right.
     output = subprocess.check_output(
-        'borgmatic --config {} extract --archive {}'.format(config_path, archive_name).split(
-            ' '
-        )
+        f'borgmatic --config {config_path} extract --archive {archive_name}'.split(' '),
     ).decode(sys.stdout.encoding)
     extracted_config_path = os.path.join(extract_path, config_path)
     assert open(extracted_config_path).read() == open(config_path).read()

     # Exercise the info action.
     output = subprocess.check_output(
-        'borgmatic --config {} info --json'.format(config_path).split(' ')
+        f'borgmatic --config {config_path} info --json'.split(' '),
     ).decode(sys.stdout.encoding)
     parsed_output = json.loads(output)
@@ -189,7 +189,7 @@ def test_database_dump_with_error_causes_borgmatic_to_exit():
                 '-v',
                 '2',
                 '--override',
-                "hooks.postgresql_databases=[{'name': 'nope'}]",
+                "hooks.postgresql_databases=[{'name': 'nope'}]",  # noqa: FS003
             ]
         )
     finally:
@@ -10,17 +10,15 @@ def generate_configuration(config_path, repository_path):
     to work for testing (including injecting the given repository path and tacking on an encryption
     passphrase).
     '''
-    subprocess.check_call(
-        'generate-borgmatic-config --destination {}'.format(config_path).split(' ')
-    )
+    subprocess.check_call(f'generate-borgmatic-config --destination {config_path}'.split(' '))
     config = (
         open(config_path)
         .read()
         .replace('ssh://user@backupserver/./sourcehostname.borg', repository_path)
-        .replace('- ssh://user@backupserver/./{fqdn}', '')
+        .replace('- ssh://user@backupserver/./{fqdn}', '')  # noqa: FS003
         .replace('- /var/local/backups/local.borg', '')
         .replace('- /home/user/path with spaces', '')
-        .replace('- /home', '- {}'.format(config_path))
+        .replace('- /home', f'- {config_path}')
         .replace('- /etc', '')
         .replace('- /var/log/syslog*', '')
         + 'storage:\n    encryption_passphrase: "test"'
@@ -1,5 +1,6 @@
 import os
 import subprocess
+import sys
 import tempfile
@@ -7,12 +8,8 @@ def test_validate_config_command_with_valid_configuration_succeeds():
     with tempfile.TemporaryDirectory() as temporary_directory:
         config_path = os.path.join(temporary_directory, 'test.yaml')

-        subprocess.check_call(
-            'generate-borgmatic-config --destination {}'.format(config_path).split(' ')
-        )
-        exit_code = subprocess.call(
-            'validate-borgmatic-config --config {}'.format(config_path).split(' ')
-        )
+        subprocess.check_call(f'generate-borgmatic-config --destination {config_path}'.split(' '))
+        exit_code = subprocess.call(f'validate-borgmatic-config --config {config_path}'.split(' '))

         assert exit_code == 0
@@ -21,16 +18,25 @@ def test_validate_config_command_with_invalid_configuration_fails():
     with tempfile.TemporaryDirectory() as temporary_directory:
         config_path = os.path.join(temporary_directory, 'test.yaml')

-        subprocess.check_call(
-            'generate-borgmatic-config --destination {}'.format(config_path).split(' ')
-        )
+        subprocess.check_call(f'generate-borgmatic-config --destination {config_path}'.split(' '))
         config = open(config_path).read().replace('keep_daily: 7', 'keep_daily: "7"')
         config_file = open(config_path, 'w')
         config_file.write(config)
         config_file.close()

-        exit_code = subprocess.call(
-            'validate-borgmatic-config --config {}'.format(config_path).split(' ')
-        )
+        exit_code = subprocess.call(f'validate-borgmatic-config --config {config_path}'.split(' '))

         assert exit_code == 1


+def test_validate_config_command_with_show_flag_displays_configuration():
+    with tempfile.TemporaryDirectory() as temporary_directory:
+        config_path = os.path.join(temporary_directory, 'test.yaml')
+
+        subprocess.check_call(f'generate-borgmatic-config --destination {config_path}'.split(' '))
+        output = subprocess.check_output(
+            f'validate-borgmatic-config --config {config_path} --show'.split(' ')
+        ).decode(sys.stdout.encoding)
+
+        assert 'location:' in output
+        assert 'repositories:' in output
tests/integration/borg/test_commands.py (new file, 108 lines)
@@ -0,0 +1,108 @@
+import copy
+
+from flexmock import flexmock
+
+import borgmatic.borg.info
+import borgmatic.borg.list
+import borgmatic.borg.rlist
+import borgmatic.borg.transfer
+import borgmatic.commands.arguments
+
+
+def assert_command_does_not_duplicate_flags(command, *args, **kwargs):
+    '''
+    Assert that the given Borg command sequence does not contain any duplicated flags, e.g.
+    "--match-archives" twice anywhere in the command.
+    '''
+    flag_counts = {}
+
+    for flag_name in command:
+        if not flag_name.startswith('--'):
+            continue
+
+        if flag_name in flag_counts:
+            flag_counts[flag_name] += 1
+        else:
+            flag_counts[flag_name] = 1
+
+    assert flag_counts == {
+        flag_name: 1 for flag_name in flag_counts
+    }, f"Duplicate flags found in: {' '.join(command)}"
+
+
+def fuzz_argument(arguments, argument_name):
+    '''
+    Given an argparse.Namespace instance of arguments and an argument name in it, copy the arguments
+    namespace and set the argument name in the copy with a fake value. Return the copied arguments.
+
+    This is useful for "fuzzing" a unit under test by passing it each possible argument in turn,
+    making sure it doesn't blow up or duplicate Borg arguments.
+    '''
+    arguments_copy = copy.copy(arguments)
+    value = getattr(arguments_copy, argument_name)
+    setattr(arguments_copy, argument_name, not value if isinstance(value, bool) else 'value')
+
+    return arguments_copy
+
+
+def test_transfer_archives_command_does_not_duplicate_flags_or_raise():
+    arguments = borgmatic.commands.arguments.parse_arguments(
+        'transfer', '--source-repository', 'foo'
+    )['transfer']
+    flexmock(borgmatic.borg.transfer).should_receive('execute_command').replace_with(
+        assert_command_does_not_duplicate_flags
+    )
+
+    for argument_name in dir(arguments):
+        if argument_name.startswith('_'):
+            continue
+
+        borgmatic.borg.transfer.transfer_archives(
+            False, 'repo', {}, '2.3.4', fuzz_argument(arguments, argument_name)
+        )
+
+
+def test_make_list_command_does_not_duplicate_flags_or_raise():
+    arguments = borgmatic.commands.arguments.parse_arguments('list')['list']
+
+    for argument_name in dir(arguments):
+        if argument_name.startswith('_'):
+            continue
+
+        command = borgmatic.borg.list.make_list_command(
+            'repo', {}, '2.3.4', fuzz_argument(arguments, argument_name)
+        )
+
+        assert_command_does_not_duplicate_flags(command)
+
+
+def test_make_rlist_command_does_not_duplicate_flags_or_raise():
+    arguments = borgmatic.commands.arguments.parse_arguments('rlist')['rlist']
+
+    for argument_name in dir(arguments):
+        if argument_name.startswith('_'):
+            continue
+
+        command = borgmatic.borg.rlist.make_rlist_command(
+            'repo', {}, '2.3.4', fuzz_argument(arguments, argument_name)
+        )
+
+        assert_command_does_not_duplicate_flags(command)
+
+
+def test_display_archives_info_command_does_not_duplicate_flags_or_raise():
+    arguments = borgmatic.commands.arguments.parse_arguments('info')['info']
+    flexmock(borgmatic.borg.info).should_receive('execute_command_and_capture_output').replace_with(
+        assert_command_does_not_duplicate_flags
+    )
+    flexmock(borgmatic.borg.info).should_receive('execute_command').replace_with(
+        assert_command_does_not_duplicate_flags
+    )
+
+    for argument_name in dir(arguments):
+        if argument_name.startswith('_'):
+            continue
+
+        borgmatic.borg.info.display_archives_info(
+            'repo', {}, '2.3.4', fuzz_argument(arguments, argument_name)
+        )
@@ -465,6 +465,20 @@ def test_parse_arguments_disallows_transfer_with_both_archive_and_match_archives():
         )


+def test_parse_arguments_disallows_list_with_both_prefix_and_match_archives():
+    flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
+
+    with pytest.raises(ValueError):
+        module.parse_arguments('list', '--prefix', 'foo', '--match-archives', 'sh:*bar')
+
+
+def test_parse_arguments_disallows_rlist_with_both_prefix_and_match_archives():
+    flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
+
+    with pytest.raises(ValueError):
+        module.parse_arguments('rlist', '--prefix', 'foo', '--match-archives', 'sh:*bar')
+
+
 def test_parse_arguments_disallows_info_with_both_archive_and_match_archives():
     flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
@@ -18,3 +18,12 @@ def test_parse_arguments_with_multiple_config_paths_parses_as_list():
     parser = module.parse_arguments('--config', 'myconfig', 'otherconfig')

     assert parser.config_paths == ['myconfig', 'otherconfig']
+
+
+def test_parse_arguments_supports_show_flag():
+    config_paths = ['default']
+    flexmock(module.collect).should_receive('get_default_config_paths').and_return(config_paths)
+
+    parser = module.parse_arguments('--config', 'myconfig', '--show')
+
+    assert parser.show
@@ -7,7 +7,7 @@ from borgmatic.config import legacy as module

 def test_parse_section_options_with_punctuation_should_return_section_options():
     parser = module.RawConfigParser()
-    parser.read_file(StringIO('[section]\nfoo: {}\n'.format(string.punctuation)))
+    parser.read_file(StringIO(f'[section]\nfoo: {string.punctuation}\n'))

     section_format = module.Section_format(
         'section', (module.Config_option('foo', str, required=True),)
@@ -2,7 +2,6 @@ import io
 import sys

 import pytest
-import ruamel.yaml
 from flexmock import flexmock

 from borgmatic.config import load as module
@@ -10,11 +9,41 @@ from borgmatic.config import load as module

 def test_load_configuration_parses_contents():
     builtins = flexmock(sys.modules['builtins'])
-    builtins.should_receive('open').with_args('config.yaml').and_return('key: value')
+    config_file = io.StringIO('key: value')
+    config_file.name = 'config.yaml'
+    builtins.should_receive('open').with_args('config.yaml').and_return(config_file)
     assert module.load_configuration('config.yaml') == {'key': 'value'}


+def test_load_configuration_replaces_constants():
+    builtins = flexmock(sys.modules['builtins'])
+    config_file = io.StringIO(
+        '''
+        constants:
+            key: value
+        key: {key}
+        '''
+    )
+    config_file.name = 'config.yaml'
+    builtins.should_receive('open').with_args('config.yaml').and_return(config_file)
+    assert module.load_configuration('config.yaml') == {'key': 'value'}
+
+
+def test_load_configuration_replaces_complex_constants():
+    builtins = flexmock(sys.modules['builtins'])
+    config_file = io.StringIO(
+        '''
+        constants:
+            key:
+                subkey: value
+        key: {key}
+        '''
+    )
+    config_file.name = 'config.yaml'
+    builtins.should_receive('open').with_args('config.yaml').and_return(config_file)
+    assert module.load_configuration('config.yaml') == {'key': {'subkey': 'value'}}
+
+
 def test_load_configuration_inlines_include_relative_to_current_directory():
     builtins = flexmock(sys.modules['builtins'])
     flexmock(module.os).should_receive('getcwd').and_return('/tmp')
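The constants tests above pin down the feature: a top-level `constants:` mapping whose values get substituted into `{name}` placeholders elsewhere in the file. Here's a standalone sketch of that substitution idea, inferred from the tests rather than taken from borgmatic's implementation, and deliberately limited to scalar constants (the function name is illustrative):

```python
import io

import ruamel.yaml

yaml = ruamel.yaml.YAML(typ='safe')


def replace_scalar_constants(config_text):
    # Illustrative only: parse once to find a top-level "constants" mapping,
    # substitute each {name} placeholder in the raw text, then re-parse. Real
    # support for complex (mapping) constants needs node-level substitution
    # during YAML loading rather than plain text replacement.
    config = yaml.load(io.StringIO(config_text))
    constants = config.pop('constants', {}) if isinstance(config, dict) else {}

    for name, value in constants.items():
        config_text = config_text.replace('{' + name + '}', str(value))

    config = yaml.load(io.StringIO(config_text))
    config.pop('constants', None)

    return config


assert replace_scalar_constants('constants:\n    key: value\nkey: {key}\n') == {'key': 'value'}
```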
@@ -120,6 +149,248 @@ def test_load_configuration_merges_include():
     assert module.load_configuration('config.yaml') == {'foo': 'override', 'baz': 'quux'}


+def test_load_configuration_with_retain_tag_merges_include_but_keeps_local_values():
+    builtins = flexmock(sys.modules['builtins'])
+    flexmock(module.os).should_receive('getcwd').and_return('/tmp')
+    flexmock(module.os.path).should_receive('isabs').and_return(False)
+    flexmock(module.os.path).should_receive('exists').and_return(True)
+    include_file = io.StringIO(
+        '''
+        stuff:
+            foo: bar
+            baz: quux
+
+        other:
+            a: b
+            c: d
+        '''
+    )
+    include_file.name = 'include.yaml'
+    builtins.should_receive('open').with_args('/tmp/include.yaml').and_return(include_file)
+    config_file = io.StringIO(
+        '''
+        stuff: !retain
+            foo: override
+
+        other:
+            a: override
+        <<: !include include.yaml
+        '''
+    )
+    config_file.name = 'config.yaml'
+    builtins.should_receive('open').with_args('config.yaml').and_return(config_file)
+
+    assert module.load_configuration('config.yaml') == {
+        'stuff': {'foo': 'override'},
+        'other': {'a': 'override', 'c': 'd'},
+    }
+
+
+def test_load_configuration_with_retain_tag_but_without_merge_include_raises():
+    builtins = flexmock(sys.modules['builtins'])
+    flexmock(module.os).should_receive('getcwd').and_return('/tmp')
+    flexmock(module.os.path).should_receive('isabs').and_return(False)
+    flexmock(module.os.path).should_receive('exists').and_return(True)
+    include_file = io.StringIO(
+        '''
+        stuff: !retain
+            foo: bar
+            baz: quux
+        '''
+    )
+    include_file.name = 'include.yaml'
+    builtins.should_receive('open').with_args('/tmp/include.yaml').and_return(include_file)
+    config_file = io.StringIO(
+        '''
+        stuff:
+            foo: override
+        <<: !include include.yaml
+        '''
+    )
+    config_file.name = 'config.yaml'
+    builtins.should_receive('open').with_args('config.yaml').and_return(config_file)
+
+    with pytest.raises(ValueError):
+        module.load_configuration('config.yaml')
+
+
+def test_load_configuration_with_retain_tag_on_scalar_raises():
+    builtins = flexmock(sys.modules['builtins'])
+    flexmock(module.os).should_receive('getcwd').and_return('/tmp')
+    flexmock(module.os.path).should_receive('isabs').and_return(False)
+    flexmock(module.os.path).should_receive('exists').and_return(True)
+    include_file = io.StringIO(
+        '''
+        stuff:
+            foo: bar
+            baz: quux
+        '''
+    )
+    include_file.name = 'include.yaml'
+    builtins.should_receive('open').with_args('/tmp/include.yaml').and_return(include_file)
+    config_file = io.StringIO(
+        '''
+        stuff:
+            foo: !retain override
+        <<: !include include.yaml
+        '''
+    )
+    config_file.name = 'config.yaml'
+    builtins.should_receive('open').with_args('config.yaml').and_return(config_file)
+
+    with pytest.raises(ValueError):
+        module.load_configuration('config.yaml')
+
+
+def test_load_configuration_with_omit_tag_merges_include_and_omits_requested_values():
+    builtins = flexmock(sys.modules['builtins'])
+    flexmock(module.os).should_receive('getcwd').and_return('/tmp')
+    flexmock(module.os.path).should_receive('isabs').and_return(False)
+    flexmock(module.os.path).should_receive('exists').and_return(True)
+    include_file = io.StringIO(
+        '''
+        stuff:
+            - a
+            - b
+            - c
+        '''
+    )
+    include_file.name = 'include.yaml'
+    builtins.should_receive('open').with_args('/tmp/include.yaml').and_return(include_file)
+    config_file = io.StringIO(
+        '''
+        stuff:
+            - x
+            - !omit b
+            - y
+        <<: !include include.yaml
+        '''
+    )
+    config_file.name = 'config.yaml'
+    builtins.should_receive('open').with_args('config.yaml').and_return(config_file)
+
+    assert module.load_configuration('config.yaml') == {'stuff': ['a', 'c', 'x', 'y']}
+
+
+def test_load_configuration_with_omit_tag_on_unknown_value_merges_include_and_does_not_raise():
+    builtins = flexmock(sys.modules['builtins'])
+    flexmock(module.os).should_receive('getcwd').and_return('/tmp')
+    flexmock(module.os.path).should_receive('isabs').and_return(False)
+    flexmock(module.os.path).should_receive('exists').and_return(True)
+    include_file = io.StringIO(
+        '''
+        stuff:
+            - a
+            - b
+            - c
+        '''
+    )
+    include_file.name = 'include.yaml'
+    builtins.should_receive('open').with_args('/tmp/include.yaml').and_return(include_file)
+    config_file = io.StringIO(
+        '''
+        stuff:
+            - x
+            - !omit q
+            - y
+        <<: !include include.yaml
+        '''
+    )
+    config_file.name = 'config.yaml'
+    builtins.should_receive('open').with_args('config.yaml').and_return(config_file)
+
+    assert module.load_configuration('config.yaml') == {'stuff': ['a', 'b', 'c', 'x', 'y']}
+
+
+def test_load_configuration_with_omit_tag_on_non_list_item_raises():
+    builtins = flexmock(sys.modules['builtins'])
+    flexmock(module.os).should_receive('getcwd').and_return('/tmp')
+    flexmock(module.os.path).should_receive('isabs').and_return(False)
+    flexmock(module.os.path).should_receive('exists').and_return(True)
+    include_file = io.StringIO(
+        '''
+        stuff:
+            - a
+            - b
+            - c
+        '''
+    )
+    include_file.name = 'include.yaml'
+    builtins.should_receive('open').with_args('/tmp/include.yaml').and_return(include_file)
+    config_file = io.StringIO(
+        '''
+        stuff: !omit
+            - x
+            - y
+        <<: !include include.yaml
+        '''
+    )
+    config_file.name = 'config.yaml'
+    builtins.should_receive('open').with_args('config.yaml').and_return(config_file)
+
+    with pytest.raises(ValueError):
+        module.load_configuration('config.yaml')
+
+
+def test_load_configuration_with_omit_tag_on_non_scalar_list_item_raises():
+    builtins = flexmock(sys.modules['builtins'])
+    flexmock(module.os).should_receive('getcwd').and_return('/tmp')
+    flexmock(module.os.path).should_receive('isabs').and_return(False)
+    flexmock(module.os.path).should_receive('exists').and_return(True)
+    include_file = io.StringIO(
+        '''
+        stuff:
+            - foo: bar
+              baz: quux
+        '''
+    )
+    include_file.name = 'include.yaml'
+    builtins.should_receive('open').with_args('/tmp/include.yaml').and_return(include_file)
+    config_file = io.StringIO(
+        '''
+        stuff:
+            - !omit foo: bar
+              baz: quux
+        <<: !include include.yaml
+        '''
+    )
+    config_file.name = 'config.yaml'
+    builtins.should_receive('open').with_args('config.yaml').and_return(config_file)
+
+    with pytest.raises(ValueError):
+        module.load_configuration('config.yaml')
+
+
+def test_load_configuration_with_omit_tag_but_without_merge_raises():
+    builtins = flexmock(sys.modules['builtins'])
+    flexmock(module.os).should_receive('getcwd').and_return('/tmp')
+    flexmock(module.os.path).should_receive('isabs').and_return(False)
+    flexmock(module.os.path).should_receive('exists').and_return(True)
+    include_file = io.StringIO(
+        '''
+        stuff:
+            - a
+            - !omit b
+            - c
+        '''
+    )
+    include_file.name = 'include.yaml'
+    builtins.should_receive('open').with_args('/tmp/include.yaml').and_return(include_file)
+    config_file = io.StringIO(
+        '''
+        stuff:
+            - x
+            - y
+        <<: !include include.yaml
+        '''
+    )
+    config_file.name = 'config.yaml'
+    builtins.should_receive('open').with_args('config.yaml').and_return(config_file)
+
+    with pytest.raises(ValueError):
+        module.load_configuration('config.yaml')
+
+
 def test_load_configuration_does_not_merge_include_list():
     builtins = flexmock(sys.modules['builtins'])
     flexmock(module.os).should_receive('getcwd').and_return('/tmp')
@@ -143,42 +414,79 @@ def test_load_configuration_does_not_merge_include_list():
     config_file.name = 'config.yaml'
     builtins.should_receive('open').with_args('config.yaml').and_return(config_file)

-    with pytest.raises(ruamel.yaml.error.YAMLError):
+    with pytest.raises(module.ruamel.yaml.error.YAMLError):
         assert module.load_configuration('config.yaml')


+@pytest.mark.parametrize(
+    'node_class',
+    (
+        module.ruamel.yaml.nodes.MappingNode,
+        module.ruamel.yaml.nodes.SequenceNode,
+        module.ruamel.yaml.nodes.ScalarNode,
+    ),
+)
+def test_raise_retain_node_error_raises(node_class):
+    with pytest.raises(ValueError):
+        module.raise_retain_node_error(
+            loader=flexmock(), node=node_class(tag=flexmock(), value=flexmock())
+        )
+
+
+def test_raise_omit_node_error_raises():
+    with pytest.raises(ValueError):
+        module.raise_omit_node_error(loader=flexmock(), node=flexmock())
+
+
+def test_filter_omitted_nodes():
+    nodes = [
+        module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='a'),
+        module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='b'),
+        module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='c'),
+        module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='a'),
+        module.ruamel.yaml.nodes.ScalarNode(tag='!omit', value='b'),
+        module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='c'),
+    ]
+
+    result = module.filter_omitted_nodes(nodes)
+
+    assert [item.value for item in result] == ['a', 'c', 'a', 'c']
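The filter test above implies the semantics: a value tagged `!omit` is dropped, and so is every other occurrence of that same value in the sequence. Here's a standalone sketch consistent with that behavior; it's an inference from the test, not borgmatic's actual implementation, and the function name is illustrative.

```python
import ruamel.yaml


def filter_omitted_nodes_sketch(nodes):
    # Collect the values of all nodes tagged "!omit", then keep only the
    # nodes whose values aren't in that set; the "!omit" markers themselves
    # are dropped along the way.
    omitted_values = tuple(node.value for node in nodes if node.tag == '!omit')

    return [
        node
        for node in nodes
        if node.tag != '!omit' and node.value not in omitted_values
    ]


nodes = [
    ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value=value)
    for value in ('a', 'b', 'c')
] + [ruamel.yaml.nodes.ScalarNode(tag='!omit', value='b')]

assert [node.value for node in filter_omitted_nodes_sketch(nodes)] == ['a', 'c']
```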
 def test_deep_merge_nodes_replaces_colliding_scalar_values():
     node_values = [
         (
-            ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='retention'),
-            ruamel.yaml.nodes.MappingNode(
+            module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='retention'),
+            module.ruamel.yaml.nodes.MappingNode(
                 tag='tag:yaml.org,2002:map',
                 value=[
                     (
-                        ruamel.yaml.nodes.ScalarNode(
+                        module.ruamel.yaml.nodes.ScalarNode(
                             tag='tag:yaml.org,2002:str', value='keep_hourly'
                         ),
-                        ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:int', value='24'),
+                        module.ruamel.yaml.nodes.ScalarNode(
+                            tag='tag:yaml.org,2002:int', value='24'
+                        ),
                     ),
                     (
-                        ruamel.yaml.nodes.ScalarNode(
+                        module.ruamel.yaml.nodes.ScalarNode(
                             tag='tag:yaml.org,2002:str', value='keep_daily'
                         ),
-                        ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:int', value='7'),
+                        module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:int', value='7'),
                     ),
                 ],
             ),
         ),
         (
-            ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='retention'),
-            ruamel.yaml.nodes.MappingNode(
+            module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='retention'),
+            module.ruamel.yaml.nodes.MappingNode(
                 tag='tag:yaml.org,2002:map',
                 value=[
                     (
-                        ruamel.yaml.nodes.ScalarNode(
+                        module.ruamel.yaml.nodes.ScalarNode(
                             tag='tag:yaml.org,2002:str', value='keep_daily'
                         ),
-                        ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:int', value='5'),
+                        module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:int', value='5'),
                     ),
                 ],
             ),
@@ -200,35 +508,39 @@ def test_deep_merge_nodes_replaces_colliding_scalar_values():
 def test_deep_merge_nodes_keeps_non_colliding_scalar_values():
     node_values = [
         (
-            ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='retention'),
-            ruamel.yaml.nodes.MappingNode(
+            module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='retention'),
+            module.ruamel.yaml.nodes.MappingNode(
                 tag='tag:yaml.org,2002:map',
                 value=[
                     (
-                        ruamel.yaml.nodes.ScalarNode(
+                        module.ruamel.yaml.nodes.ScalarNode(
                             tag='tag:yaml.org,2002:str', value='keep_hourly'
                         ),
-                        ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:int', value='24'),
+                        module.ruamel.yaml.nodes.ScalarNode(
+                            tag='tag:yaml.org,2002:int', value='24'
+                        ),
                     ),
                     (
-                        ruamel.yaml.nodes.ScalarNode(
+                        module.ruamel.yaml.nodes.ScalarNode(
                             tag='tag:yaml.org,2002:str', value='keep_daily'
                         ),
-                        ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:int', value='7'),
+                        module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:int', value='7'),
                     ),
                 ],
             ),
         ),
         (
-            ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='retention'),
-            ruamel.yaml.nodes.MappingNode(
+            module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='retention'),
+            module.ruamel.yaml.nodes.MappingNode(
                 tag='tag:yaml.org,2002:map',
                 value=[
                     (
-                        ruamel.yaml.nodes.ScalarNode(
+                        module.ruamel.yaml.nodes.ScalarNode(
                             tag='tag:yaml.org,2002:str', value='keep_minutely'
                         ),
-                        ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:int', value='10'),
+                        module.ruamel.yaml.nodes.ScalarNode(
+                            tag='tag:yaml.org,2002:int', value='10'
+                        ),
                     ),
                 ],
             ),
@@ -252,28 +564,28 @@ def test_deep_merge_nodes_keeps_non_colliding_scalar_values():
 def test_deep_merge_nodes_keeps_deeply_nested_values():
     node_values = [
         (
-            ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='storage'),
-            ruamel.yaml.nodes.MappingNode(
+            module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='storage'),
+            module.ruamel.yaml.nodes.MappingNode(
                 tag='tag:yaml.org,2002:map',
                 value=[
                     (
-                        ruamel.yaml.nodes.ScalarNode(
+                        module.ruamel.yaml.nodes.ScalarNode(
                             tag='tag:yaml.org,2002:str', value='lock_wait'
                         ),
-                        ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:int', value='5'),
+                        module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:int', value='5'),
                     ),
                     (
-                        ruamel.yaml.nodes.ScalarNode(
+                        module.ruamel.yaml.nodes.ScalarNode(
                             tag='tag:yaml.org,2002:str', value='extra_borg_options'
                         ),
-                        ruamel.yaml.nodes.MappingNode(
+                        module.ruamel.yaml.nodes.MappingNode(
                             tag='tag:yaml.org,2002:map',
                             value=[
                                 (
-                                    ruamel.yaml.nodes.ScalarNode(
+                                    module.ruamel.yaml.nodes.ScalarNode(
                                         tag='tag:yaml.org,2002:str', value='init'
                                     ),
-                                    ruamel.yaml.nodes.ScalarNode(
+                                    module.ruamel.yaml.nodes.ScalarNode(
                                         tag='tag:yaml.org,2002:str', value='--init-option'
                                     ),
                                 ),
@@ -284,22 +596,22 @@ def test_deep_merge_nodes_keeps_deeply_nested_values():
             ),
         ),
         (
-            ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='storage'),
-            ruamel.yaml.nodes.MappingNode(
+            module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='storage'),
+            module.ruamel.yaml.nodes.MappingNode(
                 tag='tag:yaml.org,2002:map',
                 value=[
                     (
-                        ruamel.yaml.nodes.ScalarNode(
+                        module.ruamel.yaml.nodes.ScalarNode(
                             tag='tag:yaml.org,2002:str', value='extra_borg_options'
                         ),
-                        ruamel.yaml.nodes.MappingNode(
+                        module.ruamel.yaml.nodes.MappingNode(
                             tag='tag:yaml.org,2002:map',
                             value=[
                                 (
-                                    ruamel.yaml.nodes.ScalarNode(
+                                    module.ruamel.yaml.nodes.ScalarNode(
                                         tag='tag:yaml.org,2002:str', value='prune'
                                     ),
-                                    ruamel.yaml.nodes.ScalarNode(
+                                    module.ruamel.yaml.nodes.ScalarNode(
                                         tag='tag:yaml.org,2002:str', value='--prune-option'
                                     ),
                                 ),
@@ -331,32 +643,48 @@ def test_deep_merge_nodes_keeps_deeply_nested_values():
 def test_deep_merge_nodes_appends_colliding_sequence_values():
     node_values = [
         (
-            ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='hooks'),
-            ruamel.yaml.nodes.MappingNode(
+            module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='hooks'),
+            module.ruamel.yaml.nodes.MappingNode(
                 tag='tag:yaml.org,2002:map',
                 value=[
                     (
-                        ruamel.yaml.nodes.ScalarNode(
+                        module.ruamel.yaml.nodes.ScalarNode(
                             tag='tag:yaml.org,2002:str', value='before_backup'
                         ),
-                        ruamel.yaml.nodes.SequenceNode(
-                            tag='tag:yaml.org,2002:int', value=['echo 1', 'echo 2']
+                        module.ruamel.yaml.nodes.SequenceNode(
+                            tag='tag:yaml.org,2002:seq',
+                            value=[
+                                module.ruamel.yaml.ScalarNode(
+                                    tag='tag:yaml.org,2002:str', value='echo 1'
+                                ),
+                                module.ruamel.yaml.ScalarNode(
+                                    tag='tag:yaml.org,2002:str', value='echo 2'
+                                ),
+                            ],
                         ),
                     ),
                 ],
             ),
         ),
         (
-            ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='hooks'),
-            ruamel.yaml.nodes.MappingNode(
+            module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='hooks'),
+            module.ruamel.yaml.nodes.MappingNode(
                 tag='tag:yaml.org,2002:map',
                 value=[
                     (
-                        ruamel.yaml.nodes.ScalarNode(
+                        module.ruamel.yaml.nodes.ScalarNode(
                             tag='tag:yaml.org,2002:str', value='before_backup'
                         ),
-                        ruamel.yaml.nodes.SequenceNode(
-                            tag='tag:yaml.org,2002:int', value=['echo 3', 'echo 4']
+                        module.ruamel.yaml.nodes.SequenceNode(
+                            tag='tag:yaml.org,2002:seq',
+                            value=[
+                                module.ruamel.yaml.ScalarNode(
+                                    tag='tag:yaml.org,2002:str', value='echo 3'
+                                ),
+                                module.ruamel.yaml.ScalarNode(
+                                    tag='tag:yaml.org,2002:str', value='echo 4'
+                                ),
+                            ],
                         ),
                     ),
                 ],
@@ -371,4 +699,178 @@ def test_deep_merge_nodes_appends_colliding_sequence_values():
     options = section_value.value
     assert len(options) == 1
     assert options[0][0].value == 'before_backup'
-    assert options[0][1].value == ['echo 1', 'echo 2', 'echo 3', 'echo 4']
+    assert [item.value for item in options[0][1].value] == ['echo 1', 'echo 2', 'echo 3', 'echo 4']
+
+
+def test_deep_merge_nodes_only_keeps_mapping_values_tagged_with_retain():
+    node_values = [
+        (
+            module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='retention'),
+            module.ruamel.yaml.nodes.MappingNode(
+                tag='tag:yaml.org,2002:map',
+                value=[
+                    (
+                        module.ruamel.yaml.nodes.ScalarNode(
+                            tag='tag:yaml.org,2002:str', value='keep_hourly'
+                        ),
+                        module.ruamel.yaml.nodes.ScalarNode(
+                            tag='tag:yaml.org,2002:int', value='24'
+                        ),
+                    ),
+                    (
+                        module.ruamel.yaml.nodes.ScalarNode(
+                            tag='tag:yaml.org,2002:str', value='keep_daily'
+                        ),
+                        module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:int', value='7'),
+                    ),
+                ],
+            ),
+        ),
+        (
+            module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='retention'),
+            module.ruamel.yaml.nodes.MappingNode(
+                tag='!retain',
+                value=[
+                    (
+                        module.ruamel.yaml.nodes.ScalarNode(
+                            tag='tag:yaml.org,2002:str', value='keep_daily'
+                        ),
+                        module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:int', value='5'),
+                    ),
+                ],
+            ),
+        ),
+    ]
+
+    result = module.deep_merge_nodes(node_values)
+    assert len(result) == 1
+    (section_key, section_value) = result[0]
+    assert section_key.value == 'retention'
+    assert section_value.tag == 'tag:yaml.org,2002:map'
+    options = section_value.value
+    assert len(options) == 1
+    assert options[0][0].value == 'keep_daily'
+    assert options[0][1].value == '5'
+
+
+def test_deep_merge_nodes_only_keeps_sequence_values_tagged_with_retain():
+    node_values = [
+        (
+            module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='hooks'),
+            module.ruamel.yaml.nodes.MappingNode(
+                tag='tag:yaml.org,2002:map',
+                value=[
+                    (
+                        module.ruamel.yaml.nodes.ScalarNode(
+                            tag='tag:yaml.org,2002:str', value='before_backup'
+                        ),
+                        module.ruamel.yaml.nodes.SequenceNode(
+                            tag='tag:yaml.org,2002:seq',
+                            value=[
+                                module.ruamel.yaml.ScalarNode(
+                                    tag='tag:yaml.org,2002:str', value='echo 1'
+                                ),
+                                module.ruamel.yaml.ScalarNode(
+                                    tag='tag:yaml.org,2002:str', value='echo 2'
+                                ),
+                            ],
+                        ),
+                    ),
+                ],
+            ),
+        ),
+        (
+            module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='hooks'),
+            module.ruamel.yaml.nodes.MappingNode(
+                tag='tag:yaml.org,2002:map',
+                value=[
+                    (
+                        module.ruamel.yaml.nodes.ScalarNode(
+                            tag='tag:yaml.org,2002:str', value='before_backup'
+                        ),
+                        module.ruamel.yaml.nodes.SequenceNode(
+                            tag='!retain',
+                            value=[
+                                module.ruamel.yaml.ScalarNode(
+                                    tag='tag:yaml.org,2002:str', value='echo 3'
+                                ),
+                                module.ruamel.yaml.ScalarNode(
+                                    tag='tag:yaml.org,2002:str', value='echo 4'
+                                ),
+                            ],
+                        ),
+                    ),
+                ],
+            ),
+        ),
+    ]
+
+    result = module.deep_merge_nodes(node_values)
+    assert len(result) == 1
+    (section_key, section_value) = result[0]
+    assert section_key.value == 'hooks'
+    options = section_value.value
+    assert len(options) == 1
+    assert options[0][0].value == 'before_backup'
+    assert options[0][1].tag == 'tag:yaml.org,2002:seq'
+    assert [item.value for item in options[0][1].value] == ['echo 3', 'echo 4']
+
+
+def test_deep_merge_nodes_skips_sequence_values_tagged_with_omit():
+    node_values = [
+        (
+            module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='hooks'),
+            module.ruamel.yaml.nodes.MappingNode(
+                tag='tag:yaml.org,2002:map',
+                value=[
+                    (
+                        module.ruamel.yaml.nodes.ScalarNode(
+                            tag='tag:yaml.org,2002:str', value='before_backup'
+                        ),
+                        module.ruamel.yaml.nodes.SequenceNode(
+                            tag='tag:yaml.org,2002:seq',
+                            value=[
+                                module.ruamel.yaml.ScalarNode(
+                                    tag='tag:yaml.org,2002:str', value='echo 1'
+                                ),
+                                module.ruamel.yaml.ScalarNode(
+                                    tag='tag:yaml.org,2002:str', value='echo 2'
+                                ),
+                            ],
+                        ),
+                    ),
+                ],
+            ),
+        ),
+        (
+            module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='hooks'),
+            module.ruamel.yaml.nodes.MappingNode(
+                tag='tag:yaml.org,2002:map',
+                value=[
+                    (
+                        module.ruamel.yaml.nodes.ScalarNode(
+                            tag='tag:yaml.org,2002:str', value='before_backup'
+                        ),
+                        module.ruamel.yaml.nodes.SequenceNode(
+                            tag='tag:yaml.org,2002:seq',
+                            value=[
+                                module.ruamel.yaml.ScalarNode(tag='!omit', value='echo 2'),
+                                module.ruamel.yaml.ScalarNode(
+                                    tag='tag:yaml.org,2002:str', value='echo 3'
+                                ),
+                            ],
+                        ),
+                    ),
+                ],
+            ),
+        ),
+    ]
+
+    result = module.deep_merge_nodes(node_values)
+    assert len(result) == 1
+    (section_key, section_value) = result[0]
+    assert section_key.value == 'hooks'
+    options = section_value.value
+    assert len(options) == 1
+    assert options[0][0].value == 'before_backup'
+    assert [item.value for item in options[0][1].value] == ['echo 1', 'echo 3']
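Taken together, these tests pin down the merge rules: a `!retain`-tagged mapping or sequence on the local side wins outright instead of being merged, and `!omit`-tagged items are filtered out of merged sequences. Here's a condensed sketch of just that decision logic; the names are illustrative, and borgmatic's real `deep_merge_nodes` operates on full ruamel.yaml node tuples rather than plain values.

```python
def merge_values_sketch(included_value, local_value, local_tag, omitted=()):
    # Illustrative only: a "!retain" tag on the local value suppresses merging
    # entirely, sequences otherwise append across the merge, and any value
    # listed in "omitted" (standing in for "!omit"-tagged items) is dropped.
    if local_tag == '!retain':
        return local_value

    if isinstance(local_value, list):
        combined = included_value + local_value
        return [item for item in combined if item not in omitted]

    return local_value


# Sequences append across an include...
assert merge_values_sketch(['echo 1', 'echo 2'], ['echo 3'], 'tag:yaml.org,2002:seq') == [
    'echo 1',
    'echo 2',
    'echo 3',
]

# ...unless the local sequence is tagged "!retain", in which case it wins outright.
assert merge_values_sketch(['echo 1', 'echo 2'], ['echo 3'], '!retain') == ['echo 3']

# And "!omit"-ed values are filtered out of the merged result.
assert merge_values_sketch(
    ['echo 1', 'echo 2'], ['echo 3'], 'tag:yaml.org,2002:seq', omitted=('echo 2',)
) == ['echo 1', 'echo 3']
```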
@@ -8,7 +8,7 @@ from flexmock import flexmock
 from borgmatic.config import validate as module


-def test_schema_filename_returns_plausable_path():
+def test_schema_filename_returns_plausible_path():
     schema_path = module.schema_filename()

     assert schema_path.endswith('/schema.yaml')
@@ -63,7 +63,10 @@ def test_parse_configuration_transforms_file_into_mapping():
     config, logs = module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml')

     assert config == {
-        'location': {'source_directories': ['/home', '/etc'], 'repositories': ['hostname.borg']},
+        'location': {
+            'source_directories': ['/home', '/etc'],
+            'repositories': [{'path': 'hostname.borg'}],
+        },
         'retention': {'keep_daily': 7, 'keep_hourly': 24, 'keep_minutely': 60},
         'consistency': {'checks': [{'name': 'repository'}, {'name': 'archives'}]},
     }
@@ -89,7 +92,7 @@ def test_parse_configuration_passes_through_quoted_punctuation():
     assert config == {
         'location': {
             'source_directories': [f'/home/{string.punctuation}'],
-            'repositories': ['test.borg'],
+            'repositories': [{'path': 'test.borg'}],
         }
     }
     assert logs == []
@@ -151,7 +154,7 @@ def test_parse_configuration_inlines_include():
     config, logs = module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml')

     assert config == {
-        'location': {'source_directories': ['/home'], 'repositories': ['hostname.borg']},
+        'location': {'source_directories': ['/home'], 'repositories': [{'path': 'hostname.borg'}]},
         'retention': {'keep_daily': 7, 'keep_hourly': 24},
     }
     assert logs == []
@@ -185,7 +188,7 @@ def test_parse_configuration_merges_include():
     config, logs = module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml')

     assert config == {
-        'location': {'source_directories': ['/home'], 'repositories': ['hostname.borg']},
+        'location': {'source_directories': ['/home'], 'repositories': [{'path': 'hostname.borg'}]},
         'retention': {'keep_daily': 1, 'keep_hourly': 24},
     }
     assert logs == []
@@ -247,7 +250,7 @@ def test_parse_configuration_applies_overrides():
     assert config == {
         'location': {
             'source_directories': ['/home'],
-            'repositories': ['hostname.borg'],
+            'repositories': [{'path': 'hostname.borg'}],
             'local_path': 'borg2',
         }
     }
@@ -273,7 +276,7 @@ def test_parse_configuration_applies_normalization():
     assert config == {
         'location': {
             'source_directories': ['/home'],
-            'repositories': ['hostname.borg'],
+            'repositories': [{'path': 'hostname.borg'}],
             'exclude_if_present': ['.nobackup'],
         }
     }
Some files were not shown because too many files have changed in this diff.