Stream SQLite databases directly to Borg instead of dumping to an intermediate file (#807).
commit 5f3dc1cfb0 (parent f2023aed22)
3 changed files with 15 additions and 11 deletions
NEWS (+1)

@@ -6,6 +6,7 @@
 * #800: Add configured repository labels to the JSON output for all actions.
 * #802: The "check --force" flag now runs checks even if "check" is in "skip_actions".
 * #804: Validate the configured action names in the "skip_actions" option.
+* #807: Stream SQLite databases directly to Borg instead of dumping to an intermediate file.
 * When logging commands that borgmatic executes, log the environment variables that
   borgmatic sets for those commands. (But don't log their values, since they often contain
   passwords.)
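To illustrate what the #807 change does at a high level, here is a minimal standalone sketch of streaming a SQLite dump through a named pipe. The paths and the reader side are hypothetical stand-ins, not code from this commit; borgmatic's actual pipe handling lives in its dump helpers and in Borg itself.

import os
import subprocess
import tempfile

# Hypothetical example: dump a SQLite database through a FIFO so no
# intermediate dump file is ever written to disk.
pipe_directory = tempfile.mkdtemp()
pipe_path = os.path.join(pipe_directory, 'database')  # stand-in for the dump filename
os.mkfifo(pipe_path)

# Writer: the sqlite3 CLI serializes the database as SQL text into the pipe.
# (Assumes the sqlite3 command-line tool is installed.)
writer = subprocess.Popen(f'sqlite3 /path/to/database .dump > {pipe_path}', shell=True)

# Reader: stands in for "borg create", which reads the pipe like a regular file.
with open(pipe_path) as pipe:
    dump_sql = pipe.read()

writer.wait()
os.remove(pipe_path)
os.rmdir(pipe_directory)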
@@ -18,10 +18,12 @@ def make_dump_path(config): # pragma: no cover
 
 def dump_data_sources(databases, config, log_prefix, dry_run):
     '''
-    Dump the given SQLite3 databases to a file. The databases are supplied as a sequence of
+    Dump the given SQLite3 databases to a named pipe. The databases are supplied as a sequence of
     configuration dicts, as per the configuration schema. Use the given configuration dict to
-    construct the destination path and the given log prefix in any log entries. If this is a dry
-    run, then don't actually dump anything.
+    construct the destination path and the given log prefix in any log entries.
+
+    Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
+    pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
     '''
     dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
     processes = []
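The updated docstring above means callers now get back live dump subprocesses rather than finished files. A hedged sketch of how such processes might be driven (illustrative only; borgmatic's real orchestration happens in its create action, and the borg invocation below is a placeholder):

import subprocess


def archive_streaming_dumps(dump_processes, dump_filenames):
    # Read the named pipes while the dump subprocesses are still writing to them;
    # the archiver here is a placeholder for whatever consumes the pipes.
    subprocess.run(['borg', 'create', '::example-archive'] + list(dump_filenames), check=True)

    # Only after the reader has drained the pipes can the writers finish.
    for process in dump_processes:
        process.wait()
        if process.returncode != 0:
            raise RuntimeError(f'Dump process exited with code {process.returncode}')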
@@ -40,6 +42,7 @@ def dump_data_sources(databases, config, log_prefix, dry_run):
 
         dump_path = make_dump_path(config)
         dump_filename = dump.make_data_source_dump_filename(dump_path, database['name'])
+
         if os.path.exists(dump_filename):
             logger.warning(
                 f'{log_prefix}: Skipping duplicate dump of SQLite database at {database_path} to {dump_filename}'
@@ -59,7 +62,7 @@ def dump_data_sources(databases, config, log_prefix, dry_run):
         if dry_run:
             continue
 
-        dump.create_parent_directory_for_dump(dump_filename)
+        dump.create_named_pipe_for_dump(dump_filename)
         processes.append(execute_command(command, shell=True, run_to_completion=False))
 
     return processes
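The swap above replaces creating a parent directory for a regular dump file with creating a named pipe at the dump path. As a rough sketch of what a helper like create_named_pipe_for_dump presumably has to do (assumed behavior, not copied from borgmatic's dump module):

import os


def create_named_pipe_for_dump(dump_path):
    # Assumed sketch: make sure the parent directory exists, then create a
    # FIFO at the dump path for the dump command to write into.
    os.makedirs(os.path.dirname(dump_path), mode=0o700, exist_ok=True)
    os.mkfifo(dump_path, mode=0o600)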
@@ -13,7 +13,7 @@ def test_dump_data_sources_logs_and_skips_if_dump_already_exists():
         '/path/to/dump/database'
     )
     flexmock(module.os.path).should_receive('exists').and_return(True)
-    flexmock(module.dump).should_receive('create_parent_directory_for_dump').never()
+    flexmock(module.dump).should_receive('create_named_pipe_for_dump').never()
     flexmock(module).should_receive('execute_command').never()
 
     assert module.dump_data_sources(databases, {}, 'test.yaml', dry_run=False) == []
@@ -31,7 +31,7 @@ def test_dump_data_sources_dumps_each_database():
         '/path/to/dump/database'
     )
     flexmock(module.os.path).should_receive('exists').and_return(False)
-    flexmock(module.dump).should_receive('create_parent_directory_for_dump')
+    flexmock(module.dump).should_receive('create_named_pipe_for_dump')
     flexmock(module).should_receive('execute_command').and_return(processes[0]).and_return(
         processes[1]
     )
@@ -39,7 +39,7 @@ def test_dump_data_sources_dumps_each_database():
     assert module.dump_data_sources(databases, {}, 'test.yaml', dry_run=False) == processes
 
 
-def test_dumping_database_with_non_existent_path_warns_and_dumps_database():
+def test_dump_data_sources_with_non_existent_path_warns_and_dumps_database():
     databases = [
         {'path': '/path/to/database1', 'name': 'database1'},
     ]
@@ -51,13 +51,13 @@ def test_dumping_database_with_non_existent_path_warns_and_dumps_database():
         '/path/to/dump/database'
     )
     flexmock(module.os.path).should_receive('exists').and_return(False)
-    flexmock(module.dump).should_receive('create_parent_directory_for_dump')
+    flexmock(module.dump).should_receive('create_named_pipe_for_dump')
     flexmock(module).should_receive('execute_command').and_return(processes[0])
 
     assert module.dump_data_sources(databases, {}, 'test.yaml', dry_run=False) == processes
 
 
-def test_dumping_database_with_name_all_warns_and_dumps_all_databases():
+def test_dump_data_sources_with_name_all_warns_and_dumps_all_databases():
     databases = [
         {'path': '/path/to/database1', 'name': 'all'},
     ]
@@ -71,7 +71,7 @@ def test_dumping_database_with_name_all_warns_and_dumps_all_databases():
         '/path/to/dump/database'
     )
     flexmock(module.os.path).should_receive('exists').and_return(False)
-    flexmock(module.dump).should_receive('create_parent_directory_for_dump')
+    flexmock(module.dump).should_receive('create_named_pipe_for_dump')
     flexmock(module).should_receive('execute_command').and_return(processes[0])
 
     assert module.dump_data_sources(databases, {}, 'test.yaml', dry_run=False) == processes
@@ -85,7 +85,7 @@ def test_dump_data_sources_does_not_dump_if_dry_run():
         '/path/to/dump/database'
     )
     flexmock(module.os.path).should_receive('exists').and_return(False)
-    flexmock(module.dump).should_receive('create_parent_directory_for_dump').never()
+    flexmock(module.dump).should_receive('create_named_pipe_for_dump').never()
     flexmock(module).should_receive('execute_command').never()
 
     assert module.dump_data_sources(databases, {}, 'test.yaml', dry_run=True) == []
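For context, here is how this last test reads when assembled into a self-contained form; the import path for the hook module is assumed, and the mocked values mirror the fragments shown above rather than the full test file.

from flexmock import flexmock

import borgmatic.hooks.sqlite as module  # assumed import path for the SQLite hook


def test_dump_data_sources_does_not_dump_if_dry_run():
    databases = [{'path': '/path/to/database', 'name': 'database'}]

    flexmock(module).should_receive('make_dump_path').and_return('/path/to/dump')
    flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return(
        '/path/to/dump/database'
    )
    flexmock(module.os.path).should_receive('exists').and_return(False)

    # On a dry run, no named pipe is created and no dump command is executed.
    flexmock(module.dump).should_receive('create_named_pipe_for_dump').never()
    flexmock(module).should_receive('execute_command').never()

    assert module.dump_data_sources(databases, {}, 'test.yaml', dry_run=True) == []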