Compare commits

No commits in common. "main" and "addUptimeKumaTests" have entirely different histories.

main ... addUptimeKumaTests

21 changed files with 231 additions and 687 deletions

NEWS (16 changed lines)
@@ -1,21 +1,7 @@
-1.8.13.dev0
-* #785: Add an "only_run_on" option to consistency checks so you can limit a check to running on
-  particular days of the week. See the documentation for more information:
-  https://torsion.org/borgmatic/docs/how-to/deal-with-very-large-backups/#check-days
-* #885: Add Uptime Kuma monitoring hook. See the documentation for more information:
-  https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#uptime-kuma-hook
-* #886: Fix a PagerDuty hook traceback with Python < 3.10.
-* #889: Fix the Healthchecks ping body size limit, restoring it to the documented 100,000 bytes.
-
-1.8.12
-* #817: Add a "--max-duration" flag to the "check" action and a "max_duration" option to the
-  repository check configuration. This tells Borg to interrupt a repository check after a certain
-  duration.
+1.8.12.dev0
 * #860: Fix interaction between environment variable interpolation in constants and shell escaping.
 * #863: When color output is disabled (explicitly or implicitly), don't prefix each log line with
   the log level.
-* #865: Add an "upload_buffer_size" option to set the size of the upload buffer used in "create"
-  action.
 * #866: Fix "Argument list too long" error in the "spot" check when checking hundreds of thousands
   of files at once.
 * #874: Add the configured repository label as "repository_label" to the interpolated variables
@@ -1,4 +1,3 @@
-import calendar
 import datetime
 import hashlib
 import itertools

@@ -100,17 +99,12 @@ def parse_frequency(frequency):
     raise ValueError(f"Could not parse consistency check frequency '{frequency}'")
 
 
-WEEKDAY_DAYS = calendar.day_name[0:5]
-WEEKEND_DAYS = calendar.day_name[5:7]
-
-
 def filter_checks_on_frequency(
     config,
     borg_repository_id,
     checks,
     force,
     archives_check_id=None,
-    datetime_now=datetime.datetime.now,
 ):
     '''
     Given a configuration dict with a "checks" sequence of dicts, a Borg repository ID, a sequence

@@ -149,29 +143,6 @@ def filter_checks_on_frequency(
         if checks and check not in checks:
             continue
 
-        only_run_on = check_config.get('only_run_on')
-        if only_run_on:
-            # Use a dict instead of a set to preserve ordering.
-            days = dict.fromkeys(only_run_on)
-
-            if 'weekday' in days:
-                days = {
-                    **dict.fromkeys(day for day in days if day != 'weekday'),
-                    **dict.fromkeys(WEEKDAY_DAYS),
-                }
-            if 'weekend' in days:
-                days = {
-                    **dict.fromkeys(day for day in days if day != 'weekend'),
-                    **dict.fromkeys(WEEKEND_DAYS),
-                }
-
-            if calendar.day_name[datetime_now().weekday()] not in days:
-                logger.info(
-                    f"Skipping {check} check due to day of the week; check only runs on {'/'.join(days)} (use --force to check anyway)"
-                )
-                filtered_checks.remove(check)
-                continue
-
         frequency_delta = parse_frequency(check_config.get('frequency'))
         if not frequency_delta:
             continue

@@ -182,8 +153,8 @@ def filter_checks_on_frequency(
 
         # If we've not yet reached the time when the frequency dictates we're ready for another
         # check, skip this check.
-        if datetime_now() < check_time + frequency_delta:
-            remaining = check_time + frequency_delta - datetime_now()
+        if datetime.datetime.now() < check_time + frequency_delta:
+            remaining = check_time + frequency_delta - datetime.datetime.now()
             logger.info(
                 f'Skipping {check} check due to configured frequency; {remaining} until next check (use --force to check anyway)'
             )
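As an aside for readers following the removed `only_run_on` logic above: the weekday/weekend expansion and the day-name comparison can be reproduced in a few lines. The helper below is an illustrative, standalone sketch rather than borgmatic's API; it only assumes the `calendar`-based approach shown in the removed lines.

```python
import calendar
import datetime

# Mirrors the constants removed above.
WEEKDAY_DAYS = calendar.day_name[0:5]
WEEKEND_DAYS = calendar.day_name[5:7]


def check_runs_today(only_run_on, today=None):
    '''
    Return whether a check configured with the given "only_run_on" list should run today.
    An empty or missing list means the check runs on any day.
    '''
    if not only_run_on:
        return True

    # Use a dict instead of a set to preserve ordering, as in the removed code.
    days = dict.fromkeys(only_run_on)

    if 'weekday' in days:
        days = {
            **dict.fromkeys(day for day in days if day != 'weekday'),
            **dict.fromkeys(WEEKDAY_DAYS),
        }
    if 'weekend' in days:
        days = {
            **dict.fromkeys(day for day in days if day != 'weekend'),
            **dict.fromkeys(WEEKEND_DAYS),
        }

    today = today or datetime.datetime.now()

    return calendar.day_name[today.weekday()] in days


# Example: a check restricted to weekends.
print(check_runs_today(['weekend'], today=datetime.datetime(2024, 4, 6)))  # Saturday -> True
print(check_runs_today(['weekend'], today=datetime.datetime(2024, 4, 8)))  # Monday -> False
```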
@@ -50,10 +50,10 @@ def make_archive_filter_flags(local_borg_version, config, checks, check_argument
     return ()
 
 
-def make_check_name_flags(checks, archive_filter_flags):
+def make_check_flags(checks, archive_filter_flags):
     '''
-    Given parsed checks set and a sequence of flags to filter archives, transform the checks into
-    tuple of command-line check flags.
+    Given a parsed checks set and a sequence of flags to filter archives,
+    transform the checks into tuple of command-line check flags.
 
     For example, given parsed checks of:
 

@@ -134,30 +134,10 @@ def check_archives(
     if logger.isEnabledFor(logging.DEBUG):
         verbosity_flags = ('--debug', '--show-rc')
 
-    try:
-        repository_check_config = next(
-            check for check in config.get('checks', ()) if check.get('name') == 'repository'
-        )
-    except StopIteration:
-        repository_check_config = {}
-
-    if check_arguments.max_duration and 'archives' in checks:
-        raise ValueError('The archives check cannot run when the --max-duration flag is used')
-    if repository_check_config.get('max_duration') and 'archives' in checks:
-        raise ValueError(
-            'The archives check cannot run when the repository check has the max_duration option set'
-        )
-
-    max_duration = check_arguments.max_duration or repository_check_config.get('max_duration')
-
-    borg_environment = environment.make_environment(config)
-    borg_exit_codes = config.get('borg_exit_codes')
-
     full_command = (
         (local_path, 'check')
         + (('--repair',) if check_arguments.repair else ())
-        + (('--max-duration', str(max_duration)) if max_duration else ())
-        + make_check_name_flags(checks, archive_filter_flags)
+        + make_check_flags(checks, archive_filter_flags)
         + (('--remote-path', remote_path) if remote_path else ())
         + (('--log-json',) if global_arguments.log_json else ())
         + (('--lock-wait', str(lock_wait)) if lock_wait else ())

@@ -167,6 +147,9 @@ def check_archives(
         + flags.make_repository_flags(repository_path, local_borg_version)
     )
 
+    borg_environment = environment.make_environment(config)
+    borg_exit_codes = config.get('borg_exit_codes')
+
     # The Borg repair option triggers an interactive prompt, which won't work when output is
     # captured. And progress messes with the terminal directly.
     if check_arguments.repair or check_arguments.progress:
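For context on the removed `max_duration` handling: the command-line flag takes precedence over the configured option, and combining either with the archives check is an error. Below is a minimal sketch assuming only what the removed lines show; the function name is made up for illustration.

```python
def resolve_max_duration(flag_value, repository_check_config, checks):
    # The --max-duration flag and the repository check's max_duration option are
    # both incompatible with a simultaneous archives check.
    if flag_value and 'archives' in checks:
        raise ValueError('The archives check cannot run when the --max-duration flag is used')
    if repository_check_config.get('max_duration') and 'archives' in checks:
        raise ValueError(
            'The archives check cannot run when the repository check has the max_duration option set'
        )

    # The flag wins over the configured option.
    return flag_value or repository_check_config.get('max_duration')


# The flag (44) overrides the configured option (33), matching the
# "...flag_overrides_max_duration_option" test removed further below.
assert resolve_max_duration(44, {'max_duration': 33}, {'repository'}) == 44
assert resolve_max_duration(None, {'max_duration': 33}, {'repository'}) == 33
```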
@@ -371,7 +371,6 @@ def make_base_create_command(
     chunker_params = config.get('chunker_params', None)
     compression = config.get('compression', None)
     upload_rate_limit = config.get('upload_rate_limit', None)
-    upload_buffer_size = config.get('upload_buffer_size', None)
     umask = config.get('umask', None)
     lock_wait = config.get('lock_wait', None)
     list_filter_flags = make_list_filter_flags(local_borg_version, dry_run)

@@ -413,7 +412,6 @@ def make_base_create_command(
         + (('--chunker-params', chunker_params) if chunker_params else ())
         + (('--compression', compression) if compression else ())
         + upload_ratelimit_flags
-        + (('--upload-buffer', str(upload_buffer_size)) if upload_buffer_size else ())
         + (('--one-file-system',) if config.get('one_file_system') or stream_processes else ())
         + numeric_ids_flags
         + atime_flags
@@ -661,11 +661,6 @@ def make_parsers():
         action='store_true',
         help='Attempt to repair any inconsistencies found (for interactive use)',
     )
-    check_group.add_argument(
-        '--max-duration',
-        metavar='SECONDS',
-        help='How long to check the repository before interrupting the check, defaults to no interruption',
-    )
     check_group.add_argument(
         '-a',
         '--match-archives',
@@ -280,11 +280,6 @@ properties:
             Remote network upload rate limit in kiBytes/second. Defaults to
             unlimited.
         example: 100
-    upload_buffer_size:
-        type: integer
-        description: |
-            Size of network upload buffer in MiB. Defaults to no buffer.
-        example: 160
     retries:
         type: integer
         description: |

@@ -516,6 +511,7 @@ properties:
                      name:
                          type: string
                          enum:
+                             - repository
                              - archives
                              - data
                              - extract

@@ -546,78 +542,6 @@ properties:
                              "always": running this check every time checks
                              are run.
                          example: 2 weeks
-                     only_run_on:
-                         type: array
-                         items:
-                             type: string
-                         description: |
-                             After the "frequency" duration has elapsed, only
-                             run this check if the current day of the week
-                             matches one of these values (the name of a day of
-                             the week in the current locale). "weekday" and
-                             "weekend" are also accepted. Defaults to running
-                             the check on any day of the week.
-                         example:
-                             - Saturday
-                             - Sunday
-                - required: [name]
-                  additionalProperties: false
-                  properties:
-                      name:
-                          type: string
-                          enum:
-                              - repository
-                          description: |
-                              Name of consistency check to run: "repository",
-                              "archives", "data", "spot", and/or "extract".
-                              "repository" checks the consistency of the
-                              repository, "archives" checks all of the
-                              archives, "data" verifies the integrity of the
-                              data within the archives, "spot" checks that
-                              some percentage of source files are found in the
-                              most recent archive (with identical contents),
-                              and "extract" does an extraction dry-run of the
-                              most recent archive. Note that "data" implies
-                              "archives". See "skip_actions" for disabling
-                              checks altogether.
-                          example: spot
-                      frequency:
-                          type: string
-                          description: |
-                              How frequently to run this type of consistency
-                              check (as a best effort). The value is a number
-                              followed by a unit of time. E.g., "2 weeks" to
-                              run this consistency check no more than every
-                              two weeks for a given repository or "1 month" to
-                              run it no more than monthly. Defaults to
-                              "always": running this check every time checks
-                              are run.
-                          example: 2 weeks
-                      only_run_on:
-                          type: array
-                          items:
-                              type: string
-                          description: |
-                              After the "frequency" duration has elapsed, only
-                              run this check if the current day of the week
-                              matches one of these values (the name of a day of
-                              the week in the current locale). "weekday" and
-                              "weekend" are also accepted. Defaults to running
-                              the check on any day of the week.
-                          example:
-                              - Saturday
-                              - Sunday
-                      max_duration:
-                          type: integer
-                          description: |
-                              How many seconds to check the repository before
-                              interrupting the check. Useful for splitting a
-                              long-running repository check into multiple
-                              partial checks. Defaults to no interruption. Only
-                              applies to the "repository" check, does not check
-                              the repository index, and is not compatible with a
-                              simultaneous "archives" check or "--repair" flag.
-                          example: 3600
                 - required:
                       - name
                       - count_tolerance_percentage

@@ -655,20 +579,6 @@ properties:
                              "always": running this check every time checks
                              are run.
                          example: 2 weeks
-                     only_run_on:
-                         type: array
-                         items:
-                             type: string
-                         description: |
-                             After the "frequency" duration has elapsed, only
-                             run this check if the current day of the week
-                             matches one of these values (the name of a day of
-                             the week in the current locale). "weekday" and
-                             "weekend" are also accepted. Defaults to running
-                             the check on any day of the week.
-                         example:
-                             - Saturday
-                             - Sunday
                      count_tolerance_percentage:
                          type: number
                          description: |

@@ -1766,17 +1676,25 @@ properties:
             an account at https://healthchecks.io (or self-host Healthchecks) if
             you'd like to use this service. See borgmatic monitoring
             documentation for details.
-    uptime_kuma:
+    uptimekuma:
         type: object
-        required: ['push_url']
+        required: ['server', 'push_code']
         additionalProperties: false
         properties:
-            push_url:
+            server:
                 type: string
                 description: |
-                    Uptime Kuma push URL without query string (do not include the
-                    question mark or anything after it).
-                example: https://example.uptime.kuma/api/push/abcd1234
+                    Uptime Kuma base URL or UUID to notify when a backup
+                    begins, ends, or errors
+                example: https://example.uptime.kuma
+            push_code:
+                type: string
+                description: |
+                    Uptime Kuma "Push Code" from the push URL you have been
+                    given. For example, the push code for
+                    https://base.url/api/push/12345678?status=up&msg=OK&ping=
+                    would be 12345678
+                example: 12345678
             states:
                 type: array
                 items:

@@ -1787,8 +1705,8 @@ properties:
                         - fail
                 uniqueItems: true
                 description: |
-                    List of one or more monitoring states to push for: "start",
-                    "finish", and/or "fail". Defaults to pushing for all
+                    List of one or more monitoring states to ping for: "start",
+                    "finish", and/or "fail". Defaults to pinging for all
                     states.
                 example:
                     - start
@@ -23,7 +23,6 @@ HOOK_NAME_TO_MODULE = {
     'cronhub': cronhub,
     'cronitor': cronitor,
     'healthchecks': healthchecks,
-    'loki': loki,
     'mariadb_databases': mariadb,
     'mongodb_databases': mongodb,
     'mysql_databases': mysql,

@@ -31,7 +30,8 @@ HOOK_NAME_TO_MODULE = {
     'pagerduty': pagerduty,
     'postgresql_databases': postgresql,
     'sqlite_databases': sqlite,
-    'uptime_kuma': uptimekuma,
+    'loki': loki,
+    'uptimekuma': uptimekuma,
 }
 
 
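As background on this mapping: hook names from the configuration are dispatched to same-named functions on the module registered here. A toy illustration of that pattern follows; the stand-in modules below are not borgmatic's.

```python
from types import SimpleNamespace

# Stand-in hook "modules" exposing the same ping_monitor entry point.
uptimekuma = SimpleNamespace(ping_monitor=lambda *args: print('pinging Uptime Kuma', args))
healthchecks = SimpleNamespace(ping_monitor=lambda *args: print('pinging Healthchecks', args))

HOOK_NAME_TO_MODULE = {
    'healthchecks': healthchecks,
    'uptime_kuma': uptimekuma,  # spelled 'uptimekuma' on the addUptimeKumaTests branch
}


def call_hook(function_name, hook_name, *args):
    # Look up the module for the configured hook and call the requested function on it.
    module = HOOK_NAME_TO_MODULE[hook_name]

    return getattr(module, function_name)(*args)


call_hook('ping_monitor', 'uptime_kuma', {'push_url': 'https://example.uptime.kuma/api/push/abcd1234'})
```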
@@ -15,7 +15,7 @@ MONITOR_STATE_TO_HEALTHCHECKS = {
     monitor.State.LOG: 'log',
 }
 
-DEFAULT_PING_BODY_LIMIT_BYTES = 100000
+DEFAULT_PING_BODY_LIMIT_BYTES = 1500
 HANDLER_IDENTIFIER = 'healthchecks'
 
 
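For context, the ping body limit caps how many bytes of buffered logs get sent along with a Healthchecks ping (main restores the documented 100,000 bytes; the branch still has 1500). A rough sketch of what such a limit implies, assuming simple truncation; the helper below is illustrative and not borgmatic's actual implementation.

```python
DEFAULT_PING_BODY_LIMIT_BYTES = 100000  # value on main; the branch still has 1500


def truncate_ping_body(body, limit_bytes=DEFAULT_PING_BODY_LIMIT_BYTES):
    # Keep at most limit_bytes of UTF-8 encoded log text.
    encoded = body.encode('utf-8')

    if len(encoded) <= limit_bytes:
        return body

    return encoded[:limit_bytes].decode('utf-8', errors='ignore')


print(len(truncate_ping_body('x' * 200000).encode('utf-8')))  # 100000
```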
@@ -2,13 +2,13 @@ from enum import Enum
 
 MONITOR_HOOK_NAMES = (
     'apprise',
-    'cronhub',
-    'cronitor',
     'healthchecks',
-    'loki',
-    'ntfy',
+    'cronitor',
+    'cronhub',
     'pagerduty',
-    'uptime_kuma',
+    'ntfy',
+    'loki',
+    'uptimekuma',
 )
 
 
@@ -40,7 +40,9 @@ def ping_monitor(hook_config, config, config_filename, state, monitoring_log_lev
         return
 
     hostname = platform.node()
-    local_timestamp = datetime.datetime.now(datetime.timezone.utc).astimezone().isoformat()
+    local_timestamp = (
+        datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).astimezone().isoformat()
+    )
     payload = json.dumps(
         {
             'routing_key': hook_config['integration_key'],
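The two timestamp expressions in this hunk are equivalent: both yield a timezone-aware timestamp converted to the local zone. The snippet below just demonstrates that, and notes that `utcnow()` is the older spelling (deprecated as of Python 3.12).

```python
import datetime

# Spelling on main.
newer = datetime.datetime.now(datetime.timezone.utc).astimezone().isoformat()

# Spelling on the addUptimeKumaTests branch; utcnow() is deprecated as of Python 3.12.
older = (
    datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).astimezone().isoformat()
)

# Apart from the microseconds elapsed between the two calls, the strings match.
print(newer)
print(older)
```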
@@ -6,7 +6,7 @@ logger = logging.getLogger(__name__)
 
 
 def initialize_monitor(
-    push_url, config, config_filename, monitoring_log_level, dry_run
+    ping_url, config, config_filename, monitoring_log_level, dry_run
 ):  # pragma: no cover
     '''
     No initialization is necessary for this monitor.

@@ -16,38 +16,43 @@ def initialize_monitor(
 
 def ping_monitor(hook_config, config, config_filename, state, monitoring_log_level, dry_run):
     '''
-    Make a get request to the configured Uptime Kuma push_url. Use the given configuration filename
-    in any log entries. If this is a dry run, then don't actually push anything.
+    Ping the configured Uptime Kuma push_code. Use the given configuration filename in any log entries.
+    If this is a dry run, then don't actually ping anything.
     '''
 
     run_states = hook_config.get('states', ['start', 'finish', 'fail'])
 
-    if state.name.lower() not in run_states:
-        return
-
-    dry_run_label = ' (dry run; not actually pushing)' if dry_run else ''
-    status = 'down' if state.name.lower() == 'fail' else 'up'
-    push_url = hook_config.get('push_url', 'https://example.uptime.kuma/api/push/abcd1234')
-    query = f'status={status}&msg={state.name.lower()}'
-    logger.info(
-        f'{config_filename}: Pushing Uptime Kuma push_url {push_url}?{query} {dry_run_label}'
-    )
-    logger.debug(f'{config_filename}: Full Uptime Kuma state URL {push_url}?{query}')
-
-    if dry_run:
-        return
-
-    logging.getLogger('urllib3').setLevel(logging.ERROR)
-
-    try:
-        response = requests.get(f'{push_url}?{query}')
-        if not response.ok:
-            response.raise_for_status()
-    except requests.exceptions.RequestException as error:
-        logger.warning(f'{config_filename}: Uptime Kuma error: {error}')
+    if state.name.lower() in run_states:
+
+        dry_run_label = ' (dry run; not actually pinging)' if dry_run else ''
+
+        status = 'up'
+        if state.name.lower() == 'fail':
+            status = 'down'
+
+        base_url = hook_config.get('server', 'https://example.uptime.kuma') + '/api/push'
+        push_code = hook_config.get('push_code')
+
+        logger.info(f'{config_filename}: Pinging Uptime Kuma push_code {push_code}{dry_run_label}')
+        logger.debug(f'{config_filename}: Using Uptime Kuma ping URL {base_url}/{push_code}')
+        logger.debug(
+            f'{config_filename}: Full Uptime Kuma state URL {base_url}/{push_code}?status={status}&msg={state.name.lower()}&ping='
+        )
+
+        if not dry_run:
+            logging.getLogger('urllib3').setLevel(logging.ERROR)
+            try:
+                response = requests.get(
+                    f'{base_url}/{push_code}?status={status}&msg={state.name.lower()}&ping='
+                )
+                if not response.ok:
+                    response.raise_for_status()
+            except requests.exceptions.RequestException as error:
+                logger.warning(f'{config_filename}: Uptime Kuma error: {error}')
 
 
 def destroy_monitor(
-    push_url_or_uuid, config, config_filename, monitoring_log_level, dry_run
+    ping_url_or_uuid, config, config_filename, monitoring_log_level, dry_run
 ):  # pragma: no cover
     '''
     No destruction is necessary for this monitor.
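To make the two configuration styles concrete: on main a single `push_url` is configured and borgmatic appends the query string, while on this branch the URL is assembled from `server` and `push_code`. An illustrative side-by-side comparison follows; the URLs and push code are made-up examples.

```python
def push_url_from_main_config(hook_config, state_name):
    # main: a single push_url option; borgmatic adds the query string.
    status = 'down' if state_name == 'fail' else 'up'
    push_url = hook_config['push_url']

    return f'{push_url}?status={status}&msg={state_name}'


def push_url_from_branch_config(hook_config, state_name):
    # addUptimeKumaTests: separate server and push_code options.
    status = 'down' if state_name == 'fail' else 'up'
    base_url = hook_config['server'] + '/api/push'
    push_code = hook_config['push_code']

    return f'{base_url}/{push_code}?status={status}&msg={state_name}&ping='


print(push_url_from_main_config({'push_url': 'https://example.uptime.kuma/api/push/abcd1234'}, 'finish'))
print(push_url_from_branch_config({'server': 'https://example.uptime.kuma', 'push_code': 'abcd1234'}, 'finish'))
```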
|
@ -167,11 +167,12 @@ li {
|
||||||
padding: .25em 0;
|
padding: .25em 0;
|
||||||
}
|
}
|
||||||
li ul {
|
li ul {
|
||||||
list-style-type: disc;
|
margin: .5em 0;
|
||||||
padding-left: 2em;
|
padding-left: 1em;
|
||||||
}
|
}
|
||||||
li li:last-child {
|
li li {
|
||||||
padding-bottom: 0em;
|
padding-top: .1em;
|
||||||
|
padding-bottom: .1em;
|
||||||
}
|
}
|
||||||
|
|
||||||
/* Syntax highlighting and Code blocks */
|
/* Syntax highlighting and Code blocks */
|
||||||
|
|
|
@@ -242,57 +242,6 @@ check --force` runs `check` even if it's specified in the `skip_actions`
 option.
 
 
-### Check days
-
-<span class="minilink minilink-addedin">New in version 1.8.13</span> You can
-optionally configure checks to only run on particular days of the week. For
-instance:
-
-```yaml
-checks:
-    - name: repository
-      only_run_on:
-          - Saturday
-          - Sunday
-    - name: archives
-      only_run_on:
-          - weekday
-    - name: spot
-      only_run_on:
-          - Friday
-          - weekend
-```
-
-Each day of the week is specified in the current locale (system
-language/country settings). `weekend` and `weekday` are also accepted.
-
-Just like with `frequency`, borgmatic only makes a best effort to run checks
-on the given day of the week. For instance, if you run `borgmatic check`
-daily, then every day borgmatic will have an opportunity to determine whether
-your checks are configured to run on that day. If they are, then the checks
-run. If not, they are skipped.
-
-For instance, with the above configuration, if borgmatic is run on a Saturday,
-the `repository` check will run. But on a Monday? The repository check will
-get skipped. And if borgmatic is never run on a Saturday or a Sunday, that
-check will never get a chance to run.
-
-Also, the day of the week configuration applies *after* any configured
-`frequency` for a check. So for instance, imagine the following configuration:
-
-```yaml
-checks:
-    - name: repository
-      frequency: 2 weeks
-      only_run_on:
-          - Monday
-```
-
-If you run borgmatic daily with that configuration, then borgmatic will first
-wait two weeks after the previous check before running the check again—on the
-first Monday after the `frequency` duration elapses.
-
-
 ### Running only checks
 
 <span class="minilink minilink-addedin">New in version 1.7.1</span> If you
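One practical note on the removed documentation above: the removed implementation compares day names against Python's `calendar.day_name`, which follows the process locale (and defaults to English names). If you want to see exactly which spellings would be accepted on your system, a quick check like this is enough; it is a convenience snippet, not part of borgmatic itself.

```python
import calendar

print(list(calendar.day_name))        # e.g. ['Monday', 'Tuesday', ..., 'Sunday']
print(list(calendar.day_name)[0:5])   # the days matched by "weekday"
print(list(calendar.day_name)[5:7])   # the days matched by "weekend"
```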
@@ -102,9 +102,9 @@ and depend on containers for runtime dependencies. These tests do run on the
 continuous integration (CI) server, and running them on your developer machine
 is the closest thing to dev-CI parity.
 
-If you would like to run the end-to-end tests, first install Docker (or
-Podman; see below) and [Docker
-Compose](https://docs.docker.com/compose/install/). Then run:
+If you would like to run the full test suite, first install Docker (or Podman;
+see below) and [Docker Compose](https://docs.docker.com/compose/install/).
+Then run:
 
 ```bash
 scripts/run-end-to-end-tests

@@ -152,14 +152,12 @@ the following deviations from it:
 * In general, spell out words in variable names instead of shortening them.
   So, think `index` instead of `idx`. There are some notable exceptions to
   this though (like `config`).
-* Favor blank lines around `if` statements, `return`s, logical code groupings,
-  etc. Readability is more important than packing the code tightly.
 
 borgmatic code uses the [Black](https://black.readthedocs.io/en/stable/) code
 formatter, the [Flake8](http://flake8.pycqa.org/en/latest/) code checker, and
 the [isort](https://github.com/timothycrosley/isort) import orderer, so
-certain code style requirements are enforced when running automated tests. See
-the Black, Flake8, and isort documentation for more information.
+certain code style requirements will be enforced when running automated tests.
+See the Black, Flake8, and isort documentation for more information.
 
 
 ## Continuous integration
@@ -39,14 +39,13 @@ below for how to configure this.
 borgmatic integrates with these monitoring services and libraries, pinging
 them as backups happen:
 
-* [Apprise](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#apprise-hook)
-* [Cronhub](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#cronhub-hook)
-* [Cronitor](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#cronitor-hook)
-* [Grafana Loki](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#loki-hook)
 * [Healthchecks](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#healthchecks-hook)
-* [ntfy](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#ntfy-hook)
+* [Cronitor](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#cronitor-hook)
+* [Cronhub](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#cronhub-hook)
 * [PagerDuty](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#pagerduty-hook)
-* [Uptime Kuma](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#uptime-kuma-hook)
+* [ntfy](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#ntfy-hook)
+* [Grafana Loki](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#loki-hook)
+* [Apprise](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#apprise-hook)
 
 The idea is that you'll receive an alert when something goes wrong or when the
 service doesn't hear from borgmatic for a configured interval (if supported).

@@ -506,61 +505,6 @@ See the [configuration
 reference](https://torsion.org/borgmatic/docs/reference/configuration/) for
 details.
 
-## Uptime Kuma hook
-
-[Uptime Kuma](https://uptime.kuma.pet) is an easy-to-use, self-hosted
-monitoring tool and can provide a Push monitor type to accept HTTP `GET`
-requests from a service instead of contacting it directly.
-
-Uptime Kuma allows you to see a history of monitor states and can in turn
-alert via ntfy, Gotify, Matrix, Apprise, Email, and many more.
-
-An example configuration is shown here with all the available options:
-
-```yaml
-uptime_kuma:
-    push_url: https://kuma.my-domain.com/api/push/abcd1234
-    states:
-        - start
-        - finish
-        - fail
-```
-
-The `push_url` is provided to your from your Uptime Kuma service and
-originally includes a query string—the text including and after the question
-mark (`?`). But please do not include the query string in the `push_url`
-configuration; borgmatic will add this automatically depending on the state of
-your backup.
-
-Using `start`, `finish` and `fail` states means you will get two "up beats" in
-Uptime Kuma for successful backups and the ability to see failures if and when
-the backup started (was there a `start` beat?).
-
-A reasonable base-level configuration for an Uptime Kuma Monitor for a backup
-is below:
-
-```ini
-# These are to be entered into Uptime Kuma and not into your borgmatic
-# configuration.
-
-# Push monitors wait for the client to contact Uptime Kuma instead of Uptime
-# Kuma contacting the client. This is perfect for backup monitoring.
-Monitor Type = Push
-
-Heartbeat Interval = 90000    # = 25 hours = 1 day + 1 hour
-
-# Wait 6 times the Heartbeat Retry (below) before logging a heartbeat missed.
-Retries = 6
-
-# Multiplied by Retries this gives a grace period within which the monitor
-# goes into the "Pending" state.
-Heartbeat Retry = 360    # = 10 minutes
-
-# For each Heartbeat Interval if the backup fails repeatedly, a notification
-# is sent each time.
-Resend Notification every X times = 1
-```
-
-
 ## Scripting borgmatic
 
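If you want to verify a push monitor by hand before wiring it into borgmatic, a request like the one the removed hook makes is enough. The URL below is a placeholder taken from the removed example configuration; substitute the push URL from your own Uptime Kuma instance.

```python
import requests

push_url = 'https://kuma.my-domain.com/api/push/abcd1234'

# Mimic what the hook does: report an "up" beat with a state message.
response = requests.get(f'{push_url}?status=up&msg=finish')
response.raise_for_status()

print(response.status_code, response.text)
```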
@@ -10,17 +10,20 @@ eleventyNavigation:
 If case you're interested in [developing on
 borgmatic](https://torsion.org/borgmatic/docs/how-to/develop-on-borgmatic/),
 here's an abridged primer on how its Python source code is organized to help
-you get started. Starting at the top level, we have:
+you get started. At the top level we have:
 
-* [borgmatic](https://projects.torsion.org/borgmatic-collective/borgmatic/src/branch/main/borgmatic): The main borgmatic source module. Most of the code is here. Within that:
-  * [actions](https://projects.torsion.org/borgmatic-collective/borgmatic/src/branch/main/borgmatic/actions): borgmatic-specific logic for running each action (create, list, check, etc.).
-  * [borg](https://projects.torsion.org/borgmatic-collective/borgmatic/src/branch/main/borgmatic/borg): Lower-level code that's responsible for interacting with Borg to run each action.
-  * [commands](https://projects.torsion.org/borgmatic-collective/borgmatic/src/branch/main/borgmatic/commands): Looking to add a new flag or action? Start here. This contains borgmatic's entry point, argument parsing, and shell completion.
-  * [config](https://projects.torsion.org/borgmatic-collective/borgmatic/src/branch/main/borgmatic/config): Code responsible for loading, normalizing, and validating borgmatic's configuration.
-  * [hooks](https://projects.torsion.org/borgmatic-collective/borgmatic/src/branch/main/borgmatic/hooks): Looking to add a new database or monitoring integration? Start here.
+* [borgmatic](https://projects.torsion.org/borgmatic-collective/borgmatic/src/branch/main/borgmatic): The main borgmatic source module. Most of the code is here.
 * [docs](https://projects.torsion.org/borgmatic-collective/borgmatic/src/branch/main/docs): How-to and reference documentation, including the document you're reading now.
 * [sample](https://projects.torsion.org/borgmatic-collective/borgmatic/src/branch/main/sample): Example configurations for cron and systemd.
 * [scripts](https://projects.torsion.org/borgmatic-collective/borgmatic/src/branch/main/scripts): Dev-facing scripts for things like building documentation and running end-to-end tests.
 * [tests](https://projects.torsion.org/borgmatic-collective/borgmatic/src/branch/main/tests): Automated tests organized by: end-to-end, integration, and unit.
 
+Within the `borgmatic` directory you'll find:
+
+* [actions](https://projects.torsion.org/borgmatic-collective/borgmatic/src/branch/main/borgmatic/actions): Mid-level code for running each borgmatic action (create, list, check, etc.).
+* [borg](https://projects.torsion.org/borgmatic-collective/borgmatic/src/branch/main/borgmatic/borg): Lower-level code that actually shells out to Borg for each action.
+* [commands](https://projects.torsion.org/borgmatic-collective/borgmatic/src/branch/main/borgmatic/commands): Looking to add a new flag or action? Start here. This contains borgmatic's entry point, argument parsing, and shell completion.
+* [config](https://projects.torsion.org/borgmatic-collective/borgmatic/src/branch/main/borgmatic/config): Code responsible for loading, normalizing, and validating borgmatic's configuration.
+* [hooks](https://projects.torsion.org/borgmatic-collective/borgmatic/src/branch/main/borgmatic/hooks): Looking to add a new database or monitoring integration? Start here.
+
 So, broadly speaking, the control flow goes: `commands` → `config` followed by `commands` → `actions` → `borg` and `hooks`.
setup.py (2 changed lines)

@@ -1,6 +1,6 @@
 from setuptools import find_packages, setup
 
-VERSION = '1.8.13.dev0'
+VERSION = '1.8.12.dev0'
 
 
 setup(
@@ -113,74 +113,6 @@ def test_filter_checks_on_frequency_retains_check_without_frequency():
     ) == ('archives',)
 
 
-def test_filter_checks_on_frequency_retains_check_with_empty_only_run_on():
-    flexmock(module).should_receive('parse_frequency').and_return(None)
-
-    assert module.filter_checks_on_frequency(
-        config={'checks': [{'name': 'archives', 'only_run_on': []}]},
-        borg_repository_id='repo',
-        checks=('archives',),
-        force=False,
-        archives_check_id='1234',
-        datetime_now=flexmock(weekday=lambda: 0),
-    ) == ('archives',)
-
-
-def test_filter_checks_on_frequency_retains_check_with_only_run_on_matching_today():
-    flexmock(module).should_receive('parse_frequency').and_return(None)
-
-    assert module.filter_checks_on_frequency(
-        config={'checks': [{'name': 'archives', 'only_run_on': [module.calendar.day_name[0]]}]},
-        borg_repository_id='repo',
-        checks=('archives',),
-        force=False,
-        archives_check_id='1234',
-        datetime_now=flexmock(weekday=lambda: 0),
-    ) == ('archives',)
-
-
-def test_filter_checks_on_frequency_retains_check_with_only_run_on_matching_today_via_weekday_value():
-    flexmock(module).should_receive('parse_frequency').and_return(None)
-
-    assert module.filter_checks_on_frequency(
-        config={'checks': [{'name': 'archives', 'only_run_on': ['weekday']}]},
-        borg_repository_id='repo',
-        checks=('archives',),
-        force=False,
-        archives_check_id='1234',
-        datetime_now=flexmock(weekday=lambda: 0),
-    ) == ('archives',)
-
-
-def test_filter_checks_on_frequency_retains_check_with_only_run_on_matching_today_via_weekend_value():
-    flexmock(module).should_receive('parse_frequency').and_return(None)
-
-    assert module.filter_checks_on_frequency(
-        config={'checks': [{'name': 'archives', 'only_run_on': ['weekend']}]},
-        borg_repository_id='repo',
-        checks=('archives',),
-        force=False,
-        archives_check_id='1234',
-        datetime_now=flexmock(weekday=lambda: 6),
-    ) == ('archives',)
-
-
-def test_filter_checks_on_frequency_skips_check_with_only_run_on_not_matching_today():
-    flexmock(module).should_receive('parse_frequency').and_return(None)
-
-    assert (
-        module.filter_checks_on_frequency(
-            config={'checks': [{'name': 'archives', 'only_run_on': [module.calendar.day_name[5]]}]},
-            borg_repository_id='repo',
-            checks=('archives',),
-            force=False,
-            archives_check_id='1234',
-            datetime_now=flexmock(weekday=lambda: 0),
-        )
-        == ()
-    )
-
-
 def test_filter_checks_on_frequency_retains_check_with_elapsed_frequency():
     flexmock(module).should_receive('parse_frequency').and_return(
         module.datetime.timedelta(hours=1)

@@ -236,7 +168,7 @@ def test_filter_checks_on_frequency_skips_check_with_unelapsed_frequency():
     )
 
 
-def test_filter_checks_on_frequency_retains_check_with_unelapsed_frequency_and_force():
+def test_filter_checks_on_frequency_restains_check_with_unelapsed_frequency_and_force():
     assert module.filter_checks_on_frequency(
         config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]},
         borg_repository_id='repo',
@@ -222,35 +222,35 @@ def test_make_archive_filter_flags_with_default_checks_and_prefix_includes_match
     assert flags == ('--match-archives', 'sh:foo-*')
 
 
-def test_make_check_name_flags_with_repository_check_returns_flag():
-    flags = module.make_check_name_flags({'repository'}, ())
+def test_make_check_flags_with_repository_check_returns_flag():
+    flags = module.make_check_flags({'repository'}, ())
 
     assert flags == ('--repository-only',)
 
 
-def test_make_check_name_flags_with_archives_check_returns_flag():
-    flags = module.make_check_name_flags({'archives'}, ())
+def test_make_check_flags_with_archives_check_returns_flag():
+    flags = module.make_check_flags({'archives'}, ())
 
     assert flags == ('--archives-only',)
 
 
-def test_make_check_name_flags_with_archives_check_and_archive_filter_flags_includes_those_flags():
-    flags = module.make_check_name_flags({'archives'}, ('--match-archives', 'sh:foo-*'))
+def test_make_check_flags_with_archives_check_and_archive_filter_flags_includes_those_flags():
+    flags = module.make_check_flags({'archives'}, ('--match-archives', 'sh:foo-*'))
 
     assert flags == ('--archives-only', '--match-archives', 'sh:foo-*')
 
 
-def test_make_check_name_flags_without_archives_check_and_with_archive_filter_flags_includes_those_flags():
-    flags = module.make_check_name_flags({'repository'}, ('--match-archives', 'sh:foo-*'))
+def test_make_check_flags_without_archives_check_and_with_archive_filter_flags_includes_those_flags():
+    flags = module.make_check_flags({'repository'}, ('--match-archives', 'sh:foo-*'))
 
     assert flags == ('--repository-only',)
 
 
-def test_make_check_name_flags_with_data_check_returns_flag_and_implies_archives():
+def test_make_check_flags_with_data_check_returns_flag_and_implies_archives():
     flexmock(module.feature).should_receive('available').and_return(True)
     flexmock(module.flags).should_receive('make_match_archives_flags').and_return(())
 
-    flags = module.make_check_name_flags({'data'}, ())
+    flags = module.make_check_flags({'data'}, ())
 
     assert flags == (
         '--archives-only',

@@ -258,20 +258,20 @@ def test_make_check_name_flags_with_data_check_returns_flag_and_implies_archives
     )
 
 
-def test_make_check_name_flags_with_extract_omits_extract_flag():
+def test_make_check_flags_with_extract_omits_extract_flag():
     flexmock(module.feature).should_receive('available').and_return(True)
     flexmock(module.flags).should_receive('make_match_archives_flags').and_return(())
 
-    flags = module.make_check_name_flags({'extract'}, ())
+    flags = module.make_check_flags({'extract'}, ())
 
     assert flags == ()
 
 
-def test_make_check_name_flags_with_repository_and_data_checks_does_not_return_repository_only():
+def test_make_check_flags_with_repository_and_data_checks_does_not_return_repository_only():
     flexmock(module.feature).should_receive('available').and_return(True)
     flexmock(module.flags).should_receive('make_match_archives_flags').and_return(())
 
-    flags = module.make_check_name_flags(
+    flags = module.make_check_flags(
         {
             'repository',
             'data',
|
||||||
|
|
||||||
def test_check_archives_with_progress_passes_through_to_borg():
|
def test_check_archives_with_progress_passes_through_to_borg():
|
||||||
config = {}
|
config = {}
|
||||||
flexmock(module).should_receive('make_check_name_flags').and_return(())
|
flexmock(module).should_receive('make_check_flags').and_return(())
|
||||||
|
flexmock(module).should_receive('execute_command').never()
|
||||||
flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
|
flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
|
||||||
flexmock(module.environment).should_receive('make_environment')
|
flexmock(module.environment).should_receive('make_environment')
|
||||||
flexmock(module).should_receive('execute_command').with_args(
|
flexmock(module).should_receive('execute_command').with_args(
|
||||||
|
@ -348,12 +349,7 @@ def test_check_archives_with_progress_passes_through_to_borg():
|
||||||
config=config,
|
config=config,
|
||||||
local_borg_version='1.2.3',
|
local_borg_version='1.2.3',
|
||||||
check_arguments=flexmock(
|
check_arguments=flexmock(
|
||||||
progress=True,
|
progress=True, repair=None, only_checks=None, force=None, match_archives=None
|
||||||
repair=None,
|
|
||||||
only_checks=None,
|
|
||||||
force=None,
|
|
||||||
match_archives=None,
|
|
||||||
max_duration=None,
|
|
||||||
),
|
),
|
||||||
global_arguments=flexmock(log_json=False),
|
global_arguments=flexmock(log_json=False),
|
||||||
checks={'repository'},
|
checks={'repository'},
|
||||||
|
@ -363,7 +359,8 @@ def test_check_archives_with_progress_passes_through_to_borg():
|
||||||
|
|
||||||
def test_check_archives_with_repair_passes_through_to_borg():
|
def test_check_archives_with_repair_passes_through_to_borg():
|
||||||
config = {}
|
config = {}
|
||||||
flexmock(module).should_receive('make_check_name_flags').and_return(())
|
flexmock(module).should_receive('make_check_flags').and_return(())
|
||||||
|
flexmock(module).should_receive('execute_command').never()
|
||||||
flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
|
flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
|
||||||
flexmock(module.environment).should_receive('make_environment')
|
flexmock(module.environment).should_receive('make_environment')
|
||||||
flexmock(module).should_receive('execute_command').with_args(
|
flexmock(module).should_receive('execute_command').with_args(
|
||||||
|
@ -379,148 +376,7 @@ def test_check_archives_with_repair_passes_through_to_borg():
|
||||||
config=config,
|
config=config,
|
||||||
local_borg_version='1.2.3',
|
local_borg_version='1.2.3',
|
||||||
check_arguments=flexmock(
|
check_arguments=flexmock(
|
||||||
progress=None,
|
progress=None, repair=True, only_checks=None, force=None, match_archives=None
|
||||||
repair=True,
|
|
||||||
only_checks=None,
|
|
||||||
force=None,
|
|
||||||
match_archives=None,
|
|
||||||
max_duration=None,
|
|
||||||
),
|
|
||||||
global_arguments=flexmock(log_json=False),
|
|
||||||
checks={'repository'},
|
|
||||||
archive_filter_flags=(),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def test_check_archives_with_max_duration_flag_passes_through_to_borg():
|
|
||||||
config = {}
|
|
||||||
flexmock(module).should_receive('make_check_name_flags').and_return(())
|
|
||||||
flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
|
|
||||||
flexmock(module.environment).should_receive('make_environment')
|
|
||||||
flexmock(module).should_receive('execute_command').with_args(
|
|
||||||
('borg', 'check', '--max-duration', '33', 'repo'),
|
|
||||||
extra_environment=None,
|
|
||||||
borg_local_path='borg',
|
|
||||||
borg_exit_codes=None,
|
|
||||||
).once()
|
|
||||||
|
|
||||||
module.check_archives(
|
|
||||||
repository_path='repo',
|
|
||||||
config=config,
|
|
||||||
local_borg_version='1.2.3',
|
|
||||||
check_arguments=flexmock(
|
|
||||||
progress=None,
|
|
||||||
repair=None,
|
|
||||||
only_checks=None,
|
|
||||||
force=None,
|
|
||||||
match_archives=None,
|
|
||||||
max_duration=33,
|
|
||||||
),
|
|
||||||
global_arguments=flexmock(log_json=False),
|
|
||||||
checks={'repository'},
|
|
||||||
archive_filter_flags=(),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def test_check_archives_with_max_duration_flag_and_archives_check_errors():
|
|
||||||
config = {}
|
|
||||||
flexmock(module).should_receive('execute_command').never()
|
|
||||||
|
|
||||||
with pytest.raises(ValueError):
|
|
||||||
module.check_archives(
|
|
||||||
repository_path='repo',
|
|
||||||
config=config,
|
|
||||||
local_borg_version='1.2.3',
|
|
||||||
check_arguments=flexmock(
|
|
||||||
progress=None,
|
|
||||||
repair=None,
|
|
||||||
only_checks=None,
|
|
||||||
force=None,
|
|
||||||
match_archives=None,
|
|
||||||
max_duration=33,
|
|
||||||
),
|
|
||||||
global_arguments=flexmock(log_json=False),
|
|
||||||
checks={'repository', 'archives'},
|
|
||||||
archive_filter_flags=(),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def test_check_archives_with_max_duration_option_passes_through_to_borg():
|
|
||||||
config = {'checks': [{'name': 'repository', 'max_duration': 33}]}
|
|
||||||
flexmock(module).should_receive('make_check_name_flags').and_return(())
|
|
||||||
flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
|
|
||||||
flexmock(module.environment).should_receive('make_environment')
|
|
||||||
flexmock(module).should_receive('execute_command').with_args(
|
|
||||||
('borg', 'check', '--max-duration', '33', 'repo'),
|
|
||||||
extra_environment=None,
|
|
||||||
borg_local_path='borg',
|
|
||||||
borg_exit_codes=None,
|
|
||||||
).once()
|
|
||||||
|
|
||||||
module.check_archives(
|
|
||||||
repository_path='repo',
|
|
||||||
config=config,
|
|
||||||
local_borg_version='1.2.3',
|
|
||||||
check_arguments=flexmock(
|
|
||||||
progress=None,
|
|
||||||
repair=None,
|
|
||||||
only_checks=None,
|
|
||||||
force=None,
|
|
||||||
match_archives=None,
|
|
||||||
max_duration=None,
|
|
||||||
),
|
|
||||||
global_arguments=flexmock(log_json=False),
|
|
||||||
checks={'repository'},
|
|
||||||
archive_filter_flags=(),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def test_check_archives_with_max_duration_option_and_archives_check_errors():
|
|
||||||
config = {'checks': [{'name': 'repository', 'max_duration': 33}]}
|
|
||||||
flexmock(module).should_receive('execute_command').never()
|
|
||||||
|
|
||||||
with pytest.raises(ValueError):
|
|
||||||
module.check_archives(
|
|
||||||
repository_path='repo',
|
|
||||||
config=config,
|
|
||||||
local_borg_version='1.2.3',
|
|
||||||
check_arguments=flexmock(
|
|
||||||
progress=None,
|
|
||||||
repair=None,
|
|
||||||
only_checks=None,
|
|
||||||
force=None,
|
|
||||||
match_archives=None,
|
|
||||||
max_duration=None,
|
|
||||||
),
|
|
||||||
global_arguments=flexmock(log_json=False),
|
|
||||||
checks={'repository', 'archives'},
|
|
||||||
archive_filter_flags=(),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def test_check_archives_with_max_duration_flag_overrides_max_duration_option():
|
|
||||||
config = {'checks': [{'name': 'repository', 'max_duration': 33}]}
|
|
||||||
flexmock(module).should_receive('make_check_name_flags').and_return(())
|
|
||||||
flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
|
|
||||||
flexmock(module.environment).should_receive('make_environment')
|
|
||||||
flexmock(module).should_receive('execute_command').with_args(
|
|
||||||
('borg', 'check', '--max-duration', '44', 'repo'),
|
|
||||||
extra_environment=None,
|
|
||||||
borg_local_path='borg',
|
|
||||||
borg_exit_codes=None,
|
|
||||||
).once()
|
|
||||||
|
|
||||||
module.check_archives(
|
|
||||||
repository_path='repo',
|
|
||||||
config=config,
|
|
||||||
local_borg_version='1.2.3',
|
|
||||||
check_arguments=flexmock(
|
|
||||||
progress=None,
|
|
||||||
repair=None,
|
|
||||||
only_checks=None,
|
|
||||||
force=None,
|
|
||||||
match_archives=None,
|
|
||||||
max_duration=44,
|
|
||||||
),
|
),
|
||||||
global_arguments=flexmock(log_json=False),
|
global_arguments=flexmock(log_json=False),
|
||||||
checks={'repository'},
|
checks={'repository'},
|
||||||
|
@ -539,7 +395,7 @@ def test_check_archives_with_max_duration_flag_overrides_max_duration_option():
 )
 def test_check_archives_calls_borg_with_parameters(checks):
     config = {}
-    flexmock(module).should_receive('make_check_name_flags').with_args(checks, ()).and_return(())
+    flexmock(module).should_receive('make_check_flags').with_args(checks, ()).and_return(())
     flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
     insert_execute_command_mock(('borg', 'check', 'repo'))
 
@ -548,12 +404,7 @@ def test_check_archives_calls_borg_with_parameters(checks):
         config=config,
         local_borg_version='1.2.3',
         check_arguments=flexmock(
-            progress=None,
-            repair=None,
-            only_checks=None,
-            force=None,
-            match_archives=None,
-            max_duration=None,
+            progress=None, repair=None, only_checks=None, force=None, match_archives=None
         ),
         global_arguments=flexmock(log_json=False),
         checks=checks,
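
Throughout the hunks that follow, the main side mocks a helper named make_check_name_flags while the addUptimeKumaTests side still mocks make_check_flags; both receive the check names and the archive filter flags, and the tests stub the result out. A purely hypothetical sketch of the kind of mapping such a helper might perform (the exact flag names and rules are assumptions, since the diff never shows the real implementation):

# Purely hypothetical sketch; the real helper is mocked out in the tests above.
def make_check_name_flags(check_names, archive_filter_flags):
    if 'archives' in check_names or 'data' in check_names:
        # Archive-oriented checks carry the archive filter flags along.
        flags = tuple(archive_filter_flags)
        if 'repository' not in check_names:
            flags = ('--archives-only',) + flags
        if 'data' in check_names:
            flags += ('--verify-data',)
        return flags
    if 'repository' in check_names:
        return ('--repository-only',)
    return ()
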
@ -563,7 +414,7 @@ def test_check_archives_calls_borg_with_parameters(checks):
 
 def test_check_archives_with_log_info_passes_through_to_borg():
     config = {}
-    flexmock(module).should_receive('make_check_name_flags').and_return(())
+    flexmock(module).should_receive('make_check_flags').and_return(())
     flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
     insert_logging_mock(logging.INFO)
     insert_execute_command_mock(('borg', 'check', '--info', 'repo'))
@ -573,12 +424,7 @@ def test_check_archives_with_log_info_passes_through_to_borg():
         config=config,
         local_borg_version='1.2.3',
         check_arguments=flexmock(
-            progress=None,
-            repair=None,
-            only_checks=None,
-            force=None,
-            match_archives=None,
-            max_duration=None,
+            progress=None, repair=None, only_checks=None, force=None, match_archives=None
         ),
         global_arguments=flexmock(log_json=False),
         checks={'repository'},
@ -588,7 +434,7 @@ def test_check_archives_with_log_info_passes_through_to_borg():
 
 def test_check_archives_with_log_debug_passes_through_to_borg():
     config = {}
-    flexmock(module).should_receive('make_check_name_flags').and_return(())
+    flexmock(module).should_receive('make_check_flags').and_return(())
     flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
     insert_logging_mock(logging.DEBUG)
     insert_execute_command_mock(('borg', 'check', '--debug', '--show-rc', 'repo'))
@ -598,12 +444,7 @@ def test_check_archives_with_log_debug_passes_through_to_borg():
         config=config,
         local_borg_version='1.2.3',
         check_arguments=flexmock(
-            progress=None,
-            repair=None,
-            only_checks=None,
-            force=None,
-            match_archives=None,
-            max_duration=None,
+            progress=None, repair=None, only_checks=None, force=None, match_archives=None
         ),
         global_arguments=flexmock(log_json=False),
         checks={'repository'},
@ -614,7 +455,7 @@ def test_check_archives_with_log_debug_passes_through_to_borg():
 def test_check_archives_with_local_path_calls_borg_via_local_path():
     checks = {'repository'}
     config = {}
-    flexmock(module).should_receive('make_check_name_flags').with_args(checks, ()).and_return(())
+    flexmock(module).should_receive('make_check_flags').with_args(checks, ()).and_return(())
     flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
     insert_execute_command_mock(('borg1', 'check', 'repo'))
 
@ -623,12 +464,7 @@ def test_check_archives_with_local_path_calls_borg_via_local_path():
         config=config,
         local_borg_version='1.2.3',
         check_arguments=flexmock(
-            progress=None,
-            repair=None,
-            only_checks=None,
-            force=None,
-            match_archives=None,
-            max_duration=None,
+            progress=None, repair=None, only_checks=None, force=None, match_archives=None
         ),
         global_arguments=flexmock(log_json=False),
         checks=checks,
@ -641,7 +477,7 @@ def test_check_archives_with_exit_codes_calls_borg_using_them():
     checks = {'repository'}
     borg_exit_codes = flexmock()
     config = {'borg_exit_codes': borg_exit_codes}
-    flexmock(module).should_receive('make_check_name_flags').with_args(checks, ()).and_return(())
+    flexmock(module).should_receive('make_check_flags').with_args(checks, ()).and_return(())
     flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
     insert_execute_command_mock(('borg', 'check', 'repo'), borg_exit_codes=borg_exit_codes)
 
@ -650,12 +486,7 @@ def test_check_archives_with_exit_codes_calls_borg_using_them():
         config=config,
         local_borg_version='1.2.3',
         check_arguments=flexmock(
-            progress=None,
-            repair=None,
-            only_checks=None,
-            force=None,
-            match_archives=None,
-            max_duration=None,
+            progress=None, repair=None, only_checks=None, force=None, match_archives=None
        ),
         global_arguments=flexmock(log_json=False),
         checks=checks,
@ -666,7 +497,7 @@ def test_check_archives_with_exit_codes_calls_borg_using_them():
 def test_check_archives_with_remote_path_passes_through_to_borg():
     checks = {'repository'}
     config = {}
-    flexmock(module).should_receive('make_check_name_flags').with_args(checks, ()).and_return(())
+    flexmock(module).should_receive('make_check_flags').with_args(checks, ()).and_return(())
     flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
     insert_execute_command_mock(('borg', 'check', '--remote-path', 'borg1', 'repo'))
 
@ -675,12 +506,7 @@ def test_check_archives_with_remote_path_passes_through_to_borg():
         config=config,
         local_borg_version='1.2.3',
         check_arguments=flexmock(
-            progress=None,
-            repair=None,
-            only_checks=None,
-            force=None,
-            match_archives=None,
-            max_duration=None,
+            progress=None, repair=None, only_checks=None, force=None, match_archives=None
         ),
         global_arguments=flexmock(log_json=False),
         checks=checks,
@ -692,7 +518,7 @@ def test_check_archives_with_remote_path_passes_through_to_borg():
 def test_check_archives_with_log_json_passes_through_to_borg():
     checks = {'repository'}
     config = {}
-    flexmock(module).should_receive('make_check_name_flags').with_args(checks, ()).and_return(())
+    flexmock(module).should_receive('make_check_flags').with_args(checks, ()).and_return(())
     flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
     insert_execute_command_mock(('borg', 'check', '--log-json', 'repo'))
 
@ -701,12 +527,7 @@ def test_check_archives_with_log_json_passes_through_to_borg():
         config=config,
         local_borg_version='1.2.3',
         check_arguments=flexmock(
-            progress=None,
-            repair=None,
-            only_checks=None,
-            force=None,
-            match_archives=None,
-            max_duration=None,
+            progress=None, repair=None, only_checks=None, force=None, match_archives=None
         ),
         global_arguments=flexmock(log_json=True),
         checks=checks,
@ -717,7 +538,7 @@ def test_check_archives_with_log_json_passes_through_to_borg():
 def test_check_archives_with_lock_wait_passes_through_to_borg():
     checks = {'repository'}
     config = {'lock_wait': 5}
-    flexmock(module).should_receive('make_check_name_flags').with_args(checks, ()).and_return(())
+    flexmock(module).should_receive('make_check_flags').with_args(checks, ()).and_return(())
     flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
     insert_execute_command_mock(('borg', 'check', '--lock-wait', '5', 'repo'))
 
@ -726,12 +547,7 @@ def test_check_archives_with_lock_wait_passes_through_to_borg():
         config=config,
         local_borg_version='1.2.3',
         check_arguments=flexmock(
-            progress=None,
-            repair=None,
-            only_checks=None,
-            force=None,
-            match_archives=None,
-            max_duration=None,
+            progress=None, repair=None, only_checks=None, force=None, match_archives=None
         ),
         global_arguments=flexmock(log_json=False),
         checks=checks,
@ -743,7 +559,7 @@ def test_check_archives_with_retention_prefix():
     checks = {'repository'}
     prefix = 'foo-'
     config = {'prefix': prefix}
-    flexmock(module).should_receive('make_check_name_flags').with_args(checks, ()).and_return(())
+    flexmock(module).should_receive('make_check_flags').with_args(checks, ()).and_return(())
     flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
     insert_execute_command_mock(('borg', 'check', 'repo'))
 
@ -752,12 +568,7 @@ def test_check_archives_with_retention_prefix():
         config=config,
         local_borg_version='1.2.3',
         check_arguments=flexmock(
-            progress=None,
-            repair=None,
-            only_checks=None,
-            force=None,
-            match_archives=None,
-            max_duration=None,
+            progress=None, repair=None, only_checks=None, force=None, match_archives=None
         ),
         global_arguments=flexmock(log_json=False),
         checks=checks,
@ -767,7 +578,7 @@ def test_check_archives_with_retention_prefix():
 
 def test_check_archives_with_extra_borg_options_passes_through_to_borg():
     config = {'extra_borg_options': {'check': '--extra --options'}}
-    flexmock(module).should_receive('make_check_name_flags').and_return(())
+    flexmock(module).should_receive('make_check_flags').and_return(())
     flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
     insert_execute_command_mock(('borg', 'check', '--extra', '--options', 'repo'))
 
@ -776,12 +587,7 @@ def test_check_archives_with_extra_borg_options_passes_through_to_borg():
         config=config,
         local_borg_version='1.2.3',
         check_arguments=flexmock(
-            progress=None,
-            repair=None,
-            only_checks=None,
-            force=None,
-            match_archives=None,
-            max_duration=None,
+            progress=None, repair=None, only_checks=None, force=None, match_archives=None
         ),
         global_arguments=flexmock(log_json=False),
         checks={'repository'},
@ -791,9 +597,7 @@ def test_check_archives_with_extra_borg_options_passes_through_to_borg():
 
 def test_check_archives_with_match_archives_passes_through_to_borg():
     config = {}
-    flexmock(module).should_receive('make_check_name_flags').and_return(
-        ('--match-archives', 'foo-*')
-    )
+    flexmock(module).should_receive('make_check_flags').and_return(('--match-archives', 'foo-*'))
     flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
     flexmock(module.environment).should_receive('make_environment')
     flexmock(module).should_receive('execute_command').with_args(
@ -808,12 +612,7 @@ def test_check_archives_with_match_archives_passes_through_to_borg():
         config=config,
         local_borg_version='1.2.3',
         check_arguments=flexmock(
-            progress=None,
-            repair=None,
-            only_checks=None,
-            force=None,
-            match_archives='foo-*',
-            max_duration=None,
+            progress=None, repair=None, only_checks=None, force=None, match_archives='foo-*'
         ),
         global_arguments=flexmock(log_json=False),
         checks={'archives'},
@ -693,7 +693,6 @@ def test_make_base_create_command_includes_exclude_patterns_in_borg_command():
         ('one_file_system', True, True, ('--one-file-system',)),
         ('upload_rate_limit', 100, True, ('--upload-ratelimit', '100')),
         ('upload_rate_limit', 100, False, ('--remote-ratelimit', '100')),
-        ('upload_buffer_size', 160, True, ('--upload-buffer', '160')),
         ('numeric_ids', True, True, ('--numeric-ids',)),
         ('numeric_ids', True, False, ('--numeric-owner',)),
         ('read_special', True, True, ('--read-special',)),
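
The removed parametrize row shows the main side mapping an upload_buffer_size option of 160 to Borg's --upload-buffer flag, alongside the existing upload_rate_limit mapping. A hypothetical sketch of that option-to-flag mapping, not part of the diff (the helper name and the feature_available handling are assumptions based on the table's third column):

# Hypothetical sketch, not from this diff; illustrates the mapping the parametrize row exercises.
def make_upload_flags(config, feature_available):
    flags = ()
    rate_limit = config.get('upload_rate_limit')
    if rate_limit is not None:
        # Newer Borg spells this --upload-ratelimit; older Borg uses --remote-ratelimit.
        flags += (
            ('--upload-ratelimit', str(rate_limit))
            if feature_available
            else ('--remote-ratelimit', str(rate_limit))
        )
    buffer_size = config.get('upload_buffer_size')
    if buffer_size is not None:
        flags += ('--upload-buffer', str(buffer_size))
    return flags
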
@ -3,14 +3,15 @@ from flexmock import flexmock
 import borgmatic.hooks.monitor
 from borgmatic.hooks import uptimekuma as module
 
-DEFAULT_PUSH_URL = 'https://example.uptime.kuma/api/push/abcd1234'
-CUSTOM_PUSH_URL = 'https://uptime.example.com/api/push/efgh5678'
+default_base_url = 'https://example.uptime.kuma'
+custom_base_url = 'https://uptime.example.com'
+push_code = 'abcd1234'
 
 
 def test_ping_monitor_hits_default_uptimekuma_on_fail():
-    hook_config = {}
+    hook_config = {'push_code': push_code}
     flexmock(module.requests).should_receive('get').with_args(
-        f'{DEFAULT_PUSH_URL}?status=down&msg=fail'
+        f'{default_base_url}/api/push/{push_code}?status=down&msg=fail&ping='
     ).and_return(flexmock(ok=True)).once()
 
     module.ping_monitor(
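
The two sides configure the Uptime Kuma hook differently: main takes a complete push_url and appends only ?status=...&msg=..., while addUptimeKumaTests takes a server base URL plus a push_code and builds {server}/api/push/{push_code}?status=...&msg=...&ping=. A hypothetical sketch derived only from the URLs these tests expect, not part of the diff (the function name and the fallback defaults are assumptions):

# Hypothetical sketch, not from this diff; shows the URL shapes the tests above assert.
def make_push_url(hook_config, status, message):
    if 'push_url' in hook_config:
        # main: the full push URL is configured directly.
        return f"{hook_config['push_url']}?status={status}&msg={message}"

    # addUptimeKumaTests: a base server URL and a push code are configured separately
    # (the fallback values here are assumptions taken from the test constants).
    server = hook_config.get('server', 'https://example.uptime.kuma')
    code = hook_config.get('push_code', 'abcd1234')
    return f"{server}/api/push/{code}?status={status}&msg={message}&ping="

For example, make_push_url({'push_code': 'abcd1234'}, 'down', 'fail') yields the URL the first test expects.
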
@ -24,9 +25,9 @@ def test_ping_monitor_hits_default_uptimekuma_on_fail():
 
 
 def test_ping_monitor_hits_custom_uptimekuma_on_fail():
-    hook_config = {'push_url': CUSTOM_PUSH_URL}
+    hook_config = {'server': custom_base_url, 'push_code': push_code}
     flexmock(module.requests).should_receive('get').with_args(
-        f'{CUSTOM_PUSH_URL}?status=down&msg=fail'
+        f'{custom_base_url}/api/push/{push_code}?status=down&msg=fail&ping='
     ).and_return(flexmock(ok=True)).once()
 
     module.ping_monitor(
@ -39,10 +40,10 @@ def test_ping_monitor_hits_custom_uptimekuma_on_fail():
     )
 
 
-def test_ping_monitor_custom_uptimekuma_on_start():
-    hook_config = {'push_url': CUSTOM_PUSH_URL}
+def test_ping_monitor_hits_default_uptimekuma_on_start():
+    hook_config = {'push_code': push_code}
     flexmock(module.requests).should_receive('get').with_args(
-        f'{CUSTOM_PUSH_URL}?status=up&msg=start'
+        f'{default_base_url}/api/push/{push_code}?status=up&msg=start&ping='
     ).and_return(flexmock(ok=True)).once()
 
     module.ping_monitor(
@ -55,10 +56,26 @@ def test_ping_monitor_custom_uptimekuma_on_start():
     )
 
 
-def test_ping_monitor_custom_uptimekuma_on_finish():
-    hook_config = {'push_url': CUSTOM_PUSH_URL}
+def test_ping_monitor_custom_uptimekuma_on_start():
+    hook_config = {'server': custom_base_url, 'push_code': push_code}
     flexmock(module.requests).should_receive('get').with_args(
-        f'{CUSTOM_PUSH_URL}?status=up&msg=finish'
+        f'{custom_base_url}/api/push/{push_code}?status=up&msg=start&ping='
+    ).and_return(flexmock(ok=True)).once()
+
+    module.ping_monitor(
+        hook_config,
+        {},
+        'config.yaml',
+        borgmatic.hooks.monitor.State.START,
+        monitoring_log_level=1,
+        dry_run=False,
+    )
+
+
+def test_ping_monitor_hits_default_uptimekuma_on_finish():
+    hook_config = {'push_code': push_code}
+    flexmock(module.requests).should_receive('get').with_args(
+        f'{default_base_url}/api/push/{push_code}?status=up&msg=finish&ping='
     ).and_return(flexmock(ok=True)).once()
 
     module.ping_monitor(
@ -71,8 +88,24 @@ def test_ping_monitor_custom_uptimekuma_on_finish():
     )
 
 
-def test_ping_monitor_does_not_hit_custom_uptimekuma_on_fail_dry_run():
-    hook_config = {'push_url': CUSTOM_PUSH_URL}
+def test_ping_monitor_custom_uptimekuma_on_finish():
+    hook_config = {'server': custom_base_url, 'push_code': push_code}
+    flexmock(module.requests).should_receive('get').with_args(
+        f'{custom_base_url}/api/push/{push_code}?status=up&msg=finish&ping='
+    ).and_return(flexmock(ok=True)).once()
+
+    module.ping_monitor(
+        hook_config,
+        {},
+        'config.yaml',
+        borgmatic.hooks.monitor.State.FINISH,
+        monitoring_log_level=1,
+        dry_run=False,
+    )
+
+
+def test_ping_monitor_does_not_hit_default_uptimekuma_on_fail_dry_run():
+    hook_config = {'push_code': push_code}
     flexmock(module.requests).should_receive('get').never()
 
     module.ping_monitor(
@ -85,8 +118,22 @@ def test_ping_monitor_does_not_hit_custom_uptimekuma_on_fail_dry_run():
     )
 
 
-def test_ping_monitor_does_not_hit_custom_uptimekuma_on_start_dry_run():
-    hook_config = {'push_url': CUSTOM_PUSH_URL}
+def test_ping_monitor_does_not_hit_custom_uptimekuma_on_fail_dry_run():
+    hook_config = {'server': custom_base_url, 'push_code': push_code}
+    flexmock(module.requests).should_receive('get').never()
+
+    module.ping_monitor(
+        hook_config,
+        {},
+        'config.yaml',
+        borgmatic.hooks.monitor.State.FAIL,
+        monitoring_log_level=1,
+        dry_run=True,
+    )
+
+
+def test_ping_monitor_does_not_hit_default_uptimekuma_on_start_dry_run():
+    hook_config = {'push_code': push_code}
     flexmock(module.requests).should_receive('get').never()
 
     module.ping_monitor(
@ -99,8 +146,36 @@ def test_ping_monitor_does_not_hit_custom_uptimekuma_on_start_dry_run():
     )
 
 
+def test_ping_monitor_does_not_hit_custom_uptimekuma_on_start_dry_run():
+    hook_config = {'server': custom_base_url, 'push_code': push_code}
+    flexmock(module.requests).should_receive('get').never()
+
+    module.ping_monitor(
+        hook_config,
+        {},
+        'config.yaml',
+        borgmatic.hooks.monitor.State.START,
+        monitoring_log_level=1,
+        dry_run=True,
+    )
+
+
+def test_ping_monitor_does_not_hit_default_uptimekuma_on_finish_dry_run():
+    hook_config = {'push_code': push_code}
+    flexmock(module.requests).should_receive('get').never()
+
+    module.ping_monitor(
+        hook_config,
+        {},
+        'config.yaml',
+        borgmatic.hooks.monitor.State.FINISH,
+        monitoring_log_level=1,
+        dry_run=True,
+    )
+
+
 def test_ping_monitor_does_not_hit_custom_uptimekuma_on_finish_dry_run():
-    hook_config = {'push_url': CUSTOM_PUSH_URL}
+    hook_config = {'server': custom_base_url, 'push_code': push_code}
     flexmock(module.requests).should_receive('get').never()
 
     module.ping_monitor(
@ -114,9 +189,9 @@ def test_ping_monitor_does_not_hit_custom_uptimekuma_on_finish_dry_run():
 
 
 def test_ping_monitor_with_connection_error_logs_warning():
-    hook_config = {'push_url': CUSTOM_PUSH_URL}
+    hook_config = {'push_code': push_code}
     flexmock(module.requests).should_receive('get').with_args(
-        f'{CUSTOM_PUSH_URL}?status=down&msg=fail'
+        f'{default_base_url}/api/push/{push_code}?status=down&msg=fail&ping='
     ).and_raise(module.requests.exceptions.ConnectionError)
     flexmock(module.logger).should_receive('warning').once()
 
@ -131,13 +206,13 @@ def test_ping_monitor_with_connection_error_logs_warning():
 
 
 def test_ping_monitor_with_other_error_logs_warning():
-    hook_config = {'push_url': CUSTOM_PUSH_URL}
+    hook_config = {'push_code': push_code}
     response = flexmock(ok=False)
     response.should_receive('raise_for_status').and_raise(
         module.requests.exceptions.RequestException
     )
     flexmock(module.requests).should_receive('post').with_args(
-        f'{CUSTOM_PUSH_URL}?status=down&msg=fail'
+        f'{default_base_url}/api/push/{push_code}?status=down&msg=fail&ping='
     ).and_return(response)
     flexmock(module.logger).should_receive('warning').once()
 
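
The two warning tests above imply that the hook never lets monitoring failures propagate: a connection error and a non-OK response (surfaced through raise_for_status) are both reduced to a logged warning. A hypothetical sketch of that error handling, not part of the diff (the function name ping_push_url is an assumption):

# Hypothetical sketch, not from this diff; shows the warning-only error handling the tests imply.
import logging

import requests

logger = logging.getLogger(__name__)


def ping_push_url(push_url, dry_run=False):
    if dry_run:
        # Dry runs never touch the network, matching the *_dry_run tests above.
        return
    try:
        response = requests.get(push_url)
        if not response.ok:
            response.raise_for_status()
    except requests.exceptions.RequestException as error:
        # Both connection errors and HTTP errors end up here as a warning.
        logger.warning(f'Uptime Kuma error: {error}')
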
@ -149,17 +224,3 @@ def test_ping_monitor_with_other_error_logs_warning():
         monitoring_log_level=1,
         dry_run=False,
     )
-
-
-def test_ping_monitor_with_invalid_run_state():
-    hook_config = {'push_url': CUSTOM_PUSH_URL}
-    flexmock(module.requests).should_receive('get').never()
-
-    module.ping_monitor(
-        hook_config,
-        {},
-        'config.yaml',
-        borgmatic.hooks.monitor.State.LOG,
-        monitoring_log_level=1,
-        dry_run=True,
-    )