Remove configuration sections (#723).

Reviewed-on: #723
Dan Helfman 2023-07-14 03:10:51 +00:00
commit e913cddcd1
128 changed files with 3644 additions and 3778 deletions

NEWS
View File

@ -10,11 +10,23 @@
"check --repair".
* When merging two configuration files, error gracefully if the two files do not adhere to the same
format.
* #721: Remove configuration sections ("location:", "storage:", "hooks:" etc.), while still keeping
deprecated support for them. Now, all options are at the same level, and you don't need to worry
about commenting/uncommenting section headers when you change an option.
* #721: BREAKING: The retention prefix and the consistency prefix can no longer have different
values (unless one is not set).
* #721: BREAKING: The storage umask and the hooks umask can no longer have different values (unless
one is not set).
* BREAKING: Flags like "--config" that previously took multiple values now need to be given once
per value, e.g. "--config first.yaml --config second.yaml" instead of "--config first.yaml
second.yaml". This prevents argument parsing errors on ambiguous commands.
* BREAKING: Remove the deprecated (and silently ignored) "--successful" flag on the "list" action,
as newer versions of Borg list successful (non-checkpoint) archives by default.
* All deprecated configuration option values now generate warning logs.
* Remove the deprecated (and non-functional) "--excludes" flag in favor of excludes within
configuration.
* Fix an error when logging too-long command output during error handling. Now, long command output
is truncated before logging.
1.7.15
* #326: Add configuration options and command-line flags for backing up a database from one

View File

@ -16,50 +16,41 @@ The canonical home of borgmatic is at <a href="https://torsion.org/borgmatic">ht
Here's an example configuration file:
```yaml
-location:
-    # List of source directories to backup.
-    source_directories:
-        - /home
-        - /etc
+# List of source directories to backup.
+source_directories:
+    - /home
+    - /etc

-    # Paths of local or remote repositories to backup to.
-    repositories:
-        - path: ssh://k8pDxu32@k8pDxu32.repo.borgbase.com/./repo
-          label: borgbase
-        - path: /var/lib/backups/local.borg
-          label: local
+# Paths of local or remote repositories to backup to.
+repositories:
+    - path: ssh://k8pDxu32@k8pDxu32.repo.borgbase.com/./repo
+      label: borgbase
+    - path: /var/lib/backups/local.borg
+      label: local

-retention:
-    # Retention policy for how many backups to keep.
-    keep_daily: 7
-    keep_weekly: 4
-    keep_monthly: 6
+# Retention policy for how many backups to keep.
+keep_daily: 7
+keep_weekly: 4
+keep_monthly: 6

-consistency:
-    # List of checks to run to validate your backups.
-    checks:
-        - name: repository
-        - name: archives
-          frequency: 2 weeks
+# List of checks to run to validate your backups.
+checks:
+    - name: repository
+    - name: archives
+      frequency: 2 weeks

-hooks:
-    # Custom preparation scripts to run.
-    before_backup:
-        - prepare-for-backup.sh
+# Custom preparation scripts to run.
+before_backup:
+    - prepare-for-backup.sh

-    # Databases to dump and include in backups.
-    postgresql_databases:
-        - name: users
+# Databases to dump and include in backups.
+postgresql_databases:
+    - name: users

-    # Third-party services to notify you if backups aren't happening.
-    healthchecks: https://hc-ping.com/be067061-cf96-4412-8eae-62b0c50d6a8c
+# Third-party services to notify you if backups aren't happening.
+healthchecks: https://hc-ping.com/be067061-cf96-4412-8eae-62b0c50d6a8c
```
Want to see borgmatic in action? Check out the <a
href="https://asciinema.org/a/203761?autoplay=1" target="_blank">screencast</a>.
<a href="https://asciinema.org/a/203761?autoplay=1" target="_blank"><img src="https://asciinema.org/a/203761.png" width="480"></a>
borgmatic is powered by [Borg Backup](https://www.borgbackup.org/).
## Integrations

View File

@ -9,7 +9,7 @@ logger = logging.getLogger(__name__)
def run_borg(
repository,
storage,
config,
local_borg_version,
borg_arguments,
global_arguments,
@ -28,7 +28,7 @@ def run_borg(
archive_name = borgmatic.borg.rlist.resolve_archive_name(
repository['path'],
borg_arguments.archive,
storage,
config,
local_borg_version,
global_arguments,
local_path,
@ -36,7 +36,7 @@ def run_borg(
)
borgmatic.borg.borg.run_arbitrary_borg(
repository['path'],
storage,
config,
local_borg_version,
options=borg_arguments.options,
archive=archive_name,

View File

@ -8,7 +8,7 @@ logger = logging.getLogger(__name__)
def run_break_lock(
repository,
storage,
config,
local_borg_version,
break_lock_arguments,
global_arguments,
@ -26,7 +26,7 @@ def run_break_lock(
)
borgmatic.borg.break_lock.break_lock(
repository['path'],
storage,
config,
local_borg_version,
global_arguments,
local_path=local_path,

View File

@ -10,10 +10,7 @@ logger = logging.getLogger(__name__)
def run_check(
config_filename,
repository,
location,
storage,
consistency,
hooks,
config,
hook_context,
local_borg_version,
check_arguments,
@ -30,8 +27,8 @@ def run_check(
return
borgmatic.hooks.command.execute_hook(
hooks.get('before_check'),
hooks.get('umask'),
config.get('before_check'),
config.get('umask'),
config_filename,
'pre-check',
global_arguments.dry_run,
@ -40,9 +37,7 @@ def run_check(
logger.info(f'{repository.get("label", repository["path"])}: Running consistency checks')
borgmatic.borg.check.check_archives(
repository['path'],
location,
storage,
consistency,
config,
local_borg_version,
global_arguments,
local_path=local_path,
@ -53,8 +48,8 @@ def run_check(
force=check_arguments.force,
)
borgmatic.hooks.command.execute_hook(
hooks.get('after_check'),
hooks.get('umask'),
config.get('after_check'),
config.get('umask'),
config_filename,
'post-check',
global_arguments.dry_run,
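To illustrate the hook lookup change above: command hooks such as "before_check" and the shared "umask" are now read from the top level of the single configuration dict rather than from a nested "hooks" section. A minimal sketch, with invented option values:

```python
# Hypothetical flattened configuration dict, as run_check() now receives it;
# the option values here are invented for illustration.
config = {
    'repositories': [{'path': '/var/lib/backups/local.borg'}],
    'before_check': ['echo "Starting checks."'],
    'umask': '0077',
}

# Previously: hooks.get('before_check') on a nested section dict.
# Now: the same options come straight off the top-level config.
assert config.get('before_check') == ['echo "Starting checks."']
assert config.get('umask') == '0077'
```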

View File

@ -11,9 +11,7 @@ logger = logging.getLogger(__name__)
def run_compact(
config_filename,
repository,
storage,
retention,
hooks,
config,
hook_context,
local_borg_version,
compact_arguments,
@ -31,8 +29,8 @@ def run_compact(
return
borgmatic.hooks.command.execute_hook(
hooks.get('before_compact'),
hooks.get('umask'),
config.get('before_compact'),
config.get('umask'),
config_filename,
'pre-compact',
global_arguments.dry_run,
@ -45,7 +43,7 @@ def run_compact(
borgmatic.borg.compact.compact_segments(
global_arguments.dry_run,
repository['path'],
storage,
config,
local_borg_version,
global_arguments,
local_path=local_path,
@ -59,8 +57,8 @@ def run_compact(
f'{repository.get("label", repository["path"])}: Skipping compact (only available/needed in Borg 1.2+)'
)
borgmatic.hooks.command.execute_hook(
hooks.get('after_compact'),
hooks.get('umask'),
config.get('after_compact'),
config.get('umask'),
config_filename,
'post-compact',
global_arguments.dry_run,

View File

@ -43,7 +43,6 @@ def get_config_paths(bootstrap_arguments, global_arguments, local_borg_version):
),
[borgmatic_manifest_path],
{},
{},
local_borg_version,
global_arguments,
extract_to_stdout=True,
@ -95,7 +94,6 @@ def run_bootstrap(bootstrap_arguments, global_arguments, local_borg_version):
),
[config_path.lstrip(os.path.sep) for config_path in manifest_config_paths],
{},
{},
local_borg_version,
global_arguments,
extract_to_stdout=False,

View File

@ -2,6 +2,7 @@ import logging
import borgmatic.config.generate
import borgmatic.config.validate
import borgmatic.logger
logger = logging.getLogger(__name__)
@ -14,6 +15,7 @@ def run_generate(generate_arguments, global_arguments):
Raise FileExistsError if a file already exists at the destination path and the generate
arguments do not have overwrite set.
'''
borgmatic.logger.add_custom_log_levels()
dry_run_label = ' (dry run; not actually writing anything)' if global_arguments.dry_run else ''
logger.answer(

View File

@ -1,6 +1,7 @@
import logging
import borgmatic.config.generate
import borgmatic.logger
logger = logging.getLogger(__name__)
@ -14,6 +15,8 @@ def run_validate(validate_arguments, configs):
loading machinery prior to here, so this function mainly exists to support additional validate
flags like "--show".
'''
borgmatic.logger.add_custom_log_levels()
if validate_arguments.show:
for config_path, config in configs.items():
if len(configs) > 1:

View File

@ -17,7 +17,7 @@ import borgmatic.hooks.dump
logger = logging.getLogger(__name__)
def create_borgmatic_manifest(location, config_paths, dry_run):
def create_borgmatic_manifest(config, config_paths, dry_run):
'''
Create a borgmatic manifest file to store the paths to the configuration files used to create
the archive.
@ -25,7 +25,7 @@ def create_borgmatic_manifest(location, config_paths, dry_run):
if dry_run:
return
borgmatic_source_directory = location.get(
borgmatic_source_directory = config.get(
'borgmatic_source_directory', borgmatic.borg.state.DEFAULT_BORGMATIC_SOURCE_DIRECTORY
)
@ -49,9 +49,7 @@ def create_borgmatic_manifest(location, config_paths, dry_run):
def run_create(
config_filename,
repository,
location,
storage,
hooks,
config,
hook_context,
local_borg_version,
create_arguments,
@ -71,8 +69,8 @@ def run_create(
return
borgmatic.hooks.command.execute_hook(
hooks.get('before_backup'),
hooks.get('umask'),
config.get('before_backup'),
config.get('umask'),
config_filename,
'pre-backup',
global_arguments.dry_run,
@ -81,30 +79,25 @@ def run_create(
logger.info(f'{repository.get("label", repository["path"])}: Creating archive{dry_run_label}')
borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
'remove_database_dumps',
hooks,
config,
repository['path'],
borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
location,
global_arguments.dry_run,
)
active_dumps = borgmatic.hooks.dispatch.call_hooks(
'dump_databases',
hooks,
config,
repository['path'],
borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
location,
global_arguments.dry_run,
)
create_borgmatic_manifest(
location, global_arguments.used_config_paths, global_arguments.dry_run
)
create_borgmatic_manifest(config, global_arguments.used_config_paths, global_arguments.dry_run)
stream_processes = [process for processes in active_dumps.values() for process in processes]
json_output = borgmatic.borg.create.create_archive(
global_arguments.dry_run,
repository['path'],
location,
storage,
config,
local_borg_version,
global_arguments,
local_path=local_path,
@ -120,15 +113,14 @@ def run_create(
borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
'remove_database_dumps',
hooks,
config,
config_filename,
borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
location,
global_arguments.dry_run,
)
borgmatic.hooks.command.execute_hook(
hooks.get('after_backup'),
hooks.get('umask'),
config.get('after_backup'),
config.get('umask'),
config_filename,
'post-backup',
global_arguments.dry_run,
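The create_borgmatic_manifest() change above follows the same pattern: the borgmatic source directory is looked up directly on the flat config. A minimal sketch, assuming the "~/.borgmatic" default that borgmatic.borg.state defines:

```python
import os

DEFAULT_BORGMATIC_SOURCE_DIRECTORY = '~/.borgmatic'  # assumed default, per borgmatic.borg.state

config = {}  # no explicit "borgmatic_source_directory" option set
borgmatic_source_directory = config.get(
    'borgmatic_source_directory', DEFAULT_BORGMATIC_SOURCE_DIRECTORY
)

# The manifest would land under this expanded directory.
manifest_directory = os.path.expanduser(borgmatic_source_directory)
print(manifest_directory)  # e.g. /home/user/.borgmatic
```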

View File

@ -9,7 +9,7 @@ logger = logging.getLogger(__name__)
def run_export_tar(
repository,
storage,
config,
local_borg_version,
export_tar_arguments,
global_arguments,
@ -31,7 +31,7 @@ def run_export_tar(
borgmatic.borg.rlist.resolve_archive_name(
repository['path'],
export_tar_arguments.archive,
storage,
config,
local_borg_version,
global_arguments,
local_path,
@ -39,7 +39,7 @@ def run_export_tar(
),
export_tar_arguments.paths,
export_tar_arguments.destination,
storage,
config,
local_borg_version,
global_arguments,
local_path=local_path,

View File

@ -11,9 +11,7 @@ logger = logging.getLogger(__name__)
def run_extract(
config_filename,
repository,
location,
storage,
hooks,
config,
hook_context,
local_borg_version,
extract_arguments,
@ -25,8 +23,8 @@ def run_extract(
Run the "extract" action for the given repository.
'''
borgmatic.hooks.command.execute_hook(
hooks.get('before_extract'),
hooks.get('umask'),
config.get('before_extract'),
config.get('umask'),
config_filename,
'pre-extract',
global_arguments.dry_run,
@ -44,15 +42,14 @@ def run_extract(
borgmatic.borg.rlist.resolve_archive_name(
repository['path'],
extract_arguments.archive,
storage,
config,
local_borg_version,
global_arguments,
local_path,
remote_path,
),
extract_arguments.paths,
location,
storage,
config,
local_borg_version,
global_arguments,
local_path=local_path,
@ -62,8 +59,8 @@ def run_extract(
progress=extract_arguments.progress,
)
borgmatic.hooks.command.execute_hook(
hooks.get('after_extract'),
hooks.get('umask'),
config.get('after_extract'),
config.get('umask'),
config_filename,
'post-extract',
global_arguments.dry_run,

View File

@ -11,7 +11,7 @@ logger = logging.getLogger(__name__)
def run_info(
repository,
storage,
config,
local_borg_version,
info_arguments,
global_arguments,
@ -33,7 +33,7 @@ def run_info(
archive_name = borgmatic.borg.rlist.resolve_archive_name(
repository['path'],
info_arguments.archive,
storage,
config,
local_borg_version,
global_arguments,
local_path,
@ -41,7 +41,7 @@ def run_info(
)
json_output = borgmatic.borg.info.display_archives_info(
repository['path'],
storage,
config,
local_borg_version,
borgmatic.actions.arguments.update_arguments(info_arguments, archive=archive_name),
global_arguments,

View File

@ -10,7 +10,7 @@ logger = logging.getLogger(__name__)
def run_list(
repository,
storage,
config,
local_borg_version,
list_arguments,
global_arguments,
@ -34,7 +34,7 @@ def run_list(
archive_name = borgmatic.borg.rlist.resolve_archive_name(
repository['path'],
list_arguments.archive,
storage,
config,
local_borg_version,
global_arguments,
local_path,
@ -42,7 +42,7 @@ def run_list(
)
json_output = borgmatic.borg.list.list_archive(
repository['path'],
storage,
config,
local_borg_version,
borgmatic.actions.arguments.update_arguments(list_arguments, archive=archive_name),
global_arguments,

View File

@ -9,7 +9,7 @@ logger = logging.getLogger(__name__)
def run_mount(
repository,
storage,
config,
local_borg_version,
mount_arguments,
global_arguments,
@ -34,14 +34,14 @@ def run_mount(
borgmatic.borg.rlist.resolve_archive_name(
repository['path'],
mount_arguments.archive,
storage,
config,
local_borg_version,
global_arguments,
local_path,
remote_path,
),
mount_arguments,
storage,
config,
local_borg_version,
global_arguments,
local_path=local_path,

View File

@ -10,9 +10,7 @@ logger = logging.getLogger(__name__)
def run_prune(
config_filename,
repository,
storage,
retention,
hooks,
config,
hook_context,
local_borg_version,
prune_arguments,
@ -30,8 +28,8 @@ def run_prune(
return
borgmatic.hooks.command.execute_hook(
hooks.get('before_prune'),
hooks.get('umask'),
config.get('before_prune'),
config.get('umask'),
config_filename,
'pre-prune',
global_arguments.dry_run,
@ -41,8 +39,7 @@ def run_prune(
borgmatic.borg.prune.prune_archives(
global_arguments.dry_run,
repository['path'],
storage,
retention,
config,
local_borg_version,
prune_arguments,
global_arguments,
@ -50,8 +47,8 @@ def run_prune(
remote_path=remote_path,
)
borgmatic.hooks.command.execute_hook(
hooks.get('after_prune'),
hooks.get('umask'),
config.get('after_prune'),
config.get('umask'),
config_filename,
'post-prune',
global_arguments.dry_run,

View File

@ -8,7 +8,7 @@ logger = logging.getLogger(__name__)
def run_rcreate(
repository,
storage,
config,
local_borg_version,
rcreate_arguments,
global_arguments,
@ -27,7 +27,7 @@ def run_rcreate(
borgmatic.borg.rcreate.create_repository(
global_arguments.dry_run,
repository['path'],
storage,
config,
local_borg_version,
global_arguments,
rcreate_arguments.encryption_mode,

View File

@ -18,12 +18,12 @@ UNSPECIFIED_HOOK = object()
def get_configured_database(
hooks, archive_database_names, hook_name, database_name, configuration_database_name=None
config, archive_database_names, hook_name, database_name, configuration_database_name=None
):
'''
Find the first database with the given hook name and database name in the configured hooks
dict and the given archive database names dict (from hook name to database names contained in
a particular backup archive). If UNSPECIFIED_HOOK is given as the hook name, search all database
Find the first database with the given hook name and database name in the configuration dict and
the given archive database names dict (from hook name to database names contained in a
particular backup archive). If UNSPECIFIED_HOOK is given as the hook name, search all database
hooks for the named database. If a configuration database name is given, use that instead of the
database name to look up the database in the given hooks configuration.
@ -33,9 +33,13 @@ def get_configured_database(
configuration_database_name = database_name
if hook_name == UNSPECIFIED_HOOK:
hooks_to_search = hooks
hooks_to_search = {
hook_name: value
for (hook_name, value) in config.items()
if hook_name in borgmatic.hooks.dump.DATABASE_HOOK_NAMES
}
else:
hooks_to_search = {hook_name: hooks[hook_name]}
hooks_to_search = {hook_name: config[hook_name]}
return next(
(
@ -58,9 +62,7 @@ def get_configured_hook_name_and_database(hooks, database_name):
def restore_single_database(
repository,
location,
storage,
hooks,
config,
local_borg_version,
global_arguments,
local_path,
@ -81,10 +83,9 @@ def restore_single_database(
dump_pattern = borgmatic.hooks.dispatch.call_hooks(
'make_database_dump_pattern',
hooks,
config,
repository['path'],
borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
location,
database['name'],
)[hook_name]
@ -94,8 +95,7 @@ def restore_single_database(
repository=repository['path'],
archive=archive_name,
paths=borgmatic.hooks.dump.convert_glob_patterns_to_borg_patterns([dump_pattern]),
location_config=location,
storage_config=storage,
config=config,
local_borg_version=local_borg_version,
global_arguments=global_arguments,
local_path=local_path,
@ -109,10 +109,10 @@ def restore_single_database(
# Run a single database restore, consuming the extract stdout (if any).
borgmatic.hooks.dispatch.call_hooks(
'restore_database_dump',
{hook_name: [database]},
config,
repository['path'],
database['name'],
borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
location,
global_arguments.dry_run,
extract_process,
connection_params,
@ -122,21 +122,20 @@ def restore_single_database(
def collect_archive_database_names(
repository,
archive,
location,
storage,
config,
local_borg_version,
global_arguments,
local_path,
remote_path,
):
'''
Given a local or remote repository path, a resolved archive name, a location configuration dict,
a storage configuration dict, the local Borg version, global_arguments an argparse.Namespace,
and local and remote Borg paths, query the archive for the names of databases it contains and
return them as a dict from hook name to a sequence of database names.
Given a local or remote repository path, a resolved archive name, a configuration dict, the
local Borg version, global arguments as an argparse.Namespace, and local and remote Borg paths,
query the archive for the names of databases it contains and return them as a dict from hook
name to a sequence of database names.
'''
borgmatic_source_directory = os.path.expanduser(
location.get(
config.get(
'borgmatic_source_directory', borgmatic.borg.state.DEFAULT_BORGMATIC_SOURCE_DIRECTORY
)
).lstrip('/')
@ -146,7 +145,7 @@ def collect_archive_database_names(
dump_paths = borgmatic.borg.list.capture_archive_listing(
repository,
archive,
storage,
config,
local_borg_version,
global_arguments,
list_path=parent_dump_path,
@ -249,9 +248,7 @@ def ensure_databases_found(restore_names, remaining_restore_names, found_names):
def run_restore(
repository,
location,
storage,
hooks,
config,
local_borg_version,
restore_arguments,
global_arguments,
@ -275,17 +272,16 @@ def run_restore(
borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
'remove_database_dumps',
hooks,
config,
repository['path'],
borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
location,
global_arguments.dry_run,
)
archive_name = borgmatic.borg.rlist.resolve_archive_name(
repository['path'],
restore_arguments.archive,
storage,
config,
local_borg_version,
global_arguments,
local_path,
@ -294,8 +290,7 @@ def run_restore(
archive_database_names = collect_archive_database_names(
repository['path'],
archive_name,
location,
storage,
config,
local_borg_version,
global_arguments,
local_path,
@ -315,7 +310,7 @@ def run_restore(
for hook_name, database_names in restore_names.items():
for database_name in database_names:
found_hook_name, found_database = get_configured_database(
hooks, archive_database_names, hook_name, database_name
config, archive_database_names, hook_name, database_name
)
if not found_database:
@ -327,9 +322,7 @@ def run_restore(
found_names.add(database_name)
restore_single_database(
repository,
location,
storage,
hooks,
config,
local_borg_version,
global_arguments,
local_path,
@ -340,12 +333,12 @@ def run_restore(
connection_params,
)
# For any database that weren't found via exact matches in the hooks configuration, try to
# fallback to "all" entries.
# For any databases that weren't found via exact matches in the configuration, try to fall back
# to "all" entries.
for hook_name, database_names in remaining_restore_names.items():
for database_name in database_names:
found_hook_name, found_database = get_configured_database(
hooks, archive_database_names, hook_name, database_name, 'all'
config, archive_database_names, hook_name, database_name, 'all'
)
if not found_database:
@ -357,9 +350,7 @@ def run_restore(
restore_single_database(
repository,
location,
storage,
hooks,
config,
local_borg_version,
global_arguments,
local_path,
@ -372,10 +363,9 @@ def run_restore(
borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
'remove_database_dumps',
hooks,
config,
repository['path'],
borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
location,
global_arguments.dry_run,
)
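One consequence of dropping sections is visible in get_configured_database() above: database hooks can no longer be read from a dedicated "hooks" dict, so they get filtered out of the top-level config by hook name. A runnable sketch using an abbreviated stand-in for the real hook-name list:

```python
# Abbreviated stand-in for borgmatic.hooks.dump.DATABASE_HOOK_NAMES.
DATABASE_HOOK_NAMES = ('postgresql_databases', 'mysql_databases')

config = {
    'repositories': [{'path': '/var/lib/backups/local.borg'}],
    'postgresql_databases': [{'name': 'users'}],
    'keep_daily': 7,
}

# Same shape as the UNSPECIFIED_HOOK branch above: keep only the keys
# that are database hooks, ignoring every other top-level option.
hooks_to_search = {
    hook_name: value
    for (hook_name, value) in config.items()
    if hook_name in DATABASE_HOOK_NAMES
}

assert hooks_to_search == {'postgresql_databases': [{'name': 'users'}]}
```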

View File

@ -9,7 +9,7 @@ logger = logging.getLogger(__name__)
def run_rinfo(
repository,
storage,
config,
local_borg_version,
rinfo_arguments,
global_arguments,
@ -31,7 +31,7 @@ def run_rinfo(
json_output = borgmatic.borg.rinfo.display_repository_info(
repository['path'],
storage,
config,
local_borg_version,
rinfo_arguments=rinfo_arguments,
global_arguments=global_arguments,

View File

@ -9,7 +9,7 @@ logger = logging.getLogger(__name__)
def run_rlist(
repository,
storage,
config,
local_borg_version,
rlist_arguments,
global_arguments,
@ -29,7 +29,7 @@ def run_rlist(
json_output = borgmatic.borg.rlist.list_repository(
repository['path'],
storage,
config,
local_borg_version,
rlist_arguments=rlist_arguments,
global_arguments=global_arguments,

View File

@ -7,7 +7,7 @@ logger = logging.getLogger(__name__)
def run_transfer(
repository,
storage,
config,
local_borg_version,
transfer_arguments,
global_arguments,
@ -23,7 +23,7 @@ def run_transfer(
borgmatic.borg.transfer.transfer_archives(
global_arguments.dry_run,
repository['path'],
storage,
config,
local_borg_version,
transfer_arguments,
global_arguments,

View File

@ -13,7 +13,7 @@ BORG_SUBCOMMANDS_WITH_SUBCOMMANDS = {'key', 'debug'}
def run_arbitrary_borg(
repository_path,
storage_config,
config,
local_borg_version,
options,
archive=None,
@ -21,13 +21,13 @@ def run_arbitrary_borg(
remote_path=None,
):
'''
Given a local or remote repository path, a storage config dict, the local Borg version, a
Given a local or remote repository path, a configuration dict, the local Borg version, a
sequence of arbitrary command-line Borg options, and an optional archive name, run an arbitrary
Borg command, passing in REPOSITORY and ARCHIVE environment variables for optional use in the
command.
'''
borgmatic.logger.add_custom_log_levels()
lock_wait = storage_config.get('lock_wait', None)
lock_wait = config.get('lock_wait', None)
try:
options = options[1:] if options[0] == '--' else options
@ -61,7 +61,7 @@ def run_arbitrary_borg(
borg_local_path=local_path,
shell=True,
extra_environment=dict(
(environment.make_environment(storage_config) or {}),
(environment.make_environment(config) or {}),
**{
'BORG_REPO': repository_path,
'ARCHIVE': archive if archive else '',
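For context on the environment handling just above: the Borg-specific variables derived from the (now flat) config get merged with repository and archive values before the arbitrary command runs. A hedged sketch; make_environment() is stubbed and the passphrase is invented:

```python
# Stand-in for environment.make_environment(config); assumes it returns
# Borg environment variables derived from top-level options.
def make_environment_stub(config):
    passphrase = config.get('encryption_passphrase')
    return {'BORG_PASSPHRASE': str(passphrase)} if passphrase else {}

config = {'encryption_passphrase': 'hunter2'}  # illustrative secret
repository_path = '/var/lib/backups/local.borg'
archive = None

extra_environment = dict(
    (make_environment_stub(config) or {}),
    **{
        'BORG_REPO': repository_path,
        'ARCHIVE': archive if archive else '',
    },
)

assert extra_environment == {
    'BORG_PASSPHRASE': 'hunter2',
    'BORG_REPO': '/var/lib/backups/local.borg',
    'ARCHIVE': '',
}
```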

View File

@ -8,19 +8,19 @@ logger = logging.getLogger(__name__)
def break_lock(
repository_path,
storage_config,
config,
local_borg_version,
global_arguments,
local_path='borg',
remote_path=None,
):
'''
Given a local or remote repository path, a storage configuration dict, the local Borg version,
an argparse.Namespace of global arguments, and optional local and remote Borg paths, break any
Given a local or remote repository path, a configuration dict, the local Borg version, an
argparse.Namespace of global arguments, and optional local and remote Borg paths, break any
repository and cache locks left over from Borg aborting.
'''
umask = storage_config.get('umask', None)
lock_wait = storage_config.get('lock_wait', None)
umask = config.get('umask', None)
lock_wait = config.get('lock_wait', None)
full_command = (
(local_path, 'break-lock')
@ -33,5 +33,5 @@ def break_lock(
+ flags.make_repository_flags(repository_path, local_borg_version)
)
borg_environment = environment.make_environment(storage_config)
borg_environment = environment.make_environment(config)
execute_command(full_command, borg_local_path=local_path, extra_environment=borg_environment)

View File

@ -19,12 +19,12 @@ DEFAULT_CHECKS = (
logger = logging.getLogger(__name__)
def parse_checks(consistency_config, only_checks=None):
def parse_checks(config, only_checks=None):
'''
Given a consistency config with a "checks" sequence of dicts and an optional list of override
Given a configuration dict with a "checks" sequence of dicts and an optional list of override
checks, return a tuple of named checks to run.
For example, given a retention config of:
For example, given a config of:
{'checks': ({'name': 'repository'}, {'name': 'archives'})}
@ -36,8 +36,7 @@ def parse_checks(consistency_config, only_checks=None):
has a name of "disabled", return an empty tuple, meaning that no checks should be run.
'''
checks = only_checks or tuple(
check_config['name']
for check_config in (consistency_config.get('checks', None) or DEFAULT_CHECKS)
check_config['name'] for check_config in (config.get('checks', None) or DEFAULT_CHECKS)
)
checks = tuple(check.lower() for check in checks)
if 'disabled' in checks:
@ -90,23 +89,22 @@ def parse_frequency(frequency):
def filter_checks_on_frequency(
location_config,
consistency_config,
config,
borg_repository_id,
checks,
force,
archives_check_id=None,
):
'''
Given a location config, a consistency config with a "checks" sequence of dicts, a Borg
repository ID, a sequence of checks, whether to force checks to run, and an ID for the archives
check potentially being run (if any), filter down those checks based on the configured
"frequency" for each check as compared to its check time file.
Given a configuration dict with a "checks" sequence of dicts, a Borg repository ID, a sequence
of checks, whether to force checks to run, and an ID for the archives check potentially being
run (if any), filter down those checks based on the configured "frequency" for each check as
compared to its check time file.
In other words, a check whose check time file's timestamp is too new (based on the configured
frequency) will get cut from the returned sequence of checks. Example:
consistency_config = {
config = {
'checks': [
{
'name': 'archives',
@ -115,9 +113,9 @@ def filter_checks_on_frequency(
]
}
When this function is called with that consistency_config and "archives" in checks, "archives"
will get filtered out of the returned result if its check time file is newer than 2 weeks old,
indicating that it's not yet time to run that check again.
When this function is called with that config and "archives" in checks, "archives" will get
filtered out of the returned result if its check time file is newer than 2 weeks old, indicating
that it's not yet time to run that check again.
Raise ValueError if a frequency cannot be parsed.
'''
@ -126,7 +124,7 @@ def filter_checks_on_frequency(
if force:
return tuple(filtered_checks)
for check_config in consistency_config.get('checks', DEFAULT_CHECKS):
for check_config in config.get('checks', DEFAULT_CHECKS):
check = check_config['name']
if checks and check not in checks:
continue
@ -135,9 +133,7 @@ def filter_checks_on_frequency(
if not frequency_delta:
continue
check_time = probe_for_check_time(
location_config, borg_repository_id, check, archives_check_id
)
check_time = probe_for_check_time(config, borg_repository_id, check, archives_check_id)
if not check_time:
continue
@ -153,13 +149,11 @@ def filter_checks_on_frequency(
return tuple(filtered_checks)
def make_archive_filter_flags(
local_borg_version, storage_config, checks, check_last=None, prefix=None
):
def make_archive_filter_flags(local_borg_version, config, checks, check_last=None, prefix=None):
'''
Given the local Borg version, a storage configuration dict, a parsed sequence of checks, the
check last value, and a consistency check prefix, transform the checks into tuple of
command-line flags for filtering archives in a check command.
Given the local Borg version, a configuration dict, a parsed sequence of checks, the check last
value, and a consistency check prefix, transform the checks into a tuple of command-line flags
for filtering archives in a check command.
If a check_last value is given and "archives" is in checks, then include a "--last" flag. And if
a prefix value is given and "archives" is in checks, then include a "--match-archives" flag.
@ -174,8 +168,8 @@ def make_archive_filter_flags(
if prefix
else (
flags.make_match_archives_flags(
storage_config.get('match_archives'),
storage_config.get('archive_name_format'),
config.get('match_archives'),
config.get('archive_name_format'),
local_borg_version,
)
)
@ -237,14 +231,14 @@ def make_check_flags(checks, archive_filter_flags):
)
def make_check_time_path(location_config, borg_repository_id, check_type, archives_check_id=None):
def make_check_time_path(config, borg_repository_id, check_type, archives_check_id=None):
'''
Given a location configuration dict, a Borg repository ID, the name of a check type
("repository", "archives", etc.), and a unique hash of the archives filter flags, return a
path for recording that check's time (the time of that check last occurring).
Given a configuration dict, a Borg repository ID, the name of a check type ("repository",
"archives", etc.), and a unique hash of the archives filter flags, return a path for recording
that check's time (the time of that check last occurring).
'''
borgmatic_source_directory = os.path.expanduser(
location_config.get('borgmatic_source_directory', state.DEFAULT_BORGMATIC_SOURCE_DIRECTORY)
config.get('borgmatic_source_directory', state.DEFAULT_BORGMATIC_SOURCE_DIRECTORY)
)
if check_type in ('archives', 'data'):
@ -287,11 +281,11 @@ def read_check_time(path):
return None
def probe_for_check_time(location_config, borg_repository_id, check, archives_check_id):
def probe_for_check_time(config, borg_repository_id, check, archives_check_id):
'''
Given a location configuration dict, a Borg repository ID, the name of a check type
("repository", "archives", etc.), and a unique hash of the archives filter flags, return a
the corresponding check time or None if such a check time does not exist.
Given a configuration dict, a Borg repository ID, the name of a check type ("repository",
"archives", etc.), and a unique hash of the archives filter flags, return the corresponding
check time or None if such a check time does not exist.
When the check type is "archives" or "data", this function probes two different paths to find
the check time, e.g.:
@ -311,8 +305,8 @@ def probe_for_check_time(location_config, borg_repository_id, check, archives_ch
read_check_time(group[0])
for group in itertools.groupby(
(
make_check_time_path(location_config, borg_repository_id, check, archives_check_id),
make_check_time_path(location_config, borg_repository_id, check),
make_check_time_path(config, borg_repository_id, check, archives_check_id),
make_check_time_path(config, borg_repository_id, check),
)
)
)
@ -323,10 +317,10 @@ def probe_for_check_time(location_config, borg_repository_id, check, archives_ch
return None
def upgrade_check_times(location_config, borg_repository_id):
def upgrade_check_times(config, borg_repository_id):
'''
Given a location configuration dict and a Borg repository ID, upgrade any corresponding check
times on disk from old-style paths to new-style paths.
Given a configuration dict and a Borg repository ID, upgrade any corresponding check times on
disk from old-style paths to new-style paths.
Currently, the only upgrade performed is renaming an archive or data check path that looks like:
@ -337,7 +331,7 @@ def upgrade_check_times(location_config, borg_repository_id):
~/.borgmatic/checks/1234567890/archives/all
'''
for check_type in ('archives', 'data'):
new_path = make_check_time_path(location_config, borg_repository_id, check_type, 'all')
new_path = make_check_time_path(config, borg_repository_id, check_type, 'all')
old_path = os.path.dirname(new_path)
temporary_path = f'{old_path}.temp'
@ -357,9 +351,7 @@ def upgrade_check_times(location_config, borg_repository_id):
def check_archives(
repository_path,
location_config,
storage_config,
consistency_config,
config,
local_borg_version,
global_arguments,
local_path='borg',
@ -370,10 +362,9 @@ def check_archives(
force=None,
):
'''
Given a local or remote repository path, a storage config dict, a consistency config dict,
local/remote commands to run, whether to include progress information, whether to attempt a
repair, and an optional list of checks to use instead of configured checks, check the contained
Borg archives for consistency.
Given a local or remote repository path, a configuration dict, local/remote commands to run,
whether to include progress information, whether to attempt a repair, and an optional list of
checks to use instead of configured checks, check the contained Borg archives for consistency.
If there are no consistency checks to run, skip running them.
@ -383,7 +374,7 @@ def check_archives(
borg_repository_id = json.loads(
rinfo.display_repository_info(
repository_path,
storage_config,
config,
local_borg_version,
argparse.Namespace(json=True),
global_arguments,
@ -394,21 +385,20 @@ def check_archives(
except (json.JSONDecodeError, KeyError):
raise ValueError(f'Cannot determine Borg repository ID for {repository_path}')
upgrade_check_times(location_config, borg_repository_id)
upgrade_check_times(config, borg_repository_id)
check_last = consistency_config.get('check_last', None)
prefix = consistency_config.get('prefix')
configured_checks = parse_checks(consistency_config, only_checks)
check_last = config.get('check_last', None)
prefix = config.get('prefix')
configured_checks = parse_checks(config, only_checks)
lock_wait = None
extra_borg_options = storage_config.get('extra_borg_options', {}).get('check', '')
extra_borg_options = config.get('extra_borg_options', {}).get('check', '')
archive_filter_flags = make_archive_filter_flags(
local_borg_version, storage_config, configured_checks, check_last, prefix
local_borg_version, config, configured_checks, check_last, prefix
)
archives_check_id = make_archives_check_id(archive_filter_flags)
checks = filter_checks_on_frequency(
location_config,
consistency_config,
config,
borg_repository_id,
configured_checks,
force,
@ -416,7 +406,7 @@ def check_archives(
)
if set(checks).intersection({'repository', 'archives', 'data'}):
lock_wait = storage_config.get('lock_wait')
lock_wait = config.get('lock_wait')
verbosity_flags = ()
if logger.isEnabledFor(logging.INFO):
@ -437,7 +427,7 @@ def check_archives(
+ flags.make_repository_flags(repository_path, local_borg_version)
)
borg_environment = environment.make_environment(storage_config)
borg_environment = environment.make_environment(config)
# The Borg repair option triggers an interactive prompt, which won't work when output is
# captured. And progress messes with the terminal directly.
@ -450,12 +440,12 @@ def check_archives(
for check in checks:
write_check_time(
make_check_time_path(location_config, borg_repository_id, check, archives_check_id)
make_check_time_path(config, borg_repository_id, check, archives_check_id)
)
if 'extract' in checks:
extract.extract_last_archive_dry_run(
storage_config,
config,
local_borg_version,
global_arguments,
repository_path,
@ -463,4 +453,4 @@ def check_archives(
local_path,
remote_path,
)
write_check_time(make_check_time_path(location_config, borg_repository_id, 'extract'))
write_check_time(make_check_time_path(config, borg_repository_id, 'extract'))
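Since consistency options now live at the top level too, parse_checks() above takes the whole config rather than a "consistency" section. A small reimplementation sketch of its documented behavior, under that assumption:

```python
DEFAULT_CHECKS = ({'name': 'repository'}, {'name': 'archives'})

def parse_checks_sketch(config, only_checks=None):
    # Mirror the logic above: take names from the top-level "checks"
    # option (or the defaults), lowercase them, and treat a "disabled"
    # entry as disabling everything.
    checks = tuple(
        only_checks
        or [check_config['name'] for check_config in (config.get('checks') or DEFAULT_CHECKS)]
    )
    checks = tuple(check.lower() for check in checks)
    return () if 'disabled' in checks else checks

assert parse_checks_sketch({'checks': [{'name': 'repository'}, {'name': 'archives'}]}) == (
    'repository',
    'archives',
)
assert parse_checks_sketch({}) == ('repository', 'archives')
assert parse_checks_sketch({'checks': [{'name': 'disabled'}]}) == ()
```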

View File

@ -9,7 +9,7 @@ logger = logging.getLogger(__name__)
def compact_segments(
dry_run,
repository_path,
storage_config,
config,
local_borg_version,
global_arguments,
local_path='borg',
@ -19,12 +19,12 @@ def compact_segments(
threshold=None,
):
'''
Given dry-run flag, a local or remote repository path, a storage config dict, and the local
Borg version, compact the segments in a repository.
Given a dry-run flag, a local or remote repository path, a configuration dict, and the local
Borg version, compact the segments in a repository.
'''
umask = storage_config.get('umask', None)
lock_wait = storage_config.get('lock_wait', None)
extra_borg_options = storage_config.get('extra_borg_options', {}).get('compact', '')
umask = config.get('umask', None)
lock_wait = config.get('lock_wait', None)
extra_borg_options = config.get('extra_borg_options', {}).get('compact', '')
full_command = (
(local_path, 'compact')
@ -49,5 +49,5 @@ def compact_segments(
full_command,
output_log_level=logging.INFO,
borg_local_path=local_path,
extra_environment=environment.make_environment(storage_config),
extra_environment=environment.make_environment(config),
)
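The command assembly above follows the usual borgmatic pattern: an optional flag is appended only when the corresponding top-level option is set. A hedged, self-contained sketch with invented values:

```python
# Illustrative option values; --umask and --lock-wait are real Borg flags.
config = {'umask': '0077', 'lock_wait': 5}
local_path = 'borg'

full_command = (
    (local_path, 'compact')
    + (('--umask', str(config['umask'])) if config.get('umask') else ())
    + (('--lock-wait', str(config['lock_wait'])) if config.get('lock_wait') else ())
)

assert full_command == ('borg', 'compact', '--umask', '0077', '--lock-wait', '5')
```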

View File

@ -146,12 +146,12 @@ def ensure_files_readable(*filename_lists):
open(file_object).close()
def make_pattern_flags(location_config, pattern_filename=None):
def make_pattern_flags(config, pattern_filename=None):
'''
Given a location config dict with a potential patterns_from option, and a filename containing
any additional patterns, return the corresponding Borg flags for those files as a tuple.
Given a configuration dict with a potential patterns_from option, and a filename containing any
additional patterns, return the corresponding Borg flags for those files as a tuple.
'''
pattern_filenames = tuple(location_config.get('patterns_from') or ()) + (
pattern_filenames = tuple(config.get('patterns_from') or ()) + (
(pattern_filename,) if pattern_filename else ()
)
@ -162,12 +162,12 @@ def make_pattern_flags(location_config, pattern_filename=None):
)
def make_exclude_flags(location_config, exclude_filename=None):
def make_exclude_flags(config, exclude_filename=None):
'''
Given a location config dict with various exclude options, and a filename containing any exclude
Given a configuration dict with various exclude options, and a filename containing any exclude
patterns, return the corresponding Borg flags as a tuple.
'''
exclude_filenames = tuple(location_config.get('exclude_from') or ()) + (
exclude_filenames = tuple(config.get('exclude_from') or ()) + (
(exclude_filename,) if exclude_filename else ()
)
exclude_from_flags = tuple(
@ -175,17 +175,15 @@ def make_exclude_flags(location_config, exclude_filename=None):
('--exclude-from', exclude_filename) for exclude_filename in exclude_filenames
)
)
caches_flag = ('--exclude-caches',) if location_config.get('exclude_caches') else ()
caches_flag = ('--exclude-caches',) if config.get('exclude_caches') else ()
if_present_flags = tuple(
itertools.chain.from_iterable(
('--exclude-if-present', if_present)
for if_present in location_config.get('exclude_if_present', ())
for if_present in config.get('exclude_if_present', ())
)
)
keep_exclude_tags_flags = (
('--keep-exclude-tags',) if location_config.get('keep_exclude_tags') else ()
)
exclude_nodump_flags = ('--exclude-nodump',) if location_config.get('exclude_nodump') else ()
keep_exclude_tags_flags = ('--keep-exclude-tags',) if config.get('keep_exclude_tags') else ()
exclude_nodump_flags = ('--exclude-nodump',) if config.get('exclude_nodump') else ()
return (
exclude_from_flags
@ -294,6 +292,7 @@ def collect_special_file_paths(
capture_stderr=True,
working_directory=working_directory,
extra_environment=borg_environment,
borg_local_path=local_path,
)
paths = tuple(
@ -326,8 +325,7 @@ def check_all_source_directories_exist(source_directories):
def create_archive(
dry_run,
repository_path,
location_config,
storage_config,
config,
local_borg_version,
global_arguments,
local_path='borg',
@ -339,72 +337,70 @@ def create_archive(
stream_processes=None,
):
'''
Given vebosity/dry-run flags, a local or remote repository path, a location config dict, and a
storage config dict, create a Borg archive and return Borg's JSON output (if any).
Given verbosity/dry-run flags, a local or remote repository path, and a configuration dict,
create a Borg archive and return Borg's JSON output (if any).
If a sequence of stream processes is given (instances of subprocess.Popen), then execute the
create command while also triggering the given processes to produce output.
'''
borgmatic.logger.add_custom_log_levels()
borgmatic_source_directories = expand_directories(
collect_borgmatic_source_directories(location_config.get('borgmatic_source_directory'))
collect_borgmatic_source_directories(config.get('borgmatic_source_directory'))
)
if location_config.get('source_directories_must_exist', False):
check_all_source_directories_exist(location_config.get('source_directories'))
if config.get('source_directories_must_exist', False):
check_all_source_directories_exist(config.get('source_directories'))
sources = deduplicate_directories(
map_directories_to_devices(
expand_directories(
tuple(location_config.get('source_directories', ()))
tuple(config.get('source_directories', ()))
+ borgmatic_source_directories
+ tuple(global_arguments.used_config_paths)
)
),
additional_directory_devices=map_directories_to_devices(
expand_directories(pattern_root_directories(location_config.get('patterns')))
expand_directories(pattern_root_directories(config.get('patterns')))
),
)
ensure_files_readable(location_config.get('patterns_from'), location_config.get('exclude_from'))
ensure_files_readable(config.get('patterns_from'), config.get('exclude_from'))
try:
working_directory = os.path.expanduser(location_config.get('working_directory'))
working_directory = os.path.expanduser(config.get('working_directory'))
except TypeError:
working_directory = None
pattern_file = (
write_pattern_file(location_config.get('patterns'), sources)
if location_config.get('patterns') or location_config.get('patterns_from')
write_pattern_file(config.get('patterns'), sources)
if config.get('patterns') or config.get('patterns_from')
else None
)
exclude_file = write_pattern_file(
expand_home_directories(location_config.get('exclude_patterns'))
)
checkpoint_interval = storage_config.get('checkpoint_interval', None)
checkpoint_volume = storage_config.get('checkpoint_volume', None)
chunker_params = storage_config.get('chunker_params', None)
compression = storage_config.get('compression', None)
upload_rate_limit = storage_config.get('upload_rate_limit', None)
umask = storage_config.get('umask', None)
lock_wait = storage_config.get('lock_wait', None)
exclude_file = write_pattern_file(expand_home_directories(config.get('exclude_patterns')))
checkpoint_interval = config.get('checkpoint_interval', None)
checkpoint_volume = config.get('checkpoint_volume', None)
chunker_params = config.get('chunker_params', None)
compression = config.get('compression', None)
upload_rate_limit = config.get('upload_rate_limit', None)
umask = config.get('umask', None)
lock_wait = config.get('lock_wait', None)
list_filter_flags = make_list_filter_flags(local_borg_version, dry_run)
files_cache = location_config.get('files_cache')
archive_name_format = storage_config.get('archive_name_format', DEFAULT_ARCHIVE_NAME_FORMAT)
extra_borg_options = storage_config.get('extra_borg_options', {}).get('create', '')
files_cache = config.get('files_cache')
archive_name_format = config.get('archive_name_format', DEFAULT_ARCHIVE_NAME_FORMAT)
extra_borg_options = config.get('extra_borg_options', {}).get('create', '')
if feature.available(feature.Feature.ATIME, local_borg_version):
atime_flags = ('--atime',) if location_config.get('atime') is True else ()
atime_flags = ('--atime',) if config.get('atime') is True else ()
else:
atime_flags = ('--noatime',) if location_config.get('atime') is False else ()
atime_flags = ('--noatime',) if config.get('atime') is False else ()
if feature.available(feature.Feature.NOFLAGS, local_borg_version):
noflags_flags = ('--noflags',) if location_config.get('flags') is False else ()
noflags_flags = ('--noflags',) if config.get('flags') is False else ()
else:
noflags_flags = ('--nobsdflags',) if location_config.get('flags') is False else ()
noflags_flags = ('--nobsdflags',) if config.get('flags') is False else ()
if feature.available(feature.Feature.NUMERIC_IDS, local_borg_version):
numeric_ids_flags = ('--numeric-ids',) if location_config.get('numeric_ids') else ()
numeric_ids_flags = ('--numeric-ids',) if config.get('numeric_ids') else ()
else:
numeric_ids_flags = ('--numeric-owner',) if location_config.get('numeric_ids') else ()
numeric_ids_flags = ('--numeric-owner',) if config.get('numeric_ids') else ()
if feature.available(feature.Feature.UPLOAD_RATELIMIT, local_borg_version):
upload_ratelimit_flags = (
@ -415,7 +411,7 @@ def create_archive(
('--remote-ratelimit', str(upload_rate_limit)) if upload_rate_limit else ()
)
if stream_processes and location_config.get('read_special') is False:
if stream_processes and config.get('read_special') is False:
logger.warning(
f'{repository_path}: Ignoring configured "read_special" value of false, as true is needed for database hooks.'
)
@ -423,23 +419,19 @@ def create_archive(
create_command = (
tuple(local_path.split(' '))
+ ('create',)
+ make_pattern_flags(location_config, pattern_file.name if pattern_file else None)
+ make_exclude_flags(location_config, exclude_file.name if exclude_file else None)
+ make_pattern_flags(config, pattern_file.name if pattern_file else None)
+ make_exclude_flags(config, exclude_file.name if exclude_file else None)
+ (('--checkpoint-interval', str(checkpoint_interval)) if checkpoint_interval else ())
+ (('--checkpoint-volume', str(checkpoint_volume)) if checkpoint_volume else ())
+ (('--chunker-params', chunker_params) if chunker_params else ())
+ (('--compression', compression) if compression else ())
+ upload_ratelimit_flags
+ (
('--one-file-system',)
if location_config.get('one_file_system') or stream_processes
else ()
)
+ (('--one-file-system',) if config.get('one_file_system') or stream_processes else ())
+ numeric_ids_flags
+ atime_flags
+ (('--noctime',) if location_config.get('ctime') is False else ())
+ (('--nobirthtime',) if location_config.get('birthtime') is False else ())
+ (('--read-special',) if location_config.get('read_special') or stream_processes else ())
+ (('--noctime',) if config.get('ctime') is False else ())
+ (('--nobirthtime',) if config.get('birthtime') is False else ())
+ (('--read-special',) if config.get('read_special') or stream_processes else ())
+ noflags_flags
+ (('--files-cache', files_cache) if files_cache else ())
+ (('--remote-path', remote_path) if remote_path else ())
@ -470,11 +462,11 @@ def create_archive(
# the terminal directly.
output_file = DO_NOT_CAPTURE if progress else None
borg_environment = environment.make_environment(storage_config)
borg_environment = environment.make_environment(config)
# If database hooks are enabled (as indicated by streaming processes), exclude files that might
# cause Borg to hang. But skip this if the user has explicitly set the "read_special" to True.
if stream_processes and not location_config.get('read_special'):
if stream_processes and not config.get('read_special'):
logger.debug(f'{repository_path}: Collecting special file paths')
special_file_paths = collect_special_file_paths(
create_command,
@ -490,11 +482,11 @@ def create_archive(
)
exclude_file = write_pattern_file(
expand_home_directories(
tuple(location_config.get('exclude_patterns') or ()) + special_file_paths
tuple(config.get('exclude_patterns') or ()) + special_file_paths
),
pattern_file=exclude_file,
)
create_command += make_exclude_flags(location_config, exclude_file.name)
create_command += make_exclude_flags(config, exclude_file.name)
create_command += (
(('--info',) if logger.getEffectiveLevel() == logging.INFO and not json else ())
@ -519,6 +511,7 @@ def create_archive(
create_command,
working_directory=working_directory,
extra_environment=borg_environment,
borg_local_path=local_path,
)
else:
execute_command(
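Stepping back from the diff: make_exclude_flags() above now reads every exclude-related option off the flat config. A runnable sketch of that flag construction, with invented option values:

```python
import itertools

# Invented example options; the flag names match the ones used above.
config = {
    'exclude_caches': True,
    'exclude_if_present': ['.nobackup'],
    'keep_exclude_tags': False,
}

caches_flag = ('--exclude-caches',) if config.get('exclude_caches') else ()
if_present_flags = tuple(
    itertools.chain.from_iterable(
        ('--exclude-if-present', if_present)
        for if_present in config.get('exclude_if_present', ())
    )
)
keep_exclude_tags_flags = ('--keep-exclude-tags',) if config.get('keep_exclude_tags') else ()

assert caches_flag + if_present_flags + keep_exclude_tags_flags == (
    '--exclude-caches',
    '--exclude-if-present',
    '.nobackup',
)
```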

View File

@ -21,15 +21,15 @@ DEFAULT_BOOL_OPTION_TO_UPPERCASE_ENVIRONMENT_VARIABLE = {
}
def make_environment(storage_config):
def make_environment(config):
'''
Given a borgmatic storage configuration dict, return its options converted to a Borg environment
Given a borgmatic configuration dict, return its options converted to a Borg environment
variable dict.
'''
environment = {}
for option_name, environment_variable_name in OPTION_TO_ENVIRONMENT_VARIABLE.items():
value = storage_config.get(option_name)
value = config.get(option_name)
if value:
environment[environment_variable_name] = str(value)
@ -38,14 +38,14 @@ def make_environment(storage_config):
option_name,
environment_variable_name,
) in DEFAULT_BOOL_OPTION_TO_DOWNCASE_ENVIRONMENT_VARIABLE.items():
value = storage_config.get(option_name, False)
value = config.get(option_name, False)
environment[environment_variable_name] = 'yes' if value else 'no'
for (
option_name,
environment_variable_name,
) in DEFAULT_BOOL_OPTION_TO_UPPERCASE_ENVIRONMENT_VARIABLE.items():
value = storage_config.get(option_name, False)
value = config.get(option_name, False)
environment[environment_variable_name] = 'YES' if value else 'NO'
return environment
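To make the renamed make_environment() parameter concrete: any recognized option found at the top level of the config becomes a Borg environment variable. A minimal sketch with an abbreviated option table; the full tables live in this module:

```python
# Abbreviated stand-in for OPTION_TO_ENVIRONMENT_VARIABLE above.
OPTION_TO_ENVIRONMENT_VARIABLE = {
    'encryption_passphrase': 'BORG_PASSPHRASE',
    'ssh_command': 'BORG_RSH',
}

def make_environment_sketch(config):
    environment = {}
    for option_name, environment_variable_name in OPTION_TO_ENVIRONMENT_VARIABLE.items():
        value = config.get(option_name)
        if value:
            environment[environment_variable_name] = str(value)
    return environment

assert make_environment_sketch({'encryption_passphrase': 'hunter2'}) == {
    'BORG_PASSPHRASE': 'hunter2'
}
```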

View File

@ -13,7 +13,7 @@ def export_tar_archive(
archive,
paths,
destination_path,
storage_config,
config,
local_borg_version,
global_arguments,
local_path='borg',
@ -24,16 +24,16 @@ def export_tar_archive(
):
'''
Given a dry-run flag, a local or remote repository path, an archive name, zero or more paths to
export from the archive, a destination path to export to, a storage configuration dict, the
local Borg version, optional local and remote Borg paths, an optional filter program, whether to
include per-file details, and an optional number of path components to strip, export the archive
into the given destination path as a tar-formatted file.
export from the archive, a destination path to export to, a configuration dict, the local Borg
version, optional local and remote Borg paths, an optional filter program, whether to include
per-file details, and an optional number of path components to strip, export the archive into
the given destination path as a tar-formatted file.
If the destination path is "-", then stream the output to stdout instead of to a file.
'''
borgmatic.logger.add_custom_log_levels()
umask = storage_config.get('umask', None)
lock_wait = storage_config.get('lock_wait', None)
umask = config.get('umask', None)
lock_wait = config.get('lock_wait', None)
full_command = (
(local_path, 'export-tar')
@ -70,5 +70,5 @@ def export_tar_archive(
output_file=DO_NOT_CAPTURE if destination_path == '-' else None,
output_log_level=output_log_level,
borg_local_path=local_path,
extra_environment=environment.make_environment(storage_config),
extra_environment=environment.make_environment(config),
)

View File

@ -10,7 +10,7 @@ logger = logging.getLogger(__name__)
def extract_last_archive_dry_run(
storage_config,
config,
local_borg_version,
global_arguments,
repository_path,
@ -32,7 +32,7 @@ def extract_last_archive_dry_run(
last_archive_name = rlist.resolve_archive_name(
repository_path,
'latest',
storage_config,
config,
local_borg_version,
global_arguments,
local_path,
@ -43,7 +43,7 @@ def extract_last_archive_dry_run(
return
list_flag = ('--list',) if logger.isEnabledFor(logging.DEBUG) else ()
borg_environment = environment.make_environment(storage_config)
borg_environment = environment.make_environment(config)
full_extract_command = (
(local_path, 'extract', '--dry-run')
+ (('--remote-path', remote_path) if remote_path else ())
@ -66,8 +66,7 @@ def extract_archive(
repository,
archive,
paths,
location_config,
storage_config,
config,
local_borg_version,
global_arguments,
local_path='borg',
@ -80,22 +79,22 @@ def extract_archive(
'''
Given a dry-run flag, a local or remote repository path, an archive name, zero or more paths to
restore from the archive, the local Borg version string, an argparse.Namespace of global
arguments, location/storage configuration dicts, optional local and remote Borg paths, and an
optional destination path to extract to, extract the archive into the current directory.
arguments, a configuration dict, optional local and remote Borg paths, and an optional
destination path to extract to, extract the archive into the current directory.
If extract to stdout is True, then start the extraction streaming to stdout, and return that
extract process as an instance of subprocess.Popen.
'''
umask = storage_config.get('umask', None)
lock_wait = storage_config.get('lock_wait', None)
umask = config.get('umask', None)
lock_wait = config.get('lock_wait', None)
if progress and extract_to_stdout:
raise ValueError('progress and extract_to_stdout cannot both be set')
if feature.available(feature.Feature.NUMERIC_IDS, local_borg_version):
numeric_ids_flags = ('--numeric-ids',) if location_config.get('numeric_ids') else ()
numeric_ids_flags = ('--numeric-ids',) if config.get('numeric_ids') else ()
else:
numeric_ids_flags = ('--numeric-owner',) if location_config.get('numeric_ids') else ()
numeric_ids_flags = ('--numeric-owner',) if config.get('numeric_ids') else ()
if strip_components == 'all':
if not paths:
@ -127,7 +126,7 @@ def extract_archive(
+ (tuple(paths) if paths else ())
)
borg_environment = environment.make_environment(storage_config)
borg_environment = environment.make_environment(config)
# The progress output isn't compatible with captured and logged output, as progress messes with
# the terminal directly.
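The numeric_ids handling above is feature-gated on the Borg version (newer Borg spells the flag --numeric-ids, older Borg --numeric-owner). A sketch with the feature check stubbed out; the 1.2 version boundary is an assumption here, and borgmatic's real detection lives in borgmatic.borg.feature:

```python
# Stubbed feature check; assumes --numeric-ids arrived with Borg 1.2.
def numeric_ids_flag_available(local_borg_version):
    major, minor = (int(part) for part in local_borg_version.split('.')[:2])
    return (major, minor) >= (1, 2)

config = {'numeric_ids': True}
local_borg_version = '1.2.4'

if numeric_ids_flag_available(local_borg_version):
    numeric_ids_flags = ('--numeric-ids',) if config.get('numeric_ids') else ()
else:
    numeric_ids_flags = ('--numeric-owner',) if config.get('numeric_ids') else ()

assert numeric_ids_flags == ('--numeric-ids',)
```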

View File

@ -9,7 +9,7 @@ logger = logging.getLogger(__name__)
def display_archives_info(
repository_path,
storage_config,
config,
local_borg_version,
info_arguments,
global_arguments,
@ -17,12 +17,12 @@ def display_archives_info(
remote_path=None,
):
'''
Given a local or remote repository path, a storage config dict, the local Borg version, global
Given a local or remote repository path, a configuration dict, the local Borg version, global
arguments as an argparse.Namespace, and the arguments to the info action, display summary
information for Borg archives in the repository or return JSON summary information.
'''
borgmatic.logger.add_custom_log_levels()
lock_wait = storage_config.get('lock_wait', None)
lock_wait = config.get('lock_wait', None)
full_command = (
(local_path, 'info')
@ -50,8 +50,8 @@ def display_archives_info(
flags.make_match_archives_flags(
info_arguments.match_archives
or info_arguments.archive
or storage_config.get('match_archives'),
storage_config.get('archive_name_format'),
or config.get('match_archives'),
config.get('archive_name_format'),
local_borg_version,
)
)
@ -65,12 +65,13 @@ def display_archives_info(
if info_arguments.json:
return execute_command_and_capture_output(
full_command,
extra_environment=environment.make_environment(storage_config),
extra_environment=environment.make_environment(config),
borg_local_path=local_path,
)
else:
execute_command(
full_command,
output_log_level=logging.ANSWER,
borg_local_path=local_path,
extra_environment=environment.make_environment(storage_config),
extra_environment=environment.make_environment(config),
)

View File

@ -21,7 +21,7 @@ MAKE_FLAGS_EXCLUDES = (
def make_list_command(
repository_path,
storage_config,
config,
local_borg_version,
list_arguments,
global_arguments,
@ -29,11 +29,11 @@ def make_list_command(
remote_path=None,
):
'''
Given a local or remote repository path, a storage config dict, the arguments to the list
action, and local and remote Borg paths, return a command as a tuple to list archives or paths
within an archive.
Given a local or remote repository path, a configuration dict, the arguments to the list action,
and local and remote Borg paths, return a command as a tuple to list archives or paths within an
archive.
'''
lock_wait = storage_config.get('lock_wait', None)
lock_wait = config.get('lock_wait', None)
return (
(local_path, 'list')
@ -89,7 +89,7 @@ def make_find_paths(find_paths):
def capture_archive_listing(
repository_path,
archive,
storage_config,
config,
local_borg_version,
global_arguments,
list_path=None,
@ -97,18 +97,18 @@ def capture_archive_listing(
remote_path=None,
):
'''
Given a local or remote repository path, an archive name, a storage config dict, the local Borg
Given a local or remote repository path, an archive name, a configuration dict, the local Borg
version, global arguments as an argparse.Namespace, the archive path in which to list files, and
local and remote Borg paths, capture the output of listing that archive and return it as a list
of file paths.
'''
borg_environment = environment.make_environment(storage_config)
borg_environment = environment.make_environment(config)
return tuple(
execute_command_and_capture_output(
make_list_command(
repository_path,
storage_config,
config,
local_borg_version,
argparse.Namespace(
repository=repository_path,
@ -123,6 +123,7 @@ def capture_archive_listing(
remote_path,
),
extra_environment=borg_environment,
borg_local_path=local_path,
)
.strip('\n')
.split('\n')
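
The strip/split idiom above turns Borg's one-path-per-line listing output into a tuple of paths; a tiny self-contained illustration with made-up output:

```python
# Borg prints one path per line; strip the trailing newline, then split.
captured_output = 'etc/passwd\netc/hosts\nhome/user/.bashrc\n'
paths = tuple(captured_output.strip('\n').split('\n'))
assert paths == ('etc/passwd', 'etc/hosts', 'home/user/.bashrc')
```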
@ -131,7 +132,7 @@ def capture_archive_listing(
def list_archive(
repository_path,
storage_config,
config,
local_borg_version,
list_arguments,
global_arguments,
@ -139,7 +140,7 @@ def list_archive(
remote_path=None,
):
'''
Given a local or remote repository path, a storage config dict, the local Borg version, global
Given a local or remote repository path, a configuration dict, the local Borg version, global
arguments as an argparse.Namespace, the arguments to the list action as an argparse.Namespace,
and local and remote Borg paths, display the output of listing the files of a Borg archive (or
return JSON output). If list_arguments.find_paths are given, list the files by searching across
@ -167,7 +168,7 @@ def list_archive(
)
return rlist.list_repository(
repository_path,
storage_config,
config,
local_borg_version,
rlist_arguments,
global_arguments,
@ -187,7 +188,7 @@ def list_archive(
'The --json flag on the list action is not supported when using the --archive/--find flags.'
)
borg_environment = environment.make_environment(storage_config)
borg_environment = environment.make_environment(config)
# If there are any paths to find (and there's not a single archive already selected), start by
# getting a list of archives to search.
@ -209,7 +210,7 @@ def list_archive(
execute_command_and_capture_output(
rlist.make_rlist_command(
repository_path,
storage_config,
config,
local_borg_version,
rlist_arguments,
global_arguments,
@ -217,6 +218,7 @@ def list_archive(
remote_path,
),
extra_environment=borg_environment,
borg_local_path=local_path,
)
.strip('\n')
.split('\n')
@ -238,7 +240,7 @@ def list_archive(
main_command = make_list_command(
repository_path,
storage_config,
config,
local_borg_version,
archive_arguments,
global_arguments,


@ -10,7 +10,7 @@ def mount_archive(
repository_path,
archive,
mount_arguments,
storage_config,
config,
local_borg_version,
global_arguments,
local_path='borg',
@ -22,8 +22,8 @@ def mount_archive(
dict, the local Borg version, global arguments as an argparse.Namespace instance, and optional
local and remote Borg paths, mount the archive onto the mount point.
'''
umask = storage_config.get('umask', None)
lock_wait = storage_config.get('lock_wait', None)
umask = config.get('umask', None)
lock_wait = config.get('lock_wait', None)
full_command = (
(local_path, 'mount')
@ -58,7 +58,7 @@ def mount_archive(
+ (tuple(mount_arguments.paths) if mount_arguments.paths else ())
)
borg_environment = environment.make_environment(storage_config)
borg_environment = environment.make_environment(config)
# Don't capture the output when foreground mode is used so that ctrl-C can work properly.
if mount_arguments.foreground:


@ -7,9 +7,9 @@ from borgmatic.execute import execute_command
logger = logging.getLogger(__name__)
def make_prune_flags(storage_config, retention_config, local_borg_version):
def make_prune_flags(config, local_borg_version):
'''
Given a retention config dict mapping from option name to value, transform it into an sequence of
Given a configuration dict mapping from option name to value, transform it into a sequence of
command-line flags.
For example, given a retention config of:
@ -23,12 +23,12 @@ def make_prune_flags(storage_config, retention_config, local_borg_version):
('--keep-monthly', '6'),
)
'''
config = retention_config.copy()
prefix = config.pop('prefix', None)
flag_pairs = (
('--' + option_name.replace('_', '-'), str(value)) for option_name, value in config.items()
('--' + option_name.replace('_', '-'), str(value))
for option_name, value in config.items()
if option_name.startswith('keep_')
)
prefix = config.get('prefix')
return tuple(element for pair in flag_pairs for element in pair) + (
(
@ -39,8 +39,8 @@ def make_prune_flags(storage_config, retention_config, local_borg_version):
if prefix
else (
flags.make_match_archives_flags(
storage_config.get('match_archives'),
storage_config.get('archive_name_format'),
config.get('match_archives'),
config.get('archive_name_format'),
local_borg_version,
)
)
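
The transformation above can be sketched in isolation: every "keep_*" option in the now-flat configuration becomes a "--keep-*" flag. The function name below is hypothetical, and prefix/match-archives handling is omitted:

```python
def make_keep_flags(config):
    # Each "keep_*" option in the flat configuration dict becomes a "--keep-*" flag.
    flag_pairs = (
        ('--' + option_name.replace('_', '-'), str(value))
        for option_name, value in config.items()
        if option_name.startswith('keep_')
    )
    return tuple(element for pair in flag_pairs for element in pair)

assert make_keep_flags({'keep_daily': 7, 'keep_weekly': 4, 'compression': 'lz4'}) == (
    '--keep-daily', '7', '--keep-weekly', '4',
)
```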
@ -50,8 +50,7 @@ def make_prune_flags(storage_config, retention_config, local_borg_version):
def prune_archives(
dry_run,
repository_path,
storage_config,
retention_config,
config,
local_borg_version,
prune_arguments,
global_arguments,
@ -59,18 +58,17 @@ def prune_archives(
remote_path=None,
):
'''
Given dry-run flag, a local or remote repository path, a storage config dict, and a
retention config dict, prune Borg archives according to the retention policy specified in that
configuration.
Given a dry-run flag, a local or remote repository path, and a configuration dict, prune Borg
archives according to the retention policy specified in that configuration.
'''
borgmatic.logger.add_custom_log_levels()
umask = storage_config.get('umask', None)
lock_wait = storage_config.get('lock_wait', None)
extra_borg_options = storage_config.get('extra_borg_options', {}).get('prune', '')
umask = config.get('umask', None)
lock_wait = config.get('lock_wait', None)
extra_borg_options = config.get('extra_borg_options', {}).get('prune', '')
full_command = (
(local_path, 'prune')
+ make_prune_flags(storage_config, retention_config, local_borg_version)
+ make_prune_flags(config, local_borg_version)
+ (('--remote-path', remote_path) if remote_path else ())
+ (('--umask', str(umask)) if umask else ())
+ (('--log-json',) if global_arguments.log_json else ())
@ -97,5 +95,5 @@ def prune_archives(
full_command,
output_log_level=output_log_level,
borg_local_path=local_path,
extra_environment=environment.make_environment(storage_config),
extra_environment=environment.make_environment(config),
)


@ -14,7 +14,7 @@ RINFO_REPOSITORY_NOT_FOUND_EXIT_CODE = 2
def create_repository(
dry_run,
repository_path,
storage_config,
config,
local_borg_version,
global_arguments,
encryption_mode,
@ -27,15 +27,15 @@ def create_repository(
remote_path=None,
):
'''
Given a dry-run flag, a local or remote repository path, a storage configuration dict, the local
Borg version, a Borg encryption mode, the path to another repo whose key material should be
reused, whether the repository should be append-only, and the storage quota to use, create the
Given a dry-run flag, a local or remote repository path, a configuration dict, the local Borg
version, a Borg encryption mode, the path to another repo whose key material should be reused,
whether the repository should be append-only, and the storage quota to use, create the
repository. If the repository already exists, then log and skip creation.
'''
try:
rinfo.display_repository_info(
repository_path,
storage_config,
config,
local_borg_version,
argparse.Namespace(json=True),
global_arguments,
@ -48,8 +48,8 @@ def create_repository(
if error.returncode != RINFO_REPOSITORY_NOT_FOUND_EXIT_CODE:
raise
lock_wait = storage_config.get('lock_wait')
extra_borg_options = storage_config.get('extra_borg_options', {}).get('rcreate', '')
lock_wait = config.get('lock_wait')
extra_borg_options = config.get('extra_borg_options', {}).get('rcreate', '')
rcreate_command = (
(local_path,)
@ -82,5 +82,5 @@ def create_repository(
rcreate_command,
output_file=DO_NOT_CAPTURE,
borg_local_path=local_path,
extra_environment=environment.make_environment(storage_config),
extra_environment=environment.make_environment(config),
)


@ -9,7 +9,7 @@ logger = logging.getLogger(__name__)
def display_repository_info(
repository_path,
storage_config,
config,
local_borg_version,
rinfo_arguments,
global_arguments,
@ -17,12 +17,12 @@ def display_repository_info(
remote_path=None,
):
'''
Given a local or remote repository path, a storage config dict, the local Borg version, the
Given a local or remote repository path, a configuration dict, the local Borg version, the
arguments to the rinfo action, and global arguments as an argparse.Namespace, display summary
information for the Borg repository or return JSON summary information.
'''
borgmatic.logger.add_custom_log_levels()
lock_wait = storage_config.get('lock_wait', None)
lock_wait = config.get('lock_wait', None)
full_command = (
(local_path,)
@ -48,12 +48,13 @@ def display_repository_info(
+ flags.make_repository_flags(repository_path, local_borg_version)
)
extra_environment = environment.make_environment(storage_config)
extra_environment = environment.make_environment(config)
if rinfo_arguments.json:
return execute_command_and_capture_output(
full_command,
extra_environment=extra_environment,
borg_local_path=local_path,
)
else:
execute_command(


@ -10,14 +10,14 @@ logger = logging.getLogger(__name__)
def resolve_archive_name(
repository_path,
archive,
storage_config,
config,
local_borg_version,
global_arguments,
local_path='borg',
remote_path=None,
):
'''
Given a local or remote repository path, an archive name, a storage config dict, the local Borg
Given a local or remote repository path, an archive name, a configuration dict, the local Borg
version, global arguments as an argparse.Namespace, a local Borg path, and a remote Borg path,
return the archive name. But if the archive name is "latest", then instead introspect the
repository for the latest archive and return its name.
@ -34,7 +34,7 @@ def resolve_archive_name(
)
+ flags.make_flags('remote-path', remote_path)
+ flags.make_flags('log-json', global_arguments.log_json)
+ flags.make_flags('lock-wait', storage_config.get('lock_wait'))
+ flags.make_flags('lock-wait', config.get('lock_wait'))
+ flags.make_flags('last', 1)
+ ('--short',)
+ flags.make_repository_flags(repository_path, local_borg_version)
@ -42,7 +42,8 @@ def resolve_archive_name(
output = execute_command_and_capture_output(
full_command,
extra_environment=environment.make_environment(storage_config),
extra_environment=environment.make_environment(config),
borg_local_path=local_path,
)
try:
latest_archive = output.strip().splitlines()[-1]
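
Resolving "latest" boils down to taking the final line of a "--last 1 --short" listing; a sketch with sample output (the archive name is made up):

```python
# Sample output of a "--last 1 --short" listing: one archive name per line.
output = 'app-2023-07-14T03:10:51\n'
try:
    latest_archive = output.strip().splitlines()[-1]
except IndexError:
    raise ValueError('No archives found in the repository')
assert latest_archive == 'app-2023-07-14T03:10:51'
```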
@ -59,7 +60,7 @@ MAKE_FLAGS_EXCLUDES = ('repository', 'prefix', 'match_archives')
def make_rlist_command(
repository_path,
storage_config,
config,
local_borg_version,
rlist_arguments,
global_arguments,
@ -67,7 +68,7 @@ def make_rlist_command(
remote_path=None,
):
'''
Given a local or remote repository path, a storage config dict, the local Borg version, the
Given a local or remote repository path, a configuration dict, the local Borg version, the
arguments to the rlist action, global arguments as an argparse.Namespace instance, and local and
remote Borg paths, return a command as a tuple to list archives within a repository.
'''
@ -88,7 +89,7 @@ def make_rlist_command(
)
+ flags.make_flags('remote-path', remote_path)
+ flags.make_flags('log-json', global_arguments.log_json)
+ flags.make_flags('lock-wait', storage_config.get('lock_wait'))
+ flags.make_flags('lock-wait', config.get('lock_wait'))
+ (
(
flags.make_flags('match-archives', f'sh:{rlist_arguments.prefix}*')
@ -98,8 +99,8 @@ def make_rlist_command(
if rlist_arguments.prefix
else (
flags.make_match_archives_flags(
rlist_arguments.match_archives or storage_config.get('match_archives'),
storage_config.get('archive_name_format'),
rlist_arguments.match_archives or config.get('match_archives'),
config.get('archive_name_format'),
local_borg_version,
)
)
@ -111,7 +112,7 @@ def make_rlist_command(
def list_repository(
repository_path,
storage_config,
config,
local_borg_version,
rlist_arguments,
global_arguments,
@ -119,17 +120,17 @@ def list_repository(
remote_path=None,
):
'''
Given a local or remote repository path, a storage config dict, the local Borg version, the
Given a local or remote repository path, a configuration dict, the local Borg version, the
arguments to the list action, global arguments as an argparse.Namespace instance, and local and
remote Borg paths, display the output of listing Borg archives in the given repository (or
return JSON output).
'''
borgmatic.logger.add_custom_log_levels()
borg_environment = environment.make_environment(storage_config)
borg_environment = environment.make_environment(config)
main_command = make_rlist_command(
repository_path,
storage_config,
config,
local_borg_version,
rlist_arguments,
global_arguments,
@ -138,7 +139,9 @@ def list_repository(
)
if rlist_arguments.json:
return execute_command_and_capture_output(main_command, extra_environment=borg_environment)
return execute_command_and_capture_output(
main_command, extra_environment=borg_environment, borg_local_path=local_path
)
else:
execute_command(
main_command,


@ -10,7 +10,7 @@ logger = logging.getLogger(__name__)
def transfer_archives(
dry_run,
repository_path,
storage_config,
config,
local_borg_version,
transfer_arguments,
global_arguments,
@ -18,7 +18,7 @@ def transfer_archives(
remote_path=None,
):
'''
Given a dry-run flag, a local or remote repository path, a storage config dict, the local Borg
Given a dry-run flag, a local or remote repository path, a configuration dict, the local Borg
version, the arguments to the transfer action, and global arguments as an argparse.Namespace
instance, transfer archives to the given repository.
'''
@ -30,7 +30,7 @@ def transfer_archives(
+ (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ())
+ flags.make_flags('remote-path', remote_path)
+ flags.make_flags('log-json', global_arguments.log_json)
+ flags.make_flags('lock-wait', storage_config.get('lock_wait', None))
+ flags.make_flags('lock-wait', config.get('lock_wait', None))
+ (
flags.make_flags_from_arguments(
transfer_arguments,
@ -40,8 +40,8 @@ def transfer_archives(
flags.make_match_archives_flags(
transfer_arguments.match_archives
or transfer_arguments.archive
or storage_config.get('match_archives'),
storage_config.get('archive_name_format'),
or config.get('match_archives'),
config.get('archive_name_format'),
local_borg_version,
)
)
@ -56,5 +56,5 @@ def transfer_archives(
output_log_level=logging.ANSWER,
output_file=DO_NOT_CAPTURE if transfer_arguments.progress else None,
borg_local_path=local_path,
extra_environment=environment.make_environment(storage_config),
extra_environment=environment.make_environment(config),
)


@ -6,9 +6,9 @@ from borgmatic.execute import execute_command_and_capture_output
logger = logging.getLogger(__name__)
def local_borg_version(storage_config, local_path='borg'):
def local_borg_version(config, local_path='borg'):
'''
Given a storage configuration dict and a local Borg binary path, return a version string for it.
Given a configuration dict and a local Borg binary path, return a version string for it.
Raise OSError or CalledProcessError if there is a problem running Borg.
Raise ValueError if the version cannot be parsed.
@ -20,7 +20,8 @@ def local_borg_version(storage_config, local_path='borg'):
)
output = execute_command_and_capture_output(
full_command,
extra_environment=environment.make_environment(storage_config),
extra_environment=environment.make_environment(config),
borg_local_path=local_path,
)
try:
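
A hedged sketch of the version parsing that follows (the function's exact regex isn't shown in this hunk, so the pattern below is an assumption):

```python
import re

def parse_borg_version(output):
    # "borg --version" prints something like "borg 1.2.4"; pull out the number.
    match = re.search(r'(\d+\.\d+\.\d+)', output)
    if not match:
        raise ValueError(f'Could not parse Borg version from: {output!r}')
    return match.group(1)

assert parse_borg_version('borg 1.2.4\n') == '1.2.4'
```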


@ -1,7 +1,7 @@
import collections
import itertools
import sys
from argparse import Action, ArgumentParser
from argparse import ArgumentParser
from borgmatic.config import collect
@ -216,42 +216,12 @@ def parse_arguments_for_actions(unparsed_arguments, action_parsers, global_parse
arguments['global'], remaining = global_parser.parse_known_args(unparsed_arguments)
remaining_action_arguments.append(remaining)
# Prevent action names and arguments that follow "--config" paths from being considered as
# additional paths.
for argument_name in arguments.keys():
if argument_name == 'global':
continue
for action_name in [argument_name] + ACTION_ALIASES.get(argument_name, []):
try:
action_name_index = arguments['global'].config_paths.index(action_name)
arguments['global'].config_paths = arguments['global'].config_paths[
:action_name_index
]
break
except ValueError:
pass
return (
arguments,
tuple(remaining_action_arguments) if arguments else unparsed_arguments,
)
class Extend_action(Action):
'''
An argparse action to support Python 3.8's "extend" action in older versions of Python.
'''
def __call__(self, parser, namespace, values, option_string=None):
items = getattr(namespace, self.dest, None)
if items:
items.extend(values) # pragma: no cover
else:
setattr(namespace, self.dest, list(values))
def make_parsers():
'''
Build a global arguments parser, individual action parsers, and a combined parser containing
@ -263,16 +233,14 @@ def make_parsers():
unexpanded_config_paths = collect.get_default_config_paths(expand_home=False)
global_parser = ArgumentParser(add_help=False)
global_parser.register('action', 'extend', Extend_action)
global_group = global_parser.add_argument_group('global arguments')
global_group.add_argument(
'-c',
'--config',
nargs='*',
dest='config_paths',
default=config_paths,
help=f"Configuration filenames or directories, defaults to: {' '.join(unexpanded_config_paths)}",
action='append',
help=f"Configuration filename or directory, can specify flag multiple times, defaults to: {' '.join(unexpanded_config_paths)}",
)
global_group.add_argument(
'-n',
@ -330,11 +298,10 @@ def make_parsers():
)
global_group.add_argument(
'--override',
metavar='SECTION.OPTION=VALUE',
nargs='+',
metavar='OPTION.SUBOPTION=VALUE',
dest='overrides',
action='extend',
help='One or more configuration file options to override with specified values',
action='append',
help='Configuration file option to override with specified value, can specify flag multiple times',
)
global_group.add_argument(
'--no-environment-interpolation',
@ -672,9 +639,9 @@ def make_parsers():
'--path',
'--restore-path',
metavar='PATH',
nargs='+',
dest='paths',
help='Paths to extract from archive, defaults to the entire archive',
action='append',
help='Path to extract from archive, can specify flag multiple times, defaults to the entire archive',
)
extract_group.add_argument(
'--destination',
@ -826,9 +793,9 @@ def make_parsers():
export_tar_group.add_argument(
'--path',
metavar='PATH',
nargs='+',
dest='paths',
help='Paths to export from archive, defaults to the entire archive',
action='append',
help='Path to export from archive, can specify flag multiple times, defaults to the entire archive',
)
export_tar_group.add_argument(
'--destination',
@ -877,9 +844,9 @@ def make_parsers():
mount_group.add_argument(
'--path',
metavar='PATH',
nargs='+',
dest='paths',
help='Paths to mount from archive, defaults to the entire archive',
action='append',
help='Path to mount from archive, can specify flag multiple times, defaults to the entire archive',
)
mount_group.add_argument(
'--foreground',
@ -954,16 +921,16 @@ def make_parsers():
restore_group.add_argument(
'--database',
metavar='NAME',
nargs='+',
dest='databases',
help="Names of databases to restore from archive, defaults to all databases. Note that any databases to restore must be defined in borgmatic's configuration",
action='append',
help="Name of database to restore from archive, must be defined in borgmatic's configuration, can specify flag multiple times, defaults to all databases",
)
restore_group.add_argument(
'--schema',
metavar='NAME',
nargs='+',
dest='schemas',
help='Names of schemas to restore from the database, defaults to all schemas. Schemas are only supported for PostgreSQL and MongoDB databases',
action='append',
help='Name of schema to restore from the database, can specify flag multiple times, defaults to all schemas. Schemas are only supported for PostgreSQL and MongoDB databases',
)
restore_group.add_argument(
'--hostname',
@ -1065,16 +1032,16 @@ def make_parsers():
list_group.add_argument(
'--path',
metavar='PATH',
nargs='+',
dest='paths',
help='Paths or patterns to list from a single selected archive (via "--archive"), defaults to listing the entire archive',
action='append',
help='Path or pattern to list from a single selected archive (via "--archive"), can specify flag multiple times, defaults to listing the entire archive',
)
list_group.add_argument(
'--find',
metavar='PATH',
nargs='+',
dest='find_paths',
help='Partial paths or patterns to search for and list across multiple archives',
action='append',
help='Partial path or pattern to search for and list across multiple archives, can specify flag multiple times',
)
list_group.add_argument(
'--short', default=False, action='store_true', help='Output only path names'
@ -1248,6 +1215,9 @@ def parse_arguments(*unparsed_arguments):
unparsed_arguments, action_parsers.choices, global_parser
)
if not arguments['global'].config_paths:
arguments['global'].config_paths = collect.get_default_config_paths(expand_home=True)
for action_name in ('bootstrap', 'generate', 'validate'):
if (
action_name in arguments.keys() and len(arguments.keys()) > 2
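
The switch from nargs to action='append' above is what prevents an action name from being swallowed as an extra --config path, which in turn allows deleting the truncation workaround. A small demonstration:

```python
from argparse import ArgumentParser

parser = ArgumentParser()
# One value per flag occurrence: a following action name can no longer be
# consumed as another configuration path, as it could with nargs='*'.
parser.add_argument('-c', '--config', dest='config_paths', action='append')
arguments, remaining = parser.parse_known_args(
    ['--config', 'first.yaml', '--config', 'second.yaml', 'list']
)
assert arguments.config_paths == ['first.yaml', 'second.yaml']
assert remaining == ['list']
```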


@ -58,16 +58,12 @@ def run_configuration(config_filename, config, arguments):
* JSON output strings from successfully executing any actions that produce JSON
* logging.LogRecord instances containing errors from any actions or backup hooks that fail
'''
(location, storage, retention, consistency, hooks) = (
config.get(section_name, {})
for section_name in ('location', 'storage', 'retention', 'consistency', 'hooks')
)
global_arguments = arguments['global']
local_path = location.get('local_path', 'borg')
remote_path = location.get('remote_path')
retries = storage.get('retries', 0)
retry_wait = storage.get('retry_wait', 0)
local_path = config.get('local_path', 'borg')
remote_path = config.get('remote_path')
retries = config.get('retries', 0)
retry_wait = config.get('retry_wait', 0)
encountered_error = None
error_repository = ''
using_primary_action = {'create', 'prune', 'compact', 'check'}.intersection(arguments)
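
The flat lookups above replace one level of nesting; illustratively (the option values here are made up):

```python
old_config = {'location': {'local_path': 'borg1'}, 'storage': {'retries': 2}}
new_config = {'local_path': 'borg1', 'retries': 2}

# Same lookups, one less level of nesting.
assert old_config.get('location', {}).get('local_path', 'borg') == 'borg1'
assert new_config.get('local_path', 'borg') == 'borg1'
assert new_config.get('retries', 0) == 2
```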
@ -75,7 +71,7 @@ def run_configuration(config_filename, config, arguments):
monitoring_hooks_are_activated = using_primary_action and monitoring_log_level != DISABLED
try:
local_borg_version = borg_version.local_borg_version(storage, local_path)
local_borg_version = borg_version.local_borg_version(config, local_path)
except (OSError, CalledProcessError, ValueError) as error:
yield from log_error_records(f'{config_filename}: Error getting local Borg version', error)
return
@ -84,7 +80,7 @@ def run_configuration(config_filename, config, arguments):
if monitoring_hooks_are_activated:
dispatch.call_hooks(
'initialize_monitor',
hooks,
config,
config_filename,
monitor.MONITOR_HOOK_NAMES,
monitoring_log_level,
@ -93,7 +89,7 @@ def run_configuration(config_filename, config, arguments):
dispatch.call_hooks(
'ping_monitor',
hooks,
config,
config_filename,
monitor.MONITOR_HOOK_NAMES,
monitor.State.START,
@ -109,7 +105,7 @@ def run_configuration(config_filename, config, arguments):
if not encountered_error:
repo_queue = Queue()
for repo in location['repositories']:
for repo in config['repositories']:
repo_queue.put(
(repo, 0),
)
@ -129,11 +125,7 @@ def run_configuration(config_filename, config, arguments):
yield from run_actions(
arguments=arguments,
config_filename=config_filename,
location=location,
storage=storage,
retention=retention,
consistency=consistency,
hooks=hooks,
config=config,
local_path=local_path,
remote_path=remote_path,
local_borg_version=local_borg_version,
@ -172,7 +164,7 @@ def run_configuration(config_filename, config, arguments):
# send logs irrespective of error
dispatch.call_hooks(
'ping_monitor',
hooks,
config,
config_filename,
monitor.MONITOR_HOOK_NAMES,
monitor.State.LOG,
@ -191,7 +183,7 @@ def run_configuration(config_filename, config, arguments):
if monitoring_hooks_are_activated:
dispatch.call_hooks(
'ping_monitor',
hooks,
config,
config_filename,
monitor.MONITOR_HOOK_NAMES,
monitor.State.FINISH,
@ -200,7 +192,7 @@ def run_configuration(config_filename, config, arguments):
)
dispatch.call_hooks(
'destroy_monitor',
hooks,
config,
config_filename,
monitor.MONITOR_HOOK_NAMES,
monitoring_log_level,
@ -216,8 +208,8 @@ def run_configuration(config_filename, config, arguments):
if encountered_error and using_primary_action:
try:
command.execute_hook(
hooks.get('on_error'),
hooks.get('umask'),
config.get('on_error'),
config.get('umask'),
config_filename,
'on-error',
global_arguments.dry_run,
@ -227,7 +219,7 @@ def run_configuration(config_filename, config, arguments):
)
dispatch.call_hooks(
'ping_monitor',
hooks,
config,
config_filename,
monitor.MONITOR_HOOK_NAMES,
monitor.State.FAIL,
@ -236,7 +228,7 @@ def run_configuration(config_filename, config, arguments):
)
dispatch.call_hooks(
'destroy_monitor',
hooks,
config,
config_filename,
monitor.MONITOR_HOOK_NAMES,
monitoring_log_level,
@ -253,11 +245,7 @@ def run_actions(
*,
arguments,
config_filename,
location,
storage,
retention,
consistency,
hooks,
config,
local_path,
remote_path,
local_borg_version,
@ -282,13 +270,13 @@ def run_actions(
hook_context = {
'repository': repository_path,
# Deprecated: For backwards compatibility with borgmatic < 1.6.0.
'repositories': ','.join([repo['path'] for repo in location['repositories']]),
'repositories': ','.join([repo['path'] for repo in config['repositories']]),
'log_file': global_arguments.log_file if global_arguments.log_file else '',
}
command.execute_hook(
hooks.get('before_actions'),
hooks.get('umask'),
config.get('before_actions'),
config.get('umask'),
config_filename,
'pre-actions',
global_arguments.dry_run,
@ -299,7 +287,7 @@ def run_actions(
if action_name == 'rcreate':
borgmatic.actions.rcreate.run_rcreate(
repository,
storage,
config,
local_borg_version,
action_arguments,
global_arguments,
@ -309,7 +297,7 @@ def run_actions(
elif action_name == 'transfer':
borgmatic.actions.transfer.run_transfer(
repository,
storage,
config,
local_borg_version,
action_arguments,
global_arguments,
@ -320,9 +308,7 @@ def run_actions(
yield from borgmatic.actions.create.run_create(
config_filename,
repository,
location,
storage,
hooks,
config,
hook_context,
local_borg_version,
action_arguments,
@ -335,9 +321,7 @@ def run_actions(
borgmatic.actions.prune.run_prune(
config_filename,
repository,
storage,
retention,
hooks,
config,
hook_context,
local_borg_version,
action_arguments,
@ -350,9 +334,7 @@ def run_actions(
borgmatic.actions.compact.run_compact(
config_filename,
repository,
storage,
retention,
hooks,
config,
hook_context,
local_borg_version,
action_arguments,
@ -362,14 +344,11 @@ def run_actions(
remote_path,
)
elif action_name == 'check':
if checks.repository_enabled_for_checks(repository, consistency):
if checks.repository_enabled_for_checks(repository, config):
borgmatic.actions.check.run_check(
config_filename,
repository,
location,
storage,
consistency,
hooks,
config,
hook_context,
local_borg_version,
action_arguments,
@ -381,9 +360,7 @@ def run_actions(
borgmatic.actions.extract.run_extract(
config_filename,
repository,
location,
storage,
hooks,
config,
hook_context,
local_borg_version,
action_arguments,
@ -394,7 +371,7 @@ def run_actions(
elif action_name == 'export-tar':
borgmatic.actions.export_tar.run_export_tar(
repository,
storage,
config,
local_borg_version,
action_arguments,
global_arguments,
@ -404,7 +381,7 @@ def run_actions(
elif action_name == 'mount':
borgmatic.actions.mount.run_mount(
repository,
storage,
config,
local_borg_version,
action_arguments,
global_arguments,
@ -414,9 +391,7 @@ def run_actions(
elif action_name == 'restore':
borgmatic.actions.restore.run_restore(
repository,
location,
storage,
hooks,
config,
local_borg_version,
action_arguments,
global_arguments,
@ -426,7 +401,7 @@ def run_actions(
elif action_name == 'rlist':
yield from borgmatic.actions.rlist.run_rlist(
repository,
storage,
config,
local_borg_version,
action_arguments,
global_arguments,
@ -436,7 +411,7 @@ def run_actions(
elif action_name == 'list':
yield from borgmatic.actions.list.run_list(
repository,
storage,
config,
local_borg_version,
action_arguments,
global_arguments,
@ -446,7 +421,7 @@ def run_actions(
elif action_name == 'rinfo':
yield from borgmatic.actions.rinfo.run_rinfo(
repository,
storage,
config,
local_borg_version,
action_arguments,
global_arguments,
@ -456,7 +431,7 @@ def run_actions(
elif action_name == 'info':
yield from borgmatic.actions.info.run_info(
repository,
storage,
config,
local_borg_version,
action_arguments,
global_arguments,
@ -466,7 +441,7 @@ def run_actions(
elif action_name == 'break-lock':
borgmatic.actions.break_lock.run_break_lock(
repository,
storage,
config,
local_borg_version,
action_arguments,
global_arguments,
@ -476,7 +451,7 @@ def run_actions(
elif action_name == 'borg':
borgmatic.actions.borg.run_borg(
repository,
storage,
config,
local_borg_version,
action_arguments,
global_arguments,
@ -485,8 +460,8 @@ def run_actions(
)
command.execute_hook(
hooks.get('after_actions'),
hooks.get('umask'),
config.get('after_actions'),
config.get('umask'),
config_filename,
'post-actions',
global_arguments.dry_run,
@ -569,6 +544,9 @@ def log_record(suppress_log=False, **kwargs):
return record
MAX_CAPTURED_OUTPUT_LENGTH = 1000
def log_error_records(
message, error=None, levelno=logging.CRITICAL, log_command_error_output=False
):
@ -591,12 +569,18 @@ def log_error_records(
except CalledProcessError as error:
yield log_record(levelno=levelno, levelname=level_name, msg=message)
if error.output:
try:
output = error.output.decode('utf-8')
except (UnicodeDecodeError, AttributeError):
output = error.output
# Suppress these logs for now and save full error output for the log summary at the end.
yield log_record(
levelno=levelno,
levelname=level_name,
msg=error.output,
suppress_log=not log_command_error_output,
msg=output[:MAX_CAPTURED_OUTPUT_LENGTH]
+ ' ...' * (len(output) > MAX_CAPTURED_OUTPUT_LENGTH),
suppress_log=True,
)
yield log_record(levelno=levelno, levelname=level_name, msg=error)
except (ValueError, OSError) as error:
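
The truncation idiom above appends an ellipsis only when output was actually cut, by multiplying the string ' ...' by a boolean:

```python
MAX_CAPTURED_OUTPUT_LENGTH = 1000

output = 'x' * 1500
# ' ...' * True is ' ...'; ' ...' * False is '', so short output stays untouched.
truncated = output[:MAX_CAPTURED_OUTPUT_LENGTH] + ' ...' * (
    len(output) > MAX_CAPTURED_OUTPUT_LENGTH
)
assert truncated == 'x' * 1000 + ' ...'

short = 'short output'
assert short[:MAX_CAPTURED_OUTPUT_LENGTH] + ' ...' * (
    len(short) > MAX_CAPTURED_OUTPUT_LENGTH
) == 'short output'
```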
@ -613,7 +597,7 @@ def get_local_path(configs):
Arbitrarily return the local path from the first configuration dict. Default to "borg" if not
set.
'''
return next(iter(configs.values())).get('location', {}).get('local_path', 'borg')
return next(iter(configs.values())).get('local_path', 'borg')
def collect_highlander_action_summary_logs(configs, arguments, configuration_parse_errors):
@ -627,6 +611,8 @@ def collect_highlander_action_summary_logs(configs, arguments, configuration_par
A highlander action is an action that cannot coexist with other actions on the borgmatic
command-line, and borgmatic exits after processing such an action.
'''
add_custom_log_levels()
if 'bootstrap' in arguments:
try:
# No configuration file is needed for bootstrap.
@ -744,10 +730,9 @@ def collect_configuration_run_summary_logs(configs, arguments):
if 'create' in arguments:
try:
for config_filename, config in configs.items():
hooks = config.get('hooks', {})
command.execute_hook(
hooks.get('before_everything'),
hooks.get('umask'),
config.get('before_everything'),
config.get('umask'),
config_filename,
'pre-everything',
arguments['global'].dry_run,
@ -792,10 +777,9 @@ def collect_configuration_run_summary_logs(configs, arguments):
if 'create' in arguments:
try:
for config_filename, config in configs.items():
hooks = config.get('hooks', {})
command.execute_hook(
hooks.get('after_everything'),
hooks.get('umask'),
config.get('after_everything'),
config.get('umask'),
config_filename,
'post-everything',
arguments['global'].dry_run,


@ -11,7 +11,7 @@ INDENT = 4
SEQUENCE_INDENT = 2
def _insert_newline_before_comment(config, field_name):
def insert_newline_before_comment(config, field_name):
'''
Using some ruamel.yaml black magic, insert a blank line in the config right before the given
field and its comments.
@ -21,10 +21,10 @@ def _insert_newline_before_comment(config, field_name):
)
def _schema_to_sample_configuration(schema, level=0, parent_is_sequence=False):
def schema_to_sample_configuration(schema, level=0, parent_is_sequence=False):
'''
Given a loaded configuration schema, generate and return sample config for it. Include comments
for each section based on the schema "description".
for each option based on the schema "description".
'''
schema_type = schema.get('type')
example = schema.get('example')
@ -33,13 +33,13 @@ def _schema_to_sample_configuration(schema, level=0, parent_is_sequence=False):
if schema_type == 'array':
config = yaml.comments.CommentedSeq(
[_schema_to_sample_configuration(schema['items'], level, parent_is_sequence=True)]
[schema_to_sample_configuration(schema['items'], level, parent_is_sequence=True)]
)
add_comments_to_configuration_sequence(config, schema, indent=(level * INDENT))
elif schema_type == 'object':
config = yaml.comments.CommentedMap(
[
(field_name, _schema_to_sample_configuration(sub_schema, level + 1))
(field_name, schema_to_sample_configuration(sub_schema, level + 1))
for field_name, sub_schema in schema['properties'].items()
]
)
@ -53,13 +53,13 @@ def _schema_to_sample_configuration(schema, level=0, parent_is_sequence=False):
return config
def _comment_out_line(line):
def comment_out_line(line):
# If it's already commented out (or empty), there's nothing further to do!
stripped_line = line.lstrip()
if not stripped_line or stripped_line.startswith('#'):
return line
# Comment out the names of optional sections, inserting the '#' after any indent for aesthetics.
# Comment out the names of optional options, inserting the '#' after any indent for aesthetics.
matches = re.match(r'(\s*)', line)
indent_spaces = matches.group(0) if matches else ''
count_indent_spaces = len(indent_spaces)
@ -67,7 +67,7 @@ def _comment_out_line(line):
return '# '.join((indent_spaces, line[count_indent_spaces:]))
def _comment_out_optional_configuration(rendered_config):
def comment_out_optional_configuration(rendered_config):
'''
Post-process a rendered configuration string to comment out optional key/values, as determined
by a sentinel in the comment before each key.
@ -92,7 +92,7 @@ def _comment_out_optional_configuration(rendered_config):
if not line.strip():
optional = False
lines.append(_comment_out_line(line) if optional else line)
lines.append(comment_out_line(line) if optional else line)
return '\n'.join(lines)
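
A self-contained sketch of comment_out_line as shown above, preserving indentation when commenting out optional options:

```python
import re

def comment_out_line(line):
    stripped_line = line.lstrip()
    # Already commented out (or empty)? Nothing further to do.
    if not stripped_line or stripped_line.startswith('#'):
        return line
    # Insert '# ' after any leading indent so the result stays aligned.
    indent_spaces = re.match(r'(\s*)', line).group(0)
    return '# '.join((indent_spaces, line[len(indent_spaces):]))

assert comment_out_line('    keep_daily: 7') == '    # keep_daily: 7'
assert comment_out_line('# already commented') == '# already commented'
```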
@ -165,7 +165,6 @@ def add_comments_to_configuration_sequence(config, schema, indent=0):
return
REQUIRED_SECTION_NAMES = {'location', 'retention'}
REQUIRED_KEYS = {'source_directories', 'repositories', 'keep_daily'}
COMMENTED_OUT_SENTINEL = 'COMMENT_OUT'
@ -185,7 +184,7 @@ def add_comments_to_configuration_object(config, schema, indent=0, skip_first=Fa
# If this is an optional key, add an indicator to the comment flagging it to be commented
# out from the sample configuration. This sentinel is consumed by downstream processing that
# does the actual commenting out.
if field_name not in REQUIRED_SECTION_NAMES and field_name not in REQUIRED_KEYS:
if field_name not in REQUIRED_KEYS:
description = (
'\n'.join((description, COMMENTED_OUT_SENTINEL))
if description
@ -199,7 +198,7 @@ def add_comments_to_configuration_object(config, schema, indent=0, skip_first=Fa
config.yaml_set_comment_before_after_key(key=field_name, before=description, indent=indent)
if index > 0:
_insert_newline_before_comment(config, field_name)
insert_newline_before_comment(config, field_name)
RUAMEL_YAML_COMMENTS_INDEX = 1
@ -284,7 +283,7 @@ def generate_sample_configuration(
normalize.normalize(source_filename, source_config)
destination_config = merge_source_configuration_into_destination(
_schema_to_sample_configuration(schema), source_config
schema_to_sample_configuration(schema), source_config
)
if dry_run:
@ -292,6 +291,6 @@ def generate_sample_configuration(
write_configuration(
destination_filename,
_comment_out_optional_configuration(render_configuration(destination_config)),
comment_out_optional_configuration(render_configuration(destination_config)),
overwrite=overwrite,
)


@ -97,8 +97,8 @@ class Include_constructor(ruamel.yaml.SafeConstructor):
```
These includes are deep merged into the current configuration file. For instance, in this
example, any "retention" options in common.yaml will get merged into the "retention" section
in the example configuration file.
example, any "option" with sub-options in common.yaml will get merged into the corresponding
"option" with sub-options in the example configuration file.
'''
representer = ruamel.yaml.representer.SafeRepresenter()
@ -116,7 +116,7 @@ def load_configuration(filename):
'''
Load the given configuration file and return its contents as a data structure of nested dicts
and lists. Also, replace any "{constant}" strings with the value of the "constant" key in the
"constants" section of the configuration file.
"constants" option of the configuration file.
Raise ruamel.yaml.error.YAMLError if something goes wrong parsing the YAML, or RecursionError
if there are too many recursive includes.
@ -223,8 +223,8 @@ def deep_merge_nodes(nodes):
If a mapping or sequence node has a YAML "!retain" tag, then that node is not merged.
The purpose of deep merging like this is to support, for instance, merging one borgmatic
configuration file into another for reuse, such that a configuration section ("retention",
etc.) does not completely replace the corresponding section in a merged file.
configuration file into another for reuse, such that a configuration option with sub-options
does not completely replace the corresponding option in a merged file.
Raise ValueError if a merge is implied using two incompatible types.
'''


@ -2,21 +2,70 @@ import logging
import os
def normalize_sections(config_filename, config):
'''
Given a configuration filename and a configuration dict of its loaded contents, airlift any
options out of sections ("location:", etc.) to the global scope and delete those sections.
Return any log message warnings produced based on the normalization performed.
Raise ValueError if the "prefix" option is set in both "location" and "consistency" sections.
'''
location = config.get('location') or {}
storage = config.get('storage') or {}
consistency = config.get('consistency') or {}
hooks = config.get('hooks') or {}
if (
location.get('prefix')
and consistency.get('prefix')
and location.get('prefix') != consistency.get('prefix')
):
raise ValueError(
'The retention prefix and the consistency prefix cannot have different values (unless one is not set).'
)
if storage.get('umask') and hooks.get('umask') and storage.get('umask') != hooks.get('umask'):
raise ValueError(
'The storage umask and the hooks umask cannot have different values (unless one is not set).'
)
any_section_upgraded = False
# Move any options from deprecated sections into the global scope.
for section_name in ('location', 'storage', 'retention', 'consistency', 'output', 'hooks'):
section_config = config.get(section_name)
if section_config:
any_section_upgraded = True
del config[section_name]
config.update(section_config)
if any_section_upgraded:
return [
logging.makeLogRecord(
dict(
levelno=logging.WARNING,
levelname='WARNING',
msg=f'{config_filename}: Configuration sections like location: and storage: are deprecated and support will be removed from a future release. To prepare for this, move your options out of sections to the global scope.',
)
)
]
return []
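
Condensed, the section-flattening above amounts to the following sketch (the prefix/umask conflict checks and the warning logs are omitted, and the helper name is hypothetical):

```python
def flatten_sections(config):
    # Move options out of deprecated sections into the global scope.
    for section_name in ('location', 'storage', 'retention', 'consistency', 'output', 'hooks'):
        section_config = config.pop(section_name, None)
        if section_config:
            config.update(section_config)
    return config

assert flatten_sections(
    {'location': {'source_directories': ['/home']}, 'retention': {'keep_daily': 7}}
) == {'source_directories': ['/home'], 'keep_daily': 7}
```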
def normalize(config_filename, config):
'''
Given a configuration filename and a configuration dict of its loaded contents, apply particular
hard-coded rules to normalize the configuration to adhere to the current schema. Return any log
message warnings produced based on the normalization performed.
Raise ValueError if the configuration cannot be normalized.
'''
logs = []
location = config.get('location') or {}
storage = config.get('storage') or {}
consistency = config.get('consistency') or {}
retention = config.get('retention') or {}
hooks = config.get('hooks') or {}
logs = normalize_sections(config_filename, config)
# Upgrade exclude_if_present from a string to a list.
exclude_if_present = location.get('exclude_if_present')
exclude_if_present = config.get('exclude_if_present')
if isinstance(exclude_if_present, str):
logs.append(
logging.makeLogRecord(
@ -27,10 +76,10 @@ def normalize(config_filename, config):
)
)
)
config['location']['exclude_if_present'] = [exclude_if_present]
config['exclude_if_present'] = [exclude_if_present]
# Upgrade various monitoring hooks from a string to a dict.
healthchecks = hooks.get('healthchecks')
healthchecks = config.get('healthchecks')
if isinstance(healthchecks, str):
logs.append(
logging.makeLogRecord(
@ -41,9 +90,9 @@ def normalize(config_filename, config):
)
)
)
config['hooks']['healthchecks'] = {'ping_url': healthchecks}
config['healthchecks'] = {'ping_url': healthchecks}
cronitor = hooks.get('cronitor')
cronitor = config.get('cronitor')
if isinstance(cronitor, str):
logs.append(
logging.makeLogRecord(
@ -54,9 +103,9 @@ def normalize(config_filename, config):
)
)
)
config['hooks']['cronitor'] = {'ping_url': cronitor}
config['cronitor'] = {'ping_url': cronitor}
pagerduty = hooks.get('pagerduty')
pagerduty = config.get('pagerduty')
if isinstance(pagerduty, str):
logs.append(
logging.makeLogRecord(
@ -67,9 +116,9 @@ def normalize(config_filename, config):
)
)
)
config['hooks']['pagerduty'] = {'integration_key': pagerduty}
config['pagerduty'] = {'integration_key': pagerduty}
cronhub = hooks.get('cronhub')
cronhub = config.get('cronhub')
if isinstance(cronhub, str):
logs.append(
logging.makeLogRecord(
@ -80,10 +129,10 @@ def normalize(config_filename, config):
)
)
)
config['hooks']['cronhub'] = {'ping_url': cronhub}
config['cronhub'] = {'ping_url': cronhub}
# Upgrade consistency checks from a list of strings to a list of dicts.
checks = consistency.get('checks')
checks = config.get('checks')
if isinstance(checks, list) and len(checks) and isinstance(checks[0], str):
logs.append(
logging.makeLogRecord(
@ -94,10 +143,10 @@ def normalize(config_filename, config):
)
)
)
config['consistency']['checks'] = [{'name': check_type} for check_type in checks]
config['checks'] = [{'name': check_type} for check_type in checks]
# Rename various configuration options.
numeric_owner = location.pop('numeric_owner', None)
numeric_owner = config.pop('numeric_owner', None)
if numeric_owner is not None:
logs.append(
logging.makeLogRecord(
@ -108,9 +157,9 @@ def normalize(config_filename, config):
)
)
)
config['location']['numeric_ids'] = numeric_owner
config['numeric_ids'] = numeric_owner
bsd_flags = location.pop('bsd_flags', None)
bsd_flags = config.pop('bsd_flags', None)
if bsd_flags is not None:
logs.append(
logging.makeLogRecord(
@ -121,9 +170,9 @@ def normalize(config_filename, config):
)
)
)
config['location']['flags'] = bsd_flags
config['flags'] = bsd_flags
remote_rate_limit = storage.pop('remote_rate_limit', None)
remote_rate_limit = config.pop('remote_rate_limit', None)
if remote_rate_limit is not None:
logs.append(
logging.makeLogRecord(
@ -134,10 +183,10 @@ def normalize(config_filename, config):
)
)
)
config['storage']['upload_rate_limit'] = remote_rate_limit
config['upload_rate_limit'] = remote_rate_limit
# Upgrade remote repositories to ssh:// syntax, required in Borg 2.
repositories = location.get('repositories')
repositories = config.get('repositories')
if repositories:
if isinstance(repositories[0], str):
logs.append(
@ -149,11 +198,11 @@ def normalize(config_filename, config):
)
)
)
config['location']['repositories'] = [
{'path': repository} for repository in repositories
]
repositories = config['location']['repositories']
config['location']['repositories'] = []
config['repositories'] = [{'path': repository} for repository in repositories]
repositories = config['repositories']
config['repositories'] = []
for repository_dict in repositories:
repository_path = repository_dict['path']
if '~' in repository_path:
@ -171,14 +220,14 @@ def normalize(config_filename, config):
updated_repository_path = os.path.abspath(
repository_path.partition('file://')[-1]
)
config['location']['repositories'].append(
config['repositories'].append(
dict(
repository_dict,
path=updated_repository_path,
)
)
elif repository_path.startswith('ssh://'):
config['location']['repositories'].append(repository_dict)
config['repositories'].append(repository_dict)
else:
rewritten_repository_path = f"ssh://{repository_path.replace(':~', '/~').replace(':/', '/').replace(':', '/./')}"
logs.append(
@ -190,16 +239,16 @@ def normalize(config_filename, config):
)
)
)
config['location']['repositories'].append(
config['repositories'].append(
dict(
repository_dict,
path=rewritten_repository_path,
)
)
else:
config['location']['repositories'].append(repository_dict)
config['repositories'].append(repository_dict)
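
The ssh:// rewrite above can be illustrated on its own (a sketch with a simplified guard, not borgmatic's exact branching):

```python
def upgrade_repository_path(path):
    # Borg 2 requires ssh:// syntax, so "user@host:path" forms get rewritten.
    if path.startswith(('ssh://', '/', 'file://')) or ':' not in path:
        return path
    return f"ssh://{path.replace(':~', '/~').replace(':/', '/').replace(':', '/./')}"

assert upgrade_repository_path('user@host:repo.borg') == 'ssh://user@host/./repo.borg'
assert upgrade_repository_path('user@host:~/repo') == 'ssh://user@host/~/repo'
assert upgrade_repository_path('user@host:/abs/repo') == 'ssh://user@host/abs/repo'
```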
if consistency.get('prefix') or retention.get('prefix'):
if config.get('prefix'):
logs.append(
logging.makeLogRecord(
dict(


@ -32,19 +32,33 @@ def convert_value_type(value):
return ruamel.yaml.YAML(typ='safe').load(io.StringIO(value))
LEGACY_SECTION_NAMES = {'location', 'storage', 'retention', 'consistency', 'output', 'hooks'}
def strip_section_names(parsed_override_key):
'''
Given a parsed override key as a tuple of option and suboption names, strip out any initial
legacy section names, since configuration file normalization also strips them out.
'''
if parsed_override_key[0] in LEGACY_SECTION_NAMES:
return parsed_override_key[1:]
return parsed_override_key
def parse_overrides(raw_overrides):
'''
Given a sequence of configuration file override strings in the form of "section.option=value",
Given a sequence of configuration file override strings in the form of "option.suboption=value",
parse and return a sequence of tuples (keys, values), where keys is a sequence of strings. For
instance, given the following raw overrides:
['section.my_option=value1', 'section.other_option=value2']
['my_option.suboption=value1', 'other_option=value2']
... return this:
(
(('section', 'my_option'), 'value1'),
(('section', 'other_option'), 'value2'),
(('my_option', 'suboption'), 'value1'),
(('other_option',), 'value2'),
)
Raise ValueError if an override can't be parsed.
@ -59,13 +73,13 @@ def parse_overrides(raw_overrides):
raw_keys, value = raw_override.split('=', 1)
parsed_overrides.append(
(
tuple(raw_keys.split('.')),
strip_section_names(tuple(raw_keys.split('.'))),
convert_value_type(value),
)
)
except ValueError:
raise ValueError(
f"Invalid override '{raw_override}'. Make sure you use the form: SECTION.OPTION=VALUE"
f"Invalid override '{raw_override}'. Make sure you use the form: OPTION=VALUE or OPTION.SUBOPTION=VALUE"
)
except ruamel.yaml.error.YAMLError as error:
raise ValueError(f"Invalid override '{raw_override}': {error.problem}")
@ -76,7 +90,7 @@ def parse_overrides(raw_overrides):
def apply_overrides(config, raw_overrides):
'''
Given a configuration dict and a sequence of configuration file override strings in the form of
"section.option=value", parse each override and set it the configuration dict.
"option.suboption=value", parse each override and set it the configuration dict.
'''
overrides = parse_overrides(raw_overrides)
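
Putting parse_overrides and strip_section_names together, a minimal sketch assuming ruamel.yaml is installed (per-override error handling omitted):

```python
import io

import ruamel.yaml

LEGACY_SECTION_NAMES = {'location', 'storage', 'retention', 'consistency', 'output', 'hooks'}

def parse_override(raw_override):
    raw_keys, value = raw_override.split('=', 1)
    keys = tuple(raw_keys.split('.'))
    # Strip a leading legacy section name, mirroring configuration normalization.
    if keys[0] in LEGACY_SECTION_NAMES:
        keys = keys[1:]
    # Values are parsed as YAML, so "5" becomes an integer, "[a, b]" a list, etc.
    return keys, ruamel.yaml.YAML(typ='safe').load(io.StringIO(value))

assert parse_override('storage.retries=5') == (('retries',), 5)
assert parse_override('keep_daily=7') == (('keep_daily',), 7)
```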

File diff suppressed because it is too large.


@ -71,18 +71,15 @@ def apply_logical_validation(config_filename, parsed_configuration):
below), run through any additional logical validation checks. If there are any such validation
problems, raise a Validation_error.
'''
location_repositories = parsed_configuration.get('location', {}).get('repositories')
check_repositories = parsed_configuration.get('consistency', {}).get('check_repositories', [])
repositories = parsed_configuration.get('repositories')
check_repositories = parsed_configuration.get('check_repositories', [])
for repository in check_repositories:
if not any(
repositories_match(repository, config_repository)
for config_repository in location_repositories
repositories_match(repository, config_repository) for config_repository in repositories
):
raise Validation_error(
config_filename,
(
f'Unknown repository in the "consistency" section\'s "check_repositories": {repository}',
),
(f'Unknown repository in "check_repositories": {repository}',),
)
@ -90,11 +87,15 @@ def parse_configuration(config_filename, schema_filename, overrides=None, resolv
'''
Given the path to a config filename in YAML format, the path to a schema filename in a YAML
rendition of JSON Schema format, a sequence of configuration file override strings in the form
of "section.option=value", return the parsed configuration as a data structure of nested dicts
of "option.suboption=value", return the parsed configuration as a data structure of nested dicts
and lists corresponding to the schema. Example return value:
{'location': {'source_directories': ['/home', '/etc'], 'repository': 'hostname.borg'},
'retention': {'keep_daily': 7}, 'consistency': {'checks': ['repository', 'archives']}}
{
'source_directories': ['/home', '/etc'],
'repository': 'hostname.borg',
'keep_daily': 7,
'checks': ['repository', 'archives'],
}
Also return a sequence of logging.LogRecord instances containing any warnings about the
configuration.
@ -174,7 +175,7 @@ def guard_configuration_contains_repository(repository, configurations):
tuple(
config_repository
for config in configurations.values()
for config_repository in config['location']['repositories']
for config_repository in config['repositories']
if repositories_match(config_repository, repository)
)
)
@ -198,7 +199,7 @@ def guard_single_repository_selected(repository, configurations):
tuple(
config_repository
for config in configurations.values()
for config_repository in config['location']['repositories']
for config_repository in config['repositories']
)
)


@ -241,13 +241,16 @@ def execute_command_and_capture_output(
shell=False,
extra_environment=None,
working_directory=None,
borg_local_path=None,
):
'''
Execute the given command (a sequence of command/argument strings), capturing and returning its
output (stdout). If capture stderr is True, then capture and return stderr in addition to
stdout. If shell is True, execute the command within a shell. If an extra environment dict is
given, then use it to augment the current environment, and pass the result into the command. If
a working directory is given, use that as the present working directory when running the command.
a working directory is given, use that as the present working directory when running the
command. If a Borg local path is given, and the command matches it (regardless of arguments),
treat exit code 1 as a warning instead of an error.
Raise subprocess.CalledProcessError if an error occurs while running the command.
'''
@ -264,7 +267,7 @@ def execute_command_and_capture_output(
cwd=working_directory,
)
except subprocess.CalledProcessError as error:
if exit_code_indicates_error(command, error.returncode):
if exit_code_indicates_error(command, error.returncode, borg_local_path):
raise
output = error.output
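
A sketch of the exit-code behavior described above; the helper name mirrors exit_code_indicates_error, but the body here is illustrative rather than the library's actual implementation:

```python
BORG_WARNING_EXIT_CODE = 1

def exit_code_indicates_error(command, exit_code, borg_local_path=None):
    # For Borg itself, exit code 1 only signals warnings, not a failure.
    if borg_local_path and command[0] == borg_local_path:
        return bool(exit_code) and exit_code != BORG_WARNING_EXIT_CODE
    return bool(exit_code)

assert not exit_code_indicates_error(('borg', 'list'), 1, borg_local_path='borg')
assert exit_code_indicates_error(('borg', 'list'), 2, borg_local_path='borg')
assert exit_code_indicates_error(('grep', 'pattern'), 1)
```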


@ -14,7 +14,7 @@ MONITOR_STATE_TO_CRONHUB = {
def initialize_monitor(
ping_url, config_filename, monitoring_log_level, dry_run
ping_url, config, config_filename, monitoring_log_level, dry_run
): # pragma: no cover
'''
No initialization is necessary for this monitor.
@ -22,7 +22,7 @@ def initialize_monitor(
pass
def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_run):
def ping_monitor(hook_config, config, config_filename, state, monitoring_log_level, dry_run):
'''
Ping the configured Cronhub URL, modified with the monitor.State. Use the given configuration
filename in any log entries. If this is a dry run, then don't actually ping anything.
@ -55,7 +55,7 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_
def destroy_monitor(
ping_url_or_uuid, config_filename, monitoring_log_level, dry_run
ping_url_or_uuid, config, config_filename, monitoring_log_level, dry_run
): # pragma: no cover
'''
No destruction is necessary for this monitor.


@ -14,7 +14,7 @@ MONITOR_STATE_TO_CRONITOR = {
def initialize_monitor(
ping_url, config_filename, monitoring_log_level, dry_run
ping_url, config, config_filename, monitoring_log_level, dry_run
): # pragma: no cover
'''
No initialization is necessary for this monitor.
@ -22,7 +22,7 @@ def initialize_monitor(
pass
def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_run):
def ping_monitor(hook_config, config, config_filename, state, monitoring_log_level, dry_run):
'''
Ping the configured Cronitor URL, modified with the monitor.State. Use the given configuration
filename in any log entries. If this is a dry run, then don't actually ping anything.
@ -50,7 +50,7 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_
def destroy_monitor(
ping_url_or_uuid, config_filename, monitoring_log_level, dry_run
ping_url_or_uuid, config, config_filename, monitoring_log_level, dry_run
): # pragma: no cover
'''
No destruction is necessary for this monitor.


@ -27,18 +27,17 @@ HOOK_NAME_TO_MODULE = {
}
def call_hook(function_name, hooks, log_prefix, hook_name, *args, **kwargs):
def call_hook(function_name, config, log_prefix, hook_name, *args, **kwargs):
'''
Given the hooks configuration dict and a prefix to use in log entries, call the requested
function of the Python module corresponding to the given hook name. Supply that call with the
configuration for this hook (if any), the log prefix, and any given args and kwargs. Return any
return value.
Given a configuration dict and a prefix to use in log entries, call the requested function of
the Python module corresponding to the given hook name. Supply that call with the configuration
for this hook (if any), the log prefix, and any given args and kwargs. Return any return value.
Raise ValueError if the hook name is unknown.
Raise AttributeError if the function name is not found in the module.
Raise anything else that the called function raises.
'''
config = hooks.get(hook_name, {})
hook_config = config.get(hook_name, {})
try:
module = HOOK_NAME_TO_MODULE[hook_name]
@ -46,15 +45,15 @@ def call_hook(function_name, hooks, log_prefix, hook_name, *args, **kwargs):
raise ValueError(f'Unknown hook name: {hook_name}')
logger.debug(f'{log_prefix}: Calling {hook_name} hook function {function_name}')
return getattr(module, function_name)(config, log_prefix, *args, **kwargs)
return getattr(module, function_name)(hook_config, config, log_prefix, *args, **kwargs)
def call_hooks(function_name, hooks, log_prefix, hook_names, *args, **kwargs):
def call_hooks(function_name, config, log_prefix, hook_names, *args, **kwargs):
'''
Given the hooks configuration dict and a prefix to use in log entries, call the requested
function of the Python module corresponding to each given hook name. Supply each call with the
configuration for that hook, the log prefix, and any given args and kwargs. Collect any return
values into a dict from hook name to return value.
Given a configuration dict and a prefix to use in log entries, call the requested function of
the Python module corresponding to each given hook name. Supply each call with the configuration
for that hook, the log prefix, and any given args and kwargs. Collect any return values into a
dict from hook name to return value.
If the hook name is not present in the hooks configuration, then don't call the function for it
and omit it from the return values.
@ -64,23 +63,23 @@ def call_hooks(function_name, hooks, log_prefix, hook_names, *args, **kwargs):
Raise anything else that a called function raises. An error stops calls to subsequent functions.
'''
return {
hook_name: call_hook(function_name, hooks, log_prefix, hook_name, *args, **kwargs)
hook_name: call_hook(function_name, config, log_prefix, hook_name, *args, **kwargs)
for hook_name in hook_names
if hooks.get(hook_name)
if config.get(hook_name)
}
def call_hooks_even_if_unconfigured(function_name, hooks, log_prefix, hook_names, *args, **kwargs):
def call_hooks_even_if_unconfigured(function_name, config, log_prefix, hook_names, *args, **kwargs):
'''
Given the hooks configuration dict and a prefix to use in log entries, call the requested
function of the Python module corresponding to each given hook name. Supply each call with the
configuration for that hook, the log prefix, and any given args and kwargs. Collect any return
values into a dict from hook name to return value.
Given a configuration dict and a prefix to use in log entries, call the requested function of
the Python module corresponding to each given hook name. Supply each call with the configuration
for that hook, the log prefix, and any given args and kwargs. Collect any return values into a
dict from hook name to return value.
Raise AttributeError if the function name is not found in the module.
Raise anything else that a called function raises. An error stops calls to subsequent functions.
'''
return {
hook_name: call_hook(function_name, hooks, log_prefix, hook_name, *args, **kwargs)
hook_name: call_hook(function_name, config, log_prefix, hook_name, *args, **kwargs)
for hook_name in hook_names
}

View File

@ -70,7 +70,7 @@ def format_buffered_logs_for_payload():
return payload
def initialize_monitor(hook_config, config_filename, monitoring_log_level, dry_run):
def initialize_monitor(hook_config, config, config_filename, monitoring_log_level, dry_run):
'''
Add a handler to the root logger that stores in memory the most recent logs emitted. That way,
we can send them all to Healthchecks upon a finish or failure state. But skip this if the
@ -90,7 +90,7 @@ def initialize_monitor(hook_config, config_filename, monitoring_log_level, dry_r
)
def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_run):
def ping_monitor(hook_config, config, config_filename, state, monitoring_log_level, dry_run):
'''
Ping the configured Healthchecks URL or UUID, modified with the monitor.State. Use the given
configuration filename in any log entries, and log to Healthchecks with the given log level.
@ -133,7 +133,7 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_
logger.warning(f'{config_filename}: Healthchecks error: {error}')
def destroy_monitor(hook_config, config_filename, monitoring_log_level, dry_run):
def destroy_monitor(hook_config, config, config_filename, monitoring_log_level, dry_run):
'''
Remove the monitor handler that was added to the root logger. This prevents the handler from
getting reused by other instances of this monitor.

View File

@ -6,21 +6,20 @@ from borgmatic.hooks import dump
logger = logging.getLogger(__name__)
def make_dump_path(location_config): # pragma: no cover
def make_dump_path(config): # pragma: no cover
'''
Make the dump path from the given location configuration and the name of this hook.
Make the dump path from the given configuration dict and the name of this hook.
'''
return dump.make_database_dump_path(
location_config.get('borgmatic_source_directory'), 'mongodb_databases'
config.get('borgmatic_source_directory'), 'mongodb_databases'
)
def dump_databases(databases, log_prefix, location_config, dry_run):
def dump_databases(databases, config, log_prefix, dry_run):
'''
Dump the given MongoDB databases to a named pipe. The databases are supplied as a sequence of
dicts, one dict describing each database as per the configuration schema. Use the given log
prefix in any log entries. Use the given location configuration dict to construct the
destination path.
dicts, one dict describing each database as per the configuration schema. Use the configuration
dict to construct the destination path and the given log prefix in any log entries.
Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
@ -33,7 +32,7 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
for database in databases:
name = database['name']
dump_filename = dump.make_database_dump_filename(
make_dump_path(location_config), name, database.get('hostname')
make_dump_path(config), name, database.get('hostname')
)
dump_format = database.get('format', 'archive')
@ -82,47 +81,53 @@ def build_dump_command(database, dump_filename, dump_format):
return command
def remove_database_dumps(databases, log_prefix, location_config, dry_run): # pragma: no cover
def remove_database_dumps(databases, config, log_prefix, dry_run): # pragma: no cover
'''
Remove all database dump files for this hook regardless of the given databases. Use the log
prefix in any log entries. Use the given location configuration dict to construct the
destination path. If this is a dry run, then don't actually remove anything.
prefix in any log entries. Use the given configuration dict to construct the destination path.
If this is a dry run, then don't actually remove anything.
'''
dump.remove_database_dumps(make_dump_path(location_config), 'MongoDB', log_prefix, dry_run)
dump.remove_database_dumps(make_dump_path(config), 'MongoDB', log_prefix, dry_run)
def make_database_dump_pattern(
databases, log_prefix, location_config, name=None
): # pragma: no cover
def make_database_dump_pattern(databases, config, log_prefix, name=None): # pragma: no cover
'''
Given a sequence of configurations dicts, a prefix to log with, a location configuration dict,
Given a sequence of database configuration dicts, a configuration dict, a prefix to log with,
and a database name to match, return the corresponding glob patterns to match the database dump
in an archive.
'''
return dump.make_database_dump_filename(make_dump_path(location_config), name, hostname='*')
return dump.make_database_dump_filename(make_dump_path(config), name, hostname='*')
def restore_database_dump(
database_config, log_prefix, location_config, dry_run, extract_process, connection_params
databases_config, config, log_prefix, database_name, dry_run, extract_process, connection_params
):
'''
Restore the given MongoDB database from an extract stream. The database is supplied as a
one-element sequence containing a dict describing the database, as per the configuration schema.
Use the given log prefix in any log entries. If this is a dry run, then don't actually restore
anything. Trigger the given active extract process (an instance of subprocess.Popen) to produce
output to consume.
Restore the given MongoDB database from an extract stream. The databases are supplied as a
sequence containing one dict describing each database (as per the configuration schema), but
only the database corresponding to the given database name is restored. Use the configuration
dict to construct the destination path and the given log prefix in any log entries. If this is a
dry run, then don't actually restore anything. Trigger the given active extract process (an
instance of subprocess.Popen) to produce output to consume.
If the extract process is None, then restore the dump from the filesystem rather than from an
extract stream.
'''
dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else ''
if len(database_config) != 1:
raise ValueError('The database configuration value is invalid')
try:
database = next(
database_config
for database_config in databases_config
if database_config.get('name') == database_name
)
except StopIteration:
raise ValueError(
f'A database named "{database_name}" could not be found in the configuration'
)
database = database_config[0]
dump_filename = dump.make_database_dump_filename(
make_dump_path(location_config), database['name'], database.get('hostname')
make_dump_path(config), database['name'], database.get('hostname')
)
restore_command = build_restore_command(
extract_process, database, dump_filename, connection_params

View File

@ -12,13 +12,11 @@ from borgmatic.hooks import dump
logger = logging.getLogger(__name__)
def make_dump_path(location_config): # pragma: no cover
def make_dump_path(config): # pragma: no cover
'''
Make the dump path from the given location configuration and the name of this hook.
Make the dump path from the given configuration dict and the name of this hook.
'''
return dump.make_database_dump_path(
location_config.get('borgmatic_source_directory'), 'mysql_databases'
)
return dump.make_database_dump_path(config.get('borgmatic_source_directory'), 'mysql_databases')
SYSTEM_DATABASE_NAMES = ('information_schema', 'mysql', 'performance_schema', 'sys')
@ -106,12 +104,11 @@ def execute_dump_command(
)
def dump_databases(databases, log_prefix, location_config, dry_run):
def dump_databases(databases, config, log_prefix, dry_run):
'''
Dump the given MySQL/MariaDB databases to a named pipe. The databases are supplied as a sequence
of dicts, one dict describing each database as per the configuration schema. Use the given log
prefix in any log entries. Use the given location configuration dict to construct the
destination path.
of dicts, one dict describing each database as per the configuration schema. Use the given
configuration dict to construct the destination path and the given log prefix in any log entries.
Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
@ -122,7 +119,7 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
logger.info(f'{log_prefix}: Dumping MySQL databases{dry_run_label}')
for database in databases:
dump_path = make_dump_path(location_config)
dump_path = make_dump_path(config)
extra_environment = {'MYSQL_PWD': database['password']} if 'password' in database else None
dump_database_names = database_names_to_dump(
database, extra_environment, log_prefix, dry_run
@ -165,42 +162,46 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
return [process for process in processes if process]
def remove_database_dumps(databases, log_prefix, location_config, dry_run): # pragma: no cover
def remove_database_dumps(databases, config, log_prefix, dry_run): # pragma: no cover
'''
Remove all database dump files for this hook regardless of the given databases. Use the log
prefix in any log entries. Use the given location configuration dict to construct the
destination path. If this is a dry run, then don't actually remove anything.
Remove all database dump files for this hook regardless of the given databases. Use the given
configuration dict to construct the destination path and the log prefix in any log entries. If
this is a dry run, then don't actually remove anything.
'''
dump.remove_database_dumps(make_dump_path(location_config), 'MySQL', log_prefix, dry_run)
dump.remove_database_dumps(make_dump_path(config), 'MySQL', log_prefix, dry_run)
def make_database_dump_pattern(
databases, log_prefix, location_config, name=None
): # pragma: no cover
def make_database_dump_pattern(databases, config, log_prefix, name=None): # pragma: no cover
'''
Given a sequence of configurations dicts, a prefix to log with, a location configuration dict,
and a database name to match, return the corresponding glob patterns to match the database dump
in an archive.
Given a sequence of configuration dicts, a configuration dict, a prefix to log with, and a
database name to match, return the corresponding glob patterns to match the database dump in an
archive.
'''
return dump.make_database_dump_filename(make_dump_path(location_config), name, hostname='*')
return dump.make_database_dump_filename(make_dump_path(config), name, hostname='*')
def restore_database_dump(
database_config, log_prefix, location_config, dry_run, extract_process, connection_params
databases_config, config, log_prefix, database_name, dry_run, extract_process, connection_params
):
'''
Restore the given MySQL/MariaDB database from an extract stream. The database is supplied as a
one-element sequence containing a dict describing the database, as per the configuration schema.
Use the given log prefix in any log entries. If this is a dry run, then don't actually restore
anything. Trigger the given active extract process (an instance of subprocess.Popen) to produce
output to consume.
Restore the given MySQL/MariaDB database from an extract stream. The databases are supplied as a
sequence containing one dict describing each database (as per the configuration schema), but
only the database corresponding to the given database name is restored. Use the given log
prefix in any log entries. If this is a dry run, then don't actually restore anything. Trigger
the given active extract process (an instance of subprocess.Popen) to produce output to consume.
'''
dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else ''
if len(database_config) != 1:
raise ValueError('The database configuration value is invalid')
database = database_config[0]
try:
database = next(
database_config
for database_config in databases_config
if database_config.get('name') == database_name
)
except StopIteration:
raise ValueError(
f'A database named "{database_name}" could not be found in the configuration'
)
hostname = connection_params['hostname'] or database.get(
'restore_hostname', database.get('hostname')

View File

@ -6,7 +6,7 @@ logger = logging.getLogger(__name__)
def initialize_monitor(
ping_url, config_filename, monitoring_log_level, dry_run
ping_url, config, config_filename, monitoring_log_level, dry_run
): # pragma: no cover
'''
No initialization is necessary for this monitor.
@ -14,7 +14,7 @@ def initialize_monitor(
pass
def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_run):
def ping_monitor(hook_config, config, config_filename, state, monitoring_log_level, dry_run):
'''
Ping the configured Ntfy topic. Use the given configuration filename in any log entries.
If this is a dry run, then don't actually ping anything.
@ -75,7 +75,7 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_
def destroy_monitor(
ping_url_or_uuid, config_filename, monitoring_log_level, dry_run
ping_url_or_uuid, config, config_filename, monitoring_log_level, dry_run
): # pragma: no cover
'''
No destruction is necessary for this monitor.

View File

@ -13,7 +13,7 @@ EVENTS_API_URL = 'https://events.pagerduty.com/v2/enqueue'
def initialize_monitor(
integration_key, config_filename, monitoring_log_level, dry_run
integration_key, config, config_filename, monitoring_log_level, dry_run
): # pragma: no cover
'''
No initialization is necessary for this monitor.
@ -21,7 +21,7 @@ def initialize_monitor(
pass
def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_run):
def ping_monitor(hook_config, config, config_filename, state, monitoring_log_level, dry_run):
'''
If this is an error state, create a PagerDuty event with the configured integration key. Use
the given configuration filename in any log entries. If this is a dry run, then don't actually
@ -75,7 +75,7 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_
def destroy_monitor(
ping_url_or_uuid, config_filename, monitoring_log_level, dry_run
ping_url_or_uuid, config, config_filename, monitoring_log_level, dry_run
): # pragma: no cover
'''
No destruction is necessary for this monitor.

View File

@ -14,12 +14,12 @@ from borgmatic.hooks import dump
logger = logging.getLogger(__name__)
def make_dump_path(location_config): # pragma: no cover
def make_dump_path(config): # pragma: no cover
'''
Make the dump path from the given location configuration and the name of this hook.
Make the dump path from the given configuration dict and the name of this hook.
'''
return dump.make_database_dump_path(
location_config.get('borgmatic_source_directory'), 'postgresql_databases'
config.get('borgmatic_source_directory'), 'postgresql_databases'
)
@ -92,12 +92,12 @@ def database_names_to_dump(database, extra_environment, log_prefix, dry_run):
)
def dump_databases(databases, log_prefix, location_config, dry_run):
def dump_databases(databases, config, log_prefix, dry_run):
'''
Dump the given PostgreSQL databases to a named pipe. The databases are supplied as a sequence of
dicts, one dict describing each database as per the configuration schema. Use the given log
prefix in any log entries. Use the given location configuration dict to construct the
destination path.
dicts, one dict describing each database as per the configuration schema. Use the given
configuration dict to construct the destination path and the given log prefix in any log
entries.
Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
@ -111,7 +111,7 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
for database in databases:
extra_environment = make_extra_environment(database)
dump_path = make_dump_path(location_config)
dump_path = make_dump_path(config)
dump_database_names = database_names_to_dump(
database, extra_environment, log_prefix, dry_run
)
@ -183,35 +183,34 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
return processes
def remove_database_dumps(databases, log_prefix, location_config, dry_run): # pragma: no cover
def remove_database_dumps(databases, config, log_prefix, dry_run): # pragma: no cover
'''
Remove all database dump files for this hook regardless of the given databases. Use the log
prefix in any log entries. Use the given location configuration dict to construct the
destination path. If this is a dry run, then don't actually remove anything.
Remove all database dump files for this hook regardless of the given databases. Use the given
configuration dict to construct the destination path and the log prefix in any log entries. If
this is a dry run, then don't actually remove anything.
'''
dump.remove_database_dumps(make_dump_path(location_config), 'PostgreSQL', log_prefix, dry_run)
dump.remove_database_dumps(make_dump_path(config), 'PostgreSQL', log_prefix, dry_run)
def make_database_dump_pattern(
databases, log_prefix, location_config, name=None
): # pragma: no cover
def make_database_dump_pattern(databases, config, log_prefix, name=None): # pragma: no cover
'''
Given a sequence of configurations dicts, a prefix to log with, a location configuration dict,
and a database name to match, return the corresponding glob patterns to match the database dump
in an archive.
Given a sequence of configuration dicts, a configuration dict, a prefix to log with, and a
database name to match, return the corresponding glob patterns to match the database dump in an
archive.
'''
return dump.make_database_dump_filename(make_dump_path(location_config), name, hostname='*')
return dump.make_database_dump_filename(make_dump_path(config), name, hostname='*')
def restore_database_dump(
database_config, log_prefix, location_config, dry_run, extract_process, connection_params
databases_config, config, log_prefix, database_name, dry_run, extract_process, connection_params
):
'''
Restore the given PostgreSQL database from an extract stream. The database is supplied as a
one-element sequence containing a dict describing the database, as per the configuration schema.
Use the given log prefix in any log entries. If this is a dry run, then don't actually restore
anything. Trigger the given active extract process (an instance of subprocess.Popen) to produce
output to consume.
Restore the given PostgreSQL database from an extract stream. The databases are supplied as a
sequence containing one dict describing each database (as per the configuration schema), but
only the database corresponding to the given database name is restored. Use the given
configuration dict to construct the destination path and the given log prefix in any log
entries. If this is a dry run, then don't actually restore anything. Trigger the given active
extract process (an instance of subprocess.Popen) to produce output to consume.
If the extract process is None, then restore the dump from the filesystem rather than from an
extract stream.
@ -221,10 +220,16 @@ def restore_database_dump(
'''
dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else ''
if len(database_config) != 1:
raise ValueError('The database configuration value is invalid')
database = database_config[0]
try:
database = next(
database_config
for database_config in databases_config
if database_config.get('name') == database_name
)
except StopIteration:
raise ValueError(
f'A database named "{database_name}" could not be found in the configuration'
)
hostname = connection_params['hostname'] or database.get(
'restore_hostname', database.get('hostname')
@ -236,7 +241,7 @@ def restore_database_dump(
all_databases = bool(database['name'] == 'all')
dump_filename = dump.make_database_dump_filename(
make_dump_path(location_config), database['name'], database.get('hostname')
make_dump_path(config), database['name'], database.get('hostname')
)
psql_command = shlex.split(database.get('psql_command') or 'psql')
analyze_command = (
@ -264,7 +269,7 @@ def restore_database_dump(
+ (() if extract_process else (dump_filename,))
+ tuple(
itertools.chain.from_iterable(('--schema', schema) for schema in database['schemas'])
if database['schemas']
if database.get('schemas')
else ()
)
)

View File

@ -7,21 +7,21 @@ from borgmatic.hooks import dump
logger = logging.getLogger(__name__)
def make_dump_path(location_config): # pragma: no cover
def make_dump_path(config): # pragma: no cover
'''
Make the dump path from the given location configuration and the name of this hook.
Make the dump path from the given configuration dict and the name of this hook.
'''
return dump.make_database_dump_path(
location_config.get('borgmatic_source_directory'), 'sqlite_databases'
config.get('borgmatic_source_directory'), 'sqlite_databases'
)
def dump_databases(databases, log_prefix, location_config, dry_run):
def dump_databases(databases, config, log_prefix, dry_run):
'''
Dump the given SQLite3 databases to a file. The databases are supplied as a sequence of
configuration dicts, as per the configuration schema. Use the given log prefix in any log
entries. Use the given location configuration dict to construct the destination path. If this
is a dry run, then don't actually dump anything.
configuration dicts, as per the configuration schema. Use the given configuration dict to
construct the destination path and the given log prefix in any log entries. If this is a dry
run, then don't actually dump anything.
'''
dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
processes = []
@ -38,7 +38,7 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
f'{log_prefix}: No SQLite database at {database_path}; An empty database will be created and dumped'
)
dump_path = make_dump_path(location_config)
dump_path = make_dump_path(config)
dump_filename = dump.make_database_dump_filename(dump_path, database['name'])
if os.path.exists(dump_filename):
logger.warning(
@ -65,43 +65,49 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
return processes
def remove_database_dumps(databases, log_prefix, location_config, dry_run): # pragma: no cover
def remove_database_dumps(databases, config, log_prefix, dry_run): # pragma: no cover
'''
Remove the given SQLite3 database dumps from the filesystem. The databases are supplied as a
sequence of configuration dicts, as per the configuration schema. Use the given log prefix in
any log entries. Use the given location configuration dict to construct the destination path.
If this is a dry run, then don't actually remove anything.
sequence of configuration dicts, as per the configuration schema. Use the given configuration
dict to construct the destination path and the given log prefix in any log entries. If this is a
dry run, then don't actually remove anything.
'''
dump.remove_database_dumps(make_dump_path(location_config), 'SQLite', log_prefix, dry_run)
dump.remove_database_dumps(make_dump_path(config), 'SQLite', log_prefix, dry_run)
def make_database_dump_pattern(
databases, log_prefix, location_config, name=None
): # pragma: no cover
def make_database_dump_pattern(databases, config, log_prefix, name=None): # pragma: no cover
'''
Make a pattern that matches the given SQLite3 databases. The databases are supplied as a
sequence of configuration dicts, as per the configuration schema.
'''
return dump.make_database_dump_filename(make_dump_path(location_config), name)
return dump.make_database_dump_filename(make_dump_path(config), name)
def restore_database_dump(
database_config, log_prefix, location_config, dry_run, extract_process, connection_params
databases_config, config, log_prefix, database_name, dry_run, extract_process, connection_params
):
'''
Restore the given SQLite3 database from an extract stream. The database is supplied as a
one-element sequence containing a dict describing the database, as per the configuration schema.
Use the given log prefix in any log entries. If this is a dry run, then don't actually restore
anything. Trigger the given active extract process (an instance of subprocess.Popen) to produce
output to consume.
Restore the given SQLite3 database from an extract stream. The databases are supplied as a
sequence containing one dict describing each database (as per the configuration schema), but
only the database corresponding to the given database name is restored. Use the given log prefix
in any log entries. If this is a dry run, then don't actually restore anything. Trigger the
given active extract process (an instance of subprocess.Popen) to produce output to consume.
'''
dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else ''
if len(database_config) != 1:
raise ValueError('The database configuration value is invalid')
try:
database = next(
database_config
for database_config in databases_config
if database_config.get('name') == database_name
)
except StopIteration:
raise ValueError(
f'A database named "{database_name}" could not be found in the configuration'
)
database_path = connection_params['restore_path'] or database_config[0].get(
'restore_path', database_config[0].get('path')
database_path = connection_params['restore_path'] or database.get(
'restore_path', database.get('path')
)
logger.debug(f'{log_prefix}: Restoring SQLite database at {database_path}{dry_run_label}')

View File

@ -1,5 +1,5 @@
<h2>Improve this documentation</h2>
<p>Have an idea on how to make this documentation even better? Use our <a
href="https://projects.torsion.org/borgmatic-collective/borgmatic/issues">issue tracker</a> to send your
feedback!</p>
href="https://torsion.org/borgmatic/#support-and-contributing">issue
tracker</a> to send your feedback!</p>

View File

@ -21,11 +21,10 @@ running backups, and specify `after_backup` hooks to perform cleanup steps
afterwards. Here's an example:
```yaml
hooks:
before_backup:
- mount /some/filesystem
after_backup:
- umount /some/filesystem
before_backup:
- mount /some/filesystem
after_backup:
- umount /some/filesystem
```
If your command contains a special YAML character such as a colon, you may
@ -33,11 +32,23 @@ need to quote the entire string (or use a [multiline
string](https://yaml-multiline.info/)) to avoid an error:
```yaml
hooks:
before_backup:
- "echo Backup: start"
before_backup:
- "echo Backup: start"
```
There are additional hooks that run before/after other actions as well. For
instance, `before_prune` runs before a `prune` action for a repository, while
`after_prune` runs after it.
<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
these options in the `hooks:` section of your configuration.
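For example, here's a sketch of prune hooks (the commands shown are just
illustrative placeholders):

```yaml
before_prune:
    - echo "Pruning is starting."
after_prune:
    - echo "Pruning is done."
```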
<span class="minilink minilink-addedin">New in version 1.7.0</span> The
`before_actions` and `after_actions` hooks run before/after all the actions
(like `create`, `prune`, etc.) for each repository. These hooks are a good
place to run per-repository steps like mounting/unmounting a remote
filesystem.
<span class="minilink minilink-addedin">New in version 1.6.0</span> The
`before_backup` and `after_backup` hooks each run once per repository in a
configuration file. `before_backup` hooks run right before the `create`
@ -46,16 +57,6 @@ but not if an error occurs in a previous hook or in the backups themselves.
(Prior to borgmatic 1.6.0, these hooks instead ran once per configuration file
rather than once per repository.)
There are additional hooks that run before/after other actions as well. For
instance, `before_prune` runs before a `prune` action for a repository, while
`after_prune` runs after it.
<span class="minilink minilink-addedin">New in version 1.7.0</span> The
`before_actions` and `after_actions` hooks run before/after all the actions
(like `create`, `prune`, etc.) for each repository. These hooks are a good
place to run per-repository steps like mounting/unmounting a remote
filesystem.
## Variable interpolation
@ -64,11 +65,13 @@ variables into the hook command. Here's an example that assumes you provide a
separate shell script:
```yaml
hooks:
after_prune:
- record-prune.sh "{configuration_filename}" "{repository}"
after_prune:
- record-prune.sh "{configuration_filename}" "{repository}"
```
<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
this option in the `hooks:` section of your configuration.
In this example, when the hook is triggered, borgmatic interpolates runtime
values into the hook command: the borgmatic configuration filename and the
paths of the current Borg repository. Here's the full set of supported
@ -92,13 +95,15 @@ You can also use `before_everything` and `after_everything` hooks to perform
global setup or cleanup:
```yaml
hooks:
before_everything:
- set-up-stuff-globally
after_everything:
- clean-up-stuff-globally
before_everything:
- set-up-stuff-globally
after_everything:
- clean-up-stuff-globally
```
<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
these options in the `hooks:` section of your configuration.
`before_everything` hooks collected from all borgmatic configuration files run
once before all configuration files (prior to all actions), but only if there
is a `create` action. An error encountered during a `before_everything` hook
@ -109,6 +114,7 @@ but only if there is a `create` action. It runs even if an error occurs during
a backup or a backup hook, but not if an error occurs during a
`before_everything` hook.
## Error hooks
borgmatic also runs `on_error` hooks if an error occurs, either when creating
@ -116,6 +122,7 @@ a backup or running a backup hook. See the [monitoring and alerting
documentation](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/)
for more information.
## Hook output
Any output produced by your hooks shows up both at the console and in syslog
@ -123,6 +130,7 @@ Any output produced by your hooks shows up both at the console and in syslog
href="https://torsion.org/borgmatic/docs/how-to/inspect-your-backups/">inspecting
your backups</a>.
## Security
An important security note about hooks: borgmatic executes all hook commands

View File

@ -44,14 +44,16 @@ file](https://torsion.org/borgmatic/docs/how-to/make-per-application-backups/),
say at `/etc/borgmatic.d/removable.yaml`:
```yaml
location:
source_directories:
- /home
source_directories:
- /home
repositories:
- path: /mnt/removable/backup.borg
repositories:
- path: /mnt/removable/backup.borg
```
<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
these options in the `location:` section of your configuration.
<span class="minilink minilink-addedin">Prior to version 1.7.10</span> Omit
the `path:` portion of the `repositories` list.
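For reference, here's a sketch of that older form, with the `path:` portion
omitted (repository path illustrative):

```yaml
repositories:
    - /mnt/removable/backup.borg
```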
@ -60,11 +62,13 @@ the external `findmnt` utility to see whether the drive is mounted before
proceeding.
```yaml
hooks:
before_backup:
- findmnt /mnt/removable > /dev/null || exit 75
before_backup:
- findmnt /mnt/removable > /dev/null || exit 75
```
<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put this
option in the `hooks:` section of your configuration.
What this does is check if the `findmnt` command errors when probing for a
particular mount point. If it does error, then it returns exit code 75 to
borgmatic. borgmatic logs the soft failure, skips all further actions in that
@ -77,27 +81,21 @@ optionally using `before_actions` instead.
You can imagine a similar check for the sometimes-online server case:
```yaml
location:
source_directories:
- /home
source_directories:
- /home
repositories:
- path: ssh://me@buddys-server.org/./backup.borg
repositories:
- path: ssh://me@buddys-server.org/./backup.borg
hooks:
before_backup:
- ping -q -c 1 buddys-server.org > /dev/null || exit 75
before_backup:
- ping -q -c 1 buddys-server.org > /dev/null || exit 75
```
<span class="minilink minilink-addedin">Prior to version 1.7.10</span> Omit
the `path:` portion of the `repositories` list.
Or to only run backups if the battery level is high enough:
```yaml
hooks:
before_backup:
- is_battery_percent_at_least.sh 25
before_backup:
- is_battery_percent_at_least.sh 25
```
(Writing the battery script is left as an exercise to the reader.)

View File

@ -18,31 +18,32 @@ prior to running backups. For example, here is everything you need to dump and
backup a couple of local PostgreSQL databases and a MySQL/MariaDB database.
```yaml
hooks:
postgresql_databases:
- name: users
- name: orders
mysql_databases:
- name: posts
postgresql_databases:
- name: users
- name: orders
mysql_databases:
- name: posts
```
<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
these and other database options in the `hooks:` section of your
configuration.
<span class="minilink minilink-addedin">New in version 1.5.22</span> You can
also dump MongoDB databases. For example:
```yaml
hooks:
mongodb_databases:
- name: messages
mongodb_databases:
- name: messages
```
<span class="minilink minilink-addedin">New in version 1.7.9</span>
Additionally, you can dump SQLite databases. For example:
```yaml
hooks:
sqlite_databases:
- name: mydb
path: /var/lib/sqlite3/mydb.sqlite
sqlite_databases:
- name: mydb
path: /var/lib/sqlite3/mydb.sqlite
```
As part of each backup, borgmatic streams a database dump for each configured
@ -54,7 +55,7 @@ temporary disk space.)
To support this, borgmatic creates temporary named pipes in `~/.borgmatic` by
default. To customize this path, set the `borgmatic_source_directory` option
in the `location` section of borgmatic's configuration.
in borgmatic's configuration.
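For instance, here's a minimal sketch (the path shown is just an example):

```yaml
borgmatic_source_directory: /tmp/borgmatic
```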
Also note that using a database hook implicitly enables both the
`read_special` and `one_file_system` configuration settings (even if they're
@ -64,35 +65,34 @@ See Limitations below for more on this.
Here's a more involved example that connects to remote databases:
```yaml
hooks:
postgresql_databases:
- name: users
hostname: database1.example.org
- name: orders
hostname: database2.example.org
port: 5433
username: postgres
password: trustsome1
format: tar
options: "--role=someone"
mysql_databases:
- name: posts
hostname: database3.example.org
port: 3307
username: root
password: trustsome1
options: "--skip-comments"
mongodb_databases:
- name: messages
hostname: database4.example.org
port: 27018
username: dbuser
password: trustsome1
authentication_database: mongousers
options: "--ssl"
sqlite_databases:
- name: mydb
path: /var/lib/sqlite3/mydb.sqlite
postgresql_databases:
- name: users
hostname: database1.example.org
- name: orders
hostname: database2.example.org
port: 5433
username: postgres
password: trustsome1
format: tar
options: "--role=someone"
mysql_databases:
- name: posts
hostname: database3.example.org
port: 3307
username: root
password: trustsome1
options: "--skip-comments"
mongodb_databases:
- name: messages
hostname: database4.example.org
port: 27018
username: dbuser
password: trustsome1
authentication_database: mongousers
options: "--ssl"
sqlite_databases:
- name: mydb
path: /var/lib/sqlite3/mydb.sqlite
```
See your [borgmatic configuration
@ -106,13 +106,12 @@ listing databases, restoring databases, etc.).
If you want to dump all databases on a host, use `all` for the database name:
```yaml
hooks:
postgresql_databases:
- name: all
mysql_databases:
- name: all
mongodb_databases:
- name: all
postgresql_databases:
- name: all
mysql_databases:
- name: all
mongodb_databases:
- name: all
```
Note that you may need to use a `username` of the `postgres` superuser for
@ -120,6 +119,9 @@ this to work with PostgreSQL.
The SQLite hook in particular does not consider "all" a special database name.
<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
these options in the `hooks:` section of your configuration.
<span class="minilink minilink-addedin">New in version 1.7.6</span> With
PostgreSQL and MySQL, you can optionally dump "all" databases to separate
files instead of one combined dump file, allowing more convenient restores of
@ -127,13 +129,12 @@ individual databases. Enable this by specifying your desired database dump
`format`:
```yaml
hooks:
postgresql_databases:
- name: all
format: custom
mysql_databases:
- name: all
format: sql
postgresql_databases:
- name: all
format: custom
mysql_databases:
- name: all
format: sql
```
### Containers
@ -143,15 +144,17 @@ problem—configure borgmatic to connect to the container's name on its exposed
port. For instance:
```yaml
hooks:
postgresql_databases:
- name: users
hostname: your-database-container-name
port: 5433
username: postgres
password: trustsome1
postgresql_databases:
- name: users
hostname: your-database-container-name
port: 5433
username: postgres
password: trustsome1
```
<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
these options in the `hooks:` section of your configuration.
But what if borgmatic is running on the host? You can still connect to a
database container if its ports are properly exposed to the host. For
instance, when running the database container, you can specify `--publish
@ -179,8 +182,7 @@ hooks:
password: trustsome1
```
You can alter the ports in these examples to suit your particular database
system.
Alter the ports in these examples to suit your particular database system.
### No source directories
@ -196,6 +198,7 @@ it is a mandatory option there:
```yaml
location:
source_directories: []
hooks:
mysql_databases:
- name: all
@ -292,7 +295,7 @@ restore one of them, use the `--database` flag to select one or more
databases. For instance:
```bash
borgmatic restore --archive host-2023-... --database users
borgmatic restore --archive host-2023-... --database users --database orders
```
<span class="minilink minilink-addedin">New in version 1.7.6</span> You can
@ -427,10 +430,9 @@ You can add any additional flags to the `options:` in your database
configuration. Here's an example:
```yaml
hooks:
mysql_databases:
- name: posts
options: "--single-transaction --quick"
mysql_databases:
- name: posts
options: "--single-transaction --quick"
```
### borgmatic hangs during backup

View File

@ -65,19 +65,20 @@ configure borgmatic to run repository checks only. Configure this in the
borgmatic configuration:
```yaml
consistency:
checks:
- name: repository
checks:
- name: repository
```
<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
this option in the `consistency:` section of your configuration.
<span class="minilink minilink-addedin">Prior to version 1.6.2</span> The
`checks` option was a plain list of strings without the `name:` part, and
borgmatic ran each configured check every time checks were run. For example:
```yaml
consistency:
checks:
- repository
checks:
- repository
```
@ -103,14 +104,16 @@ optionally configure checks to run on a periodic basis rather than every time
borgmatic runs checks. For instance:
```yaml
consistency:
checks:
- name: repository
frequency: 2 weeks
- name: archives
frequency: 1 month
checks:
- name: repository
frequency: 2 weeks
- name: archives
frequency: 1 month
```
<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
this option in the `consistency:` section of your configuration.
This tells borgmatic to run the `repository` consistency check at most once
every two weeks for a given repository and the `archives` check at most once a
month. The `frequency` value is a number followed by a unit of time, e.g. "3
@ -162,18 +165,19 @@ either for a single repository or for all repositories.
Disabling all consistency checks looks like this:
```yaml
consistency:
checks:
- name: disabled
checks:
- name: disabled
```
<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
this option in the `consistency:` section of your configuration.
<span class="minilink minilink-addedin">Prior to version 1.6.2</span> `checks`
was a plain list of strings without the `name:` part. For instance:
```yaml
consistency:
checks:
- disabled
checks:
- disabled
```
If you have multiple repositories in your borgmatic configuration file,
@ -181,9 +185,8 @@ you can keep running consistency checks, but only against a subset of the
repositories:
```yaml
consistency:
check_repositories:
- path/of/repository_to_check.borg
check_repositories:
- path/of/repository_to_check.borg
```
Finally, you can override your configuration file's consistency checks, and

View File

@ -7,7 +7,7 @@ eleventyNavigation:
---
## Source code
To get set up to hack on borgmatic, first clone it via HTTPS or SSH:
To get set up to develop on borgmatic, first clone it via HTTPS or SSH:
```bash
git clone https://projects.torsion.org/borgmatic-collective/borgmatic.git
@ -21,8 +21,8 @@ git clone ssh://git@projects.torsion.org:3022/borgmatic-collective/borgmatic.git
Then, install borgmatic
"[editable](https://pip.pypa.io/en/stable/cli/pip_install/#editable-installs)"
so that you can run borgmatic commands while you're hacking on them to
make sure your changes work.
so that you can run borgmatic actions during development to make sure your
changes work.
```bash
cd borgmatic

View File

@ -65,7 +65,7 @@ everything from an archive. To do that, tack on one or more `--path` values.
For instance:
```bash
borgmatic extract --archive latest --path path/1 path/2
borgmatic extract --archive latest --path path/1 --path path/2
```
Note that the specified restore paths should not have a leading slash. Like a

View File

@ -60,7 +60,7 @@ with `--format`. Refer to the [borg list --format
documentation](https://borgbackup.readthedocs.io/en/stable/usage/list.html#the-format-specifier-syntax)
for available values.
*(No borgmatic `list` or `info` actions? Upgrade borgmatic!)*
(No borgmatic `list` or `info` actions? Upgrade borgmatic!)
<span class="minilink minilink-addedin">New in borgmatic version 1.7.0</span>
There are also `rlist` and `rinfo` actions for displaying repository

View File

@ -12,18 +12,20 @@ it. borgmatic supports this in its configuration by specifying multiple backup
repositories. Here's an example:
```yaml
location:
# List of source directories to backup.
source_directories:
- /home
- /etc
# List of source directories to backup.
source_directories:
- /home
- /etc
# Paths of local or remote repositories to backup to.
repositories:
- path: ssh://k8pDxu32@k8pDxu32.repo.borgbase.com/./repo
- path: /var/lib/backups/local.borg
# Paths of local or remote repositories to backup to.
repositories:
- path: ssh://k8pDxu32@k8pDxu32.repo.borgbase.com/./repo
- path: /var/lib/backups/local.borg
```
<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
these options in the `location:` section of your configuration.
<span class="minilink minilink-addedin">Prior to version 1.7.10</span> Omit
the `path:` portion of the `repositories` list.

View File

@ -74,14 +74,15 @@ and borgmatic uses that format to name any new archive it creates. For
instance:
```yaml
storage:
...
archive_name_format: home-directories-{now}
archive_name_format: home-directories-{now}
```
This means that when borgmatic creates an archive, its name will start with
the string `home-directories-` and end with a timestamp for its creation time.
If `archive_name_format` is unspecified, the default is
<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
this option in the `storage:` section of your configuration.
This example means that when borgmatic creates an archive, its name will start
with the string `home-directories-` and end with a timestamp for its creation
time. If `archive_name_format` is unspecified, the default is
`{hostname}-{now:%Y-%m-%dT%H:%M:%S.%f}`, meaning your system hostname plus a
timestamp in a particular format.
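For illustration, configuring that default explicitly would look like this
(equivalent to leaving the option unset):

```yaml
archive_name_format: '{hostname}-{now:%Y-%m-%dT%H:%M:%S.%f}'
```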
@ -103,11 +104,12 @@ to filter archives when running supported actions.
For instance, let's say that you have this in your configuration:
```yaml
storage:
...
archive_name_format: {hostname}-user-data-{now}
archive_name_format: '{hostname}-user-data-{now}'
```
<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
this option in the `storage:` section of your configuration.
borgmatic considers `{now}` an ephemeral data placeholder that will probably
change per archive, while `{hostname}` won't. So it turns the example value
into `{hostname}-user-data-*` and applies it to filter down the set of
@ -123,10 +125,8 @@ If this behavior isn't quite smart enough for your needs, you can use the
filtering archives. For example:
```yaml
storage:
...
archive_name_format: {hostname}-user-data-{now}
match_archives: sh:myhost-user-data-*
archive_name_format: '{hostname}-user-data-{now}'
match_archives: sh:myhost-user-data-*
```
For Borg 1.x, use a shell pattern for the `match_archives` value and see the
@ -156,23 +156,28 @@ them. To achieve this, you can put fragments of common configuration options
into a file, and then include or inline that file into one or more borgmatic
configuration files.
Let's say that you want to include common retention configuration across all
Let's say that you want to include common consistency check configuration across all
of your configuration files. You could do that in each configuration file with
the following:
```yaml
location:
...
repositories:
- path: repo.borg
retention:
!include /etc/borgmatic/common_retention.yaml
checks:
!include /etc/borgmatic/common_checks.yaml
```
And then the contents of `common_retention.yaml` could be:
<span class="minilink minilink-addedin">Prior to version 1.8.0</span> These
options were organized into sections like `location:` and `consistency:`.
The contents of `common_checks.yaml` could be:
```yaml
keep_hourly: 24
keep_daily: 7
- name: repository
frequency: 3 weeks
- name: archives
frequency: 2 weeks
```
To prevent borgmatic from trying to load these configuration fragments by
@ -184,18 +189,18 @@ When a configuration include is a relative path, borgmatic loads it from either
the current working directory or from the directory containing the file doing
the including.
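For example, here's a sketch that uses a relative include (the filename is
hypothetical):

```yaml
repositories:
    - path: repo.borg

checks:
    !include common_checks.yaml
```

In this sketch, borgmatic looks for `common_checks.yaml` in the current
working directory or in the directory containing the including file.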
Note that this form of include must be a YAML value rather than a key. For
Note that this form of include must be a value rather than an option name. For
example, this will not work:
```yaml
location:
...
repositories:
- path: repo.borg
# Don't do this. It won't work!
!include /etc/borgmatic/common_retention.yaml
!include /etc/borgmatic/common_checks.yaml
```
But if you do want to merge in a YAML key *and* its values, keep reading!
But if you do want to merge in an option name *and* its values, keep reading!
## Include merging
@ -203,45 +208,43 @@ But if you do want to merge in a YAML key *and* its values, keep reading!
If you need to get even fancier and merge in common configuration options, you
can perform a YAML merge of included configuration using the YAML `<<` key.
For instance, here's an example of a main configuration file that pulls in
retention and consistency options via a single include:
retention and consistency checks options via a single include:
```yaml
<<: !include /etc/borgmatic/common.yaml
repositories:
- path: repo.borg
location:
...
<<: !include /etc/borgmatic/common.yaml
```
This is what `common.yaml` might look like:
```yaml
retention:
keep_hourly: 24
keep_daily: 7
keep_hourly: 24
keep_daily: 7
consistency:
checks:
- name: repository
checks:
- name: repository
frequency: 3 weeks
- name: archives
frequency: 2 weeks
```
Once this include gets merged in, the resulting configuration would have all
of the `location` options from the original configuration file *and* the
`retention` and `consistency` options from the include.
<span class="minilink minilink-addedin">Prior to version 1.8.0</span> These
options were organized into sections like `retention:` and `consistency:`.
Prior to borgmatic version 1.6.0, when there's a section collision between the
local file and the merged include, the local file's section takes precedence.
So if the `retention` section appears in both the local file and the include
file, the included `retention` is ignored in favor of the local `retention`.
But see below about deep merge in version 1.6.0+.
Once this include gets merged in, the resulting configuration would have all
of the options from the original configuration file *and* the options from the
include.
Note that this `<<` include merging syntax is only for merging in mappings
(configuration options and their values). But if you'd like to include a
single value directly, please see the section above about standard includes.
single value directly, please see the discussion above about standard includes.
Additionally, there is a limitation preventing multiple `<<` include merges
per section. So for instance, that means you can do one `<<` merge at the
global level, another `<<` within each configuration section, etc. (This is a
YAML limitation.)
per file or option value. So for instance, that means you can do one `<<`
merge at the global level, another `<<` within each nested option value, etc.
(This is a YAML limitation.)
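As a sketch of what this permits (the include files here are hypothetical),
you can combine one global-level merge with another merge inside a nested
option value:

```yaml
<<: !include /etc/borgmatic/common.yaml

healthchecks:
    <<: !include /etc/borgmatic/common_healthchecks.yaml
    ping_url: https://hc-ping.com/your-uuid
```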
### Deep merge
@ -252,29 +255,30 @@ at all levels in the two configuration files. This allows you to include
common configuration—up to full borgmatic configuration files—while overriding
only the parts you want to customize.
For instance, here's an example of a main configuration file that pulls in two
retention options via an include and then overrides one of them locally:
For instance, here's an example of a main configuration file that pulls in
options via an include and then overrides one of them locally:
```yaml
<<: !include /etc/borgmatic/common.yaml
location:
...
constants:
hostname: myhostname
retention:
keep_daily: 5
repositories:
- path: repo.borg
```
This is what `common.yaml` might look like:
```yaml
retention:
keep_hourly: 24
keep_daily: 7
constants:
prefix: myprefix
hostname: otherhost
```
Once this include gets merged in, the resulting configuration would have a
`keep_hourly` value of `24` and an overridden `keep_daily` value of `5`.
`prefix` value of `myprefix` and an overridden `hostname` value of
`myhostname`.
When there's an option collision between the local file and the merged
include, the local file's option takes precedence.
@ -292,21 +296,22 @@ configuration file, you can omit it with an `!omit` tag. For instance:
```yaml
<<: !include /etc/borgmatic/common.yaml
location:
source_directories:
- !omit /home
- /var
source_directories:
- !omit /home
- /var
```
And `common.yaml` like this:
```yaml
location:
source_directories:
- /home
- /etc
source_directories:
- /home
- /etc
```
<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
this option in the `location:` section of your configuration.
Once this include gets merged in, the resulting configuration will have a
`source_directories` value of `/etc` and `/var`—with `/home` omitted.
@ -319,16 +324,15 @@ an example of some things not to do:
```yaml
<<: !include /etc/borgmatic/common.yaml
location:
source_directories:
# Do not do this! It will not work. "!omit" belongs before "/home".
- /home !omit
source_directories:
# Do not do this! It will not work. "!omit" belongs before "/home".
- /home !omit
# Do not do this either! "!omit" only works on scalar list items.
repositories: !omit
# Also do not do this for the same reason! This is a list item, but it's
# not a scalar.
- !omit path: repo.borg
# Do not do this either! "!omit" only works on scalar list items.
repositories: !omit
# Also do not do this for the same reason! This is a list item, but it's
# not a scalar.
- !omit path: repo.borg
```
Additionally, the `!omit` tag only works in a configuration file that also
@ -342,8 +346,8 @@ includes.
### Shallow merge
Even though deep merging is generally pretty handy for included files,
sometimes you want specific sections in the local file to take precedence over
included sections—without any merging occurring for them.
sometimes you want specific options in the local file to take precedence over
included options—without any merging occurring for them.
<span class="minilink minilink-addedin">New in version 1.7.12</span> That's
where the `!retain` tag comes in. Whenever you're merging an included file
@ -357,37 +361,38 @@ on the `retention` mapping:
```yaml
<<: !include /etc/borgmatic/common.yaml
location:
repositories:
- path: repo.borg
repositories:
- path: repo.borg
retention: !retain
keep_daily: 5
checks: !retain
- name: repository
```
And `common.yaml` like this:
```yaml
location:
repositories:
- path: common.borg
repositories:
- path: common.borg
retention:
keep_hourly: 24
keep_daily: 7
checks:
- name: archives
```
Once this include gets merged in, the resulting configuration will have a
`keep_daily` value of `5` and nothing else in the `retention` section. That's
because the `!retain` tag says to retain the local version of `retention` and
ignore any values coming in from the include. But because the `repositories`
list doesn't have a `!retain` tag, it still gets merged together to contain
both `common.borg` and `repo.borg`.
<span class="minilink minilink-addedin">Prior to version 1.8.0</span> These
options were organized into sections like `location:` and `consistency:`.
The `!retain` tag can only be placed on mappings and lists, and it goes right
after the name of the option (and its colon) on the same line. The effects of
`!retain` are recursive, meaning that if you place a `!retain` tag on a
top-level mapping, even deeply nested values within it will not be merged.
Once this include gets merged in, the resulting configuration will have a
`checks` value with a name of `repository` and no other values. That's because
the `!retain` tag says to retain the local version of `checks` and ignore any
values coming in from the include. But because the `repositories` list doesn't
have a `!retain` tag, it still gets merged together to contain both
`common.borg` and `repo.borg`.
The `!retain` tag can only be placed on mappings (keys/values) and lists, and
it goes right after the name of the option (and its colon) on the same line.
The effects of `!retain` are recursive, meaning that if you place a `!retain`
tag on a top-level mapping, even deeply nested values within it will not be
merged.
Additionally, the `!retain` tag only works in a configuration file that also
performs a merge include with `<<: !include`. It doesn't make sense within,
@ -434,43 +439,41 @@ Whatever the reason, you can override borgmatic configuration options at the
command-line via the `--override` flag. Here's an example:
```bash
borgmatic create --override location.remote_path=/usr/local/bin/borg1
borgmatic create --override remote_path=/usr/local/bin/borg1
```
What this does is load your configuration files, and for each one, disregard
the configured value for the `remote_path` option in the `location` section,
and use the value of `/usr/local/bin/borg1` instead.
the configured value for the `remote_path` option, and use the value of
`/usr/local/bin/borg1` instead.
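In configuration terms, that example override behaves as if each loaded
configuration file contained:

```yaml
remote_path: /usr/local/bin/borg1
```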
You can even override multiple values at once. For instance:
You can even override nested values or multiple values at once. For instance:
```bash
borgmatic create --override section.option1=value1 section.option2=value2
borgmatic create --override parent_option.option1=value1 --override parent_option.option2=value2
```
This will accomplish the same thing:
```bash
borgmatic create --override section.option1=value1 --override section.option2=value2
```
<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Don't
forget to specify the section that an option is in. That looks like a prefix
on the option name, e.g. `location.repositories`.
Note that each value is parsed as an actual YAML string, so you can even set
list values by using brackets. For instance:
```bash
borgmatic create --override location.repositories=[test1.borg,test2.borg]
borgmatic create --override repositories=[test1.borg,test2.borg]
```
Or even a single list element:
```bash
borgmatic create --override location.repositories=[/root/test.borg]
borgmatic create --override repositories=[/root/test.borg]
```
If your override value contains special YAML characters like colons, then
you'll need quotes for it to parse correctly:
```bash
borgmatic create --override location.repositories="['user@server:test.borg']"
borgmatic create --override repositories="['user@server:test.borg']"
```
There is not currently a way to override a single element of a list without
@ -486,7 +489,9 @@ indentation and a leading dash.)
Be sure to quote your overrides if they contain spaces or other characters
that your shell may interpret.
An alternate to command-line overrides is passing in your values via [environment variables](https://torsion.org/borgmatic/docs/how-to/provide-your-passwords/).
An alternative to command-line overrides is passing in your values via
[environment
variables](https://torsion.org/borgmatic/docs/how-to/provide-your-passwords/).
## Constant interpolation
@ -506,16 +511,19 @@ constants:
user: foo
archive_prefix: bar
location:
source_directories:
- /home/{user}/.config
- /home/{user}/.ssh
...
source_directories:
- /home/{user}/.config
- /home/{user}/.ssh
storage:
archive_name_format: '{archive_prefix}-{now}'
...
archive_name_format: '{archive_prefix}-{now}'
```
<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Don't
forget to specify the section (like `location:` or `storage:`) that any option
is in.
In this example, when borgmatic runs, all instances of `{user}` get replaced
with `foo` and all instances of `{archive_prefix}` get replaced with `bar`.
(And in this particular example, `{now}` doesn't get replaced with anything,
@ -523,14 +531,13 @@ but gets passed directly to Borg.) After substitution, the logical result
looks something like this:
```yaml
location:
source_directories:
- /home/foo/.config
- /home/foo/.ssh
...
source_directories:
- /home/foo/.config
- /home/foo/.ssh
storage:
archive_name_format: 'bar-{now}'
...
archive_name_format: 'bar-{now}'
```
An alternative to constants is passing in your values via [environment

View File

@ -89,19 +89,20 @@ notifications or take other actions, so you can get alerted as soon as
something goes wrong. Here's a not-so-useful example:
```yaml
hooks:
on_error:
- echo "Error while creating a backup or running a backup hook."
on_error:
- echo "Error while creating a backup or running a backup hook."
```
<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
this option in the `hooks:` section of your configuration.
The `on_error` hook supports interpolating particular runtime variables into
the hook command. Here's an example that assumes you provide a separate shell
script to handle the alerting:
```yaml
hooks:
on_error:
- send-text-message.sh "{configuration_filename}" "{repository}"
on_error:
- send-text-message.sh "{configuration_filename}" "{repository}"
```
In this example, when the error occurs, borgmatic interpolates runtime values
@ -135,11 +136,13 @@ URL" for your project. Here's an example:
```yaml
hooks:
healthchecks:
ping_url: https://hc-ping.com/addffa72-da17-40ae-be9c-ff591afb942a
healthchecks:
ping_url: https://hc-ping.com/addffa72-da17-40ae-be9c-ff591afb942a
```
<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
this option in the `hooks:` section of your configuration.
With this hook in place, borgmatic pings your Healthchecks project when a
backup begins, ends, or errors. Specifically, after the <a
href="https://torsion.org/borgmatic/docs/how-to/add-preparation-and-cleanup-steps-to-backups/">`before_backup`
@ -179,11 +182,13 @@ API URL" for your monitor. Here's an example:
```yaml
hooks:
cronitor:
ping_url: https://cronitor.link/d3x0c1
cronitor:
ping_url: https://cronitor.link/d3x0c1
```
<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
this option in the `hooks:` section of your configuration.
With this hook in place, borgmatic pings your Cronitor monitor when a backup
begins, ends, or errors. Specifically, after the <a
href="https://torsion.org/borgmatic/docs/how-to/add-preparation-and-cleanup-steps-to-backups/">`before_backup`
@ -208,11 +213,13 @@ URL" for your monitor. Here's an example:
```yaml
hooks:
cronhub:
ping_url: https://cronhub.io/start/1f5e3410-254c-11e8-b61d-55875966d031
cronhub:
ping_url: https://cronhub.io/start/1f5e3410-254c-11e8-b61d-55875966d031
```
<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
this option in the `hooks:` section of your configuration.
With this hook in place, borgmatic pings your Cronhub monitor when a backup
begins, ends, or errors. Specifically, after the <a
href="https://torsion.org/borgmatic/docs/how-to/add-preparation-and-cleanup-steps-to-backups/">`before_backup`
@ -251,11 +258,13 @@ Here's an example:
```yaml
hooks:
pagerduty:
integration_key: a177cad45bd374409f78906a810a3074
pagerduty:
integration_key: a177cad45bd374409f78906a810a3074
```
<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
this option in the `hooks:` section of your configuration.
With this hook in place, borgmatic creates a PagerDuty event for your service
whenever backups fail. Specifically, if an error occurs during a `create`,
`prune`, `compact`, or `check` action, borgmatic sends an event to PagerDuty
@ -291,31 +300,34 @@ An example configuration is shown here, with all the available options, includin
[tags](https://ntfy.sh/docs/publish/#tags-emojis):
```yaml
hooks:
ntfy:
topic: my-unique-topic
server: https://ntfy.my-domain.com
start:
title: A Borgmatic backup started
message: Watch this space...
tags: borgmatic
priority: min
finish:
title: A Borgmatic backup completed successfully
message: Nice!
tags: borgmatic,+1
priority: min
fail:
title: A Borgmatic backup failed
message: You should probably fix it
tags: borgmatic,-1,skull
priority: max
states:
- start
- finish
- fail
ntfy:
topic: my-unique-topic
server: https://ntfy.my-domain.com
start:
title: A Borgmatic backup started
message: Watch this space...
tags: borgmatic
priority: min
finish:
title: A Borgmatic backup completed successfully
message: Nice!
tags: borgmatic,+1
priority: min
fail:
title: A Borgmatic backup failed
message: You should probably fix it
tags: borgmatic,-1,skull
priority: max
states:
- start
- finish
- fail
```
<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
the `ntfy:` option in the `hooks:` section of your configuration.
## Scripting borgmatic
To consume the output of borgmatic in other software, you can include an

View File

@ -20,10 +20,12 @@ pull your repository passphrase, your database passwords, or any other option
values from environment variables. For instance:
```yaml
storage:
encryption_passphrase: ${MY_PASSPHRASE}
encryption_passphrase: ${MY_PASSPHRASE}
```
<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
this option in the `storage:` section of your configuration.
This uses the `MY_PASSPHRASE` environment variable as your encryption
passphrase. Note that the `{` `}` brackets are required. `$MY_PASSPHRASE` by
itself will not work.
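For instance, you might export the variable in the invoking shell (or set it in a cron or systemd unit) before running borgmatic; the name and value here are illustrative:

```bash
export MY_PASSPHRASE='trove9-wombat-tango'
borgmatic create
```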
@ -38,12 +40,14 @@ configuration](https://torsion.org/borgmatic/docs/how-to/backup-your-databases/)
the same approach applies. For example:
```yaml
hooks:
postgresql_databases:
- name: users
password: ${MY_DATABASE_PASSWORD}
postgresql_databases:
- name: users
password: ${MY_DATABASE_PASSWORD}
```
<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
this option in the `hooks:` section of your configuration.
This uses the `MY_DATABASE_PASSWORD` environment variable as your database
password.
@ -53,8 +57,7 @@ password.
If you'd like to set a default for your environment variables, you can do so with the following syntax:
```yaml
storage:
encryption_passphrase: ${MY_PASSPHRASE:-defaultpass}
encryption_passphrase: ${MY_PASSPHRASE:-defaultpass}
```
Here, "`defaultpass`" is the default passphrase if the `MY_PASSPHRASE`
@ -72,8 +75,7 @@ can escape it with a backslash. For instance, if your password is literally
`${A}@!`:
```yaml
storage:
encryption_passphrase: \${A}@!
encryption_passphrase: \${A}@!
```
### Related features

View File

@ -140,13 +140,14 @@ use the `--destination` flag, for instance: `--destination
You should edit the configuration file to suit your needs, as the generated
values are only representative. All options are optional except where
indicated, so feel free to ignore anything you don't need.
indicated, so feel free to ignore anything you don't need. Be sure to use
spaces rather than tabs for indentation; YAML does not allow tabs.
Note that the configuration file is organized into distinct sections, each
with a section name like `location:` or `storage:`. So take care that if you
uncomment a particular option, also uncomment its containing section name, or
else borgmatic won't recognize the option. Also be sure to use spaces rather
than tabs for indentation; YAML does not allow tabs.
<span class="minilink minilink-addedin">Prior to version 1.8.0</span> The
configuration file was organized into distinct sections, each with a section
name like `location:` or `storage:`. So in older versions of borgmatic, when
you uncomment a particular option, take care to also uncomment its containing
section name; otherwise borgmatic won't recognize the option.
You can get the same sample configuration file from the [configuration
reference](https://torsion.org/borgmatic/docs/reference/configuration/), the

View File

@ -131,22 +131,23 @@ Let's say your original borgmatic repository configuration file looks something
like this:
```yaml
location:
repositories:
- path: original.borg
repositories:
- path: original.borg
```
<span class="minilink minilink-addedin">Prior to version 1.7.10</span> Omit
the `path:` portion of the `repositories` list.
Change it to a new (not yet created) repository path:
```yaml
location:
repositories:
- path: upgraded.borg
repositories:
- path: upgraded.borg
```
<span class="minilink minilink-addedin">Prior to version 1.8.0</span> This
option was found in the `location:` section of your configuration.
<span class="minilink minilink-addedin">Prior to version 1.7.10</span> Omit
the `path:` portion of the `repositories` list.
Then, run the `rcreate` action (formerly `init`) to create that new Borg 2
repository:
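For instance, a plausible invocation looks like this; treat the exact `--encryption` value as an assumption and check `borgmatic rcreate --help` for the modes your Borg 2 install supports:

```bash
borgmatic rcreate --verbosity 1 --encryption repokey-blake2
```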

View File

@ -15,7 +15,7 @@ listed here do not have equivalents in borgmatic's [configuration
file](https://torsion.org/borgmatic/docs/reference/configuration/).
If you're using an older version of borgmatic, some of these flags may not be
present in that version, and you should instead use `borgmatic --help` or
present in that version and you should instead use `borgmatic --help` or
`borgmatic [action name] --help` (where `[action name]` is the name of an
action like `list`, `create`, etc.).

View File

@ -22,7 +22,7 @@ def generate_configuration(config_path, repository_path):
.replace('- /home', f'- {config_path}')
.replace('- /etc', '')
.replace('- /var/log/syslog*', '')
+ 'storage:\n encryption_passphrase: "test"'
+ 'encryption_passphrase: "test"'
)
config_file = open(config_path, 'w')
config_file.write(config)

View File

@ -22,60 +22,57 @@ def write_configuration(
storing database dumps, dump format (for PostgreSQL), and encryption passphrase.
'''
config = f'''
location:
source_directories:
- {source_directory}
repositories:
- {repository_path}
borgmatic_source_directory: {borgmatic_source_directory}
source_directories:
- {source_directory}
repositories:
- path: {repository_path}
borgmatic_source_directory: {borgmatic_source_directory}
storage:
encryption_passphrase: "test"
encryption_passphrase: "test"
hooks:
postgresql_databases:
- name: test
hostname: postgresql
username: postgres
password: test
format: {postgresql_dump_format}
- name: all
hostname: postgresql
username: postgres
password: test
- name: all
format: custom
hostname: postgresql
username: postgres
password: test
mysql_databases:
- name: test
hostname: mysql
username: root
password: test
- name: all
hostname: mysql
username: root
password: test
- name: all
format: sql
hostname: mysql
username: root
password: test
mongodb_databases:
- name: test
hostname: mongodb
username: root
password: test
authentication_database: admin
format: {mongodb_dump_format}
- name: all
hostname: mongodb
username: root
password: test
sqlite_databases:
- name: sqlite_test
path: /tmp/sqlite_test.db
postgresql_databases:
- name: test
hostname: postgresql
username: postgres
password: test
format: {postgresql_dump_format}
- name: all
hostname: postgresql
username: postgres
password: test
- name: all
format: custom
hostname: postgresql
username: postgres
password: test
mysql_databases:
- name: test
hostname: mysql
username: root
password: test
- name: all
hostname: mysql
username: root
password: test
- name: all
format: sql
hostname: mysql
username: root
password: test
mongodb_databases:
- name: test
hostname: mongodb
username: root
password: test
authentication_database: admin
format: {mongodb_dump_format}
- name: all
hostname: mongodb
username: root
password: test
sqlite_databases:
- name: sqlite_test
path: /tmp/sqlite_test.db
'''
with open(config_path, 'w') as config_file:
@ -96,51 +93,48 @@ def write_custom_restore_configuration(
restore_username, restore_password and restore_path.
'''
config = f'''
location:
source_directories:
- {source_directory}
repositories:
- {repository_path}
borgmatic_source_directory: {borgmatic_source_directory}
source_directories:
- {source_directory}
repositories:
- path: {repository_path}
borgmatic_source_directory: {borgmatic_source_directory}
storage:
encryption_passphrase: "test"
encryption_passphrase: "test"
hooks:
postgresql_databases:
- name: test
hostname: postgresql
username: postgres
password: test
format: {postgresql_dump_format}
restore_hostname: postgresql2
restore_port: 5433
restore_username: postgres2
restore_password: test2
mysql_databases:
- name: test
hostname: mysql
username: root
password: test
restore_hostname: mysql2
restore_port: 3307
restore_username: root
restore_password: test2
mongodb_databases:
- name: test
hostname: mongodb
username: root
password: test
authentication_database: admin
format: {mongodb_dump_format}
restore_hostname: mongodb2
restore_port: 27018
restore_username: root2
restore_password: test2
sqlite_databases:
- name: sqlite_test
path: /tmp/sqlite_test.db
restore_path: /tmp/sqlite_test2.db
postgresql_databases:
- name: test
hostname: postgresql
username: postgres
password: test
format: {postgresql_dump_format}
restore_hostname: postgresql2
restore_port: 5433
restore_username: postgres2
restore_password: test2
mysql_databases:
- name: test
hostname: mysql
username: root
password: test
restore_hostname: mysql2
restore_port: 3307
restore_username: root
restore_password: test2
mongodb_databases:
- name: test
hostname: mongodb
username: root
password: test
authentication_database: admin
format: {mongodb_dump_format}
restore_hostname: mongodb2
restore_port: 27018
restore_username: root2
restore_password: test2
sqlite_databases:
- name: sqlite_test
path: /tmp/sqlite_test.db
restore_path: /tmp/sqlite_test2.db
'''
with open(config_path, 'w') as config_file:
@ -161,23 +155,20 @@ def write_simple_custom_restore_configuration(
these options for PostgreSQL.
'''
config = f'''
location:
source_directories:
- {source_directory}
repositories:
- {repository_path}
borgmatic_source_directory: {borgmatic_source_directory}
source_directories:
- {source_directory}
repositories:
- path: {repository_path}
borgmatic_source_directory: {borgmatic_source_directory}
storage:
encryption_passphrase: "test"
encryption_passphrase: "test"
hooks:
postgresql_databases:
- name: test
hostname: postgresql
username: postgres
password: test
format: {postgresql_dump_format}
postgresql_databases:
- name: test
hostname: postgresql
username: postgres
password: test
format: {postgresql_dump_format}
'''
with open(config_path, 'w') as config_file:

View File

@ -21,7 +21,7 @@ def generate_configuration(config_path, repository_path):
.replace('- /home', f'- {config_path}')
.replace('- /etc', '')
.replace('- /var/log/syslog*', '')
+ 'storage:\n encryption_passphrase: "test"'
+ 'encryption_passphrase: "test"'
)
config_file = open(config_path, 'w')
config_file.write(config)

View File

@ -38,5 +38,4 @@ def test_validate_config_command_with_show_flag_displays_configuration():
f'validate-borgmatic-config --config {config_path} --show'.split(' ')
).decode(sys.stdout.encoding)
assert 'location:' in output
assert 'repositories:' in output

View File

@ -84,7 +84,6 @@ def test_prune_archives_command_does_not_duplicate_flags_or_raise():
False,
'repo',
{},
{},
'2.3.4',
fuzz_argument(arguments, argument_name),
argparse.Namespace(log_json=False),

View File

@ -17,10 +17,10 @@ def test_parse_arguments_with_no_arguments_uses_defaults():
assert global_arguments.log_file_verbosity == 0
def test_parse_arguments_with_multiple_config_paths_parses_as_list():
def test_parse_arguments_with_multiple_config_flags_parses_as_list():
flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
arguments = module.parse_arguments('--config', 'myconfig', 'otherconfig')
arguments = module.parse_arguments('--config', 'myconfig', '--config', 'otherconfig')
global_arguments = arguments['global']
assert global_arguments.config_paths == ['myconfig', 'otherconfig']
@ -109,20 +109,11 @@ def test_parse_arguments_with_single_override_parses():
assert global_arguments.overrides == ['foo.bar=baz']
def test_parse_arguments_with_multiple_overrides_parses():
flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
arguments = module.parse_arguments('--override', 'foo.bar=baz', 'foo.quux=7')
global_arguments = arguments['global']
assert global_arguments.overrides == ['foo.bar=baz', 'foo.quux=7']
def test_parse_arguments_with_multiple_overrides_and_flags_parses():
def test_parse_arguments_with_multiple_overrides_flags_parses():
flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
arguments = module.parse_arguments(
'--override', 'foo.bar=baz', '--override', 'foo.quux=7', 'this.that=8'
'--override', 'foo.bar=baz', '--override', 'foo.quux=7', '--override', 'this.that=8'
)
global_arguments = arguments['global']

View File

@ -13,43 +13,43 @@ def test_insert_newline_before_comment_does_not_raise():
config = module.yaml.comments.CommentedMap([(field_name, 33)])
config.yaml_set_comment_before_after_key(key=field_name, before='Comment')
module._insert_newline_before_comment(config, field_name)
module.insert_newline_before_comment(config, field_name)
def test_comment_out_line_skips_blank_line():
line = ' \n'
assert module._comment_out_line(line) == line
assert module.comment_out_line(line) == line
def test_comment_out_line_skips_already_commented_out_line():
line = ' # foo'
assert module._comment_out_line(line) == line
assert module.comment_out_line(line) == line
def test_comment_out_line_comments_section_name():
line = 'figgy-pudding:'
assert module._comment_out_line(line) == '# ' + line
assert module.comment_out_line(line) == '# ' + line
def test_comment_out_line_comments_indented_option():
line = ' enabled: true'
assert module._comment_out_line(line) == ' # enabled: true'
assert module.comment_out_line(line) == ' # enabled: true'
def test_comment_out_line_comments_twice_indented_option():
line = ' - item'
assert module._comment_out_line(line) == ' # - item'
assert module.comment_out_line(line) == ' # - item'
def test_comment_out_optional_configuration_comments_optional_config_only():
# The "# COMMENT_OUT" comment is a sentinel used to express that the following key is optional.
# It's stripped out of the final output.
flexmock(module)._comment_out_line = lambda line: '# ' + line
flexmock(module).comment_out_line = lambda line: '# ' + line
config = '''
# COMMENT_OUT
foo:
@ -58,14 +58,13 @@ foo:
- baz
- quux
location:
repositories:
- one
- two
repositories:
- one
- two
# This comment should be kept.
# COMMENT_OUT
other: thing
# This comment should be kept.
# COMMENT_OUT
other: thing
'''
# flake8: noqa
@ -75,16 +74,15 @@ location:
# - baz
# - quux
location:
repositories:
- one
- two
repositories:
- one
- two
# This comment should be kept.
# other: thing
# This comment should be kept.
# other: thing
'''
assert module._comment_out_optional_configuration(config.strip()) == expected_config.strip()
assert module.comment_out_optional_configuration(config.strip()) == expected_config.strip()
def test_render_configuration_converts_configuration_to_yaml_string():
@ -204,10 +202,10 @@ def test_generate_sample_configuration_does_not_raise():
builtins = flexmock(sys.modules['builtins'])
builtins.should_receive('open').with_args('schema.yaml').and_return('')
flexmock(module.yaml).should_receive('round_trip_load')
flexmock(module).should_receive('_schema_to_sample_configuration')
flexmock(module).should_receive('schema_to_sample_configuration')
flexmock(module).should_receive('merge_source_configuration_into_destination')
flexmock(module).should_receive('render_configuration')
flexmock(module).should_receive('_comment_out_optional_configuration')
flexmock(module).should_receive('comment_out_optional_configuration')
flexmock(module).should_receive('write_configuration')
module.generate_sample_configuration(False, None, 'dest.yaml', 'schema.yaml')
@ -219,10 +217,10 @@ def test_generate_sample_configuration_with_source_filename_does_not_raise():
flexmock(module.yaml).should_receive('round_trip_load')
flexmock(module.load).should_receive('load_configuration')
flexmock(module.normalize).should_receive('normalize')
flexmock(module).should_receive('_schema_to_sample_configuration')
flexmock(module).should_receive('schema_to_sample_configuration')
flexmock(module).should_receive('merge_source_configuration_into_destination')
flexmock(module).should_receive('render_configuration')
flexmock(module).should_receive('_comment_out_optional_configuration')
flexmock(module).should_receive('comment_out_optional_configuration')
flexmock(module).should_receive('write_configuration')
module.generate_sample_configuration(False, 'source.yaml', 'dest.yaml', 'schema.yaml')
@ -232,10 +230,10 @@ def test_generate_sample_configuration_with_dry_run_does_not_write_file():
builtins = flexmock(sys.modules['builtins'])
builtins.should_receive('open').with_args('schema.yaml').and_return('')
flexmock(module.yaml).should_receive('round_trip_load')
flexmock(module).should_receive('_schema_to_sample_configuration')
flexmock(module).should_receive('schema_to_sample_configuration')
flexmock(module).should_receive('merge_source_configuration_into_destination')
flexmock(module).should_receive('render_configuration')
flexmock(module).should_receive('_comment_out_optional_configuration')
flexmock(module).should_receive('comment_out_optional_configuration')
flexmock(module).should_receive('write_configuration').never()
module.generate_sample_configuration(True, None, 'dest.yaml', 'schema.yaml')

View File

@ -40,35 +40,32 @@ def mock_config_and_schema(config_yaml, schema_yaml=None):
def test_parse_configuration_transforms_file_into_mapping():
mock_config_and_schema(
'''
location:
source_directories:
- /home
- /etc
source_directories:
- /home
- /etc
repositories:
- path: hostname.borg
repositories:
- path: hostname.borg
retention:
keep_minutely: 60
keep_hourly: 24
keep_daily: 7
keep_minutely: 60
keep_hourly: 24
keep_daily: 7
consistency:
checks:
- name: repository
- name: archives
checks:
- name: repository
- name: archives
'''
)
config, logs = module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml')
assert config == {
'location': {
'source_directories': ['/home', '/etc'],
'repositories': [{'path': 'hostname.borg'}],
},
'retention': {'keep_daily': 7, 'keep_hourly': 24, 'keep_minutely': 60},
'consistency': {'checks': [{'name': 'repository'}, {'name': 'archives'}]},
'source_directories': ['/home', '/etc'],
'repositories': [{'path': 'hostname.borg'}],
'keep_daily': 7,
'keep_hourly': 24,
'keep_minutely': 60,
'checks': [{'name': 'repository'}, {'name': 'archives'}],
}
assert logs == []
@ -78,22 +75,19 @@ def test_parse_configuration_passes_through_quoted_punctuation():
mock_config_and_schema(
f'''
location:
source_directories:
- "/home/{escaped_punctuation}"
source_directories:
- "/home/{escaped_punctuation}"
repositories:
- path: test.borg
repositories:
- path: test.borg
'''
)
config, logs = module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml')
assert config == {
'location': {
'source_directories': [f'/home/{string.punctuation}'],
'repositories': [{'path': 'test.borg'}],
}
'source_directories': [f'/home/{string.punctuation}'],
'repositories': [{'path': 'test.borg'}],
}
assert logs == []
@ -101,26 +95,22 @@ def test_parse_configuration_passes_through_quoted_punctuation():
def test_parse_configuration_with_schema_lacking_examples_does_not_raise():
mock_config_and_schema(
'''
location:
source_directories:
- /home
source_directories:
- /home
repositories:
- path: hostname.borg
repositories:
- path: hostname.borg
''',
'''
map:
location:
source_directories:
required: true
map:
source_directories:
required: true
seq:
- type: scalar
repositories:
required: true
seq:
- type: scalar
seq:
- type: scalar
repositories:
required: true
seq:
- type: scalar
''',
)
@ -130,12 +120,11 @@ def test_parse_configuration_with_schema_lacking_examples_does_not_raise():
def test_parse_configuration_inlines_include():
mock_config_and_schema(
'''
location:
source_directories:
- /home
source_directories:
- /home
repositories:
- path: hostname.borg
repositories:
- path: hostname.borg
retention:
!include include.yaml
@ -154,25 +143,25 @@ def test_parse_configuration_inlines_include():
config, logs = module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml')
assert config == {
'location': {'source_directories': ['/home'], 'repositories': [{'path': 'hostname.borg'}]},
'retention': {'keep_daily': 7, 'keep_hourly': 24},
'source_directories': ['/home'],
'repositories': [{'path': 'hostname.borg'}],
'keep_daily': 7,
'keep_hourly': 24,
}
assert logs == []
assert len(logs) == 1
def test_parse_configuration_merges_include():
mock_config_and_schema(
'''
location:
source_directories:
- /home
source_directories:
- /home
repositories:
- path: hostname.borg
repositories:
- path: hostname.borg
retention:
keep_daily: 1
<<: !include include.yaml
keep_daily: 1
<<: !include include.yaml
'''
)
builtins = flexmock(sys.modules['builtins'])
@ -188,8 +177,10 @@ def test_parse_configuration_merges_include():
config, logs = module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml')
assert config == {
'location': {'source_directories': ['/home'], 'repositories': [{'path': 'hostname.borg'}]},
'retention': {'keep_daily': 1, 'keep_hourly': 24},
'source_directories': ['/home'],
'repositories': [{'path': 'hostname.borg'}],
'keep_daily': 1,
'keep_hourly': 24,
}
assert logs == []
@ -218,10 +209,9 @@ def test_parse_configuration_raises_for_syntax_error():
def test_parse_configuration_raises_for_validation_error():
mock_config_and_schema(
'''
location:
source_directories: yes
repositories:
- path: hostname.borg
source_directories: yes
repositories:
- path: hostname.borg
'''
)
@ -232,14 +222,13 @@ def test_parse_configuration_raises_for_validation_error():
def test_parse_configuration_applies_overrides():
mock_config_and_schema(
'''
location:
source_directories:
- /home
source_directories:
- /home
repositories:
- path: hostname.borg
repositories:
- path: hostname.borg
local_path: borg1
local_path: borg1
'''
)
@ -248,11 +237,9 @@ def test_parse_configuration_applies_overrides():
)
assert config == {
'location': {
'source_directories': ['/home'],
'repositories': [{'path': 'hostname.borg'}],
'local_path': 'borg2',
}
'source_directories': ['/home'],
'repositories': [{'path': 'hostname.borg'}],
'local_path': 'borg2',
}
assert logs == []
@ -274,10 +261,8 @@ def test_parse_configuration_applies_normalization():
config, logs = module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml')
assert config == {
'location': {
'source_directories': ['/home'],
'repositories': [{'path': 'hostname.borg'}],
'exclude_if_present': ['.nobackup'],
}
'source_directories': ['/home'],
'repositories': [{'path': 'hostname.borg'}],
'exclude_if_present': ['.nobackup'],
}
assert logs

View File

@ -10,7 +10,7 @@ def test_destroy_monitor_removes_healthchecks_handler():
original_handlers = list(logger.handlers)
logger.addHandler(module.Forgetful_buffering_handler(byte_capacity=100, log_level=1))
module.destroy_monitor(flexmock(), flexmock(), flexmock(), flexmock())
module.destroy_monitor(flexmock(), flexmock(), flexmock(), flexmock(), flexmock())
assert logger.handlers == original_handlers
@ -19,6 +19,6 @@ def test_destroy_monitor_without_healthchecks_handler_does_not_raise():
logger = logging.getLogger()
original_handlers = list(logger.handlers)
module.destroy_monitor(flexmock(), flexmock(), flexmock(), flexmock())
module.destroy_monitor(flexmock(), flexmock(), flexmock(), flexmock(), flexmock())
assert logger.handlers == original_handlers

View File

@ -14,7 +14,7 @@ def test_run_borg_does_not_raise():
module.run_borg(
repository={'path': 'repos'},
storage={},
config={},
local_borg_version=None,
global_arguments=flexmock(log_json=False),
borg_arguments=borg_arguments,

View File

@ -11,7 +11,7 @@ def test_run_break_lock_does_not_raise():
module.run_break_lock(
repository={'path': 'repo'},
storage={},
config={},
local_borg_version=None,
break_lock_arguments=break_lock_arguments,
global_arguments=flexmock(),

View File

@ -5,9 +5,6 @@ from borgmatic.actions import check as module
def test_run_check_calls_hooks_for_configured_repository():
flexmock(module.logger).answer = lambda message: None
flexmock(module.borgmatic.config.checks).should_receive(
'repository_enabled_for_checks'
).and_return(True)
flexmock(module.borgmatic.config.validate).should_receive('repositories_match').never()
flexmock(module.borgmatic.borg.check).should_receive('check_archives').once()
flexmock(module.borgmatic.hooks.command).should_receive('execute_hook').times(2)
@ -23,10 +20,7 @@ def test_run_check_calls_hooks_for_configured_repository():
module.run_check(
config_filename='test.yaml',
repository={'path': 'repo'},
location={'repositories': ['repo']},
storage={},
consistency={},
hooks={},
config={'repositories': ['repo']},
hook_context={},
local_borg_version=None,
check_arguments=check_arguments,
@ -54,10 +48,7 @@ def test_run_check_runs_with_selected_repository():
module.run_check(
config_filename='test.yaml',
repository={'path': 'repo'},
location={'repositories': ['repo']},
storage={},
consistency={},
hooks={},
config={'repositories': ['repo']},
hook_context={},
local_borg_version=None,
check_arguments=check_arguments,
@ -85,10 +76,7 @@ def test_run_check_bails_if_repository_does_not_match():
module.run_check(
config_filename='test.yaml',
repository={'path': 'repo'},
location={'repositories': ['repo']},
storage={},
consistency={},
hooks={},
config={'repositories': ['repo']},
hook_context={},
local_borg_version=None,
check_arguments=check_arguments,

View File

@ -17,9 +17,7 @@ def test_compact_actions_calls_hooks_for_configured_repository():
module.run_compact(
config_filename='test.yaml',
repository={'path': 'repo'},
storage={},
retention={},
hooks={},
config={},
hook_context={},
local_borg_version=None,
compact_arguments=compact_arguments,
@ -45,9 +43,7 @@ def test_compact_runs_with_selected_repository():
module.run_compact(
config_filename='test.yaml',
repository={'path': 'repo'},
storage={},
retention={},
hooks={},
config={},
hook_context={},
local_borg_version=None,
compact_arguments=compact_arguments,
@ -73,9 +69,7 @@ def test_compact_bails_if_repository_does_not_match():
module.run_compact(
config_filename='test.yaml',
repository={'path': 'repo'},
storage={},
retention={},
hooks={},
config={},
hook_context={},
local_borg_version=None,
compact_arguments=compact_arguments,

View File

@ -28,9 +28,7 @@ def test_run_create_executes_and_calls_hooks_for_configured_repository():
module.run_create(
config_filename='test.yaml',
repository={'path': 'repo'},
location={},
storage={},
hooks={},
config={},
hook_context={},
local_borg_version=None,
create_arguments=create_arguments,
@ -49,6 +47,11 @@ def test_run_create_runs_with_selected_repository():
).once().and_return(True)
flexmock(module.borgmatic.borg.create).should_receive('create_archive').once()
flexmock(module).should_receive('create_borgmatic_manifest').once()
flexmock(module.borgmatic.hooks.command).should_receive('execute_hook').times(2)
flexmock(module.borgmatic.hooks.dispatch).should_receive('call_hooks').and_return({})
flexmock(module.borgmatic.hooks.dispatch).should_receive(
'call_hooks_even_if_unconfigured'
).and_return({})
create_arguments = flexmock(
repository=flexmock(),
progress=flexmock(),
@ -62,9 +65,7 @@ def test_run_create_runs_with_selected_repository():
module.run_create(
config_filename='test.yaml',
repository={'path': 'repo'},
location={},
storage={},
hooks={},
config={},
hook_context={},
local_borg_version=None,
create_arguments=create_arguments,
@ -96,9 +97,7 @@ def test_run_create_bails_if_repository_does_not_match():
module.run_create(
config_filename='test.yaml',
repository='repo',
location={},
storage={},
hooks={},
config={},
hook_context={},
local_borg_version=None,
create_arguments=create_arguments,

View File

@ -20,7 +20,7 @@ def test_run_export_tar_does_not_raise():
module.run_export_tar(
repository={'path': 'repo'},
storage={},
config={},
local_borg_version=None,
export_tar_arguments=export_tar_arguments,
global_arguments=global_arguments,

View File

@ -21,9 +21,7 @@ def test_run_extract_calls_hooks():
module.run_extract(
config_filename='test.yaml',
repository={'path': 'repo'},
location={'repositories': ['repo']},
storage={},
hooks={},
config={'repositories': ['repo']},
hook_context={},
local_borg_version=None,
extract_arguments=extract_arguments,

View File

@ -18,7 +18,7 @@ def test_run_info_does_not_raise():
list(
module.run_info(
repository={'path': 'repo'},
storage={},
config={},
local_borg_version=None,
info_arguments=info_arguments,
global_arguments=flexmock(log_json=False),

View File

@ -18,7 +18,7 @@ def test_run_list_does_not_raise():
list(
module.run_list(
repository={'path': 'repo'},
storage={},
config={},
local_borg_version=None,
list_arguments=list_arguments,
global_arguments=flexmock(log_json=False),

View File

@ -18,7 +18,7 @@ def test_run_mount_does_not_raise():
module.run_mount(
repository={'path': 'repo'},
storage={},
config={},
local_borg_version=None,
mount_arguments=mount_arguments,
global_arguments=flexmock(log_json=False),

View File

@ -14,9 +14,7 @@ def test_run_prune_calls_hooks_for_configured_repository():
module.run_prune(
config_filename='test.yaml',
repository={'path': 'repo'},
storage={},
retention={},
hooks={},
config={},
hook_context={},
local_borg_version=None,
prune_arguments=prune_arguments,
@ -39,9 +37,7 @@ def test_run_prune_runs_with_selected_repository():
module.run_prune(
config_filename='test.yaml',
repository={'path': 'repo'},
storage={},
retention={},
hooks={},
config={},
hook_context={},
local_borg_version=None,
prune_arguments=prune_arguments,
@ -64,9 +60,7 @@ def test_run_prune_bails_if_repository_does_not_match():
module.run_prune(
config_filename='test.yaml',
repository='repo',
storage={},
retention={},
hooks={},
config={},
hook_context={},
local_borg_version=None,
prune_arguments=prune_arguments,

View File

@ -19,7 +19,7 @@ def test_run_rcreate_does_not_raise():
module.run_rcreate(
repository={'path': 'repo'},
storage={},
config={},
local_borg_version=None,
rcreate_arguments=arguments,
global_arguments=flexmock(dry_run=False),
@ -46,7 +46,7 @@ def test_run_rcreate_bails_if_repository_does_not_match():
module.run_rcreate(
repository={'path': 'repo'},
storage={},
config={},
local_borg_version=None,
rcreate_arguments=arguments,
global_arguments=flexmock(dry_run=False),

View File

@ -6,7 +6,7 @@ import borgmatic.actions.restore as module
def test_get_configured_database_matches_database_by_name():
assert module.get_configured_database(
hooks={
config={
'other_databases': [{'name': 'other'}],
'postgresql_databases': [{'name': 'foo'}, {'name': 'bar'}],
},
@ -18,7 +18,7 @@ def test_get_configured_database_matches_database_by_name():
def test_get_configured_database_matches_nothing_when_database_name_not_configured():
assert module.get_configured_database(
hooks={'postgresql_databases': [{'name': 'foo'}, {'name': 'bar'}]},
config={'postgresql_databases': [{'name': 'foo'}, {'name': 'bar'}]},
archive_database_names={'postgresql_databases': ['foo']},
hook_name='postgresql_databases',
database_name='quux',
@ -27,7 +27,7 @@ def test_get_configured_database_matches_nothing_when_database_name_not_configur
def test_get_configured_database_matches_nothing_when_database_name_not_in_archive():
assert module.get_configured_database(
hooks={'postgresql_databases': [{'name': 'foo'}, {'name': 'bar'}]},
config={'postgresql_databases': [{'name': 'foo'}, {'name': 'bar'}]},
archive_database_names={'postgresql_databases': ['bar']},
hook_name='postgresql_databases',
database_name='foo',
@ -36,7 +36,7 @@ def test_get_configured_database_matches_nothing_when_database_name_not_in_archi
def test_get_configured_database_matches_database_by_configuration_database_name():
assert module.get_configured_database(
hooks={'postgresql_databases': [{'name': 'all'}, {'name': 'bar'}]},
config={'postgresql_databases': [{'name': 'all'}, {'name': 'bar'}]},
archive_database_names={'postgresql_databases': ['foo']},
hook_name='postgresql_databases',
database_name='foo',
@ -46,7 +46,7 @@ def test_get_configured_database_matches_database_by_configuration_database_name
def test_get_configured_database_with_unspecified_hook_matches_database_by_name():
assert module.get_configured_database(
hooks={
config={
'other_databases': [{'name': 'other'}],
'postgresql_databases': [{'name': 'foo'}, {'name': 'bar'}],
},
@ -69,8 +69,7 @@ def test_collect_archive_database_names_parses_archive_paths():
archive_database_names = module.collect_archive_database_names(
repository={'path': 'repo'},
archive='archive',
location={'borgmatic_source_directory': '.borgmatic'},
storage=flexmock(),
config={'borgmatic_source_directory': '.borgmatic'},
local_borg_version=flexmock(),
global_arguments=flexmock(log_json=False),
local_path=flexmock(),
@ -95,8 +94,7 @@ def test_collect_archive_database_names_parses_directory_format_archive_paths():
archive_database_names = module.collect_archive_database_names(
repository={'path': 'repo'},
archive='archive',
location={'borgmatic_source_directory': '.borgmatic'},
storage=flexmock(),
config={'borgmatic_source_directory': '.borgmatic'},
local_borg_version=flexmock(),
global_arguments=flexmock(log_json=False),
local_path=flexmock(),
@ -117,8 +115,7 @@ def test_collect_archive_database_names_skips_bad_archive_paths():
archive_database_names = module.collect_archive_database_names(
repository={'path': 'repo'},
archive='archive',
location={'borgmatic_source_directory': '.borgmatic'},
storage=flexmock(),
config={'borgmatic_source_directory': '.borgmatic'},
local_borg_version=flexmock(),
global_arguments=flexmock(log_json=False),
local_path=flexmock(),
@ -231,9 +228,7 @@ def test_run_restore_restores_each_database():
).and_return(('postgresql_databases', {'name': 'bar'}))
flexmock(module).should_receive('restore_single_database').with_args(
repository=object,
location=object,
storage=object,
hooks=object,
config=object,
local_borg_version=object,
global_arguments=object,
local_path=object,
@ -245,9 +240,7 @@ def test_run_restore_restores_each_database():
).once()
flexmock(module).should_receive('restore_single_database').with_args(
repository=object,
location=object,
storage=object,
hooks=object,
config=object,
local_borg_version=object,
global_arguments=object,
local_path=object,
@ -261,9 +254,7 @@ def test_run_restore_restores_each_database():
module.run_restore(
repository={'path': 'repo'},
location=flexmock(),
storage=flexmock(),
hooks=flexmock(),
config=flexmock(),
local_borg_version=flexmock(),
restore_arguments=flexmock(
repository='repo',
@ -293,9 +284,7 @@ def test_run_restore_bails_for_non_matching_repository():
module.run_restore(
repository={'path': 'repo'},
location=flexmock(),
storage=flexmock(),
hooks=flexmock(),
config=flexmock(),
local_borg_version=flexmock(),
restore_arguments=flexmock(repository='repo', archive='archive', databases=flexmock()),
global_arguments=flexmock(dry_run=False),
@ -317,19 +306,19 @@ def test_run_restore_restores_database_configured_with_all_name():
flexmock(module).should_receive('collect_archive_database_names').and_return(flexmock())
flexmock(module).should_receive('find_databases_to_restore').and_return(restore_names)
flexmock(module).should_receive('get_configured_database').with_args(
hooks=object,
config=object,
archive_database_names=object,
hook_name='postgresql_databases',
database_name='foo',
).and_return(('postgresql_databases', {'name': 'foo'}))
flexmock(module).should_receive('get_configured_database').with_args(
hooks=object,
config=object,
archive_database_names=object,
hook_name='postgresql_databases',
database_name='bar',
).and_return((None, None))
flexmock(module).should_receive('get_configured_database').with_args(
hooks=object,
config=object,
archive_database_names=object,
hook_name='postgresql_databases',
database_name='bar',
@ -337,9 +326,7 @@ def test_run_restore_restores_database_configured_with_all_name():
).and_return(('postgresql_databases', {'name': 'bar'}))
flexmock(module).should_receive('restore_single_database').with_args(
repository=object,
location=object,
storage=object,
hooks=object,
config=object,
local_borg_version=object,
global_arguments=object,
local_path=object,
@ -351,9 +338,7 @@ def test_run_restore_restores_database_configured_with_all_name():
).once()
flexmock(module).should_receive('restore_single_database').with_args(
repository=object,
location=object,
storage=object,
hooks=object,
config=object,
local_borg_version=object,
global_arguments=object,
local_path=object,
@ -367,9 +352,7 @@ def test_run_restore_restores_database_configured_with_all_name():
module.run_restore(
repository={'path': 'repo'},
location=flexmock(),
storage=flexmock(),
hooks=flexmock(),
config=flexmock(),
local_borg_version=flexmock(),
restore_arguments=flexmock(
repository='repo',
@ -401,19 +384,19 @@ def test_run_restore_skips_missing_database():
flexmock(module).should_receive('collect_archive_database_names').and_return(flexmock())
flexmock(module).should_receive('find_databases_to_restore').and_return(restore_names)
flexmock(module).should_receive('get_configured_database').with_args(
hooks=object,
config=object,
archive_database_names=object,
hook_name='postgresql_databases',
database_name='foo',
).and_return(('postgresql_databases', {'name': 'foo'}))
flexmock(module).should_receive('get_configured_database').with_args(
hooks=object,
config=object,
archive_database_names=object,
hook_name='postgresql_databases',
database_name='bar',
).and_return((None, None))
flexmock(module).should_receive('get_configured_database').with_args(
hooks=object,
config=object,
archive_database_names=object,
hook_name='postgresql_databases',
database_name='bar',
@ -421,9 +404,7 @@ def test_run_restore_skips_missing_database():
).and_return((None, None))
flexmock(module).should_receive('restore_single_database').with_args(
repository=object,
location=object,
storage=object,
hooks=object,
config=object,
local_borg_version=object,
global_arguments=object,
local_path=object,
@ -435,9 +416,7 @@ def test_run_restore_skips_missing_database():
).once()
flexmock(module).should_receive('restore_single_database').with_args(
repository=object,
location=object,
storage=object,
hooks=object,
config=object,
local_borg_version=object,
global_arguments=object,
local_path=object,
@ -451,9 +430,7 @@ def test_run_restore_skips_missing_database():
module.run_restore(
repository={'path': 'repo'},
location=flexmock(),
storage=flexmock(),
hooks=flexmock(),
config=flexmock(),
local_borg_version=flexmock(),
restore_arguments=flexmock(
repository='repo',
@ -486,22 +463,20 @@ def test_run_restore_restores_databases_from_different_hooks():
flexmock(module).should_receive('collect_archive_database_names').and_return(flexmock())
flexmock(module).should_receive('find_databases_to_restore').and_return(restore_names)
flexmock(module).should_receive('get_configured_database').with_args(
hooks=object,
config=object,
archive_database_names=object,
hook_name='postgresql_databases',
database_name='foo',
).and_return(('postgresql_databases', {'name': 'foo'}))
flexmock(module).should_receive('get_configured_database').with_args(
hooks=object,
config=object,
archive_database_names=object,
hook_name='mysql_databases',
database_name='bar',
).and_return(('mysql_databases', {'name': 'bar'}))
flexmock(module).should_receive('restore_single_database').with_args(
repository=object,
location=object,
storage=object,
hooks=object,
config=object,
local_borg_version=object,
global_arguments=object,
local_path=object,
@ -513,9 +488,7 @@ def test_run_restore_restores_databases_from_different_hooks():
).once()
flexmock(module).should_receive('restore_single_database').with_args(
repository=object,
location=object,
storage=object,
hooks=object,
config=object,
local_borg_version=object,
global_arguments=object,
local_path=object,
@ -529,9 +502,7 @@ def test_run_restore_restores_databases_from_different_hooks():
module.run_restore(
repository={'path': 'repo'},
location=flexmock(),
storage=flexmock(),
hooks=flexmock(),
config=flexmock(),
local_borg_version=flexmock(),
restore_arguments=flexmock(
repository='repo',

View File

@ -12,7 +12,7 @@ def test_run_rinfo_does_not_raise():
list(
module.run_rinfo(
repository={'path': 'repo'},
storage={},
config={},
local_borg_version=None,
rinfo_arguments=rinfo_arguments,
global_arguments=flexmock(log_json=False),

View File

@ -12,7 +12,7 @@ def test_run_rlist_does_not_raise():
list(
module.run_rlist(
repository={'path': 'repo'},
storage={},
config={},
local_borg_version=None,
rlist_arguments=rlist_arguments,
global_arguments=flexmock(),

View File

@ -11,7 +11,7 @@ def test_run_transfer_does_not_raise():
module.run_transfer(
repository={'path': 'repo'},
storage={},
config={},
local_borg_version=None,
transfer_arguments=transfer_arguments,
global_arguments=global_arguments,

View File

@ -22,7 +22,7 @@ def test_run_arbitrary_borg_calls_borg_with_flags():
module.run_arbitrary_borg(
repository_path='repo',
storage_config={},
config={},
local_borg_version='1.2.3',
options=['break-lock', '::'],
)
@ -44,7 +44,7 @@ def test_run_arbitrary_borg_with_log_info_calls_borg_with_info_flag():
module.run_arbitrary_borg(
repository_path='repo',
storage_config={},
config={},
local_borg_version='1.2.3',
options=['break-lock', '::'],
)
@ -66,7 +66,7 @@ def test_run_arbitrary_borg_with_log_debug_calls_borg_with_debug_flag():
module.run_arbitrary_borg(
repository_path='repo',
storage_config={},
config={},
local_borg_version='1.2.3',
options=['break-lock', '::'],
)
@ -75,7 +75,7 @@ def test_run_arbitrary_borg_with_log_debug_calls_borg_with_debug_flag():
def test_run_arbitrary_borg_with_lock_wait_calls_borg_with_lock_wait_flags():
flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
storage_config = {'lock_wait': 5}
config = {'lock_wait': 5}
flexmock(module.flags).should_receive('make_flags').and_return(()).and_return(
('--lock-wait', '5')
)
@ -90,7 +90,7 @@ def test_run_arbitrary_borg_with_lock_wait_calls_borg_with_lock_wait_flags():
module.run_arbitrary_borg(
repository_path='repo',
storage_config=storage_config,
config=config,
local_borg_version='1.2.3',
options=['break-lock', '::'],
)
@ -111,7 +111,7 @@ def test_run_arbitrary_borg_with_archive_calls_borg_with_archive_flag():
module.run_arbitrary_borg(
repository_path='repo',
storage_config={},
config={},
local_borg_version='1.2.3',
options=['break-lock', '::$ARCHIVE'],
archive='archive',
@ -133,7 +133,7 @@ def test_run_arbitrary_borg_with_local_path_calls_borg_via_local_path():
module.run_arbitrary_borg(
repository_path='repo',
storage_config={},
config={},
local_borg_version='1.2.3',
options=['break-lock', '::'],
local_path='borg1',
@ -157,7 +157,7 @@ def test_run_arbitrary_borg_with_remote_path_calls_borg_with_remote_path_flags()
module.run_arbitrary_borg(
repository_path='repo',
storage_config={},
config={},
local_borg_version='1.2.3',
options=['break-lock', '::'],
remote_path='borg1',
@ -179,7 +179,7 @@ def test_run_arbitrary_borg_passes_borg_specific_flags_to_borg():
module.run_arbitrary_borg(
repository_path='repo',
storage_config={},
config={},
local_borg_version='1.2.3',
options=['list', '--progress', '::'],
)
@ -200,7 +200,7 @@ def test_run_arbitrary_borg_omits_dash_dash_in_flags_passed_to_borg():
module.run_arbitrary_borg(
repository_path='repo',
storage_config={},
config={},
local_borg_version='1.2.3',
options=['--', 'break-lock', '::'],
)
@ -221,7 +221,7 @@ def test_run_arbitrary_borg_without_borg_specific_flags_does_not_raise():
module.run_arbitrary_borg(
repository_path='repo',
storage_config={},
config={},
local_borg_version='1.2.3',
options=[],
)
@ -243,7 +243,7 @@ def test_run_arbitrary_borg_passes_key_sub_command_to_borg_before_injected_flags
module.run_arbitrary_borg(
repository_path='repo',
storage_config={},
config={},
local_borg_version='1.2.3',
options=['key', 'export', '::'],
)
@ -265,7 +265,7 @@ def test_run_arbitrary_borg_passes_debug_sub_command_to_borg_before_injected_fla
module.run_arbitrary_borg(
repository_path='repo',
storage_config={},
config={},
local_borg_version='1.2.3',
options=['debug', 'dump-manifest', '::', 'path'],
)

View File

@ -22,7 +22,7 @@ def test_break_lock_calls_borg_with_required_flags():
module.break_lock(
repository_path='repo',
storage_config={},
config={},
local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False),
)
@ -34,7 +34,7 @@ def test_break_lock_calls_borg_with_remote_path_flags():
module.break_lock(
repository_path='repo',
storage_config={},
config={},
local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False),
remote_path='borg1',
@ -47,7 +47,7 @@ def test_break_lock_calls_borg_with_umask_flags():
module.break_lock(
repository_path='repo',
storage_config={'umask': '0770'},
config={'umask': '0770'},
local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False),
)
@ -59,7 +59,7 @@ def test_break_lock_calls_borg_with_log_json_flags():
module.break_lock(
repository_path='repo',
storage_config={},
config={},
local_borg_version='1.2.3',
global_arguments=flexmock(log_json=True),
)
@ -71,7 +71,7 @@ def test_break_lock_calls_borg_with_lock_wait_flags():
module.break_lock(
repository_path='repo',
storage_config={'lock_wait': '5'},
config={'lock_wait': '5'},
local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False),
)
@ -84,7 +84,7 @@ def test_break_lock_with_log_info_calls_borg_with_info_parameter():
module.break_lock(
repository_path='repo',
storage_config={},
config={},
local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False),
)
@ -97,7 +97,7 @@ def test_break_lock_with_log_debug_calls_borg_with_debug_flags():
module.break_lock(
repository_path='repo',
storage_config={},
config={},
local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False),
)

View File

@ -99,8 +99,7 @@ def test_filter_checks_on_frequency_without_config_uses_default_checks():
flexmock(module).should_receive('probe_for_check_time').and_return(None)
assert module.filter_checks_on_frequency(
location_config={},
consistency_config={},
config={},
borg_repository_id='repo',
checks=('repository', 'archives'),
force=False,
@ -110,8 +109,7 @@ def test_filter_checks_on_frequency_without_config_uses_default_checks():
def test_filter_checks_on_frequency_retains_unconfigured_check():
assert module.filter_checks_on_frequency(
location_config={},
consistency_config={},
config={},
borg_repository_id='repo',
checks=('data',),
force=False,
@ -122,8 +120,7 @@ def test_filter_checks_on_frequency_retains_check_without_frequency():
flexmock(module).should_receive('parse_frequency').and_return(None)
assert module.filter_checks_on_frequency(
location_config={},
consistency_config={'checks': [{'name': 'archives'}]},
config={'checks': [{'name': 'archives'}]},
borg_repository_id='repo',
checks=('archives',),
force=False,
@ -141,8 +138,7 @@ def test_filter_checks_on_frequency_retains_check_with_elapsed_frequency():
)
assert module.filter_checks_on_frequency(
location_config={},
consistency_config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]},
config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]},
borg_repository_id='repo',
checks=('archives',),
force=False,
@ -158,8 +154,7 @@ def test_filter_checks_on_frequency_retains_check_with_missing_check_time_file()
flexmock(module).should_receive('probe_for_check_time').and_return(None)
assert module.filter_checks_on_frequency(
location_config={},
consistency_config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]},
config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]},
borg_repository_id='repo',
checks=('archives',),
force=False,
@ -178,8 +173,7 @@ def test_filter_checks_on_frequency_skips_check_with_unelapsed_frequency():
assert (
module.filter_checks_on_frequency(
location_config={},
consistency_config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]},
config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]},
borg_repository_id='repo',
checks=('archives',),
force=False,
@ -191,8 +185,7 @@ def test_filter_checks_on_frequency_skips_check_with_unelapsed_frequency():
def test_filter_checks_on_frequency_restains_check_with_unelapsed_frequency_and_force():
assert module.filter_checks_on_frequency(
location_config={},
consistency_config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]},
config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]},
borg_repository_id='repo',
checks=('archives',),
force=True,
@ -616,7 +609,7 @@ def test_upgrade_check_times_renames_stale_temporary_check_path():
def test_check_archives_with_progress_calls_borg_with_progress_parameter():
checks = ('repository',)
consistency_config = {'check_last': None}
config = {'check_last': None}
flexmock(module.rinfo).should_receive('display_repository_info').and_return(
'{"repository": {"id": "repo"}}'
)
@@ -639,9 +632,7 @@ def test_check_archives_with_progress_calls_borg_with_progress_parameter():

     module.check_archives(
         repository_path='repo',
-        location_config={},
-        storage_config={},
-        consistency_config=consistency_config,
+        config=config,
         local_borg_version='1.2.3',
         global_arguments=flexmock(log_json=False),
         progress=True,
@@ -650,7 +641,7 @@ def test_check_archives_with_progress_calls_borg_with_progress_parameter():

 def test_check_archives_with_repair_calls_borg_with_repair_parameter():
     checks = ('repository',)
-    consistency_config = {'check_last': None}
+    config = {'check_last': None}
     flexmock(module.rinfo).should_receive('display_repository_info').and_return(
         '{"repository": {"id": "repo"}}'
     )
@@ -673,9 +664,7 @@ def test_check_archives_with_repair_calls_borg_with_repair_parameter():

     module.check_archives(
         repository_path='repo',
-        location_config={},
-        storage_config={},
-        consistency_config=consistency_config,
+        config=config,
         local_borg_version='1.2.3',
         global_arguments=flexmock(log_json=False),
         repair=True,
@@ -693,7 +682,7 @@ def test_check_archives_with_repair_calls_borg_with_repair_parameter():
 )
 def test_check_archives_calls_borg_with_parameters(checks):
     check_last = flexmock()
-    consistency_config = {'check_last': check_last}
+    config = {'check_last': check_last}
     flexmock(module.rinfo).should_receive('display_repository_info').and_return(
         '{"repository": {"id": "repo"}}'
     )
@@ -710,9 +699,7 @@ def test_check_archives_calls_borg_with_parameters(checks):

     module.check_archives(
         repository_path='repo',
-        location_config={},
-        storage_config={},
-        consistency_config=consistency_config,
+        config=config,
         local_borg_version='1.2.3',
         global_arguments=flexmock(log_json=False),
     )
@@ -721,7 +708,7 @@ def test_check_archives_calls_borg_with_parameters(checks):
 def test_check_archives_with_json_error_raises():
     checks = ('archives',)
     check_last = flexmock()
-    consistency_config = {'check_last': check_last}
+    config = {'check_last': check_last}
     flexmock(module.rinfo).should_receive('display_repository_info').and_return(
         '{"unexpected": {"id": "repo"}}'
     )
@@ -734,9 +721,7 @@ def test_check_archives_with_json_error_raises():
     with pytest.raises(ValueError):
         module.check_archives(
             repository_path='repo',
-            location_config={},
-            storage_config={},
-            consistency_config=consistency_config,
+            config=config,
             local_borg_version='1.2.3',
             global_arguments=flexmock(log_json=False),
         )
@@ -745,7 +730,7 @@ def test_check_archives_with_json_error_raises():
 def test_check_archives_with_missing_json_keys_raises():
     checks = ('archives',)
     check_last = flexmock()
-    consistency_config = {'check_last': check_last}
+    config = {'check_last': check_last}
     flexmock(module.rinfo).should_receive('display_repository_info').and_return('{invalid JSON')
     flexmock(module).should_receive('upgrade_check_times')
     flexmock(module).should_receive('parse_checks')
@@ -756,9 +741,7 @@ def test_check_archives_with_missing_json_keys_raises():
     with pytest.raises(ValueError):
         module.check_archives(
             repository_path='repo',
-            location_config={},
-            storage_config={},
-            consistency_config=consistency_config,
+            config=config,
             local_borg_version='1.2.3',
             global_arguments=flexmock(log_json=False),
         )
@@ -767,7 +750,7 @@ def test_check_archives_with_missing_json_keys_raises():
 def test_check_archives_with_extract_check_calls_extract_only():
     checks = ('extract',)
     check_last = flexmock()
-    consistency_config = {'check_last': check_last}
+    config = {'check_last': check_last}
     flexmock(module.rinfo).should_receive('display_repository_info').and_return(
         '{"repository": {"id": "repo"}}'
     )
@@ -784,9 +767,7 @@ def test_check_archives_with_extract_check_calls_extract_only():

     module.check_archives(
         repository_path='repo',
-        location_config={},
-        storage_config={},
-        consistency_config=consistency_config,
+        config=config,
         local_borg_version='1.2.3',
         global_arguments=flexmock(log_json=False),
     )
@@ -794,7 +775,7 @@ def test_check_archives_with_extract_check_calls_extract_only():

 def test_check_archives_with_log_info_calls_borg_with_info_parameter():
     checks = ('repository',)
-    consistency_config = {'check_last': None}
+    config = {'check_last': None}
     flexmock(module.rinfo).should_receive('display_repository_info').and_return(
         '{"repository": {"id": "repo"}}'
     )
@@ -812,9 +793,7 @@ def test_check_archives_with_log_info_calls_borg_with_info_parameter():

     module.check_archives(
         repository_path='repo',
-        location_config={},
-        storage_config={},
-        consistency_config=consistency_config,
+        config=config,
         local_borg_version='1.2.3',
         global_arguments=flexmock(log_json=False),
     )
@@ -822,7 +801,7 @@ def test_check_archives_with_log_info_calls_borg_with_info_parameter():

 def test_check_archives_with_log_debug_calls_borg_with_debug_parameter():
     checks = ('repository',)
-    consistency_config = {'check_last': None}
+    config = {'check_last': None}
     flexmock(module.rinfo).should_receive('display_repository_info').and_return(
         '{"repository": {"id": "repo"}}'
     )
@@ -840,16 +819,14 @@ def test_check_archives_with_log_debug_calls_borg_with_debug_parameter():

     module.check_archives(
         repository_path='repo',
-        location_config={},
-        storage_config={},
-        consistency_config=consistency_config,
+        config=config,
         local_borg_version='1.2.3',
         global_arguments=flexmock(log_json=False),
     )


 def test_check_archives_without_any_checks_bails():
-    consistency_config = {'check_last': None}
+    config = {'check_last': None}
     flexmock(module.rinfo).should_receive('display_repository_info').and_return(
         '{"repository": {"id": "repo"}}'
     )
@@ -862,9 +839,7 @@ def test_check_archives_without_any_checks_bails():

     module.check_archives(
         repository_path='repo',
-        location_config={},
-        storage_config={},
-        consistency_config=consistency_config,
+        config=config,
         local_borg_version='1.2.3',
         global_arguments=flexmock(log_json=False),
     )
@@ -873,7 +848,7 @@ def test_check_archives_without_any_checks_bails():
 def test_check_archives_with_local_path_calls_borg_via_local_path():
     checks = ('repository',)
     check_last = flexmock()
-    consistency_config = {'check_last': check_last}
+    config = {'check_last': check_last}
     flexmock(module.rinfo).should_receive('display_repository_info').and_return(
         '{"repository": {"id": "repo"}}'
     )
@@ -890,9 +865,7 @@ def test_check_archives_with_local_path_calls_borg_via_local_path():

     module.check_archives(
         repository_path='repo',
-        location_config={},
-        storage_config={},
-        consistency_config=consistency_config,
+        config=config,
         local_borg_version='1.2.3',
         global_arguments=flexmock(log_json=False),
         local_path='borg1',
@@ -902,7 +875,7 @@ def test_check_archives_with_local_path_calls_borg_via_local_path():
 def test_check_archives_with_remote_path_calls_borg_with_remote_path_parameters():
     checks = ('repository',)
     check_last = flexmock()
-    consistency_config = {'check_last': check_last}
+    config = {'check_last': check_last}
     flexmock(module.rinfo).should_receive('display_repository_info').and_return(
         '{"repository": {"id": "repo"}}'
     )
@@ -919,9 +892,7 @@ def test_check_archives_with_remote_path_calls_borg_with_remote_path_parameters(

     module.check_archives(
         repository_path='repo',
-        location_config={},
-        storage_config={},
-        consistency_config=consistency_config,
+        config=config,
         local_borg_version='1.2.3',
         global_arguments=flexmock(log_json=False),
         remote_path='borg1',
@@ -931,8 +902,7 @@ def test_check_archives_with_remote_path_calls_borg_with_remote_path_parameters(
 def test_check_archives_with_log_json_calls_borg_with_log_json_parameters():
     checks = ('repository',)
     check_last = flexmock()
-    storage_config = {}
-    consistency_config = {'check_last': check_last}
+    config = {'check_last': check_last}
     flexmock(module.rinfo).should_receive('display_repository_info').and_return(
         '{"repository": {"id": "repo"}}'
     )
@@ -949,9 +919,7 @@ def test_check_archives_with_log_json_calls_borg_with_log_json_parameters():

     module.check_archives(
         repository_path='repo',
-        location_config={},
-        storage_config=storage_config,
-        consistency_config=consistency_config,
+        config=config,
         local_borg_version='1.2.3',
         global_arguments=flexmock(log_json=True),
     )
@@ -960,8 +928,7 @@ def test_check_archives_with_log_json_calls_borg_with_log_json_parameters():
 def test_check_archives_with_lock_wait_calls_borg_with_lock_wait_parameters():
     checks = ('repository',)
     check_last = flexmock()
-    storage_config = {'lock_wait': 5}
-    consistency_config = {'check_last': check_last}
+    config = {'lock_wait': 5, 'check_last': check_last}
     flexmock(module.rinfo).should_receive('display_repository_info').and_return(
         '{"repository": {"id": "repo"}}'
     )
@@ -978,9 +945,7 @@ def test_check_archives_with_lock_wait_calls_borg_with_lock_wait_parameters():

     module.check_archives(
         repository_path='repo',
-        location_config={},
-        storage_config=storage_config,
-        consistency_config=consistency_config,
+        config=config,
         local_borg_version='1.2.3',
         global_arguments=flexmock(log_json=False),
     )
@@ -990,7 +955,7 @@ def test_check_archives_with_retention_prefix():
     checks = ('repository',)
     check_last = flexmock()
     prefix = 'foo-'
-    consistency_config = {'check_last': check_last, 'prefix': prefix}
+    config = {'check_last': check_last, 'prefix': prefix}
     flexmock(module.rinfo).should_receive('display_repository_info').and_return(
         '{"repository": {"id": "repo"}}'
     )
@@ -1007,9 +972,7 @@ def test_check_archives_with_retention_prefix():

     module.check_archives(
         repository_path='repo',
-        location_config={},
-        storage_config={},
-        consistency_config=consistency_config,
+        config=config,
         local_borg_version='1.2.3',
         global_arguments=flexmock(log_json=False),
     )
@@ -1017,7 +980,7 @@ def test_check_archives_with_retention_prefix():

 def test_check_archives_with_extra_borg_options_calls_borg_with_extra_options():
     checks = ('repository',)
-    consistency_config = {'check_last': None}
+    config = {'check_last': None, 'extra_borg_options': {'check': '--extra --options'}}
     flexmock(module.rinfo).should_receive('display_repository_info').and_return(
         '{"repository": {"id": "repo"}}'
     )
@@ -1034,9 +997,7 @@ def test_check_archives_with_extra_borg_options_calls_borg_with_extra_options():

     module.check_archives(
         repository_path='repo',
-        location_config={},
-        storage_config={'extra_borg_options': {'check': '--extra --options'}},
-        consistency_config=consistency_config,
+        config=config,
         local_borg_version='1.2.3',
         global_arguments=flexmock(log_json=False),
     )

Some files were not shown because too many files have changed in this diff.
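Every hunk above makes the same mechanical change: `check_archives()` drops its per-section `location_config`, `storage_config`, and `consistency_config` parameters in favor of a single flat `config` dict. A minimal sketch of the before/after call shape follows, with placeholder option values drawn from the tests above (`lock_wait`, `check_last`, `prefix`); actually running it would also require Borg and a real repository, so treat it as illustrative rather than executable as-is:

```python
from argparse import Namespace

from borgmatic.borg.check import check_archives

# Stand-in for parsed global CLI arguments; the tests above stub this
# with flexmock(log_json=...).
global_arguments = Namespace(log_json=False)

# Old signature (pre-#721), one dict per configuration section:
#
#     check_archives(
#         repository_path='repo',
#         location_config={},
#         storage_config={'lock_wait': 5},
#         consistency_config={'check_last': 3, 'prefix': 'foo-'},
#         local_borg_version='1.2.3',
#         global_arguments=global_arguments,
#     )

# New signature (#721): the same options merged into one flat dict.
check_archives(
    repository_path='repo',
    config={'lock_wait': 5, 'check_last': 3, 'prefix': 'foo-'},
    local_borg_version='1.2.3',
    global_arguments=global_arguments,
)
```

Note how the tests exercise exactly this merge: options that previously lived in `storage_config` (such as `lock_wait` and `extra_borg_options`) and in `consistency_config` (such as `check_last` and `prefix`) now arrive in the same `config` dict.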