From 48b6a1679a20d46d797ffb5f534d33a6bb29931f Mon Sep 17 00:00:00 2001 From: Dan Helfman Date: Sat, 8 Jul 2023 23:14:30 -0700 Subject: [PATCH 01/12] Remove sections (#721). --- NEWS | 7 + borgmatic/actions/borg.py | 6 +- borgmatic/actions/break_lock.py | 4 +- borgmatic/actions/check.py | 17 +- borgmatic/actions/compact.py | 14 +- borgmatic/actions/config/generate.py | 2 + borgmatic/actions/config/validate.py | 3 + borgmatic/actions/create.py | 32 +- borgmatic/actions/export_tar.py | 6 +- borgmatic/actions/extract.py | 17 +- borgmatic/actions/info.py | 6 +- borgmatic/actions/list.py | 6 +- borgmatic/actions/mount.py | 6 +- borgmatic/actions/prune.py | 15 +- borgmatic/actions/rcreate.py | 4 +- borgmatic/actions/restore.py | 74 +- borgmatic/actions/rinfo.py | 4 +- borgmatic/actions/rlist.py | 4 +- borgmatic/actions/transfer.py | 4 +- borgmatic/borg/borg.py | 8 +- borgmatic/borg/break_lock.py | 12 +- borgmatic/borg/check.py | 116 +- borgmatic/borg/compact.py | 14 +- borgmatic/borg/create.py | 111 +- borgmatic/borg/environment.py | 8 +- borgmatic/borg/export_tar.py | 16 +- borgmatic/borg/extract.py | 23 +- borgmatic/borg/info.py | 14 +- borgmatic/borg/list.py | 30 +- borgmatic/borg/mount.py | 8 +- borgmatic/borg/prune.py | 34 +- borgmatic/borg/rcreate.py | 16 +- borgmatic/borg/rinfo.py | 8 +- borgmatic/borg/rlist.py | 26 +- borgmatic/borg/transfer.py | 12 +- borgmatic/borg/version.py | 6 +- borgmatic/commands/arguments.py | 2 +- borgmatic/commands/borgmatic.py | 117 +- borgmatic/config/generate.py | 27 +- borgmatic/config/load.py | 10 +- borgmatic/config/normalize.py | 119 +- borgmatic/config/override.py | 28 +- borgmatic/config/schema.yaml | 2577 ++++++++++----------- borgmatic/config/validate.py | 25 +- borgmatic/hooks/cronhub.py | 2 +- borgmatic/hooks/cronitor.py | 2 +- borgmatic/hooks/dispatch.py | 39 +- borgmatic/hooks/healthchecks.py | 2 +- borgmatic/hooks/mongodb.py | 41 +- borgmatic/hooks/mysql.py | 41 +- borgmatic/hooks/ntfy.py | 2 +- 
borgmatic/hooks/pagerduty.py | 2 +- borgmatic/hooks/postgresql.py | 48 +- borgmatic/hooks/sqlite.py | 34 +- tests/integration/borg/test_commands.py | 1 - tests/integration/config/test_generate.py | 28 +- tests/integration/config/test_validate.py | 149 +- tests/unit/actions/test_borg.py | 2 +- tests/unit/actions/test_break_lock.py | 2 +- tests/unit/actions/test_check.py | 18 +- tests/unit/actions/test_compact.py | 12 +- tests/unit/actions/test_create.py | 17 +- tests/unit/actions/test_export_tar.py | 2 +- tests/unit/actions/test_extract.py | 4 +- tests/unit/actions/test_info.py | 2 +- tests/unit/actions/test_list.py | 2 +- tests/unit/actions/test_mount.py | 2 +- tests/unit/actions/test_prune.py | 12 +- tests/unit/actions/test_rcreate.py | 4 +- tests/unit/actions/test_restore.py | 87 +- tests/unit/actions/test_rinfo.py | 2 +- tests/unit/actions/test_rlist.py | 2 +- tests/unit/actions/test_transfer.py | 2 +- tests/unit/borg/test_borg.py | 26 +- tests/unit/borg/test_break_lock.py | 14 +- tests/unit/borg/test_check.py | 113 +- tests/unit/borg/test_compact.py | 30 +- tests/unit/borg/test_create.py | 191 +- tests/unit/borg/test_export_tar.py | 28 +- tests/unit/borg/test_extract.py | 67 +- tests/unit/borg/test_info.py | 38 +- tests/unit/borg/test_list.py | 60 +- tests/unit/borg/test_mount.py | 26 +- tests/unit/borg/test_prune.py | 88 +- tests/unit/borg/test_rcreate.py | 34 +- tests/unit/borg/test_rinfo.py | 24 +- tests/unit/borg/test_rlist.py | 58 +- tests/unit/borg/test_transfer.py | 32 +- tests/unit/commands/test_borgmatic.py | 176 +- tests/unit/config/test_generate.py | 35 +- tests/unit/config/test_normalize.py | 313 ++- tests/unit/config/test_override.py | 46 +- tests/unit/config/test_validate.py | 56 +- tests/unit/hooks/test_cronhub.py | 23 +- tests/unit/hooks/test_cronitor.py | 22 +- tests/unit/hooks/test_dispatch.py | 44 +- tests/unit/hooks/test_healthchecks.py | 12 + tests/unit/hooks/test_mongodb.py | 36 +- tests/unit/hooks/test_mysql.py | 28 +- 
tests/unit/hooks/test_ntfy.py | 12 + tests/unit/hooks/test_pagerduty.py | 6 + tests/unit/hooks/test_postgresql.py | 48 +- tests/unit/hooks/test_sqlite.py | 20 +- 103 files changed, 2817 insertions(+), 3050 deletions(-) diff --git a/NEWS b/NEWS index f1ecea0d..2831f29a 100644 --- a/NEWS +++ b/NEWS @@ -8,6 +8,13 @@ * #720: Fix an error when dumping a MySQL database and the "exclude_nodump" option is set. * When merging two configuration files, error gracefully if the two files do not adhere to the same format. + * #721: Remove configuration sections ("location:", "storage:", "hooks:" etc.), while still keeping + deprecated support for them. Now, all options are at the same level, and you don't need to worry + about commenting/uncommenting section headers when you change an option. + * #721: BREAKING: The retention prefix and the consistency prefix can no longer have different + values (unless one is not set). + * #721: BREAKING: The storage umask and the hooks umask can no longer have different values (unless + one is not set). * BREAKING: Remove the deprecated (and silently ignored) "--successful" flag on the "list" action, as newer versions of Borg list successful (non-checkpoint) archives by default. * All deprecated configuration option values now generate warning logs. 
diff --git a/borgmatic/actions/borg.py b/borgmatic/actions/borg.py index 44ffc951..6e46596f 100644 --- a/borgmatic/actions/borg.py +++ b/borgmatic/actions/borg.py @@ -9,7 +9,7 @@ logger = logging.getLogger(__name__) def run_borg( repository, - storage, + config, local_borg_version, borg_arguments, global_arguments, @@ -28,7 +28,7 @@ def run_borg( archive_name = borgmatic.borg.rlist.resolve_archive_name( repository['path'], borg_arguments.archive, - storage, + config, local_borg_version, global_arguments, local_path, @@ -36,7 +36,7 @@ def run_borg( ) borgmatic.borg.borg.run_arbitrary_borg( repository['path'], - storage, + config, local_borg_version, options=borg_arguments.options, archive=archive_name, diff --git a/borgmatic/actions/break_lock.py b/borgmatic/actions/break_lock.py index a00d5785..e94ab466 100644 --- a/borgmatic/actions/break_lock.py +++ b/borgmatic/actions/break_lock.py @@ -8,7 +8,7 @@ logger = logging.getLogger(__name__) def run_break_lock( repository, - storage, + config, local_borg_version, break_lock_arguments, global_arguments, @@ -26,7 +26,7 @@ def run_break_lock( ) borgmatic.borg.break_lock.break_lock( repository['path'], - storage, + config, local_borg_version, global_arguments, local_path=local_path, diff --git a/borgmatic/actions/check.py b/borgmatic/actions/check.py index 610d41ee..fe800156 100644 --- a/borgmatic/actions/check.py +++ b/borgmatic/actions/check.py @@ -10,10 +10,7 @@ logger = logging.getLogger(__name__) def run_check( config_filename, repository, - location, - storage, - consistency, - hooks, + config, hook_context, local_borg_version, check_arguments, @@ -30,8 +27,8 @@ def run_check( return borgmatic.hooks.command.execute_hook( - hooks.get('before_check'), - hooks.get('umask'), + config.get('before_check'), + config.get('umask'), config_filename, 'pre-check', global_arguments.dry_run, @@ -40,9 +37,7 @@ def run_check( logger.info(f'{repository.get("label", repository["path"])}: Running consistency checks') 
borgmatic.borg.check.check_archives( repository['path'], - location, - storage, - consistency, + config, local_borg_version, global_arguments, local_path=local_path, @@ -53,8 +48,8 @@ def run_check( force=check_arguments.force, ) borgmatic.hooks.command.execute_hook( - hooks.get('after_check'), - hooks.get('umask'), + config.get('after_check'), + config.get('umask'), config_filename, 'post-check', global_arguments.dry_run, diff --git a/borgmatic/actions/compact.py b/borgmatic/actions/compact.py index ad680d21..29cf8943 100644 --- a/borgmatic/actions/compact.py +++ b/borgmatic/actions/compact.py @@ -11,9 +11,7 @@ logger = logging.getLogger(__name__) def run_compact( config_filename, repository, - storage, - retention, - hooks, + config, hook_context, local_borg_version, compact_arguments, @@ -31,8 +29,8 @@ def run_compact( return borgmatic.hooks.command.execute_hook( - hooks.get('before_compact'), - hooks.get('umask'), + config.get('before_compact'), + config.get('umask'), config_filename, 'pre-compact', global_arguments.dry_run, @@ -45,7 +43,7 @@ def run_compact( borgmatic.borg.compact.compact_segments( global_arguments.dry_run, repository['path'], - storage, + config, local_borg_version, global_arguments, local_path=local_path, @@ -59,8 +57,8 @@ def run_compact( f'{repository.get("label", repository["path"])}: Skipping compact (only available/needed in Borg 1.2+)' ) borgmatic.hooks.command.execute_hook( - hooks.get('after_compact'), - hooks.get('umask'), + config.get('after_compact'), + config.get('umask'), config_filename, 'post-compact', global_arguments.dry_run, diff --git a/borgmatic/actions/config/generate.py b/borgmatic/actions/config/generate.py index 1943ea74..48b9a7dd 100644 --- a/borgmatic/actions/config/generate.py +++ b/borgmatic/actions/config/generate.py @@ -2,6 +2,7 @@ import logging import borgmatic.config.generate import borgmatic.config.validate +import borgmatic.logger logger = logging.getLogger(__name__) @@ -14,6 +15,7 @@ def 
run_generate(generate_arguments, global_arguments): Raise FileExistsError if a file already exists at the destination path and the generate arguments do not have overwrite set. ''' + borgmatic.logger.add_custom_log_levels() dry_run_label = ' (dry run; not actually writing anything)' if global_arguments.dry_run else '' logger.answer( diff --git a/borgmatic/actions/config/validate.py b/borgmatic/actions/config/validate.py index 2cec6135..2929ccaa 100644 --- a/borgmatic/actions/config/validate.py +++ b/borgmatic/actions/config/validate.py @@ -1,6 +1,7 @@ import logging import borgmatic.config.generate +import borgmatic.logger logger = logging.getLogger(__name__) @@ -14,6 +15,8 @@ def run_validate(validate_arguments, configs): loading machinery prior to here, so this function mainly exists to support additional validate flags like "--show". ''' + borgmatic.logger.add_custom_log_levels() + if validate_arguments.show: for config_path, config in configs.items(): if len(configs) > 1: diff --git a/borgmatic/actions/create.py b/borgmatic/actions/create.py index 1bacf73b..1d750f62 100644 --- a/borgmatic/actions/create.py +++ b/borgmatic/actions/create.py @@ -17,7 +17,7 @@ import borgmatic.hooks.dump logger = logging.getLogger(__name__) -def create_borgmatic_manifest(location, config_paths, dry_run): +def create_borgmatic_manifest(config, config_paths, dry_run): ''' Create a borgmatic manifest file to store the paths to the configuration files used to create the archive. 
@@ -25,7 +25,7 @@ def create_borgmatic_manifest(location, config_paths, dry_run): if dry_run: return - borgmatic_source_directory = location.get( + borgmatic_source_directory = config.get( 'borgmatic_source_directory', borgmatic.borg.state.DEFAULT_BORGMATIC_SOURCE_DIRECTORY ) @@ -49,9 +49,7 @@ def create_borgmatic_manifest(location, config_paths, dry_run): def run_create( config_filename, repository, - location, - storage, - hooks, + config, hook_context, local_borg_version, create_arguments, @@ -71,8 +69,8 @@ def run_create( return borgmatic.hooks.command.execute_hook( - hooks.get('before_backup'), - hooks.get('umask'), + config.get('before_backup'), + config.get('umask'), config_filename, 'pre-backup', global_arguments.dry_run, @@ -81,30 +79,25 @@ def run_create( logger.info(f'{repository.get("label", repository["path"])}: Creating archive{dry_run_label}') borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured( 'remove_database_dumps', - hooks, + config, repository['path'], borgmatic.hooks.dump.DATABASE_HOOK_NAMES, - location, global_arguments.dry_run, ) active_dumps = borgmatic.hooks.dispatch.call_hooks( 'dump_databases', - hooks, + config, repository['path'], borgmatic.hooks.dump.DATABASE_HOOK_NAMES, - location, global_arguments.dry_run, ) - create_borgmatic_manifest( - location, global_arguments.used_config_paths, global_arguments.dry_run - ) + create_borgmatic_manifest(config, global_arguments.used_config_paths, global_arguments.dry_run) stream_processes = [process for processes in active_dumps.values() for process in processes] json_output = borgmatic.borg.create.create_archive( global_arguments.dry_run, repository['path'], - location, - storage, + config, local_borg_version, global_arguments, local_path=local_path, @@ -120,15 +113,14 @@ def run_create( borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured( 'remove_database_dumps', - hooks, + config, config_filename, borgmatic.hooks.dump.DATABASE_HOOK_NAMES, - location, global_arguments.dry_run, ) 
borgmatic.hooks.command.execute_hook( - hooks.get('after_backup'), - hooks.get('umask'), + config.get('after_backup'), + config.get('umask'), config_filename, 'post-backup', global_arguments.dry_run, diff --git a/borgmatic/actions/export_tar.py b/borgmatic/actions/export_tar.py index 798bd418..f1937b54 100644 --- a/borgmatic/actions/export_tar.py +++ b/borgmatic/actions/export_tar.py @@ -9,7 +9,7 @@ logger = logging.getLogger(__name__) def run_export_tar( repository, - storage, + config, local_borg_version, export_tar_arguments, global_arguments, @@ -31,7 +31,7 @@ def run_export_tar( borgmatic.borg.rlist.resolve_archive_name( repository['path'], export_tar_arguments.archive, - storage, + config, local_borg_version, global_arguments, local_path, @@ -39,7 +39,7 @@ def run_export_tar( ), export_tar_arguments.paths, export_tar_arguments.destination, - storage, + config, local_borg_version, global_arguments, local_path=local_path, diff --git a/borgmatic/actions/extract.py b/borgmatic/actions/extract.py index 0bb1efb7..5d02d4c7 100644 --- a/borgmatic/actions/extract.py +++ b/borgmatic/actions/extract.py @@ -11,9 +11,7 @@ logger = logging.getLogger(__name__) def run_extract( config_filename, repository, - location, - storage, - hooks, + config, hook_context, local_borg_version, extract_arguments, @@ -25,8 +23,8 @@ def run_extract( Run the "extract" action for the given repository. 
''' borgmatic.hooks.command.execute_hook( - hooks.get('before_extract'), - hooks.get('umask'), + config.get('before_extract'), + config.get('umask'), config_filename, 'pre-extract', global_arguments.dry_run, @@ -44,15 +42,14 @@ def run_extract( borgmatic.borg.rlist.resolve_archive_name( repository['path'], extract_arguments.archive, - storage, + config, local_borg_version, global_arguments, local_path, remote_path, ), extract_arguments.paths, - location, - storage, + config, local_borg_version, global_arguments, local_path=local_path, @@ -62,8 +59,8 @@ def run_extract( progress=extract_arguments.progress, ) borgmatic.hooks.command.execute_hook( - hooks.get('after_extract'), - hooks.get('umask'), + config.get('after_extract'), + config.get('umask'), config_filename, 'post-extract', global_arguments.dry_run, diff --git a/borgmatic/actions/info.py b/borgmatic/actions/info.py index 0116fd70..b09f3ece 100644 --- a/borgmatic/actions/info.py +++ b/borgmatic/actions/info.py @@ -11,7 +11,7 @@ logger = logging.getLogger(__name__) def run_info( repository, - storage, + config, local_borg_version, info_arguments, global_arguments, @@ -33,7 +33,7 @@ def run_info( archive_name = borgmatic.borg.rlist.resolve_archive_name( repository['path'], info_arguments.archive, - storage, + config, local_borg_version, global_arguments, local_path, @@ -41,7 +41,7 @@ def run_info( ) json_output = borgmatic.borg.info.display_archives_info( repository['path'], - storage, + config, local_borg_version, borgmatic.actions.arguments.update_arguments(info_arguments, archive=archive_name), global_arguments, diff --git a/borgmatic/actions/list.py b/borgmatic/actions/list.py index 667062d8..ae9da63c 100644 --- a/borgmatic/actions/list.py +++ b/borgmatic/actions/list.py @@ -10,7 +10,7 @@ logger = logging.getLogger(__name__) def run_list( repository, - storage, + config, local_borg_version, list_arguments, global_arguments, @@ -34,7 +34,7 @@ def run_list( archive_name = 
borgmatic.borg.rlist.resolve_archive_name( repository['path'], list_arguments.archive, - storage, + config, local_borg_version, global_arguments, local_path, @@ -42,7 +42,7 @@ def run_list( ) json_output = borgmatic.borg.list.list_archive( repository['path'], - storage, + config, local_borg_version, borgmatic.actions.arguments.update_arguments(list_arguments, archive=archive_name), global_arguments, diff --git a/borgmatic/actions/mount.py b/borgmatic/actions/mount.py index cc8a2cbd..86b05859 100644 --- a/borgmatic/actions/mount.py +++ b/borgmatic/actions/mount.py @@ -9,7 +9,7 @@ logger = logging.getLogger(__name__) def run_mount( repository, - storage, + config, local_borg_version, mount_arguments, global_arguments, @@ -34,14 +34,14 @@ def run_mount( borgmatic.borg.rlist.resolve_archive_name( repository['path'], mount_arguments.archive, - storage, + config, local_borg_version, global_arguments, local_path, remote_path, ), mount_arguments, - storage, + config, local_borg_version, global_arguments, local_path=local_path, diff --git a/borgmatic/actions/prune.py b/borgmatic/actions/prune.py index deaea384..0cb074b5 100644 --- a/borgmatic/actions/prune.py +++ b/borgmatic/actions/prune.py @@ -10,9 +10,7 @@ logger = logging.getLogger(__name__) def run_prune( config_filename, repository, - storage, - retention, - hooks, + config, hook_context, local_borg_version, prune_arguments, @@ -30,8 +28,8 @@ def run_prune( return borgmatic.hooks.command.execute_hook( - hooks.get('before_prune'), - hooks.get('umask'), + config.get('before_prune'), + config.get('umask'), config_filename, 'pre-prune', global_arguments.dry_run, @@ -41,8 +39,7 @@ def run_prune( borgmatic.borg.prune.prune_archives( global_arguments.dry_run, repository['path'], - storage, - retention, + config, local_borg_version, prune_arguments, global_arguments, @@ -50,8 +47,8 @@ def run_prune( remote_path=remote_path, ) borgmatic.hooks.command.execute_hook( - hooks.get('after_prune'), - hooks.get('umask'), + 
config.get('after_prune'), + config.get('umask'), config_filename, 'post-prune', global_arguments.dry_run, diff --git a/borgmatic/actions/rcreate.py b/borgmatic/actions/rcreate.py index 1bfc489b..32cdef40 100644 --- a/borgmatic/actions/rcreate.py +++ b/borgmatic/actions/rcreate.py @@ -8,7 +8,7 @@ logger = logging.getLogger(__name__) def run_rcreate( repository, - storage, + config, local_borg_version, rcreate_arguments, global_arguments, @@ -27,7 +27,7 @@ def run_rcreate( borgmatic.borg.rcreate.create_repository( global_arguments.dry_run, repository['path'], - storage, + config, local_borg_version, global_arguments, rcreate_arguments.encryption_mode, diff --git a/borgmatic/actions/restore.py b/borgmatic/actions/restore.py index d44a2cac..8112207a 100644 --- a/borgmatic/actions/restore.py +++ b/borgmatic/actions/restore.py @@ -18,12 +18,12 @@ UNSPECIFIED_HOOK = object() def get_configured_database( - hooks, archive_database_names, hook_name, database_name, configuration_database_name=None + config, archive_database_names, hook_name, database_name, configuration_database_name=None ): ''' - Find the first database with the given hook name and database name in the configured hooks - dict and the given archive database names dict (from hook name to database names contained in - a particular backup archive). If UNSPECIFIED_HOOK is given as the hook name, search all database + Find the first database with the given hook name and database name in the configuration dict and + the given archive database names dict (from hook name to database names contained in a + particular backup archive). If UNSPECIFIED_HOOK is given as the hook name, search all database hooks for the named database. If a configuration database name is given, use that instead of the database name to lookup the database in the given hooks configuration. 
@@ -33,9 +33,13 @@ def get_configured_database( configuration_database_name = database_name if hook_name == UNSPECIFIED_HOOK: - hooks_to_search = hooks + hooks_to_search = { + hook_name: value + for (hook_name, value) in config.items() + if hook_name in borgmatic.hooks.dump.DATABASE_HOOK_NAMES + } else: - hooks_to_search = {hook_name: hooks[hook_name]} + hooks_to_search = {hook_name: config[hook_name]} return next( ( @@ -58,9 +62,7 @@ def get_configured_hook_name_and_database(hooks, database_name): def restore_single_database( repository, - location, - storage, - hooks, + config, local_borg_version, global_arguments, local_path, @@ -81,10 +83,9 @@ def restore_single_database( dump_pattern = borgmatic.hooks.dispatch.call_hooks( 'make_database_dump_pattern', - hooks, + config, repository['path'], borgmatic.hooks.dump.DATABASE_HOOK_NAMES, - location, database['name'], )[hook_name] @@ -94,8 +95,7 @@ def restore_single_database( repository=repository['path'], archive=archive_name, paths=borgmatic.hooks.dump.convert_glob_patterns_to_borg_patterns([dump_pattern]), - location_config=location, - storage_config=storage, + config=config, local_borg_version=local_borg_version, global_arguments=global_arguments, local_path=local_path, @@ -112,7 +112,7 @@ def restore_single_database( {hook_name: [database]}, repository['path'], borgmatic.hooks.dump.DATABASE_HOOK_NAMES, - location, + config, global_arguments.dry_run, extract_process, connection_params, @@ -122,21 +122,20 @@ def restore_single_database( def collect_archive_database_names( repository, archive, - location, - storage, + config, local_borg_version, global_arguments, local_path, remote_path, ): ''' - Given a local or remote repository path, a resolved archive name, a location configuration dict, - a storage configuration dict, the local Borg version, global_arguments an argparse.Namespace, - and local and remote Borg paths, query the archive for the names of databases it contains and - return them as a dict from hook 
name to a sequence of database names. + Given a local or remote repository path, a resolved archive name, a configuration dict, the + local Borg version, global_arguments an argparse.Namespace, and local and remote Borg paths, + query the archive for the names of databases it contains and return them as a dict from hook + name to a sequence of database names. ''' borgmatic_source_directory = os.path.expanduser( - location.get( + config.get( 'borgmatic_source_directory', borgmatic.borg.state.DEFAULT_BORGMATIC_SOURCE_DIRECTORY ) ).lstrip('/') @@ -146,7 +145,7 @@ def collect_archive_database_names( dump_paths = borgmatic.borg.list.capture_archive_listing( repository, archive, - storage, + config, local_borg_version, global_arguments, list_path=parent_dump_path, @@ -249,9 +248,7 @@ def ensure_databases_found(restore_names, remaining_restore_names, found_names): def run_restore( repository, - location, - storage, - hooks, + config, local_borg_version, restore_arguments, global_arguments, @@ -275,17 +272,16 @@ def run_restore( borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured( 'remove_database_dumps', - hooks, + config, repository['path'], borgmatic.hooks.dump.DATABASE_HOOK_NAMES, - location, global_arguments.dry_run, ) archive_name = borgmatic.borg.rlist.resolve_archive_name( repository['path'], restore_arguments.archive, - storage, + config, local_borg_version, global_arguments, local_path, @@ -294,8 +290,7 @@ def run_restore( archive_database_names = collect_archive_database_names( repository['path'], archive_name, - location, - storage, + config, local_borg_version, global_arguments, local_path, @@ -315,7 +310,7 @@ def run_restore( for hook_name, database_names in restore_names.items(): for database_name in database_names: found_hook_name, found_database = get_configured_database( - hooks, archive_database_names, hook_name, database_name + config, archive_database_names, hook_name, database_name ) if not found_database: @@ -327,9 +322,7 @@ def run_restore( 
found_names.add(database_name) restore_single_database( repository, - location, - storage, - hooks, + config, local_borg_version, global_arguments, local_path, @@ -340,12 +333,12 @@ def run_restore( connection_params, ) - # For any database that weren't found via exact matches in the hooks configuration, try to - # fallback to "all" entries. + # For any database that weren't found via exact matches in the configuration, try to fallback + # to "all" entries. for hook_name, database_names in remaining_restore_names.items(): for database_name in database_names: found_hook_name, found_database = get_configured_database( - hooks, archive_database_names, hook_name, database_name, 'all' + config, archive_database_names, hook_name, database_name, 'all' ) if not found_database: @@ -357,9 +350,7 @@ def run_restore( restore_single_database( repository, - location, - storage, - hooks, + config, local_borg_version, global_arguments, local_path, @@ -372,10 +363,9 @@ def run_restore( borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured( 'remove_database_dumps', - hooks, + config, repository['path'], borgmatic.hooks.dump.DATABASE_HOOK_NAMES, - location, global_arguments.dry_run, ) diff --git a/borgmatic/actions/rinfo.py b/borgmatic/actions/rinfo.py index 7756efd0..00de8922 100644 --- a/borgmatic/actions/rinfo.py +++ b/borgmatic/actions/rinfo.py @@ -9,7 +9,7 @@ logger = logging.getLogger(__name__) def run_rinfo( repository, - storage, + config, local_borg_version, rinfo_arguments, global_arguments, @@ -31,7 +31,7 @@ def run_rinfo( json_output = borgmatic.borg.rinfo.display_repository_info( repository['path'], - storage, + config, local_borg_version, rinfo_arguments=rinfo_arguments, global_arguments=global_arguments, diff --git a/borgmatic/actions/rlist.py b/borgmatic/actions/rlist.py index a9dee21d..a79920b6 100644 --- a/borgmatic/actions/rlist.py +++ b/borgmatic/actions/rlist.py @@ -9,7 +9,7 @@ logger = logging.getLogger(__name__) def run_rlist( repository, - storage, + 
config, local_borg_version, rlist_arguments, global_arguments, @@ -29,7 +29,7 @@ def run_rlist( json_output = borgmatic.borg.rlist.list_repository( repository['path'], - storage, + config, local_borg_version, rlist_arguments=rlist_arguments, global_arguments=global_arguments, diff --git a/borgmatic/actions/transfer.py b/borgmatic/actions/transfer.py index df481e4d..4051b14e 100644 --- a/borgmatic/actions/transfer.py +++ b/borgmatic/actions/transfer.py @@ -7,7 +7,7 @@ logger = logging.getLogger(__name__) def run_transfer( repository, - storage, + config, local_borg_version, transfer_arguments, global_arguments, @@ -23,7 +23,7 @@ def run_transfer( borgmatic.borg.transfer.transfer_archives( global_arguments.dry_run, repository['path'], - storage, + config, local_borg_version, transfer_arguments, global_arguments, diff --git a/borgmatic/borg/borg.py b/borgmatic/borg/borg.py index e0a56923..1c0d6d1c 100644 --- a/borgmatic/borg/borg.py +++ b/borgmatic/borg/borg.py @@ -13,7 +13,7 @@ BORG_SUBCOMMANDS_WITH_SUBCOMMANDS = {'key', 'debug'} def run_arbitrary_borg( repository_path, - storage_config, + config, local_borg_version, options, archive=None, @@ -21,13 +21,13 @@ def run_arbitrary_borg( remote_path=None, ): ''' - Given a local or remote repository path, a storage config dict, the local Borg version, a + Given a local or remote repository path, a configuration dict, the local Borg version, a sequence of arbitrary command-line Borg options, and an optional archive name, run an arbitrary Borg command, passing in REPOSITORY and ARCHIVE environment variables for optional use in the command. 
''' borgmatic.logger.add_custom_log_levels() - lock_wait = storage_config.get('lock_wait', None) + lock_wait = config.get('lock_wait', None) try: options = options[1:] if options[0] == '--' else options @@ -61,7 +61,7 @@ def run_arbitrary_borg( borg_local_path=local_path, shell=True, extra_environment=dict( - (environment.make_environment(storage_config) or {}), + (environment.make_environment(config) or {}), **{ 'BORG_REPO': repository_path, 'ARCHIVE': archive if archive else '', diff --git a/borgmatic/borg/break_lock.py b/borgmatic/borg/break_lock.py index 3c361956..c0ee5dbc 100644 --- a/borgmatic/borg/break_lock.py +++ b/borgmatic/borg/break_lock.py @@ -8,19 +8,19 @@ logger = logging.getLogger(__name__) def break_lock( repository_path, - storage_config, + config, local_borg_version, global_arguments, local_path='borg', remote_path=None, ): ''' - Given a local or remote repository path, a storage configuration dict, the local Borg version, - an argparse.Namespace of global arguments, and optional local and remote Borg paths, break any + Given a local or remote repository path, a configuration dict, the local Borg version, an + argparse.Namespace of global arguments, and optional local and remote Borg paths, break any repository and cache locks leftover from Borg aborting. 
''' - umask = storage_config.get('umask', None) - lock_wait = storage_config.get('lock_wait', None) + umask = config.get('umask', None) + lock_wait = config.get('lock_wait', None) full_command = ( (local_path, 'break-lock') @@ -33,5 +33,5 @@ def break_lock( + flags.make_repository_flags(repository_path, local_borg_version) ) - borg_environment = environment.make_environment(storage_config) + borg_environment = environment.make_environment(config) execute_command(full_command, borg_local_path=local_path, extra_environment=borg_environment) diff --git a/borgmatic/borg/check.py b/borgmatic/borg/check.py index 0e54a2cd..859052b0 100644 --- a/borgmatic/borg/check.py +++ b/borgmatic/borg/check.py @@ -19,12 +19,12 @@ DEFAULT_CHECKS = ( logger = logging.getLogger(__name__) -def parse_checks(consistency_config, only_checks=None): +def parse_checks(config, only_checks=None): ''' - Given a consistency config with a "checks" sequence of dicts and an optional list of override + Given a configuration dict with a "checks" sequence of dicts and an optional list of override checks, return a tuple of named checks to run. - For example, given a retention config of: + For example, given a config of: {'checks': ({'name': 'repository'}, {'name': 'archives'})} @@ -36,8 +36,7 @@ def parse_checks(consistency_config, only_checks=None): has a name of "disabled", return an empty tuple, meaning that no checks should be run. 
''' checks = only_checks or tuple( - check_config['name'] - for check_config in (consistency_config.get('checks', None) or DEFAULT_CHECKS) + check_config['name'] for check_config in (config.get('checks', None) or DEFAULT_CHECKS) ) checks = tuple(check.lower() for check in checks) if 'disabled' in checks: @@ -90,23 +89,22 @@ def parse_frequency(frequency): def filter_checks_on_frequency( - location_config, - consistency_config, + config, borg_repository_id, checks, force, archives_check_id=None, ): ''' - Given a location config, a consistency config with a "checks" sequence of dicts, a Borg - repository ID, a sequence of checks, whether to force checks to run, and an ID for the archives - check potentially being run (if any), filter down those checks based on the configured - "frequency" for each check as compared to its check time file. + Given a configuration dict with a "checks" sequence of dicts, a Borg repository ID, a sequence + of checks, whether to force checks to run, and an ID for the archives check potentially being + run (if any), filter down those checks based on the configured "frequency" for each check as + compared to its check time file. In other words, a check whose check time file's timestamp is too new (based on the configured frequency) will get cut from the returned sequence of checks. Example: - consistency_config = { + config = { 'checks': [ { 'name': 'archives', @@ -115,9 +113,9 @@ def filter_checks_on_frequency( ] } - When this function is called with that consistency_config and "archives" in checks, "archives" - will get filtered out of the returned result if its check time file is newer than 2 weeks old, - indicating that it's not yet time to run that check again. + When this function is called with that config and "archives" in checks, "archives" will get + filtered out of the returned result if its check time file is newer than 2 weeks old, indicating + that it's not yet time to run that check again. 
Raise ValueError if a frequency cannot be parsed. ''' @@ -126,7 +124,7 @@ def filter_checks_on_frequency( if force: return tuple(filtered_checks) - for check_config in consistency_config.get('checks', DEFAULT_CHECKS): + for check_config in config.get('checks', DEFAULT_CHECKS): check = check_config['name'] if checks and check not in checks: continue @@ -135,9 +133,7 @@ def filter_checks_on_frequency( if not frequency_delta: continue - check_time = probe_for_check_time( - location_config, borg_repository_id, check, archives_check_id - ) + check_time = probe_for_check_time(config, borg_repository_id, check, archives_check_id) if not check_time: continue @@ -153,13 +149,11 @@ def filter_checks_on_frequency( return tuple(filtered_checks) -def make_archive_filter_flags( - local_borg_version, storage_config, checks, check_last=None, prefix=None -): +def make_archive_filter_flags(local_borg_version, config, checks, check_last=None, prefix=None): ''' - Given the local Borg version, a storage configuration dict, a parsed sequence of checks, the - check last value, and a consistency check prefix, transform the checks into tuple of - command-line flags for filtering archives in a check command. + Given the local Borg version, a configuration dict, a parsed sequence of checks, the check last + value, and a consistency check prefix, transform the checks into tuple of command-line flags for + filtering archives in a check command. If a check_last value is given and "archives" is in checks, then include a "--last" flag. And if a prefix value is given and "archives" is in checks, then include a "--match-archives" flag. 
@@ -174,8 +168,8 @@ def make_archive_filter_flags( if prefix else ( flags.make_match_archives_flags( - storage_config.get('match_archives'), - storage_config.get('archive_name_format'), + config.get('match_archives'), + config.get('archive_name_format'), local_borg_version, ) ) @@ -237,14 +231,14 @@ def make_check_flags(checks, archive_filter_flags): ) -def make_check_time_path(location_config, borg_repository_id, check_type, archives_check_id=None): +def make_check_time_path(config, borg_repository_id, check_type, archives_check_id=None): ''' - Given a location configuration dict, a Borg repository ID, the name of a check type - ("repository", "archives", etc.), and a unique hash of the archives filter flags, return a - path for recording that check's time (the time of that check last occurring). + Given a configuration dict, a Borg repository ID, the name of a check type ("repository", + "archives", etc.), and a unique hash of the archives filter flags, return a path for recording + that check's time (the time of that check last occurring). ''' borgmatic_source_directory = os.path.expanduser( - location_config.get('borgmatic_source_directory', state.DEFAULT_BORGMATIC_SOURCE_DIRECTORY) + config.get('borgmatic_source_directory', state.DEFAULT_BORGMATIC_SOURCE_DIRECTORY) ) if check_type in ('archives', 'data'): @@ -287,11 +281,11 @@ def read_check_time(path): return None -def probe_for_check_time(location_config, borg_repository_id, check, archives_check_id): +def probe_for_check_time(config, borg_repository_id, check, archives_check_id): ''' - Given a location configuration dict, a Borg repository ID, the name of a check type - ("repository", "archives", etc.), and a unique hash of the archives filter flags, return a - the corresponding check time or None if such a check time does not exist. 
+ Given a configuration dict, a Borg repository ID, the name of a check type ("repository", + "archives", etc.), and a unique hash of the archives filter flags, return a the corresponding + check time or None if such a check time does not exist. When the check type is "archives" or "data", this function probes two different paths to find the check time, e.g.: @@ -311,8 +305,8 @@ def probe_for_check_time(location_config, borg_repository_id, check, archives_ch read_check_time(group[0]) for group in itertools.groupby( ( - make_check_time_path(location_config, borg_repository_id, check, archives_check_id), - make_check_time_path(location_config, borg_repository_id, check), + make_check_time_path(config, borg_repository_id, check, archives_check_id), + make_check_time_path(config, borg_repository_id, check), ) ) ) @@ -323,10 +317,10 @@ def probe_for_check_time(location_config, borg_repository_id, check, archives_ch return None -def upgrade_check_times(location_config, borg_repository_id): +def upgrade_check_times(config, borg_repository_id): ''' - Given a location configuration dict and a Borg repository ID, upgrade any corresponding check - times on disk from old-style paths to new-style paths. + Given a configuration dict and a Borg repository ID, upgrade any corresponding check times on + disk from old-style paths to new-style paths. 
Currently, the only upgrade performed is renaming an archive or data check path that looks like: @@ -337,7 +331,7 @@ def upgrade_check_times(location_config, borg_repository_id): ~/.borgmatic/checks/1234567890/archives/all ''' for check_type in ('archives', 'data'): - new_path = make_check_time_path(location_config, borg_repository_id, check_type, 'all') + new_path = make_check_time_path(config, borg_repository_id, check_type, 'all') old_path = os.path.dirname(new_path) temporary_path = f'{old_path}.temp' @@ -357,9 +351,7 @@ def upgrade_check_times(location_config, borg_repository_id): def check_archives( repository_path, - location_config, - storage_config, - consistency_config, + config, local_borg_version, global_arguments, local_path='borg', @@ -370,10 +362,9 @@ def check_archives( force=None, ): ''' - Given a local or remote repository path, a storage config dict, a consistency config dict, - local/remote commands to run, whether to include progress information, whether to attempt a - repair, and an optional list of checks to use instead of configured checks, check the contained - Borg archives for consistency. + Given a local or remote repository path, a configuration dict, local/remote commands to run, + whether to include progress information, whether to attempt a repair, and an optional list of + checks to use instead of configured checks, check the contained Borg archives for consistency. If there are no consistency checks to run, skip running them. 
@@ -383,7 +374,7 @@ def check_archives( borg_repository_id = json.loads( rinfo.display_repository_info( repository_path, - storage_config, + config, local_borg_version, argparse.Namespace(json=True), global_arguments, @@ -394,21 +385,20 @@ def check_archives( except (json.JSONDecodeError, KeyError): raise ValueError(f'Cannot determine Borg repository ID for {repository_path}') - upgrade_check_times(location_config, borg_repository_id) + upgrade_check_times(config, borg_repository_id) - check_last = consistency_config.get('check_last', None) - prefix = consistency_config.get('prefix') - configured_checks = parse_checks(consistency_config, only_checks) + check_last = config.get('check_last', None) + prefix = config.get('prefix') + configured_checks = parse_checks(config, only_checks) lock_wait = None - extra_borg_options = storage_config.get('extra_borg_options', {}).get('check', '') + extra_borg_options = config.get('extra_borg_options', {}).get('check', '') archive_filter_flags = make_archive_filter_flags( - local_borg_version, storage_config, configured_checks, check_last, prefix + local_borg_version, config, configured_checks, check_last, prefix ) archives_check_id = make_archives_check_id(archive_filter_flags) checks = filter_checks_on_frequency( - location_config, - consistency_config, + config, borg_repository_id, configured_checks, force, @@ -416,7 +406,7 @@ def check_archives( ) if set(checks).intersection({'repository', 'archives', 'data'}): - lock_wait = storage_config.get('lock_wait') + lock_wait = config.get('lock_wait') verbosity_flags = () if logger.isEnabledFor(logging.INFO): @@ -437,7 +427,7 @@ def check_archives( + flags.make_repository_flags(repository_path, local_borg_version) ) - borg_environment = environment.make_environment(storage_config) + borg_environment = environment.make_environment(config) # The Borg repair option triggers an interactive prompt, which won't work when output is # captured. And progress messes with the terminal directly. 
@@ -450,12 +440,12 @@ def check_archives( for check in checks: write_check_time( - make_check_time_path(location_config, borg_repository_id, check, archives_check_id) + make_check_time_path(config, borg_repository_id, check, archives_check_id) ) if 'extract' in checks: extract.extract_last_archive_dry_run( - storage_config, + config, local_borg_version, global_arguments, repository_path, @@ -463,4 +453,4 @@ def check_archives( local_path, remote_path, ) - write_check_time(make_check_time_path(location_config, borg_repository_id, 'extract')) + write_check_time(make_check_time_path(config, borg_repository_id, 'extract')) diff --git a/borgmatic/borg/compact.py b/borgmatic/borg/compact.py index 24f37ee3..20bbe129 100644 --- a/borgmatic/borg/compact.py +++ b/borgmatic/borg/compact.py @@ -9,7 +9,7 @@ logger = logging.getLogger(__name__) def compact_segments( dry_run, repository_path, - storage_config, + config, local_borg_version, global_arguments, local_path='borg', @@ -19,12 +19,12 @@ def compact_segments( threshold=None, ): ''' - Given dry-run flag, a local or remote repository path, a storage config dict, and the local - Borg version, compact the segments in a repository. + Given dry-run flag, a local or remote repository path, a configuration dict, and the local Borg + version, compact the segments in a repository. 
''' - umask = storage_config.get('umask', None) - lock_wait = storage_config.get('lock_wait', None) - extra_borg_options = storage_config.get('extra_borg_options', {}).get('compact', '') + umask = config.get('umask', None) + lock_wait = config.get('lock_wait', None) + extra_borg_options = config.get('extra_borg_options', {}).get('compact', '') full_command = ( (local_path, 'compact') @@ -49,5 +49,5 @@ def compact_segments( full_command, output_log_level=logging.INFO, borg_local_path=local_path, - extra_environment=environment.make_environment(storage_config), + extra_environment=environment.make_environment(config), ) diff --git a/borgmatic/borg/create.py b/borgmatic/borg/create.py index 1ec54fdd..018f447d 100644 --- a/borgmatic/borg/create.py +++ b/borgmatic/borg/create.py @@ -146,12 +146,12 @@ def ensure_files_readable(*filename_lists): open(file_object).close() -def make_pattern_flags(location_config, pattern_filename=None): +def make_pattern_flags(config, pattern_filename=None): ''' - Given a location config dict with a potential patterns_from option, and a filename containing - any additional patterns, return the corresponding Borg flags for those files as a tuple. + Given a configuration dict with a potential patterns_from option, and a filename containing any + additional patterns, return the corresponding Borg flags for those files as a tuple. 
''' - pattern_filenames = tuple(location_config.get('patterns_from') or ()) + ( + pattern_filenames = tuple(config.get('patterns_from') or ()) + ( (pattern_filename,) if pattern_filename else () ) @@ -162,12 +162,12 @@ def make_pattern_flags(location_config, pattern_filename=None): ) -def make_exclude_flags(location_config, exclude_filename=None): +def make_exclude_flags(config, exclude_filename=None): ''' - Given a location config dict with various exclude options, and a filename containing any exclude + Given a configuration dict with various exclude options, and a filename containing any exclude patterns, return the corresponding Borg flags as a tuple. ''' - exclude_filenames = tuple(location_config.get('exclude_from') or ()) + ( + exclude_filenames = tuple(config.get('exclude_from') or ()) + ( (exclude_filename,) if exclude_filename else () ) exclude_from_flags = tuple( @@ -175,17 +175,15 @@ def make_exclude_flags(location_config, exclude_filename=None): ('--exclude-from', exclude_filename) for exclude_filename in exclude_filenames ) ) - caches_flag = ('--exclude-caches',) if location_config.get('exclude_caches') else () + caches_flag = ('--exclude-caches',) if config.get('exclude_caches') else () if_present_flags = tuple( itertools.chain.from_iterable( ('--exclude-if-present', if_present) - for if_present in location_config.get('exclude_if_present', ()) + for if_present in config.get('exclude_if_present', ()) ) ) - keep_exclude_tags_flags = ( - ('--keep-exclude-tags',) if location_config.get('keep_exclude_tags') else () - ) - exclude_nodump_flags = ('--exclude-nodump',) if location_config.get('exclude_nodump') else () + keep_exclude_tags_flags = ('--keep-exclude-tags',) if config.get('keep_exclude_tags') else () + exclude_nodump_flags = ('--exclude-nodump',) if config.get('exclude_nodump') else () return ( exclude_from_flags @@ -326,8 +324,7 @@ def check_all_source_directories_exist(source_directories): def create_archive( dry_run, repository_path, - 
location_config, - storage_config, + config, local_borg_version, global_arguments, local_path='borg', @@ -339,72 +336,70 @@ def create_archive( stream_processes=None, ): ''' - Given vebosity/dry-run flags, a local or remote repository path, a location config dict, and a - storage config dict, create a Borg archive and return Borg's JSON output (if any). + Given vebosity/dry-run flags, a local or remote repository path, and a configuration dict, + create a Borg archive and return Borg's JSON output (if any). If a sequence of stream processes is given (instances of subprocess.Popen), then execute the create command while also triggering the given processes to produce output. ''' borgmatic.logger.add_custom_log_levels() borgmatic_source_directories = expand_directories( - collect_borgmatic_source_directories(location_config.get('borgmatic_source_directory')) + collect_borgmatic_source_directories(config.get('borgmatic_source_directory')) ) - if location_config.get('source_directories_must_exist', False): - check_all_source_directories_exist(location_config.get('source_directories')) + if config.get('source_directories_must_exist', False): + check_all_source_directories_exist(config.get('source_directories')) sources = deduplicate_directories( map_directories_to_devices( expand_directories( - tuple(location_config.get('source_directories', ())) + tuple(config.get('source_directories', ())) + borgmatic_source_directories + tuple(global_arguments.used_config_paths) ) ), additional_directory_devices=map_directories_to_devices( - expand_directories(pattern_root_directories(location_config.get('patterns'))) + expand_directories(pattern_root_directories(config.get('patterns'))) ), ) - ensure_files_readable(location_config.get('patterns_from'), location_config.get('exclude_from')) + ensure_files_readable(config.get('patterns_from'), config.get('exclude_from')) try: - working_directory = os.path.expanduser(location_config.get('working_directory')) + working_directory = 
os.path.expanduser(config.get('working_directory')) except TypeError: working_directory = None pattern_file = ( - write_pattern_file(location_config.get('patterns'), sources) - if location_config.get('patterns') or location_config.get('patterns_from') + write_pattern_file(config.get('patterns'), sources) + if config.get('patterns') or config.get('patterns_from') else None ) - exclude_file = write_pattern_file( - expand_home_directories(location_config.get('exclude_patterns')) - ) - checkpoint_interval = storage_config.get('checkpoint_interval', None) - checkpoint_volume = storage_config.get('checkpoint_volume', None) - chunker_params = storage_config.get('chunker_params', None) - compression = storage_config.get('compression', None) - upload_rate_limit = storage_config.get('upload_rate_limit', None) - umask = storage_config.get('umask', None) - lock_wait = storage_config.get('lock_wait', None) + exclude_file = write_pattern_file(expand_home_directories(config.get('exclude_patterns'))) + checkpoint_interval = config.get('checkpoint_interval', None) + checkpoint_volume = config.get('checkpoint_volume', None) + chunker_params = config.get('chunker_params', None) + compression = config.get('compression', None) + upload_rate_limit = config.get('upload_rate_limit', None) + umask = config.get('umask', None) + lock_wait = config.get('lock_wait', None) list_filter_flags = make_list_filter_flags(local_borg_version, dry_run) - files_cache = location_config.get('files_cache') - archive_name_format = storage_config.get('archive_name_format', DEFAULT_ARCHIVE_NAME_FORMAT) - extra_borg_options = storage_config.get('extra_borg_options', {}).get('create', '') + files_cache = config.get('files_cache') + archive_name_format = config.get('archive_name_format', DEFAULT_ARCHIVE_NAME_FORMAT) + extra_borg_options = config.get('extra_borg_options', {}).get('create', '') if feature.available(feature.Feature.ATIME, local_borg_version): - atime_flags = ('--atime',) if 
location_config.get('atime') is True else () + atime_flags = ('--atime',) if config.get('atime') is True else () else: - atime_flags = ('--noatime',) if location_config.get('atime') is False else () + atime_flags = ('--noatime',) if config.get('atime') is False else () if feature.available(feature.Feature.NOFLAGS, local_borg_version): - noflags_flags = ('--noflags',) if location_config.get('flags') is False else () + noflags_flags = ('--noflags',) if config.get('flags') is False else () else: - noflags_flags = ('--nobsdflags',) if location_config.get('flags') is False else () + noflags_flags = ('--nobsdflags',) if config.get('flags') is False else () if feature.available(feature.Feature.NUMERIC_IDS, local_borg_version): - numeric_ids_flags = ('--numeric-ids',) if location_config.get('numeric_ids') else () + numeric_ids_flags = ('--numeric-ids',) if config.get('numeric_ids') else () else: - numeric_ids_flags = ('--numeric-owner',) if location_config.get('numeric_ids') else () + numeric_ids_flags = ('--numeric-owner',) if config.get('numeric_ids') else () if feature.available(feature.Feature.UPLOAD_RATELIMIT, local_borg_version): upload_ratelimit_flags = ( @@ -415,7 +410,7 @@ def create_archive( ('--remote-ratelimit', str(upload_rate_limit)) if upload_rate_limit else () ) - if stream_processes and location_config.get('read_special') is False: + if stream_processes and config.get('read_special') is False: logger.warning( f'{repository_path}: Ignoring configured "read_special" value of false, as true is needed for database hooks.' 
) @@ -423,23 +418,19 @@ def create_archive( create_command = ( tuple(local_path.split(' ')) + ('create',) - + make_pattern_flags(location_config, pattern_file.name if pattern_file else None) - + make_exclude_flags(location_config, exclude_file.name if exclude_file else None) + + make_pattern_flags(config, pattern_file.name if pattern_file else None) + + make_exclude_flags(config, exclude_file.name if exclude_file else None) + (('--checkpoint-interval', str(checkpoint_interval)) if checkpoint_interval else ()) + (('--checkpoint-volume', str(checkpoint_volume)) if checkpoint_volume else ()) + (('--chunker-params', chunker_params) if chunker_params else ()) + (('--compression', compression) if compression else ()) + upload_ratelimit_flags - + ( - ('--one-file-system',) - if location_config.get('one_file_system') or stream_processes - else () - ) + + (('--one-file-system',) if config.get('one_file_system') or stream_processes else ()) + numeric_ids_flags + atime_flags - + (('--noctime',) if location_config.get('ctime') is False else ()) - + (('--nobirthtime',) if location_config.get('birthtime') is False else ()) - + (('--read-special',) if location_config.get('read_special') or stream_processes else ()) + + (('--noctime',) if config.get('ctime') is False else ()) + + (('--nobirthtime',) if config.get('birthtime') is False else ()) + + (('--read-special',) if config.get('read_special') or stream_processes else ()) + noflags_flags + (('--files-cache', files_cache) if files_cache else ()) + (('--remote-path', remote_path) if remote_path else ()) @@ -470,11 +461,11 @@ def create_archive( # the terminal directly. output_file = DO_NOT_CAPTURE if progress else None - borg_environment = environment.make_environment(storage_config) + borg_environment = environment.make_environment(config) # If database hooks are enabled (as indicated by streaming processes), exclude files that might # cause Borg to hang. But skip this if the user has explicitly set the "read_special" to True. 
- if stream_processes and not location_config.get('read_special'): + if stream_processes and not config.get('read_special'): logger.debug(f'{repository_path}: Collecting special file paths') special_file_paths = collect_special_file_paths( create_command, @@ -490,11 +481,11 @@ def create_archive( ) exclude_file = write_pattern_file( expand_home_directories( - tuple(location_config.get('exclude_patterns') or ()) + special_file_paths + tuple(config.get('exclude_patterns') or ()) + special_file_paths ), pattern_file=exclude_file, ) - create_command += make_exclude_flags(location_config, exclude_file.name) + create_command += make_exclude_flags(config, exclude_file.name) create_command += ( (('--info',) if logger.getEffectiveLevel() == logging.INFO and not json else ()) diff --git a/borgmatic/borg/environment.py b/borgmatic/borg/environment.py index 1b14369a..6ee3cbef 100644 --- a/borgmatic/borg/environment.py +++ b/borgmatic/borg/environment.py @@ -17,15 +17,15 @@ DEFAULT_BOOL_OPTION_TO_ENVIRONMENT_VARIABLE = { } -def make_environment(storage_config): +def make_environment(config): ''' - Given a borgmatic storage configuration dict, return its options converted to a Borg environment + Given a borgmatic configuration dict, return its options converted to a Borg environment variable dict. 
''' environment = {} for option_name, environment_variable_name in OPTION_TO_ENVIRONMENT_VARIABLE.items(): - value = storage_config.get(option_name) + value = config.get(option_name) if value: environment[environment_variable_name] = str(value) @@ -34,7 +34,7 @@ def make_environment(storage_config): option_name, environment_variable_name, ) in DEFAULT_BOOL_OPTION_TO_ENVIRONMENT_VARIABLE.items(): - value = storage_config.get(option_name, False) + value = config.get(option_name, False) environment[environment_variable_name] = 'yes' if value else 'no' return environment diff --git a/borgmatic/borg/export_tar.py b/borgmatic/borg/export_tar.py index b6d9a04c..47e3c20d 100644 --- a/borgmatic/borg/export_tar.py +++ b/borgmatic/borg/export_tar.py @@ -13,7 +13,7 @@ def export_tar_archive( archive, paths, destination_path, - storage_config, + config, local_borg_version, global_arguments, local_path='borg', @@ -24,16 +24,16 @@ def export_tar_archive( ): ''' Given a dry-run flag, a local or remote repository path, an archive name, zero or more paths to - export from the archive, a destination path to export to, a storage configuration dict, the - local Borg version, optional local and remote Borg paths, an optional filter program, whether to - include per-file details, and an optional number of path components to strip, export the archive - into the given destination path as a tar-formatted file. + export from the archive, a destination path to export to, a configuration dict, the local Borg + version, optional local and remote Borg paths, an optional filter program, whether to include + per-file details, and an optional number of path components to strip, export the archive into + the given destination path as a tar-formatted file. If the destination path is "-", then stream the output to stdout instead of to a file. 
''' borgmatic.logger.add_custom_log_levels() - umask = storage_config.get('umask', None) - lock_wait = storage_config.get('lock_wait', None) + umask = config.get('umask', None) + lock_wait = config.get('lock_wait', None) full_command = ( (local_path, 'export-tar') @@ -70,5 +70,5 @@ def export_tar_archive( output_file=DO_NOT_CAPTURE if destination_path == '-' else None, output_log_level=output_log_level, borg_local_path=local_path, - extra_environment=environment.make_environment(storage_config), + extra_environment=environment.make_environment(config), ) diff --git a/borgmatic/borg/extract.py b/borgmatic/borg/extract.py index d5465bb9..dec203fc 100644 --- a/borgmatic/borg/extract.py +++ b/borgmatic/borg/extract.py @@ -10,7 +10,7 @@ logger = logging.getLogger(__name__) def extract_last_archive_dry_run( - storage_config, + config, local_borg_version, global_arguments, repository_path, @@ -32,7 +32,7 @@ def extract_last_archive_dry_run( last_archive_name = rlist.resolve_archive_name( repository_path, 'latest', - storage_config, + config, local_borg_version, global_arguments, local_path, @@ -43,7 +43,7 @@ def extract_last_archive_dry_run( return list_flag = ('--list',) if logger.isEnabledFor(logging.DEBUG) else () - borg_environment = environment.make_environment(storage_config) + borg_environment = environment.make_environment(config) full_extract_command = ( (local_path, 'extract', '--dry-run') + (('--remote-path', remote_path) if remote_path else ()) @@ -66,8 +66,7 @@ def extract_archive( repository, archive, paths, - location_config, - storage_config, + config, local_borg_version, global_arguments, local_path='borg', @@ -80,22 +79,22 @@ def extract_archive( ''' Given a dry-run flag, a local or remote repository path, an archive name, zero or more paths to restore from the archive, the local Borg version string, an argparse.Namespace of global - arguments, location/storage configuration dicts, optional local and remote Borg paths, and an - optional destination path 
to extract to, extract the archive into the current directory. + arguments, a configuration dict, optional local and remote Borg paths, and an optional + destination path to extract to, extract the archive into the current directory. If extract to stdout is True, then start the extraction streaming to stdout, and return that extract process as an instance of subprocess.Popen. ''' - umask = storage_config.get('umask', None) - lock_wait = storage_config.get('lock_wait', None) + umask = config.get('umask', None) + lock_wait = config.get('lock_wait', None) if progress and extract_to_stdout: raise ValueError('progress and extract_to_stdout cannot both be set') if feature.available(feature.Feature.NUMERIC_IDS, local_borg_version): - numeric_ids_flags = ('--numeric-ids',) if location_config.get('numeric_ids') else () + numeric_ids_flags = ('--numeric-ids',) if config.get('numeric_ids') else () else: - numeric_ids_flags = ('--numeric-owner',) if location_config.get('numeric_ids') else () + numeric_ids_flags = ('--numeric-owner',) if config.get('numeric_ids') else () if strip_components == 'all': if not paths: @@ -127,7 +126,7 @@ def extract_archive( + (tuple(paths) if paths else ()) ) - borg_environment = environment.make_environment(storage_config) + borg_environment = environment.make_environment(config) # The progress output isn't compatible with captured and logged output, as progress messes with # the terminal directly. 
diff --git a/borgmatic/borg/info.py b/borgmatic/borg/info.py index 91520e00..9a8bdda2 100644 --- a/borgmatic/borg/info.py +++ b/borgmatic/borg/info.py @@ -9,7 +9,7 @@ logger = logging.getLogger(__name__) def display_archives_info( repository_path, - storage_config, + config, local_borg_version, info_arguments, global_arguments, @@ -17,12 +17,12 @@ def display_archives_info( remote_path=None, ): ''' - Given a local or remote repository path, a storage config dict, the local Borg version, global + Given a local or remote repository path, a configuration dict, the local Borg version, global arguments as an argparse.Namespace, and the arguments to the info action, display summary information for Borg archives in the repository or return JSON summary information. ''' borgmatic.logger.add_custom_log_levels() - lock_wait = storage_config.get('lock_wait', None) + lock_wait = config.get('lock_wait', None) full_command = ( (local_path, 'info') @@ -50,8 +50,8 @@ def display_archives_info( flags.make_match_archives_flags( info_arguments.match_archives or info_arguments.archive - or storage_config.get('match_archives'), - storage_config.get('archive_name_format'), + or config.get('match_archives'), + config.get('archive_name_format'), local_borg_version, ) ) @@ -65,12 +65,12 @@ def display_archives_info( if info_arguments.json: return execute_command_and_capture_output( full_command, - extra_environment=environment.make_environment(storage_config), + extra_environment=environment.make_environment(config), ) else: execute_command( full_command, output_log_level=logging.ANSWER, borg_local_path=local_path, - extra_environment=environment.make_environment(storage_config), + extra_environment=environment.make_environment(config), ) diff --git a/borgmatic/borg/list.py b/borgmatic/borg/list.py index 5ec1480d..b3db8e90 100644 --- a/borgmatic/borg/list.py +++ b/borgmatic/borg/list.py @@ -21,7 +21,7 @@ MAKE_FLAGS_EXCLUDES = ( def make_list_command( repository_path, - storage_config, + 
config, local_borg_version, list_arguments, global_arguments, @@ -29,11 +29,11 @@ def make_list_command( remote_path=None, ): ''' - Given a local or remote repository path, a storage config dict, the arguments to the list - action, and local and remote Borg paths, return a command as a tuple to list archives or paths - within an archive. + Given a local or remote repository path, a configuration dict, the arguments to the list action, + and local and remote Borg paths, return a command as a tuple to list archives or paths within an + archive. ''' - lock_wait = storage_config.get('lock_wait', None) + lock_wait = config.get('lock_wait', None) return ( (local_path, 'list') @@ -89,7 +89,7 @@ def make_find_paths(find_paths): def capture_archive_listing( repository_path, archive, - storage_config, + config, local_borg_version, global_arguments, list_path=None, @@ -97,18 +97,18 @@ def capture_archive_listing( remote_path=None, ): ''' - Given a local or remote repository path, an archive name, a storage config dict, the local Borg + Given a local or remote repository path, an archive name, a configuration dict, the local Borg version, global arguments as an argparse.Namespace, the archive path in which to list files, and local and remote Borg paths, capture the output of listing that archive and return it as a list of file paths. 
''' - borg_environment = environment.make_environment(storage_config) + borg_environment = environment.make_environment(config) return tuple( execute_command_and_capture_output( make_list_command( repository_path, - storage_config, + config, local_borg_version, argparse.Namespace( repository=repository_path, @@ -131,7 +131,7 @@ def capture_archive_listing( def list_archive( repository_path, - storage_config, + config, local_borg_version, list_arguments, global_arguments, @@ -139,7 +139,7 @@ def list_archive( remote_path=None, ): ''' - Given a local or remote repository path, a storage config dict, the local Borg version, global + Given a local or remote repository path, a configuration dict, the local Borg version, global arguments as an argparse.Namespace, the arguments to the list action as an argparse.Namespace, and local and remote Borg paths, display the output of listing the files of a Borg archive (or return JSON output). If list_arguments.find_paths are given, list the files by searching across @@ -167,7 +167,7 @@ def list_archive( ) return rlist.list_repository( repository_path, - storage_config, + config, local_borg_version, rlist_arguments, global_arguments, @@ -187,7 +187,7 @@ def list_archive( 'The --json flag on the list action is not supported when using the --archive/--find flags.' ) - borg_environment = environment.make_environment(storage_config) + borg_environment = environment.make_environment(config) # If there are any paths to find (and there's not a single archive already selected), start by # getting a list of archives to search. 
@@ -209,7 +209,7 @@ def list_archive( execute_command_and_capture_output( rlist.make_rlist_command( repository_path, - storage_config, + config, local_borg_version, rlist_arguments, global_arguments, @@ -238,7 +238,7 @@ def list_archive( main_command = make_list_command( repository_path, - storage_config, + config, local_borg_version, archive_arguments, global_arguments, diff --git a/borgmatic/borg/mount.py b/borgmatic/borg/mount.py index 80cfa8d7..9d034688 100644 --- a/borgmatic/borg/mount.py +++ b/borgmatic/borg/mount.py @@ -10,7 +10,7 @@ def mount_archive( repository_path, archive, mount_arguments, - storage_config, + config, local_borg_version, global_arguments, local_path='borg', @@ -22,8 +22,8 @@ def mount_archive( dict, the local Borg version, global arguments as an argparse.Namespace instance, and optional local and remote Borg paths, mount the archive onto the mount point. ''' - umask = storage_config.get('umask', None) - lock_wait = storage_config.get('lock_wait', None) + umask = config.get('umask', None) + lock_wait = config.get('lock_wait', None) full_command = ( (local_path, 'mount') @@ -58,7 +58,7 @@ def mount_archive( + (tuple(mount_arguments.paths) if mount_arguments.paths else ()) ) - borg_environment = environment.make_environment(storage_config) + borg_environment = environment.make_environment(config) # Don't capture the output when foreground mode is used so that ctrl-C can work properly. 
if mount_arguments.foreground: diff --git a/borgmatic/borg/prune.py b/borgmatic/borg/prune.py index a85cacf9..46c7f34b 100644 --- a/borgmatic/borg/prune.py +++ b/borgmatic/borg/prune.py @@ -7,9 +7,9 @@ from borgmatic.execute import execute_command logger = logging.getLogger(__name__) -def make_prune_flags(storage_config, retention_config, local_borg_version): +def make_prune_flags(config, local_borg_version): ''' - Given a retention config dict mapping from option name to value, transform it into an sequence of + Given a configuration dict mapping from option name to value, transform it into an sequence of command-line flags. For example, given a retention config of: @@ -23,12 +23,12 @@ def make_prune_flags(storage_config, retention_config, local_borg_version): ('--keep-monthly', '6'), ) ''' - config = retention_config.copy() - prefix = config.pop('prefix', None) - flag_pairs = ( - ('--' + option_name.replace('_', '-'), str(value)) for option_name, value in config.items() + ('--' + option_name.replace('_', '-'), str(value)) + for option_name, value in config.items() + if option_name.startswith('keep_') ) + prefix = config.get('prefix') return tuple(element for pair in flag_pairs for element in pair) + ( ( @@ -39,8 +39,8 @@ def make_prune_flags(storage_config, retention_config, local_borg_version): if prefix else ( flags.make_match_archives_flags( - storage_config.get('match_archives'), - storage_config.get('archive_name_format'), + config.get('match_archives'), + config.get('archive_name_format'), local_borg_version, ) ) @@ -50,8 +50,7 @@ def make_prune_flags(storage_config, retention_config, local_borg_version): def prune_archives( dry_run, repository_path, - storage_config, - retention_config, + config, local_borg_version, prune_arguments, global_arguments, @@ -59,18 +58,17 @@ def prune_archives( remote_path=None, ): ''' - Given dry-run flag, a local or remote repository path, a storage config dict, and a - retention config dict, prune Borg archives according to 
the retention policy specified in that - configuration. + Given dry-run flag, a local or remote repository path, and a configuration dict, prune Borg + archives according to the retention policy specified in that configuration. ''' borgmatic.logger.add_custom_log_levels() - umask = storage_config.get('umask', None) - lock_wait = storage_config.get('lock_wait', None) - extra_borg_options = storage_config.get('extra_borg_options', {}).get('prune', '') + umask = config.get('umask', None) + lock_wait = config.get('lock_wait', None) + extra_borg_options = config.get('extra_borg_options', {}).get('prune', '') full_command = ( (local_path, 'prune') - + make_prune_flags(storage_config, retention_config, local_borg_version) + + make_prune_flags(config, local_borg_version) + (('--remote-path', remote_path) if remote_path else ()) + (('--umask', str(umask)) if umask else ()) + (('--log-json',) if global_arguments.log_json else ()) @@ -97,5 +95,5 @@ def prune_archives( full_command, output_log_level=output_log_level, borg_local_path=local_path, - extra_environment=environment.make_environment(storage_config), + extra_environment=environment.make_environment(config), ) diff --git a/borgmatic/borg/rcreate.py b/borgmatic/borg/rcreate.py index 54a865c5..8fc70d95 100644 --- a/borgmatic/borg/rcreate.py +++ b/borgmatic/borg/rcreate.py @@ -14,7 +14,7 @@ RINFO_REPOSITORY_NOT_FOUND_EXIT_CODE = 2 def create_repository( dry_run, repository_path, - storage_config, + config, local_borg_version, global_arguments, encryption_mode, @@ -27,15 +27,15 @@ def create_repository( remote_path=None, ): ''' - Given a dry-run flag, a local or remote repository path, a storage configuration dict, the local - Borg version, a Borg encryption mode, the path to another repo whose key material should be - reused, whether the repository should be append-only, and the storage quota to use, create the + Given a dry-run flag, a local or remote repository path, a configuration dict, the local Borg + version, a 
Borg encryption mode, the path to another repo whose key material should be reused, + whether the repository should be append-only, and the storage quota to use, create the repository. If the repository already exists, then log and skip creation. ''' try: rinfo.display_repository_info( repository_path, - storage_config, + config, local_borg_version, argparse.Namespace(json=True), global_arguments, @@ -48,8 +48,8 @@ def create_repository( if error.returncode != RINFO_REPOSITORY_NOT_FOUND_EXIT_CODE: raise - lock_wait = storage_config.get('lock_wait') - extra_borg_options = storage_config.get('extra_borg_options', {}).get('rcreate', '') + lock_wait = config.get('lock_wait') + extra_borg_options = config.get('extra_borg_options', {}).get('rcreate', '') rcreate_command = ( (local_path,) @@ -82,5 +82,5 @@ def create_repository( rcreate_command, output_file=DO_NOT_CAPTURE, borg_local_path=local_path, - extra_environment=environment.make_environment(storage_config), + extra_environment=environment.make_environment(config), ) diff --git a/borgmatic/borg/rinfo.py b/borgmatic/borg/rinfo.py index e1542d28..a7ae8229 100644 --- a/borgmatic/borg/rinfo.py +++ b/borgmatic/borg/rinfo.py @@ -9,7 +9,7 @@ logger = logging.getLogger(__name__) def display_repository_info( repository_path, - storage_config, + config, local_borg_version, rinfo_arguments, global_arguments, @@ -17,12 +17,12 @@ def display_repository_info( remote_path=None, ): ''' - Given a local or remote repository path, a storage config dict, the local Borg version, the + Given a local or remote repository path, a configuration dict, the local Borg version, the arguments to the rinfo action, and global arguments as an argparse.Namespace, display summary information for the Borg repository or return JSON summary information. 
''' borgmatic.logger.add_custom_log_levels() - lock_wait = storage_config.get('lock_wait', None) + lock_wait = config.get('lock_wait', None) full_command = ( (local_path,) @@ -48,7 +48,7 @@ def display_repository_info( + flags.make_repository_flags(repository_path, local_borg_version) ) - extra_environment = environment.make_environment(storage_config) + extra_environment = environment.make_environment(config) if rinfo_arguments.json: return execute_command_and_capture_output( diff --git a/borgmatic/borg/rlist.py b/borgmatic/borg/rlist.py index ba45aa0a..b532a6aa 100644 --- a/borgmatic/borg/rlist.py +++ b/borgmatic/borg/rlist.py @@ -10,14 +10,14 @@ logger = logging.getLogger(__name__) def resolve_archive_name( repository_path, archive, - storage_config, + config, local_borg_version, global_arguments, local_path='borg', remote_path=None, ): ''' - Given a local or remote repository path, an archive name, a storage config dict, the local Borg + Given a local or remote repository path, an archive name, a configuration dict, the local Borg version, global arguments as an argparse.Namespace, a local Borg path, and a remote Borg path, return the archive name. But if the archive name is "latest", then instead introspect the repository for the latest archive and return its name. 
@@ -34,7 +34,7 @@ def resolve_archive_name( ) + flags.make_flags('remote-path', remote_path) + flags.make_flags('log-json', global_arguments.log_json) - + flags.make_flags('lock-wait', storage_config.get('lock_wait')) + + flags.make_flags('lock-wait', config.get('lock_wait')) + flags.make_flags('last', 1) + ('--short',) + flags.make_repository_flags(repository_path, local_borg_version) @@ -42,7 +42,7 @@ def resolve_archive_name( output = execute_command_and_capture_output( full_command, - extra_environment=environment.make_environment(storage_config), + extra_environment=environment.make_environment(config), ) try: latest_archive = output.strip().splitlines()[-1] @@ -59,7 +59,7 @@ MAKE_FLAGS_EXCLUDES = ('repository', 'prefix', 'match_archives') def make_rlist_command( repository_path, - storage_config, + config, local_borg_version, rlist_arguments, global_arguments, @@ -67,7 +67,7 @@ def make_rlist_command( remote_path=None, ): ''' - Given a local or remote repository path, a storage config dict, the local Borg version, the + Given a local or remote repository path, a configuration dict, the local Borg version, the arguments to the rlist action, global arguments as an argparse.Namespace instance, and local and remote Borg paths, return a command as a tuple to list archives with a repository. 
''' @@ -88,7 +88,7 @@ def make_rlist_command( ) + flags.make_flags('remote-path', remote_path) + flags.make_flags('log-json', global_arguments.log_json) - + flags.make_flags('lock-wait', storage_config.get('lock_wait')) + + flags.make_flags('lock-wait', config.get('lock_wait')) + ( ( flags.make_flags('match-archives', f'sh:{rlist_arguments.prefix}*') @@ -98,8 +98,8 @@ def make_rlist_command( if rlist_arguments.prefix else ( flags.make_match_archives_flags( - rlist_arguments.match_archives or storage_config.get('match_archives'), - storage_config.get('archive_name_format'), + rlist_arguments.match_archives or config.get('match_archives'), + config.get('archive_name_format'), local_borg_version, ) ) @@ -111,7 +111,7 @@ def make_rlist_command( def list_repository( repository_path, - storage_config, + config, local_borg_version, rlist_arguments, global_arguments, @@ -119,17 +119,17 @@ def list_repository( remote_path=None, ): ''' - Given a local or remote repository path, a storage config dict, the local Borg version, the + Given a local or remote repository path, a configuration dict, the local Borg version, the arguments to the list action, global arguments as an argparse.Namespace instance, and local and remote Borg paths, display the output of listing Borg archives in the given repository (or return JSON output). 
''' borgmatic.logger.add_custom_log_levels() - borg_environment = environment.make_environment(storage_config) + borg_environment = environment.make_environment(config) main_command = make_rlist_command( repository_path, - storage_config, + config, local_borg_version, rlist_arguments, global_arguments, diff --git a/borgmatic/borg/transfer.py b/borgmatic/borg/transfer.py index d8f3978f..f91349fc 100644 --- a/borgmatic/borg/transfer.py +++ b/borgmatic/borg/transfer.py @@ -10,7 +10,7 @@ logger = logging.getLogger(__name__) def transfer_archives( dry_run, repository_path, - storage_config, + config, local_borg_version, transfer_arguments, global_arguments, @@ -18,7 +18,7 @@ def transfer_archives( remote_path=None, ): ''' - Given a dry-run flag, a local or remote repository path, a storage config dict, the local Borg + Given a dry-run flag, a local or remote repository path, a configuration dict, the local Borg version, the arguments to the transfer action, and global arguments as an argparse.Namespace instance, transfer archives to the given repository. 
''' @@ -30,7 +30,7 @@ def transfer_archives( + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ()) + flags.make_flags('remote-path', remote_path) + flags.make_flags('log-json', global_arguments.log_json) - + flags.make_flags('lock-wait', storage_config.get('lock_wait', None)) + + flags.make_flags('lock-wait', config.get('lock_wait', None)) + ( flags.make_flags_from_arguments( transfer_arguments, @@ -40,8 +40,8 @@ def transfer_archives( flags.make_match_archives_flags( transfer_arguments.match_archives or transfer_arguments.archive - or storage_config.get('match_archives'), - storage_config.get('archive_name_format'), + or config.get('match_archives'), + config.get('archive_name_format'), local_borg_version, ) ) @@ -56,5 +56,5 @@ def transfer_archives( output_log_level=logging.ANSWER, output_file=DO_NOT_CAPTURE if transfer_arguments.progress else None, borg_local_path=local_path, - extra_environment=environment.make_environment(storage_config), + extra_environment=environment.make_environment(config), ) diff --git a/borgmatic/borg/version.py b/borgmatic/borg/version.py index d90a7aae..feb677ad 100644 --- a/borgmatic/borg/version.py +++ b/borgmatic/borg/version.py @@ -6,9 +6,9 @@ from borgmatic.execute import execute_command_and_capture_output logger = logging.getLogger(__name__) -def local_borg_version(storage_config, local_path='borg'): +def local_borg_version(config, local_path='borg'): ''' - Given a storage configuration dict and a local Borg binary path, return a version string for it. + Given a configuration dict and a local Borg binary path, return a version string for it. Raise OSError or CalledProcessError if there is a problem running Borg. Raise ValueError if the version cannot be parsed. 
@@ -20,7 +20,7 @@ def local_borg_version(storage_config, local_path='borg'): ) output = execute_command_and_capture_output( full_command, - extra_environment=environment.make_environment(storage_config), + extra_environment=environment.make_environment(config), ) try: diff --git a/borgmatic/commands/arguments.py b/borgmatic/commands/arguments.py index 588e31c9..ce246e14 100644 --- a/borgmatic/commands/arguments.py +++ b/borgmatic/commands/arguments.py @@ -330,7 +330,7 @@ def make_parsers(): ) global_group.add_argument( '--override', - metavar='SECTION.OPTION=VALUE', + metavar='OPTION.SUBOPTION=VALUE', nargs='+', dest='overrides', action='extend', diff --git a/borgmatic/commands/borgmatic.py b/borgmatic/commands/borgmatic.py index 6aaad9d1..a869594e 100644 --- a/borgmatic/commands/borgmatic.py +++ b/borgmatic/commands/borgmatic.py @@ -58,16 +58,12 @@ def run_configuration(config_filename, config, arguments): * JSON output strings from successfully executing any actions that produce JSON * logging.LogRecord instances containing errors from any actions or backup hooks that fail ''' - (location, storage, retention, consistency, hooks) = ( - config.get(section_name, {}) - for section_name in ('location', 'storage', 'retention', 'consistency', 'hooks') - ) global_arguments = arguments['global'] - local_path = location.get('local_path', 'borg') - remote_path = location.get('remote_path') - retries = storage.get('retries', 0) - retry_wait = storage.get('retry_wait', 0) + local_path = config.get('local_path', 'borg') + remote_path = config.get('remote_path') + retries = config.get('retries', 0) + retry_wait = config.get('retry_wait', 0) encountered_error = None error_repository = '' using_primary_action = {'create', 'prune', 'compact', 'check'}.intersection(arguments) @@ -75,7 +71,7 @@ def run_configuration(config_filename, config, arguments): monitoring_hooks_are_activated = using_primary_action and monitoring_log_level != DISABLED try: - local_borg_version = 
borg_version.local_borg_version(storage, local_path) + local_borg_version = borg_version.local_borg_version(config, local_path) except (OSError, CalledProcessError, ValueError) as error: yield from log_error_records(f'{config_filename}: Error getting local Borg version', error) return @@ -84,7 +80,7 @@ def run_configuration(config_filename, config, arguments): if monitoring_hooks_are_activated: dispatch.call_hooks( 'initialize_monitor', - hooks, + config, config_filename, monitor.MONITOR_HOOK_NAMES, monitoring_log_level, @@ -93,7 +89,7 @@ def run_configuration(config_filename, config, arguments): dispatch.call_hooks( 'ping_monitor', - hooks, + config, config_filename, monitor.MONITOR_HOOK_NAMES, monitor.State.START, @@ -109,7 +105,7 @@ def run_configuration(config_filename, config, arguments): if not encountered_error: repo_queue = Queue() - for repo in location['repositories']: + for repo in config['repositories']: repo_queue.put( (repo, 0), ) @@ -129,11 +125,7 @@ def run_configuration(config_filename, config, arguments): yield from run_actions( arguments=arguments, config_filename=config_filename, - location=location, - storage=storage, - retention=retention, - consistency=consistency, - hooks=hooks, + config=config, local_path=local_path, remote_path=remote_path, local_borg_version=local_borg_version, @@ -172,7 +164,7 @@ def run_configuration(config_filename, config, arguments): # send logs irrespective of error dispatch.call_hooks( 'ping_monitor', - hooks, + config, config_filename, monitor.MONITOR_HOOK_NAMES, monitor.State.LOG, @@ -191,7 +183,7 @@ def run_configuration(config_filename, config, arguments): if monitoring_hooks_are_activated: dispatch.call_hooks( 'ping_monitor', - hooks, + config, config_filename, monitor.MONITOR_HOOK_NAMES, monitor.State.FINISH, @@ -200,7 +192,7 @@ def run_configuration(config_filename, config, arguments): ) dispatch.call_hooks( 'destroy_monitor', - hooks, + config, config_filename, monitor.MONITOR_HOOK_NAMES, 
monitoring_log_level, @@ -216,8 +208,8 @@ def run_configuration(config_filename, config, arguments): if encountered_error and using_primary_action: try: command.execute_hook( - hooks.get('on_error'), - hooks.get('umask'), + config.get('on_error'), + config.get('umask'), config_filename, 'on-error', global_arguments.dry_run, @@ -227,7 +219,7 @@ def run_configuration(config_filename, config, arguments): ) dispatch.call_hooks( 'ping_monitor', - hooks, + config, config_filename, monitor.MONITOR_HOOK_NAMES, monitor.State.FAIL, @@ -236,7 +228,7 @@ def run_configuration(config_filename, config, arguments): ) dispatch.call_hooks( 'destroy_monitor', - hooks, + config, config_filename, monitor.MONITOR_HOOK_NAMES, monitoring_log_level, @@ -253,11 +245,7 @@ def run_actions( *, arguments, config_filename, - location, - storage, - retention, - consistency, - hooks, + config, local_path, remote_path, local_borg_version, @@ -282,13 +270,13 @@ def run_actions( hook_context = { 'repository': repository_path, # Deprecated: For backwards compatibility with borgmatic < 1.6.0. 
- 'repositories': ','.join([repo['path'] for repo in location['repositories']]), + 'repositories': ','.join([repo['path'] for repo in config['repositories']]), 'log_file': global_arguments.log_file if global_arguments.log_file else '', } command.execute_hook( - hooks.get('before_actions'), - hooks.get('umask'), + config.get('before_actions'), + config.get('umask'), config_filename, 'pre-actions', global_arguments.dry_run, @@ -299,7 +287,7 @@ def run_actions( if action_name == 'rcreate': borgmatic.actions.rcreate.run_rcreate( repository, - storage, + config, local_borg_version, action_arguments, global_arguments, @@ -309,7 +297,7 @@ def run_actions( elif action_name == 'transfer': borgmatic.actions.transfer.run_transfer( repository, - storage, + config, local_borg_version, action_arguments, global_arguments, @@ -320,9 +308,7 @@ def run_actions( yield from borgmatic.actions.create.run_create( config_filename, repository, - location, - storage, - hooks, + config, hook_context, local_borg_version, action_arguments, @@ -335,9 +321,7 @@ def run_actions( borgmatic.actions.prune.run_prune( config_filename, repository, - storage, - retention, - hooks, + config, hook_context, local_borg_version, action_arguments, @@ -350,9 +334,7 @@ def run_actions( borgmatic.actions.compact.run_compact( config_filename, repository, - storage, - retention, - hooks, + config, hook_context, local_borg_version, action_arguments, @@ -362,14 +344,11 @@ def run_actions( remote_path, ) elif action_name == 'check': - if checks.repository_enabled_for_checks(repository, consistency): + if checks.repository_enabled_for_checks(repository, config): borgmatic.actions.check.run_check( config_filename, repository, - location, - storage, - consistency, - hooks, + config, hook_context, local_borg_version, action_arguments, @@ -381,9 +360,7 @@ def run_actions( borgmatic.actions.extract.run_extract( config_filename, repository, - location, - storage, - hooks, + config, hook_context, local_borg_version, 
action_arguments, @@ -394,7 +371,7 @@ def run_actions( elif action_name == 'export-tar': borgmatic.actions.export_tar.run_export_tar( repository, - storage, + config, local_borg_version, action_arguments, global_arguments, @@ -404,7 +381,7 @@ def run_actions( elif action_name == 'mount': borgmatic.actions.mount.run_mount( repository, - storage, + config, local_borg_version, action_arguments, global_arguments, @@ -414,9 +391,7 @@ def run_actions( elif action_name == 'restore': borgmatic.actions.restore.run_restore( repository, - location, - storage, - hooks, + config, local_borg_version, action_arguments, global_arguments, @@ -426,7 +401,7 @@ def run_actions( elif action_name == 'rlist': yield from borgmatic.actions.rlist.run_rlist( repository, - storage, + config, local_borg_version, action_arguments, global_arguments, @@ -436,7 +411,7 @@ def run_actions( elif action_name == 'list': yield from borgmatic.actions.list.run_list( repository, - storage, + config, local_borg_version, action_arguments, global_arguments, @@ -446,7 +421,7 @@ def run_actions( elif action_name == 'rinfo': yield from borgmatic.actions.rinfo.run_rinfo( repository, - storage, + config, local_borg_version, action_arguments, global_arguments, @@ -456,7 +431,7 @@ def run_actions( elif action_name == 'info': yield from borgmatic.actions.info.run_info( repository, - storage, + config, local_borg_version, action_arguments, global_arguments, @@ -466,7 +441,7 @@ def run_actions( elif action_name == 'break-lock': borgmatic.actions.break_lock.run_break_lock( repository, - storage, + config, local_borg_version, action_arguments, global_arguments, @@ -476,7 +451,7 @@ def run_actions( elif action_name == 'borg': borgmatic.actions.borg.run_borg( repository, - storage, + config, local_borg_version, action_arguments, global_arguments, @@ -485,8 +460,8 @@ def run_actions( ) command.execute_hook( - hooks.get('after_actions'), - hooks.get('umask'), + config.get('after_actions'), + config.get('umask'), 
config_filename, 'post-actions', global_arguments.dry_run, @@ -613,7 +588,7 @@ def get_local_path(configs): Arbitrarily return the local path from the first configuration dict. Default to "borg" if not set. ''' - return next(iter(configs.values())).get('location', {}).get('local_path', 'borg') + return next(iter(configs.values())).get('local_path', 'borg') def collect_highlander_action_summary_logs(configs, arguments, configuration_parse_errors): @@ -627,6 +602,8 @@ def collect_highlander_action_summary_logs(configs, arguments, configuration_par A highlander action is an action that cannot coexist with other actions on the borgmatic command-line, and borgmatic exits after processing such an action. ''' + add_custom_log_levels() + if 'bootstrap' in arguments: try: # No configuration file is needed for bootstrap. @@ -744,10 +721,9 @@ def collect_configuration_run_summary_logs(configs, arguments): if 'create' in arguments: try: for config_filename, config in configs.items(): - hooks = config.get('hooks', {}) command.execute_hook( - hooks.get('before_everything'), - hooks.get('umask'), + config.get('before_everything'), + config.get('umask'), config_filename, 'pre-everything', arguments['global'].dry_run, @@ -792,10 +768,9 @@ def collect_configuration_run_summary_logs(configs, arguments): if 'create' in arguments: try: for config_filename, config in configs.items(): - hooks = config.get('hooks', {}) command.execute_hook( - hooks.get('after_everything'), - hooks.get('umask'), + config.get('after_everything'), + config.get('umask'), config_filename, 'post-everything', arguments['global'].dry_run, diff --git a/borgmatic/config/generate.py b/borgmatic/config/generate.py index 6ef8e3ae..01096547 100644 --- a/borgmatic/config/generate.py +++ b/borgmatic/config/generate.py @@ -11,7 +11,7 @@ INDENT = 4 SEQUENCE_INDENT = 2 -def _insert_newline_before_comment(config, field_name): +def insert_newline_before_comment(config, field_name): ''' Using some ruamel.yaml black magic, 
insert a blank line in the config right before the given field and its comments. @@ -21,10 +21,10 @@ def _insert_newline_before_comment(config, field_name): ) -def _schema_to_sample_configuration(schema, level=0, parent_is_sequence=False): +def schema_to_sample_configuration(schema, level=0, parent_is_sequence=False): ''' Given a loaded configuration schema, generate and return sample config for it. Include comments - for each section based on the schema "description". + for each option based on the schema "description". ''' schema_type = schema.get('type') example = schema.get('example') @@ -33,13 +33,13 @@ def _schema_to_sample_configuration(schema, level=0, parent_is_sequence=False): if schema_type == 'array': config = yaml.comments.CommentedSeq( - [_schema_to_sample_configuration(schema['items'], level, parent_is_sequence=True)] + [schema_to_sample_configuration(schema['items'], level, parent_is_sequence=True)] ) add_comments_to_configuration_sequence(config, schema, indent=(level * INDENT)) elif schema_type == 'object': config = yaml.comments.CommentedMap( [ - (field_name, _schema_to_sample_configuration(sub_schema, level + 1)) + (field_name, schema_to_sample_configuration(sub_schema, level + 1)) for field_name, sub_schema in schema['properties'].items() ] ) @@ -53,13 +53,13 @@ def _schema_to_sample_configuration(schema, level=0, parent_is_sequence=False): return config -def _comment_out_line(line): +def comment_out_line(line): # If it's already is commented out (or empty), there's nothing further to do! stripped_line = line.lstrip() if not stripped_line or stripped_line.startswith('#'): return line - # Comment out the names of optional sections, inserting the '#' after any indent for aesthetics. + # Comment out the names of optional options, inserting the '#' after any indent for aesthetics. 
matches = re.match(r'(\s*)', line) indent_spaces = matches.group(0) if matches else '' count_indent_spaces = len(indent_spaces) @@ -67,7 +67,7 @@ def _comment_out_line(line): return '# '.join((indent_spaces, line[count_indent_spaces:])) -def _comment_out_optional_configuration(rendered_config): +def comment_out_optional_configuration(rendered_config): ''' Post-process a rendered configuration string to comment out optional key/values, as determined by a sentinel in the comment before each key. @@ -92,7 +92,7 @@ def _comment_out_optional_configuration(rendered_config): if not line.strip(): optional = False - lines.append(_comment_out_line(line) if optional else line) + lines.append(comment_out_line(line) if optional else line) return '\n'.join(lines) @@ -165,7 +165,6 @@ def add_comments_to_configuration_sequence(config, schema, indent=0): return -REQUIRED_SECTION_NAMES = {'location', 'retention'} REQUIRED_KEYS = {'source_directories', 'repositories', 'keep_daily'} COMMENTED_OUT_SENTINEL = 'COMMENT_OUT' @@ -185,7 +184,7 @@ def add_comments_to_configuration_object(config, schema, indent=0, skip_first=Fa # If this is an optional key, add an indicator to the comment flagging it to be commented # out from the sample configuration. This sentinel is consumed by downstream processing that # does the actual commenting out. 
- if field_name not in REQUIRED_SECTION_NAMES and field_name not in REQUIRED_KEYS: + if field_name not in REQUIRED_KEYS: description = ( '\n'.join((description, COMMENTED_OUT_SENTINEL)) if description @@ -199,7 +198,7 @@ def add_comments_to_configuration_object(config, schema, indent=0, skip_first=Fa config.yaml_set_comment_before_after_key(key=field_name, before=description, indent=indent) if index > 0: - _insert_newline_before_comment(config, field_name) + insert_newline_before_comment(config, field_name) RUAMEL_YAML_COMMENTS_INDEX = 1 @@ -284,7 +283,7 @@ def generate_sample_configuration( normalize.normalize(source_filename, source_config) destination_config = merge_source_configuration_into_destination( - _schema_to_sample_configuration(schema), source_config + schema_to_sample_configuration(schema), source_config ) if dry_run: @@ -292,6 +291,6 @@ def generate_sample_configuration( write_configuration( destination_filename, - _comment_out_optional_configuration(render_configuration(destination_config)), + comment_out_optional_configuration(render_configuration(destination_config)), overwrite=overwrite, ) diff --git a/borgmatic/config/load.py b/borgmatic/config/load.py index f5d071c9..e0fabfa6 100644 --- a/borgmatic/config/load.py +++ b/borgmatic/config/load.py @@ -97,8 +97,8 @@ class Include_constructor(ruamel.yaml.SafeConstructor): ``` These includes are deep merged into the current configuration file. For instance, in this - example, any "retention" options in common.yaml will get merged into the "retention" section - in the example configuration file. + example, any "option" with sub-options in common.yaml will get merged into the corresponding + "option" with sub-options in the example configuration file. ''' representer = ruamel.yaml.representer.SafeRepresenter() @@ -116,7 +116,7 @@ def load_configuration(filename): ''' Load the given configuration file and return its contents as a data structure of nested dicts and lists. 
Also, replace any "{constant}" strings with the value of the "constant" key in the - "constants" section of the configuration file. + "constants" option of the configuration file. Raise ruamel.yaml.error.YAMLError if something goes wrong parsing the YAML, or RecursionError if there are too many recursive includes. @@ -223,8 +223,8 @@ def deep_merge_nodes(nodes): If a mapping or sequence node has a YAML "!retain" tag, then that node is not merged. The purpose of deep merging like this is to support, for instance, merging one borgmatic - configuration file into another for reuse, such that a configuration section ("retention", - etc.) does not completely replace the corresponding section in a merged file. + configuration file into another for reuse, such that a configuration option with sub-options + does not completely replace the corresponding option in a merged file. Raise ValueError if a merge is implied using two incompatible types. ''' diff --git a/borgmatic/config/normalize.py b/borgmatic/config/normalize.py index daadfeb4..83f2a20b 100644 --- a/borgmatic/config/normalize.py +++ b/borgmatic/config/normalize.py @@ -2,21 +2,70 @@ import logging import os +def normalize_sections(config_filename, config): + ''' + Given a configuration filename and a configuration dict of its loaded contents, airlift any + options out of sections ("location:", etc.) to the global scope and delete those sections. + Return any log message warnings produced based on the normalization performed. + + Raise ValueError if the "prefix" option is set in both "location" and "consistency" sections. 
+ ''' + location = config.get('location') or {} + storage = config.get('storage') or {} + consistency = config.get('consistency') or {} + hooks = config.get('hooks') or {} + + if ( + location.get('prefix') + and consistency.get('prefix') + and location.get('prefix') != consistency.get('prefix') + ): + raise ValueError( + 'The retention prefix and the consistency prefix cannot have different values (unless one is not set).' + ) + + if storage.get('umask') and hooks.get('umask') and storage.get('umask') != hooks.get('umask'): + raise ValueError( + 'The storage umask and the hooks umask cannot have different values (unless one is not set).' + ) + + any_section_upgraded = False + + # Move any options from deprecated sections into the global scope. + for section_name in ('location', 'storage', 'retention', 'consistency', 'output', 'hooks'): + section_config = config.get(section_name) + + if section_config: + any_section_upgraded = True + del config[section_name] + config.update(section_config) + + if any_section_upgraded: + return [ + logging.makeLogRecord( + dict( + levelno=logging.WARNING, + levelname='WARNING', + msg=f'{config_filename}: Configuration sections like location: and storage: are deprecated and support will be removed from a future release. Move all of your options out of sections to the global scope.', + ) + ) + ] + + return [] + + def normalize(config_filename, config): ''' Given a configuration filename and a configuration dict of its loaded contents, apply particular hard-coded rules to normalize the configuration to adhere to the current schema. Return any log message warnings produced based on the normalization performed. + + Raise ValueError the configuration cannot be normalized. 
''' - logs = [] - location = config.get('location') or {} - storage = config.get('storage') or {} - consistency = config.get('consistency') or {} - retention = config.get('retention') or {} - hooks = config.get('hooks') or {} + logs = normalize_sections(config_filename, config) # Upgrade exclude_if_present from a string to a list. - exclude_if_present = location.get('exclude_if_present') + exclude_if_present = config.get('exclude_if_present') if isinstance(exclude_if_present, str): logs.append( logging.makeLogRecord( @@ -27,10 +76,10 @@ def normalize(config_filename, config): ) ) ) - config['location']['exclude_if_present'] = [exclude_if_present] + config['exclude_if_present'] = [exclude_if_present] # Upgrade various monitoring hooks from a string to a dict. - healthchecks = hooks.get('healthchecks') + healthchecks = config.get('healthchecks') if isinstance(healthchecks, str): logs.append( logging.makeLogRecord( @@ -41,9 +90,9 @@ def normalize(config_filename, config): ) ) ) - config['hooks']['healthchecks'] = {'ping_url': healthchecks} + config['healthchecks'] = {'ping_url': healthchecks} - cronitor = hooks.get('cronitor') + cronitor = config.get('cronitor') if isinstance(cronitor, str): logs.append( logging.makeLogRecord( @@ -54,9 +103,9 @@ def normalize(config_filename, config): ) ) ) - config['hooks']['cronitor'] = {'ping_url': cronitor} + config['cronitor'] = {'ping_url': cronitor} - pagerduty = hooks.get('pagerduty') + pagerduty = config.get('pagerduty') if isinstance(pagerduty, str): logs.append( logging.makeLogRecord( @@ -67,9 +116,9 @@ def normalize(config_filename, config): ) ) ) - config['hooks']['pagerduty'] = {'integration_key': pagerduty} + config['pagerduty'] = {'integration_key': pagerduty} - cronhub = hooks.get('cronhub') + cronhub = config.get('cronhub') if isinstance(cronhub, str): logs.append( logging.makeLogRecord( @@ -80,10 +129,10 @@ def normalize(config_filename, config): ) ) ) - config['hooks']['cronhub'] = {'ping_url': cronhub} + 
config['cronhub'] = {'ping_url': cronhub} # Upgrade consistency checks from a list of strings to a list of dicts. - checks = consistency.get('checks') + checks = config.get('checks') if isinstance(checks, list) and len(checks) and isinstance(checks[0], str): logs.append( logging.makeLogRecord( @@ -94,10 +143,10 @@ def normalize(config_filename, config): ) ) ) - config['consistency']['checks'] = [{'name': check_type} for check_type in checks] + config['checks'] = [{'name': check_type} for check_type in checks] # Rename various configuration options. - numeric_owner = location.pop('numeric_owner', None) + numeric_owner = config.pop('numeric_owner', None) if numeric_owner is not None: logs.append( logging.makeLogRecord( @@ -108,9 +157,9 @@ def normalize(config_filename, config): ) ) ) - config['location']['numeric_ids'] = numeric_owner + config['numeric_ids'] = numeric_owner - bsd_flags = location.pop('bsd_flags', None) + bsd_flags = config.pop('bsd_flags', None) if bsd_flags is not None: logs.append( logging.makeLogRecord( @@ -121,9 +170,9 @@ def normalize(config_filename, config): ) ) ) - config['location']['flags'] = bsd_flags + config['flags'] = bsd_flags - remote_rate_limit = storage.pop('remote_rate_limit', None) + remote_rate_limit = config.pop('remote_rate_limit', None) if remote_rate_limit is not None: logs.append( logging.makeLogRecord( @@ -134,10 +183,10 @@ def normalize(config_filename, config): ) ) ) - config['storage']['upload_rate_limit'] = remote_rate_limit + config['upload_rate_limit'] = remote_rate_limit # Upgrade remote repositories to ssh:// syntax, required in Borg 2. 
- repositories = location.get('repositories') + repositories = config.get('repositories') if repositories: if isinstance(repositories[0], str): logs.append( @@ -149,11 +198,11 @@ def normalize(config_filename, config): ) ) ) - config['location']['repositories'] = [ - {'path': repository} for repository in repositories - ] - repositories = config['location']['repositories'] - config['location']['repositories'] = [] + config['repositories'] = [{'path': repository} for repository in repositories] + repositories = config['repositories'] + + config['repositories'] = [] + for repository_dict in repositories: repository_path = repository_dict['path'] if '~' in repository_path: @@ -171,14 +220,14 @@ def normalize(config_filename, config): updated_repository_path = os.path.abspath( repository_path.partition('file://')[-1] ) - config['location']['repositories'].append( + config['repositories'].append( dict( repository_dict, path=updated_repository_path, ) ) elif repository_path.startswith('ssh://'): - config['location']['repositories'].append(repository_dict) + config['repositories'].append(repository_dict) else: rewritten_repository_path = f"ssh://{repository_path.replace(':~', '/~').replace(':/', '/').replace(':', '/./')}" logs.append( @@ -190,16 +239,16 @@ def normalize(config_filename, config): ) ) ) - config['location']['repositories'].append( + config['repositories'].append( dict( repository_dict, path=rewritten_repository_path, ) ) else: - config['location']['repositories'].append(repository_dict) + config['repositories'].append(repository_dict) - if consistency.get('prefix') or retention.get('prefix'): + if config.get('prefix'): logs.append( logging.makeLogRecord( dict( diff --git a/borgmatic/config/override.py b/borgmatic/config/override.py index aacf375d..05173d2c 100644 --- a/borgmatic/config/override.py +++ b/borgmatic/config/override.py @@ -32,19 +32,33 @@ def convert_value_type(value): return ruamel.yaml.YAML(typ='safe').load(io.StringIO(value)) 
+LEGACY_SECTION_NAMES = {'location', 'storage', 'retention', 'consistency', 'output', 'hooks'} + + +def strip_section_names(parsed_override_key): + ''' + Given a parsed override key as a tuple of option and suboption names, strip out any initial + legacy section names, since configuration file normalization also strips them out. + ''' + if parsed_override_key[0] in LEGACY_SECTION_NAMES: + return parsed_override_key[1:] + + return parsed_override_key + + def parse_overrides(raw_overrides): ''' - Given a sequence of configuration file override strings in the form of "section.option=value", + Given a sequence of configuration file override strings in the form of "option.suboption=value", parse and return a sequence of tuples (keys, values), where keys is a sequence of strings. For instance, given the following raw overrides: - ['section.my_option=value1', 'section.other_option=value2'] + ['my_option.suboption=value1', 'other_option=value2'] ... return this: ( - (('section', 'my_option'), 'value1'), - (('section', 'other_option'), 'value2'), + (('my_option', 'suboption'), 'value1'), + (('other_option'), 'value2'), ) Raise ValueError if an override can't be parsed. @@ -59,13 +73,13 @@ def parse_overrides(raw_overrides): raw_keys, value = raw_override.split('=', 1) parsed_overrides.append( ( - tuple(raw_keys.split('.')), + strip_section_names(tuple(raw_keys.split('.'))), convert_value_type(value), ) ) except ValueError: raise ValueError( - f"Invalid override '{raw_override}'. Make sure you use the form: SECTION.OPTION=VALUE" + f"Invalid override '{raw_override}'. 
Make sure you use the form: OPTION=VALUE or OPTION.SUBOPTION=VALUE" ) except ruamel.yaml.error.YAMLError as error: raise ValueError(f"Invalid override '{raw_override}': {error.problem}") @@ -76,7 +90,7 @@ def parse_overrides(raw_overrides): def apply_overrides(config, raw_overrides): ''' Given a configuration dict and a sequence of configuration file override strings in the form of - "section.option=value", parse each override and set it the configuration dict. + "option.suboption=value", parse each override and set it the configuration dict. ''' overrides = parse_overrides(raw_overrides) diff --git a/borgmatic/config/schema.yaml b/borgmatic/config/schema.yaml index 3f1b3cb4..b0cbe6ae 100644 --- a/borgmatic/config/schema.yaml +++ b/borgmatic/config/schema.yaml @@ -1,6 +1,6 @@ type: object required: - - location + - repositories additionalProperties: false properties: constants: @@ -14,1391 +14,1268 @@ properties: example: hostname: myhostname prefix: myprefix - location: - type: object + source_directories: + type: array + items: + type: string description: | - Where to look for files to backup, and where to store those backups. - See https://borgbackup.readthedocs.io/en/stable/quickstart.html and - https://borgbackup.readthedocs.io/en/stable/usage/create.html - for details. - required: - - repositories + List of source directories and files to backup. Globs and tildes are + expanded. Do not backslash spaces in path names. + example: + - /home + - /etc + - /var/log/syslog* + - /home/user/path with spaces + repositories: + type: array + items: + type: object + required: + - path + properties: + path: + type: string + example: ssh://user@backupserver/./{fqdn} + label: + type: string + example: backupserver + description: | + A required list of local or remote repositories with paths and + optional labels (which can be used with the --repository flag to + select a repository). Tildes are expanded. Multiple repositories are + backed up to in sequence. 
Borg placeholders can be used. See the + output of "borg help placeholders" for details. See ssh_command for + SSH options like identity file or port. If systemd service is used, + then add local repository paths in the systemd service file to the + ReadWritePaths list. Prior to borgmatic 1.7.10, repositories was a + list of plain path strings. + example: + - path: ssh://user@backupserver/./sourcehostname.borg + label: backupserver + - path: /mnt/backup + label: local + working_directory: + type: string + description: | + Working directory for the "borg create" command. Tildes are + expanded. Useful for backing up using relative paths. See + http://borgbackup.readthedocs.io/en/stable/usage/create.html for + details. Defaults to not set. + example: /path/to/working/directory + one_file_system: + type: boolean + description: | + Stay in same file system; do not cross mount points beyond the given + source directories. Defaults to false. But when a database hook is + used, the setting here is ignored and one_file_system is considered + true. + example: true + numeric_ids: + type: boolean + description: | + Only store/extract numeric user and group identifiers. Defaults to + false. + example: true + atime: + type: boolean + description: | + Store atime into archive. Defaults to true in Borg < 1.2, false in + Borg 1.2+. + example: false + ctime: + type: boolean + description: Store ctime into archive. Defaults to true. + example: false + birthtime: + type: boolean + description: | + Store birthtime (creation date) into archive. Defaults to true. + example: false + read_special: + type: boolean + description: | + Use Borg's --read-special flag to allow backup of block and other + special devices. Use with caution, as it will lead to problems if + used when backing up special devices such as /dev/zero. Defaults to + false. But when a database hook is used, the setting here is ignored + and read_special is considered true. 
+ example: false + flags: + type: boolean + description: | + Record filesystem flags (e.g. NODUMP, IMMUTABLE) in archive. + Defaults to true. + example: true + files_cache: + type: string + description: | + Mode in which to operate the files cache. See + http://borgbackup.readthedocs.io/en/stable/usage/create.html for + details. Defaults to "ctime,size,inode". + example: ctime,size,inode + local_path: + type: string + description: | + Alternate Borg local executable. Defaults to "borg". + example: borg1 + remote_path: + type: string + description: | + Alternate Borg remote executable. Defaults to "borg". + example: borg1 + patterns: + type: array + items: + type: string + description: | + Any paths matching these patterns are included/excluded from + backups. Globs are expanded. (Tildes are not.) See the output of + "borg help patterns" for more details. Quote any value if it + contains leading punctuation, so it parses correctly. Note that only + one of "patterns" and "source_directories" may be used. + example: + - 'R /' + - '- /home/*/.cache' + - '+ /home/susan' + - '- /home/*' + patterns_from: + type: array + items: + type: string + description: | + Read include/exclude patterns from one or more separate named files, + one pattern per line. Note that Borg considers this option + experimental. See the output of "borg help patterns" for more + details. + example: + - /etc/borgmatic/patterns + exclude_patterns: + type: array + items: + type: string + description: | + Any paths matching these patterns are excluded from backups. Globs + and tildes are expanded. Note that a glob pattern must either start + with a glob or be an absolute path. Do not backslash spaces in path + names. See the output of "borg help patterns" for more details. 
+ example: + - '*.pyc' + - /home/*/.cache + - '*/.vim*.tmp' + - /etc/ssl + - /home/user/path with spaces + exclude_from: + type: array + items: + type: string + description: | + Read exclude patterns from one or more separate named files, one + pattern per line. See the output of "borg help patterns" for more + details. + example: + - /etc/borgmatic/excludes + exclude_caches: + type: boolean + description: | + Exclude directories that contain a CACHEDIR.TAG file. See + http://www.brynosaurus.com/cachedir/spec.html for details. Defaults + to false. + example: true + exclude_if_present: + type: array + items: + type: string + description: | + Exclude directories that contain a file with the given filenames. + Defaults to not set. + example: + - .nobackup + keep_exclude_tags: + type: boolean + description: | + If true, the exclude_if_present filename is included in backups. + Defaults to false, meaning that the exclude_if_present filename is + omitted from backups. + example: true + exclude_nodump: + type: boolean + description: | + Exclude files with the NODUMP flag. Defaults to false. + example: true + borgmatic_source_directory: + type: string + description: | + Path for additional source files used for temporary internal state + like borgmatic database dumps. Note that changing this path prevents + "borgmatic restore" from finding any database dumps created before + the change. Defaults to ~/.borgmatic + example: /tmp/borgmatic + source_directories_must_exist: + type: boolean + description: | + If true, then source directories must exist, otherwise an error is + raised. Defaults to false. + example: true + encryption_passcommand: + type: string + description: | + The standard output of this command is used to unlock the encryption + key. Only use on repositories that were initialized with + passcommand/repokey/keyfile encryption. Note that if both + encryption_passcommand and encryption_passphrase are set, then + encryption_passphrase takes precedence. 
Defaults to not set. + example: "secret-tool lookup borg-repository repo-name" + encryption_passphrase: + type: string + description: | + Passphrase to unlock the encryption key with. Only use on + repositories that were initialized with passphrase/repokey/keyfile + encryption. Quote the value if it contains punctuation, so it parses + correctly. And backslash any quote or backslash literals as well. + Defaults to not set. + example: "!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~" + checkpoint_interval: + type: integer + description: | + Number of seconds between each checkpoint during a long-running + backup. See https://borgbackup.readthedocs.io/en/stable/faq.html for + details. Defaults to checkpoints every 1800 seconds (30 minutes). + example: 1800 + checkpoint_volume: + type: integer + description: | + Number of backed up bytes between each checkpoint during a + long-running backup. Only supported with Borg 2+. See + https://borgbackup.readthedocs.io/en/stable/faq.html for details. + Defaults to only time-based checkpointing (see + "checkpoint_interval") instead of volume-based checkpointing. + example: 1048576 + chunker_params: + type: string + description: | + Specify the parameters passed to then chunker (CHUNK_MIN_EXP, + CHUNK_MAX_EXP, HASH_MASK_BITS, HASH_WINDOW_SIZE). See + https://borgbackup.readthedocs.io/en/stable/internals.html for + details. Defaults to "19,23,21,4095". + example: 19,23,21,4095 + compression: + type: string + description: | + Type of compression to use when creating archives. See + http://borgbackup.readthedocs.io/en/stable/usage/create.html for + details. Defaults to "lz4". + example: lz4 + upload_rate_limit: + type: integer + description: | + Remote network upload rate limit in kiBytes/second. Defaults to + unlimited. + example: 100 + retries: + type: integer + description: | + Number of times to retry a failing backup before giving up. Defaults + to 0 (i.e., does not attempt retry). 
+ example: 3 + retry_wait: + type: integer + description: | + Wait time between retries (in seconds) to allow transient issues to + pass. Increases after each retry as a form of backoff. Defaults to 0 + (no wait). + example: 10 + temporary_directory: + type: string + description: | + Directory where temporary files are stored. Defaults to $TMPDIR. + example: /path/to/tmpdir + ssh_command: + type: string + description: | + Command to use instead of "ssh". This can be used to specify ssh + options. Defaults to not set. + example: ssh -i /path/to/private/key + borg_base_directory: + type: string + description: | + Base path used for various Borg directories. Defaults to $HOME, + ~$USER, or ~. + example: /path/to/base + borg_config_directory: + type: string + description: | + Path for Borg configuration files. Defaults to + $borg_base_directory/.config/borg + example: /path/to/base/config + borg_cache_directory: + type: string + description: | + Path for Borg cache files. Defaults to + $borg_base_directory/.cache/borg + example: /path/to/base/cache + borg_files_cache_ttl: + type: integer + description: | + Maximum time to live (ttl) for entries in the Borg files cache. + example: 20 + borg_security_directory: + type: string + description: | + Path for Borg security and encryption nonce files. Defaults to + $borg_base_directory/.config/borg/security + example: /path/to/base/config/security + borg_keys_directory: + type: string + description: | + Path for Borg encryption key files. Defaults to + $borg_base_directory/.config/borg/keys + example: /path/to/base/config/keys + umask: + type: integer + description: | + Umask used for when executing Borg or calling hooks. Defaults to + 0077 for Borg or the umask that borgmatic is run with for hooks. + example: 0077 + lock_wait: + type: integer + description: | + Maximum seconds to wait for acquiring a repository/cache lock. + Defaults to 1. 
+ example: 5 + archive_name_format: + type: string + description: | + Name of the archive. Borg placeholders can be used. See the output + of "borg help placeholders" for details. Defaults to + "{hostname}-{now:%Y-%m-%dT%H:%M:%S.%f}". When running actions like + rlist, info, or check, borgmatic automatically tries to match only + archives created with this name format. + example: "{hostname}-documents-{now}" + match_archives: + type: string + description: | + A Borg pattern for filtering down the archives used by borgmatic + actions that operate on multiple archives. For Borg 1.x, use a shell + pattern here and see the output of "borg help placeholders" for + details. For Borg 2.x, see the output of "borg help match-archives". + If match_archives is not specified, borgmatic defaults to deriving + the match_archives value from archive_name_format. + example: "sh:{hostname}-*" + relocated_repo_access_is_ok: + type: boolean + description: | + Bypass Borg error about a repository that has been moved. Defaults + to false. + example: true + unknown_unencrypted_repo_access_is_ok: + type: boolean + description: | + Bypass Borg error about a previously unknown unencrypted repository. + Defaults to false. + example: true + extra_borg_options: + type: object additionalProperties: false properties: - source_directories: + init: + type: string + description: | + Extra command-line options to pass to "borg init". + example: "--extra-option" + create: + type: string + description: | + Extra command-line options to pass to "borg create". + example: "--extra-option" + prune: + type: string + description: | + Extra command-line options to pass to "borg prune". + example: "--extra-option" + compact: + type: string + description: | + Extra command-line options to pass to "borg compact". + example: "--extra-option" + check: + type: string + description: | + Extra command-line options to pass to "borg check". 
+ example: "--extra-option" + description: | + Additional options to pass directly to particular Borg commands, + handy for Borg options that borgmatic does not yet support natively. + Note that borgmatic does not perform any validation on these + options. Running borgmatic with "--verbosity 2" shows the exact Borg + command-line invocation. + keep_within: + type: string + description: Keep all archives within this time interval. + example: 3H + keep_secondly: + type: integer + description: Number of secondly archives to keep. + example: 60 + keep_minutely: + type: integer + description: Number of minutely archives to keep. + example: 60 + keep_hourly: + type: integer + description: Number of hourly archives to keep. + example: 24 + keep_daily: + type: integer + description: Number of daily archives to keep. + example: 7 + keep_weekly: + type: integer + description: Number of weekly archives to keep. + example: 4 + keep_monthly: + type: integer + description: Number of monthly archives to keep. + example: 6 + keep_yearly: + type: integer + description: Number of yearly archives to keep. + example: 1 + prefix: + type: string + description: | + Deprecated. When pruning or checking archives, only consider archive + names starting with this prefix. Borg placeholders can be used. See + the output of "borg help placeholders" for details. If a prefix is + not specified, borgmatic defaults to matching archives based on the + archive_name_format (see above). + example: sourcehostname + checks: + type: array + items: + type: object + required: ['name'] + additionalProperties: false + properties: + name: + type: string + enum: + - repository + - archives + - data + - extract + - disabled + description: | + Name of consistency check to run: "repository", + "archives", "data", and/or "extract". Set to "disabled" + to disable all consistency checks. 
"repository" checks + the consistency of the repository, "archives" checks all + of the archives, "data" verifies the integrity of the + data within the archives, and "extract" does an + extraction dry-run of the most recent archive. Note that + "data" implies "archives". + example: repository + frequency: + type: string + description: | + How frequently to run this type of consistency check (as + a best effort). The value is a number followed by a unit + of time. E.g., "2 weeks" to run this consistency check + no more than every two weeks for a given repository or + "1 month" to run it no more than monthly. Defaults to + "always": running this check every time checks are run. + example: 2 weeks + description: | + List of one or more consistency checks to run on a periodic basis + (if "frequency" is set) or every time borgmatic runs checks (if + "frequency" is omitted). + check_repositories: + type: array + items: + type: string + description: | + Paths or labels for a subset of the configured "repositories" (see + above) on which to run consistency checks. Handy in case some of + your repositories are very large, and so running consistency checks + on them would take too long. Defaults to running consistency checks + on all configured repositories. + example: + - user@backupserver:sourcehostname.borg + check_last: + type: integer + description: | + Restrict the number of checked archives to the last n. Applies only + to the "archives" check. Defaults to checking all archives. + example: 3 + color: + type: boolean + description: | + Apply color to console output. Can be overridden with --no-color + command-line flag. Defaults to true. + example: false + before_actions: + type: array + items: + type: string + description: | + List of one or more shell commands or scripts to execute before all + the actions for each repository. + example: + - "echo Starting actions." 
+ before_backup: + type: array + items: + type: string + description: | + List of one or more shell commands or scripts to execute before + creating a backup, run once per repository. + example: + - "echo Starting a backup." + before_prune: + type: array + items: + type: string + description: | + List of one or more shell commands or scripts to execute before + pruning, run once per repository. + example: + - "echo Starting pruning." + before_compact: + type: array + items: + type: string + description: | + List of one or more shell commands or scripts to execute before + compaction, run once per repository. + example: + - "echo Starting compaction." + before_check: + type: array + items: + type: string + description: | + List of one or more shell commands or scripts to execute before + consistency checks, run once per repository. + example: + - "echo Starting checks." + before_extract: + type: array + items: + type: string + description: | + List of one or more shell commands or scripts to execute before + extracting a backup, run once per repository. + example: + - "echo Starting extracting." + after_backup: + type: array + items: + type: string + description: | + List of one or more shell commands or scripts to execute after + creating a backup, run once per repository. + example: + - "echo Finished a backup." + after_compact: + type: array + items: + type: string + description: | + List of one or more shell commands or scripts to execute after + compaction, run once per repository. + example: + - "echo Finished compaction." + after_prune: + type: array + items: + type: string + description: | + List of one or more shell commands or scripts to execute after + pruning, run once per repository. + example: + - "echo Finished pruning." + after_check: + type: array + items: + type: string + description: | + List of one or more shell commands or scripts to execute after + consistency checks, run once per repository. + example: + - "echo Finished checks." 
+ after_extract: + type: array + items: + type: string + description: | + List of one or more shell commands or scripts to execute after + extracting a backup, run once per repository. + example: + - "echo Finished extracting." + after_actions: + type: array + items: + type: string + description: | + List of one or more shell commands or scripts to execute after all + actions for each repository. + example: + - "echo Finished actions." + on_error: + type: array + items: + type: string + description: | + List of one or more shell commands or scripts to execute when an + exception occurs during a "create", "prune", "compact", or "check" + action or an associated before/after hook. + example: + - "echo Error during create/prune/compact/check." + before_everything: + type: array + items: + type: string + description: | + List of one or more shell commands or scripts to execute before + running all actions (if one of them is "create"). These are + collected from all configuration files and then run once before all + of them (prior to all actions). + example: + - "echo Starting actions." + after_everything: + type: array + items: + type: string + description: | + List of one or more shell commands or scripts to execute after + running all actions (if one of them is "create"). These are + collected from all configuration files and then run once after all + of them (after any action). + example: + - "echo Completed actions." + postgresql_databases: + type: array + items: + type: object + required: ['name'] + additionalProperties: false + properties: + name: + type: string + description: | + Database name (required if using this hook). Or "all" to + dump all databases on the host. (Also set the "format" + to dump each database to a separate file instead of one + combined file.) Note that using this database hook + implicitly enables both read_special and one_file_system + (see above) to support dump and restore streaming. 
+ example: users + hostname: + type: string + description: | + Database hostname to connect to. Defaults to connecting + via local Unix socket. + example: database.example.org + restore_hostname: + type: string + description: | + Database hostname to restore to. Defaults to the + "hostname" option. + example: database.example.org + port: + type: integer + description: Port to connect to. Defaults to 5432. + example: 5433 + restore_port: + type: integer + description: | + Port to restore to. Defaults to the "port" option. + example: 5433 + username: + type: string + description: | + Username with which to connect to the database. Defaults + to the username of the current user. You probably want + to specify the "postgres" superuser here when the + database name is "all". + example: dbuser + restore_username: + type: string + description: | + Username with which to restore the database. Defaults to + the "username" option. + example: dbuser + password: + type: string + description: | + Password with which to connect to the database. Omitting + a password will only work if PostgreSQL is configured to + trust the configured username without a password or you + create a ~/.pgpass file. + example: trustsome1 + restore_password: + type: string + description: | + Password with which to connect to the restore database. + Defaults to the "password" option. + example: trustsome1 + no_owner: + type: boolean + description: | + Do not output commands to set ownership of objects to + match the original database. By default, pg_dump and + pg_restore issue ALTER OWNER or SET SESSION + AUTHORIZATION statements to set ownership of created + schema elements. These statements will fail unless the + initial connection to the database is made by a + superuser. + example: true + format: + type: string + enum: ['plain', 'custom', 'directory', 'tar'] + description: | + Database dump output format. One of "plain", "custom", + "directory", or "tar". 
Defaults to "custom" (unlike raw + pg_dump) for a single database. Or, when database name + is "all" and format is blank, dumps all databases to a + single file. But if a format is specified with an "all" + database name, dumps each database to a separate file of + that format, allowing more convenient restores of + individual databases. See the pg_dump documentation for + more about formats. + example: directory + ssl_mode: + type: string + enum: ['disable', 'allow', 'prefer', + 'require', 'verify-ca', 'verify-full'] + description: | + SSL mode to use to connect to the database server. One + of "disable", "allow", "prefer", "require", "verify-ca" + or "verify-full". Defaults to "disable". + example: require + ssl_cert: + type: string + description: | + Path to a client certificate. + example: "/root/.postgresql/postgresql.crt" + ssl_key: + type: string + description: | + Path to a private client key. + example: "/root/.postgresql/postgresql.key" + ssl_root_cert: + type: string + description: | + Path to a root certificate containing a list of trusted + certificate authorities. + example: "/root/.postgresql/root.crt" + ssl_crl: + type: string + description: | + Path to a certificate revocation list. + example: "/root/.postgresql/root.crl" + pg_dump_command: + type: string + description: | + Command to use instead of "pg_dump" or "pg_dumpall". + This can be used to run a specific pg_dump version + (e.g., one inside a running container). Defaults to + "pg_dump" for single database dump or "pg_dumpall" to + dump all databases. + example: docker exec my_pg_container pg_dump + pg_restore_command: + type: string + description: | + Command to use instead of "pg_restore". This can be used + to run a specific pg_restore version (e.g., one inside a + running container). Defaults to "pg_restore". + example: docker exec my_pg_container pg_restore + psql_command: + type: string + description: | + Command to use instead of "psql". 
This can be used to + run a specific psql version (e.g., one inside a running + container). Defaults to "psql". + example: docker exec my_pg_container psql + options: + type: string + description: | + Additional pg_dump/pg_dumpall options to pass directly + to the dump command, without performing any validation + on them. See pg_dump documentation for details. + example: --role=someone + list_options: + type: string + description: | + Additional psql options to pass directly to the psql + command that lists available databases, without + performing any validation on them. See psql + documentation for details. + example: --role=someone + restore_options: + type: string + description: | + Additional pg_restore/psql options to pass directly to + the restore command, without performing any validation + on them. See pg_restore/psql documentation for details. + example: --role=someone + analyze_options: + type: string + description: | + Additional psql options to pass directly to the analyze + command run after a restore, without performing any + validation on them. See psql documentation for details. + example: --role=someone + description: | + List of one or more PostgreSQL databases to dump before creating a + backup, run once per configuration file. The database dumps are + added to your source directories at runtime, backed up, and removed + afterwards. Requires pg_dump/pg_dumpall/pg_restore commands. See + https://www.postgresql.org/docs/current/app-pgdump.html and + https://www.postgresql.org/docs/current/libpq-ssl.html for details. + mysql_databases: + type: array + items: + type: object + required: ['name'] + additionalProperties: false + properties: + name: + type: string + description: | + Database name (required if using this hook). Or "all" to + dump all databases on the host. Note that using this + database hook implicitly enables both read_special and + one_file_system (see above) to support dump and restore + streaming. 
+ example: users + hostname: + type: string + description: | + Database hostname to connect to. Defaults to connecting + via local Unix socket. + example: database.example.org + restore_hostname: + type: string + description: | + Database hostname to restore to. Defaults to the + "hostname" option. + example: database.example.org + port: + type: integer + description: Port to connect to. Defaults to 3306. + example: 3307 + restore_port: + type: integer + description: | + Port to restore to. Defaults to the "port" option. + example: 3307 + username: + type: string + description: | + Username with which to connect to the database. Defaults + to the username of the current user. + example: dbuser + restore_username: + type: string + description: | + Username with which to restore the database. Defaults to + the "username" option. + example: dbuser + password: + type: string + description: | + Password with which to connect to the database. Omitting + a password will only work if MySQL is configured to + trust the configured username without a password. + example: trustsome1 + restore_password: + type: string + description: | + Password with which to connect to the restore database. + Defaults to the "password" option. + example: trustsome1 + format: + type: string + enum: ['sql'] + description: | + Database dump output format. Currently only "sql" is + supported. Defaults to "sql" for a single database. Or, + when database name is "all" and format is blank, dumps + all databases to a single file. But if a format is + specified with an "all" database name, dumps each + database to a separate file of that format, allowing + more convenient restores of individual databases. + example: sql + add_drop_database: + type: boolean + description: | + Use the "--add-drop-database" flag with mysqldump, + causing the database to be dropped right before restore. + Defaults to true. 
+ example: false + options: + type: string + description: | + Additional mysqldump options to pass directly to the + dump command, without performing any validation on them. + See mysqldump documentation for details. + example: --skip-comments + list_options: + type: string + description: | + Additional mysql options to pass directly to the mysql + command that lists available databases, without + performing any validation on them. See mysql + documentation for details. + example: --defaults-extra-file=my.cnf + restore_options: + type: string + description: | + Additional mysql options to pass directly to the mysql + command that restores database dumps, without performing + any validation on them. See mysql documentation for + details. + example: --defaults-extra-file=my.cnf + description: | + List of one or more MySQL/MariaDB databases to dump before creating + a backup, run once per configuration file. The database dumps are + added to your source directories at runtime, backed up, and removed + afterwards. Requires mysqldump/mysql commands (from either MySQL or + MariaDB). See https://dev.mysql.com/doc/refman/8.0/en/mysqldump.html + or https://mariadb.com/kb/en/library/mysqldump/ for details. + sqlite_databases: + type: array + items: + type: object + required: ['path','name'] + additionalProperties: false + properties: + name: + type: string + description: | + This is used to tag the database dump file with a name. + It is not the path to the database file itself. The name + "all" has no special meaning for SQLite databases. + example: users + path: + type: string + description: | + Path to the SQLite database file to dump. If relative, + it is relative to the current working directory. Note + that using this database hook implicitly enables both + read_special and one_file_system (see above) to support + dump and restore streaming. 
+ example: /var/lib/sqlite/users.db + restore_path: + type: string + description: | + Path to the SQLite database file to restore to. Defaults + to the "path" option. + example: /var/lib/sqlite/users.db + mongodb_databases: + type: array + items: + type: object + required: ['name'] + additionalProperties: false + properties: + name: + type: string + description: | + Database name (required if using this hook). Or "all" to + dump all databases on the host. Note that using this + database hook implicitly enables both read_special and + one_file_system (see above) to support dump and restore + streaming. + example: users + hostname: + type: string + description: | + Database hostname to connect to. Defaults to connecting + to localhost. + example: database.example.org + restore_hostname: + type: string + description: | + Database hostname to restore to. Defaults to the + "hostname" option. + example: database.example.org + port: + type: integer + description: Port to connect to. Defaults to 27017. + example: 27018 + restore_port: + type: integer + description: | + Port to restore to. Defaults to the "port" option. + example: 27018 + username: + type: string + description: | + Username with which to connect to the database. Skip it + if no authentication is needed. + example: dbuser + restore_username: + type: string + description: | + Username with which to restore the database. Defaults to + the "username" option. + example: dbuser + password: + type: string + description: | + Password with which to connect to the database. Skip it + if no authentication is needed. + example: trustsome1 + restore_password: + type: string + description: | + Password with which to connect to the restore database. + Defaults to the "password" option. + example: trustsome1 + authentication_database: + type: string + description: | + Authentication database where the specified username + exists. If no authentication database is specified, the + database provided in "name" is used. 
If "name" is "all", + the "admin" database is used. + example: admin + format: + type: string + enum: ['archive', 'directory'] + description: | + Database dump output format. One of "archive", or + "directory". Defaults to "archive". See mongodump + documentation for details. Note that format is ignored + when the database name is "all". + example: directory + options: + type: string + description: | + Additional mongodump options to pass directly to the + dump command, without performing any validation on them. + See mongodump documentation for details. + example: --dumpDbUsersAndRoles + restore_options: + type: string + description: | + Additional mongorestore options to pass directly to the + dump command, without performing any validation on them. + See mongorestore documentation for details. + example: --restoreDbUsersAndRoles + description: | + List of one or more MongoDB databases to dump before creating a + backup, run once per configuration file. The database dumps are + added to your source directories at runtime, backed up, and removed + afterwards. Requires mongodump/mongorestore commands. See + https://docs.mongodb.com/database-tools/mongodump/ and + https://docs.mongodb.com/database-tools/mongorestore/ for details. + ntfy: + type: object + required: ['topic'] + additionalProperties: false + properties: + topic: + type: string + description: | + The topic to publish to. See https://ntfy.sh/docs/publish/ + for details. + example: topic + server: + type: string + description: | + The address of your self-hosted ntfy.sh instance. + example: https://ntfy.your-domain.com + username: + type: string + description: | + The username used for authentication. + example: testuser + password: + type: string + description: | + The password used for authentication. + example: fakepassword + start: + type: object + properties: + title: + type: string + description: | + The title of the message. + example: Ping! 
+ message: + type: string + description: | + The message body to publish. + example: Your backups have failed. + priority: + type: string + description: | + The priority to set. + example: urgent + tags: + type: string + description: | + Tags to attach to the message. + example: incoming_envelope + finish: + type: object + properties: + title: + type: string + description: | + The title of the message. + example: Ping! + message: + type: string + description: | + The message body to publish. + example: Your backups have failed. + priority: + type: string + description: | + The priority to set. + example: urgent + tags: + type: string + description: | + Tags to attach to the message. + example: incoming_envelope + fail: + type: object + properties: + title: + type: string + description: | + The title of the message. + example: Ping! + message: + type: string + description: | + The message body to publish. + example: Your backups have failed. + priority: + type: string + description: | + The priority to set. + example: urgent + tags: + type: string + description: | + Tags to attach to the message. + example: incoming_envelope + states: type: array items: type: string + enum: + - start + - finish + - fail + uniqueItems: true description: | - List of source directories and files to backup. Globs and - tildes are expanded. Do not backslash spaces in path names. + List of one or more monitoring states to ping for: "start", + "finish", and/or "fail". Defaults to pinging for failure + only. example: - - /home - - /etc - - /var/log/syslog* - - /home/user/path with spaces - repositories: - type: array - items: - type: object - required: - - path - properties: - path: - type: string - example: ssh://user@backupserver/./{fqdn} - label: - type: string - example: backupserver - description: | - A required list of local or remote repositories with paths - and optional labels (which can be used with the --repository - flag to select a repository). Tildes are expanded. 
Multiple - repositories are backed up to in sequence. Borg placeholders - can be used. See the output of "borg help placeholders" for - details. See ssh_command for SSH options like identity file - or port. If systemd service is used, then add local - repository paths in the systemd service file to the - ReadWritePaths list. Prior to borgmatic 1.7.10, repositories - was a list of plain path strings. - example: - - path: ssh://user@backupserver/./sourcehostname.borg - label: backupserver - - path: /mnt/backup - label: local - working_directory: + - start + - finish + healthchecks: + type: object + required: ['ping_url'] + additionalProperties: false + properties: + ping_url: type: string description: | - Working directory for the "borg create" command. Tildes are - expanded. Useful for backing up using relative paths. See - http://borgbackup.readthedocs.io/en/stable/usage/create.html - for details. Defaults to not set. - example: /path/to/working/directory - one_file_system: + Healthchecks ping URL or UUID to notify when a backup + begins, ends, errors, or to send only logs. + example: https://hc-ping.com/your-uuid-here + verify_tls: type: boolean description: | - Stay in same file system: do not cross mount points beyond - the given source directories. Defaults to false. But when a - database hook is used, the setting here is ignored and - one_file_system is considered true. - example: true - numeric_ids: - type: boolean - description: | - Only store/extract numeric user and group identifiers. - Defaults to false. - example: true - atime: - type: boolean - description: | - Store atime into archive. Defaults to true in Borg < 1.2, - false in Borg 1.2+. - example: false - ctime: - type: boolean - description: Store ctime into archive. Defaults to true. - example: false - birthtime: - type: boolean - description: | - Store birthtime (creation date) into archive. Defaults to + Verify the TLS certificate of the ping URL host. Defaults to true. 
 example: false - read_special: + send_logs: type: boolean description: | - Use Borg's --read-special flag to allow backup of block and - other special devices. Use with caution, as it will lead to - problems if used when backing up special devices such as - /dev/zero. Defaults to false. But when a database hook is - used, the setting here is ignored and read_special is - considered true. + Send borgmatic logs to Healthchecks as part of the "finish", + "fail", and "log" states. Defaults to true. example: false - flags: - type: boolean + ping_body_limit: + type: integer description: | - Record filesystem flags (e.g. NODUMP, IMMUTABLE) in archive. - Defaults to true. - example: true - files_cache: - type: string - description: | - Mode in which to operate the files cache. See - http://borgbackup.readthedocs.io/en/stable/usage/create.html - for details. Defaults to "ctime,size,inode". - example: ctime,size,inode - local_path: - type: string - description: | - Alternate Borg local executable. Defaults to "borg". - example: borg1 - remote_path: - type: string - description: | - Alternate Borg remote executable. Defaults to "borg". - example: borg1 - patterns: + Number of bytes of borgmatic logs to send to Healthchecks, + ideally the same as PING_BODY_LIMIT configured on the + Healthchecks server. Set to 0 to send all logs and disable + this truncation. Defaults to 100000. + example: 200000 + states: type: array items: type: string + enum: + - start + - finish + - fail + - log + uniqueItems: true description: | - Any paths matching these patterns are included/excluded from - backups. Globs are expanded. (Tildes are not.) See the - output of "borg help patterns" for more details. Quote any - value if it contains leading punctuation, so it parses - correctly. Note that only one of "patterns" and - "source_directories" may be used. + List of one or more monitoring states to ping for: "start", + "finish", "fail", and/or "log". Defaults to pinging for all + states. 
example: - - 'R /' - - '- /home/*/.cache' - - '+ /home/susan' - - '- /home/*' - patterns_from: - type: array - items: - type: string - description: | - Read include/exclude patterns from one or more separate - named files, one pattern per line. Note that Borg considers - this option experimental. See the output of "borg help - patterns" for more details. - example: - - /etc/borgmatic/patterns - exclude_patterns: - type: array - items: - type: string - description: | - Any paths matching these patterns are excluded from backups. - Globs and tildes are expanded. Note that a glob pattern must - either start with a glob or be an absolute path. Do not - backslash spaces in path names. See the output of "borg help - patterns" for more details. - example: - - '*.pyc' - - /home/*/.cache - - '*/.vim*.tmp' - - /etc/ssl - - /home/user/path with spaces - exclude_from: - type: array - items: - type: string - description: | - Read exclude patterns from one or more separate named files, - one pattern per line. See the output of "borg help patterns" - for more details. - example: - - /etc/borgmatic/excludes - exclude_caches: - type: boolean - description: | - Exclude directories that contain a CACHEDIR.TAG file. See - http://www.brynosaurus.com/cachedir/spec.html for details. - Defaults to false. - example: true - exclude_if_present: - type: array - items: - type: string - description: | - Exclude directories that contain a file with the given - filenames. Defaults to not set. - example: - - .nobackup - keep_exclude_tags: - type: boolean - description: | - If true, the exclude_if_present filename is included in - backups. Defaults to false, meaning that the - exclude_if_present filename is omitted from backups. - example: true - exclude_nodump: - type: boolean - description: | - Exclude files with the NODUMP flag. Defaults to false. 
- example: true - borgmatic_source_directory: - type: string - description: | - Path for additional source files used for temporary internal - state like borgmatic database dumps. Note that changing this - path prevents "borgmatic restore" from finding any database - dumps created before the change. Defaults to ~/.borgmatic - example: /tmp/borgmatic - source_directories_must_exist: - type: boolean - description: | - If true, then source directories must exist, otherwise an - error is raised. Defaults to false. - example: true - storage: - type: object + - finish description: | - Repository storage options. See - https://borgbackup.readthedocs.io/en/stable/usage/create.html and - https://borgbackup.readthedocs.io/en/stable/usage/general.html for - details. + Configuration for a monitoring integration with Healthchecks. Create + an account at https://healthchecks.io (or self-host Healthchecks) if + you'd like to use this service. See borgmatic monitoring + documentation for details. + cronitor: + type: object + required: ['ping_url'] additionalProperties: false properties: - encryption_passcommand: + ping_url: type: string description: | - The standard output of this command is used to unlock the - encryption key. Only use on repositories that were - initialized with passcommand/repokey/keyfile encryption. - Note that if both encryption_passcommand and - encryption_passphrase are set, then encryption_passphrase - takes precedence. Defaults to not set. - example: "secret-tool lookup borg-repository repo-name" - encryption_passphrase: - type: string - description: | - Passphrase to unlock the encryption key with. Only use on - repositories that were initialized with - passphrase/repokey/keyfile encryption. Quote the value if it - contains punctuation, so it parses correctly. And backslash - any quote or backslash literals as well. Defaults to not - set. 
- example: "!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~" - checkpoint_interval: - type: integer - description: | - Number of seconds between each checkpoint during a - long-running backup. See - https://borgbackup.readthedocs.io/en/stable/faq.html - for details. Defaults to checkpoints every 1800 seconds (30 - minutes). - example: 1800 - checkpoint_volume: - type: integer - description: | - Number of backed up bytes between each checkpoint during a - long-running backup. Only supported with Borg 2+. See - https://borgbackup.readthedocs.io/en/stable/faq.html - for details. Defaults to only time-based checkpointing (see - "checkpoint_interval") instead of volume-based - checkpointing. - example: 1048576 - chunker_params: - type: string - description: | - Specify the parameters passed to then chunker - (CHUNK_MIN_EXP, CHUNK_MAX_EXP, HASH_MASK_BITS, - HASH_WINDOW_SIZE). See - https://borgbackup.readthedocs.io/en/stable/internals.html - for details. Defaults to "19,23,21,4095". - example: 19,23,21,4095 - compression: - type: string - description: | - Type of compression to use when creating archives. See - http://borgbackup.readthedocs.io/en/stable/usage/create.html - for details. Defaults to "lz4". - example: lz4 - upload_rate_limit: - type: integer - description: | - Remote network upload rate limit in kiBytes/second. Defaults - to unlimited. - example: 100 - retries: - type: integer - description: | - Number of times to retry a failing backup before giving up. - Defaults to 0 (i.e., does not attempt retry). - example: 3 - retry_wait: - type: integer - description: | - Wait time between retries (in seconds) to allow transient - issues to pass. Increases after each retry as a form of - backoff. Defaults to 0 (no wait). - example: 10 - temporary_directory: - type: string - description: | - Directory where temporary files are stored. Defaults to - $TMPDIR - example: /path/to/tmpdir - ssh_command: - type: string - description: | - Command to use instead of "ssh". 
This can be used to specify - ssh options. Defaults to not set. - example: ssh -i /path/to/private/key - borg_base_directory: - type: string - description: | - Base path used for various Borg directories. Defaults to - $HOME, ~$USER, or ~. - example: /path/to/base - borg_config_directory: - type: string - description: | - Path for Borg configuration files. Defaults to - $borg_base_directory/.config/borg - example: /path/to/base/config - borg_cache_directory: - type: string - description: | - Path for Borg cache files. Defaults to - $borg_base_directory/.cache/borg - example: /path/to/base/cache - borg_files_cache_ttl: - type: integer - description: | - Maximum time to live (ttl) for entries in the Borg files - cache. - example: 20 - borg_security_directory: - type: string - description: | - Path for Borg security and encryption nonce files. Defaults - to $borg_base_directory/.config/borg/security - example: /path/to/base/config/security - borg_keys_directory: - type: string - description: | - Path for Borg encryption key files. Defaults to - $borg_base_directory/.config/borg/keys - example: /path/to/base/config/keys - umask: - type: integer - description: Umask to be used for borg create. Defaults to 0077. - example: 0077 - lock_wait: - type: integer - description: | - Maximum seconds to wait for acquiring a repository/cache - lock. Defaults to 1. - example: 5 - archive_name_format: - type: string - description: | - Name of the archive. Borg placeholders can be used. See the - output of "borg help placeholders" for details. Defaults to - "{hostname}-{now:%Y-%m-%dT%H:%M:%S.%f}". When running - actions like rlist, info, or check, borgmatic automatically - tries to match only archives created with this name format. - example: "{hostname}-documents-{now}" - match_archives: - type: string - description: | - A Borg pattern for filtering down the archives used by - borgmatic actions that operate on multiple archives. 
For - Borg 1.x, use a shell pattern here and see the output of - "borg help placeholders" for details. For Borg 2.x, see the - output of "borg help match-archives". If match_archives is - not specified, borgmatic defaults to deriving the - match_archives value from archive_name_format. - example: "sh:{hostname}-*" - relocated_repo_access_is_ok: - type: boolean - description: | - Bypass Borg error about a repository that has been moved. - Defaults to false. - example: true - unknown_unencrypted_repo_access_is_ok: - type: boolean - description: | - Bypass Borg error about a previously unknown unencrypted - repository. Defaults to false. - example: true - extra_borg_options: - type: object - additionalProperties: false - properties: - init: - type: string - description: | - Extra command-line options to pass to "borg init". - example: "--extra-option" - create: - type: string - description: | - Extra command-line options to pass to "borg create". - example: "--extra-option" - prune: - type: string - description: | - Extra command-line options to pass to "borg prune". - example: "--extra-option" - compact: - type: string - description: | - Extra command-line options to pass to "borg compact". - example: "--extra-option" - check: - type: string - description: | - Extra command-line options to pass to "borg check". - example: "--extra-option" - description: | - Additional options to pass directly to particular Borg - commands, handy for Borg options that borgmatic does not yet - support natively. Note that borgmatic does not perform any - validation on these options. Running borgmatic with - "--verbosity 2" shows the exact Borg command-line - invocation. - retention: - type: object + Cronitor ping URL to notify when a backup begins, + ends, or errors. + example: https://cronitor.link/d3x0c1 description: | - Retention policy for how many backups to keep in each category. See - https://borgbackup.readthedocs.io/en/stable/usage/prune.html for - details. 
At least one of the "keep" options is required for pruning - to work. To skip pruning entirely, run "borgmatic create" or "check" - without the "prune" action. See borgmatic documentation for details. + Configuration for a monitoring integration with Cronitor. Create an + account at https://cronitor.io if you'd like to use this service. + See borgmatic monitoring documentation for details. + pagerduty: + type: object + required: ['integration_key'] additionalProperties: false properties: - keep_within: - type: string - description: Keep all archives within this time interval. - example: 3H - keep_secondly: - type: integer - description: Number of secondly archives to keep. - example: 60 - keep_minutely: - type: integer - description: Number of minutely archives to keep. - example: 60 - keep_hourly: - type: integer - description: Number of hourly archives to keep. - example: 24 - keep_daily: - type: integer - description: Number of daily archives to keep. - example: 7 - keep_weekly: - type: integer - description: Number of weekly archives to keep. - example: 4 - keep_monthly: - type: integer - description: Number of monthly archives to keep. - example: 6 - keep_yearly: - type: integer - description: Number of yearly archives to keep. - example: 1 - prefix: + integration_key: type: string description: | - Deprecated. When pruning, only consider archive names - starting with this prefix. Borg placeholders can be used. - See the output of "borg help placeholders" for details. - If a prefix is not specified, borgmatic defaults to - matching archives based on the archive_name_format (see - above). - example: sourcehostname - consistency: - type: object + PagerDuty integration key used to notify PagerDuty + when a backup errors. + example: a177cad45bd374409f78906a810a3074 description: | - Consistency checks to run after backups. 
See - https://borgbackup.readthedocs.io/en/stable/usage/check.html and - https://borgbackup.readthedocs.io/en/stable/usage/extract.html for - details. + Configuration for a monitoring integration with PagerDuty. Create an + account at https://www.pagerduty.com/ if you'd like to use this + service. See borgmatic monitoring documentation for details. + cronhub: + type: object + required: ['ping_url'] additionalProperties: false properties: - checks: - type: array - items: - type: object - required: ['name'] - additionalProperties: false - properties: - name: - type: string - enum: - - repository - - archives - - data - - extract - - disabled - description: | - Name of consistency check to run: "repository", - "archives", "data", and/or "extract". Set to - "disabled" to disable all consistency checks. - "repository" checks the consistency of the - repository, "archives" checks all of the - archives, "data" verifies the integrity of the - data within the archives, and "extract" does an - extraction dry-run of the most recent archive. - Note that "data" implies "archives". - example: repository - frequency: - type: string - description: | - How frequently to run this type of consistency - check (as a best effort). The value is a number - followed by a unit of time. E.g., "2 weeks" to - run this consistency check no more than every - two weeks for a given repository or "1 month" to - run it no more than monthly. Defaults to - "always": running this check every time checks - are run. - example: 2 weeks - description: | - List of one or more consistency checks to run on a periodic - basis (if "frequency" is set) or every time borgmatic runs - checks (if "frequency" is omitted). - check_repositories: - type: array - items: - type: string - description: | - Paths or labels for a subset of the repositories in the - location section on which to run consistency checks. 
Handy - in case some of your repositories are very large, and so - running consistency checks on them would take too long. - Defaults to running consistency checks on all repositories - configured in the location section. - example: - - user@backupserver:sourcehostname.borg - check_last: - type: integer - description: | - Restrict the number of checked archives to the last n. - Applies only to the "archives" check. Defaults to checking - all archives. - example: 3 - prefix: + ping_url: type: string description: | - Deprecated. When performing the "archives" check, only - consider archive names starting with this prefix. Borg - placeholders can be used. See the output of "borg help - placeholders" for details. If a prefix is not specified, - borgmatic defaults to matching archives based on the - archive_name_format (see above). - example: sourcehostname - output: - type: object + Cronhub ping URL to notify when a backup begins, + ends, or errors. + example: https://cronhub.io/ping/1f5e3410-254c-5587 description: | - Options for customizing borgmatic's own output and logging. - additionalProperties: false - properties: - color: - type: boolean - description: | - Apply color to console output. Can be overridden with - --no-color command-line flag. Defaults to true. - example: false - hooks: - type: object - description: | - Shell commands, scripts, or integrations to execute at various - points during a borgmatic run. IMPORTANT: All provided commands and - scripts are executed with user permissions of borgmatic. Do not - forget to set secure permissions on this configuration file (chmod - 0600) as well as on any script called from a hook (chmod 0700) to - prevent potential shell injection or privilege escalation. - additionalProperties: false - properties: - before_actions: - type: array - items: - type: string - description: | - List of one or more shell commands or scripts to execute - before all the actions for each repository. 
- example: - - "echo Starting actions." - before_backup: - type: array - items: - type: string - description: | - List of one or more shell commands or scripts to execute - before creating a backup, run once per repository. - example: - - "echo Starting a backup." - before_prune: - type: array - items: - type: string - description: | - List of one or more shell commands or scripts to execute - before pruning, run once per repository. - example: - - "echo Starting pruning." - before_compact: - type: array - items: - type: string - description: | - List of one or more shell commands or scripts to execute - before compaction, run once per repository. - example: - - "echo Starting compaction." - before_check: - type: array - items: - type: string - description: | - List of one or more shell commands or scripts to execute - before consistency checks, run once per repository. - example: - - "echo Starting checks." - before_extract: - type: array - items: - type: string - description: | - List of one or more shell commands or scripts to execute - before extracting a backup, run once per repository. - example: - - "echo Starting extracting." - after_backup: - type: array - items: - type: string - description: | - List of one or more shell commands or scripts to execute - after creating a backup, run once per repository. - example: - - "echo Finished a backup." - after_compact: - type: array - items: - type: string - description: | - List of one or more shell commands or scripts to execute - after compaction, run once per repository. - example: - - "echo Finished compaction." - after_prune: - type: array - items: - type: string - description: | - List of one or more shell commands or scripts to execute - after pruning, run once per repository. - example: - - "echo Finished pruning." - after_check: - type: array - items: - type: string - description: | - List of one or more shell commands or scripts to execute - after consistency checks, run once per repository. 
- example: - - "echo Finished checks." - after_extract: - type: array - items: - type: string - description: | - List of one or more shell commands or scripts to execute - after extracting a backup, run once per repository. - example: - - "echo Finished extracting." - after_actions: - type: array - items: - type: string - description: | - List of one or more shell commands or scripts to execute - after all actions for each repository. - example: - - "echo Finished actions." - on_error: - type: array - items: - type: string - description: | - List of one or more shell commands or scripts to execute - when an exception occurs during a "create", "prune", - "compact", or "check" action or an associated before/after - hook. - example: - - "echo Error during create/prune/compact/check." - before_everything: - type: array - items: - type: string - description: | - List of one or more shell commands or scripts to execute - before running all actions (if one of them is "create"). - These are collected from all configuration files and then - run once before all of them (prior to all actions). - example: - - "echo Starting actions." - after_everything: - type: array - items: - type: string - description: | - List of one or more shell commands or scripts to execute - after running all actions (if one of them is "create"). - These are collected from all configuration files and then - run once after all of them (after any action). - example: - - "echo Completed actions." - postgresql_databases: - type: array - items: - type: object - required: ['name'] - additionalProperties: false - properties: - name: - type: string - description: | - Database name (required if using this hook). Or - "all" to dump all databases on the host. (Also - set the "format" to dump each database to a - separate file instead of one combined file.) - Note that using this database hook implicitly - enables both read_special and one_file_system - (see above) to support dump and restore - streaming. 
- example: users - hostname: - type: string - description: | - Database hostname to connect to. Defaults to - connecting via local Unix socket. - example: database.example.org - restore_hostname: - type: string - description: | - Database hostname to restore to. Defaults to - the "hostname" option. - example: database.example.org - port: - type: integer - description: Port to connect to. Defaults to 5432. - example: 5433 - restore_port: - type: integer - description: Port to restore to. Defaults to the - "port" option. - example: 5433 - username: - type: string - description: | - Username with which to connect to the database. - Defaults to the username of the current user. - You probably want to specify the "postgres" - superuser here when the database name is "all". - example: dbuser - restore_username: - type: string - description: | - Username with which to restore the database. - Defaults to the "username" option. - example: dbuser - password: - type: string - description: | - Password with which to connect to the database. - Omitting a password will only work if PostgreSQL - is configured to trust the configured username - without a password or you create a ~/.pgpass - file. - example: trustsome1 - restore_password: - type: string - description: | - Password with which to connect to the restore - database. Defaults to the "password" option. - example: trustsome1 - no_owner: - type: boolean - description: | - Do not output commands to set ownership of - objects to match the original database. By - default, pg_dump and pg_restore issue ALTER - OWNER or SET SESSION AUTHORIZATION statements - to set ownership of created schema elements. - These statements will fail unless the initial - connection to the database is made by a - superuser. - example: true - format: - type: string - enum: ['plain', 'custom', 'directory', 'tar'] - description: | - Database dump output format. One of "plain", - "custom", "directory", or "tar". 
Defaults to - "custom" (unlike raw pg_dump) for a single - database. Or, when database name is "all" and - format is blank, dumps all databases to a single - file. But if a format is specified with an "all" - database name, dumps each database to a separate - file of that format, allowing more convenient - restores of individual databases. See the - pg_dump documentation for more about formats. - example: directory - ssl_mode: - type: string - enum: ['disable', 'allow', 'prefer', - 'require', 'verify-ca', 'verify-full'] - description: | - SSL mode to use to connect to the database - server. One of "disable", "allow", "prefer", - "require", "verify-ca" or "verify-full". - Defaults to "disable". - example: require - ssl_cert: - type: string - description: | - Path to a client certificate. - example: "/root/.postgresql/postgresql.crt" - ssl_key: - type: string - description: | - Path to a private client key. - example: "/root/.postgresql/postgresql.key" - ssl_root_cert: - type: string - description: | - Path to a root certificate containing a list of - trusted certificate authorities. - example: "/root/.postgresql/root.crt" - ssl_crl: - type: string - description: | - Path to a certificate revocation list. - example: "/root/.postgresql/root.crl" - pg_dump_command: - type: string - description: | - Command to use instead of "pg_dump" or - "pg_dumpall". This can be used to run a specific - pg_dump version (e.g., one inside a running - container). Defaults to "pg_dump" for single - database dump or "pg_dumpall" to dump all - databases. - example: docker exec my_pg_container pg_dump - pg_restore_command: - type: string - description: | - Command to use instead of "pg_restore". This - can be used to run a specific pg_restore - version (e.g., one inside a running container). - Defaults to "pg_restore". - example: docker exec my_pg_container pg_restore - psql_command: - type: string - description: | - Command to use instead of "psql". 
This can be - used to run a specific psql version (e.g., - one inside a running container). Defaults to - "psql". - example: docker exec my_pg_container psql - options: - type: string - description: | - Additional pg_dump/pg_dumpall options to pass - directly to the dump command, without performing - any validation on them. See pg_dump - documentation for details. - example: --role=someone - list_options: - type: string - description: | - Additional psql options to pass directly to the - psql command that lists available databases, - without performing any validation on them. See - psql documentation for details. - example: --role=someone - restore_options: - type: string - description: | - Additional pg_restore/psql options to pass - directly to the restore command, without - performing any validation on them. See - pg_restore/psql documentation for details. - example: --role=someone - analyze_options: - type: string - description: | - Additional psql options to pass directly to the - analyze command run after a restore, without - performing any validation on them. See psql - documentation for details. - example: --role=someone - description: | - List of one or more PostgreSQL databases to dump before - creating a backup, run once per configuration file. The - database dumps are added to your source directories at - runtime, backed up, and removed afterwards. Requires - pg_dump/pg_dumpall/pg_restore commands. See - https://www.postgresql.org/docs/current/app-pgdump.html and - https://www.postgresql.org/docs/current/libpq-ssl.html for - details. - mysql_databases: - type: array - items: - type: object - required: ['name'] - additionalProperties: false - properties: - name: - type: string - description: | - Database name (required if using this hook). Or - "all" to dump all databases on the host. Note - that using this database hook implicitly enables - both read_special and one_file_system (see - above) to support dump and restore streaming. 
- example: users - hostname: - type: string - description: | - Database hostname to connect to. Defaults to - connecting via local Unix socket. - example: database.example.org - restore_hostname: - type: string - description: | - Database hostname to restore to. Defaults to - the "hostname" option. - example: database.example.org - port: - type: integer - description: Port to connect to. Defaults to 3306. - example: 3307 - restore_port: - type: integer - description: Port to restore to. Defaults to the - "port" option. - example: 5433 - username: - type: string - description: | - Username with which to connect to the database. - Defaults to the username of the current user. - example: dbuser - restore_username: - type: string - description: | - Username with which to restore the database. - Defaults to the "username" option. - example: dbuser - password: - type: string - description: | - Password with which to connect to the database. - Omitting a password will only work if MySQL is - configured to trust the configured username - without a password. - example: trustsome1 - restore_password: - type: string - description: | - Password with which to connect to the restore - database. Defaults to the "password" option. - example: trustsome1 - format: - type: string - enum: ['sql'] - description: | - Database dump output format. Currently only - "sql" is supported. Defaults to "sql" for a - single database. Or, when database name is "all" - and format is blank, dumps all databases to a - single file. But if a format is specified with - an "all" database name, dumps each database to a - separate file of that format, allowing more - convenient restores of individual databases. - example: directory - add_drop_database: - type: boolean - description: | - Use the "--add-drop-database" flag with - mysqldump, causing the database to be dropped - right before restore. Defaults to true. 
- example: false - options: - type: string - description: | - Additional mysqldump options to pass directly to - the dump command, without performing any - validation on them. See mysqldump documentation - for details. - example: --skip-comments - list_options: - type: string - description: | - Additional mysql options to pass directly to - the mysql command that lists available - databases, without performing any validation on - them. See mysql documentation for details. - example: --defaults-extra-file=my.cnf - restore_options: - type: string - description: | - Additional mysql options to pass directly to - the mysql command that restores database dumps, - without performing any validation on them. See - mysql documentation for details. - example: --defaults-extra-file=my.cnf - description: | - List of one or more MySQL/MariaDB databases to dump before - creating a backup, run once per configuration file. The - database dumps are added to your source directories at - runtime, backed up, and removed afterwards. Requires - mysqldump/mysql commands (from either MySQL or MariaDB). See - https://dev.mysql.com/doc/refman/8.0/en/mysqldump.html or - https://mariadb.com/kb/en/library/mysqldump/ for details. - sqlite_databases: - type: array - items: - type: object - required: ['path','name'] - additionalProperties: false - properties: - name: - type: string - description: | - This is used to tag the database dump file - with a name. It is not the path to the database - file itself. The name "all" has no special - meaning for SQLite databases. - example: users - path: - type: string - description: | - Path to the SQLite database file to dump. If - relative, it is relative to the current working - directory. Note that using this - database hook implicitly enables both - read_special and one_file_system (see above) to - support dump and restore streaming. 
- example: /var/lib/sqlite/users.db - restore_path: - type: string - description: | - Path to the SQLite database file to restore to. - Defaults to the "path" option. - example: /var/lib/sqlite/users.db - mongodb_databases: - type: array - items: - type: object - required: ['name'] - additionalProperties: false - properties: - name: - type: string - description: | - Database name (required if using this hook). Or - "all" to dump all databases on the host. Note - that using this database hook implicitly enables - both read_special and one_file_system (see - above) to support dump and restore streaming. - example: users - hostname: - type: string - description: | - Database hostname to connect to. Defaults to - connecting to localhost. - example: database.example.org - restore_hostname: - type: string - description: | - Database hostname to restore to. Defaults to - the "hostname" option. - example: database.example.org - port: - type: integer - description: Port to connect to. Defaults to 27017. - example: 27018 - restore_port: - type: integer - description: Port to restore to. Defaults to the - "port" option. - example: 5433 - username: - type: string - description: | - Username with which to connect to the database. - Skip it if no authentication is needed. - example: dbuser - restore_username: - type: string - description: | - Username with which to restore the database. - Defaults to the "username" option. - example: dbuser - password: - type: string - description: | - Password with which to connect to the database. - Skip it if no authentication is needed. - example: trustsome1 - restore_password: - type: string - description: | - Password with which to connect to the restore - database. Defaults to the "password" option. - example: trustsome1 - authentication_database: - type: string - description: | - Authentication database where the specified - username exists. If no authentication database - is specified, the database provided in "name" - is used. 
If "name" is "all", the "admin" - database is used. - example: admin - format: - type: string - enum: ['archive', 'directory'] - description: | - Database dump output format. One of "archive", - or "directory". Defaults to "archive". See - mongodump documentation for details. Note that - format is ignored when the database name is - "all". - example: directory - options: - type: string - description: | - Additional mongodump options to pass - directly to the dump command, without performing - any validation on them. See mongodump - documentation for details. - example: --dumpDbUsersAndRoles - restore_options: - type: string - description: | - Additional mongorestore options to pass - directly to the dump command, without performing - any validation on them. See mongorestore - documentation for details. - example: --restoreDbUsersAndRoles - description: | - List of one or more MongoDB databases to dump before - creating a backup, run once per configuration file. The - database dumps are added to your source directories at - runtime, backed up, and removed afterwards. Requires - mongodump/mongorestore commands. See - https://docs.mongodb.com/database-tools/mongodump/ and - https://docs.mongodb.com/database-tools/mongorestore/ for - details. - ntfy: - type: object - required: ['topic'] - additionalProperties: false - properties: - topic: - type: string - description: | - The topic to publish to. - (https://ntfy.sh/docs/publish/) - example: topic - server: - type: string - description: | - The address of your self-hosted ntfy.sh instance. - example: https://ntfy.your-domain.com - username: - type: string - description: | - The username used for authentication. - example: testuser - password: - type: string - description: | - The password used for authentication. - example: fakepassword - start: - type: object - properties: - title: - type: string - description: | - The title of the message - example: Ping! 
- message: - type: string - description: | - The message body to publish. - example: Your backups have failed. - priority: - type: string - description: | - The priority to set. - example: urgent - tags: - type: string - description: | - Tags to attach to the message. - example: incoming_envelope - finish: - type: object - properties: - title: - type: string - description: | - The title of the message. - example: Ping! - message: - type: string - description: | - The message body to publish. - example: Your backups have failed. - priority: - type: string - description: | - The priority to set. - example: urgent - tags: - type: string - description: | - Tags to attach to the message. - example: incoming_envelope - fail: - type: object - properties: - title: - type: string - description: | - The title of the message. - example: Ping! - message: - type: string - description: | - The message body to publish. - example: Your backups have failed. - priority: - type: string - description: | - The priority to set. - example: urgent - tags: - type: string - description: | - Tags to attach to the message. - example: incoming_envelope - states: - type: array - items: - type: string - enum: - - start - - finish - - fail - uniqueItems: true - description: | - List of one or more monitoring states to ping for: - "start", "finish", and/or "fail". Defaults to - pinging for failure only. - example: - - start - - finish - healthchecks: - type: object - required: ['ping_url'] - additionalProperties: false - properties: - ping_url: - type: string - description: | - Healthchecks ping URL or UUID to notify when a - backup begins, ends, errors, or to send only logs. - example: https://hc-ping.com/your-uuid-here - verify_tls: - type: boolean - description: | - Verify the TLS certificate of the ping URL host. - Defaults to true. - example: false - send_logs: - type: boolean - description: | - Send borgmatic logs to Healthchecks as part the - "finish", "fail", and "log" states. 
Defaults to - true. - example: false - ping_body_limit: - type: integer - description: | - Number of bytes of borgmatic logs to send to - Healthchecks, ideally the same as PING_BODY_LIMIT - configured on the Healthchecks server. Set to 0 to - send all logs and disable this truncation. Defaults - to 100000. - example: 200000 - states: - type: array - items: - type: string - enum: - - start - - finish - - fail - - log - uniqueItems: true - description: | - List of one or more monitoring states to ping for: - "start", "finish", "fail", and/or "log". Defaults to - pinging for all states. - example: - - finish - description: | - Configuration for a monitoring integration with - Healthchecks. Create an account at https://healthchecks.io - (or self-host Healthchecks) if you'd like to use this - service. See borgmatic monitoring documentation for details. - cronitor: - type: object - required: ['ping_url'] - additionalProperties: false - properties: - ping_url: - type: string - description: | - Cronitor ping URL to notify when a backup begins, - ends, or errors. - example: https://cronitor.link/d3x0c1 - description: | - Configuration for a monitoring integration with Cronitor. - Create an account at https://cronitor.io if you'd - like to use this service. See borgmatic monitoring - documentation for details. - pagerduty: - type: object - required: ['integration_key'] - additionalProperties: false - properties: - integration_key: - type: string - description: | - PagerDuty integration key used to notify PagerDuty - when a backup errors. - example: a177cad45bd374409f78906a810a3074 - description: | - Configuration for a monitoring integration with PagerDuty. - Create an account at https://www.pagerduty.com/ if you'd - like to use this service. See borgmatic monitoring - documentation for details. 
- cronhub: - type: object - required: ['ping_url'] - additionalProperties: false - properties: - ping_url: - type: string - description: | - Cronhub ping URL to notify when a backup begins, - ends, or errors. - example: https://cronhub.io/ping/1f5e3410-254c-5587 - description: | - Configuration for a monitoring integration with Crunhub. - Create an account at https://cronhub.io if you'd like to - use this service. See borgmatic monitoring documentation - for details. - umask: - type: integer - description: | - Umask used when executing hooks. Defaults to the umask that - borgmatic is run with. - example: 0077 + Configuration for a monitoring integration with Cronhub. Create an + account at https://cronhub.io if you'd like to use this service. See + borgmatic monitoring documentation for details. diff --git a/borgmatic/config/validate.py b/borgmatic/config/validate.py index 5835ead1..c13329f3 100644 --- a/borgmatic/config/validate.py +++ b/borgmatic/config/validate.py @@ -71,18 +71,15 @@ def apply_logical_validation(config_filename, parsed_configuration): below), run through any additional logical validation checks. If there are any such validation problems, raise a Validation_error. 
''' - location_repositories = parsed_configuration.get('location', {}).get('repositories') - check_repositories = parsed_configuration.get('consistency', {}).get('check_repositories', []) + repositories = parsed_configuration.get('repositories') + check_repositories = parsed_configuration.get('check_repositories', []) for repository in check_repositories: if not any( - repositories_match(repository, config_repository) - for config_repository in location_repositories + repositories_match(repository, config_repository) for config_repository in repositories ): raise Validation_error( config_filename, - ( - f'Unknown repository in the "consistency" section\'s "check_repositories": {repository}', - ), + (f'Unknown repository in "check_repositories": {repository}',), ) @@ -90,11 +87,15 @@ def parse_configuration(config_filename, schema_filename, overrides=None, resolv ''' Given the path to a config filename in YAML format, the path to a schema filename in a YAML rendition of JSON Schema format, a sequence of configuration file override strings in the form - of "section.option=value", return the parsed configuration as a data structure of nested dicts + of "option.suboption=value", return the parsed configuration as a data structure of nested dicts and lists corresponding to the schema. Example return value: - {'location': {'source_directories': ['/home', '/etc'], 'repository': 'hostname.borg'}, - 'retention': {'keep_daily': 7}, 'consistency': {'checks': ['repository', 'archives']}} + { + 'source_directories': ['/home', '/etc'], + 'repository': 'hostname.borg', + 'keep_daily': 7, + 'checks': ['repository', 'archives'], + } Also return a sequence of logging.LogRecord instances containing any warnings about the configuration. 
@@ -174,7 +175,7 @@ def guard_configuration_contains_repository(repository, configurations): tuple( config_repository for config in configurations.values() - for config_repository in config['location']['repositories'] + for config_repository in config['repositories'] if repositories_match(config_repository, repository) ) ) @@ -198,7 +199,7 @@ def guard_single_repository_selected(repository, configurations): tuple( config_repository for config in configurations.values() - for config_repository in config['location']['repositories'] + for config_repository in config['repositories'] ) ) diff --git a/borgmatic/hooks/cronhub.py b/borgmatic/hooks/cronhub.py index 05ada575..170f1916 100644 --- a/borgmatic/hooks/cronhub.py +++ b/borgmatic/hooks/cronhub.py @@ -22,7 +22,7 @@ def initialize_monitor( pass -def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_run): +def ping_monitor(hook_config, config, config_filename, state, monitoring_log_level, dry_run): ''' Ping the configured Cronhub URL, modified with the monitor.State. Use the given configuration filename in any log entries. If this is a dry run, then don't actually ping anything. diff --git a/borgmatic/hooks/cronitor.py b/borgmatic/hooks/cronitor.py index d669c09d..d57920cd 100644 --- a/borgmatic/hooks/cronitor.py +++ b/borgmatic/hooks/cronitor.py @@ -22,7 +22,7 @@ def initialize_monitor( pass -def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_run): +def ping_monitor(hook_config, config, config_filename, state, monitoring_log_level, dry_run): ''' Ping the configured Cronitor URL, modified with the monitor.State. Use the given configuration filename in any log entries. If this is a dry run, then don't actually ping anything. 
diff --git a/borgmatic/hooks/dispatch.py b/borgmatic/hooks/dispatch.py index fa7bd9b3..d98473ab 100644 --- a/borgmatic/hooks/dispatch.py +++ b/borgmatic/hooks/dispatch.py @@ -27,18 +27,17 @@ HOOK_NAME_TO_MODULE = { } -def call_hook(function_name, hooks, log_prefix, hook_name, *args, **kwargs): +def call_hook(function_name, config, log_prefix, hook_name, *args, **kwargs): ''' - Given the hooks configuration dict and a prefix to use in log entries, call the requested - function of the Python module corresponding to the given hook name. Supply that call with the - configuration for this hook (if any), the log prefix, and any given args and kwargs. Return any - return value. + Given a configuration dict and a prefix to use in log entries, call the requested function of + the Python module corresponding to the given hook name. Supply that call with the configuration + for this hook (if any), the log prefix, and any given args and kwargs. Return any return value. Raise ValueError if the hook name is unknown. Raise AttributeError if the function name is not found in the module. Raise anything else that the called function raises. ''' - config = hooks.get(hook_name, {}) + hook_config = config.get(hook_name, {}) try: module = HOOK_NAME_TO_MODULE[hook_name] @@ -46,15 +45,15 @@ def call_hook(function_name, hooks, log_prefix, hook_name, *args, **kwargs): raise ValueError(f'Unknown hook name: {hook_name}') logger.debug(f'{log_prefix}: Calling {hook_name} hook function {function_name}') - return getattr(module, function_name)(config, log_prefix, *args, **kwargs) + return getattr(module, function_name)(hook_config, config, log_prefix, *args, **kwargs) -def call_hooks(function_name, hooks, log_prefix, hook_names, *args, **kwargs): +def call_hooks(function_name, config, log_prefix, hook_names, *args, **kwargs): ''' - Given the hooks configuration dict and a prefix to use in log entries, call the requested - function of the Python module corresponding to each given hook name. 
Supply each call with the - configuration for that hook, the log prefix, and any given args and kwargs. Collect any return - values into a dict from hook name to return value. + Given a configuration dict and a prefix to use in log entries, call the requested function of + the Python module corresponding to each given hook name. Supply each call with the configuration + for that hook, the log prefix, and any given args and kwargs. Collect any return values into a + dict from hook name to return value. If the hook name is not present in the hooks configuration, then don't call the function for it and omit it from the return values. @@ -64,23 +63,23 @@ def call_hooks(function_name, hooks, log_prefix, hook_names, *args, **kwargs): Raise anything else that a called function raises. An error stops calls to subsequent functions. ''' return { - hook_name: call_hook(function_name, hooks, log_prefix, hook_name, *args, **kwargs) + hook_name: call_hook(function_name, config, log_prefix, hook_name, *args, **kwargs) for hook_name in hook_names - if hooks.get(hook_name) + if config.get(hook_name) } -def call_hooks_even_if_unconfigured(function_name, hooks, log_prefix, hook_names, *args, **kwargs): +def call_hooks_even_if_unconfigured(function_name, config, log_prefix, hook_names, *args, **kwargs): ''' - Given the hooks configuration dict and a prefix to use in log entries, call the requested - function of the Python module corresponding to each given hook name. Supply each call with the - configuration for that hook, the log prefix, and any given args and kwargs. Collect any return - values into a dict from hook name to return value. + Given a configuration dict and a prefix to use in log entries, call the requested function of + the Python module corresponding to each given hook name. Supply each call with the configuration + for that hook, the log prefix, and any given args and kwargs. Collect any return values into a + dict from hook name to return value. 
Raise AttributeError if the function name is not found in the module. Raise anything else that a called function raises. An error stops calls to subsequent functions. ''' return { - hook_name: call_hook(function_name, hooks, log_prefix, hook_name, *args, **kwargs) + hook_name: call_hook(function_name, config, log_prefix, hook_name, *args, **kwargs) for hook_name in hook_names } diff --git a/borgmatic/hooks/healthchecks.py b/borgmatic/hooks/healthchecks.py index 4cafc49f..511e9566 100644 --- a/borgmatic/hooks/healthchecks.py +++ b/borgmatic/hooks/healthchecks.py @@ -90,7 +90,7 @@ def initialize_monitor(hook_config, config_filename, monitoring_log_level, dry_r ) -def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_run): +def ping_monitor(hook_config, config, config_filename, state, monitoring_log_level, dry_run): ''' Ping the configured Healthchecks URL or UUID, modified with the monitor.State. Use the given configuration filename in any log entries, and log to Healthchecks with the giving log level. diff --git a/borgmatic/hooks/mongodb.py b/borgmatic/hooks/mongodb.py index f8899268..3c91a183 100644 --- a/borgmatic/hooks/mongodb.py +++ b/borgmatic/hooks/mongodb.py @@ -6,21 +6,20 @@ from borgmatic.hooks import dump logger = logging.getLogger(__name__) -def make_dump_path(location_config): # pragma: no cover +def make_dump_path(config): # pragma: no cover ''' - Make the dump path from the given location configuration and the name of this hook. + Make the dump path from the given configuration dict and the name of this hook. ''' return dump.make_database_dump_path( - location_config.get('borgmatic_source_directory'), 'mongodb_databases' + config.get('borgmatic_source_directory'), 'mongodb_databases' ) -def dump_databases(databases, log_prefix, location_config, dry_run): +def dump_databases(databases, config, log_prefix, dry_run): ''' Dump the given MongoDB databases to a named pipe. 
The databases are supplied as a sequence of - dicts, one dict describing each database as per the configuration schema. Use the given log - prefix in any log entries. Use the given location configuration dict to construct the - destination path. + dicts, one dict describing each database as per the configuration schema. Use the configuration + dict to construct the destination path and the given log prefix in any log entries. Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence. @@ -33,7 +32,7 @@ def dump_databases(databases, log_prefix, location_config, dry_run): for database in databases: name = database['name'] dump_filename = dump.make_database_dump_filename( - make_dump_path(location_config), name, database.get('hostname') + make_dump_path(config), name, database.get('hostname') ) dump_format = database.get('format', 'archive') @@ -82,35 +81,33 @@ def build_dump_command(database, dump_filename, dump_format): return command -def remove_database_dumps(databases, log_prefix, location_config, dry_run): # pragma: no cover +def remove_database_dumps(databases, config, log_prefix, dry_run): # pragma: no cover ''' Remove all database dump files for this hook regardless of the given databases. Use the log - prefix in any log entries. Use the given location configuration dict to construct the - destination path. If this is a dry run, then don't actually remove anything. + prefix in any log entries. Use the given configuration dict to construct the destination path. + If this is a dry run, then don't actually remove anything. 
''' - dump.remove_database_dumps(make_dump_path(location_config), 'MongoDB', log_prefix, dry_run) + dump.remove_database_dumps(make_dump_path(config), 'MongoDB', log_prefix, dry_run) -def make_database_dump_pattern( - databases, log_prefix, location_config, name=None -): # pragma: no cover +def make_database_dump_pattern(databases, config, log_prefix, name=None): # pragma: no cover ''' - Given a sequence of configurations dicts, a prefix to log with, a location configuration dict, + Given a sequence of database configurations dicts, a configuration dict, a prefix to log with, and a database name to match, return the corresponding glob patterns to match the database dump in an archive. ''' - return dump.make_database_dump_filename(make_dump_path(location_config), name, hostname='*') + return dump.make_database_dump_filename(make_dump_path(config), name, hostname='*') def restore_database_dump( - database_config, log_prefix, location_config, dry_run, extract_process, connection_params + database_config, config, log_prefix, dry_run, extract_process, connection_params ): ''' Restore the given MongoDB database from an extract stream. The database is supplied as a one-element sequence containing a dict describing the database, as per the configuration schema. - Use the given log prefix in any log entries. If this is a dry run, then don't actually restore - anything. Trigger the given active extract process (an instance of subprocess.Popen) to produce - output to consume. + Use the configuration dict to construct the destination path and the given log prefix in any log + entries. If this is a dry run, then don't actually restore anything. Trigger the given active + extract process (an instance of subprocess.Popen) to produce output to consume. If the extract process is None, then restore the dump from the filesystem rather than from an extract stream. 
@@ -122,7 +119,7 @@ def restore_database_dump( database = database_config[0] dump_filename = dump.make_database_dump_filename( - make_dump_path(location_config), database['name'], database.get('hostname') + make_dump_path(config), database['name'], database.get('hostname') ) restore_command = build_restore_command( extract_process, database, dump_filename, connection_params diff --git a/borgmatic/hooks/mysql.py b/borgmatic/hooks/mysql.py index aee13d80..8ca2f6ba 100644 --- a/borgmatic/hooks/mysql.py +++ b/borgmatic/hooks/mysql.py @@ -12,13 +12,11 @@ from borgmatic.hooks import dump logger = logging.getLogger(__name__) -def make_dump_path(location_config): # pragma: no cover +def make_dump_path(config): # pragma: no cover ''' - Make the dump path from the given location configuration and the name of this hook. + Make the dump path from the given configuration dict and the name of this hook. ''' - return dump.make_database_dump_path( - location_config.get('borgmatic_source_directory'), 'mysql_databases' - ) + return dump.make_database_dump_path(config.get('borgmatic_source_directory'), 'mysql_databases') SYSTEM_DATABASE_NAMES = ('information_schema', 'mysql', 'performance_schema', 'sys') @@ -106,12 +104,11 @@ def execute_dump_command( ) -def dump_databases(databases, log_prefix, location_config, dry_run): +def dump_databases(databases, config, log_prefix, dry_run): ''' Dump the given MySQL/MariaDB databases to a named pipe. The databases are supplied as a sequence - of dicts, one dict describing each database as per the configuration schema. Use the given log - prefix in any log entries. Use the given location configuration dict to construct the - destination path. + of dicts, one dict describing each database as per the configuration schema. Use the given + configuration dict to construct the destination path and the given log prefix in any log entries. Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named pipe. 
But if this is a dry run, then don't actually dump anything and return an empty sequence. @@ -122,7 +119,7 @@ def dump_databases(databases, log_prefix, location_config, dry_run): logger.info(f'{log_prefix}: Dumping MySQL databases{dry_run_label}') for database in databases: - dump_path = make_dump_path(location_config) + dump_path = make_dump_path(config) extra_environment = {'MYSQL_PWD': database['password']} if 'password' in database else None dump_database_names = database_names_to_dump( database, extra_environment, log_prefix, dry_run @@ -165,28 +162,26 @@ def dump_databases(databases, log_prefix, location_config, dry_run): return [process for process in processes if process] -def remove_database_dumps(databases, log_prefix, location_config, dry_run): # pragma: no cover +def remove_database_dumps(databases, config, log_prefix, dry_run): # pragma: no cover ''' - Remove all database dump files for this hook regardless of the given databases. Use the log - prefix in any log entries. Use the given location configuration dict to construct the - destination path. If this is a dry run, then don't actually remove anything. + Remove all database dump files for this hook regardless of the given databases. Use the given + configuration dict to construct the destination path and the log prefix in any log entries. If + this is a dry run, then don't actually remove anything. ''' - dump.remove_database_dumps(make_dump_path(location_config), 'MySQL', log_prefix, dry_run) + dump.remove_database_dumps(make_dump_path(config), 'MySQL', log_prefix, dry_run) -def make_database_dump_pattern( - databases, log_prefix, location_config, name=None -): # pragma: no cover +def make_database_dump_pattern(databases, config, log_prefix, name=None): # pragma: no cover ''' - Given a sequence of configurations dicts, a prefix to log with, a location configuration dict, - and a database name to match, return the corresponding glob patterns to match the database dump - in an archive. 
+ Given a sequence of configurations dicts, a configuration dict, a prefix to log with, and a + database name to match, return the corresponding glob patterns to match the database dump in an + archive. ''' - return dump.make_database_dump_filename(make_dump_path(location_config), name, hostname='*') + return dump.make_database_dump_filename(make_dump_path(config), name, hostname='*') def restore_database_dump( - database_config, log_prefix, location_config, dry_run, extract_process, connection_params + database_config, config, log_prefix, dry_run, extract_process, connection_params ): ''' Restore the given MySQL/MariaDB database from an extract stream. The database is supplied as a diff --git a/borgmatic/hooks/ntfy.py b/borgmatic/hooks/ntfy.py index 8a6f0fb8..50aa387a 100644 --- a/borgmatic/hooks/ntfy.py +++ b/borgmatic/hooks/ntfy.py @@ -14,7 +14,7 @@ def initialize_monitor( pass -def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_run): +def ping_monitor(hook_config, config, config_filename, state, monitoring_log_level, dry_run): ''' Ping the configured Ntfy topic. Use the given configuration filename in any log entries. If this is a dry run, then don't actually ping anything. diff --git a/borgmatic/hooks/pagerduty.py b/borgmatic/hooks/pagerduty.py index 561b1e24..382a402f 100644 --- a/borgmatic/hooks/pagerduty.py +++ b/borgmatic/hooks/pagerduty.py @@ -21,7 +21,7 @@ def initialize_monitor( pass -def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_run): +def ping_monitor(hook_config, config, config_filename, state, monitoring_log_level, dry_run): ''' If this is an error state, create a PagerDuty event with the configured integration key. Use the given configuration filename in any log entries. 
If this is a dry run, then don't actually diff --git a/borgmatic/hooks/postgresql.py b/borgmatic/hooks/postgresql.py index ecb5f3c3..7bef5a70 100644 --- a/borgmatic/hooks/postgresql.py +++ b/borgmatic/hooks/postgresql.py @@ -14,12 +14,12 @@ from borgmatic.hooks import dump logger = logging.getLogger(__name__) -def make_dump_path(location_config): # pragma: no cover +def make_dump_path(config): # pragma: no cover ''' - Make the dump path from the given location configuration and the name of this hook. + Make the dump path from the given configuration dict and the name of this hook. ''' return dump.make_database_dump_path( - location_config.get('borgmatic_source_directory'), 'postgresql_databases' + config.get('borgmatic_source_directory'), 'postgresql_databases' ) @@ -92,12 +92,12 @@ def database_names_to_dump(database, extra_environment, log_prefix, dry_run): ) -def dump_databases(databases, log_prefix, location_config, dry_run): +def dump_databases(databases, config, log_prefix, dry_run): ''' Dump the given PostgreSQL databases to a named pipe. The databases are supplied as a sequence of - dicts, one dict describing each database as per the configuration schema. Use the given log - prefix in any log entries. Use the given location configuration dict to construct the - destination path. + dicts, one dict describing each database as per the configuration schema. Use the given + configuration dict to construct the destination path and the given log prefix in any log + entries. Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence. 
@@ -111,7 +111,7 @@ def dump_databases(databases, log_prefix, location_config, dry_run): for database in databases: extra_environment = make_extra_environment(database) - dump_path = make_dump_path(location_config) + dump_path = make_dump_path(config) dump_database_names = database_names_to_dump( database, extra_environment, log_prefix, dry_run ) @@ -183,35 +183,33 @@ def dump_databases(databases, log_prefix, location_config, dry_run): return processes -def remove_database_dumps(databases, log_prefix, location_config, dry_run): # pragma: no cover +def remove_database_dumps(databases, config, log_prefix, dry_run): # pragma: no cover ''' - Remove all database dump files for this hook regardless of the given databases. Use the log - prefix in any log entries. Use the given location configuration dict to construct the - destination path. If this is a dry run, then don't actually remove anything. + Remove all database dump files for this hook regardless of the given databases. Use the given + configuration dict to construct the destination path and the log prefix in any log entries. If + this is a dry run, then don't actually remove anything. ''' - dump.remove_database_dumps(make_dump_path(location_config), 'PostgreSQL', log_prefix, dry_run) + dump.remove_database_dumps(make_dump_path(config), 'PostgreSQL', log_prefix, dry_run) -def make_database_dump_pattern( - databases, log_prefix, location_config, name=None -): # pragma: no cover +def make_database_dump_pattern(databases, config, log_prefix, name=None): # pragma: no cover ''' - Given a sequence of configurations dicts, a prefix to log with, a location configuration dict, - and a database name to match, return the corresponding glob patterns to match the database dump - in an archive. + Given a sequence of configurations dicts, a configuration dict, a prefix to log with, and a + database name to match, return the corresponding glob patterns to match the database dump in an + archive. 
''' - return dump.make_database_dump_filename(make_dump_path(location_config), name, hostname='*') + return dump.make_database_dump_filename(make_dump_path(config), name, hostname='*') def restore_database_dump( - database_config, log_prefix, location_config, dry_run, extract_process, connection_params + database_config, config, log_prefix, dry_run, extract_process, connection_params ): ''' Restore the given PostgreSQL database from an extract stream. The database is supplied as a one-element sequence containing a dict describing the database, as per the configuration schema. - Use the given log prefix in any log entries. If this is a dry run, then don't actually restore - anything. Trigger the given active extract process (an instance of subprocess.Popen) to produce - output to consume. + Use the given configuration dict to construct the destination path and the given log prefix in + any log entries. If this is a dry run, then don't actually restore anything. Trigger the given + active extract process (an instance of subprocess.Popen) to produce output to consume. If the extract process is None, then restore the dump from the filesystem rather than from an extract stream. 
@@ -236,7 +234,7 @@ def restore_database_dump( all_databases = bool(database['name'] == 'all') dump_filename = dump.make_database_dump_filename( - make_dump_path(location_config), database['name'], database.get('hostname') + make_dump_path(config), database['name'], database.get('hostname') ) psql_command = shlex.split(database.get('psql_command') or 'psql') analyze_command = ( diff --git a/borgmatic/hooks/sqlite.py b/borgmatic/hooks/sqlite.py index 21b1455a..109f253a 100644 --- a/borgmatic/hooks/sqlite.py +++ b/borgmatic/hooks/sqlite.py @@ -7,21 +7,21 @@ from borgmatic.hooks import dump logger = logging.getLogger(__name__) -def make_dump_path(location_config): # pragma: no cover +def make_dump_path(config): # pragma: no cover ''' - Make the dump path from the given location configuration and the name of this hook. + Make the dump path from the given configuration dict and the name of this hook. ''' return dump.make_database_dump_path( - location_config.get('borgmatic_source_directory'), 'sqlite_databases' + config.get('borgmatic_source_directory'), 'sqlite_databases' ) -def dump_databases(databases, log_prefix, location_config, dry_run): +def dump_databases(databases, config, log_prefix, dry_run): ''' Dump the given SQLite3 databases to a file. The databases are supplied as a sequence of - configuration dicts, as per the configuration schema. Use the given log prefix in any log - entries. Use the given location configuration dict to construct the destination path. If this - is a dry run, then don't actually dump anything. + configuration dicts, as per the configuration schema. Use the given configuration dict to + construct the destination path and the given log prefix in any log entries. If this is a dry + run, then don't actually dump anything. 
''' dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else '' processes = [] @@ -38,7 +38,7 @@ def dump_databases(databases, log_prefix, location_config, dry_run): f'{log_prefix}: No SQLite database at {database_path}; An empty database will be created and dumped' ) - dump_path = make_dump_path(location_config) + dump_path = make_dump_path(config) dump_filename = dump.make_database_dump_filename(dump_path, database['name']) if os.path.exists(dump_filename): logger.warning( @@ -65,28 +65,26 @@ def dump_databases(databases, log_prefix, location_config, dry_run): return processes -def remove_database_dumps(databases, log_prefix, location_config, dry_run): # pragma: no cover +def remove_database_dumps(databases, config, log_prefix, dry_run): # pragma: no cover ''' Remove the given SQLite3 database dumps from the filesystem. The databases are supplied as a - sequence of configuration dicts, as per the configuration schema. Use the given log prefix in - any log entries. Use the given location configuration dict to construct the destination path. - If this is a dry run, then don't actually remove anything. + sequence of configuration dicts, as per the configuration schema. Use the given configuration + dict to construct the destination path and the given log prefix in any log entries. If this is a + dry run, then don't actually remove anything. ''' - dump.remove_database_dumps(make_dump_path(location_config), 'SQLite', log_prefix, dry_run) + dump.remove_database_dumps(make_dump_path(config), 'SQLite', log_prefix, dry_run) -def make_database_dump_pattern( - databases, log_prefix, location_config, name=None -): # pragma: no cover +def make_database_dump_pattern(databases, config, log_prefix, name=None): # pragma: no cover ''' Make a pattern that matches the given SQLite3 databases. The databases are supplied as a sequence of configuration dicts, as per the configuration schema. 
''' - return dump.make_database_dump_filename(make_dump_path(location_config), name) + return dump.make_database_dump_filename(make_dump_path(config), name) def restore_database_dump( - database_config, log_prefix, location_config, dry_run, extract_process, connection_params + database_config, config, log_prefix, dry_run, extract_process, connection_params ): ''' Restore the given SQLite3 database from an extract stream. The database is supplied as a diff --git a/tests/integration/borg/test_commands.py b/tests/integration/borg/test_commands.py index a76d43f1..9a121215 100644 --- a/tests/integration/borg/test_commands.py +++ b/tests/integration/borg/test_commands.py @@ -84,7 +84,6 @@ def test_prune_archives_command_does_not_duplicate_flags_or_raise(): False, 'repo', {}, - {}, '2.3.4', fuzz_argument(arguments, argument_name), argparse.Namespace(log_json=False), diff --git a/tests/integration/config/test_generate.py b/tests/integration/config/test_generate.py index cf4b3945..5df1825a 100644 --- a/tests/integration/config/test_generate.py +++ b/tests/integration/config/test_generate.py @@ -13,43 +13,43 @@ def test_insert_newline_before_comment_does_not_raise(): config = module.yaml.comments.CommentedMap([(field_name, 33)]) config.yaml_set_comment_before_after_key(key=field_name, before='Comment') - module._insert_newline_before_comment(config, field_name) + module.insert_newline_before_comment(config, field_name) def test_comment_out_line_skips_blank_line(): line = ' \n' - assert module._comment_out_line(line) == line + assert module.comment_out_line(line) == line def test_comment_out_line_skips_already_commented_out_line(): line = ' # foo' - assert module._comment_out_line(line) == line + assert module.comment_out_line(line) == line def test_comment_out_line_comments_section_name(): line = 'figgy-pudding:' - assert module._comment_out_line(line) == '# ' + line + assert module.comment_out_line(line) == '# ' + line def test_comment_out_line_comments_indented_option(): 
line = ' enabled: true' - assert module._comment_out_line(line) == ' # enabled: true' + assert module.comment_out_line(line) == ' # enabled: true' def test_comment_out_line_comments_twice_indented_option(): line = ' - item' - assert module._comment_out_line(line) == ' # - item' + assert module.comment_out_line(line) == ' # - item' def test_comment_out_optional_configuration_comments_optional_config_only(): # The "# COMMENT_OUT" comment is a sentinel used to express that the following key is optional. # It's stripped out of the final output. - flexmock(module)._comment_out_line = lambda line: '# ' + line + flexmock(module).comment_out_line = lambda line: '# ' + line config = ''' # COMMENT_OUT foo: @@ -84,7 +84,7 @@ location: # other: thing ''' - assert module._comment_out_optional_configuration(config.strip()) == expected_config.strip() + assert module.comment_out_optional_configuration(config.strip()) == expected_config.strip() def test_render_configuration_converts_configuration_to_yaml_string(): @@ -204,10 +204,10 @@ def test_generate_sample_configuration_does_not_raise(): builtins = flexmock(sys.modules['builtins']) builtins.should_receive('open').with_args('schema.yaml').and_return('') flexmock(module.yaml).should_receive('round_trip_load') - flexmock(module).should_receive('_schema_to_sample_configuration') + flexmock(module).should_receive('schema_to_sample_configuration') flexmock(module).should_receive('merge_source_configuration_into_destination') flexmock(module).should_receive('render_configuration') - flexmock(module).should_receive('_comment_out_optional_configuration') + flexmock(module).should_receive('comment_out_optional_configuration') flexmock(module).should_receive('write_configuration') module.generate_sample_configuration(False, None, 'dest.yaml', 'schema.yaml') @@ -219,10 +219,10 @@ def test_generate_sample_configuration_with_source_filename_does_not_raise(): flexmock(module.yaml).should_receive('round_trip_load') 
flexmock(module.load).should_receive('load_configuration') flexmock(module.normalize).should_receive('normalize') - flexmock(module).should_receive('_schema_to_sample_configuration') + flexmock(module).should_receive('schema_to_sample_configuration') flexmock(module).should_receive('merge_source_configuration_into_destination') flexmock(module).should_receive('render_configuration') - flexmock(module).should_receive('_comment_out_optional_configuration') + flexmock(module).should_receive('comment_out_optional_configuration') flexmock(module).should_receive('write_configuration') module.generate_sample_configuration(False, 'source.yaml', 'dest.yaml', 'schema.yaml') @@ -232,10 +232,10 @@ def test_generate_sample_configuration_with_dry_run_does_not_write_file(): builtins = flexmock(sys.modules['builtins']) builtins.should_receive('open').with_args('schema.yaml').and_return('') flexmock(module.yaml).should_receive('round_trip_load') - flexmock(module).should_receive('_schema_to_sample_configuration') + flexmock(module).should_receive('schema_to_sample_configuration') flexmock(module).should_receive('merge_source_configuration_into_destination') flexmock(module).should_receive('render_configuration') - flexmock(module).should_receive('_comment_out_optional_configuration') + flexmock(module).should_receive('comment_out_optional_configuration') flexmock(module).should_receive('write_configuration').never() module.generate_sample_configuration(True, None, 'dest.yaml', 'schema.yaml') diff --git a/tests/integration/config/test_validate.py b/tests/integration/config/test_validate.py index 1abc3ba5..d446421e 100644 --- a/tests/integration/config/test_validate.py +++ b/tests/integration/config/test_validate.py @@ -40,35 +40,32 @@ def mock_config_and_schema(config_yaml, schema_yaml=None): def test_parse_configuration_transforms_file_into_mapping(): mock_config_and_schema( ''' - location: - source_directories: - - /home - - /etc + source_directories: + - /home + - /etc - 
repositories: - - path: hostname.borg + repositories: + - path: hostname.borg - retention: - keep_minutely: 60 - keep_hourly: 24 - keep_daily: 7 + keep_minutely: 60 + keep_hourly: 24 + keep_daily: 7 - consistency: - checks: - - name: repository - - name: archives + checks: + - name: repository + - name: archives ''' ) config, logs = module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml') assert config == { - 'location': { - 'source_directories': ['/home', '/etc'], - 'repositories': [{'path': 'hostname.borg'}], - }, - 'retention': {'keep_daily': 7, 'keep_hourly': 24, 'keep_minutely': 60}, - 'consistency': {'checks': [{'name': 'repository'}, {'name': 'archives'}]}, + 'source_directories': ['/home', '/etc'], + 'repositories': [{'path': 'hostname.borg'}], + 'keep_daily': 7, + 'keep_hourly': 24, + 'keep_minutely': 60, + 'checks': [{'name': 'repository'}, {'name': 'archives'}], } assert logs == [] @@ -78,22 +75,19 @@ def test_parse_configuration_passes_through_quoted_punctuation(): mock_config_and_schema( f''' - location: - source_directories: - - "/home/{escaped_punctuation}" + source_directories: + - "/home/{escaped_punctuation}" - repositories: - - path: test.borg + repositories: + - path: test.borg ''' ) config, logs = module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml') assert config == { - 'location': { - 'source_directories': [f'/home/{string.punctuation}'], - 'repositories': [{'path': 'test.borg'}], - } + 'source_directories': [f'/home/{string.punctuation}'], + 'repositories': [{'path': 'test.borg'}], } assert logs == [] @@ -101,26 +95,22 @@ def test_parse_configuration_passes_through_quoted_punctuation(): def test_parse_configuration_with_schema_lacking_examples_does_not_raise(): mock_config_and_schema( ''' - location: - source_directories: - - /home + source_directories: + - /home - repositories: - - path: hostname.borg + repositories: + - path: hostname.borg ''', ''' map: - location: + source_directories: required: true - map: - 
source_directories: - required: true - seq: - - type: scalar - repositories: - required: true - seq: - - type: scalar + seq: + - type: scalar + repositories: + required: true + seq: + - type: scalar ''', ) @@ -130,12 +120,11 @@ def test_parse_configuration_with_schema_lacking_examples_does_not_raise(): def test_parse_configuration_inlines_include(): mock_config_and_schema( ''' - location: - source_directories: - - /home + source_directories: + - /home - repositories: - - path: hostname.borg + repositories: + - path: hostname.borg retention: !include include.yaml @@ -154,25 +143,25 @@ def test_parse_configuration_inlines_include(): config, logs = module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml') assert config == { - 'location': {'source_directories': ['/home'], 'repositories': [{'path': 'hostname.borg'}]}, - 'retention': {'keep_daily': 7, 'keep_hourly': 24}, + 'source_directories': ['/home'], + 'repositories': [{'path': 'hostname.borg'}], + 'keep_daily': 7, + 'keep_hourly': 24, } - assert logs == [] + assert len(logs) == 1 def test_parse_configuration_merges_include(): mock_config_and_schema( ''' - location: - source_directories: - - /home + source_directories: + - /home - repositories: - - path: hostname.borg + repositories: + - path: hostname.borg - retention: - keep_daily: 1 - <<: !include include.yaml + keep_daily: 1 + <<: !include include.yaml ''' ) builtins = flexmock(sys.modules['builtins']) @@ -188,8 +177,10 @@ def test_parse_configuration_merges_include(): config, logs = module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml') assert config == { - 'location': {'source_directories': ['/home'], 'repositories': [{'path': 'hostname.borg'}]}, - 'retention': {'keep_daily': 1, 'keep_hourly': 24}, + 'source_directories': ['/home'], + 'repositories': [{'path': 'hostname.borg'}], + 'keep_daily': 1, + 'keep_hourly': 24, } assert logs == [] @@ -218,10 +209,9 @@ def test_parse_configuration_raises_for_syntax_error(): def 
test_parse_configuration_raises_for_validation_error(): mock_config_and_schema( ''' - location: - source_directories: yes - repositories: - - path: hostname.borg + source_directories: yes + repositories: + - path: hostname.borg ''' ) @@ -232,14 +222,13 @@ def test_parse_configuration_raises_for_validation_error(): def test_parse_configuration_applies_overrides(): mock_config_and_schema( ''' - location: - source_directories: - - /home + source_directories: + - /home - repositories: - - path: hostname.borg + repositories: + - path: hostname.borg - local_path: borg1 + local_path: borg1 ''' ) @@ -248,11 +237,9 @@ def test_parse_configuration_applies_overrides(): ) assert config == { - 'location': { - 'source_directories': ['/home'], - 'repositories': [{'path': 'hostname.borg'}], - 'local_path': 'borg2', - } + 'source_directories': ['/home'], + 'repositories': [{'path': 'hostname.borg'}], + 'local_path': 'borg2', } assert logs == [] @@ -274,10 +261,8 @@ def test_parse_configuration_applies_normalization(): config, logs = module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml') assert config == { - 'location': { - 'source_directories': ['/home'], - 'repositories': [{'path': 'hostname.borg'}], - 'exclude_if_present': ['.nobackup'], - } + 'source_directories': ['/home'], + 'repositories': [{'path': 'hostname.borg'}], + 'exclude_if_present': ['.nobackup'], } assert logs diff --git a/tests/unit/actions/test_borg.py b/tests/unit/actions/test_borg.py index 2e03ec9c..6cabe0d9 100644 --- a/tests/unit/actions/test_borg.py +++ b/tests/unit/actions/test_borg.py @@ -14,7 +14,7 @@ def test_run_borg_does_not_raise(): module.run_borg( repository={'path': 'repos'}, - storage={}, + config={}, local_borg_version=None, global_arguments=flexmock(log_json=False), borg_arguments=borg_arguments, diff --git a/tests/unit/actions/test_break_lock.py b/tests/unit/actions/test_break_lock.py index 5949d7c1..edc60f60 100644 --- a/tests/unit/actions/test_break_lock.py +++ 
b/tests/unit/actions/test_break_lock.py @@ -11,7 +11,7 @@ def test_run_break_lock_does_not_raise(): module.run_break_lock( repository={'path': 'repo'}, - storage={}, + config={}, local_borg_version=None, break_lock_arguments=break_lock_arguments, global_arguments=flexmock(), diff --git a/tests/unit/actions/test_check.py b/tests/unit/actions/test_check.py index 05f63b6a..72798e02 100644 --- a/tests/unit/actions/test_check.py +++ b/tests/unit/actions/test_check.py @@ -5,9 +5,6 @@ from borgmatic.actions import check as module def test_run_check_calls_hooks_for_configured_repository(): flexmock(module.logger).answer = lambda message: None - flexmock(module.borgmatic.config.checks).should_receive( - 'repository_enabled_for_checks' - ).and_return(True) flexmock(module.borgmatic.config.validate).should_receive('repositories_match').never() flexmock(module.borgmatic.borg.check).should_receive('check_archives').once() flexmock(module.borgmatic.hooks.command).should_receive('execute_hook').times(2) @@ -23,10 +20,7 @@ def test_run_check_calls_hooks_for_configured_repository(): module.run_check( config_filename='test.yaml', repository={'path': 'repo'}, - location={'repositories': ['repo']}, - storage={}, - consistency={}, - hooks={}, + config={'repositories': ['repo']}, hook_context={}, local_borg_version=None, check_arguments=check_arguments, @@ -54,10 +48,7 @@ def test_run_check_runs_with_selected_repository(): module.run_check( config_filename='test.yaml', repository={'path': 'repo'}, - location={'repositories': ['repo']}, - storage={}, - consistency={}, - hooks={}, + config={'repositories': ['repo']}, hook_context={}, local_borg_version=None, check_arguments=check_arguments, @@ -85,10 +76,7 @@ def test_run_check_bails_if_repository_does_not_match(): module.run_check( config_filename='test.yaml', repository={'path': 'repo'}, - location={'repositories': ['repo']}, - storage={}, - consistency={}, - hooks={}, + config={'repositories': ['repo']}, hook_context={}, 
local_borg_version=None, check_arguments=check_arguments, diff --git a/tests/unit/actions/test_compact.py b/tests/unit/actions/test_compact.py index fbd4f905..0df83fdd 100644 --- a/tests/unit/actions/test_compact.py +++ b/tests/unit/actions/test_compact.py @@ -17,9 +17,7 @@ def test_compact_actions_calls_hooks_for_configured_repository(): module.run_compact( config_filename='test.yaml', repository={'path': 'repo'}, - storage={}, - retention={}, - hooks={}, + config={}, hook_context={}, local_borg_version=None, compact_arguments=compact_arguments, @@ -45,9 +43,7 @@ def test_compact_runs_with_selected_repository(): module.run_compact( config_filename='test.yaml', repository={'path': 'repo'}, - storage={}, - retention={}, - hooks={}, + config={}, hook_context={}, local_borg_version=None, compact_arguments=compact_arguments, @@ -73,9 +69,7 @@ def test_compact_bails_if_repository_does_not_match(): module.run_compact( config_filename='test.yaml', repository={'path': 'repo'}, - storage={}, - retention={}, - hooks={}, + config={}, hook_context={}, local_borg_version=None, compact_arguments=compact_arguments, diff --git a/tests/unit/actions/test_create.py b/tests/unit/actions/test_create.py index de94fd7e..355e544a 100644 --- a/tests/unit/actions/test_create.py +++ b/tests/unit/actions/test_create.py @@ -28,9 +28,7 @@ def test_run_create_executes_and_calls_hooks_for_configured_repository(): module.run_create( config_filename='test.yaml', repository={'path': 'repo'}, - location={}, - storage={}, - hooks={}, + config={}, hook_context={}, local_borg_version=None, create_arguments=create_arguments, @@ -49,6 +47,11 @@ def test_run_create_runs_with_selected_repository(): ).once().and_return(True) flexmock(module.borgmatic.borg.create).should_receive('create_archive').once() flexmock(module).should_receive('create_borgmatic_manifest').once() + flexmock(module.borgmatic.hooks.command).should_receive('execute_hook').times(2) + 
flexmock(module.borgmatic.hooks.dispatch).should_receive('call_hooks').and_return({}) + flexmock(module.borgmatic.hooks.dispatch).should_receive( + 'call_hooks_even_if_unconfigured' + ).and_return({}) create_arguments = flexmock( repository=flexmock(), progress=flexmock(), @@ -62,9 +65,7 @@ def test_run_create_runs_with_selected_repository(): module.run_create( config_filename='test.yaml', repository={'path': 'repo'}, - location={}, - storage={}, - hooks={}, + config={}, hook_context={}, local_borg_version=None, create_arguments=create_arguments, @@ -96,9 +97,7 @@ def test_run_create_bails_if_repository_does_not_match(): module.run_create( config_filename='test.yaml', repository='repo', - location={}, - storage={}, - hooks={}, + config={}, hook_context={}, local_borg_version=None, create_arguments=create_arguments, diff --git a/tests/unit/actions/test_export_tar.py b/tests/unit/actions/test_export_tar.py index 6741d427..aea54af3 100644 --- a/tests/unit/actions/test_export_tar.py +++ b/tests/unit/actions/test_export_tar.py @@ -20,7 +20,7 @@ def test_run_export_tar_does_not_raise(): module.run_export_tar( repository={'path': 'repo'}, - storage={}, + config={}, local_borg_version=None, export_tar_arguments=export_tar_arguments, global_arguments=global_arguments, diff --git a/tests/unit/actions/test_extract.py b/tests/unit/actions/test_extract.py index 32b93b4e..7fadf4d7 100644 --- a/tests/unit/actions/test_extract.py +++ b/tests/unit/actions/test_extract.py @@ -21,9 +21,7 @@ def test_run_extract_calls_hooks(): module.run_extract( config_filename='test.yaml', repository={'path': 'repo'}, - location={'repositories': ['repo']}, - storage={}, - hooks={}, + config={'repositories': ['repo']}, hook_context={}, local_borg_version=None, extract_arguments=extract_arguments, diff --git a/tests/unit/actions/test_info.py b/tests/unit/actions/test_info.py index 748d866f..1a5f5533 100644 --- a/tests/unit/actions/test_info.py +++ b/tests/unit/actions/test_info.py @@ -18,7 +18,7 @@ 
def test_run_info_does_not_raise(): list( module.run_info( repository={'path': 'repo'}, - storage={}, + config={}, local_borg_version=None, info_arguments=info_arguments, global_arguments=flexmock(log_json=False), diff --git a/tests/unit/actions/test_list.py b/tests/unit/actions/test_list.py index 07a1a58c..dd3b1326 100644 --- a/tests/unit/actions/test_list.py +++ b/tests/unit/actions/test_list.py @@ -18,7 +18,7 @@ def test_run_list_does_not_raise(): list( module.run_list( repository={'path': 'repo'}, - storage={}, + config={}, local_borg_version=None, list_arguments=list_arguments, global_arguments=flexmock(log_json=False), diff --git a/tests/unit/actions/test_mount.py b/tests/unit/actions/test_mount.py index 743747d2..46607690 100644 --- a/tests/unit/actions/test_mount.py +++ b/tests/unit/actions/test_mount.py @@ -18,7 +18,7 @@ def test_run_mount_does_not_raise(): module.run_mount( repository={'path': 'repo'}, - storage={}, + config={}, local_borg_version=None, mount_arguments=mount_arguments, global_arguments=flexmock(log_json=False), diff --git a/tests/unit/actions/test_prune.py b/tests/unit/actions/test_prune.py index 7af7ea77..d5dd182e 100644 --- a/tests/unit/actions/test_prune.py +++ b/tests/unit/actions/test_prune.py @@ -14,9 +14,7 @@ def test_run_prune_calls_hooks_for_configured_repository(): module.run_prune( config_filename='test.yaml', repository={'path': 'repo'}, - storage={}, - retention={}, - hooks={}, + config={}, hook_context={}, local_borg_version=None, prune_arguments=prune_arguments, @@ -39,9 +37,7 @@ def test_run_prune_runs_with_selected_repository(): module.run_prune( config_filename='test.yaml', repository={'path': 'repo'}, - storage={}, - retention={}, - hooks={}, + config={}, hook_context={}, local_borg_version=None, prune_arguments=prune_arguments, @@ -64,9 +60,7 @@ def test_run_prune_bails_if_repository_does_not_match(): module.run_prune( config_filename='test.yaml', repository='repo', - storage={}, - retention={}, - hooks={}, + 
config={}, hook_context={}, local_borg_version=None, prune_arguments=prune_arguments, diff --git a/tests/unit/actions/test_rcreate.py b/tests/unit/actions/test_rcreate.py index b77fa757..0de8d5df 100644 --- a/tests/unit/actions/test_rcreate.py +++ b/tests/unit/actions/test_rcreate.py @@ -19,7 +19,7 @@ def test_run_rcreate_does_not_raise(): module.run_rcreate( repository={'path': 'repo'}, - storage={}, + config={}, local_borg_version=None, rcreate_arguments=arguments, global_arguments=flexmock(dry_run=False), @@ -46,7 +46,7 @@ def test_run_rcreate_bails_if_repository_does_not_match(): module.run_rcreate( repository={'path': 'repo'}, - storage={}, + config={}, local_borg_version=None, rcreate_arguments=arguments, global_arguments=flexmock(dry_run=False), diff --git a/tests/unit/actions/test_restore.py b/tests/unit/actions/test_restore.py index 4e19964f..35488808 100644 --- a/tests/unit/actions/test_restore.py +++ b/tests/unit/actions/test_restore.py @@ -6,7 +6,7 @@ import borgmatic.actions.restore as module def test_get_configured_database_matches_database_by_name(): assert module.get_configured_database( - hooks={ + config={ 'other_databases': [{'name': 'other'}], 'postgresql_databases': [{'name': 'foo'}, {'name': 'bar'}], }, @@ -18,7 +18,7 @@ def test_get_configured_database_matches_database_by_name(): def test_get_configured_database_matches_nothing_when_database_name_not_configured(): assert module.get_configured_database( - hooks={'postgresql_databases': [{'name': 'foo'}, {'name': 'bar'}]}, + config={'postgresql_databases': [{'name': 'foo'}, {'name': 'bar'}]}, archive_database_names={'postgresql_databases': ['foo']}, hook_name='postgresql_databases', database_name='quux', @@ -27,7 +27,7 @@ def test_get_configured_database_matches_nothing_when_database_name_not_configur def test_get_configured_database_matches_nothing_when_database_name_not_in_archive(): assert module.get_configured_database( - hooks={'postgresql_databases': [{'name': 'foo'}, {'name': 'bar'}]}, + 
config={'postgresql_databases': [{'name': 'foo'}, {'name': 'bar'}]}, archive_database_names={'postgresql_databases': ['bar']}, hook_name='postgresql_databases', database_name='foo', @@ -36,7 +36,7 @@ def test_get_configured_database_matches_nothing_when_database_name_not_in_archi def test_get_configured_database_matches_database_by_configuration_database_name(): assert module.get_configured_database( - hooks={'postgresql_databases': [{'name': 'all'}, {'name': 'bar'}]}, + config={'postgresql_databases': [{'name': 'all'}, {'name': 'bar'}]}, archive_database_names={'postgresql_databases': ['foo']}, hook_name='postgresql_databases', database_name='foo', @@ -46,7 +46,7 @@ def test_get_configured_database_matches_database_by_configuration_database_name def test_get_configured_database_with_unspecified_hook_matches_database_by_name(): assert module.get_configured_database( - hooks={ + config={ 'other_databases': [{'name': 'other'}], 'postgresql_databases': [{'name': 'foo'}, {'name': 'bar'}], }, @@ -69,8 +69,7 @@ def test_collect_archive_database_names_parses_archive_paths(): archive_database_names = module.collect_archive_database_names( repository={'path': 'repo'}, archive='archive', - location={'borgmatic_source_directory': '.borgmatic'}, - storage=flexmock(), + config={'borgmatic_source_directory': '.borgmatic'}, local_borg_version=flexmock(), global_arguments=flexmock(log_json=False), local_path=flexmock(), @@ -95,8 +94,7 @@ def test_collect_archive_database_names_parses_directory_format_archive_paths(): archive_database_names = module.collect_archive_database_names( repository={'path': 'repo'}, archive='archive', - location={'borgmatic_source_directory': '.borgmatic'}, - storage=flexmock(), + config={'borgmatic_source_directory': '.borgmatic'}, local_borg_version=flexmock(), global_arguments=flexmock(log_json=False), local_path=flexmock(), @@ -117,8 +115,7 @@ def test_collect_archive_database_names_skips_bad_archive_paths(): archive_database_names = 
module.collect_archive_database_names( repository={'path': 'repo'}, archive='archive', - location={'borgmatic_source_directory': '.borgmatic'}, - storage=flexmock(), + config={'borgmatic_source_directory': '.borgmatic'}, local_borg_version=flexmock(), global_arguments=flexmock(log_json=False), local_path=flexmock(), @@ -231,9 +228,7 @@ def test_run_restore_restores_each_database(): ).and_return(('postgresql_databases', {'name': 'bar'})) flexmock(module).should_receive('restore_single_database').with_args( repository=object, - location=object, - storage=object, - hooks=object, + config=object, local_borg_version=object, global_arguments=object, local_path=object, @@ -245,9 +240,7 @@ def test_run_restore_restores_each_database(): ).once() flexmock(module).should_receive('restore_single_database').with_args( repository=object, - location=object, - storage=object, - hooks=object, + config=object, local_borg_version=object, global_arguments=object, local_path=object, @@ -261,9 +254,7 @@ def test_run_restore_restores_each_database(): module.run_restore( repository={'path': 'repo'}, - location=flexmock(), - storage=flexmock(), - hooks=flexmock(), + config=flexmock(), local_borg_version=flexmock(), restore_arguments=flexmock( repository='repo', @@ -293,9 +284,7 @@ def test_run_restore_bails_for_non_matching_repository(): module.run_restore( repository={'path': 'repo'}, - location=flexmock(), - storage=flexmock(), - hooks=flexmock(), + config=flexmock(), local_borg_version=flexmock(), restore_arguments=flexmock(repository='repo', archive='archive', databases=flexmock()), global_arguments=flexmock(dry_run=False), @@ -317,19 +306,19 @@ def test_run_restore_restores_database_configured_with_all_name(): flexmock(module).should_receive('collect_archive_database_names').and_return(flexmock()) flexmock(module).should_receive('find_databases_to_restore').and_return(restore_names) flexmock(module).should_receive('get_configured_database').with_args( - hooks=object, + config=object, 
archive_database_names=object, hook_name='postgresql_databases', database_name='foo', ).and_return(('postgresql_databases', {'name': 'foo'})) flexmock(module).should_receive('get_configured_database').with_args( - hooks=object, + config=object, archive_database_names=object, hook_name='postgresql_databases', database_name='bar', ).and_return((None, None)) flexmock(module).should_receive('get_configured_database').with_args( - hooks=object, + config=object, archive_database_names=object, hook_name='postgresql_databases', database_name='bar', @@ -337,9 +326,7 @@ def test_run_restore_restores_database_configured_with_all_name(): ).and_return(('postgresql_databases', {'name': 'bar'})) flexmock(module).should_receive('restore_single_database').with_args( repository=object, - location=object, - storage=object, - hooks=object, + config=object, local_borg_version=object, global_arguments=object, local_path=object, @@ -351,9 +338,7 @@ def test_run_restore_restores_database_configured_with_all_name(): ).once() flexmock(module).should_receive('restore_single_database').with_args( repository=object, - location=object, - storage=object, - hooks=object, + config=object, local_borg_version=object, global_arguments=object, local_path=object, @@ -367,9 +352,7 @@ def test_run_restore_restores_database_configured_with_all_name(): module.run_restore( repository={'path': 'repo'}, - location=flexmock(), - storage=flexmock(), - hooks=flexmock(), + config=flexmock(), local_borg_version=flexmock(), restore_arguments=flexmock( repository='repo', @@ -401,19 +384,19 @@ def test_run_restore_skips_missing_database(): flexmock(module).should_receive('collect_archive_database_names').and_return(flexmock()) flexmock(module).should_receive('find_databases_to_restore').and_return(restore_names) flexmock(module).should_receive('get_configured_database').with_args( - hooks=object, + config=object, archive_database_names=object, hook_name='postgresql_databases', database_name='foo', 
).and_return(('postgresql_databases', {'name': 'foo'})) flexmock(module).should_receive('get_configured_database').with_args( - hooks=object, + config=object, archive_database_names=object, hook_name='postgresql_databases', database_name='bar', ).and_return((None, None)) flexmock(module).should_receive('get_configured_database').with_args( - hooks=object, + config=object, archive_database_names=object, hook_name='postgresql_databases', database_name='bar', @@ -421,9 +404,7 @@ def test_run_restore_skips_missing_database(): ).and_return((None, None)) flexmock(module).should_receive('restore_single_database').with_args( repository=object, - location=object, - storage=object, - hooks=object, + config=object, local_borg_version=object, global_arguments=object, local_path=object, @@ -435,9 +416,7 @@ def test_run_restore_skips_missing_database(): ).once() flexmock(module).should_receive('restore_single_database').with_args( repository=object, - location=object, - storage=object, - hooks=object, + config=object, local_borg_version=object, global_arguments=object, local_path=object, @@ -451,9 +430,7 @@ def test_run_restore_skips_missing_database(): module.run_restore( repository={'path': 'repo'}, - location=flexmock(), - storage=flexmock(), - hooks=flexmock(), + config=flexmock(), local_borg_version=flexmock(), restore_arguments=flexmock( repository='repo', @@ -486,22 +463,20 @@ def test_run_restore_restores_databases_from_different_hooks(): flexmock(module).should_receive('collect_archive_database_names').and_return(flexmock()) flexmock(module).should_receive('find_databases_to_restore').and_return(restore_names) flexmock(module).should_receive('get_configured_database').with_args( - hooks=object, + config=object, archive_database_names=object, hook_name='postgresql_databases', database_name='foo', ).and_return(('postgresql_databases', {'name': 'foo'})) flexmock(module).should_receive('get_configured_database').with_args( - hooks=object, + config=object, 
archive_database_names=object, hook_name='mysql_databases', database_name='bar', ).and_return(('mysql_databases', {'name': 'bar'})) flexmock(module).should_receive('restore_single_database').with_args( repository=object, - location=object, - storage=object, - hooks=object, + config=object, local_borg_version=object, global_arguments=object, local_path=object, @@ -513,9 +488,7 @@ def test_run_restore_restores_databases_from_different_hooks(): ).once() flexmock(module).should_receive('restore_single_database').with_args( repository=object, - location=object, - storage=object, - hooks=object, + config=object, local_borg_version=object, global_arguments=object, local_path=object, @@ -529,9 +502,7 @@ def test_run_restore_restores_databases_from_different_hooks(): module.run_restore( repository={'path': 'repo'}, - location=flexmock(), - storage=flexmock(), - hooks=flexmock(), + config=flexmock(), local_borg_version=flexmock(), restore_arguments=flexmock( repository='repo', diff --git a/tests/unit/actions/test_rinfo.py b/tests/unit/actions/test_rinfo.py index 7b2371a3..4ba73c41 100644 --- a/tests/unit/actions/test_rinfo.py +++ b/tests/unit/actions/test_rinfo.py @@ -12,7 +12,7 @@ def test_run_rinfo_does_not_raise(): list( module.run_rinfo( repository={'path': 'repo'}, - storage={}, + config={}, local_borg_version=None, rinfo_arguments=rinfo_arguments, global_arguments=flexmock(log_json=False), diff --git a/tests/unit/actions/test_rlist.py b/tests/unit/actions/test_rlist.py index 4a59dc30..84798a76 100644 --- a/tests/unit/actions/test_rlist.py +++ b/tests/unit/actions/test_rlist.py @@ -12,7 +12,7 @@ def test_run_rlist_does_not_raise(): list( module.run_rlist( repository={'path': 'repo'}, - storage={}, + config={}, local_borg_version=None, rlist_arguments=rlist_arguments, global_arguments=flexmock(), diff --git a/tests/unit/actions/test_transfer.py b/tests/unit/actions/test_transfer.py index 58d8a160..03d259be 100644 --- a/tests/unit/actions/test_transfer.py +++ 
b/tests/unit/actions/test_transfer.py @@ -11,7 +11,7 @@ def test_run_transfer_does_not_raise(): module.run_transfer( repository={'path': 'repo'}, - storage={}, + config={}, local_borg_version=None, transfer_arguments=transfer_arguments, global_arguments=global_arguments, diff --git a/tests/unit/borg/test_borg.py b/tests/unit/borg/test_borg.py index 2d7e1750..f38ec0e3 100644 --- a/tests/unit/borg/test_borg.py +++ b/tests/unit/borg/test_borg.py @@ -22,7 +22,7 @@ def test_run_arbitrary_borg_calls_borg_with_flags(): module.run_arbitrary_borg( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', options=['break-lock', '::'], ) @@ -44,7 +44,7 @@ def test_run_arbitrary_borg_with_log_info_calls_borg_with_info_flag(): module.run_arbitrary_borg( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', options=['break-lock', '::'], ) @@ -66,7 +66,7 @@ def test_run_arbitrary_borg_with_log_debug_calls_borg_with_debug_flag(): module.run_arbitrary_borg( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', options=['break-lock', '::'], ) @@ -75,7 +75,7 @@ def test_run_arbitrary_borg_with_log_debug_calls_borg_with_debug_flag(): def test_run_arbitrary_borg_with_lock_wait_calls_borg_with_lock_wait_flags(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - storage_config = {'lock_wait': 5} + config = {'lock_wait': 5} flexmock(module.flags).should_receive('make_flags').and_return(()).and_return( ('--lock-wait', '5') ) @@ -90,7 +90,7 @@ def test_run_arbitrary_borg_with_lock_wait_calls_borg_with_lock_wait_flags(): module.run_arbitrary_borg( repository_path='repo', - storage_config=storage_config, + config=config, local_borg_version='1.2.3', options=['break-lock', '::'], ) @@ -111,7 +111,7 @@ def test_run_arbitrary_borg_with_archive_calls_borg_with_archive_flag(): module.run_arbitrary_borg( 
repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', options=['break-lock', '::$ARCHIVE'], archive='archive', @@ -133,7 +133,7 @@ def test_run_arbitrary_borg_with_local_path_calls_borg_via_local_path(): module.run_arbitrary_borg( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', options=['break-lock', '::'], local_path='borg1', @@ -157,7 +157,7 @@ def test_run_arbitrary_borg_with_remote_path_calls_borg_with_remote_path_flags() module.run_arbitrary_borg( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', options=['break-lock', '::'], remote_path='borg1', @@ -179,7 +179,7 @@ def test_run_arbitrary_borg_passes_borg_specific_flags_to_borg(): module.run_arbitrary_borg( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', options=['list', '--progress', '::'], ) @@ -200,7 +200,7 @@ def test_run_arbitrary_borg_omits_dash_dash_in_flags_passed_to_borg(): module.run_arbitrary_borg( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', options=['--', 'break-lock', '::'], ) @@ -221,7 +221,7 @@ def test_run_arbitrary_borg_without_borg_specific_flags_does_not_raise(): module.run_arbitrary_borg( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', options=[], ) @@ -243,7 +243,7 @@ def test_run_arbitrary_borg_passes_key_sub_command_to_borg_before_injected_flags module.run_arbitrary_borg( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', options=['key', 'export', '::'], ) @@ -265,7 +265,7 @@ def test_run_arbitrary_borg_passes_debug_sub_command_to_borg_before_injected_fla module.run_arbitrary_borg( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', options=['debug', 'dump-manifest', '::', 'path'], ) diff --git a/tests/unit/borg/test_break_lock.py b/tests/unit/borg/test_break_lock.py index 3dc55672..ff26cab7 100644 
--- a/tests/unit/borg/test_break_lock.py +++ b/tests/unit/borg/test_break_lock.py @@ -22,7 +22,7 @@ def test_break_lock_calls_borg_with_required_flags(): module.break_lock( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) @@ -34,7 +34,7 @@ def test_break_lock_calls_borg_with_remote_path_flags(): module.break_lock( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), remote_path='borg1', @@ -47,7 +47,7 @@ def test_break_lock_calls_borg_with_umask_flags(): module.break_lock( repository_path='repo', - storage_config={'umask': '0770'}, + config={'umask': '0770'}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) @@ -59,7 +59,7 @@ def test_break_lock_calls_borg_with_log_json_flags(): module.break_lock( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=True), ) @@ -71,7 +71,7 @@ def test_break_lock_calls_borg_with_lock_wait_flags(): module.break_lock( repository_path='repo', - storage_config={'lock_wait': '5'}, + config={'lock_wait': '5'}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) @@ -84,7 +84,7 @@ def test_break_lock_with_log_info_calls_borg_with_info_parameter(): module.break_lock( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) @@ -97,7 +97,7 @@ def test_break_lock_with_log_debug_calls_borg_with_debug_flags(): module.break_lock( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) diff --git a/tests/unit/borg/test_check.py b/tests/unit/borg/test_check.py index 79201166..c524b47e 100644 --- a/tests/unit/borg/test_check.py +++ b/tests/unit/borg/test_check.py @@ -99,8 +99,7 @@ def 
test_filter_checks_on_frequency_without_config_uses_default_checks(): flexmock(module).should_receive('probe_for_check_time').and_return(None) assert module.filter_checks_on_frequency( - location_config={}, - consistency_config={}, + config={}, borg_repository_id='repo', checks=('repository', 'archives'), force=False, @@ -110,8 +109,7 @@ def test_filter_checks_on_frequency_without_config_uses_default_checks(): def test_filter_checks_on_frequency_retains_unconfigured_check(): assert module.filter_checks_on_frequency( - location_config={}, - consistency_config={}, + config={}, borg_repository_id='repo', checks=('data',), force=False, @@ -122,8 +120,7 @@ def test_filter_checks_on_frequency_retains_check_without_frequency(): flexmock(module).should_receive('parse_frequency').and_return(None) assert module.filter_checks_on_frequency( - location_config={}, - consistency_config={'checks': [{'name': 'archives'}]}, + config={'checks': [{'name': 'archives'}]}, borg_repository_id='repo', checks=('archives',), force=False, @@ -141,8 +138,7 @@ def test_filter_checks_on_frequency_retains_check_with_elapsed_frequency(): ) assert module.filter_checks_on_frequency( - location_config={}, - consistency_config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]}, + config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]}, borg_repository_id='repo', checks=('archives',), force=False, @@ -158,8 +154,7 @@ def test_filter_checks_on_frequency_retains_check_with_missing_check_time_file() flexmock(module).should_receive('probe_for_check_time').and_return(None) assert module.filter_checks_on_frequency( - location_config={}, - consistency_config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]}, + config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]}, borg_repository_id='repo', checks=('archives',), force=False, @@ -178,8 +173,7 @@ def test_filter_checks_on_frequency_skips_check_with_unelapsed_frequency(): assert ( module.filter_checks_on_frequency( - 
location_config={}, - consistency_config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]}, + config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]}, borg_repository_id='repo', checks=('archives',), force=False, @@ -191,8 +185,7 @@ def test_filter_checks_on_frequency_skips_check_with_unelapsed_frequency(): def test_filter_checks_on_frequency_restains_check_with_unelapsed_frequency_and_force(): assert module.filter_checks_on_frequency( - location_config={}, - consistency_config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]}, + config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]}, borg_repository_id='repo', checks=('archives',), force=True, @@ -616,7 +609,7 @@ def test_upgrade_check_times_renames_stale_temporary_check_path(): def test_check_archives_with_progress_calls_borg_with_progress_parameter(): checks = ('repository',) - consistency_config = {'check_last': None} + config = {'check_last': None} flexmock(module.rinfo).should_receive('display_repository_info').and_return( '{"repository": {"id": "repo"}}' ) @@ -639,9 +632,7 @@ def test_check_archives_with_progress_calls_borg_with_progress_parameter(): module.check_archives( repository_path='repo', - location_config={}, - storage_config={}, - consistency_config=consistency_config, + config=config, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), progress=True, @@ -650,7 +641,7 @@ def test_check_archives_with_progress_calls_borg_with_progress_parameter(): def test_check_archives_with_repair_calls_borg_with_repair_parameter(): checks = ('repository',) - consistency_config = {'check_last': None} + config = {'check_last': None} flexmock(module.rinfo).should_receive('display_repository_info').and_return( '{"repository": {"id": "repo"}}' ) @@ -673,9 +664,7 @@ def test_check_archives_with_repair_calls_borg_with_repair_parameter(): module.check_archives( repository_path='repo', - location_config={}, - storage_config={}, - consistency_config=consistency_config, + 
config=config, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), repair=True, @@ -693,7 +682,7 @@ def test_check_archives_with_repair_calls_borg_with_repair_parameter(): ) def test_check_archives_calls_borg_with_parameters(checks): check_last = flexmock() - consistency_config = {'check_last': check_last} + config = {'check_last': check_last} flexmock(module.rinfo).should_receive('display_repository_info').and_return( '{"repository": {"id": "repo"}}' ) @@ -710,9 +699,7 @@ def test_check_archives_calls_borg_with_parameters(checks): module.check_archives( repository_path='repo', - location_config={}, - storage_config={}, - consistency_config=consistency_config, + config=config, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) @@ -721,7 +708,7 @@ def test_check_archives_calls_borg_with_parameters(checks): def test_check_archives_with_json_error_raises(): checks = ('archives',) check_last = flexmock() - consistency_config = {'check_last': check_last} + config = {'check_last': check_last} flexmock(module.rinfo).should_receive('display_repository_info').and_return( '{"unexpected": {"id": "repo"}}' ) @@ -734,9 +721,7 @@ def test_check_archives_with_json_error_raises(): with pytest.raises(ValueError): module.check_archives( repository_path='repo', - location_config={}, - storage_config={}, - consistency_config=consistency_config, + config=config, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) @@ -745,7 +730,7 @@ def test_check_archives_with_json_error_raises(): def test_check_archives_with_missing_json_keys_raises(): checks = ('archives',) check_last = flexmock() - consistency_config = {'check_last': check_last} + config = {'check_last': check_last} flexmock(module.rinfo).should_receive('display_repository_info').and_return('{invalid JSON') flexmock(module).should_receive('upgrade_check_times') flexmock(module).should_receive('parse_checks') @@ -756,9 +741,7 @@ def 
test_check_archives_with_missing_json_keys_raises(): with pytest.raises(ValueError): module.check_archives( repository_path='repo', - location_config={}, - storage_config={}, - consistency_config=consistency_config, + config=config, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) @@ -767,7 +750,7 @@ def test_check_archives_with_missing_json_keys_raises(): def test_check_archives_with_extract_check_calls_extract_only(): checks = ('extract',) check_last = flexmock() - consistency_config = {'check_last': check_last} + config = {'check_last': check_last} flexmock(module.rinfo).should_receive('display_repository_info').and_return( '{"repository": {"id": "repo"}}' ) @@ -784,9 +767,7 @@ def test_check_archives_with_extract_check_calls_extract_only(): module.check_archives( repository_path='repo', - location_config={}, - storage_config={}, - consistency_config=consistency_config, + config=config, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) @@ -794,7 +775,7 @@ def test_check_archives_with_extract_check_calls_extract_only(): def test_check_archives_with_log_info_calls_borg_with_info_parameter(): checks = ('repository',) - consistency_config = {'check_last': None} + config = {'check_last': None} flexmock(module.rinfo).should_receive('display_repository_info').and_return( '{"repository": {"id": "repo"}}' ) @@ -812,9 +793,7 @@ def test_check_archives_with_log_info_calls_borg_with_info_parameter(): module.check_archives( repository_path='repo', - location_config={}, - storage_config={}, - consistency_config=consistency_config, + config=config, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) @@ -822,7 +801,7 @@ def test_check_archives_with_log_info_calls_borg_with_info_parameter(): def test_check_archives_with_log_debug_calls_borg_with_debug_parameter(): checks = ('repository',) - consistency_config = {'check_last': None} + config = {'check_last': None} 
flexmock(module.rinfo).should_receive('display_repository_info').and_return( '{"repository": {"id": "repo"}}' ) @@ -840,16 +819,14 @@ def test_check_archives_with_log_debug_calls_borg_with_debug_parameter(): module.check_archives( repository_path='repo', - location_config={}, - storage_config={}, - consistency_config=consistency_config, + config=config, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) def test_check_archives_without_any_checks_bails(): - consistency_config = {'check_last': None} + config = {'check_last': None} flexmock(module.rinfo).should_receive('display_repository_info').and_return( '{"repository": {"id": "repo"}}' ) @@ -862,9 +839,7 @@ def test_check_archives_without_any_checks_bails(): module.check_archives( repository_path='repo', - location_config={}, - storage_config={}, - consistency_config=consistency_config, + config=config, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) @@ -873,7 +848,7 @@ def test_check_archives_without_any_checks_bails(): def test_check_archives_with_local_path_calls_borg_via_local_path(): checks = ('repository',) check_last = flexmock() - consistency_config = {'check_last': check_last} + config = {'check_last': check_last} flexmock(module.rinfo).should_receive('display_repository_info').and_return( '{"repository": {"id": "repo"}}' ) @@ -890,9 +865,7 @@ def test_check_archives_with_local_path_calls_borg_via_local_path(): module.check_archives( repository_path='repo', - location_config={}, - storage_config={}, - consistency_config=consistency_config, + config=config, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), local_path='borg1', @@ -902,7 +875,7 @@ def test_check_archives_with_local_path_calls_borg_via_local_path(): def test_check_archives_with_remote_path_calls_borg_with_remote_path_parameters(): checks = ('repository',) check_last = flexmock() - consistency_config = {'check_last': check_last} + config = {'check_last': check_last} 
flexmock(module.rinfo).should_receive('display_repository_info').and_return( '{"repository": {"id": "repo"}}' ) @@ -919,9 +892,7 @@ def test_check_archives_with_remote_path_calls_borg_with_remote_path_parameters( module.check_archives( repository_path='repo', - location_config={}, - storage_config={}, - consistency_config=consistency_config, + config=config, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), remote_path='borg1', @@ -931,8 +902,7 @@ def test_check_archives_with_remote_path_calls_borg_with_remote_path_parameters( def test_check_archives_with_log_json_calls_borg_with_log_json_parameters(): checks = ('repository',) check_last = flexmock() - storage_config = {} - consistency_config = {'check_last': check_last} + config = {'check_last': check_last} flexmock(module.rinfo).should_receive('display_repository_info').and_return( '{"repository": {"id": "repo"}}' ) @@ -949,9 +919,7 @@ def test_check_archives_with_log_json_calls_borg_with_log_json_parameters(): module.check_archives( repository_path='repo', - location_config={}, - storage_config=storage_config, - consistency_config=consistency_config, + config=config, local_borg_version='1.2.3', global_arguments=flexmock(log_json=True), ) @@ -960,8 +928,7 @@ def test_check_archives_with_log_json_calls_borg_with_log_json_parameters(): def test_check_archives_with_lock_wait_calls_borg_with_lock_wait_parameters(): checks = ('repository',) check_last = flexmock() - storage_config = {'lock_wait': 5} - consistency_config = {'check_last': check_last} + config = {'lock_wait': 5, 'check_last': check_last} flexmock(module.rinfo).should_receive('display_repository_info').and_return( '{"repository": {"id": "repo"}}' ) @@ -978,9 +945,7 @@ def test_check_archives_with_lock_wait_calls_borg_with_lock_wait_parameters(): module.check_archives( repository_path='repo', - location_config={}, - storage_config=storage_config, - consistency_config=consistency_config, + config=config, local_borg_version='1.2.3', 
global_arguments=flexmock(log_json=False), ) @@ -990,7 +955,7 @@ def test_check_archives_with_retention_prefix(): checks = ('repository',) check_last = flexmock() prefix = 'foo-' - consistency_config = {'check_last': check_last, 'prefix': prefix} + config = {'check_last': check_last, 'prefix': prefix} flexmock(module.rinfo).should_receive('display_repository_info').and_return( '{"repository": {"id": "repo"}}' ) @@ -1007,9 +972,7 @@ def test_check_archives_with_retention_prefix(): module.check_archives( repository_path='repo', - location_config={}, - storage_config={}, - consistency_config=consistency_config, + config=config, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) @@ -1017,7 +980,7 @@ def test_check_archives_with_retention_prefix(): def test_check_archives_with_extra_borg_options_calls_borg_with_extra_options(): checks = ('repository',) - consistency_config = {'check_last': None} + config = {'check_last': None, 'extra_borg_options': {'check': '--extra --options'}} flexmock(module.rinfo).should_receive('display_repository_info').and_return( '{"repository": {"id": "repo"}}' ) @@ -1034,9 +997,7 @@ def test_check_archives_with_extra_borg_options_calls_borg_with_extra_options(): module.check_archives( repository_path='repo', - location_config={}, - storage_config={'extra_borg_options': {'check': '--extra --options'}}, - consistency_config=consistency_config, + config=config, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) diff --git a/tests/unit/borg/test_compact.py b/tests/unit/borg/test_compact.py index beacf547..c8e3e7f4 100644 --- a/tests/unit/borg/test_compact.py +++ b/tests/unit/borg/test_compact.py @@ -27,7 +27,7 @@ def test_compact_segments_calls_borg_with_parameters(): module.compact_segments( dry_run=False, repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) @@ -40,7 +40,7 @@ def 
test_compact_segments_with_log_info_calls_borg_with_info_parameter(): module.compact_segments( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), dry_run=False, @@ -54,7 +54,7 @@ def test_compact_segments_with_log_debug_calls_borg_with_debug_parameter(): module.compact_segments( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), dry_run=False, @@ -66,7 +66,7 @@ def test_compact_segments_with_dry_run_skips_borg_call(): module.compact_segments( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), dry_run=True, @@ -80,7 +80,7 @@ def test_compact_segments_with_local_path_calls_borg_via_local_path(): module.compact_segments( dry_run=False, repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), local_path='borg1', @@ -94,7 +94,7 @@ def test_compact_segments_with_remote_path_calls_borg_with_remote_path_parameter module.compact_segments( dry_run=False, repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), remote_path='borg1', @@ -108,7 +108,7 @@ def test_compact_segments_with_progress_calls_borg_with_progress_parameter(): module.compact_segments( dry_run=False, repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), progress=True, @@ -122,7 +122,7 @@ def test_compact_segments_with_cleanup_commits_calls_borg_with_cleanup_commits_p module.compact_segments( dry_run=False, repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), cleanup_commits=True, @@ -136,7 +136,7 @@ def test_compact_segments_with_threshold_calls_borg_with_threshold_parameter(): 
module.compact_segments( dry_run=False, repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), threshold=20, @@ -144,14 +144,14 @@ def test_compact_segments_with_threshold_calls_borg_with_threshold_parameter(): def test_compact_segments_with_umask_calls_borg_with_umask_parameters(): - storage_config = {'umask': '077'} + config = {'umask': '077'} flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) insert_execute_command_mock(COMPACT_COMMAND + ('--umask', '077', 'repo'), logging.INFO) module.compact_segments( dry_run=False, repository_path='repo', - storage_config=storage_config, + config=config, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) @@ -164,21 +164,21 @@ def test_compact_segments_with_log_json_calls_borg_with_log_json_parameters(): module.compact_segments( dry_run=False, repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=True), ) def test_compact_segments_with_lock_wait_calls_borg_with_lock_wait_parameters(): - storage_config = {'lock_wait': 5} + config = {'lock_wait': 5} flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) insert_execute_command_mock(COMPACT_COMMAND + ('--lock-wait', '5', 'repo'), logging.INFO) module.compact_segments( dry_run=False, repository_path='repo', - storage_config=storage_config, + config=config, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) @@ -191,7 +191,7 @@ def test_compact_segments_with_extra_borg_options_calls_borg_with_extra_options( module.compact_segments( dry_run=False, repository_path='repo', - storage_config={'extra_borg_options': {'compact': '--extra --options'}}, + config={'extra_borg_options': {'compact': '--extra --options'}}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) diff --git a/tests/unit/borg/test_create.py 
b/tests/unit/borg/test_create.py index 33e95607..5b196d9b 100644 --- a/tests/unit/borg/test_create.py +++ b/tests/unit/borg/test_create.py @@ -170,79 +170,75 @@ def test_ensure_files_readable_opens_filenames(filename_lists, opened_filenames) def test_make_pattern_flags_includes_pattern_filename_when_given(): pattern_flags = module.make_pattern_flags( - location_config={'patterns': ['R /', '- /var']}, pattern_filename='/tmp/patterns' + config={'patterns': ['R /', '- /var']}, pattern_filename='/tmp/patterns' ) assert pattern_flags == ('--patterns-from', '/tmp/patterns') def test_make_pattern_flags_includes_patterns_from_filenames_when_in_config(): - pattern_flags = module.make_pattern_flags( - location_config={'patterns_from': ['patterns', 'other']} - ) + pattern_flags = module.make_pattern_flags(config={'patterns_from': ['patterns', 'other']}) assert pattern_flags == ('--patterns-from', 'patterns', '--patterns-from', 'other') def test_make_pattern_flags_includes_both_filenames_when_patterns_given_and_patterns_from_in_config(): pattern_flags = module.make_pattern_flags( - location_config={'patterns_from': ['patterns']}, pattern_filename='/tmp/patterns' + config={'patterns_from': ['patterns']}, pattern_filename='/tmp/patterns' ) assert pattern_flags == ('--patterns-from', 'patterns', '--patterns-from', '/tmp/patterns') def test_make_pattern_flags_considers_none_patterns_from_filenames_as_empty(): - pattern_flags = module.make_pattern_flags(location_config={'patterns_from': None}) + pattern_flags = module.make_pattern_flags(config={'patterns_from': None}) assert pattern_flags == () def test_make_exclude_flags_includes_exclude_patterns_filename_when_given(): exclude_flags = module.make_exclude_flags( - location_config={'exclude_patterns': ['*.pyc', '/var']}, exclude_filename='/tmp/excludes' + config={'exclude_patterns': ['*.pyc', '/var']}, exclude_filename='/tmp/excludes' ) assert exclude_flags == ('--exclude-from', '/tmp/excludes') def 
test_make_exclude_flags_includes_exclude_from_filenames_when_in_config(): - exclude_flags = module.make_exclude_flags( - location_config={'exclude_from': ['excludes', 'other']} - ) + exclude_flags = module.make_exclude_flags(config={'exclude_from': ['excludes', 'other']}) assert exclude_flags == ('--exclude-from', 'excludes', '--exclude-from', 'other') def test_make_exclude_flags_includes_both_filenames_when_patterns_given_and_exclude_from_in_config(): exclude_flags = module.make_exclude_flags( - location_config={'exclude_from': ['excludes']}, exclude_filename='/tmp/excludes' + config={'exclude_from': ['excludes']}, exclude_filename='/tmp/excludes' ) assert exclude_flags == ('--exclude-from', 'excludes', '--exclude-from', '/tmp/excludes') def test_make_exclude_flags_considers_none_exclude_from_filenames_as_empty(): - exclude_flags = module.make_exclude_flags(location_config={'exclude_from': None}) + exclude_flags = module.make_exclude_flags(config={'exclude_from': None}) assert exclude_flags == () def test_make_exclude_flags_includes_exclude_caches_when_true_in_config(): - exclude_flags = module.make_exclude_flags(location_config={'exclude_caches': True}) + exclude_flags = module.make_exclude_flags(config={'exclude_caches': True}) assert exclude_flags == ('--exclude-caches',) def test_make_exclude_flags_does_not_include_exclude_caches_when_false_in_config(): - exclude_flags = module.make_exclude_flags(location_config={'exclude_caches': False}) + exclude_flags = module.make_exclude_flags(config={'exclude_caches': False}) assert exclude_flags == () def test_make_exclude_flags_includes_exclude_if_present_when_in_config(): exclude_flags = module.make_exclude_flags( - location_config={'exclude_if_present': ['exclude_me', 'also_me']} + config={'exclude_if_present': ['exclude_me', 'also_me']} ) assert exclude_flags == ( @@ -254,31 +250,31 @@ def test_make_exclude_flags_includes_exclude_if_present_when_in_config(): def 
test_make_exclude_flags_includes_keep_exclude_tags_when_true_in_config(): - exclude_flags = module.make_exclude_flags(location_config={'keep_exclude_tags': True}) + exclude_flags = module.make_exclude_flags(config={'keep_exclude_tags': True}) assert exclude_flags == ('--keep-exclude-tags',) def test_make_exclude_flags_does_not_include_keep_exclude_tags_when_false_in_config(): - exclude_flags = module.make_exclude_flags(location_config={'keep_exclude_tags': False}) + exclude_flags = module.make_exclude_flags(config={'keep_exclude_tags': False}) assert exclude_flags == () def test_make_exclude_flags_includes_exclude_nodump_when_true_in_config(): - exclude_flags = module.make_exclude_flags(location_config={'exclude_nodump': True}) + exclude_flags = module.make_exclude_flags(config={'exclude_nodump': True}) assert exclude_flags == ('--exclude-nodump',) def test_make_exclude_flags_does_not_include_exclude_nodump_when_false_in_config(): - exclude_flags = module.make_exclude_flags(location_config={'exclude_nodump': False}) + exclude_flags = module.make_exclude_flags(config={'exclude_nodump': False}) assert exclude_flags == () def test_make_exclude_flags_is_empty_when_config_has_no_excludes(): - exclude_flags = module.make_exclude_flags(location_config={}) + exclude_flags = module.make_exclude_flags(config={}) assert exclude_flags == () @@ -504,12 +500,11 @@ def test_create_archive_calls_borg_with_parameters(): module.create_archive( dry_run=False, repository_path='repo', - location_config={ + config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, - storage_config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), ) @@ -548,12 +543,11 @@ def test_create_archive_calls_borg_with_environment(): module.create_archive( dry_run=False, repository_path='repo', - location_config={ + config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, - 
storage_config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), ) @@ -594,12 +588,11 @@ def test_create_archive_with_patterns_calls_borg_with_patterns_including_convert module.create_archive( dry_run=False, repository_path='repo', - location_config={ + config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'patterns': ['pattern'], }, - storage_config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), ) @@ -644,11 +637,10 @@ def test_create_archive_with_sources_and_used_config_paths_calls_borg_with_sourc module.create_archive( dry_run=False, repository_path='repo', - location_config={ + config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], }, - storage_config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=['/etc/borgmatic/config.yaml']), ) @@ -689,12 +681,11 @@ def test_create_archive_with_exclude_patterns_calls_borg_with_excludes(): module.create_archive( dry_run=False, repository_path='repo', - location_config={ + config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': ['exclude'], }, - storage_config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), ) @@ -733,12 +724,11 @@ def test_create_archive_with_log_info_calls_borg_with_info_parameter(): module.create_archive( dry_run=False, repository_path='repo', - location_config={ + config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, - storage_config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), ) @@ -774,12 +764,11 @@ def test_create_archive_with_log_info_and_json_suppresses_most_borg_output(): module.create_archive( dry_run=False, repository_path='repo', - location_config={ + config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 
'exclude_patterns': None, }, - storage_config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), json=True, @@ -819,12 +808,11 @@ def test_create_archive_with_log_debug_calls_borg_with_debug_parameter(): module.create_archive( dry_run=False, repository_path='repo', - location_config={ + config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, - storage_config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), ) @@ -860,12 +848,11 @@ def test_create_archive_with_log_debug_and_json_suppresses_most_borg_output(): module.create_archive( dry_run=False, repository_path='repo', - location_config={ + config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, - storage_config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), json=True, @@ -904,12 +891,11 @@ def test_create_archive_with_dry_run_calls_borg_with_dry_run_parameter(): module.create_archive( dry_run=True, repository_path='repo', - location_config={ + config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, - storage_config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), ) @@ -950,12 +936,11 @@ def test_create_archive_with_stats_and_dry_run_calls_borg_without_stats_paramete module.create_archive( dry_run=True, repository_path='repo', - location_config={ + config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, - storage_config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), stats=True, @@ -994,12 +979,12 @@ def test_create_archive_with_checkpoint_interval_calls_borg_with_checkpoint_inte module.create_archive( dry_run=False, repository_path='repo', - location_config={ + config={ 'source_directories': 
['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, + 'checkpoint_interval': 600, }, - storage_config={'checkpoint_interval': 600}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), ) @@ -1037,12 +1022,12 @@ def test_create_archive_with_checkpoint_volume_calls_borg_with_checkpoint_volume module.create_archive( dry_run=False, repository_path='repo', - location_config={ + config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, + 'checkpoint_volume': 1024, }, - storage_config={'checkpoint_volume': 1024}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), ) @@ -1080,12 +1065,12 @@ def test_create_archive_with_chunker_params_calls_borg_with_chunker_params_param module.create_archive( dry_run=False, repository_path='repo', - location_config={ + config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, + 'chunker_params': '1,2,3,4', }, - storage_config={'chunker_params': '1,2,3,4'}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), ) @@ -1123,12 +1108,12 @@ def test_create_archive_with_compression_calls_borg_with_compression_parameters( module.create_archive( dry_run=False, repository_path='repo', - location_config={ + config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, + 'compression': 'rle', }, - storage_config={'compression': 'rle'}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), ) @@ -1172,12 +1157,12 @@ def test_create_archive_with_upload_rate_limit_calls_borg_with_upload_ratelimit_ module.create_archive( dry_run=False, repository_path='repo', - location_config={ + config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, + 'upload_rate_limit': 100, }, - storage_config={'upload_rate_limit': 100}, 
local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), ) @@ -1217,13 +1202,12 @@ def test_create_archive_with_working_directory_calls_borg_with_working_directory module.create_archive( dry_run=False, repository_path='repo', - location_config={ + config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'working_directory': '/working/dir', 'exclude_patterns': None, }, - storage_config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), ) @@ -1261,13 +1245,12 @@ def test_create_archive_with_one_file_system_calls_borg_with_one_file_system_par module.create_archive( dry_run=False, repository_path='repo', - location_config={ + config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'one_file_system': True, 'exclude_patterns': None, }, - storage_config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), ) @@ -1311,13 +1294,12 @@ def test_create_archive_with_numeric_ids_calls_borg_with_numeric_ids_parameter( module.create_archive( dry_run=False, repository_path='repo', - location_config={ + config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'numeric_ids': True, 'exclude_patterns': None, }, - storage_config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), ) @@ -1365,13 +1347,12 @@ def test_create_archive_with_read_special_calls_borg_with_read_special_parameter module.create_archive( dry_run=False, repository_path='repo', - location_config={ + config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'read_special': True, 'exclude_patterns': None, }, - storage_config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), ) @@ -1421,13 +1402,12 @@ def test_create_archive_with_basic_option_calls_borg_with_corresponding_paramete module.create_archive( dry_run=False, repository_path='repo', - 
location_config={ + config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], option_name: option_value, 'exclude_patterns': None, }, - storage_config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), ) @@ -1476,13 +1456,12 @@ def test_create_archive_with_atime_option_calls_borg_with_corresponding_paramete module.create_archive( dry_run=False, repository_path='repo', - location_config={ + config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'atime': option_value, 'exclude_patterns': None, }, - storage_config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), ) @@ -1531,13 +1510,12 @@ def test_create_archive_with_flags_option_calls_borg_with_corresponding_paramete module.create_archive( dry_run=False, repository_path='repo', - location_config={ + config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'flags': option_value, 'exclude_patterns': None, }, - storage_config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), ) @@ -1575,13 +1553,12 @@ def test_create_archive_with_files_cache_calls_borg_with_files_cache_parameters( module.create_archive( dry_run=False, repository_path='repo', - location_config={ + config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'files_cache': 'ctime,size', 'exclude_patterns': None, }, - storage_config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), ) @@ -1619,12 +1596,11 @@ def test_create_archive_with_local_path_calls_borg_via_local_path(): module.create_archive( dry_run=False, repository_path='repo', - location_config={ + config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, - storage_config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), local_path='borg1', @@ -1663,12 +1639,11 @@ 
def test_create_archive_with_remote_path_calls_borg_with_remote_path_parameters( module.create_archive( dry_run=False, repository_path='repo', - location_config={ + config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, - storage_config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), remote_path='borg1', @@ -1707,12 +1682,12 @@ def test_create_archive_with_umask_calls_borg_with_umask_parameters(): module.create_archive( dry_run=False, repository_path='repo', - location_config={ + config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, + 'umask': 740, }, - storage_config={'umask': 740}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), ) @@ -1750,12 +1725,11 @@ def test_create_archive_with_log_json_calls_borg_with_log_json_parameters(): module.create_archive( dry_run=False, repository_path='repo', - location_config={ + config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, - storage_config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=True, used_config_paths=[]), ) @@ -1793,12 +1767,12 @@ def test_create_archive_with_lock_wait_calls_borg_with_lock_wait_parameters(): module.create_archive( dry_run=False, repository_path='repo', - location_config={ + config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, + 'lock_wait': 5, }, - storage_config={'lock_wait': 5}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), ) @@ -1836,12 +1810,11 @@ def test_create_archive_with_stats_calls_borg_with_stats_parameter_and_answer_ou module.create_archive( dry_run=False, repository_path='repo', - location_config={ + config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, - storage_config={}, 
local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), stats=True, @@ -1880,12 +1853,11 @@ def test_create_archive_with_files_calls_borg_with_list_parameter_and_answer_out module.create_archive( dry_run=False, repository_path='repo', - location_config={ + config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, - storage_config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), list_files=True, @@ -1930,12 +1902,11 @@ def test_create_archive_with_progress_and_log_info_calls_borg_with_progress_para module.create_archive( dry_run=False, repository_path='repo', - location_config={ + config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, - storage_config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), progress=True, @@ -1974,12 +1945,11 @@ def test_create_archive_with_progress_calls_borg_with_progress_parameter(): module.create_archive( dry_run=False, repository_path='repo', - location_config={ + config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, - storage_config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), progress=True, @@ -2035,12 +2005,11 @@ def test_create_archive_with_progress_and_stream_processes_calls_borg_with_progr module.create_archive( dry_run=False, repository_path='repo', - location_config={ + config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, - storage_config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), progress=True, @@ -2099,13 +2068,12 @@ def test_create_archive_with_stream_processes_ignores_read_special_false_and_log module.create_archive( dry_run=False, repository_path='repo', - location_config={ + config={ 
'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, 'read_special': False, }, - storage_config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), stream_processes=processes, @@ -2168,12 +2136,11 @@ def test_create_archive_with_stream_processes_adds_special_files_to_excludes(): module.create_archive( dry_run=False, repository_path='repo', - location_config={ + config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, - storage_config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), stream_processes=processes, @@ -2232,13 +2199,12 @@ def test_create_archive_with_stream_processes_and_read_special_does_not_add_spec module.create_archive( dry_run=False, repository_path='repo', - location_config={ + config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, 'read_special': True, }, - storage_config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), stream_processes=processes, @@ -2274,12 +2240,11 @@ def test_create_archive_with_json_calls_borg_with_json_parameter(): json_output = module.create_archive( dry_run=False, repository_path='repo', - location_config={ + config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, - storage_config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), json=True, @@ -2317,12 +2282,11 @@ def test_create_archive_with_stats_and_json_calls_borg_without_stats_parameter() json_output = module.create_archive( dry_run=False, repository_path='repo', - location_config={ + config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, - storage_config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), json=True, @@ 
-2365,12 +2329,11 @@ def test_create_archive_with_source_directories_glob_expands(): module.create_archive( dry_run=False, repository_path='repo', - location_config={ + config={ 'source_directories': ['foo*'], 'repositories': ['repo'], 'exclude_patterns': None, }, - storage_config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), ) @@ -2409,12 +2372,11 @@ def test_create_archive_with_non_matching_source_directories_glob_passes_through module.create_archive( dry_run=False, repository_path='repo', - location_config={ + config={ 'source_directories': ['foo*'], 'repositories': ['repo'], 'exclude_patterns': None, }, - storage_config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), ) @@ -2452,12 +2414,11 @@ def test_create_archive_with_glob_calls_borg_with_expanded_directories(): module.create_archive( dry_run=False, repository_path='repo', - location_config={ + config={ 'source_directories': ['foo*'], 'repositories': ['repo'], 'exclude_patterns': None, }, - storage_config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), ) @@ -2495,12 +2456,12 @@ def test_create_archive_with_archive_name_format_calls_borg_with_archive_name(): module.create_archive( dry_run=False, repository_path='repo', - location_config={ + config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, + 'archive_name_format': 'ARCHIVE_NAME', }, - storage_config={'archive_name_format': 'ARCHIVE_NAME'}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), ) @@ -2539,12 +2500,12 @@ def test_create_archive_with_archive_name_format_accepts_borg_placeholders(): module.create_archive( dry_run=False, repository_path='repo', - location_config={ + config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, + 'archive_name_format': 
'Documents_{hostname}-{now}', # noqa: FS003 }, - storage_config={'archive_name_format': 'Documents_{hostname}-{now}'}, # noqa: FS003 local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), ) @@ -2583,12 +2544,12 @@ def test_create_archive_with_repository_accepts_borg_placeholders(): module.create_archive( dry_run=False, repository_path='{fqdn}', # noqa: FS003 - location_config={ + config={ 'source_directories': ['foo', 'bar'], 'repositories': ['{fqdn}'], # noqa: FS003 'exclude_patterns': None, + 'archive_name_format': 'Documents_{hostname}-{now}', # noqa: FS003 }, - storage_config={'archive_name_format': 'Documents_{hostname}-{now}'}, # noqa: FS003 local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), ) @@ -2626,12 +2587,12 @@ def test_create_archive_with_extra_borg_options_calls_borg_with_extra_options(): module.create_archive( dry_run=False, repository_path='repo', - location_config={ + config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, + 'extra_borg_options': {'create': '--extra --options'}, }, - storage_config={'extra_borg_options': {'create': '--extra --options'}}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), ) @@ -2687,12 +2648,11 @@ def test_create_archive_with_stream_processes_calls_borg_with_processes_and_read module.create_archive( dry_run=False, repository_path='repo', - location_config={ + config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, - storage_config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), stream_processes=processes, @@ -2712,13 +2672,12 @@ def test_create_archive_with_non_existent_directory_and_source_directories_must_ module.create_archive( dry_run=False, repository_path='repo', - location_config={ + config={ 'source_directories': ['foo', 'bar'], 'repositories': 
['repo'], 'exclude_patterns': None, 'source_directories_must_exist': True, }, - storage_config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False, used_config_paths=[]), ) diff --git a/tests/unit/borg/test_export_tar.py b/tests/unit/borg/test_export_tar.py index 5fb7bff2..32b0967f 100644 --- a/tests/unit/borg/test_export_tar.py +++ b/tests/unit/borg/test_export_tar.py @@ -36,7 +36,7 @@ def test_export_tar_archive_calls_borg_with_path_parameters(): archive='archive', paths=['path1', 'path2'], destination_path='test.tar', - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) @@ -58,7 +58,7 @@ def test_export_tar_archive_calls_borg_with_local_path_parameters(): archive='archive', paths=None, destination_path='test.tar', - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), local_path='borg1', @@ -81,7 +81,7 @@ def test_export_tar_archive_calls_borg_with_remote_path_parameters(): archive='archive', paths=None, destination_path='test.tar', - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), remote_path='borg1', @@ -104,7 +104,7 @@ def test_export_tar_archive_calls_borg_with_umask_parameters(): archive='archive', paths=None, destination_path='test.tar', - storage_config={'umask': '0770'}, + config={'umask': '0770'}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) @@ -124,7 +124,7 @@ def test_export_tar_archive_calls_borg_with_log_json_parameter(): archive='archive', paths=None, destination_path='test.tar', - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=True), ) @@ -146,7 +146,7 @@ def test_export_tar_archive_calls_borg_with_lock_wait_parameters(): archive='archive', paths=None, destination_path='test.tar', - storage_config={'lock_wait': '5'}, + config={'lock_wait': '5'}, local_borg_version='1.2.3', 
global_arguments=flexmock(log_json=False), ) @@ -167,7 +167,7 @@ def test_export_tar_archive_with_log_info_calls_borg_with_info_parameter(): archive='archive', paths=None, destination_path='test.tar', - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) @@ -190,7 +190,7 @@ def test_export_tar_archive_with_log_debug_calls_borg_with_debug_parameters(): archive='archive', paths=None, destination_path='test.tar', - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) @@ -210,7 +210,7 @@ def test_export_tar_archive_calls_borg_with_dry_run_parameter(): archive='archive', paths=None, destination_path='test.tar', - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) @@ -232,7 +232,7 @@ def test_export_tar_archive_calls_borg_with_tar_filter_parameters(): archive='archive', paths=None, destination_path='test.tar', - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), tar_filter='bzip2', @@ -256,7 +256,7 @@ def test_export_tar_archive_calls_borg_with_list_parameter(): archive='archive', paths=None, destination_path='test.tar', - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), list_files=True, @@ -279,7 +279,7 @@ def test_export_tar_archive_calls_borg_with_strip_components_parameter(): archive='archive', paths=None, destination_path='test.tar', - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), strip_components=5, @@ -300,7 +300,7 @@ def test_export_tar_archive_skips_abspath_for_remote_repository_parameter(): archive='archive', paths=None, destination_path='test.tar', - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) @@ -320,7 +320,7 @@ def test_export_tar_archive_calls_borg_with_stdout_destination_path(): 
archive='archive', paths=None, destination_path='-', - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) diff --git a/tests/unit/borg/test_extract.py b/tests/unit/borg/test_extract.py index a4032f6c..a65aac73 100644 --- a/tests/unit/borg/test_extract.py +++ b/tests/unit/borg/test_extract.py @@ -25,7 +25,7 @@ def test_extract_last_archive_dry_run_calls_borg_with_last_archive(): ) module.extract_last_archive_dry_run( - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), repository_path='repo', @@ -38,7 +38,7 @@ def test_extract_last_archive_dry_run_without_any_archives_should_not_raise(): flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(('repo',)) module.extract_last_archive_dry_run( - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), repository_path='repo', @@ -55,7 +55,7 @@ def test_extract_last_archive_dry_run_with_log_info_calls_borg_with_info_paramet ) module.extract_last_archive_dry_run( - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), repository_path='repo', @@ -74,7 +74,7 @@ def test_extract_last_archive_dry_run_with_log_debug_calls_borg_with_debug_param ) module.extract_last_archive_dry_run( - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), repository_path='repo', @@ -90,7 +90,7 @@ def test_extract_last_archive_dry_run_calls_borg_via_local_path(): ) module.extract_last_archive_dry_run( - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), repository_path='repo', @@ -109,7 +109,7 @@ def test_extract_last_archive_dry_run_calls_borg_with_remote_path_flags(): ) module.extract_last_archive_dry_run( - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), 
repository_path='repo', @@ -126,7 +126,7 @@ def test_extract_last_archive_dry_run_calls_borg_with_log_json_flag(): ) module.extract_last_archive_dry_run( - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=True), repository_path='repo', @@ -144,7 +144,7 @@ def test_extract_last_archive_dry_run_calls_borg_with_lock_wait_flags(): ) module.extract_last_archive_dry_run( - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), repository_path='repo', @@ -168,8 +168,7 @@ def test_extract_archive_calls_borg_with_path_flags(): repository='repo', archive='archive', paths=['path1', 'path2'], - location_config={}, - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) @@ -191,8 +190,7 @@ def test_extract_archive_calls_borg_with_remote_path_flags(): repository='repo', archive='archive', paths=None, - location_config={}, - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), remote_path='borg1', @@ -222,8 +220,7 @@ def test_extract_archive_calls_borg_with_numeric_ids_parameter(feature_available repository='repo', archive='archive', paths=None, - location_config={'numeric_ids': True}, - storage_config={}, + config={'numeric_ids': True}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) @@ -245,8 +242,7 @@ def test_extract_archive_calls_borg_with_umask_flags(): repository='repo', archive='archive', paths=None, - location_config={}, - storage_config={'umask': '0770'}, + config={'umask': '0770'}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) @@ -265,8 +261,7 @@ def test_extract_archive_calls_borg_with_log_json_flags(): repository='repo', archive='archive', paths=None, - location_config={}, - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=True), ) @@ -288,8 +283,7 @@ def 
test_extract_archive_calls_borg_with_lock_wait_flags(): repository='repo', archive='archive', paths=None, - location_config={}, - storage_config={'lock_wait': '5'}, + config={'lock_wait': '5'}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) @@ -312,8 +306,7 @@ def test_extract_archive_with_log_info_calls_borg_with_info_parameter(): repository='repo', archive='archive', paths=None, - location_config={}, - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) @@ -338,8 +331,7 @@ def test_extract_archive_with_log_debug_calls_borg_with_debug_flags(): repository='repo', archive='archive', paths=None, - location_config={}, - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) @@ -361,8 +353,7 @@ def test_extract_archive_calls_borg_with_dry_run_parameter(): repository='repo', archive='archive', paths=None, - location_config={}, - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) @@ -384,8 +375,7 @@ def test_extract_archive_calls_borg_with_destination_path(): repository='repo', archive='archive', paths=None, - location_config={}, - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), destination_path='/dest', @@ -408,8 +398,7 @@ def test_extract_archive_calls_borg_with_strip_components(): repository='repo', archive='archive', paths=None, - location_config={}, - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), strip_components=5, @@ -442,8 +431,7 @@ def test_extract_archive_calls_borg_with_strip_components_calculated_from_all(): repository='repo', archive='archive', paths=['foo/bar/baz.txt', 'foo/bar.txt'], - location_config={}, - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), strip_components='all', @@ -467,8 +455,7 @@ def 
test_extract_archive_with_strip_components_all_and_no_paths_raises(): repository='repo', archive='archive', paths=None, - location_config={}, - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), strip_components='all', @@ -497,8 +484,7 @@ def test_extract_archive_calls_borg_with_progress_parameter(): repository='repo', archive='archive', paths=None, - location_config={}, - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), progress=True, @@ -514,8 +500,7 @@ def test_extract_archive_with_progress_and_extract_to_stdout_raises(): repository='repo', archive='archive', paths=None, - location_config={}, - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), progress=True, @@ -548,8 +533,7 @@ def test_extract_archive_calls_borg_with_stdout_parameter_and_returns_process(): repository='repo', archive='archive', paths=None, - location_config={}, - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), extract_to_stdout=True, @@ -579,8 +563,7 @@ def test_extract_archive_skips_abspath_for_remote_repository(): repository='server:repo', archive='archive', paths=None, - location_config={}, - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) diff --git a/tests/unit/borg/test_info.py b/tests/unit/borg/test_info.py index e7b126a6..3e93bb73 100644 --- a/tests/unit/borg/test_info.py +++ b/tests/unit/borg/test_info.py @@ -27,7 +27,7 @@ def test_display_archives_info_calls_borg_with_parameters(): module.display_archives_info( repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', global_arguments=flexmock(log_json=False), info_arguments=flexmock(archive=None, json=False, prefix=None, match_archives=None), @@ -53,7 +53,7 @@ def test_display_archives_info_with_log_info_calls_borg_with_info_parameter(): 
insert_logging_mock(logging.INFO) module.display_archives_info( repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', global_arguments=flexmock(log_json=False), info_arguments=flexmock(archive=None, json=False, prefix=None, match_archives=None), @@ -78,7 +78,7 @@ def test_display_archives_info_with_log_info_and_json_suppresses_most_borg_outpu insert_logging_mock(logging.INFO) json_output = module.display_archives_info( repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', global_arguments=flexmock(log_json=False), info_arguments=flexmock(archive=None, json=True, prefix=None, match_archives=None), @@ -107,7 +107,7 @@ def test_display_archives_info_with_log_debug_calls_borg_with_debug_parameter(): module.display_archives_info( repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', global_arguments=flexmock(log_json=False), info_arguments=flexmock(archive=None, json=False, prefix=None, match_archives=None), @@ -132,7 +132,7 @@ def test_display_archives_info_with_log_debug_and_json_suppresses_most_borg_outp insert_logging_mock(logging.DEBUG) json_output = module.display_archives_info( repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', global_arguments=flexmock(log_json=False), info_arguments=flexmock(archive=None, json=True, prefix=None, match_archives=None), @@ -158,7 +158,7 @@ def test_display_archives_info_with_json_calls_borg_with_json_parameter(): json_output = module.display_archives_info( repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', global_arguments=flexmock(log_json=False), info_arguments=flexmock(archive=None, json=True, prefix=None, match_archives=None), @@ -186,7 +186,7 @@ def test_display_archives_info_with_archive_calls_borg_with_match_archives_param module.display_archives_info( repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', 
global_arguments=flexmock(log_json=False), info_arguments=flexmock(archive='archive', json=False, prefix=None, match_archives=None), @@ -212,7 +212,7 @@ def test_display_archives_info_with_local_path_calls_borg_via_local_path(): module.display_archives_info( repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', global_arguments=flexmock(log_json=False), info_arguments=flexmock(archive=None, json=False, prefix=None, match_archives=None), @@ -242,7 +242,7 @@ def test_display_archives_info_with_remote_path_calls_borg_with_remote_path_para module.display_archives_info( repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', global_arguments=flexmock(log_json=False), info_arguments=flexmock(archive=None, json=False, prefix=None, match_archives=None), @@ -272,7 +272,7 @@ def test_display_archives_info_with_log_json_calls_borg_with_log_json_parameters module.display_archives_info( repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', global_arguments=flexmock(log_json=True), info_arguments=flexmock(archive=None, json=False, prefix=None, match_archives=None), @@ -291,7 +291,7 @@ def test_display_archives_info_with_lock_wait_calls_borg_with_lock_wait_paramete ).and_return(()) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo')) - storage_config = {'lock_wait': 5} + config = {'lock_wait': 5} flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'info', '--lock-wait', '5', '--repo', 'repo'), @@ -302,7 +302,7 @@ def test_display_archives_info_with_lock_wait_calls_borg_with_lock_wait_paramete module.display_archives_info( repository_path='repo', - storage_config=storage_config, + config=config, local_borg_version='2.3.4', global_arguments=flexmock(log_json=False), 
info_arguments=flexmock(archive=None, json=False, prefix=None, match_archives=None), @@ -331,7 +331,7 @@ def test_display_archives_info_transforms_prefix_into_match_archives_parameters( module.display_archives_info( repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', global_arguments=flexmock(log_json=False), info_arguments=flexmock(archive=None, json=False, prefix='foo'), @@ -360,7 +360,7 @@ def test_display_archives_info_prefers_prefix_over_archive_name_format(): module.display_archives_info( repository_path='repo', - storage_config={'archive_name_format': 'bar-{now}'}, # noqa: FS003 + config={'archive_name_format': 'bar-{now}'}, # noqa: FS003 local_borg_version='2.3.4', global_arguments=flexmock(log_json=False), info_arguments=flexmock(archive=None, json=False, prefix='foo'), @@ -386,7 +386,7 @@ def test_display_archives_info_transforms_archive_name_format_into_match_archive module.display_archives_info( repository_path='repo', - storage_config={'archive_name_format': 'bar-{now}'}, # noqa: FS003 + config={'archive_name_format': 'bar-{now}'}, # noqa: FS003 local_borg_version='2.3.4', global_arguments=flexmock(log_json=False), info_arguments=flexmock(archive=None, json=False, prefix=None, match_archives=None), @@ -412,7 +412,7 @@ def test_display_archives_with_match_archives_option_calls_borg_with_match_archi module.display_archives_info( repository_path='repo', - storage_config={ + config={ 'archive_name_format': 'bar-{now}', # noqa: FS003 'match_archives': 'sh:foo-*', }, @@ -441,7 +441,7 @@ def test_display_archives_with_match_archives_flag_calls_borg_with_match_archive module.display_archives_info( repository_path='repo', - storage_config={'archive_name_format': 'bar-{now}'}, # noqa: FS003 + config={'archive_name_format': 'bar-{now}'}, # noqa: FS003 local_borg_version='2.3.4', global_arguments=flexmock(log_json=False), info_arguments=flexmock(archive=None, json=False, prefix=None, match_archives='sh:foo-*'), @@ -471,7 +471,7 @@ 
def test_display_archives_info_passes_through_arguments_to_borg(argument_name): module.display_archives_info( repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', global_arguments=flexmock(log_json=False), info_arguments=flexmock( @@ -523,7 +523,7 @@ def test_display_archives_info_with_date_based_matching_calls_borg_with_date_bas ) module.display_archives_info( repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', global_arguments=flexmock(log_json=False), info_arguments=info_arguments, diff --git a/tests/unit/borg/test_list.py b/tests/unit/borg/test_list.py index 4e3a5f7c..2f82b802 100644 --- a/tests/unit/borg/test_list.py +++ b/tests/unit/borg/test_list.py @@ -17,7 +17,7 @@ def test_make_list_command_includes_log_info(): command = module.make_list_command( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', list_arguments=flexmock(archive=None, paths=None, json=False), global_arguments=flexmock(log_json=False), @@ -34,7 +34,7 @@ def test_make_list_command_includes_json_but_not_info(): command = module.make_list_command( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', list_arguments=flexmock(archive=None, paths=None, json=True), global_arguments=flexmock(log_json=False), @@ -51,7 +51,7 @@ def test_make_list_command_includes_log_debug(): command = module.make_list_command( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', list_arguments=flexmock(archive=None, paths=None, json=False), global_arguments=flexmock(log_json=False), @@ -68,7 +68,7 @@ def test_make_list_command_includes_json_but_not_debug(): command = module.make_list_command( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', list_arguments=flexmock(archive=None, paths=None, json=True), global_arguments=flexmock(log_json=False), @@ -84,7 +84,7 @@ def test_make_list_command_includes_json(): command = 
module.make_list_command( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', list_arguments=flexmock(archive=None, paths=None, json=True), global_arguments=flexmock(log_json=False), @@ -100,7 +100,7 @@ def test_make_list_command_includes_log_json(): command = module.make_list_command( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', list_arguments=flexmock(archive=None, paths=None, json=False), global_arguments=flexmock(log_json=True), @@ -118,7 +118,7 @@ def test_make_list_command_includes_lock_wait(): command = module.make_list_command( repository_path='repo', - storage_config={'lock_wait': 5}, + config={'lock_wait': 5}, local_borg_version='1.2.3', list_arguments=flexmock(archive=None, paths=None, json=False), global_arguments=flexmock(log_json=False), @@ -136,7 +136,7 @@ def test_make_list_command_includes_archive(): command = module.make_list_command( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', list_arguments=flexmock(archive='archive', paths=None, json=False), global_arguments=flexmock(log_json=False), @@ -154,7 +154,7 @@ def test_make_list_command_includes_archive_and_path(): command = module.make_list_command( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', list_arguments=flexmock(archive='archive', paths=['var/lib'], json=False), global_arguments=flexmock(log_json=False), @@ -170,7 +170,7 @@ def test_make_list_command_includes_local_path(): command = module.make_list_command( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', list_arguments=flexmock(archive=None, paths=None, json=False), global_arguments=flexmock(log_json=False), @@ -193,7 +193,7 @@ def test_make_list_command_includes_remote_path(): command = module.make_list_command( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', list_arguments=flexmock(archive=None, paths=None, 
json=False), global_arguments=flexmock(log_json=False), @@ -210,7 +210,7 @@ def test_make_list_command_includes_short(): command = module.make_list_command( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', list_arguments=flexmock(archive=None, paths=None, json=False, short=True), global_arguments=flexmock(log_json=False), @@ -242,7 +242,7 @@ def test_make_list_command_includes_additional_flags(argument_name): command = module.make_list_command( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', list_arguments=flexmock( archive=None, @@ -293,7 +293,7 @@ def test_capture_archive_listing_does_not_raise(): module.capture_archive_listing( repository_path='repo', archive='archive', - storage_config=flexmock(), + config=flexmock(), local_borg_version=flexmock(), global_arguments=flexmock(log_json=False), ) @@ -319,7 +319,7 @@ def test_list_archive_calls_borg_with_flags(): flexmock(module.feature).should_receive('available').and_return(False) flexmock(module).should_receive('make_list_command').with_args( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', list_arguments=list_arguments, global_arguments=global_arguments, @@ -337,7 +337,7 @@ def test_list_archive_calls_borg_with_flags(): module.list_archive( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', list_arguments=list_arguments, global_arguments=global_arguments, @@ -355,7 +355,7 @@ def test_list_archive_with_archive_and_json_errors(): with pytest.raises(ValueError): module.list_archive( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', list_arguments=list_arguments, global_arguments=flexmock(log_json=False), @@ -382,7 +382,7 @@ def test_list_archive_calls_borg_with_local_path(): flexmock(module.feature).should_receive('available').and_return(False) flexmock(module).should_receive('make_list_command').with_args( repository_path='repo', - 
storage_config={}, + config={}, local_borg_version='1.2.3', list_arguments=list_arguments, global_arguments=global_arguments, @@ -400,7 +400,7 @@ def test_list_archive_calls_borg_with_local_path(): module.list_archive( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', list_arguments=list_arguments, global_arguments=global_arguments, @@ -450,7 +450,7 @@ def test_list_archive_calls_borg_multiple_times_with_find_paths(): module.list_archive( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', list_arguments=list_arguments, global_arguments=flexmock(log_json=False), @@ -477,7 +477,7 @@ def test_list_archive_calls_borg_with_archive(): flexmock(module.feature).should_receive('available').and_return(False) flexmock(module).should_receive('make_list_command').with_args( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', list_arguments=list_arguments, global_arguments=global_arguments, @@ -495,7 +495,7 @@ def test_list_archive_calls_borg_with_archive(): module.list_archive( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', list_arguments=list_arguments, global_arguments=global_arguments, @@ -526,7 +526,7 @@ def test_list_archive_without_archive_delegates_to_list_repository(): module.list_archive( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', list_arguments=list_arguments, global_arguments=flexmock(log_json=False), @@ -557,7 +557,7 @@ def test_list_archive_with_borg_features_without_archive_delegates_to_list_repos module.list_archive( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', list_arguments=list_arguments, global_arguments=flexmock(log_json=False), @@ -595,7 +595,7 @@ def test_list_archive_with_archive_ignores_archive_filter_flag( ).and_return(False) flexmock(module).should_receive('make_list_command').with_args( repository_path='repo', - 
storage_config={}, + config={}, local_borg_version='1.2.3', list_arguments=argparse.Namespace( archive='archive', paths=None, json=False, find_paths=None, **default_filter_flags @@ -615,7 +615,7 @@ def test_list_archive_with_archive_ignores_archive_filter_flag( module.list_archive( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', list_arguments=argparse.Namespace( archive='archive', paths=None, json=False, find_paths=None, **altered_filter_flags @@ -654,7 +654,7 @@ def test_list_archive_with_find_paths_allows_archive_filter_flag_but_only_passes flexmock(module.rlist).should_receive('make_rlist_command').with_args( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', rlist_arguments=argparse.Namespace( repository='repo', short=True, format=None, json=None, **altered_filter_flags @@ -671,7 +671,7 @@ def test_list_archive_with_find_paths_allows_archive_filter_flag_but_only_passes flexmock(module).should_receive('make_list_command').with_args( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', list_arguments=argparse.Namespace( repository='repo', @@ -690,7 +690,7 @@ def test_list_archive_with_find_paths_allows_archive_filter_flag_but_only_passes flexmock(module).should_receive('make_list_command').with_args( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', list_arguments=argparse.Namespace( repository='repo', @@ -724,7 +724,7 @@ def test_list_archive_with_find_paths_allows_archive_filter_flag_but_only_passes module.list_archive( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', list_arguments=argparse.Namespace( repository='repo', diff --git a/tests/unit/borg/test_mount.py b/tests/unit/borg/test_mount.py index bd93cf3d..9ee37f91 100644 --- a/tests/unit/borg/test_mount.py +++ b/tests/unit/borg/test_mount.py @@ -26,7 +26,7 @@ def test_mount_archive_calls_borg_with_required_flags(): 
repository_path='repo', archive=None, mount_arguments=mount_arguments, - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) @@ -49,7 +49,7 @@ def test_mount_archive_with_borg_features_calls_borg_with_repository_and_match_a repository_path='repo', archive='archive', mount_arguments=mount_arguments, - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) @@ -67,7 +67,7 @@ def test_mount_archive_without_archive_calls_borg_with_repository_flags_only(): repository_path='repo', archive='archive', mount_arguments=mount_arguments, - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) @@ -87,7 +87,7 @@ def test_mount_archive_calls_borg_with_path_flags(): repository_path='repo', archive='archive', mount_arguments=mount_arguments, - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) @@ -107,7 +107,7 @@ def test_mount_archive_calls_borg_with_remote_path_flags(): repository_path='repo', archive='archive', mount_arguments=mount_arguments, - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), remote_path='borg1', @@ -126,7 +126,7 @@ def test_mount_archive_calls_borg_with_umask_flags(): repository_path='repo', archive='archive', mount_arguments=mount_arguments, - storage_config={'umask': '0770'}, + config={'umask': '0770'}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) @@ -144,7 +144,7 @@ def test_mount_archive_calls_borg_with_log_json_flags(): repository_path='repo', archive='archive', mount_arguments=mount_arguments, - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=True), ) @@ -162,7 +162,7 @@ def test_mount_archive_calls_borg_with_lock_wait_flags(): repository_path='repo', archive='archive', mount_arguments=mount_arguments, - 
storage_config={'lock_wait': '5'}, + config={'lock_wait': '5'}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) @@ -181,7 +181,7 @@ def test_mount_archive_with_log_info_calls_borg_with_info_parameter(): repository_path='repo', archive='archive', mount_arguments=mount_arguments, - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) @@ -200,7 +200,7 @@ def test_mount_archive_with_log_debug_calls_borg_with_debug_flags(): repository_path='repo', archive='archive', mount_arguments=mount_arguments, - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) @@ -224,7 +224,7 @@ def test_mount_archive_calls_borg_with_foreground_parameter(): repository_path='repo', archive='archive', mount_arguments=mount_arguments, - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) @@ -244,7 +244,7 @@ def test_mount_archive_calls_borg_with_options_flags(): repository_path='repo', archive='archive', mount_arguments=mount_arguments, - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) @@ -305,7 +305,7 @@ def test_mount_archive_with_date_based_matching_calls_borg_with_date_based_flags repository_path='repo', archive=None, mount_arguments=mount_arguments, - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) diff --git a/tests/unit/borg/test_prune.py b/tests/unit/borg/test_prune.py index 584d0d50..971eb1e6 100644 --- a/tests/unit/borg/test_prune.py +++ b/tests/unit/borg/test_prune.py @@ -1,5 +1,4 @@ import logging -from collections import OrderedDict from flexmock import flexmock @@ -22,23 +21,28 @@ BASE_PRUNE_FLAGS = ('--keep-daily', '1', '--keep-weekly', '2', '--keep-monthly', def test_make_prune_flags_returns_flags_from_config(): - retention_config = OrderedDict((('keep_daily', 1), ('keep_weekly', 2), 
('keep_monthly', 3))) + config = { + 'keep_daily': 1, + 'keep_weekly': 2, + 'keep_monthly': 3, + } flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.flags).should_receive('make_match_archives_flags').and_return(()) - result = module.make_prune_flags({}, retention_config, local_borg_version='1.2.3') + result = module.make_prune_flags(config, local_borg_version='1.2.3') assert result == BASE_PRUNE_FLAGS def test_make_prune_flags_accepts_prefix_with_placeholders(): - retention_config = OrderedDict( - (('keep_daily', 1), ('prefix', 'Documents_{hostname}-{now}')) # noqa: FS003 - ) + config = { + 'keep_daily': 1, + 'prefix': 'Documents_{hostname}-{now}', # noqa: FS003 + } flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.flags).should_receive('make_match_archives_flags').and_return(()) - result = module.make_prune_flags({}, retention_config, local_borg_version='1.2.3') + result = module.make_prune_flags(config, local_borg_version='1.2.3') expected = ( '--keep-daily', @@ -51,13 +55,14 @@ def test_make_prune_flags_accepts_prefix_with_placeholders(): def test_make_prune_flags_with_prefix_without_borg_features_uses_glob_archives(): - retention_config = OrderedDict( - (('keep_daily', 1), ('prefix', 'Documents_{hostname}-{now}')) # noqa: FS003 - ) + config = { + 'keep_daily': 1, + 'prefix': 'Documents_{hostname}-{now}', # noqa: FS003 + } flexmock(module.feature).should_receive('available').and_return(False) flexmock(module.flags).should_receive('make_match_archives_flags').and_return(()) - result = module.make_prune_flags({}, retention_config, local_borg_version='1.2.3') + result = module.make_prune_flags(config, local_borg_version='1.2.3') expected = ( '--keep-daily', @@ -70,12 +75,15 @@ def test_make_prune_flags_with_prefix_without_borg_features_uses_glob_archives() def test_make_prune_flags_prefers_prefix_to_archive_name_format(): - storage_config = {'archive_name_format': 'bar-{now}'} # noqa: FS003 - 
retention_config = OrderedDict((('keep_daily', 1), ('prefix', 'bar-'))) + config = { + 'archive_name_format': 'bar-{now}', # noqa: FS003 + 'keep_daily': 1, + 'prefix': 'bar-', + } flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.flags).should_receive('make_match_archives_flags').never() - result = module.make_prune_flags(storage_config, retention_config, local_borg_version='1.2.3') + result = module.make_prune_flags(config, local_borg_version='1.2.3') expected = ( '--keep-daily', @@ -88,14 +96,17 @@ def test_make_prune_flags_prefers_prefix_to_archive_name_format(): def test_make_prune_flags_without_prefix_uses_archive_name_format_instead(): - storage_config = {'archive_name_format': 'bar-{now}'} # noqa: FS003 - retention_config = OrderedDict((('keep_daily', 1), ('prefix', None))) + config = { + 'archive_name_format': 'bar-{now}', # noqa: FS003 + 'keep_daily': 1, + 'prefix': None, + } flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.flags).should_receive('make_match_archives_flags').with_args( None, 'bar-{now}', '1.2.3' # noqa: FS003 ).and_return(('--match-archives', 'sh:bar-*')) - result = module.make_prune_flags(storage_config, retention_config, local_borg_version='1.2.3') + result = module.make_prune_flags(config, local_borg_version='1.2.3') expected = ( '--keep-daily', @@ -121,8 +132,7 @@ def test_prune_archives_calls_borg_with_flags(): module.prune_archives( dry_run=False, repository_path='repo', - storage_config={}, - retention_config=flexmock(), + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), prune_arguments=prune_arguments, @@ -140,9 +150,8 @@ def test_prune_archives_with_log_info_calls_borg_with_info_flag(): prune_arguments = flexmock(stats=False, list_archives=False) module.prune_archives( repository_path='repo', - storage_config={}, + config={}, dry_run=False, - retention_config=flexmock(), local_borg_version='1.2.3', 
global_arguments=flexmock(log_json=False), prune_arguments=prune_arguments, @@ -160,9 +169,8 @@ def test_prune_archives_with_log_debug_calls_borg_with_debug_flag(): prune_arguments = flexmock(stats=False, list_archives=False) module.prune_archives( repository_path='repo', - storage_config={}, + config={}, dry_run=False, - retention_config=flexmock(), local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), prune_arguments=prune_arguments, @@ -179,9 +187,8 @@ def test_prune_archives_with_dry_run_calls_borg_with_dry_run_flag(): prune_arguments = flexmock(stats=False, list_archives=False) module.prune_archives( repository_path='repo', - storage_config={}, + config={}, dry_run=True, - retention_config=flexmock(), local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), prune_arguments=prune_arguments, @@ -199,8 +206,7 @@ def test_prune_archives_with_local_path_calls_borg_via_local_path(): module.prune_archives( dry_run=False, repository_path='repo', - storage_config={}, - retention_config=flexmock(), + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), local_path='borg1', @@ -219,8 +225,7 @@ def test_prune_archives_with_remote_path_calls_borg_with_remote_path_flags(): module.prune_archives( dry_run=False, repository_path='repo', - storage_config={}, - retention_config=flexmock(), + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), remote_path='borg1', @@ -239,8 +244,7 @@ def test_prune_archives_with_stats_calls_borg_with_stats_flag_and_answer_output_ module.prune_archives( dry_run=False, repository_path='repo', - storage_config={}, - retention_config=flexmock(), + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), prune_arguments=prune_arguments, @@ -258,8 +262,7 @@ def test_prune_archives_with_files_calls_borg_with_list_flag_and_answer_output_l module.prune_archives( dry_run=False, repository_path='repo', - storage_config={}, - 
retention_config=flexmock(), + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), prune_arguments=prune_arguments, @@ -269,7 +272,7 @@ def test_prune_archives_with_files_calls_borg_with_list_flag_and_answer_output_l def test_prune_archives_with_umask_calls_borg_with_umask_flags(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - storage_config = {'umask': '077'} + config = {'umask': '077'} flexmock(module).should_receive('make_prune_flags').and_return(BASE_PRUNE_FLAGS) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) insert_execute_command_mock(PRUNE_COMMAND + ('--umask', '077', 'repo'), logging.INFO) @@ -278,8 +281,7 @@ def test_prune_archives_with_umask_calls_borg_with_umask_flags(): module.prune_archives( dry_run=False, repository_path='repo', - storage_config=storage_config, - retention_config=flexmock(), + config=config, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), prune_arguments=prune_arguments, @@ -297,8 +299,7 @@ def test_prune_archives_with_log_json_calls_borg_with_log_json_flag(): module.prune_archives( dry_run=False, repository_path='repo', - storage_config={}, - retention_config=flexmock(), + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=True), prune_arguments=prune_arguments, @@ -308,7 +309,7 @@ def test_prune_archives_with_log_json_calls_borg_with_log_json_flag(): def test_prune_archives_with_lock_wait_calls_borg_with_lock_wait_flags(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - storage_config = {'lock_wait': 5} + config = {'lock_wait': 5} flexmock(module).should_receive('make_prune_flags').and_return(BASE_PRUNE_FLAGS) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) insert_execute_command_mock(PRUNE_COMMAND + 
('--lock-wait', '5', 'repo'), logging.INFO) @@ -317,8 +318,7 @@ def test_prune_archives_with_lock_wait_calls_borg_with_lock_wait_flags(): module.prune_archives( dry_run=False, repository_path='repo', - storage_config=storage_config, - retention_config=flexmock(), + config=config, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), prune_arguments=prune_arguments, @@ -336,8 +336,7 @@ def test_prune_archives_with_extra_borg_options_calls_borg_with_extra_options(): module.prune_archives( dry_run=False, repository_path='repo', - storage_config={'extra_borg_options': {'prune': '--extra --options'}}, - retention_config=flexmock(), + config={'extra_borg_options': {'prune': '--extra --options'}}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), prune_arguments=prune_arguments, @@ -400,8 +399,7 @@ def test_prune_archives_with_date_based_matching_calls_borg_with_date_based_flag module.prune_archives( dry_run=False, repository_path='repo', - storage_config={}, - retention_config=flexmock(), + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), prune_arguments=prune_arguments, diff --git a/tests/unit/borg/test_rcreate.py b/tests/unit/borg/test_rcreate.py index 2f71a8ff..be11a829 100644 --- a/tests/unit/borg/test_rcreate.py +++ b/tests/unit/borg/test_rcreate.py @@ -46,7 +46,7 @@ def test_create_repository_calls_borg_with_flags(): module.create_repository( dry_run=False, repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', global_arguments=flexmock(log_json=False), encryption_mode='repokey', @@ -67,7 +67,7 @@ def test_create_repository_with_dry_run_skips_borg_call(): module.create_repository( dry_run=True, repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', global_arguments=flexmock(log_json=False), encryption_mode='repokey', @@ -92,7 +92,7 @@ def test_create_repository_raises_for_borg_rcreate_error(): module.create_repository( dry_run=False, 
repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', global_arguments=flexmock(log_json=False), encryption_mode='repokey', @@ -112,7 +112,7 @@ def test_create_repository_skips_creation_when_repository_already_exists(): module.create_repository( dry_run=False, repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', global_arguments=flexmock(log_json=False), encryption_mode='repokey', @@ -128,7 +128,7 @@ def test_create_repository_raises_for_unknown_rinfo_command_error(): module.create_repository( dry_run=False, repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', global_arguments=flexmock(log_json=False), encryption_mode='repokey', @@ -149,7 +149,7 @@ def test_create_repository_with_source_repository_calls_borg_with_other_repo_fla module.create_repository( dry_run=False, repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', global_arguments=flexmock(log_json=False), encryption_mode='repokey', @@ -171,7 +171,7 @@ def test_create_repository_with_copy_crypt_key_calls_borg_with_copy_crypt_key_fl module.create_repository( dry_run=False, repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', global_arguments=flexmock(log_json=False), encryption_mode='repokey', @@ -193,7 +193,7 @@ def test_create_repository_with_append_only_calls_borg_with_append_only_flag(): module.create_repository( dry_run=False, repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', global_arguments=flexmock(log_json=False), encryption_mode='repokey', @@ -215,7 +215,7 @@ def test_create_repository_with_storage_quota_calls_borg_with_storage_quota_flag module.create_repository( dry_run=False, repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', global_arguments=flexmock(log_json=False), encryption_mode='repokey', @@ -237,7 +237,7 @@ def 
test_create_repository_with_make_parent_dirs_calls_borg_with_make_parent_dir module.create_repository( dry_run=False, repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', global_arguments=flexmock(log_json=False), encryption_mode='repokey', @@ -260,7 +260,7 @@ def test_create_repository_with_log_info_calls_borg_with_info_flag(): module.create_repository( dry_run=False, repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', global_arguments=flexmock(log_json=False), encryption_mode='repokey', @@ -282,7 +282,7 @@ def test_create_repository_with_log_debug_calls_borg_with_debug_flag(): module.create_repository( dry_run=False, repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', global_arguments=flexmock(log_json=False), encryption_mode='repokey', @@ -303,7 +303,7 @@ def test_create_repository_with_log_json_calls_borg_with_log_json_flag(): module.create_repository( dry_run=False, repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', global_arguments=flexmock(log_json=True), encryption_mode='repokey', @@ -324,7 +324,7 @@ def test_create_repository_with_lock_wait_calls_borg_with_lock_wait_flag(): module.create_repository( dry_run=False, repository_path='repo', - storage_config={'lock_wait': 5}, + config={'lock_wait': 5}, local_borg_version='2.3.4', global_arguments=flexmock(log_json=False), encryption_mode='repokey', @@ -345,7 +345,7 @@ def test_create_repository_with_local_path_calls_borg_via_local_path(): module.create_repository( dry_run=False, repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', global_arguments=flexmock(log_json=False), encryption_mode='repokey', @@ -367,7 +367,7 @@ def test_create_repository_with_remote_path_calls_borg_with_remote_path_flag(): module.create_repository( dry_run=False, repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', 
global_arguments=flexmock(log_json=False), encryption_mode='repokey', @@ -389,7 +389,7 @@ def test_create_repository_with_extra_borg_options_calls_borg_with_extra_options module.create_repository( dry_run=False, repository_path='repo', - storage_config={'extra_borg_options': {'rcreate': '--extra --options'}}, + config={'extra_borg_options': {'rcreate': '--extra --options'}}, local_borg_version='2.3.4', global_arguments=flexmock(log_json=False), encryption_mode='repokey', diff --git a/tests/unit/borg/test_rinfo.py b/tests/unit/borg/test_rinfo.py index a6e3f08c..8628b9aa 100644 --- a/tests/unit/borg/test_rinfo.py +++ b/tests/unit/borg/test_rinfo.py @@ -27,7 +27,7 @@ def test_display_repository_info_calls_borg_with_flags(): module.display_repository_info( repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', rinfo_arguments=flexmock(json=False), global_arguments=flexmock(log_json=False), @@ -49,7 +49,7 @@ def test_display_repository_info_without_borg_features_calls_borg_with_info_sub_ module.display_repository_info( repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', rinfo_arguments=flexmock(json=False), global_arguments=flexmock(log_json=False), @@ -76,7 +76,7 @@ def test_display_repository_info_with_log_info_calls_borg_with_info_flag(): insert_logging_mock(logging.INFO) module.display_repository_info( repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', rinfo_arguments=flexmock(json=False), global_arguments=flexmock(log_json=False), @@ -102,7 +102,7 @@ def test_display_repository_info_with_log_info_and_json_suppresses_most_borg_out insert_logging_mock(logging.INFO) json_output = module.display_repository_info( repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', rinfo_arguments=flexmock(json=True), global_arguments=flexmock(log_json=False), @@ -132,7 +132,7 @@ def test_display_repository_info_with_log_debug_calls_borg_with_debug_flag(): 
module.display_repository_info( repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', rinfo_arguments=flexmock(json=False), global_arguments=flexmock(log_json=False), @@ -158,7 +158,7 @@ def test_display_repository_info_with_log_debug_and_json_suppresses_most_borg_ou insert_logging_mock(logging.DEBUG) json_output = module.display_repository_info( repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', rinfo_arguments=flexmock(json=True), global_arguments=flexmock(log_json=False), @@ -185,7 +185,7 @@ def test_display_repository_info_with_json_calls_borg_with_json_flag(): json_output = module.display_repository_info( repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', rinfo_arguments=flexmock(json=True), global_arguments=flexmock(log_json=False), @@ -214,7 +214,7 @@ def test_display_repository_info_with_local_path_calls_borg_via_local_path(): module.display_repository_info( repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', rinfo_arguments=flexmock(json=False), global_arguments=flexmock(log_json=False), @@ -242,7 +242,7 @@ def test_display_repository_info_with_remote_path_calls_borg_with_remote_path_fl module.display_repository_info( repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', rinfo_arguments=flexmock(json=False), global_arguments=flexmock(log_json=False), @@ -270,7 +270,7 @@ def test_display_repository_info_with_log_json_calls_borg_with_log_json_flags(): module.display_repository_info( repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', rinfo_arguments=flexmock(json=False), global_arguments=flexmock(log_json=True), @@ -280,7 +280,7 @@ def test_display_repository_info_with_log_json_calls_borg_with_log_json_flags(): def test_display_repository_info_with_lock_wait_calls_borg_with_lock_wait_flags(): 
flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - storage_config = {'lock_wait': 5} + config = {'lock_wait': 5} flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.flags).should_receive('make_repository_flags').and_return( ( @@ -298,7 +298,7 @@ def test_display_repository_info_with_lock_wait_calls_borg_with_lock_wait_flags( module.display_repository_info( repository_path='repo', - storage_config=storage_config, + config=config, local_borg_version='2.3.4', rinfo_arguments=flexmock(json=False), global_arguments=flexmock(log_json=False), diff --git a/tests/unit/borg/test_rlist.py b/tests/unit/borg/test_rlist.py index 76bda987..65fcef1d 100644 --- a/tests/unit/borg/test_rlist.py +++ b/tests/unit/borg/test_rlist.py @@ -23,7 +23,7 @@ def test_resolve_archive_name_passes_through_non_latest_archive_name(): module.resolve_archive_name( 'repo', archive, - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) @@ -43,7 +43,7 @@ def test_resolve_archive_name_calls_borg_with_flags(): module.resolve_archive_name( 'repo', 'latest', - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) @@ -64,7 +64,7 @@ def test_resolve_archive_name_with_log_info_calls_borg_without_info_flag(): module.resolve_archive_name( 'repo', 'latest', - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) @@ -85,7 +85,7 @@ def test_resolve_archive_name_with_log_debug_calls_borg_without_debug_flag(): module.resolve_archive_name( 'repo', 'latest', - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) @@ -105,7 +105,7 @@ def test_resolve_archive_name_with_local_path_calls_borg_via_local_path(): module.resolve_archive_name( 'repo', 'latest', - storage_config={}, + config={}, 
local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), local_path='borg1', @@ -126,7 +126,7 @@ def test_resolve_archive_name_with_remote_path_calls_borg_with_remote_path_flags module.resolve_archive_name( 'repo', 'latest', - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), remote_path='borg1', @@ -146,7 +146,7 @@ def test_resolve_archive_name_without_archives_raises(): module.resolve_archive_name( 'repo', 'latest', - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) @@ -165,7 +165,7 @@ def test_resolve_archive_name_with_log_json_calls_borg_with_log_json_flags(): module.resolve_archive_name( 'repo', 'latest', - storage_config={}, + config={}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=True), ) @@ -186,7 +186,7 @@ def test_resolve_archive_name_with_lock_wait_calls_borg_with_lock_wait_flags(): module.resolve_archive_name( 'repo', 'latest', - storage_config={'lock_wait': 'okay'}, + config={'lock_wait': 'okay'}, local_borg_version='1.2.3', global_arguments=flexmock(log_json=False), ) @@ -205,7 +205,7 @@ def test_make_rlist_command_includes_log_info(): command = module.make_rlist_command( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', rlist_arguments=flexmock( archive=None, paths=None, json=False, prefix=None, match_archives=None @@ -227,7 +227,7 @@ def test_make_rlist_command_includes_json_but_not_info(): command = module.make_rlist_command( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', rlist_arguments=flexmock( archive=None, paths=None, json=True, prefix=None, match_archives=None @@ -249,7 +249,7 @@ def test_make_rlist_command_includes_log_debug(): command = module.make_rlist_command( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', rlist_arguments=flexmock( archive=None, paths=None, json=False, prefix=None, 
match_archives=None @@ -271,7 +271,7 @@ def test_make_rlist_command_includes_json_but_not_debug(): command = module.make_rlist_command( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', rlist_arguments=flexmock( archive=None, paths=None, json=True, prefix=None, match_archives=None @@ -292,7 +292,7 @@ def test_make_rlist_command_includes_json(): command = module.make_rlist_command( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', rlist_arguments=flexmock( archive=None, paths=None, json=True, prefix=None, match_archives=None @@ -315,7 +315,7 @@ def test_make_rlist_command_includes_log_json(): command = module.make_rlist_command( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', rlist_arguments=flexmock( archive=None, paths=None, json=False, prefix=None, match_archives=None @@ -338,7 +338,7 @@ def test_make_rlist_command_includes_lock_wait(): command = module.make_rlist_command( repository_path='repo', - storage_config={'lock_wait': 5}, + config={'lock_wait': 5}, local_borg_version='1.2.3', rlist_arguments=flexmock( archive=None, paths=None, json=False, prefix=None, match_archives=None @@ -359,7 +359,7 @@ def test_make_rlist_command_includes_local_path(): command = module.make_rlist_command( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', rlist_arguments=flexmock( archive=None, paths=None, json=False, prefix=None, match_archives=None @@ -383,7 +383,7 @@ def test_make_rlist_command_includes_remote_path(): command = module.make_rlist_command( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', rlist_arguments=flexmock( archive=None, paths=None, json=False, prefix=None, match_archives=None @@ -407,7 +407,7 @@ def test_make_rlist_command_transforms_prefix_into_match_archives(): command = module.make_rlist_command( repository_path='repo', - storage_config={}, + config={}, 
local_borg_version='1.2.3', rlist_arguments=flexmock(archive=None, paths=None, json=False, prefix='foo'), global_arguments=flexmock(log_json=False), @@ -426,7 +426,7 @@ def test_make_rlist_command_prefers_prefix_over_archive_name_format(): command = module.make_rlist_command( repository_path='repo', - storage_config={'archive_name_format': 'bar-{now}'}, # noqa: FS003 + config={'archive_name_format': 'bar-{now}'}, # noqa: FS003 local_borg_version='1.2.3', rlist_arguments=flexmock(archive=None, paths=None, json=False, prefix='foo'), global_arguments=flexmock(log_json=False), @@ -445,7 +445,7 @@ def test_make_rlist_command_transforms_archive_name_format_into_match_archives() command = module.make_rlist_command( repository_path='repo', - storage_config={'archive_name_format': 'bar-{now}'}, # noqa: FS003 + config={'archive_name_format': 'bar-{now}'}, # noqa: FS003 local_borg_version='1.2.3', rlist_arguments=flexmock( archive=None, paths=None, json=False, prefix=None, match_archives=None @@ -466,7 +466,7 @@ def test_make_rlist_command_includes_short(): command = module.make_rlist_command( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', rlist_arguments=flexmock( archive=None, paths=None, json=False, prefix=None, match_archives=None, short=True @@ -501,7 +501,7 @@ def test_make_rlist_command_includes_additional_flags(argument_name): command = module.make_rlist_command( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', rlist_arguments=flexmock( archive=None, @@ -534,7 +534,7 @@ def test_make_rlist_command_with_match_archives_calls_borg_with_match_archives_f command = module.make_rlist_command( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', rlist_arguments=flexmock( archive=None, @@ -560,7 +560,7 @@ def test_list_repository_calls_borg_with_flags(): flexmock(module.feature).should_receive('available').and_return(False) 
flexmock(module).should_receive('make_rlist_command').with_args( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', rlist_arguments=rlist_arguments, global_arguments=global_arguments, @@ -577,7 +577,7 @@ def test_list_repository_calls_borg_with_flags(): module.list_repository( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', rlist_arguments=rlist_arguments, global_arguments=global_arguments, @@ -594,7 +594,7 @@ def test_list_repository_with_json_returns_borg_output(): flexmock(module.feature).should_receive('available').and_return(False) flexmock(module).should_receive('make_rlist_command').with_args( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', rlist_arguments=rlist_arguments, global_arguments=global_arguments, @@ -607,7 +607,7 @@ def test_list_repository_with_json_returns_borg_output(): assert ( module.list_repository( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', rlist_arguments=rlist_arguments, global_arguments=global_arguments, @@ -628,7 +628,7 @@ def test_make_rlist_command_with_date_based_matching_calls_borg_with_date_based_ command = module.make_rlist_command( repository_path='repo', - storage_config={}, + config={}, local_borg_version='1.2.3', rlist_arguments=flexmock( archive=None, diff --git a/tests/unit/borg/test_transfer.py b/tests/unit/borg/test_transfer.py index 6cd9530b..3e1aa804 100644 --- a/tests/unit/borg/test_transfer.py +++ b/tests/unit/borg/test_transfer.py @@ -27,7 +27,7 @@ def test_transfer_archives_calls_borg_with_flags(): module.transfer_archives( dry_run=False, repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', transfer_arguments=flexmock( archive=None, progress=None, match_archives=None, source_repository=None @@ -58,7 +58,7 @@ def test_transfer_archives_with_dry_run_calls_borg_with_dry_run_flag(): module.transfer_archives( dry_run=True, 
repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', transfer_arguments=flexmock( archive=None, progress=None, match_archives=None, source_repository=None @@ -86,7 +86,7 @@ def test_transfer_archives_with_log_info_calls_borg_with_info_flag(): module.transfer_archives( dry_run=False, repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', transfer_arguments=flexmock( archive=None, progress=None, match_archives=None, source_repository=None @@ -115,7 +115,7 @@ def test_transfer_archives_with_log_debug_calls_borg_with_debug_flag(): module.transfer_archives( dry_run=False, repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', transfer_arguments=flexmock( archive=None, progress=None, match_archives=None, source_repository=None @@ -145,7 +145,7 @@ def test_transfer_archives_with_archive_calls_borg_with_match_archives_flag(): module.transfer_archives( dry_run=False, repository_path='repo', - storage_config={'archive_name_format': 'bar-{now}'}, # noqa: FS003 + config={'archive_name_format': 'bar-{now}'}, # noqa: FS003 local_borg_version='2.3.4', transfer_arguments=flexmock( archive='archive', progress=None, match_archives=None, source_repository=None @@ -175,7 +175,7 @@ def test_transfer_archives_with_match_archives_calls_borg_with_match_archives_fl module.transfer_archives( dry_run=False, repository_path='repo', - storage_config={'archive_name_format': 'bar-{now}'}, # noqa: FS003 + config={'archive_name_format': 'bar-{now}'}, # noqa: FS003 local_borg_version='2.3.4', transfer_arguments=flexmock( archive=None, progress=None, match_archives='sh:foo*', source_repository=None @@ -205,7 +205,7 @@ def test_transfer_archives_with_archive_name_format_calls_borg_with_match_archiv module.transfer_archives( dry_run=False, repository_path='repo', - storage_config={'archive_name_format': 'bar-{now}'}, # noqa: FS003 + config={'archive_name_format': 'bar-{now}'}, # noqa: FS003 
local_borg_version='2.3.4', transfer_arguments=flexmock( archive=None, progress=None, match_archives=None, source_repository=None @@ -233,7 +233,7 @@ def test_transfer_archives_with_local_path_calls_borg_via_local_path(): module.transfer_archives( dry_run=False, repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', transfer_arguments=flexmock( archive=None, progress=None, match_archives=None, source_repository=None @@ -265,7 +265,7 @@ def test_transfer_archives_with_remote_path_calls_borg_with_remote_path_flags(): module.transfer_archives( dry_run=False, repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', transfer_arguments=flexmock( archive=None, progress=None, match_archives=None, source_repository=None @@ -297,7 +297,7 @@ def test_transfer_archives_with_log_json_calls_borg_with_log_json_flags(): module.transfer_archives( dry_run=False, repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', transfer_arguments=flexmock( archive=None, progress=None, match_archives=None, source_repository=None @@ -316,7 +316,7 @@ def test_transfer_archives_with_lock_wait_calls_borg_with_lock_wait_flags(): flexmock(module.flags).should_receive('make_match_archives_flags').and_return(()) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo')) - storage_config = {'lock_wait': 5} + config = {'lock_wait': 5} flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'transfer', '--lock-wait', '5', '--repo', 'repo'), @@ -329,7 +329,7 @@ def test_transfer_archives_with_lock_wait_calls_borg_with_lock_wait_flags(): module.transfer_archives( dry_run=False, repository_path='repo', - storage_config=storage_config, + config=config, local_borg_version='2.3.4', transfer_arguments=flexmock( archive=None, 
progress=None, match_archives=None, source_repository=None @@ -357,7 +357,7 @@ def test_transfer_archives_with_progress_calls_borg_with_progress_flag(): module.transfer_archives( dry_run=False, repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', transfer_arguments=flexmock( archive=None, progress=True, match_archives=None, source_repository=None @@ -389,7 +389,7 @@ def test_transfer_archives_passes_through_arguments_to_borg(argument_name): module.transfer_archives( dry_run=False, repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', transfer_arguments=flexmock( archive=None, @@ -423,7 +423,7 @@ def test_transfer_archives_with_source_repository_calls_borg_with_other_repo_fla module.transfer_archives( dry_run=False, repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', transfer_arguments=flexmock( archive=None, progress=None, match_archives=None, source_repository='other' @@ -465,7 +465,7 @@ def test_transfer_archives_with_date_based_matching_calls_borg_with_date_based_f module.transfer_archives( dry_run=False, repository_path='repo', - storage_config={}, + config={}, local_borg_version='2.3.4', global_arguments=flexmock(log_json=False), transfer_arguments=flexmock( diff --git a/tests/unit/commands/test_borgmatic.py b/tests/unit/commands/test_borgmatic.py index 89eda4ce..e94bdd8c 100644 --- a/tests/unit/commands/test_borgmatic.py +++ b/tests/unit/commands/test_borgmatic.py @@ -15,7 +15,7 @@ def test_run_configuration_runs_actions_for_each_repository(): flexmock(module).should_receive('run_actions').and_return(expected_results[:1]).and_return( expected_results[1:] ) - config = {'location': {'repositories': [{'path': 'foo'}, {'path': 'bar'}]}} + config = {'repositories': [{'path': 'foo'}, {'path': 'bar'}]} arguments = {'global': flexmock(monitoring_verbosity=1)} results = list(module.run_configuration('test.yaml', config, arguments)) @@ -29,7 +29,7 @@ def 
test_run_configuration_with_invalid_borg_version_errors(): flexmock(module.command).should_receive('execute_hook').never() flexmock(module.dispatch).should_receive('call_hooks').never() flexmock(module).should_receive('run_actions').never() - config = {'location': {'repositories': ['foo']}} + config = {'repositories': ['foo']} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'prune': flexmock()} list(module.run_configuration('test.yaml', config, arguments)) @@ -44,7 +44,7 @@ def test_run_configuration_logs_monitor_start_error(): expected_results = [flexmock()] flexmock(module).should_receive('log_error_records').and_return(expected_results) flexmock(module).should_receive('run_actions').never() - config = {'location': {'repositories': ['foo']}} + config = {'repositories': ['foo']} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} results = list(module.run_configuration('test.yaml', config, arguments)) @@ -59,7 +59,7 @@ def test_run_configuration_bails_for_monitor_start_soft_failure(): flexmock(module.dispatch).should_receive('call_hooks').and_raise(error) flexmock(module).should_receive('log_error_records').never() flexmock(module).should_receive('run_actions').never() - config = {'location': {'repositories': ['foo']}} + config = {'repositories': ['foo']} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} results = list(module.run_configuration('test.yaml', config, arguments)) @@ -75,7 +75,7 @@ def test_run_configuration_logs_actions_error(): expected_results = [flexmock()] flexmock(module).should_receive('log_error_records').and_return(expected_results) flexmock(module).should_receive('run_actions').and_raise(OSError) - config = {'location': {'repositories': [{'path': 'foo'}]}} + config = {'repositories': [{'path': 'foo'}]} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False)} results = list(module.run_configuration('test.yaml', config, 
arguments)) @@ -91,7 +91,7 @@ def test_run_configuration_bails_for_actions_soft_failure(): flexmock(module).should_receive('run_actions').and_raise(error) flexmock(module).should_receive('log_error_records').never() flexmock(module.command).should_receive('considered_soft_failure').and_return(True) - config = {'location': {'repositories': [{'path': 'foo'}]}} + config = {'repositories': [{'path': 'foo'}]} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} results = list(module.run_configuration('test.yaml', config, arguments)) @@ -108,7 +108,7 @@ def test_run_configuration_logs_monitor_log_error(): expected_results = [flexmock()] flexmock(module).should_receive('log_error_records').and_return(expected_results) flexmock(module).should_receive('run_actions').and_return([]) - config = {'location': {'repositories': [{'path': 'foo'}]}} + config = {'repositories': [{'path': 'foo'}]} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} results = list(module.run_configuration('test.yaml', config, arguments)) @@ -126,7 +126,7 @@ def test_run_configuration_bails_for_monitor_log_soft_failure(): flexmock(module).should_receive('log_error_records').never() flexmock(module).should_receive('run_actions').and_return([]) flexmock(module.command).should_receive('considered_soft_failure').and_return(True) - config = {'location': {'repositories': [{'path': 'foo'}]}} + config = {'repositories': [{'path': 'foo'}]} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} results = list(module.run_configuration('test.yaml', config, arguments)) @@ -143,7 +143,7 @@ def test_run_configuration_logs_monitor_finish_error(): expected_results = [flexmock()] flexmock(module).should_receive('log_error_records').and_return(expected_results) flexmock(module).should_receive('run_actions').and_return([]) - config = {'location': {'repositories': [{'path': 'foo'}]}} + config = 
{'repositories': [{'path': 'foo'}]} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} results = list(module.run_configuration('test.yaml', config, arguments)) @@ -161,7 +161,7 @@ def test_run_configuration_bails_for_monitor_finish_soft_failure(): flexmock(module).should_receive('log_error_records').never() flexmock(module).should_receive('run_actions').and_return([]) flexmock(module.command).should_receive('considered_soft_failure').and_return(True) - config = {'location': {'repositories': [{'path': 'foo'}]}} + config = {'repositories': [{'path': 'foo'}]} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} results = list(module.run_configuration('test.yaml', config, arguments)) @@ -176,7 +176,7 @@ def test_run_configuration_does_not_call_monitoring_hooks_if_monitoring_hooks_ar flexmock(module.dispatch).should_receive('call_hooks').never() flexmock(module).should_receive('run_actions').and_return([]) - config = {'location': {'repositories': [{'path': 'foo'}]}} + config = {'repositories': [{'path': 'foo'}]} arguments = {'global': flexmock(monitoring_verbosity=-2, dry_run=False), 'create': flexmock()} results = list(module.run_configuration('test.yaml', config, arguments)) assert results == [] @@ -191,7 +191,7 @@ def test_run_configuration_logs_on_error_hook_error(): expected_results[:1] ).and_return(expected_results[1:]) flexmock(module).should_receive('run_actions').and_raise(OSError) - config = {'location': {'repositories': [{'path': 'foo'}]}} + config = {'repositories': [{'path': 'foo'}]} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} results = list(module.run_configuration('test.yaml', config, arguments)) @@ -207,7 +207,7 @@ def test_run_configuration_bails_for_on_error_hook_soft_failure(): expected_results = [flexmock()] flexmock(module).should_receive('log_error_records').and_return(expected_results) 
flexmock(module).should_receive('run_actions').and_raise(OSError) - config = {'location': {'repositories': [{'path': 'foo'}]}} + config = {'repositories': [{'path': 'foo'}]} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} results = list(module.run_configuration('test.yaml', config, arguments)) @@ -222,7 +222,7 @@ def test_run_configuration_retries_soft_error(): flexmock(module.command).should_receive('execute_hook') flexmock(module).should_receive('run_actions').and_raise(OSError).and_return([]) flexmock(module).should_receive('log_error_records').and_return([flexmock()]).once() - config = {'location': {'repositories': [{'path': 'foo'}]}, 'storage': {'retries': 1}} + config = {'repositories': [{'path': 'foo'}], 'retries': 1} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} results = list(module.run_configuration('test.yaml', config, arguments)) assert results == [] @@ -245,7 +245,7 @@ def test_run_configuration_retries_hard_error(): 'foo: Error running actions for repository', OSError, ).and_return(error_logs) - config = {'location': {'repositories': [{'path': 'foo'}]}, 'storage': {'retries': 1}} + config = {'repositories': [{'path': 'foo'}], 'retries': 1} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} results = list(module.run_configuration('test.yaml', config, arguments)) assert results == error_logs @@ -263,7 +263,7 @@ def test_run_configuration_repos_ordered(): flexmock(module).should_receive('log_error_records').with_args( 'bar: Error running actions for repository', OSError ).and_return(expected_results[1:]).ordered() - config = {'location': {'repositories': [{'path': 'foo'}, {'path': 'bar'}]}} + config = {'repositories': [{'path': 'foo'}, {'path': 'bar'}]} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} results = list(module.run_configuration('test.yaml', config, arguments)) 
assert results == expected_results @@ -295,8 +295,8 @@ def test_run_configuration_retries_round_robin(): 'bar: Error running actions for repository', OSError ).and_return(bar_error_logs).ordered() config = { - 'location': {'repositories': [{'path': 'foo'}, {'path': 'bar'}]}, - 'storage': {'retries': 1}, + 'repositories': [{'path': 'foo'}, {'path': 'bar'}], + 'retries': 1, } arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} results = list(module.run_configuration('test.yaml', config, arguments)) @@ -327,8 +327,8 @@ def test_run_configuration_retries_one_passes(): 'bar: Error running actions for repository', OSError ).and_return(error_logs).ordered() config = { - 'location': {'repositories': [{'path': 'foo'}, {'path': 'bar'}]}, - 'storage': {'retries': 1}, + 'repositories': [{'path': 'foo'}, {'path': 'bar'}], + 'retries': 1, } arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} results = list(module.run_configuration('test.yaml', config, arguments)) @@ -369,8 +369,9 @@ def test_run_configuration_retry_wait(): 'foo: Error running actions for repository', OSError ).and_return(error_logs).ordered() config = { - 'location': {'repositories': [{'path': 'foo'}]}, - 'storage': {'retries': 3, 'retry_wait': 10}, + 'repositories': [{'path': 'foo'}], + 'retries': 3, + 'retry_wait': 10, } arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} results = list(module.run_configuration('test.yaml', config, arguments)) @@ -407,8 +408,9 @@ def test_run_configuration_retries_timeout_multiple_repos(): 'bar: Error running actions for repository', OSError ).and_return(error_logs).ordered() config = { - 'location': {'repositories': [{'path': 'foo'}, {'path': 'bar'}]}, - 'storage': {'retries': 1, 'retry_wait': 10}, + 'repositories': [{'path': 'foo'}, {'path': 'bar'}], + 'retries': 1, + 'retry_wait': 10, } arguments = {'global': flexmock(monitoring_verbosity=1, 
dry_run=False), 'create': flexmock()} results = list(module.run_configuration('test.yaml', config, arguments)) @@ -424,11 +426,7 @@ def test_run_actions_runs_rcreate(): module.run_actions( arguments={'global': flexmock(dry_run=False, log_file='foo'), 'rcreate': flexmock()}, config_filename=flexmock(), - location={'repositories': []}, - storage=flexmock(), - retention=flexmock(), - consistency=flexmock(), - hooks={}, + config={'repositories': []}, local_path=flexmock(), remote_path=flexmock(), local_borg_version=flexmock(), @@ -444,9 +442,7 @@ def test_run_actions_adds_log_file_to_hook_context(): flexmock(borgmatic.actions.create).should_receive('run_create').with_args( config_filename=object, repository={'path': 'repo'}, - location={'repositories': []}, - storage=object, - hooks={}, + config={'repositories': []}, hook_context={'repository': 'repo', 'repositories': '', 'log_file': 'foo'}, local_borg_version=object, create_arguments=object, @@ -460,11 +456,7 @@ def test_run_actions_adds_log_file_to_hook_context(): module.run_actions( arguments={'global': flexmock(dry_run=False, log_file='foo'), 'create': flexmock()}, config_filename=flexmock(), - location={'repositories': []}, - storage=flexmock(), - retention=flexmock(), - consistency=flexmock(), - hooks={}, + config={'repositories': []}, local_path=flexmock(), remote_path=flexmock(), local_borg_version=flexmock(), @@ -483,11 +475,7 @@ def test_run_actions_runs_transfer(): module.run_actions( arguments={'global': flexmock(dry_run=False, log_file='foo'), 'transfer': flexmock()}, config_filename=flexmock(), - location={'repositories': []}, - storage=flexmock(), - retention=flexmock(), - consistency=flexmock(), - hooks={}, + config={'repositories': []}, local_path=flexmock(), remote_path=flexmock(), local_borg_version=flexmock(), @@ -506,11 +494,7 @@ def test_run_actions_runs_create(): module.run_actions( arguments={'global': flexmock(dry_run=False, log_file='foo'), 'create': flexmock()}, config_filename=flexmock(), - 
location={'repositories': []}, - storage=flexmock(), - retention=flexmock(), - consistency=flexmock(), - hooks={}, + config={'repositories': []}, local_path=flexmock(), remote_path=flexmock(), local_borg_version=flexmock(), @@ -529,11 +513,7 @@ def test_run_actions_runs_prune(): module.run_actions( arguments={'global': flexmock(dry_run=False, log_file='foo'), 'prune': flexmock()}, config_filename=flexmock(), - location={'repositories': []}, - storage=flexmock(), - retention=flexmock(), - consistency=flexmock(), - hooks={}, + config={'repositories': []}, local_path=flexmock(), remote_path=flexmock(), local_borg_version=flexmock(), @@ -551,11 +531,7 @@ def test_run_actions_runs_compact(): module.run_actions( arguments={'global': flexmock(dry_run=False, log_file='foo'), 'compact': flexmock()}, config_filename=flexmock(), - location={'repositories': []}, - storage=flexmock(), - retention=flexmock(), - consistency=flexmock(), - hooks={}, + config={'repositories': []}, local_path=flexmock(), remote_path=flexmock(), local_borg_version=flexmock(), @@ -574,11 +550,7 @@ def test_run_actions_runs_check_when_repository_enabled_for_checks(): module.run_actions( arguments={'global': flexmock(dry_run=False, log_file='foo'), 'check': flexmock()}, config_filename=flexmock(), - location={'repositories': []}, - storage=flexmock(), - retention=flexmock(), - consistency=flexmock(), - hooks={}, + config={'repositories': []}, local_path=flexmock(), remote_path=flexmock(), local_borg_version=flexmock(), @@ -597,11 +569,7 @@ def test_run_actions_skips_check_when_repository_not_enabled_for_checks(): module.run_actions( arguments={'global': flexmock(dry_run=False, log_file='foo'), 'check': flexmock()}, config_filename=flexmock(), - location={'repositories': []}, - storage=flexmock(), - retention=flexmock(), - consistency=flexmock(), - hooks={}, + config={'repositories': []}, local_path=flexmock(), remote_path=flexmock(), local_borg_version=flexmock(), @@ -619,11 +587,7 @@ def 
test_run_actions_runs_extract(): module.run_actions( arguments={'global': flexmock(dry_run=False, log_file='foo'), 'extract': flexmock()}, config_filename=flexmock(), - location={'repositories': []}, - storage=flexmock(), - retention=flexmock(), - consistency=flexmock(), - hooks={}, + config={'repositories': []}, local_path=flexmock(), remote_path=flexmock(), local_borg_version=flexmock(), @@ -641,11 +605,7 @@ def test_run_actions_runs_export_tar(): module.run_actions( arguments={'global': flexmock(dry_run=False, log_file='foo'), 'export-tar': flexmock()}, config_filename=flexmock(), - location={'repositories': []}, - storage=flexmock(), - retention=flexmock(), - consistency=flexmock(), - hooks={}, + config={'repositories': []}, local_path=flexmock(), remote_path=flexmock(), local_borg_version=flexmock(), @@ -663,11 +623,7 @@ def test_run_actions_runs_mount(): module.run_actions( arguments={'global': flexmock(dry_run=False, log_file='foo'), 'mount': flexmock()}, config_filename=flexmock(), - location={'repositories': []}, - storage=flexmock(), - retention=flexmock(), - consistency=flexmock(), - hooks={}, + config={'repositories': []}, local_path=flexmock(), remote_path=flexmock(), local_borg_version=flexmock(), @@ -685,11 +641,7 @@ def test_run_actions_runs_restore(): module.run_actions( arguments={'global': flexmock(dry_run=False, log_file='foo'), 'restore': flexmock()}, config_filename=flexmock(), - location={'repositories': []}, - storage=flexmock(), - retention=flexmock(), - consistency=flexmock(), - hooks={}, + config={'repositories': []}, local_path=flexmock(), remote_path=flexmock(), local_borg_version=flexmock(), @@ -708,11 +660,7 @@ def test_run_actions_runs_rlist(): module.run_actions( arguments={'global': flexmock(dry_run=False, log_file='foo'), 'rlist': flexmock()}, config_filename=flexmock(), - location={'repositories': []}, - storage=flexmock(), - retention=flexmock(), - consistency=flexmock(), - hooks={}, + config={'repositories': []}, 
local_path=flexmock(), remote_path=flexmock(), local_borg_version=flexmock(), @@ -732,11 +680,7 @@ def test_run_actions_runs_list(): module.run_actions( arguments={'global': flexmock(dry_run=False, log_file='foo'), 'list': flexmock()}, config_filename=flexmock(), - location={'repositories': []}, - storage=flexmock(), - retention=flexmock(), - consistency=flexmock(), - hooks={}, + config={'repositories': []}, local_path=flexmock(), remote_path=flexmock(), local_borg_version=flexmock(), @@ -756,11 +700,7 @@ def test_run_actions_runs_rinfo(): module.run_actions( arguments={'global': flexmock(dry_run=False, log_file='foo'), 'rinfo': flexmock()}, config_filename=flexmock(), - location={'repositories': []}, - storage=flexmock(), - retention=flexmock(), - consistency=flexmock(), - hooks={}, + config={'repositories': []}, local_path=flexmock(), remote_path=flexmock(), local_borg_version=flexmock(), @@ -780,11 +720,7 @@ def test_run_actions_runs_info(): module.run_actions( arguments={'global': flexmock(dry_run=False, log_file='foo'), 'info': flexmock()}, config_filename=flexmock(), - location={'repositories': []}, - storage=flexmock(), - retention=flexmock(), - consistency=flexmock(), - hooks={}, + config={'repositories': []}, local_path=flexmock(), remote_path=flexmock(), local_borg_version=flexmock(), @@ -803,11 +739,7 @@ def test_run_actions_runs_break_lock(): module.run_actions( arguments={'global': flexmock(dry_run=False, log_file='foo'), 'break-lock': flexmock()}, config_filename=flexmock(), - location={'repositories': []}, - storage=flexmock(), - retention=flexmock(), - consistency=flexmock(), - hooks={}, + config={'repositories': []}, local_path=flexmock(), remote_path=flexmock(), local_borg_version=flexmock(), @@ -825,11 +757,7 @@ def test_run_actions_runs_borg(): module.run_actions( arguments={'global': flexmock(dry_run=False, log_file='foo'), 'borg': flexmock()}, config_filename=flexmock(), - location={'repositories': []}, - storage=flexmock(), - 
retention=flexmock(), - consistency=flexmock(), - hooks={}, + config={'repositories': []}, local_path=flexmock(), remote_path=flexmock(), local_borg_version=flexmock(), @@ -852,11 +780,7 @@ def test_run_actions_runs_multiple_actions_in_argument_order(): 'restore': flexmock(), }, config_filename=flexmock(), - location={'repositories': []}, - storage=flexmock(), - retention=flexmock(), - consistency=flexmock(), - hooks={}, + config={'repositories': []}, local_path=flexmock(), remote_path=flexmock(), local_borg_version=flexmock(), @@ -951,15 +875,11 @@ def test_log_error_records_generates_nothing_for_other_error(): def test_get_local_path_uses_configuration_value(): - assert module.get_local_path({'test.yaml': {'location': {'local_path': 'borg1'}}}) == 'borg1' - - -def test_get_local_path_without_location_defaults_to_borg(): - assert module.get_local_path({'test.yaml': {}}) == 'borg' + assert module.get_local_path({'test.yaml': {'local_path': 'borg1'}}) == 'borg1' def test_get_local_path_without_local_path_defaults_to_borg(): - assert module.get_local_path({'test.yaml': {'location': {}}}) == 'borg' + assert module.get_local_path({'test.yaml': {}}) == 'borg' def test_collect_highlander_action_summary_logs_info_for_success_with_bootstrap(): diff --git a/tests/unit/config/test_generate.py b/tests/unit/config/test_generate.py index 7855592c..1dcbf074 100644 --- a/tests/unit/config/test_generate.py +++ b/tests/unit/config/test_generate.py @@ -13,35 +13,20 @@ def test_schema_to_sample_configuration_generates_config_map_with_examples(): 'type': 'object', 'properties': OrderedDict( [ - ( - 'section1', - { - 'type': 'object', - 'properties': {'field1': OrderedDict([('example', 'Example 1')])}, - }, - ), - ( - 'section2', - { - 'type': 'object', - 'properties': OrderedDict( - [ - ('field2', {'example': 'Example 2'}), - ('field3', {'example': 'Example 3'}), - ] - ), - }, - ), + ('field1', {'example': 'Example 1'}), + ('field2', {'example': 'Example 2'}), + ('field3', {'example': 
'Example 3'}), ] ), } - config = module._schema_to_sample_configuration(schema) + config = module.schema_to_sample_configuration(schema) assert config == OrderedDict( [ - ('section1', OrderedDict([('field1', 'Example 1')])), - ('section2', OrderedDict([('field2', 'Example 2'), ('field3', 'Example 3')])), + ('field1', 'Example 1'), + ('field2', 'Example 2'), + ('field3', 'Example 3'), ] ) @@ -51,7 +36,7 @@ def test_schema_to_sample_configuration_generates_config_sequence_of_strings_wit flexmock(module).should_receive('add_comments_to_configuration_sequence') schema = {'type': 'array', 'items': {'type': 'string'}, 'example': ['hi']} - config = module._schema_to_sample_configuration(schema) + config = module.schema_to_sample_configuration(schema) assert config == ['hi'] @@ -70,7 +55,7 @@ def test_schema_to_sample_configuration_generates_config_sequence_of_maps_with_e }, } - config = module._schema_to_sample_configuration(schema) + config = module.schema_to_sample_configuration(schema) assert config == [OrderedDict([('field1', 'Example 1'), ('field2', 'Example 2')])] @@ -79,7 +64,7 @@ def test_schema_to_sample_configuration_with_unsupported_schema_raises(): schema = {'gobbledygook': [{'type': 'not-your'}]} with pytest.raises(ValueError): - module._schema_to_sample_configuration(schema) + module.schema_to_sample_configuration(schema) def test_merge_source_configuration_into_destination_inserts_map_fields(): diff --git a/tests/unit/config/test_normalize.py b/tests/unit/config/test_normalize.py index 6393d902..633e0781 100644 --- a/tests/unit/config/test_normalize.py +++ b/tests/unit/config/test_normalize.py @@ -1,4 +1,5 @@ import pytest +from flexmock import flexmock from borgmatic.config import normalize as module @@ -7,138 +8,220 @@ from borgmatic.config import normalize as module 'config,expected_config,produces_logs', ( ( - {'location': {'exclude_if_present': '.nobackup'}}, - {'location': {'exclude_if_present': ['.nobackup']}}, + {'location': {'foo': 'bar', 'baz': 
'quux'}}, + {'foo': 'bar', 'baz': 'quux'}, True, ), ( - {'location': {'exclude_if_present': ['.nobackup']}}, - {'location': {'exclude_if_present': ['.nobackup']}}, + {'retention': {'foo': 'bar', 'baz': 'quux'}}, + {'foo': 'bar', 'baz': 'quux'}, + True, + ), + ( + {'consistency': {'foo': 'bar', 'baz': 'quux'}}, + {'foo': 'bar', 'baz': 'quux'}, + True, + ), + ( + {'output': {'foo': 'bar', 'baz': 'quux'}}, + {'foo': 'bar', 'baz': 'quux'}, + True, + ), + ( + {'hooks': {'foo': 'bar', 'baz': 'quux'}}, + {'foo': 'bar', 'baz': 'quux'}, + True, + ), + ( + {'location': {'foo': 'bar'}, 'storage': {'baz': 'quux'}}, + {'foo': 'bar', 'baz': 'quux'}, + True, + ), + ( + {'foo': 'bar', 'baz': 'quux'}, + {'foo': 'bar', 'baz': 'quux'}, False, ), ( - {'location': {'source_directories': ['foo', 'bar']}}, - {'location': {'source_directories': ['foo', 'bar']}}, + {'location': {'prefix': 'foo'}, 'consistency': {'prefix': 'foo'}}, + {'prefix': 'foo'}, + True, + ), + ( + {'location': {'prefix': 'foo'}, 'consistency': {'prefix': 'foo'}}, + {'prefix': 'foo'}, + True, + ), + ( + {'location': {'prefix': 'foo'}, 'consistency': {'bar': 'baz'}}, + {'prefix': 'foo', 'bar': 'baz'}, + True, + ), + ( + {'storage': {'umask': 'foo'}, 'hooks': {'umask': 'foo'}}, + {'umask': 'foo'}, + True, + ), + ( + {'storage': {'umask': 'foo'}, 'hooks': {'umask': 'foo'}}, + {'umask': 'foo'}, + True, + ), + ( + {'storage': {'umask': 'foo'}, 'hooks': {'bar': 'baz'}}, + {'umask': 'foo', 'bar': 'baz'}, + True, + ), + ( + {'location': {'bar': 'baz'}, 'consistency': {'prefix': 'foo'}}, + {'bar': 'baz', 'prefix': 'foo'}, + True, + ), + ( + {}, + {}, + False, + ), + ), +) +def test_normalize_sections_moves_section_options_to_global_scope( + config, expected_config, produces_logs +): + logs = module.normalize_sections('test.yaml', config) + + assert config == expected_config + + if produces_logs: + assert logs + else: + assert logs == [] + + +def test_normalize_sections_with_different_prefix_values_raises(): + config = 
{'location': {'prefix': 'foo'}, 'consistency': {'prefix': 'bar'}} + + with pytest.raises(ValueError): + module.normalize_sections('test.yaml', config) + + +def test_normalize_sections_with_different_umask_values_raises(): + config = {'storage': {'umask': 'foo'}, 'hooks': {'umask': 'bar'}} + + with pytest.raises(ValueError): + module.normalize_sections('test.yaml', config) + + +@pytest.mark.parametrize( + 'config,expected_config,produces_logs', + ( + ( + {'exclude_if_present': '.nobackup'}, + {'exclude_if_present': ['.nobackup']}, + True, + ), + ( + {'exclude_if_present': ['.nobackup']}, + {'exclude_if_present': ['.nobackup']}, False, ), ( - {'location': None}, - {'location': None}, + {'source_directories': ['foo', 'bar']}, + {'source_directories': ['foo', 'bar']}, False, ), ( - {'storage': {'compression': 'yes_please'}}, - {'storage': {'compression': 'yes_please'}}, + {'compression': 'yes_please'}, + {'compression': 'yes_please'}, False, ), ( - {'storage': None}, - {'storage': None}, + {'healthchecks': 'https://example.com'}, + {'healthchecks': {'ping_url': 'https://example.com'}}, + True, + ), + ( + {'cronitor': 'https://example.com'}, + {'cronitor': {'ping_url': 'https://example.com'}}, + True, + ), + ( + {'pagerduty': 'https://example.com'}, + {'pagerduty': {'integration_key': 'https://example.com'}}, + True, + ), + ( + {'cronhub': 'https://example.com'}, + {'cronhub': {'ping_url': 'https://example.com'}}, + True, + ), + ( + {'checks': ['archives']}, + {'checks': [{'name': 'archives'}]}, + True, + ), + ( + {'checks': ['archives']}, + {'checks': [{'name': 'archives'}]}, + True, + ), + ( + {'numeric_owner': False}, + {'numeric_ids': False}, + True, + ), + ( + {'bsd_flags': False}, + {'flags': False}, + True, + ), + ( + {'remote_rate_limit': False}, + {'upload_rate_limit': False}, + True, + ), + ( + {'repositories': ['foo@bar:/repo']}, + {'repositories': [{'path': 'ssh://foo@bar/repo'}]}, + True, + ), + ( + {'repositories': ['foo@bar:repo']}, + {'repositories': 
[{'path': 'ssh://foo@bar/./repo'}]}, + True, + ), + ( + {'repositories': ['foo@bar:~/repo']}, + {'repositories': [{'path': 'ssh://foo@bar/~/repo'}]}, + True, + ), + ( + {'repositories': ['ssh://foo@bar:1234/repo']}, + {'repositories': [{'path': 'ssh://foo@bar:1234/repo'}]}, + True, + ), + ( + {'repositories': ['file:///repo']}, + {'repositories': [{'path': '/repo'}]}, + True, + ), + ( + {'repositories': [{'path': 'foo@bar:/repo', 'label': 'foo'}]}, + {'repositories': [{'path': 'ssh://foo@bar/repo', 'label': 'foo'}]}, + True, + ), + ( + {'repositories': [{'path': 'file:///repo', 'label': 'foo'}]}, + {'repositories': [{'path': '/repo', 'label': 'foo'}]}, False, ), ( - {'hooks': {'healthchecks': 'https://example.com'}}, - {'hooks': {'healthchecks': {'ping_url': 'https://example.com'}}}, - True, - ), - ( - {'hooks': {'cronitor': 'https://example.com'}}, - {'hooks': {'cronitor': {'ping_url': 'https://example.com'}}}, - True, - ), - ( - {'hooks': {'pagerduty': 'https://example.com'}}, - {'hooks': {'pagerduty': {'integration_key': 'https://example.com'}}}, - True, - ), - ( - {'hooks': {'cronhub': 'https://example.com'}}, - {'hooks': {'cronhub': {'ping_url': 'https://example.com'}}}, - True, - ), - ( - {'hooks': None}, - {'hooks': None}, + {'repositories': [{'path': '/repo', 'label': 'foo'}]}, + {'repositories': [{'path': '/repo', 'label': 'foo'}]}, False, ), ( - {'consistency': {'checks': ['archives']}}, - {'consistency': {'checks': [{'name': 'archives'}]}}, - True, - ), - ( - {'consistency': {'checks': ['archives']}}, - {'consistency': {'checks': [{'name': 'archives'}]}}, - True, - ), - ( - {'consistency': None}, - {'consistency': None}, - False, - ), - ( - {'location': {'numeric_owner': False}}, - {'location': {'numeric_ids': False}}, - True, - ), - ( - {'location': {'bsd_flags': False}}, - {'location': {'flags': False}}, - True, - ), - ( - {'storage': {'remote_rate_limit': False}}, - {'storage': {'upload_rate_limit': False}}, - True, - ), - ( - {'location': 
{'repositories': ['foo@bar:/repo']}}, - {'location': {'repositories': [{'path': 'ssh://foo@bar/repo'}]}}, - True, - ), - ( - {'location': {'repositories': ['foo@bar:repo']}}, - {'location': {'repositories': [{'path': 'ssh://foo@bar/./repo'}]}}, - True, - ), - ( - {'location': {'repositories': ['foo@bar:~/repo']}}, - {'location': {'repositories': [{'path': 'ssh://foo@bar/~/repo'}]}}, - True, - ), - ( - {'location': {'repositories': ['ssh://foo@bar:1234/repo']}}, - {'location': {'repositories': [{'path': 'ssh://foo@bar:1234/repo'}]}}, - True, - ), - ( - {'location': {'repositories': ['file:///repo']}}, - {'location': {'repositories': [{'path': '/repo'}]}}, - True, - ), - ( - {'location': {'repositories': [{'path': 'foo@bar:/repo', 'label': 'foo'}]}}, - {'location': {'repositories': [{'path': 'ssh://foo@bar/repo', 'label': 'foo'}]}}, - True, - ), - ( - {'location': {'repositories': [{'path': 'file:///repo', 'label': 'foo'}]}}, - {'location': {'repositories': [{'path': '/repo', 'label': 'foo'}]}}, - False, - ), - ( - {'location': {'repositories': [{'path': '/repo', 'label': 'foo'}]}}, - {'location': {'repositories': [{'path': '/repo', 'label': 'foo'}]}}, - False, - ), - ( - {'consistency': {'prefix': 'foo'}}, - {'consistency': {'prefix': 'foo'}}, - True, - ), - ( - {'retention': {'prefix': 'foo'}}, - {'retention': {'prefix': 'foo'}}, + {'prefix': 'foo'}, + {'prefix': 'foo'}, True, ), ), @@ -146,6 +229,8 @@ from borgmatic.config import normalize as module def test_normalize_applies_hard_coded_normalization_to_config( config, expected_config, produces_logs ): + flexmock(module).should_receive('normalize_sections').and_return([]) + logs = module.normalize('test.yaml', config) assert config == expected_config @@ -157,12 +242,12 @@ def test_normalize_applies_hard_coded_normalization_to_config( def test_normalize_raises_error_if_repository_data_is_not_consistent(): + flexmock(module).should_receive('normalize_sections').and_return([]) + with pytest.raises(TypeError): 
module.normalize( 'test.yaml', { - 'location': { - 'repositories': [{'path': 'foo@bar:/repo', 'label': 'foo'}, 'file:///repo'] - } + 'repositories': [{'path': 'foo@bar:/repo', 'label': 'foo'}, 'file:///repo'], }, ) diff --git a/tests/unit/config/test_override.py b/tests/unit/config/test_override.py index a5d62eb4..38b07e40 100644 --- a/tests/unit/config/test_override.py +++ b/tests/unit/config/test_override.py @@ -32,54 +32,76 @@ def test_set_values_with_one_key_overwrites_existing_key(): def test_set_values_with_multiple_keys_creates_hierarchy(): config = {} - module.set_values(config, ('section', 'key'), 'value') + module.set_values(config, ('option', 'suboption'), 'value') - assert config == {'section': {'key': 'value'}} + assert config == {'option': {'suboption': 'value'}} def test_set_values_with_multiple_keys_updates_hierarchy(): - config = {'section': {'other': 'other_value'}} - module.set_values(config, ('section', 'key'), 'value') + config = {'option': {'other': 'other_value'}} + module.set_values(config, ('option', 'key'), 'value') - assert config == {'section': {'key': 'value', 'other': 'other_value'}} + assert config == {'option': {'key': 'value', 'other': 'other_value'}} + + +@pytest.mark.parametrize( + 'key,expected_key', + ( + (('foo', 'bar'), ('foo', 'bar')), + (('location', 'foo'), ('foo',)), + (('storage', 'foo'), ('foo',)), + (('retention', 'foo'), ('foo',)), + (('consistency', 'foo'), ('foo',)), + (('output', 'foo'), ('foo',)), + (('hooks', 'foo', 'bar'), ('foo', 'bar')), + (('foo', 'hooks'), ('foo', 'hooks')), + ), +) +def test_strip_section_names_passes_through_key_without_section_name(key, expected_key): + assert module.strip_section_names(key) == expected_key def test_parse_overrides_splits_keys_and_values(): + flexmock(module).should_receive('strip_section_names').replace_with(lambda value: value) flexmock(module).should_receive('convert_value_type').replace_with(lambda value: value) - raw_overrides = ['section.my_option=value1', 
'section.other_option=value2'] + raw_overrides = ['option.my_option=value1', 'other_option=value2'] expected_result = ( - (('section', 'my_option'), 'value1'), - (('section', 'other_option'), 'value2'), + (('option', 'my_option'), 'value1'), + (('other_option'), 'value2'), ) module.parse_overrides(raw_overrides) == expected_result def test_parse_overrides_allows_value_with_equal_sign(): + flexmock(module).should_receive('strip_section_names').replace_with(lambda value: value) flexmock(module).should_receive('convert_value_type').replace_with(lambda value: value) - raw_overrides = ['section.option=this===value'] - expected_result = ((('section', 'option'), 'this===value'),) + raw_overrides = ['option=this===value'] + expected_result = ((('option',), 'this===value'),) module.parse_overrides(raw_overrides) == expected_result def test_parse_overrides_raises_on_missing_equal_sign(): + flexmock(module).should_receive('strip_section_names').replace_with(lambda value: value) flexmock(module).should_receive('convert_value_type').replace_with(lambda value: value) - raw_overrides = ['section.option'] + raw_overrides = ['option'] with pytest.raises(ValueError): module.parse_overrides(raw_overrides) def test_parse_overrides_raises_on_invalid_override_value(): + flexmock(module).should_receive('strip_section_names').replace_with(lambda value: value) flexmock(module).should_receive('convert_value_type').and_raise(ruamel.yaml.parser.ParserError) - raw_overrides = ['section.option=[in valid]'] + raw_overrides = ['option=[in valid]'] with pytest.raises(ValueError): module.parse_overrides(raw_overrides) def test_parse_overrides_allows_value_with_single_key(): + flexmock(module).should_receive('strip_section_names').replace_with(lambda value: value) flexmock(module).should_receive('convert_value_type').replace_with(lambda value: value) raw_overrides = ['option=value'] expected_result = ((('option',), 'value'),) diff --git a/tests/unit/config/test_validate.py 
b/tests/unit/config/test_validate.py index 4ab2b762..182cbd17 100644 --- a/tests/unit/config/test_validate.py +++ b/tests/unit/config/test_validate.py @@ -68,9 +68,9 @@ def test_apply_logical_validation_raises_if_unknown_repository_in_check_reposito module.apply_logical_validation( 'config.yaml', { - 'location': {'repositories': ['repo.borg', 'other.borg']}, - 'retention': {'keep_secondly': 1000}, - 'consistency': {'check_repositories': ['repo.borg', 'unknown.borg']}, + 'repositories': ['repo.borg', 'other.borg'], + 'keep_secondly': 1000, + 'check_repositories': ['repo.borg', 'unknown.borg'], }, ) @@ -79,9 +79,9 @@ def test_apply_logical_validation_does_not_raise_if_known_repository_path_in_che module.apply_logical_validation( 'config.yaml', { - 'location': {'repositories': [{'path': 'repo.borg'}, {'path': 'other.borg'}]}, - 'retention': {'keep_secondly': 1000}, - 'consistency': {'check_repositories': ['repo.borg']}, + 'repositories': [{'path': 'repo.borg'}, {'path': 'other.borg'}], + 'keep_secondly': 1000, + 'check_repositories': ['repo.borg'], }, ) @@ -90,14 +90,12 @@ def test_apply_logical_validation_does_not_raise_if_known_repository_label_in_ch module.apply_logical_validation( 'config.yaml', { - 'location': { - 'repositories': [ - {'path': 'repo.borg', 'label': 'my_repo'}, - {'path': 'other.borg', 'label': 'other_repo'}, - ] - }, - 'retention': {'keep_secondly': 1000}, - 'consistency': {'check_repositories': ['my_repo']}, + 'repositories': [ + {'path': 'repo.borg', 'label': 'my_repo'}, + {'path': 'other.borg', 'label': 'other_repo'}, + ], + 'keep_secondly': 1000, + 'check_repositories': ['my_repo'], }, ) @@ -106,15 +104,15 @@ def test_apply_logical_validation_does_not_raise_if_archive_name_format_and_pref module.apply_logical_validation( 'config.yaml', { - 'storage': {'archive_name_format': '{hostname}-{now}'}, # noqa: FS003 - 'retention': {'prefix': '{hostname}-'}, # noqa: FS003 - 'consistency': {'prefix': '{hostname}-'}, # noqa: FS003 + 
'archive_name_format': '{hostname}-{now}', # noqa: FS003 + 'prefix': '{hostname}-', # noqa: FS003 + 'prefix': '{hostname}-', # noqa: FS003 }, ) def test_apply_logical_validation_does_not_raise_otherwise(): - module.apply_logical_validation('config.yaml', {'retention': {'keep_secondly': 1000}}) + module.apply_logical_validation('config.yaml', {'keep_secondly': 1000}) def test_normalize_repository_path_passes_through_remote_repository(): @@ -157,22 +155,20 @@ def test_guard_configuration_contains_repository_does_not_raise_when_repository_ ) module.guard_configuration_contains_repository( - repository='repo', configurations={'config.yaml': {'location': {'repositories': ['repo']}}} + repository='repo', configurations={'config.yaml': {'repositories': ['repo']}} ) def test_guard_configuration_contains_repository_does_not_raise_when_repository_label_in_config(): module.guard_configuration_contains_repository( repository='repo', - configurations={ - 'config.yaml': {'location': {'repositories': [{'path': 'foo/bar', 'label': 'repo'}]}} - }, + configurations={'config.yaml': {'repositories': [{'path': 'foo/bar', 'label': 'repo'}]}}, ) def test_guard_configuration_contains_repository_does_not_raise_when_repository_not_given(): module.guard_configuration_contains_repository( - repository=None, configurations={'config.yaml': {'location': {'repositories': ['repo']}}} + repository=None, configurations={'config.yaml': {'repositories': ['repo']}} ) @@ -184,7 +180,7 @@ def test_guard_configuration_contains_repository_errors_when_repository_missing_ with pytest.raises(ValueError): module.guard_configuration_contains_repository( repository='nope', - configurations={'config.yaml': {'location': {'repositories': ['repo', 'repo2']}}}, + configurations={'config.yaml': {'repositories': ['repo', 'repo2']}}, ) @@ -197,8 +193,8 @@ def test_guard_configuration_contains_repository_errors_when_repository_matches_ module.guard_configuration_contains_repository( repository='repo', configurations={ - 
'config.yaml': {'location': {'repositories': ['repo', 'repo2']}}, - 'other.yaml': {'location': {'repositories': ['repo']}}, + 'config.yaml': {'repositories': ['repo', 'repo2']}, + 'other.yaml': {'repositories': ['repo']}, }, ) @@ -207,26 +203,26 @@ def test_guard_single_repository_selected_raises_when_multiple_repositories_conf with pytest.raises(ValueError): module.guard_single_repository_selected( repository=None, - configurations={'config.yaml': {'location': {'repositories': ['repo', 'repo2']}}}, + configurations={'config.yaml': {'repositories': ['repo', 'repo2']}}, ) def test_guard_single_repository_selected_does_not_raise_when_single_repository_configured_and_none_selected(): module.guard_single_repository_selected( repository=None, - configurations={'config.yaml': {'location': {'repositories': ['repo']}}}, + configurations={'config.yaml': {'repositories': ['repo']}}, ) def test_guard_single_repository_selected_does_not_raise_when_no_repositories_configured_and_one_selected(): module.guard_single_repository_selected( repository='repo', - configurations={'config.yaml': {'location': {'repositories': []}}}, + configurations={'config.yaml': {'repositories': []}}, ) def test_guard_single_repository_selected_does_not_raise_when_repositories_configured_and_one_selected(): module.guard_single_repository_selected( repository='repo', - configurations={'config.yaml': {'location': {'repositories': ['repo', 'repo2']}}}, + configurations={'config.yaml': {'repositories': ['repo', 'repo2']}}, ) diff --git a/tests/unit/hooks/test_cronhub.py b/tests/unit/hooks/test_cronhub.py index 2941592b..edb167ad 100644 --- a/tests/unit/hooks/test_cronhub.py +++ b/tests/unit/hooks/test_cronhub.py @@ -11,6 +11,7 @@ def test_ping_monitor_rewrites_ping_url_for_start_state(): module.ping_monitor( hook_config, + {}, 'config.yaml', module.monitor.State.START, monitoring_log_level=1, @@ -26,6 +27,7 @@ def test_ping_monitor_rewrites_ping_url_and_state_for_start_state(): module.ping_monitor( 
hook_config, + {}, 'config.yaml', module.monitor.State.START, monitoring_log_level=1, @@ -41,6 +43,7 @@ def test_ping_monitor_rewrites_ping_url_for_finish_state(): module.ping_monitor( hook_config, + {}, 'config.yaml', module.monitor.State.FINISH, monitoring_log_level=1, @@ -55,7 +58,12 @@ def test_ping_monitor_rewrites_ping_url_for_fail_state(): ).and_return(flexmock(ok=True)) module.ping_monitor( - hook_config, 'config.yaml', module.monitor.State.FAIL, monitoring_log_level=1, dry_run=False + hook_config, + {}, + 'config.yaml', + module.monitor.State.FAIL, + monitoring_log_level=1, + dry_run=False, ) @@ -64,7 +72,12 @@ def test_ping_monitor_dry_run_does_not_hit_ping_url(): flexmock(module.requests).should_receive('get').never() module.ping_monitor( - hook_config, 'config.yaml', module.monitor.State.START, monitoring_log_level=1, dry_run=True + hook_config, + {}, + 'config.yaml', + module.monitor.State.START, + monitoring_log_level=1, + dry_run=True, ) @@ -77,6 +90,7 @@ def test_ping_monitor_with_connection_error_logs_warning(): module.ping_monitor( hook_config, + (), 'config.yaml', module.monitor.State.START, monitoring_log_level=1, @@ -97,6 +111,7 @@ def test_ping_monitor_with_other_error_logs_warning(): module.ping_monitor( hook_config, + {}, 'config.yaml', module.monitor.State.START, monitoring_log_level=1, @@ -104,11 +119,13 @@ def test_ping_monitor_with_other_error_logs_warning(): ) -def test_ping_monitor_with_unsupported_monitoring_state(): +def test_ping_monitor_with_unsupported_monitoring_state_bails(): hook_config = {'ping_url': 'https://example.com'} flexmock(module.requests).should_receive('get').never() + module.ping_monitor( hook_config, + {}, 'config.yaml', module.monitor.State.LOG, monitoring_log_level=1, diff --git a/tests/unit/hooks/test_cronitor.py b/tests/unit/hooks/test_cronitor.py index 12b96855..9daa2c11 100644 --- a/tests/unit/hooks/test_cronitor.py +++ b/tests/unit/hooks/test_cronitor.py @@ -11,6 +11,7 @@ def 
test_ping_monitor_hits_ping_url_for_start_state(): module.ping_monitor( hook_config, + {}, 'config.yaml', module.monitor.State.START, monitoring_log_level=1, @@ -26,6 +27,7 @@ def test_ping_monitor_hits_ping_url_for_finish_state(): module.ping_monitor( hook_config, + {}, 'config.yaml', module.monitor.State.FINISH, monitoring_log_level=1, @@ -40,7 +42,12 @@ def test_ping_monitor_hits_ping_url_for_fail_state(): ).and_return(flexmock(ok=True)) module.ping_monitor( - hook_config, 'config.yaml', module.monitor.State.FAIL, monitoring_log_level=1, dry_run=False + hook_config, + {}, + 'config.yaml', + module.monitor.State.FAIL, + monitoring_log_level=1, + dry_run=False, ) @@ -49,7 +56,12 @@ def test_ping_monitor_dry_run_does_not_hit_ping_url(): flexmock(module.requests).should_receive('get').never() module.ping_monitor( - hook_config, 'config.yaml', module.monitor.State.START, monitoring_log_level=1, dry_run=True + hook_config, + {}, + 'config.yaml', + module.monitor.State.START, + monitoring_log_level=1, + dry_run=True, ) @@ -62,6 +74,7 @@ def test_ping_monitor_with_connection_error_logs_warning(): module.ping_monitor( hook_config, + {}, 'config.yaml', module.monitor.State.START, monitoring_log_level=1, @@ -82,6 +95,7 @@ def test_ping_monitor_with_other_error_logs_warning(): module.ping_monitor( hook_config, + {}, 'config.yaml', module.monitor.State.START, monitoring_log_level=1, @@ -89,11 +103,13 @@ def test_ping_monitor_with_other_error_logs_warning(): ) -def test_ping_monitor_with_unsupported_monitoring_state(): +def test_ping_monitor_with_unsupported_monitoring_state_bails(): hook_config = {'ping_url': 'https://example.com'} flexmock(module.requests).should_receive('get').never() + module.ping_monitor( hook_config, + {}, 'config.yaml', module.monitor.State.LOG, monitoring_log_level=1, diff --git a/tests/unit/hooks/test_dispatch.py b/tests/unit/hooks/test_dispatch.py index a332109a..26df72d6 100644 --- a/tests/unit/hooks/test_dispatch.py +++ 
b/tests/unit/hooks/test_dispatch.py @@ -6,7 +6,7 @@ from flexmock import flexmock from borgmatic.hooks import dispatch as module -def hook_function(config, log_prefix, thing, value): +def hook_function(hook_config, config, log_prefix, thing, value): ''' This test function gets mocked out below. ''' @@ -14,98 +14,104 @@ def hook_function(config, log_prefix, thing, value): def test_call_hook_invokes_module_function_with_arguments_and_returns_value(): - hooks = {'super_hook': flexmock(), 'other_hook': flexmock()} + config = {'super_hook': flexmock(), 'other_hook': flexmock()} expected_return_value = flexmock() test_module = sys.modules[__name__] flexmock(module).HOOK_NAME_TO_MODULE = {'super_hook': test_module} flexmock(test_module).should_receive('hook_function').with_args( - hooks['super_hook'], 'prefix', 55, value=66 + config['super_hook'], config, 'prefix', 55, value=66 ).and_return(expected_return_value).once() - return_value = module.call_hook('hook_function', hooks, 'prefix', 'super_hook', 55, value=66) + return_value = module.call_hook('hook_function', config, 'prefix', 'super_hook', 55, value=66) assert return_value == expected_return_value def test_call_hook_without_hook_config_invokes_module_function_with_arguments_and_returns_value(): - hooks = {'other_hook': flexmock()} + config = {'other_hook': flexmock()} expected_return_value = flexmock() test_module = sys.modules[__name__] flexmock(module).HOOK_NAME_TO_MODULE = {'super_hook': test_module} flexmock(test_module).should_receive('hook_function').with_args( - {}, 'prefix', 55, value=66 + {}, config, 'prefix', 55, value=66 ).and_return(expected_return_value).once() - return_value = module.call_hook('hook_function', hooks, 'prefix', 'super_hook', 55, value=66) + return_value = module.call_hook('hook_function', config, 'prefix', 'super_hook', 55, value=66) assert return_value == expected_return_value def test_call_hook_without_corresponding_module_raises(): - hooks = {'super_hook': flexmock(), 'other_hook': 
flexmock()} + config = {'super_hook': flexmock(), 'other_hook': flexmock()} test_module = sys.modules[__name__] flexmock(module).HOOK_NAME_TO_MODULE = {'other_hook': test_module} flexmock(test_module).should_receive('hook_function').never() with pytest.raises(ValueError): - module.call_hook('hook_function', hooks, 'prefix', 'super_hook', 55, value=66) + module.call_hook('hook_function', config, 'prefix', 'super_hook', 55, value=66) def test_call_hooks_calls_each_hook_and_collects_return_values(): - hooks = {'super_hook': flexmock(), 'other_hook': flexmock()} + config = {'super_hook': flexmock(), 'other_hook': flexmock()} expected_return_values = {'super_hook': flexmock(), 'other_hook': flexmock()} flexmock(module).should_receive('call_hook').and_return( expected_return_values['super_hook'] ).and_return(expected_return_values['other_hook']) - return_values = module.call_hooks('do_stuff', hooks, 'prefix', ('super_hook', 'other_hook'), 55) + return_values = module.call_hooks( + 'do_stuff', config, 'prefix', ('super_hook', 'other_hook'), 55 + ) assert return_values == expected_return_values def test_call_hooks_calls_skips_return_values_for_missing_hooks(): - hooks = {'super_hook': flexmock()} + config = {'super_hook': flexmock()} expected_return_values = {'super_hook': flexmock()} flexmock(module).should_receive('call_hook').and_return(expected_return_values['super_hook']) - return_values = module.call_hooks('do_stuff', hooks, 'prefix', ('super_hook', 'other_hook'), 55) + return_values = module.call_hooks( + 'do_stuff', config, 'prefix', ('super_hook', 'other_hook'), 55 + ) assert return_values == expected_return_values def test_call_hooks_calls_skips_return_values_for_null_hooks(): - hooks = {'super_hook': flexmock(), 'other_hook': None} + config = {'super_hook': flexmock(), 'other_hook': None} expected_return_values = {'super_hook': flexmock()} flexmock(module).should_receive('call_hook').and_return(expected_return_values['super_hook']) - return_values = 
module.call_hooks('do_stuff', hooks, 'prefix', ('super_hook', 'other_hook'), 55) + return_values = module.call_hooks( + 'do_stuff', config, 'prefix', ('super_hook', 'other_hook'), 55 + ) assert return_values == expected_return_values def test_call_hooks_even_if_unconfigured_calls_each_hook_and_collects_return_values(): - hooks = {'super_hook': flexmock(), 'other_hook': flexmock()} + config = {'super_hook': flexmock(), 'other_hook': flexmock()} expected_return_values = {'super_hook': flexmock(), 'other_hook': flexmock()} flexmock(module).should_receive('call_hook').and_return( expected_return_values['super_hook'] ).and_return(expected_return_values['other_hook']) return_values = module.call_hooks_even_if_unconfigured( - 'do_stuff', hooks, 'prefix', ('super_hook', 'other_hook'), 55 + 'do_stuff', config, 'prefix', ('super_hook', 'other_hook'), 55 ) assert return_values == expected_return_values def test_call_hooks_even_if_unconfigured_calls_each_hook_configured_or_not_and_collects_return_values(): - hooks = {'other_hook': flexmock()} + config = {'other_hook': flexmock()} expected_return_values = {'super_hook': flexmock(), 'other_hook': flexmock()} flexmock(module).should_receive('call_hook').and_return( expected_return_values['super_hook'] ).and_return(expected_return_values['other_hook']) return_values = module.call_hooks_even_if_unconfigured( - 'do_stuff', hooks, 'prefix', ('super_hook', 'other_hook'), 55 + 'do_stuff', config, 'prefix', ('super_hook', 'other_hook'), 55 ) assert return_values == expected_return_values diff --git a/tests/unit/hooks/test_healthchecks.py b/tests/unit/hooks/test_healthchecks.py index 5c6977da..fd43507b 100644 --- a/tests/unit/hooks/test_healthchecks.py +++ b/tests/unit/hooks/test_healthchecks.py @@ -143,6 +143,7 @@ def test_ping_monitor_hits_ping_url_for_start_state(): module.ping_monitor( hook_config, + {}, 'config.yaml', state=module.monitor.State.START, monitoring_log_level=1, @@ -160,6 +161,7 @@ def 
test_ping_monitor_hits_ping_url_for_finish_state(): module.ping_monitor( hook_config, + {}, 'config.yaml', state=module.monitor.State.FINISH, monitoring_log_level=1, @@ -177,6 +179,7 @@ def test_ping_monitor_hits_ping_url_for_fail_state(): module.ping_monitor( hook_config, + {}, 'config.yaml', state=module.monitor.State.FAIL, monitoring_log_level=1, @@ -194,6 +197,7 @@ def test_ping_monitor_hits_ping_url_for_log_state(): module.ping_monitor( hook_config, + {}, 'config.yaml', state=module.monitor.State.LOG, monitoring_log_level=1, @@ -213,6 +217,7 @@ def test_ping_monitor_with_ping_uuid_hits_corresponding_url(): module.ping_monitor( hook_config, + {}, 'config.yaml', state=module.monitor.State.FINISH, monitoring_log_level=1, @@ -230,6 +235,7 @@ def test_ping_monitor_skips_ssl_verification_when_verify_tls_false(): module.ping_monitor( hook_config, + {}, 'config.yaml', state=module.monitor.State.FINISH, monitoring_log_level=1, @@ -247,6 +253,7 @@ def test_ping_monitor_executes_ssl_verification_when_verify_tls_true(): module.ping_monitor( hook_config, + {}, 'config.yaml', state=module.monitor.State.FINISH, monitoring_log_level=1, @@ -261,6 +268,7 @@ def test_ping_monitor_dry_run_does_not_hit_ping_url(): module.ping_monitor( hook_config, + {}, 'config.yaml', state=module.monitor.State.START, monitoring_log_level=1, @@ -275,6 +283,7 @@ def test_ping_monitor_does_not_hit_ping_url_when_states_not_matching(): module.ping_monitor( hook_config, + {}, 'config.yaml', state=module.monitor.State.START, monitoring_log_level=1, @@ -291,6 +300,7 @@ def test_ping_monitor_hits_ping_url_when_states_matching(): module.ping_monitor( hook_config, + {}, 'config.yaml', state=module.monitor.State.START, monitoring_log_level=1, @@ -308,6 +318,7 @@ def test_ping_monitor_with_connection_error_logs_warning(): module.ping_monitor( hook_config, + {}, 'config.yaml', state=module.monitor.State.START, monitoring_log_level=1, @@ -329,6 +340,7 @@ def test_ping_monitor_with_other_error_logs_warning(): 
module.ping_monitor( hook_config, + {}, 'config.yaml', state=module.monitor.State.START, monitoring_log_level=1, diff --git a/tests/unit/hooks/test_mongodb.py b/tests/unit/hooks/test_mongodb.py index 5ac8ce96..234201f4 100644 --- a/tests/unit/hooks/test_mongodb.py +++ b/tests/unit/hooks/test_mongodb.py @@ -22,7 +22,7 @@ def test_dump_databases_runs_mongodump_for_each_database(): run_to_completion=False, ).and_return(process).once() - assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == processes + assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == processes def test_dump_databases_with_dry_run_skips_mongodump(): @@ -34,7 +34,7 @@ def test_dump_databases_with_dry_run_skips_mongodump(): flexmock(module.dump).should_receive('create_named_pipe_for_dump').never() flexmock(module).should_receive('execute_command').never() - assert module.dump_databases(databases, 'test.yaml', {}, dry_run=True) == [] + assert module.dump_databases(databases, {}, 'test.yaml', dry_run=True) == [] def test_dump_databases_runs_mongodump_with_hostname_and_port(): @@ -63,7 +63,7 @@ def test_dump_databases_runs_mongodump_with_hostname_and_port(): run_to_completion=False, ).and_return(process).once() - assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == [process] + assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == [process] def test_dump_databases_runs_mongodump_with_username_and_password(): @@ -101,7 +101,7 @@ def test_dump_databases_runs_mongodump_with_username_and_password(): run_to_completion=False, ).and_return(process).once() - assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == [process] + assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == [process] def test_dump_databases_runs_mongodump_with_directory_format(): @@ -118,7 +118,7 @@ def test_dump_databases_runs_mongodump_with_directory_format(): shell=True, ).and_return(flexmock()).once() - assert 
module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == [] + assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == [] def test_dump_databases_runs_mongodump_with_options(): @@ -136,7 +136,7 @@ def test_dump_databases_runs_mongodump_with_options(): run_to_completion=False, ).and_return(process).once() - assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == [process] + assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == [process] def test_dump_databases_runs_mongodumpall_for_all_databases(): @@ -154,7 +154,7 @@ def test_dump_databases_runs_mongodumpall_for_all_databases(): run_to_completion=False, ).and_return(process).once() - assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == [process] + assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == [process] def test_restore_database_dump_runs_mongorestore(): @@ -172,8 +172,8 @@ def test_restore_database_dump_runs_mongorestore(): module.restore_database_dump( database_config, - 'test.yaml', {}, + 'test.yaml', dry_run=False, extract_process=extract_process, connection_params={ @@ -196,8 +196,8 @@ def test_restore_database_dump_errors_on_multiple_database_config(): with pytest.raises(ValueError): module.restore_database_dump( database_config, - 'test.yaml', {}, + 'test.yaml', dry_run=False, extract_process=flexmock(), connection_params={ @@ -236,8 +236,8 @@ def test_restore_database_dump_runs_mongorestore_with_hostname_and_port(): module.restore_database_dump( database_config, - 'test.yaml', {}, + 'test.yaml', dry_run=False, extract_process=extract_process, connection_params={ @@ -284,8 +284,8 @@ def test_restore_database_dump_runs_mongorestore_with_username_and_password(): module.restore_database_dump( database_config, - 'test.yaml', {}, + 'test.yaml', dry_run=False, extract_process=extract_process, connection_params={ @@ -340,8 +340,8 @@ def 
test_restore_database_dump_with_connection_params_uses_connection_params_for module.restore_database_dump( database_config, - 'test.yaml', {}, + 'test.yaml', dry_run=False, extract_process=extract_process, connection_params={ @@ -396,8 +396,8 @@ def test_restore_database_dump_without_connection_params_uses_restore_params_in_ module.restore_database_dump( database_config, - 'test.yaml', {}, + 'test.yaml', dry_run=False, extract_process=extract_process, connection_params={ @@ -424,8 +424,8 @@ def test_restore_database_dump_runs_mongorestore_with_options(): module.restore_database_dump( database_config, - 'test.yaml', {}, + 'test.yaml', dry_run=False, extract_process=extract_process, connection_params={ @@ -462,8 +462,8 @@ def test_restore_databases_dump_runs_mongorestore_with_schemas(): module.restore_database_dump( database_config, - 'test.yaml', {}, + 'test.yaml', dry_run=False, extract_process=extract_process, connection_params={ @@ -490,8 +490,8 @@ def test_restore_database_dump_runs_psql_for_all_database_dump(): module.restore_database_dump( database_config, - 'test.yaml', {}, + 'test.yaml', dry_run=False, extract_process=extract_process, connection_params={ @@ -512,8 +512,8 @@ def test_restore_database_dump_with_dry_run_skips_restore(): module.restore_database_dump( database_config, - 'test.yaml', {}, + 'test.yaml', dry_run=True, extract_process=flexmock(), connection_params={ @@ -539,8 +539,8 @@ def test_restore_database_dump_without_extract_process_restores_from_disk(): module.restore_database_dump( database_config, - 'test.yaml', {}, + 'test.yaml', dry_run=False, extract_process=None, connection_params={ diff --git a/tests/unit/hooks/test_mysql.py b/tests/unit/hooks/test_mysql.py index cdcddf5e..4b55be47 100644 --- a/tests/unit/hooks/test_mysql.py +++ b/tests/unit/hooks/test_mysql.py @@ -63,7 +63,7 @@ def test_dump_databases_dumps_each_database(): dry_run_label=object, ).and_return(process).once() - assert module.dump_databases(databases, 'test.yaml', {}, 
dry_run=False) == processes + assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == processes def test_dump_databases_dumps_with_password(): @@ -84,7 +84,7 @@ def test_dump_databases_dumps_with_password(): dry_run_label=object, ).and_return(process).once() - assert module.dump_databases([database], 'test.yaml', {}, dry_run=False) == [process] + assert module.dump_databases([database], {}, 'test.yaml', dry_run=False) == [process] def test_dump_databases_dumps_all_databases_at_once(): @@ -102,7 +102,7 @@ def test_dump_databases_dumps_all_databases_at_once(): dry_run_label=object, ).and_return(process).once() - assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == [process] + assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == [process] def test_dump_databases_dumps_all_databases_separately_when_format_configured(): @@ -122,7 +122,7 @@ def test_dump_databases_dumps_all_databases_separately_when_format_configured(): dry_run_label=object, ).and_return(process).once() - assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == processes + assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == processes def test_database_names_to_dump_runs_mysql_with_list_options(): @@ -365,7 +365,7 @@ def test_dump_databases_errors_for_missing_all_databases(): flexmock(module).should_receive('database_names_to_dump').and_return(()) with pytest.raises(ValueError): - assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) + assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) def test_dump_databases_does_not_error_for_missing_all_databases_with_dry_run(): @@ -376,7 +376,7 @@ def test_dump_databases_does_not_error_for_missing_all_databases_with_dry_run(): ) flexmock(module).should_receive('database_names_to_dump').and_return(()) - assert module.dump_databases(databases, 'test.yaml', {}, dry_run=True) == [] + assert module.dump_databases(databases, {}, 
'test.yaml', dry_run=True) == [] def test_restore_database_dump_runs_mysql_to_restore(): @@ -393,8 +393,8 @@ def test_restore_database_dump_runs_mysql_to_restore(): module.restore_database_dump( database_config, - 'test.yaml', {}, + 'test.yaml', dry_run=False, extract_process=extract_process, connection_params={ @@ -415,8 +415,8 @@ def test_restore_database_dump_errors_on_multiple_database_config(): with pytest.raises(ValueError): module.restore_database_dump( database_config, - 'test.yaml', {}, + 'test.yaml', dry_run=False, extract_process=flexmock(), connection_params={ @@ -442,8 +442,8 @@ def test_restore_database_dump_runs_mysql_with_options(): module.restore_database_dump( database_config, - 'test.yaml', {}, + 'test.yaml', dry_run=False, extract_process=extract_process, connection_params={ @@ -478,8 +478,8 @@ def test_restore_database_dump_runs_mysql_with_hostname_and_port(): module.restore_database_dump( database_config, - 'test.yaml', {}, + 'test.yaml', dry_run=False, extract_process=extract_process, connection_params={ @@ -505,8 +505,8 @@ def test_restore_database_dump_runs_mysql_with_username_and_password(): module.restore_database_dump( database_config, - 'test.yaml', {}, + 'test.yaml', dry_run=False, extract_process=extract_process, connection_params={ @@ -553,8 +553,8 @@ def test_restore_database_dump_with_connection_params_uses_connection_params_for module.restore_database_dump( database_config, - 'test.yaml', {}, + 'test.yaml', dry_run=False, extract_process=extract_process, connection_params={ @@ -603,8 +603,8 @@ def test_restore_database_dump_without_connection_params_uses_restore_params_in_ module.restore_database_dump( database_config, - 'test.yaml', {}, + 'test.yaml', dry_run=False, extract_process=extract_process, connection_params={ @@ -623,8 +623,8 @@ def test_restore_database_dump_with_dry_run_skips_restore(): module.restore_database_dump( database_config, - 'test.yaml', {}, + 'test.yaml', dry_run=True, extract_process=flexmock(), 
connection_params={ diff --git a/tests/unit/hooks/test_ntfy.py b/tests/unit/hooks/test_ntfy.py index 9731df7a..7bace524 100644 --- a/tests/unit/hooks/test_ntfy.py +++ b/tests/unit/hooks/test_ntfy.py @@ -44,6 +44,7 @@ def test_ping_monitor_minimal_config_hits_hosted_ntfy_on_fail(): module.ping_monitor( hook_config, + {}, 'config.yaml', borgmatic.hooks.monitor.State.FAIL, monitoring_log_level=1, @@ -65,6 +66,7 @@ def test_ping_monitor_with_auth_hits_hosted_ntfy_on_fail(): module.ping_monitor( hook_config, + {}, 'config.yaml', borgmatic.hooks.monitor.State.FAIL, monitoring_log_level=1, @@ -83,6 +85,7 @@ def test_ping_monitor_auth_with_no_username_warning(): module.ping_monitor( hook_config, + {}, 'config.yaml', borgmatic.hooks.monitor.State.FAIL, monitoring_log_level=1, @@ -101,6 +104,7 @@ def test_ping_monitor_auth_with_no_password_warning(): module.ping_monitor( hook_config, + {}, 'config.yaml', borgmatic.hooks.monitor.State.FAIL, monitoring_log_level=1, @@ -114,6 +118,7 @@ def test_ping_monitor_minimal_config_does_not_hit_hosted_ntfy_on_start(): module.ping_monitor( hook_config, + {}, 'config.yaml', borgmatic.hooks.monitor.State.START, monitoring_log_level=1, @@ -127,6 +132,7 @@ def test_ping_monitor_minimal_config_does_not_hit_hosted_ntfy_on_finish(): module.ping_monitor( hook_config, + {}, 'config.yaml', borgmatic.hooks.monitor.State.FINISH, monitoring_log_level=1, @@ -144,6 +150,7 @@ def test_ping_monitor_minimal_config_hits_selfhosted_ntfy_on_fail(): module.ping_monitor( hook_config, + {}, 'config.yaml', borgmatic.hooks.monitor.State.FAIL, monitoring_log_level=1, @@ -157,6 +164,7 @@ def test_ping_monitor_minimal_config_does_not_hit_hosted_ntfy_on_fail_dry_run(): module.ping_monitor( hook_config, + {}, 'config.yaml', borgmatic.hooks.monitor.State.FAIL, monitoring_log_level=1, @@ -172,6 +180,7 @@ def test_ping_monitor_custom_message_hits_hosted_ntfy_on_fail(): module.ping_monitor( hook_config, + {}, 'config.yaml', borgmatic.hooks.monitor.State.FAIL, 
monitoring_log_level=1, @@ -189,6 +198,7 @@ def test_ping_monitor_custom_state_hits_hosted_ntfy_on_start(): module.ping_monitor( hook_config, + {}, 'config.yaml', borgmatic.hooks.monitor.State.START, monitoring_log_level=1, @@ -207,6 +217,7 @@ def test_ping_monitor_with_connection_error_logs_warning(): module.ping_monitor( hook_config, + {}, 'config.yaml', borgmatic.hooks.monitor.State.FAIL, monitoring_log_level=1, @@ -229,6 +240,7 @@ def test_ping_monitor_with_other_error_logs_warning(): module.ping_monitor( hook_config, + {}, 'config.yaml', borgmatic.hooks.monitor.State.FAIL, monitoring_log_level=1, diff --git a/tests/unit/hooks/test_pagerduty.py b/tests/unit/hooks/test_pagerduty.py index 0fccae00..5a5ed12a 100644 --- a/tests/unit/hooks/test_pagerduty.py +++ b/tests/unit/hooks/test_pagerduty.py @@ -8,6 +8,7 @@ def test_ping_monitor_ignores_start_state(): module.ping_monitor( {'integration_key': 'abc123'}, + {}, 'config.yaml', module.monitor.State.START, monitoring_log_level=1, @@ -20,6 +21,7 @@ def test_ping_monitor_ignores_finish_state(): module.ping_monitor( {'integration_key': 'abc123'}, + {}, 'config.yaml', module.monitor.State.FINISH, monitoring_log_level=1, @@ -32,6 +34,7 @@ def test_ping_monitor_calls_api_for_fail_state(): module.ping_monitor( {'integration_key': 'abc123'}, + {}, 'config.yaml', module.monitor.State.FAIL, monitoring_log_level=1, @@ -44,6 +47,7 @@ def test_ping_monitor_dry_run_does_not_call_api(): module.ping_monitor( {'integration_key': 'abc123'}, + {}, 'config.yaml', module.monitor.State.FAIL, monitoring_log_level=1, @@ -59,6 +63,7 @@ def test_ping_monitor_with_connection_error_logs_warning(): module.ping_monitor( {'integration_key': 'abc123'}, + {}, 'config.yaml', module.monitor.State.FAIL, monitoring_log_level=1, @@ -76,6 +81,7 @@ def test_ping_monitor_with_other_error_logs_warning(): module.ping_monitor( {'integration_key': 'abc123'}, + {}, 'config.yaml', module.monitor.State.FAIL, monitoring_log_level=1, diff --git 
a/tests/unit/hooks/test_postgresql.py b/tests/unit/hooks/test_postgresql.py index 7ba45847..d7416100 100644 --- a/tests/unit/hooks/test_postgresql.py +++ b/tests/unit/hooks/test_postgresql.py @@ -217,7 +217,7 @@ def test_dump_databases_runs_pg_dump_for_each_database(): run_to_completion=False, ).and_return(process).once() - assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == processes + assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == processes def test_dump_databases_raises_when_no_database_names_to_dump(): @@ -227,7 +227,7 @@ def test_dump_databases_raises_when_no_database_names_to_dump(): flexmock(module).should_receive('database_names_to_dump').and_return(()) with pytest.raises(ValueError): - module.dump_databases(databases, 'test.yaml', {}, dry_run=False) + module.dump_databases(databases, {}, 'test.yaml', dry_run=False) def test_dump_databases_does_not_raise_when_no_database_names_to_dump(): @@ -236,7 +236,7 @@ def test_dump_databases_does_not_raise_when_no_database_names_to_dump(): flexmock(module).should_receive('make_dump_path').and_return('') flexmock(module).should_receive('database_names_to_dump').and_return(()) - module.dump_databases(databases, 'test.yaml', {}, dry_run=True) == [] + module.dump_databases(databases, {}, 'test.yaml', dry_run=True) == [] def test_dump_databases_with_duplicate_dump_skips_pg_dump(): @@ -253,7 +253,7 @@ def test_dump_databases_with_duplicate_dump_skips_pg_dump(): flexmock(module.dump).should_receive('create_named_pipe_for_dump').never() flexmock(module).should_receive('execute_command').never() - assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == [] + assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == [] def test_dump_databases_with_dry_run_skips_pg_dump(): @@ -270,7 +270,7 @@ def test_dump_databases_with_dry_run_skips_pg_dump(): flexmock(module.dump).should_receive('create_named_pipe_for_dump').never() 
flexmock(module).should_receive('execute_command').never() - assert module.dump_databases(databases, 'test.yaml', {}, dry_run=True) == [] + assert module.dump_databases(databases, {}, 'test.yaml', dry_run=True) == [] def test_dump_databases_runs_pg_dump_with_hostname_and_port(): @@ -306,7 +306,7 @@ def test_dump_databases_runs_pg_dump_with_hostname_and_port(): run_to_completion=False, ).and_return(process).once() - assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == [process] + assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == [process] def test_dump_databases_runs_pg_dump_with_username_and_password(): @@ -342,7 +342,7 @@ def test_dump_databases_runs_pg_dump_with_username_and_password(): run_to_completion=False, ).and_return(process).once() - assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == [process] + assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == [process] def test_dump_databases_runs_pg_dump_with_directory_format(): @@ -373,7 +373,7 @@ def test_dump_databases_runs_pg_dump_with_directory_format(): extra_environment={'PGSSLMODE': 'disable'}, ).and_return(flexmock()).once() - assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == [] + assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == [] def test_dump_databases_runs_pg_dump_with_options(): @@ -406,7 +406,7 @@ def test_dump_databases_runs_pg_dump_with_options(): run_to_completion=False, ).and_return(process).once() - assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == [process] + assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == [process] def test_dump_databases_runs_pg_dumpall_for_all_databases(): @@ -428,7 +428,7 @@ def test_dump_databases_runs_pg_dumpall_for_all_databases(): run_to_completion=False, ).and_return(process).once() - assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == [process] + assert 
module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == [process] def test_dump_databases_runs_non_default_pg_dump(): @@ -460,7 +460,7 @@ def test_dump_databases_runs_non_default_pg_dump(): run_to_completion=False, ).and_return(process).once() - assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == [process] + assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == [process] def test_restore_database_dump_runs_pg_restore(): @@ -501,8 +501,8 @@ def test_restore_database_dump_runs_pg_restore(): module.restore_database_dump( database_config, - 'test.yaml', {}, + 'test.yaml', dry_run=False, extract_process=extract_process, connection_params={ @@ -526,8 +526,8 @@ def test_restore_database_dump_errors_on_multiple_database_config(): with pytest.raises(ValueError): module.restore_database_dump( database_config, - 'test.yaml', {}, + 'test.yaml', dry_run=False, extract_process=flexmock(), connection_params={ @@ -587,8 +587,8 @@ def test_restore_database_dump_runs_pg_restore_with_hostname_and_port(): module.restore_database_dump( database_config, - 'test.yaml', {}, + 'test.yaml', dry_run=False, extract_process=extract_process, connection_params={ @@ -646,8 +646,8 @@ def test_restore_database_dump_runs_pg_restore_with_username_and_password(): module.restore_database_dump( database_config, - 'test.yaml', {}, + 'test.yaml', dry_run=False, extract_process=extract_process, connection_params={ @@ -724,8 +724,8 @@ def test_restore_database_dump_with_connection_params_uses_connection_params_for module.restore_database_dump( database_config, - 'test.yaml', {}, + 'test.yaml', dry_run=False, extract_process=extract_process, connection_params={ @@ -802,8 +802,8 @@ def test_restore_database_dump_without_connection_params_uses_restore_params_in_ module.restore_database_dump( database_config, - 'test.yaml', {}, + 'test.yaml', dry_run=False, extract_process=extract_process, connection_params={ @@ -862,8 +862,8 @@ def 
test_restore_database_dump_runs_pg_restore_with_options(): module.restore_database_dump( database_config, - 'test.yaml', {}, + 'test.yaml', dry_run=False, extract_process=extract_process, connection_params={ @@ -900,8 +900,8 @@ def test_restore_database_dump_runs_psql_for_all_database_dump(): module.restore_database_dump( database_config, - 'test.yaml', {}, + 'test.yaml', dry_run=False, extract_process=extract_process, connection_params={ @@ -943,8 +943,8 @@ def test_restore_database_dump_runs_psql_for_plain_database_dump(): module.restore_database_dump( database_config, - 'test.yaml', {}, + 'test.yaml', dry_run=False, extract_process=extract_process, connection_params={ @@ -1007,8 +1007,8 @@ def test_restore_database_dump_runs_non_default_pg_restore_and_psql(): module.restore_database_dump( database_config, - 'test.yaml', {}, + 'test.yaml', dry_run=False, extract_process=extract_process, connection_params={ @@ -1030,8 +1030,8 @@ def test_restore_database_dump_with_dry_run_skips_restore(): module.restore_database_dump( database_config, - 'test.yaml', {}, + 'test.yaml', dry_run=True, extract_process=flexmock(), connection_params={ @@ -1081,8 +1081,8 @@ def test_restore_database_dump_without_extract_process_restores_from_disk(): module.restore_database_dump( database_config, - 'test.yaml', {}, + 'test.yaml', dry_run=False, extract_process=None, connection_params={ @@ -1136,8 +1136,8 @@ def test_restore_database_dump_with_schemas_restores_schemas(): module.restore_database_dump( database_config, - 'test.yaml', {}, + 'test.yaml', dry_run=False, extract_process=None, connection_params={ diff --git a/tests/unit/hooks/test_sqlite.py b/tests/unit/hooks/test_sqlite.py index 33317372..f61ffc75 100644 --- a/tests/unit/hooks/test_sqlite.py +++ b/tests/unit/hooks/test_sqlite.py @@ -17,7 +17,7 @@ def test_dump_databases_logs_and_skips_if_dump_already_exists(): flexmock(module.dump).should_receive('create_parent_directory_for_dump').never() 
flexmock(module).should_receive('execute_command').never() - assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == [] + assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == [] def test_dump_databases_dumps_each_database(): @@ -37,7 +37,7 @@ def test_dump_databases_dumps_each_database(): processes[1] ) - assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == processes + assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == processes def test_dumping_database_with_non_existent_path_warns_and_dumps_database(): @@ -55,7 +55,7 @@ def test_dumping_database_with_non_existent_path_warns_and_dumps_database(): flexmock(module.dump).should_receive('create_parent_directory_for_dump') flexmock(module).should_receive('execute_command').and_return(processes[0]) - assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == processes + assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == processes def test_dumping_database_with_name_all_warns_and_dumps_all_databases(): @@ -75,7 +75,7 @@ def test_dumping_database_with_name_all_warns_and_dumps_all_databases(): flexmock(module.dump).should_receive('create_parent_directory_for_dump') flexmock(module).should_receive('execute_command').and_return(processes[0]) - assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == processes + assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == processes def test_dump_databases_does_not_dump_if_dry_run(): @@ -89,7 +89,7 @@ def test_dump_databases_does_not_dump_if_dry_run(): flexmock(module.dump).should_receive('create_parent_directory_for_dump').never() flexmock(module).should_receive('execute_command').never() - assert module.dump_databases(databases, 'test.yaml', {}, dry_run=True) == [] + assert module.dump_databases(databases, {}, 'test.yaml', dry_run=True) == [] def test_restore_database_dump_restores_database(): @@ -110,8 +110,8 @@ def 
test_restore_database_dump_restores_database(): module.restore_database_dump( database_config, - 'test.yaml', {}, + 'test.yaml', dry_run=False, extract_process=extract_process, connection_params={'restore_path': None}, @@ -138,8 +138,8 @@ def test_restore_database_dump_with_connection_params_uses_connection_params_for module.restore_database_dump( database_config, - 'test.yaml', {}, + 'test.yaml', dry_run=False, extract_process=extract_process, connection_params={'restore_path': 'cli/path/to/database'}, @@ -166,8 +166,8 @@ def test_restore_database_dump_without_connection_params_uses_restore_params_in_ module.restore_database_dump( database_config, - 'test.yaml', {}, + 'test.yaml', dry_run=False, extract_process=extract_process, connection_params={'restore_path': None}, @@ -183,8 +183,8 @@ def test_restore_database_dump_does_not_restore_database_if_dry_run(): module.restore_database_dump( database_config, - 'test.yaml', {}, + 'test.yaml', dry_run=True, extract_process=extract_process, connection_params={'restore_path': None}, @@ -198,8 +198,8 @@ def test_restore_database_dump_raises_error_if_database_config_is_invalid(): with pytest.raises(ValueError): module.restore_database_dump( database_config, - 'test.yaml', {}, + 'test.yaml', dry_run=False, extract_process=extract_process, connection_params={'restore_path': None}, From ab351548d2533110280dd6b24760f6910e5f9a61 Mon Sep 17 00:00:00 2001 From: Dan Helfman Date: Sun, 9 Jul 2023 17:40:02 -0700 Subject: [PATCH 02/12] Fix (some) end-to-end tests (#721). 
--- borgmatic/hooks/cronhub.py | 4 +- borgmatic/hooks/cronitor.py | 4 +- borgmatic/hooks/healthchecks.py | 4 +- borgmatic/hooks/ntfy.py | 4 +- borgmatic/hooks/pagerduty.py | 4 +- tests/end-to-end/test_database.py | 211 +++++++++---------- tests/end-to-end/test_validate_config.py | 1 - tests/integration/hooks/test_healthchecks.py | 4 +- tests/unit/hooks/test_healthchecks.py | 10 +- 9 files changed, 118 insertions(+), 128 deletions(-) diff --git a/borgmatic/hooks/cronhub.py b/borgmatic/hooks/cronhub.py index 170f1916..bbdc19a8 100644 --- a/borgmatic/hooks/cronhub.py +++ b/borgmatic/hooks/cronhub.py @@ -14,7 +14,7 @@ MONITOR_STATE_TO_CRONHUB = { def initialize_monitor( - ping_url, config_filename, monitoring_log_level, dry_run + ping_url, config, config_filename, monitoring_log_level, dry_run ): # pragma: no cover ''' No initialization is necessary for this monitor. @@ -55,7 +55,7 @@ def ping_monitor(hook_config, config, config_filename, state, monitoring_log_lev def destroy_monitor( - ping_url_or_uuid, config_filename, monitoring_log_level, dry_run + ping_url_or_uuid, config, config_filename, monitoring_log_level, dry_run ): # pragma: no cover ''' No destruction is necessary for this monitor. diff --git a/borgmatic/hooks/cronitor.py b/borgmatic/hooks/cronitor.py index d57920cd..fe4cc1d8 100644 --- a/borgmatic/hooks/cronitor.py +++ b/borgmatic/hooks/cronitor.py @@ -14,7 +14,7 @@ MONITOR_STATE_TO_CRONITOR = { def initialize_monitor( - ping_url, config_filename, monitoring_log_level, dry_run + ping_url, config, config_filename, monitoring_log_level, dry_run ): # pragma: no cover ''' No initialization is necessary for this monitor. @@ -50,7 +50,7 @@ def ping_monitor(hook_config, config, config_filename, state, monitoring_log_lev def destroy_monitor( - ping_url_or_uuid, config_filename, monitoring_log_level, dry_run + ping_url_or_uuid, config, config_filename, monitoring_log_level, dry_run ): # pragma: no cover ''' No destruction is necessary for this monitor. 
diff --git a/borgmatic/hooks/healthchecks.py b/borgmatic/hooks/healthchecks.py index 511e9566..ae0772c5 100644 --- a/borgmatic/hooks/healthchecks.py +++ b/borgmatic/hooks/healthchecks.py @@ -70,7 +70,7 @@ def format_buffered_logs_for_payload(): return payload -def initialize_monitor(hook_config, config_filename, monitoring_log_level, dry_run): +def initialize_monitor(hook_config, config, config_filename, monitoring_log_level, dry_run): ''' Add a handler to the root logger that stores in memory the most recent logs emitted. That way, we can send them all to Healthchecks upon a finish or failure state. But skip this if the @@ -133,7 +133,7 @@ def ping_monitor(hook_config, config, config_filename, state, monitoring_log_lev logger.warning(f'{config_filename}: Healthchecks error: {error}') -def destroy_monitor(hook_config, config_filename, monitoring_log_level, dry_run): +def destroy_monitor(hook_config, config, config_filename, monitoring_log_level, dry_run): ''' Remove the monitor handler that was added to the root logger. This prevents the handler from getting reused by other instances of this monitor. diff --git a/borgmatic/hooks/ntfy.py b/borgmatic/hooks/ntfy.py index 50aa387a..abe976f0 100644 --- a/borgmatic/hooks/ntfy.py +++ b/borgmatic/hooks/ntfy.py @@ -6,7 +6,7 @@ logger = logging.getLogger(__name__) def initialize_monitor( - ping_url, config_filename, monitoring_log_level, dry_run + ping_url, config, config_filename, monitoring_log_level, dry_run ): # pragma: no cover ''' No initialization is necessary for this monitor. @@ -75,7 +75,7 @@ def ping_monitor(hook_config, config, config_filename, state, monitoring_log_lev def destroy_monitor( - ping_url_or_uuid, config_filename, monitoring_log_level, dry_run + ping_url_or_uuid, config, config_filename, monitoring_log_level, dry_run ): # pragma: no cover ''' No destruction is necessary for this monitor. 
diff --git a/borgmatic/hooks/pagerduty.py b/borgmatic/hooks/pagerduty.py index 382a402f..aeeec97c 100644 --- a/borgmatic/hooks/pagerduty.py +++ b/borgmatic/hooks/pagerduty.py @@ -13,7 +13,7 @@ EVENTS_API_URL = 'https://events.pagerduty.com/v2/enqueue' def initialize_monitor( - integration_key, config_filename, monitoring_log_level, dry_run + integration_key, config, config_filename, monitoring_log_level, dry_run ): # pragma: no cover ''' No initialization is necessary for this monitor. @@ -75,7 +75,7 @@ def ping_monitor(hook_config, config, config_filename, state, monitoring_log_lev def destroy_monitor( - ping_url_or_uuid, config_filename, monitoring_log_level, dry_run + ping_url_or_uuid, config, config_filename, monitoring_log_level, dry_run ): # pragma: no cover ''' No destruction is necessary for this monitor. diff --git a/tests/end-to-end/test_database.py b/tests/end-to-end/test_database.py index 0a10339e..9209ec39 100644 --- a/tests/end-to-end/test_database.py +++ b/tests/end-to-end/test_database.py @@ -22,60 +22,57 @@ def write_configuration( storing database dumps, dump format (for PostgreSQL), and encryption passphrase. 
''' config = f''' -location: - source_directories: - - {source_directory} - repositories: - - {repository_path} - borgmatic_source_directory: {borgmatic_source_directory} +source_directories: + - {source_directory} +repositories: + - {repository_path} +borgmatic_source_directory: {borgmatic_source_directory} -storage: - encryption_passphrase: "test" +encryption_passphrase: "test" -hooks: - postgresql_databases: - - name: test - hostname: postgresql - username: postgres - password: test - format: {postgresql_dump_format} - - name: all - hostname: postgresql - username: postgres - password: test - - name: all - format: custom - hostname: postgresql - username: postgres - password: test - mysql_databases: - - name: test - hostname: mysql - username: root - password: test - - name: all - hostname: mysql - username: root - password: test - - name: all - format: sql - hostname: mysql - username: root - password: test - mongodb_databases: - - name: test - hostname: mongodb - username: root - password: test - authentication_database: admin - format: {mongodb_dump_format} - - name: all - hostname: mongodb - username: root - password: test - sqlite_databases: - - name: sqlite_test - path: /tmp/sqlite_test.db +postgresql_databases: + - name: test + hostname: postgresql + username: postgres + password: test + format: {postgresql_dump_format} + - name: all + hostname: postgresql + username: postgres + password: test + - name: all + format: custom + hostname: postgresql + username: postgres + password: test +mysql_databases: + - name: test + hostname: mysql + username: root + password: test + - name: all + hostname: mysql + username: root + password: test + - name: all + format: sql + hostname: mysql + username: root + password: test +mongodb_databases: + - name: test + hostname: mongodb + username: root + password: test + authentication_database: admin + format: {mongodb_dump_format} + - name: all + hostname: mongodb + username: root + password: test +sqlite_databases: + - 
name: sqlite_test + path: /tmp/sqlite_test.db ''' with open(config_path, 'w') as config_file: @@ -96,51 +93,48 @@ def write_custom_restore_configuration( restore_username, restore_password and restore_path. ''' config = f''' -location: - source_directories: - - {source_directory} - repositories: - - {repository_path} - borgmatic_source_directory: {borgmatic_source_directory} +source_directories: + - {source_directory} +repositories: + - {repository_path} +borgmatic_source_directory: {borgmatic_source_directory} -storage: - encryption_passphrase: "test" +encryption_passphrase: "test" -hooks: - postgresql_databases: - - name: test - hostname: postgresql - username: postgres - password: test - format: {postgresql_dump_format} - restore_hostname: postgresql2 - restore_port: 5433 - restore_username: postgres2 - restore_password: test2 - mysql_databases: - - name: test - hostname: mysql - username: root - password: test - restore_hostname: mysql2 - restore_port: 3307 - restore_username: root - restore_password: test2 - mongodb_databases: - - name: test - hostname: mongodb - username: root - password: test - authentication_database: admin - format: {mongodb_dump_format} - restore_hostname: mongodb2 - restore_port: 27018 - restore_username: root2 - restore_password: test2 - sqlite_databases: - - name: sqlite_test - path: /tmp/sqlite_test.db - restore_path: /tmp/sqlite_test2.db +postgresql_databases: + - name: test + hostname: postgresql + username: postgres + password: test + format: {postgresql_dump_format} + restore_hostname: postgresql2 + restore_port: 5433 + restore_username: postgres2 + restore_password: test2 +mysql_databases: + - name: test + hostname: mysql + username: root + password: test + restore_hostname: mysql2 + restore_port: 3307 + restore_username: root + restore_password: test2 +mongodb_databases: + - name: test + hostname: mongodb + username: root + password: test + authentication_database: admin + format: {mongodb_dump_format} + restore_hostname: 
mongodb2 + restore_port: 27018 + restore_username: root2 + restore_password: test2 +sqlite_databases: + - name: sqlite_test + path: /tmp/sqlite_test.db + restore_path: /tmp/sqlite_test2.db ''' with open(config_path, 'w') as config_file: @@ -161,23 +155,20 @@ def write_simple_custom_restore_configuration( these options for PostgreSQL. ''' config = f''' -location: - source_directories: - - {source_directory} - repositories: - - {repository_path} - borgmatic_source_directory: {borgmatic_source_directory} +source_directories: + - {source_directory} +repositories: + - {repository_path} +borgmatic_source_directory: {borgmatic_source_directory} -storage: - encryption_passphrase: "test" +encryption_passphrase: "test" -hooks: - postgresql_databases: - - name: test - hostname: postgresql - username: postgres - password: test - format: {postgresql_dump_format} +postgresql_databases: + - name: test + hostname: postgresql + username: postgres + password: test + format: {postgresql_dump_format} ''' with open(config_path, 'w') as config_file: diff --git a/tests/end-to-end/test_validate_config.py b/tests/end-to-end/test_validate_config.py index 4b86da4a..85a2006c 100644 --- a/tests/end-to-end/test_validate_config.py +++ b/tests/end-to-end/test_validate_config.py @@ -38,5 +38,4 @@ def test_validate_config_command_with_show_flag_displays_configuration(): f'validate-borgmatic-config --config {config_path} --show'.split(' ') ).decode(sys.stdout.encoding) - assert 'location:' in output assert 'repositories:' in output diff --git a/tests/integration/hooks/test_healthchecks.py b/tests/integration/hooks/test_healthchecks.py index 0c37013b..687b4873 100644 --- a/tests/integration/hooks/test_healthchecks.py +++ b/tests/integration/hooks/test_healthchecks.py @@ -10,7 +10,7 @@ def test_destroy_monitor_removes_healthchecks_handler(): original_handlers = list(logger.handlers) logger.addHandler(module.Forgetful_buffering_handler(byte_capacity=100, log_level=1)) - 
module.destroy_monitor(flexmock(), flexmock(), flexmock(), flexmock()) + module.destroy_monitor(flexmock(), flexmock(), flexmock(), flexmock(), flexmock()) assert logger.handlers == original_handlers @@ -19,6 +19,6 @@ def test_destroy_monitor_without_healthchecks_handler_does_not_raise(): logger = logging.getLogger() original_handlers = list(logger.handlers) - module.destroy_monitor(flexmock(), flexmock(), flexmock(), flexmock()) + module.destroy_monitor(flexmock(), flexmock(), flexmock(), flexmock(), flexmock()) assert logger.handlers == original_handlers diff --git a/tests/unit/hooks/test_healthchecks.py b/tests/unit/hooks/test_healthchecks.py index fd43507b..e3ab4386 100644 --- a/tests/unit/hooks/test_healthchecks.py +++ b/tests/unit/hooks/test_healthchecks.py @@ -86,7 +86,7 @@ def test_initialize_monitor_creates_log_handler_with_ping_body_limit(): ).once() module.initialize_monitor( - {'ping_body_limit': ping_body_limit}, 'test.yaml', monitoring_log_level, dry_run=False + {'ping_body_limit': ping_body_limit}, {}, 'test.yaml', monitoring_log_level, dry_run=False ) @@ -99,7 +99,7 @@ def test_initialize_monitor_creates_log_handler_with_default_ping_body_limit(): monitoring_log_level, ).once() - module.initialize_monitor({}, 'test.yaml', monitoring_log_level, dry_run=False) + module.initialize_monitor({}, {}, 'test.yaml', monitoring_log_level, dry_run=False) def test_initialize_monitor_creates_log_handler_with_zero_ping_body_limit(): @@ -112,7 +112,7 @@ def test_initialize_monitor_creates_log_handler_with_zero_ping_body_limit(): ).once() module.initialize_monitor( - {'ping_body_limit': ping_body_limit}, 'test.yaml', monitoring_log_level, dry_run=False + {'ping_body_limit': ping_body_limit}, {}, 'test.yaml', monitoring_log_level, dry_run=False ) @@ -121,7 +121,7 @@ def test_initialize_monitor_creates_log_handler_when_send_logs_true(): flexmock(module).should_receive('Forgetful_buffering_handler').once() module.initialize_monitor( - {'send_logs': True}, 'test.yaml', 
monitoring_log_level=1, dry_run=False + {'send_logs': True}, {}, 'test.yaml', monitoring_log_level=1, dry_run=False ) @@ -130,7 +130,7 @@ def test_initialize_monitor_bails_when_send_logs_false(): flexmock(module).should_receive('Forgetful_buffering_handler').never() module.initialize_monitor( - {'send_logs': False}, 'test.yaml', monitoring_log_level=1, dry_run=False + {'send_logs': False}, {}, 'test.yaml', monitoring_log_level=1, dry_run=False ) From a3f47a6418665a363d3fd765992c872b496b25c1 Mon Sep 17 00:00:00 2001 From: Dan Helfman Date: Mon, 10 Jul 2023 09:38:28 -0700 Subject: [PATCH 03/12] Remove some sections from tests (#721). --- tests/end-to-end/test_database.py | 6 +++--- tests/integration/config/test_generate.py | 24 +++++++++++------------ 2 files changed, 14 insertions(+), 16 deletions(-) diff --git a/tests/end-to-end/test_database.py b/tests/end-to-end/test_database.py index 9209ec39..f9c36214 100644 --- a/tests/end-to-end/test_database.py +++ b/tests/end-to-end/test_database.py @@ -25,7 +25,7 @@ def write_configuration( source_directories: - {source_directory} repositories: - - {repository_path} + - path: {repository_path} borgmatic_source_directory: {borgmatic_source_directory} encryption_passphrase: "test" @@ -96,7 +96,7 @@ def write_custom_restore_configuration( source_directories: - {source_directory} repositories: - - {repository_path} + - path: {repository_path} borgmatic_source_directory: {borgmatic_source_directory} encryption_passphrase: "test" @@ -158,7 +158,7 @@ def write_simple_custom_restore_configuration( source_directories: - {source_directory} repositories: - - {repository_path} + - path: {repository_path} borgmatic_source_directory: {borgmatic_source_directory} encryption_passphrase: "test" diff --git a/tests/integration/config/test_generate.py b/tests/integration/config/test_generate.py index 5df1825a..e332b899 100644 --- a/tests/integration/config/test_generate.py +++ b/tests/integration/config/test_generate.py @@ -58,14 +58,13 @@ 
foo: - baz - quux -location: - repositories: - - one - - two +repositories: + - one + - two - # This comment should be kept. - # COMMENT_OUT - other: thing +# This comment should be kept. +# COMMENT_OUT +other: thing ''' # flake8: noqa @@ -75,13 +74,12 @@ location: # - baz # - quux -location: - repositories: - - one - - two +repositories: + - one + - two - # This comment should be kept. -# other: thing +# This comment should be kept. +# other: thing ''' assert module.comment_out_optional_configuration(config.strip()) == expected_config.strip() From bd9d592560213f0c4d6ab8cb30ffb7fc81660988 Mon Sep 17 00:00:00 2001 From: Dan Helfman Date: Mon, 10 Jul 2023 10:20:51 -0700 Subject: [PATCH 04/12] Truncate long command error output. --- NEWS | 2 ++ borgmatic/commands/borgmatic.py | 27 ++++++++++++--------------- tests/unit/commands/test_borgmatic.py | 16 +--------------- 3 files changed, 15 insertions(+), 30 deletions(-) diff --git a/NEWS b/NEWS index 2831f29a..1638e083 100644 --- a/NEWS +++ b/NEWS @@ -20,6 +20,8 @@ * All deprecated configuration option values now generate warning logs. * Remove the deprecated (and non-functional) "--excludes" flag in favor of excludes within configuration. + * Fix an error when logging too-long command output during error handling. Now, long command output + is truncated before logging. 
1.7.15 * #326: Add configuration options and command-line flags for backing up a database from one diff --git a/borgmatic/commands/borgmatic.py b/borgmatic/commands/borgmatic.py index a869594e..af87ad67 100644 --- a/borgmatic/commands/borgmatic.py +++ b/borgmatic/commands/borgmatic.py @@ -141,7 +141,6 @@ def run_configuration(config_filename, config, arguments): f'{repository.get("label", repository["path"])}: Error running actions for repository', error, levelno=logging.WARNING, - log_command_error_output=True, ) ) logger.warning( @@ -531,26 +530,24 @@ def load_configurations(config_filenames, overrides=None, resolve_env=True): return (configs, logs) -def log_record(suppress_log=False, **kwargs): +def log_record(**kwargs): ''' Create a log record based on the given makeLogRecord() arguments, one of which must be - named "levelno". Log the record (unless suppress log is set) and return it. + named "levelno". Log the record and return it. ''' record = logging.makeLogRecord(kwargs) - if suppress_log: - return record - logger.handle(record) + return record -def log_error_records( - message, error=None, levelno=logging.CRITICAL, log_command_error_output=False -): +MAX_CAPTURED_OUTPUT_LENGTH = 1000 + + +def log_error_records(message, error=None, levelno=logging.CRITICAL): ''' - Given error message text, an optional exception object, an optional log level, and whether to - log the error output of a CalledProcessError (if any), log error summary information and also - yield it as a series of logging.LogRecord instances. + Given error message text, an optional exception object, and an optional log level, log error + summary information and also yield it as a series of logging.LogRecord instances. Note that because the logs are yielded as a generator, logs won't get logged unless you consume the generator output. 
@@ -566,12 +563,12 @@ def log_error_records( except CalledProcessError as error: yield log_record(levelno=levelno, levelname=level_name, msg=message) if error.output: - # Suppress these logs for now and save full error output for the log summary at the end. + output = error.output.decode('utf-8') yield log_record( levelno=levelno, levelname=level_name, - msg=error.output, - suppress_log=not log_command_error_output, + msg=output[:MAX_CAPTURED_OUTPUT_LENGTH] + + ' ...' * (len(output) > MAX_CAPTURED_OUTPUT_LENGTH), ) yield log_record(levelno=levelno, levelname=level_name, msg=error) except (ValueError, OSError) as error: diff --git a/tests/unit/commands/test_borgmatic.py b/tests/unit/commands/test_borgmatic.py index e94bdd8c..2867a408 100644 --- a/tests/unit/commands/test_borgmatic.py +++ b/tests/unit/commands/test_borgmatic.py @@ -238,7 +238,6 @@ def test_run_configuration_retries_hard_error(): 'foo: Error running actions for repository', OSError, levelno=logging.WARNING, - log_command_error_output=True, ).and_return([flexmock()]) error_logs = [flexmock()] flexmock(module).should_receive('log_error_records').with_args( @@ -278,13 +277,11 @@ def test_run_configuration_retries_round_robin(): 'foo: Error running actions for repository', OSError, levelno=logging.WARNING, - log_command_error_output=True, ).and_return([flexmock()]).ordered() flexmock(module).should_receive('log_error_records').with_args( 'bar: Error running actions for repository', OSError, levelno=logging.WARNING, - log_command_error_output=True, ).and_return([flexmock()]).ordered() foo_error_logs = [flexmock()] flexmock(module).should_receive('log_error_records').with_args( @@ -314,13 +311,11 @@ def test_run_configuration_retries_one_passes(): 'foo: Error running actions for repository', OSError, levelno=logging.WARNING, - log_command_error_output=True, ).and_return([flexmock()]).ordered() flexmock(module).should_receive('log_error_records').with_args( 'bar: Error running actions for repository', 
OSError, levelno=logging.WARNING, - log_command_error_output=True, ).and_return(flexmock()).ordered() error_logs = [flexmock()] flexmock(module).should_receive('log_error_records').with_args( @@ -344,7 +339,6 @@ def test_run_configuration_retry_wait(): 'foo: Error running actions for repository', OSError, levelno=logging.WARNING, - log_command_error_output=True, ).and_return([flexmock()]).ordered() flexmock(time).should_receive('sleep').with_args(10).and_return().ordered() @@ -352,7 +346,6 @@ def test_run_configuration_retry_wait(): 'foo: Error running actions for repository', OSError, levelno=logging.WARNING, - log_command_error_output=True, ).and_return([flexmock()]).ordered() flexmock(time).should_receive('sleep').with_args(20).and_return().ordered() @@ -360,7 +353,6 @@ def test_run_configuration_retry_wait(): 'foo: Error running actions for repository', OSError, levelno=logging.WARNING, - log_command_error_output=True, ).and_return([flexmock()]).ordered() flexmock(time).should_receive('sleep').with_args(30).and_return().ordered() @@ -389,13 +381,11 @@ def test_run_configuration_retries_timeout_multiple_repos(): 'foo: Error running actions for repository', OSError, levelno=logging.WARNING, - log_command_error_output=True, ).and_return([flexmock()]).ordered() flexmock(module).should_receive('log_error_records').with_args( 'bar: Error running actions for repository', OSError, levelno=logging.WARNING, - log_command_error_output=True, ).and_return([flexmock()]).ordered() # Sleep before retrying foo (and passing) @@ -826,10 +816,6 @@ def test_log_record_does_not_raise(): module.log_record(levelno=1, foo='bar', baz='quux') -def test_log_record_with_suppress_does_not_raise(): - module.log_record(levelno=1, foo='bar', baz='quux', suppress_log=True) - - def test_log_error_records_generates_output_logs_for_message_only(): flexmock(module).should_receive('log_record').replace_with(dict) @@ -843,7 +829,7 @@ def 
test_log_error_records_generates_output_logs_for_called_process_error(): flexmock(module.logger).should_receive('getEffectiveLevel').and_return(logging.WARNING) logs = tuple( - module.log_error_records('Error', subprocess.CalledProcessError(1, 'ls', 'error output')) + module.log_error_records('Error', subprocess.CalledProcessError(1, 'ls', b'error output')) ) assert {log['levelno'] for log in logs} == {logging.CRITICAL} From e8dbca9d6809f6f60730e38ee5608472124e223f Mon Sep 17 00:00:00 2001 From: Dan Helfman Date: Mon, 10 Jul 2023 11:16:18 -0700 Subject: [PATCH 05/12] Truncate long command output without assuming an encoded byte string. --- borgmatic/borg/create.py | 2 ++ borgmatic/borg/info.py | 1 + borgmatic/borg/list.py | 2 ++ borgmatic/borg/rinfo.py | 1 + borgmatic/borg/rlist.py | 5 ++++- borgmatic/borg/version.py | 1 + borgmatic/commands/borgmatic.py | 26 ++++++++++++++++++------- borgmatic/execute.py | 7 +++++-- tests/unit/borg/test_create.py | 7 ++++++- tests/unit/borg/test_info.py | 3 +++ tests/unit/borg/test_list.py | 2 ++ tests/unit/borg/test_rinfo.py | 3 +++ tests/unit/borg/test_rlist.py | 8 ++++++++ tests/unit/borg/test_version.py | 1 + tests/unit/commands/test_borgmatic.py | 28 ++++++++++++++++++++++++++- 15 files changed, 85 insertions(+), 12 deletions(-) diff --git a/borgmatic/borg/create.py b/borgmatic/borg/create.py index 018f447d..d778e565 100644 --- a/borgmatic/borg/create.py +++ b/borgmatic/borg/create.py @@ -292,6 +292,7 @@ def collect_special_file_paths( capture_stderr=True, working_directory=working_directory, extra_environment=borg_environment, + borg_local_path=local_path, ) paths = tuple( @@ -510,6 +511,7 @@ def create_archive( create_command, working_directory=working_directory, extra_environment=borg_environment, + borg_local_path=local_path, ) else: execute_command( diff --git a/borgmatic/borg/info.py b/borgmatic/borg/info.py index 9a8bdda2..3e596ca4 100644 --- a/borgmatic/borg/info.py +++ b/borgmatic/borg/info.py @@ -66,6 +66,7 @@ def 
display_archives_info( return execute_command_and_capture_output( full_command, extra_environment=environment.make_environment(config), + borg_local_path=local_path, ) else: execute_command( diff --git a/borgmatic/borg/list.py b/borgmatic/borg/list.py index b3db8e90..5a245d97 100644 --- a/borgmatic/borg/list.py +++ b/borgmatic/borg/list.py @@ -123,6 +123,7 @@ def capture_archive_listing( remote_path, ), extra_environment=borg_environment, + borg_local_path=local_path, ) .strip('\n') .split('\n') @@ -217,6 +218,7 @@ def list_archive( remote_path, ), extra_environment=borg_environment, + borg_local_path=local_path, ) .strip('\n') .split('\n') diff --git a/borgmatic/borg/rinfo.py b/borgmatic/borg/rinfo.py index a7ae8229..ab4197e6 100644 --- a/borgmatic/borg/rinfo.py +++ b/borgmatic/borg/rinfo.py @@ -54,6 +54,7 @@ def display_repository_info( return execute_command_and_capture_output( full_command, extra_environment=extra_environment, + borg_local_path=local_path, ) else: execute_command( diff --git a/borgmatic/borg/rlist.py b/borgmatic/borg/rlist.py index b532a6aa..b6ceca31 100644 --- a/borgmatic/borg/rlist.py +++ b/borgmatic/borg/rlist.py @@ -43,6 +43,7 @@ def resolve_archive_name( output = execute_command_and_capture_output( full_command, extra_environment=environment.make_environment(config), + borg_local_path=local_path, ) try: latest_archive = output.strip().splitlines()[-1] @@ -138,7 +139,9 @@ def list_repository( ) if rlist_arguments.json: - return execute_command_and_capture_output(main_command, extra_environment=borg_environment) + return execute_command_and_capture_output( + main_command, extra_environment=borg_environment, borg_local_path=local_path + ) else: execute_command( main_command, diff --git a/borgmatic/borg/version.py b/borgmatic/borg/version.py index feb677ad..9ded62a7 100644 --- a/borgmatic/borg/version.py +++ b/borgmatic/borg/version.py @@ -21,6 +21,7 @@ def local_borg_version(config, local_path='borg'): output = 
execute_command_and_capture_output( full_command, extra_environment=environment.make_environment(config), + borg_local_path=local_path, ) try: diff --git a/borgmatic/commands/borgmatic.py b/borgmatic/commands/borgmatic.py index af87ad67..8f61a0d3 100644 --- a/borgmatic/commands/borgmatic.py +++ b/borgmatic/commands/borgmatic.py @@ -141,6 +141,7 @@ def run_configuration(config_filename, config, arguments): f'{repository.get("label", repository["path"])}: Error running actions for repository', error, levelno=logging.WARNING, + log_command_error_output=True, ) ) logger.warning( @@ -530,24 +531,29 @@ def load_configurations(config_filenames, overrides=None, resolve_env=True): return (configs, logs) -def log_record(**kwargs): +def log_record(suppress_log=False, **kwargs): ''' Create a log record based on the given makeLogRecord() arguments, one of which must be - named "levelno". Log the record and return it. + named "levelno". Log the record (unless suppress log is set) and return it. ''' record = logging.makeLogRecord(kwargs) - logger.handle(record) + if suppress_log: + return record + logger.handle(record) return record MAX_CAPTURED_OUTPUT_LENGTH = 1000 -def log_error_records(message, error=None, levelno=logging.CRITICAL): +def log_error_records( + message, error=None, levelno=logging.CRITICAL, log_command_error_output=False +): ''' - Given error message text, an optional exception object, and an optional log level, log error - summary information and also yield it as a series of logging.LogRecord instances. + Given error message text, an optional exception object, an optional log level, and whether to + log the error output of a CalledProcessError (if any), log error summary information and also + yield it as a series of logging.LogRecord instances. Note that because the logs are yielded as a generator, logs won't get logged unless you consume the generator output. 
@@ -563,12 +569,18 @@ def log_error_records(message, error=None, levelno=logging.CRITICAL): except CalledProcessError as error: yield log_record(levelno=levelno, levelname=level_name, msg=message) if error.output: - output = error.output.decode('utf-8') + try: + output = error.output.decode('utf-8') + except (UnicodeDecodeError, AttributeError): + output = error.output + + # Suppress these logs for now and save full error output for the log summary at the end. yield log_record( levelno=levelno, levelname=level_name, msg=output[:MAX_CAPTURED_OUTPUT_LENGTH] + ' ...' * (len(output) > MAX_CAPTURED_OUTPUT_LENGTH), + suppress_log=True, ) yield log_record(levelno=levelno, levelname=level_name, msg=error) except (ValueError, OSError) as error: diff --git a/borgmatic/execute.py b/borgmatic/execute.py index 39691dac..9238a0c3 100644 --- a/borgmatic/execute.py +++ b/borgmatic/execute.py @@ -241,13 +241,16 @@ def execute_command_and_capture_output( shell=False, extra_environment=None, working_directory=None, + borg_local_path=None, ): ''' Execute the given command (a sequence of command/argument strings), capturing and returning its output (stdout). If capture stderr is True, then capture and return stderr in addition to stdout. If shell is True, execute the command within a shell. If an extra environment dict is given, then use it to augment the current environment, and pass the result into the command. If - a working directory is given, use that as the present working directory when running the command. + a working directory is given, use that as the present working directory when running the + command. If a Borg local path is given, and the command matches it (regardless of arguments), + treat exit code 1 as a warning instead of an error. Raise subprocesses.CalledProcessError if an error occurs while running the command. 
''' @@ -264,7 +267,7 @@ def execute_command_and_capture_output( cwd=working_directory, ) except subprocess.CalledProcessError as error: - if exit_code_indicates_error(command, error.returncode): + if exit_code_indicates_error(command, error.returncode, borg_local_path): raise output = error.output diff --git a/tests/unit/borg/test_create.py b/tests/unit/borg/test_create.py index 5b196d9b..dfea86e2 100644 --- a/tests/unit/borg/test_create.py +++ b/tests/unit/borg/test_create.py @@ -451,13 +451,14 @@ def test_collect_special_file_paths_omits_exclude_no_dump_flag_from_command(): capture_stderr=True, working_directory=None, extra_environment=None, + borg_local_path='borg', ).and_return('Processing files ...\n- /foo\n+ /bar\n- /baz').once() flexmock(module).should_receive('special_file').and_return(True) flexmock(module).should_receive('any_parent_directories').and_return(False) module.collect_special_file_paths( ('borg', 'create', '--exclude-nodump'), - local_path=None, + local_path='borg', working_directory=None, borg_environment=None, skip_directories=flexmock(), @@ -758,6 +759,7 @@ def test_create_archive_with_log_info_and_json_suppresses_most_borg_output(): ('borg', 'create') + REPO_ARCHIVE_WITH_PATHS + ('--json',), working_directory=None, extra_environment=None, + borg_local_path='borg', ) insert_logging_mock(logging.INFO) @@ -842,6 +844,7 @@ def test_create_archive_with_log_debug_and_json_suppresses_most_borg_output(): ('borg', 'create') + REPO_ARCHIVE_WITH_PATHS + ('--json',), working_directory=None, extra_environment=None, + borg_local_path='borg', ) insert_logging_mock(logging.DEBUG) @@ -2235,6 +2238,7 @@ def test_create_archive_with_json_calls_borg_with_json_parameter(): ('borg', 'create') + REPO_ARCHIVE_WITH_PATHS + ('--json',), working_directory=None, extra_environment=None, + borg_local_path='borg', ).and_return('[]') json_output = module.create_archive( @@ -2277,6 +2281,7 @@ def test_create_archive_with_stats_and_json_calls_borg_without_stats_parameter() 
('borg', 'create') + REPO_ARCHIVE_WITH_PATHS + ('--json',), working_directory=None, extra_environment=None, + borg_local_path='borg', ).and_return('[]') json_output = module.create_archive( diff --git a/tests/unit/borg/test_info.py b/tests/unit/borg/test_info.py index 3e93bb73..107c4863 100644 --- a/tests/unit/borg/test_info.py +++ b/tests/unit/borg/test_info.py @@ -73,6 +73,7 @@ def test_display_archives_info_with_log_info_and_json_suppresses_most_borg_outpu flexmock(module).should_receive('execute_command_and_capture_output').with_args( ('borg', 'info', '--json', '--repo', 'repo'), extra_environment=None, + borg_local_path='borg', ).and_return('[]') insert_logging_mock(logging.INFO) @@ -127,6 +128,7 @@ def test_display_archives_info_with_log_debug_and_json_suppresses_most_borg_outp flexmock(module).should_receive('execute_command_and_capture_output').with_args( ('borg', 'info', '--json', '--repo', 'repo'), extra_environment=None, + borg_local_path='borg', ).and_return('[]') insert_logging_mock(logging.DEBUG) @@ -154,6 +156,7 @@ def test_display_archives_info_with_json_calls_borg_with_json_parameter(): flexmock(module).should_receive('execute_command_and_capture_output').with_args( ('borg', 'info', '--json', '--repo', 'repo'), extra_environment=None, + borg_local_path='borg', ).and_return('[]') json_output = module.display_archives_info( diff --git a/tests/unit/borg/test_list.py b/tests/unit/borg/test_list.py index 2f82b802..4d1d5346 100644 --- a/tests/unit/borg/test_list.py +++ b/tests/unit/borg/test_list.py @@ -429,6 +429,7 @@ def test_list_archive_calls_borg_multiple_times_with_find_paths(): flexmock(module).should_receive('execute_command_and_capture_output').with_args( ('borg', 'list', 'repo'), extra_environment=None, + borg_local_path='borg', ).and_return('archive1\narchive2').once() flexmock(module).should_receive('make_list_command').and_return( ('borg', 'list', 'repo::archive1') @@ -667,6 +668,7 @@ def 
test_list_archive_with_find_paths_allows_archive_filter_flag_but_only_passes flexmock(module).should_receive('execute_command_and_capture_output').with_args( ('borg', 'rlist', '--repo', 'repo'), extra_environment=None, + borg_local_path='borg', ).and_return('archive1\narchive2').once() flexmock(module).should_receive('make_list_command').with_args( diff --git a/tests/unit/borg/test_rinfo.py b/tests/unit/borg/test_rinfo.py index 8628b9aa..ee5e5c24 100644 --- a/tests/unit/borg/test_rinfo.py +++ b/tests/unit/borg/test_rinfo.py @@ -97,6 +97,7 @@ def test_display_repository_info_with_log_info_and_json_suppresses_most_borg_out flexmock(module).should_receive('execute_command_and_capture_output').with_args( ('borg', 'rinfo', '--json', '--repo', 'repo'), extra_environment=None, + borg_local_path='borg', ).and_return('[]') insert_logging_mock(logging.INFO) @@ -153,6 +154,7 @@ def test_display_repository_info_with_log_debug_and_json_suppresses_most_borg_ou flexmock(module).should_receive('execute_command_and_capture_output').with_args( ('borg', 'rinfo', '--json', '--repo', 'repo'), extra_environment=None, + borg_local_path='borg', ).and_return('[]') insert_logging_mock(logging.DEBUG) @@ -181,6 +183,7 @@ def test_display_repository_info_with_json_calls_borg_with_json_flag(): flexmock(module).should_receive('execute_command_and_capture_output').with_args( ('borg', 'rinfo', '--json', '--repo', 'repo'), extra_environment=None, + borg_local_path='borg', ).and_return('[]') json_output = module.display_repository_info( diff --git a/tests/unit/borg/test_rlist.py b/tests/unit/borg/test_rlist.py index 65fcef1d..a61a837e 100644 --- a/tests/unit/borg/test_rlist.py +++ b/tests/unit/borg/test_rlist.py @@ -37,6 +37,7 @@ def test_resolve_archive_name_calls_borg_with_flags(): flexmock(module).should_receive('execute_command_and_capture_output').with_args( ('borg', 'list') + BORG_LIST_LATEST_ARGUMENTS, extra_environment=None, + borg_local_path='borg', ).and_return(expected_archive + '\n') 
assert ( @@ -57,6 +58,7 @@ def test_resolve_archive_name_with_log_info_calls_borg_without_info_flag(): flexmock(module).should_receive('execute_command_and_capture_output').with_args( ('borg', 'list') + BORG_LIST_LATEST_ARGUMENTS, extra_environment=None, + borg_local_path='borg', ).and_return(expected_archive + '\n') insert_logging_mock(logging.INFO) @@ -78,6 +80,7 @@ def test_resolve_archive_name_with_log_debug_calls_borg_without_debug_flag(): flexmock(module).should_receive('execute_command_and_capture_output').with_args( ('borg', 'list') + BORG_LIST_LATEST_ARGUMENTS, extra_environment=None, + borg_local_path='borg', ).and_return(expected_archive + '\n') insert_logging_mock(logging.DEBUG) @@ -99,6 +102,7 @@ def test_resolve_archive_name_with_local_path_calls_borg_via_local_path(): flexmock(module).should_receive('execute_command_and_capture_output').with_args( ('borg1', 'list') + BORG_LIST_LATEST_ARGUMENTS, extra_environment=None, + borg_local_path='borg1', ).and_return(expected_archive + '\n') assert ( @@ -120,6 +124,7 @@ def test_resolve_archive_name_with_remote_path_calls_borg_with_remote_path_flags flexmock(module).should_receive('execute_command_and_capture_output').with_args( ('borg', 'list', '--remote-path', 'borg1') + BORG_LIST_LATEST_ARGUMENTS, extra_environment=None, + borg_local_path='borg', ).and_return(expected_archive + '\n') assert ( @@ -140,6 +145,7 @@ def test_resolve_archive_name_without_archives_raises(): flexmock(module).should_receive('execute_command_and_capture_output').with_args( ('borg', 'list') + BORG_LIST_LATEST_ARGUMENTS, extra_environment=None, + borg_local_path='borg', ).and_return('') with pytest.raises(ValueError): @@ -159,6 +165,7 @@ def test_resolve_archive_name_with_log_json_calls_borg_with_log_json_flags(): flexmock(module).should_receive('execute_command_and_capture_output').with_args( ('borg', 'list', '--log-json') + BORG_LIST_LATEST_ARGUMENTS, extra_environment=None, + borg_local_path='borg', ).and_return(expected_archive + 
'\n') assert ( @@ -180,6 +187,7 @@ def test_resolve_archive_name_with_lock_wait_calls_borg_with_lock_wait_flags(): flexmock(module).should_receive('execute_command_and_capture_output').with_args( ('borg', 'list', '--lock-wait', 'okay') + BORG_LIST_LATEST_ARGUMENTS, extra_environment=None, + borg_local_path='borg', ).and_return(expected_archive + '\n') assert ( diff --git a/tests/unit/borg/test_version.py b/tests/unit/borg/test_version.py index a051f693..a00235a5 100644 --- a/tests/unit/borg/test_version.py +++ b/tests/unit/borg/test_version.py @@ -17,6 +17,7 @@ def insert_execute_command_and_capture_output_mock( flexmock(module).should_receive('execute_command_and_capture_output').with_args( command, extra_environment=None, + borg_local_path=borg_local_path, ).once().and_return(version_output) diff --git a/tests/unit/commands/test_borgmatic.py b/tests/unit/commands/test_borgmatic.py index 2867a408..1c44e281 100644 --- a/tests/unit/commands/test_borgmatic.py +++ b/tests/unit/commands/test_borgmatic.py @@ -238,6 +238,7 @@ def test_run_configuration_retries_hard_error(): 'foo: Error running actions for repository', OSError, levelno=logging.WARNING, + log_command_error_output=True, ).and_return([flexmock()]) error_logs = [flexmock()] flexmock(module).should_receive('log_error_records').with_args( @@ -277,11 +278,13 @@ def test_run_configuration_retries_round_robin(): 'foo: Error running actions for repository', OSError, levelno=logging.WARNING, + log_command_error_output=True, ).and_return([flexmock()]).ordered() flexmock(module).should_receive('log_error_records').with_args( 'bar: Error running actions for repository', OSError, levelno=logging.WARNING, + log_command_error_output=True, ).and_return([flexmock()]).ordered() foo_error_logs = [flexmock()] flexmock(module).should_receive('log_error_records').with_args( @@ -311,11 +314,13 @@ def test_run_configuration_retries_one_passes(): 'foo: Error running actions for repository', OSError, levelno=logging.WARNING, + 
log_command_error_output=True, ).and_return([flexmock()]).ordered() flexmock(module).should_receive('log_error_records').with_args( 'bar: Error running actions for repository', OSError, levelno=logging.WARNING, + log_command_error_output=True, ).and_return(flexmock()).ordered() error_logs = [flexmock()] flexmock(module).should_receive('log_error_records').with_args( @@ -339,6 +344,7 @@ def test_run_configuration_retry_wait(): 'foo: Error running actions for repository', OSError, levelno=logging.WARNING, + log_command_error_output=True, ).and_return([flexmock()]).ordered() flexmock(time).should_receive('sleep').with_args(10).and_return().ordered() @@ -346,6 +352,7 @@ def test_run_configuration_retry_wait(): 'foo: Error running actions for repository', OSError, levelno=logging.WARNING, + log_command_error_output=True, ).and_return([flexmock()]).ordered() flexmock(time).should_receive('sleep').with_args(20).and_return().ordered() @@ -353,6 +360,7 @@ def test_run_configuration_retry_wait(): 'foo: Error running actions for repository', OSError, levelno=logging.WARNING, + log_command_error_output=True, ).and_return([flexmock()]).ordered() flexmock(time).should_receive('sleep').with_args(30).and_return().ordered() @@ -381,11 +389,13 @@ def test_run_configuration_retries_timeout_multiple_repos(): 'foo: Error running actions for repository', OSError, levelno=logging.WARNING, + log_command_error_output=True, ).and_return([flexmock()]).ordered() flexmock(module).should_receive('log_error_records').with_args( 'bar: Error running actions for repository', OSError, levelno=logging.WARNING, + log_command_error_output=True, ).and_return([flexmock()]).ordered() # Sleep before retrying foo (and passing) @@ -816,6 +826,10 @@ def test_log_record_does_not_raise(): module.log_record(levelno=1, foo='bar', baz='quux') +def test_log_record_with_suppress_does_not_raise(): + module.log_record(levelno=1, foo='bar', baz='quux', suppress_log=True) + + def 
test_log_error_records_generates_output_logs_for_message_only(): flexmock(module).should_receive('log_record').replace_with(dict) @@ -824,7 +838,7 @@ def test_log_error_records_generates_output_logs_for_message_only(): assert {log['levelno'] for log in logs} == {logging.CRITICAL} -def test_log_error_records_generates_output_logs_for_called_process_error(): +def test_log_error_records_generates_output_logs_for_called_process_error_with_bytes_output(): flexmock(module).should_receive('log_record').replace_with(dict) flexmock(module.logger).should_receive('getEffectiveLevel').and_return(logging.WARNING) @@ -836,6 +850,18 @@ def test_log_error_records_generates_output_logs_for_called_process_error(): assert any(log for log in logs if 'error output' in str(log)) +def test_log_error_records_generates_output_logs_for_called_process_error_with_string_output(): + flexmock(module).should_receive('log_record').replace_with(dict) + flexmock(module.logger).should_receive('getEffectiveLevel').and_return(logging.WARNING) + + logs = tuple( + module.log_error_records('Error', subprocess.CalledProcessError(1, 'ls', 'error output')) + ) + + assert {log['levelno'] for log in logs} == {logging.CRITICAL} + assert any(log for log in logs if 'error output' in str(log)) + + def test_log_error_records_generates_logs_for_value_error(): flexmock(module).should_receive('log_record').replace_with(dict) From 003d4eac9358998cfea165b2fa3f389fc5d1cee4 Mon Sep 17 00:00:00 2001 From: Dan Helfman Date: Mon, 10 Jul 2023 11:41:43 -0700 Subject: [PATCH 06/12] Remove extra argument (#721).
--- borgmatic/actions/restore.py | 1 - 1 file changed, 1 deletion(-) diff --git a/borgmatic/actions/restore.py b/borgmatic/actions/restore.py index 8112207a..a59554a2 100644 --- a/borgmatic/actions/restore.py +++ b/borgmatic/actions/restore.py @@ -112,7 +112,6 @@ def restore_single_database( {hook_name: [database]}, repository['path'], borgmatic.hooks.dump.DATABASE_HOOK_NAMES, - config, global_arguments.dry_run, extract_process, connection_params, From ecd9e62147db3bcfb266847d82b5194cbf0e5bd0 Mon Sep 17 00:00:00 2001 From: Dan Helfman Date: Tue, 11 Jul 2023 00:37:36 -0700 Subject: [PATCH 07/12] Fix last end-to-end database test (#721). --- borgmatic/actions/restore.py | 3 +- borgmatic/hooks/mongodb.py | 26 ++++++--- borgmatic/hooks/mysql.py | 26 +++++---- borgmatic/hooks/postgresql.py | 29 ++++++---- borgmatic/hooks/sqlite.py | 28 ++++++---- tests/unit/hooks/test_mongodb.py | 57 +++++++++++-------- tests/unit/hooks/test_mysql.py | 57 +++++++------------ tests/unit/hooks/test_postgresql.py | 85 ++++++++++++----------------- tests/unit/hooks/test_sqlite.py | 27 +++++---- 9 files changed, 178 insertions(+), 160 deletions(-) diff --git a/borgmatic/actions/restore.py b/borgmatic/actions/restore.py index a59554a2..06fd1b87 100644 --- a/borgmatic/actions/restore.py +++ b/borgmatic/actions/restore.py @@ -109,8 +109,9 @@ def restore_single_database( # Run a single database restore, consuming the extract stdout (if any). 
borgmatic.hooks.dispatch.call_hooks( 'restore_database_dump', - {hook_name: [database]}, + config, repository['path'], + database['name'], borgmatic.hooks.dump.DATABASE_HOOK_NAMES, global_arguments.dry_run, extract_process, diff --git a/borgmatic/hooks/mongodb.py b/borgmatic/hooks/mongodb.py index 3c91a183..c94a084d 100644 --- a/borgmatic/hooks/mongodb.py +++ b/borgmatic/hooks/mongodb.py @@ -100,24 +100,32 @@ def make_database_dump_pattern(databases, config, log_prefix, name=None): # pra def restore_database_dump( - database_config, config, log_prefix, dry_run, extract_process, connection_params + databases_config, config, log_prefix, database_name, dry_run, extract_process, connection_params ): ''' - Restore the given MongoDB database from an extract stream. The database is supplied as a - one-element sequence containing a dict describing the database, as per the configuration schema. - Use the configuration dict to construct the destination path and the given log prefix in any log - entries. If this is a dry run, then don't actually restore anything. Trigger the given active - extract process (an instance of subprocess.Popen) to produce output to consume. + Restore the given MongoDB database from an extract stream. The databases are supplied as a + sequence containing one dict describing each database (as per the configuration schema), but + only the database corresponding to the given database name is restored. Use the configuration + dict to construct the destination path and the given log prefix in any log entries. If this is a + dry run, then don't actually restore anything. Trigger the given active extract process (an + instance of subprocess.Popen) to produce output to consume. If the extract process is None, then restore the dump from the filesystem rather than from an extract stream. 
''' dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else '' - if len(database_config) != 1: - raise ValueError('The database configuration value is invalid') + try: + database = next( + database_config + for database_config in databases_config + if database_config.get('name') == database_name + ) + except StopIteration: + raise ValueError( + f'A database named "{database_name}" could not be found in the configuration' + ) - database = database_config[0] dump_filename = dump.make_database_dump_filename( make_dump_path(config), database['name'], database.get('hostname') ) diff --git a/borgmatic/hooks/mysql.py b/borgmatic/hooks/mysql.py index 8ca2f6ba..a3b34f15 100644 --- a/borgmatic/hooks/mysql.py +++ b/borgmatic/hooks/mysql.py @@ -181,21 +181,27 @@ def make_database_dump_pattern(databases, config, log_prefix, name=None): # pra def restore_database_dump( - database_config, config, log_prefix, dry_run, extract_process, connection_params + databases_config, config, log_prefix, database_name, dry_run, extract_process, connection_params ): ''' - Restore the given MySQL/MariaDB database from an extract stream. The database is supplied as a - one-element sequence containing a dict describing the database, as per the configuration schema. - Use the given log prefix in any log entries. If this is a dry run, then don't actually restore - anything. Trigger the given active extract process (an instance of subprocess.Popen) to produce - output to consume. + Restore the given MySQL/MariaDB database from an extract stream. The databases are supplied as a + sequence containing one dict describing each database (as per the configuration schema), but + only the database corresponding to the given database name is restored. Use the given log + prefix in any log entries. If this is a dry run, then don't actually restore anything. Trigger + the given active extract process (an instance of subprocess.Popen) to produce output to consume. 
''' dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else '' - if len(database_config) != 1: - raise ValueError('The database configuration value is invalid') - - database = database_config[0] + try: + database = next( + database_config + for database_config in databases_config + if database_config.get('name') == database_name + ) + except StopIteration: + raise ValueError( + f'A database named "{database_name}" could not be found in the configuration' + ) hostname = connection_params['hostname'] or database.get( 'restore_hostname', database.get('hostname') diff --git a/borgmatic/hooks/postgresql.py b/borgmatic/hooks/postgresql.py index 7bef5a70..598b878c 100644 --- a/borgmatic/hooks/postgresql.py +++ b/borgmatic/hooks/postgresql.py @@ -202,14 +202,15 @@ def make_database_dump_pattern(databases, config, log_prefix, name=None): # pra def restore_database_dump( - database_config, config, log_prefix, dry_run, extract_process, connection_params + databases_config, config, log_prefix, database_name, dry_run, extract_process, connection_params ): ''' - Restore the given PostgreSQL database from an extract stream. The database is supplied as a - one-element sequence containing a dict describing the database, as per the configuration schema. - Use the given configuration dict to construct the destination path and the given log prefix in - any log entries. If this is a dry run, then don't actually restore anything. Trigger the given - active extract process (an instance of subprocess.Popen) to produce output to consume. + Restore the given PostgreSQL database from an extract stream. The databases are supplied as a + sequence containing one dict describing each database (as per the configuration schema), but + only the database corresponding to the given database name is restored. Use the given + configuration dict to construct the destination path and the given log prefix in any log + entries. 
If this is a dry run, then don't actually restore anything. Trigger the given active + extract process (an instance of subprocess.Popen) to produce output to consume. If the extract process is None, then restore the dump from the filesystem rather than from an extract stream. @@ -219,10 +220,16 @@ def restore_database_dump( ''' dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else '' - if len(database_config) != 1: - raise ValueError('The database configuration value is invalid') - - database = database_config[0] + try: + database = next( + database_config + for database_config in databases_config + if database_config.get('name') == database_name + ) + except StopIteration: + raise ValueError( + f'A database named "{database_name}" could not be found in the configuration' + ) hostname = connection_params['hostname'] or database.get( 'restore_hostname', database.get('hostname') @@ -262,7 +269,7 @@ def restore_database_dump( + (() if extract_process else (dump_filename,)) + tuple( itertools.chain.from_iterable(('--schema', schema) for schema in database['schemas']) - if database['schemas'] + if database.get('schemas') else () ) ) diff --git a/borgmatic/hooks/sqlite.py b/borgmatic/hooks/sqlite.py index 109f253a..524318bc 100644 --- a/borgmatic/hooks/sqlite.py +++ b/borgmatic/hooks/sqlite.py @@ -84,22 +84,30 @@ def make_database_dump_pattern(databases, config, log_prefix, name=None): # pra def restore_database_dump( - database_config, config, log_prefix, dry_run, extract_process, connection_params + databases_config, config, log_prefix, database_name, dry_run, extract_process, connection_params ): ''' - Restore the given SQLite3 database from an extract stream. The database is supplied as a - one-element sequence containing a dict describing the database, as per the configuration schema. - Use the given log prefix in any log entries. If this is a dry run, then don't actually restore - anything. 
Trigger the given active extract process (an instance of subprocess.Popen) to produce - output to consume. + Restore the given SQLite3 database from an extract stream. The databases are supplied as a + sequence containing one dict describing each database (as per the configuration schema), but + only the database corresponding to the given database name is restored. Use the given log prefix + in any log entries. If this is a dry run, then don't actually restore anything. Trigger the + given active extract process (an instance of subprocess.Popen) to produce output to consume. ''' dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else '' - if len(database_config) != 1: - raise ValueError('The database configuration value is invalid') + try: + database = next( + database_config + for database_config in databases_config + if database_config.get('name') == database_name + ) + except StopIteration: + raise ValueError( + f'A database named "{database_name}" could not be found in the configuration' + ) - database_path = connection_params['restore_path'] or database_config[0].get( - 'restore_path', database_config[0].get('path') + database_path = connection_params['restore_path'] or database.get( + 'restore_path', database.get('path') ) logger.debug(f'{log_prefix}: Restoring SQLite database at {database_path}{dry_run_label}') diff --git a/tests/unit/hooks/test_mongodb.py b/tests/unit/hooks/test_mongodb.py index 234201f4..a676b583 100644 --- a/tests/unit/hooks/test_mongodb.py +++ b/tests/unit/hooks/test_mongodb.py @@ -158,7 +158,7 @@ def test_dump_databases_runs_mongodumpall_for_all_databases(): def test_restore_database_dump_runs_mongorestore(): - database_config = [{'name': 'foo', 'schemas': None}] + databases_config = [{'name': 'foo', 'schemas': None}, {'name': 'bar'}] extract_process = flexmock(stdout=flexmock()) flexmock(module).should_receive('make_dump_path') @@ -171,9 +171,10 @@ def test_restore_database_dump_runs_mongorestore(): ).once() 
module.restore_database_dump( - database_config, + databases_config, {}, 'test.yaml', + database_name='foo', dry_run=False, extract_process=extract_process, connection_params={ @@ -185,8 +186,8 @@ def test_restore_database_dump_runs_mongorestore(): ) -def test_restore_database_dump_errors_on_multiple_database_config(): - database_config = [{'name': 'foo'}, {'name': 'bar'}] +def test_restore_database_dump_errors_on_empty_databases_config(): + databases_config = [] flexmock(module).should_receive('make_dump_path') flexmock(module.dump).should_receive('make_database_dump_filename') @@ -195,9 +196,10 @@ def test_restore_database_dump_errors_on_multiple_database_config(): with pytest.raises(ValueError): module.restore_database_dump( - database_config, + databases_config, {}, 'test.yaml', + database_name='foo', dry_run=False, extract_process=flexmock(), connection_params={ @@ -210,7 +212,7 @@ def test_restore_database_dump_errors_on_multiple_database_config(): def test_restore_database_dump_runs_mongorestore_with_hostname_and_port(): - database_config = [ + databases_config = [ {'name': 'foo', 'hostname': 'database.example.org', 'port': 5433, 'schemas': None} ] extract_process = flexmock(stdout=flexmock()) @@ -235,9 +237,10 @@ def test_restore_database_dump_runs_mongorestore_with_hostname_and_port(): ).once() module.restore_database_dump( - database_config, + databases_config, {}, 'test.yaml', + database_name='foo', dry_run=False, extract_process=extract_process, connection_params={ @@ -250,7 +253,7 @@ def test_restore_database_dump_runs_mongorestore_with_hostname_and_port(): def test_restore_database_dump_runs_mongorestore_with_username_and_password(): - database_config = [ + databases_config = [ { 'name': 'foo', 'username': 'mongo', @@ -283,9 +286,10 @@ def test_restore_database_dump_runs_mongorestore_with_username_and_password(): ).once() module.restore_database_dump( - database_config, + databases_config, {}, 'test.yaml', + database_name='foo', dry_run=False, 
extract_process=extract_process, connection_params={ @@ -298,7 +302,7 @@ def test_restore_database_dump_runs_mongorestore_with_username_and_password(): def test_restore_database_dump_with_connection_params_uses_connection_params_for_restore(): - database_config = [ + databases_config = [ { 'name': 'foo', 'username': 'mongo', @@ -339,9 +343,10 @@ def test_restore_database_dump_with_connection_params_uses_connection_params_for ).once() module.restore_database_dump( - database_config, + databases_config, {}, 'test.yaml', + database_name='foo', dry_run=False, extract_process=extract_process, connection_params={ @@ -354,7 +359,7 @@ def test_restore_database_dump_with_connection_params_uses_connection_params_for def test_restore_database_dump_without_connection_params_uses_restore_params_in_config_for_restore(): - database_config = [ + databases_config = [ { 'name': 'foo', 'username': 'mongo', @@ -395,9 +400,10 @@ def test_restore_database_dump_without_connection_params_uses_restore_params_in_ ).once() module.restore_database_dump( - database_config, + databases_config, {}, 'test.yaml', + database_name='foo', dry_run=False, extract_process=extract_process, connection_params={ @@ -410,7 +416,7 @@ def test_restore_database_dump_without_connection_params_uses_restore_params_in_ def test_restore_database_dump_runs_mongorestore_with_options(): - database_config = [{'name': 'foo', 'restore_options': '--harder', 'schemas': None}] + databases_config = [{'name': 'foo', 'restore_options': '--harder', 'schemas': None}] extract_process = flexmock(stdout=flexmock()) flexmock(module).should_receive('make_dump_path') @@ -423,9 +429,10 @@ def test_restore_database_dump_runs_mongorestore_with_options(): ).once() module.restore_database_dump( - database_config, + databases_config, {}, 'test.yaml', + database_name='foo', dry_run=False, extract_process=extract_process, connection_params={ @@ -438,7 +445,7 @@ def test_restore_database_dump_runs_mongorestore_with_options(): def 
test_restore_databases_dump_runs_mongorestore_with_schemas(): - database_config = [{'name': 'foo', 'schemas': ['bar', 'baz']}] + databases_config = [{'name': 'foo', 'schemas': ['bar', 'baz']}] extract_process = flexmock(stdout=flexmock()) flexmock(module).should_receive('make_dump_path') @@ -461,9 +468,10 @@ def test_restore_databases_dump_runs_mongorestore_with_schemas(): ).once() module.restore_database_dump( - database_config, + databases_config, {}, 'test.yaml', + database_name='foo', dry_run=False, extract_process=extract_process, connection_params={ @@ -476,7 +484,7 @@ def test_restore_databases_dump_runs_mongorestore_with_schemas(): def test_restore_database_dump_runs_psql_for_all_database_dump(): - database_config = [{'name': 'all', 'schemas': None}] + databases_config = [{'name': 'all', 'schemas': None}] extract_process = flexmock(stdout=flexmock()) flexmock(module).should_receive('make_dump_path') @@ -489,9 +497,10 @@ def test_restore_database_dump_runs_psql_for_all_database_dump(): ).once() module.restore_database_dump( - database_config, + databases_config, {}, 'test.yaml', + database_name='all', dry_run=False, extract_process=extract_process, connection_params={ @@ -504,16 +513,17 @@ def test_restore_database_dump_runs_psql_for_all_database_dump(): def test_restore_database_dump_with_dry_run_skips_restore(): - database_config = [{'name': 'foo', 'schemas': None}] + databases_config = [{'name': 'foo', 'schemas': None}] flexmock(module).should_receive('make_dump_path') flexmock(module.dump).should_receive('make_database_dump_filename') flexmock(module).should_receive('execute_command_with_processes').never() module.restore_database_dump( - database_config, + databases_config, {}, 'test.yaml', + database_name='foo', dry_run=True, extract_process=flexmock(), connection_params={ @@ -526,7 +536,7 @@ def test_restore_database_dump_with_dry_run_skips_restore(): def test_restore_database_dump_without_extract_process_restores_from_disk(): - database_config = 
[{'name': 'foo', 'format': 'directory', 'schemas': None}] + databases_config = [{'name': 'foo', 'format': 'directory', 'schemas': None}] flexmock(module).should_receive('make_dump_path') flexmock(module.dump).should_receive('make_database_dump_filename').and_return('/dump/path') @@ -538,9 +548,10 @@ def test_restore_database_dump_without_extract_process_restores_from_disk(): ).once() module.restore_database_dump( - database_config, + databases_config, {}, 'test.yaml', + database_name='foo', dry_run=False, extract_process=None, connection_params={ diff --git a/tests/unit/hooks/test_mysql.py b/tests/unit/hooks/test_mysql.py index 4b55be47..19ab3cc7 100644 --- a/tests/unit/hooks/test_mysql.py +++ b/tests/unit/hooks/test_mysql.py @@ -380,7 +380,7 @@ def test_dump_databases_does_not_error_for_missing_all_databases_with_dry_run(): def test_restore_database_dump_runs_mysql_to_restore(): - database_config = [{'name': 'foo'}] + databases_config = [{'name': 'foo'}, {'name': 'bar'}] extract_process = flexmock(stdout=flexmock()) flexmock(module).should_receive('execute_command_with_processes').with_args( @@ -392,9 +392,10 @@ def test_restore_database_dump_runs_mysql_to_restore(): ).once() module.restore_database_dump( - database_config, + databases_config, {}, 'test.yaml', + database_name='foo', dry_run=False, extract_process=extract_process, connection_params={ @@ -406,30 +407,8 @@ def test_restore_database_dump_runs_mysql_to_restore(): ) -def test_restore_database_dump_errors_on_multiple_database_config(): - database_config = [{'name': 'foo'}, {'name': 'bar'}] - - flexmock(module).should_receive('execute_command_with_processes').never() - flexmock(module).should_receive('execute_command').never() - - with pytest.raises(ValueError): - module.restore_database_dump( - database_config, - {}, - 'test.yaml', - dry_run=False, - extract_process=flexmock(), - connection_params={ - 'hostname': None, - 'port': None, - 'username': None, - 'password': None, - }, - ) - - def 
test_restore_database_dump_runs_mysql_with_options(): - database_config = [{'name': 'foo', 'restore_options': '--harder'}] + databases_config = [{'name': 'foo', 'restore_options': '--harder'}] extract_process = flexmock(stdout=flexmock()) flexmock(module).should_receive('execute_command_with_processes').with_args( @@ -441,9 +420,10 @@ def test_restore_database_dump_runs_mysql_with_options(): ).once() module.restore_database_dump( - database_config, + databases_config, {}, 'test.yaml', + database_name='foo', dry_run=False, extract_process=extract_process, connection_params={ @@ -456,7 +436,7 @@ def test_restore_database_dump_runs_mysql_with_options(): def test_restore_database_dump_runs_mysql_with_hostname_and_port(): - database_config = [{'name': 'foo', 'hostname': 'database.example.org', 'port': 5433}] + databases_config = [{'name': 'foo', 'hostname': 'database.example.org', 'port': 5433}] extract_process = flexmock(stdout=flexmock()) flexmock(module).should_receive('execute_command_with_processes').with_args( @@ -477,9 +457,10 @@ def test_restore_database_dump_runs_mysql_with_hostname_and_port(): ).once() module.restore_database_dump( - database_config, + databases_config, {}, 'test.yaml', + database_name='foo', dry_run=False, extract_process=extract_process, connection_params={ @@ -492,7 +473,7 @@ def test_restore_database_dump_runs_mysql_with_hostname_and_port(): def test_restore_database_dump_runs_mysql_with_username_and_password(): - database_config = [{'name': 'foo', 'username': 'root', 'password': 'trustsome1'}] + databases_config = [{'name': 'foo', 'username': 'root', 'password': 'trustsome1'}] extract_process = flexmock(stdout=flexmock()) flexmock(module).should_receive('execute_command_with_processes').with_args( @@ -504,9 +485,10 @@ def test_restore_database_dump_runs_mysql_with_username_and_password(): ).once() module.restore_database_dump( - database_config, + databases_config, {}, 'test.yaml', + database_name='foo', dry_run=False, 
extract_process=extract_process, connection_params={ @@ -519,7 +501,7 @@ def test_restore_database_dump_runs_mysql_with_username_and_password(): def test_restore_database_dump_with_connection_params_uses_connection_params_for_restore(): - database_config = [ + databases_config = [ { 'name': 'foo', 'username': 'root', @@ -552,9 +534,10 @@ def test_restore_database_dump_with_connection_params_uses_connection_params_for ).once() module.restore_database_dump( - database_config, + databases_config, {}, 'test.yaml', + database_name='foo', dry_run=False, extract_process=extract_process, connection_params={ @@ -567,7 +550,7 @@ def test_restore_database_dump_with_connection_params_uses_connection_params_for def test_restore_database_dump_without_connection_params_uses_restore_params_in_config_for_restore(): - database_config = [ + databases_config = [ { 'name': 'foo', 'username': 'root', @@ -602,9 +585,10 @@ def test_restore_database_dump_without_connection_params_uses_restore_params_in_ ).once() module.restore_database_dump( - database_config, + databases_config, {}, 'test.yaml', + database_name='foo', dry_run=False, extract_process=extract_process, connection_params={ @@ -617,14 +601,15 @@ def test_restore_database_dump_without_connection_params_uses_restore_params_in_ def test_restore_database_dump_with_dry_run_skips_restore(): - database_config = [{'name': 'foo'}] + databases_config = [{'name': 'foo'}] flexmock(module).should_receive('execute_command_with_processes').never() module.restore_database_dump( - database_config, + databases_config, {}, 'test.yaml', + database_name='foo', dry_run=True, extract_process=flexmock(), connection_params={ diff --git a/tests/unit/hooks/test_postgresql.py b/tests/unit/hooks/test_postgresql.py index d7416100..51235ffc 100644 --- a/tests/unit/hooks/test_postgresql.py +++ b/tests/unit/hooks/test_postgresql.py @@ -464,7 +464,7 @@ def test_dump_databases_runs_non_default_pg_dump(): def test_restore_database_dump_runs_pg_restore(): - 
database_config = [{'name': 'foo', 'schemas': None}] + databases_config = [{'name': 'foo', 'schemas': None}, {'name': 'bar'}] extract_process = flexmock(stdout=flexmock()) flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'}) @@ -500,9 +500,10 @@ def test_restore_database_dump_runs_pg_restore(): ).once() module.restore_database_dump( - database_config, + databases_config, {}, 'test.yaml', + database_name='foo', dry_run=False, extract_process=extract_process, connection_params={ @@ -514,33 +515,8 @@ def test_restore_database_dump_runs_pg_restore(): ) -def test_restore_database_dump_errors_on_multiple_database_config(): - database_config = [{'name': 'foo'}, {'name': 'bar'}] - - flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'}) - flexmock(module).should_receive('make_dump_path') - flexmock(module.dump).should_receive('make_database_dump_filename') - flexmock(module).should_receive('execute_command_with_processes').never() - flexmock(module).should_receive('execute_command').never() - - with pytest.raises(ValueError): - module.restore_database_dump( - database_config, - {}, - 'test.yaml', - dry_run=False, - extract_process=flexmock(), - connection_params={ - 'restore_hostname': None, - 'restore_port': None, - 'restore_username': None, - 'restore_password': None, - }, - ) - - def test_restore_database_dump_runs_pg_restore_with_hostname_and_port(): - database_config = [ + databases_config = [ {'name': 'foo', 'hostname': 'database.example.org', 'port': 5433, 'schemas': None} ] extract_process = flexmock(stdout=flexmock()) @@ -586,9 +562,10 @@ def test_restore_database_dump_runs_pg_restore_with_hostname_and_port(): ).once() module.restore_database_dump( - database_config, + databases_config, {}, 'test.yaml', + database_name='foo', dry_run=False, extract_process=extract_process, connection_params={ @@ -601,7 +578,7 @@ def test_restore_database_dump_runs_pg_restore_with_hostname_and_port(): 
def test_restore_database_dump_runs_pg_restore_with_username_and_password(): - database_config = [ + databases_config = [ {'name': 'foo', 'username': 'postgres', 'password': 'trustsome1', 'schemas': None} ] extract_process = flexmock(stdout=flexmock()) @@ -645,9 +622,10 @@ def test_restore_database_dump_runs_pg_restore_with_username_and_password(): ).once() module.restore_database_dump( - database_config, + databases_config, {}, 'test.yaml', + database_name='foo', dry_run=False, extract_process=extract_process, connection_params={ @@ -660,7 +638,7 @@ def test_restore_database_dump_runs_pg_restore_with_username_and_password(): def test_restore_database_dump_with_connection_params_uses_connection_params_for_restore(): - database_config = [ + databases_config = [ { 'name': 'foo', 'hostname': 'database.example.org', @@ -723,9 +701,10 @@ def test_restore_database_dump_with_connection_params_uses_connection_params_for ).once() module.restore_database_dump( - database_config, + databases_config, {}, 'test.yaml', + database_name='foo', dry_run=False, extract_process=extract_process, connection_params={ @@ -738,7 +717,7 @@ def test_restore_database_dump_with_connection_params_uses_connection_params_for def test_restore_database_dump_without_connection_params_uses_restore_params_in_config_for_restore(): - database_config = [ + databases_config = [ { 'name': 'foo', 'hostname': 'database.example.org', @@ -801,9 +780,10 @@ def test_restore_database_dump_without_connection_params_uses_restore_params_in_ ).once() module.restore_database_dump( - database_config, + databases_config, {}, 'test.yaml', + database_name='foo', dry_run=False, extract_process=extract_process, connection_params={ @@ -816,7 +796,7 @@ def test_restore_database_dump_without_connection_params_uses_restore_params_in_ def test_restore_database_dump_runs_pg_restore_with_options(): - database_config = [ + databases_config = [ { 'name': 'foo', 'restore_options': '--harder', @@ -861,9 +841,10 @@ def 
test_restore_database_dump_runs_pg_restore_with_options(): ).once() module.restore_database_dump( - database_config, + databases_config, {}, 'test.yaml', + database_name='foo', dry_run=False, extract_process=extract_process, connection_params={ @@ -876,7 +857,7 @@ def test_restore_database_dump_runs_pg_restore_with_options(): def test_restore_database_dump_runs_psql_for_all_database_dump(): - database_config = [{'name': 'all', 'schemas': None}] + databases_config = [{'name': 'all', 'schemas': None}] extract_process = flexmock(stdout=flexmock()) flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'}) @@ -899,9 +880,10 @@ def test_restore_database_dump_runs_psql_for_all_database_dump(): ).once() module.restore_database_dump( - database_config, + databases_config, {}, 'test.yaml', + database_name='all', dry_run=False, extract_process=extract_process, connection_params={ @@ -914,7 +896,7 @@ def test_restore_database_dump_runs_psql_for_all_database_dump(): def test_restore_database_dump_runs_psql_for_plain_database_dump(): - database_config = [{'name': 'foo', 'format': 'plain', 'schemas': None}] + databases_config = [{'name': 'foo', 'format': 'plain', 'schemas': None}] extract_process = flexmock(stdout=flexmock()) flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'}) @@ -942,9 +924,10 @@ def test_restore_database_dump_runs_psql_for_plain_database_dump(): ).once() module.restore_database_dump( - database_config, + databases_config, {}, 'test.yaml', + database_name='foo', dry_run=False, extract_process=extract_process, connection_params={ @@ -957,7 +940,7 @@ def test_restore_database_dump_runs_psql_for_plain_database_dump(): def test_restore_database_dump_runs_non_default_pg_restore_and_psql(): - database_config = [ + databases_config = [ { 'name': 'foo', 'pg_restore_command': 'docker exec mycontainer pg_restore', @@ -1006,9 +989,10 @@ def 
test_restore_database_dump_runs_non_default_pg_restore_and_psql(): ).once() module.restore_database_dump( - database_config, + databases_config, {}, 'test.yaml', + database_name='foo', dry_run=False, extract_process=extract_process, connection_params={ @@ -1021,7 +1005,7 @@ def test_restore_database_dump_runs_non_default_pg_restore_and_psql(): def test_restore_database_dump_with_dry_run_skips_restore(): - database_config = [{'name': 'foo', 'schemas': None}] + databases_config = [{'name': 'foo', 'schemas': None}] flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'}) flexmock(module).should_receive('make_dump_path') @@ -1029,9 +1013,10 @@ def test_restore_database_dump_with_dry_run_skips_restore(): flexmock(module).should_receive('execute_command_with_processes').never() module.restore_database_dump( - database_config, + databases_config, {}, 'test.yaml', + database_name='foo', dry_run=True, extract_process=flexmock(), connection_params={ @@ -1044,7 +1029,7 @@ def test_restore_database_dump_with_dry_run_skips_restore(): def test_restore_database_dump_without_extract_process_restores_from_disk(): - database_config = [{'name': 'foo', 'schemas': None}] + databases_config = [{'name': 'foo', 'schemas': None}] flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'}) flexmock(module).should_receive('make_dump_path') @@ -1080,9 +1065,10 @@ def test_restore_database_dump_without_extract_process_restores_from_disk(): ).once() module.restore_database_dump( - database_config, + databases_config, {}, 'test.yaml', + database_name='foo', dry_run=False, extract_process=None, connection_params={ @@ -1095,7 +1081,7 @@ def test_restore_database_dump_without_extract_process_restores_from_disk(): def test_restore_database_dump_with_schemas_restores_schemas(): - database_config = [{'name': 'foo', 'schemas': ['bar', 'baz']}] + databases_config = [{'name': 'foo', 'schemas': ['bar', 'baz']}] 
flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'}) flexmock(module).should_receive('make_dump_path') @@ -1135,9 +1121,10 @@ def test_restore_database_dump_with_schemas_restores_schemas(): ).once() module.restore_database_dump( - database_config, + databases_config, {}, 'test.yaml', + database_name='foo', dry_run=False, extract_process=None, connection_params={ diff --git a/tests/unit/hooks/test_sqlite.py b/tests/unit/hooks/test_sqlite.py index f61ffc75..761a0557 100644 --- a/tests/unit/hooks/test_sqlite.py +++ b/tests/unit/hooks/test_sqlite.py @@ -93,7 +93,7 @@ def test_dump_databases_does_not_dump_if_dry_run(): def test_restore_database_dump_restores_database(): - database_config = [{'path': '/path/to/database', 'name': 'database'}] + databases_config = [{'path': '/path/to/database', 'name': 'database'}, {'name': 'other'}] extract_process = flexmock(stdout=flexmock()) flexmock(module).should_receive('execute_command_with_processes').with_args( @@ -109,9 +109,10 @@ def test_restore_database_dump_restores_database(): flexmock(module.os).should_receive('remove').once() module.restore_database_dump( - database_config, + databases_config, {}, 'test.yaml', + database_name='database', dry_run=False, extract_process=extract_process, connection_params={'restore_path': None}, @@ -119,7 +120,7 @@ def test_restore_database_dump_restores_database(): def test_restore_database_dump_with_connection_params_uses_connection_params_for_restore(): - database_config = [ + databases_config = [ {'path': '/path/to/database', 'name': 'database', 'restore_path': 'config/path/to/database'} ] extract_process = flexmock(stdout=flexmock()) @@ -137,9 +138,10 @@ def test_restore_database_dump_with_connection_params_uses_connection_params_for flexmock(module.os).should_receive('remove').once() module.restore_database_dump( - database_config, + databases_config, {}, 'test.yaml', + database_name='database', dry_run=False, extract_process=extract_process, 
connection_params={'restore_path': 'cli/path/to/database'}, @@ -147,7 +149,7 @@ def test_restore_database_dump_with_connection_params_uses_connection_params_for def test_restore_database_dump_without_connection_params_uses_restore_params_in_config_for_restore(): - database_config = [ + databases_config = [ {'path': '/path/to/database', 'name': 'database', 'restore_path': 'config/path/to/database'} ] extract_process = flexmock(stdout=flexmock()) @@ -165,9 +167,10 @@ def test_restore_database_dump_without_connection_params_uses_restore_params_in_ flexmock(module.os).should_receive('remove').once() module.restore_database_dump( - database_config, + databases_config, {}, 'test.yaml', + database_name='database', dry_run=False, extract_process=extract_process, connection_params={'restore_path': None}, @@ -175,31 +178,33 @@ def test_restore_database_dump_without_connection_params_uses_restore_params_in_ def test_restore_database_dump_does_not_restore_database_if_dry_run(): - database_config = [{'path': '/path/to/database', 'name': 'database'}] + databases_config = [{'path': '/path/to/database', 'name': 'database'}] extract_process = flexmock(stdout=flexmock()) flexmock(module).should_receive('execute_command_with_processes').never() flexmock(module.os).should_receive('remove').never() module.restore_database_dump( - database_config, + databases_config, {}, 'test.yaml', + database_name='database', dry_run=True, extract_process=extract_process, connection_params={'restore_path': None}, ) -def test_restore_database_dump_raises_error_if_database_config_is_invalid(): - database_config = [] +def test_restore_database_dump_raises_error_if_database_config_is_empty(): + databases_config = [] extract_process = flexmock(stdout=flexmock()) with pytest.raises(ValueError): module.restore_database_dump( - database_config, + databases_config, {}, 'test.yaml', + database_name='database', dry_run=False, extract_process=extract_process, connection_params={'restore_path': None}, From 
d2fa205476bd2b82d05423742484c9d71c8e9e44 Mon Sep 17 00:00:00 2001 From: Dan Helfman Date: Tue, 11 Jul 2023 19:42:14 -0700 Subject: [PATCH 08/12] Update documentation for section removal (#721). --- README.md | 63 +++--- ...movable-drive-or-an-intermittent-server.md | 30 +-- docs/how-to/backup-your-databases.md | 1 + docs/how-to/deal-with-very-large-backups.md | 13 +- docs/how-to/make-backups-redundant.md | 20 +- docs/how-to/make-per-application-backups.md | 193 ++++++++++-------- docs/how-to/set-up-backups.md | 13 +- docs/how-to/upgrade.md | 13 +- 8 files changed, 184 insertions(+), 162 deletions(-) diff --git a/README.md b/README.md index 78691195..9cd52108 100644 --- a/README.md +++ b/README.md @@ -16,50 +16,41 @@ The canonical home of borgmatic is at ht Here's an example configuration file: ```yaml -location: - # List of source directories to backup. - source_directories: - - /home - - /etc +# List of source directories to backup. +source_directories: + - /home + - /etc - # Paths of local or remote repositories to backup to. - repositories: - - path: ssh://k8pDxu32@k8pDxu32.repo.borgbase.com/./repo - label: borgbase - - path: /var/lib/backups/local.borg - label: local +# Paths of local or remote repositories to backup to. +repositories: + - path: ssh://k8pDxu32@k8pDxu32.repo.borgbase.com/./repo + label: borgbase + - path: /var/lib/backups/local.borg + label: local -retention: - # Retention policy for how many backups to keep. - keep_daily: 7 - keep_weekly: 4 - keep_monthly: 6 +# Retention policy for how many backups to keep. +keep_daily: 7 +keep_weekly: 4 +keep_monthly: 6 -consistency: - # List of checks to run to validate your backups. - checks: - - name: repository - - name: archives - frequency: 2 weeks +# List of checks to run to validate your backups. +checks: + - name: repository + - name: archives + frequency: 2 weeks -hooks: - # Custom preparation scripts to run. - before_backup: - - prepare-for-backup.sh +# Custom preparation scripts to run. 
+before_backup: + - prepare-for-backup.sh - # Databases to dump and include in backups. - postgresql_databases: - - name: users +# Databases to dump and include in backups. +postgresql_databases: + - name: users - # Third-party services to notify you if backups aren't happening. - healthchecks: https://hc-ping.com/be067061-cf96-4412-8eae-62b0c50d6a8c +# Third-party services to notify you if backups aren't happening. +healthchecks: https://hc-ping.com/be067061-cf96-4412-8eae-62b0c50d6a8c ``` -Want to see borgmatic in action? Check out the screencast. - - - borgmatic is powered by [Borg Backup](https://www.borgbackup.org/). ## Integrations diff --git a/docs/how-to/backup-to-a-removable-drive-or-an-intermittent-server.md b/docs/how-to/backup-to-a-removable-drive-or-an-intermittent-server.md index 04ccbf79..6bcc8950 100644 --- a/docs/how-to/backup-to-a-removable-drive-or-an-intermittent-server.md +++ b/docs/how-to/backup-to-a-removable-drive-or-an-intermittent-server.md @@ -44,14 +44,16 @@ file](https://torsion.org/borgmatic/docs/how-to/make-per-application-backups/), say at `/etc/borgmatic.d/removable.yaml`: ```yaml -location: - source_directories: - - /home +source_directories: + - /home - repositories: - - path: /mnt/removable/backup.borg +repositories: + - path: /mnt/removable/backup.borg ``` +Prior to version 1.8.0 Put +these options in the `location:` section of your configuration. + Prior to version 1.7.10 Omit the `path:` portion of the `repositories` list. @@ -77,18 +79,20 @@ optionally using `before_actions` instead. 
You can imagine a similar check for the sometimes-online server case: ```yaml -location: - source_directories: - - /home +source_directories: + - /home - repositories: - - path: ssh://me@buddys-server.org/./backup.borg +repositories: + - path: ssh://me@buddys-server.org/./backup.borg -hooks: - before_backup: - - ping -q -c 1 buddys-server.org > /dev/null || exit 75 +before_backup: + - ping -q -c 1 buddys-server.org > /dev/null || exit 75 ``` +Prior to version 1.8.0 Put the +first two options in the `location:` section of your configuration and the +`before_backup` option within the `hooks:` section. + Prior to version 1.7.10 Omit the `path:` portion of the `repositories` list. diff --git a/docs/how-to/backup-your-databases.md b/docs/how-to/backup-your-databases.md index bf2c7b68..b8aeca98 100644 --- a/docs/how-to/backup-your-databases.md +++ b/docs/how-to/backup-your-databases.md @@ -196,6 +196,7 @@ it is a mandatory option there: ```yaml location: source_directories: [] + hooks: mysql_databases: - name: all diff --git a/docs/how-to/deal-with-very-large-backups.md b/docs/how-to/deal-with-very-large-backups.md index d6142619..48199b97 100644 --- a/docs/how-to/deal-with-very-large-backups.md +++ b/docs/how-to/deal-with-very-large-backups.md @@ -162,11 +162,13 @@ either for a single repository or for all repositories. Disabling all consistency checks looks like this: ```yaml -consistency: - checks: - - name: disabled +checks: + - name: disabled ``` +Prior to version 1.8.0 Put +this option in the `consistency:` section of your configuration. + Prior to version 1.6.2 `checks` was a plain list of strings without the `name:` part. 
For instance: @@ -181,9 +183,8 @@ you can keep running consistency checks, but only against a subset of the repositories: ```yaml -consistency: - check_repositories: - - path/of/repository_to_check.borg +check_repositories: + - path/of/repository_to_check.borg ``` Finally, you can override your configuration file's consistency checks, and diff --git a/docs/how-to/make-backups-redundant.md b/docs/how-to/make-backups-redundant.md index 2a4b8121..6f4d868b 100644 --- a/docs/how-to/make-backups-redundant.md +++ b/docs/how-to/make-backups-redundant.md @@ -12,18 +12,20 @@ it. borgmatic supports this in its configuration by specifying multiple backup repositories. Here's an example: ```yaml -location: - # List of source directories to backup. - source_directories: - - /home - - /etc +# List of source directories to backup. +source_directories: + - /home + - /etc - # Paths of local or remote repositories to backup to. - repositories: - - path: ssh://k8pDxu32@k8pDxu32.repo.borgbase.com/./repo - - path: /var/lib/backups/local.borg +# Paths of local or remote repositories to backup to. +repositories: + - path: ssh://k8pDxu32@k8pDxu32.repo.borgbase.com/./repo + - path: /var/lib/backups/local.borg ``` +Prior to version 1.8.0 Put +these options in the `location:` section of your configuration. + Prior to version 1.7.10 Omit the `path:` portion of the `repositories` list. diff --git a/docs/how-to/make-per-application-backups.md b/docs/how-to/make-per-application-backups.md index fb815d87..cae2fa2e 100644 --- a/docs/how-to/make-per-application-backups.md +++ b/docs/how-to/make-per-application-backups.md @@ -74,14 +74,15 @@ and borgmatic uses that format to name any new archive it creates. For instance: ```yaml -storage: - ... - archive_name_format: home-directories-{now} +archive_name_format: home-directories-{now} ``` -This means that when borgmatic creates an archive, its name will start with -the string `home-directories-` and end with a timestamp for its creation time. 
-If `archive_name_format` is unspecified, the default is +Prior to version 1.8.0 Put +this option in the `storage:` section of your configuration. + +This example means that when borgmatic creates an archive, its name will start +with the string `home-directories-` and end with a timestamp for its creation +time. If `archive_name_format` is unspecified, the default is `{hostname}-{now:%Y-%m-%dT%H:%M:%S.%f}`, meaning your system hostname plus a timestamp in a particular format. @@ -156,23 +157,28 @@ them. To achieve this, you can put fragments of common configuration options into a file, and then include or inline that file into one or more borgmatic configuration files. -Let's say that you want to include common retention configuration across all +Let's say that you want to include common consistency check configuration across all of your configuration files. You could do that in each configuration file with the following: ```yaml -location: - ... +repositories: + - path: repo.borg -retention: - !include /etc/borgmatic/common_retention.yaml +checks: + !include /etc/borgmatic/common_checks.yaml ``` -And then the contents of `common_retention.yaml` could be: +Prior to version 1.8.0 These +options were organized into sections like `location:` and `consistency:`. + +The contents of `common_checks.yaml` could be: ```yaml -keep_hourly: 24 -keep_daily: 7 +- name: repository + frequency: 3 weeks +- name: archives + frequency: 2 weeks ``` To prevent borgmatic from trying to load these configuration fragments by @@ -188,11 +194,11 @@ Note that this form of include must be a YAML value rather than a key. For example, this will not work: ```yaml -location: - ... +repositories: + - path: repo.borg # Don't do this. It won't work! -!include /etc/borgmatic/common_retention.yaml +!include /etc/borgmatic/common_checks.yaml ``` But if you do want to merge in a YAML key *and* its values, keep reading! 
@@ -203,45 +209,48 @@ But if you do want to merge in a YAML key *and* its values, keep reading! If you need to get even fancier and merge in common configuration options, you can perform a YAML merge of included configuration using the YAML `<<` key. For instance, here's an example of a main configuration file that pulls in -retention and consistency options via a single include: +retention and consistency checks options via a single include: ```yaml -<<: !include /etc/borgmatic/common.yaml +repositories: + - path: repo.borg -location: - ... +<<: !include /etc/borgmatic/common.yaml ``` This is what `common.yaml` might look like: ```yaml -retention: - keep_hourly: 24 - keep_daily: 7 +keep_hourly: 24 +keep_daily: 7 -consistency: - checks: - - name: repository +checks: + - name: repository + frequency: 3 weeks + - name: archives + frequency: 2 weeks ``` -Once this include gets merged in, the resulting configuration would have all -of the `location` options from the original configuration file *and* the -`retention` and `consistency` options from the include. +Prior to version 1.8.0 These +options were organized into sections like `retention:` and `consistency:`. -Prior to borgmatic version 1.6.0, when there's a section collision between the -local file and the merged include, the local file's section takes precedence. -So if the `retention` section appears in both the local file and the include -file, the included `retention` is ignored in favor of the local `retention`. -But see below about deep merge in version 1.6.0+. +Once this include gets merged in, the resulting configuration would have all +of the options from the original configuration file *and* the options from the +include. + +Prior to version 1.6.0 When the +same option appeared in both the local file and the merged include, the local +file's value took precedence—meaning the included value was ignored in favor +of the local one. But see below about deep merge in version 1.6.0+. 
Note that this `<<` include merging syntax is only for merging in mappings (configuration options and their values). But if you'd like to include a -single value directly, please see the section above about standard includes. +single value directly, please see the above about standard includes. Additionally, there is a limitation preventing multiple `<<` include merges -per section. So for instance, that means you can do one `<<` merge at the -global level, another `<<` within each configuration section, etc. (This is a -YAML limitation.) +per file or option value. So for instance, that means you can do one `<<` +merge at the global level, another `<<` within each nested option value, etc. +(This is a YAML limitation.) ### Deep merge @@ -342,8 +351,8 @@ includes. ### Shallow merge Even though deep merging is generally pretty handy for included files, -sometimes you want specific sections in the local file to take precedence over -included sections—without any merging occurring for them. +sometimes you want specific options in the local file to take precedence over +included options—without any merging occurring for them. New in version 1.7.12 That's where the `!retain` tag comes in. Whenever you're merging an included file @@ -357,37 +366,38 @@ on the `retention` mapping: ```yaml <<: !include /etc/borgmatic/common.yaml -location: - repositories: - - path: repo.borg +repositories: + - path: repo.borg -retention: !retain - keep_daily: 5 +checks: !retain + - name: repository ``` And `common.yaml` like this: ```yaml -location: - repositories: - - path: common.borg +repositories: + - path: common.borg -retention: - keep_hourly: 24 - keep_daily: 7 +checks: + - name: archives ``` -Once this include gets merged in, the resulting configuration will have a -`keep_daily` value of `5` and nothing else in the `retention` section. That's -because the `!retain` tag says to retain the local version of `retention` and -ignore any values coming in from the include. 
But because the `repositories` -list doesn't have a `!retain` tag, it still gets merged together to contain -both `common.borg` and `repo.borg`. +Prior to version 1.8.0 These +options were organized into sections like `location:` and `consistency:`. -The `!retain` tag can only be placed on mappings and lists, and it goes right -after the name of the option (and its colon) on the same line. The effects of -`!retain` are recursive, meaning that if you place a `!retain` tag on a -top-level mapping, even deeply nested values within it will not be merged. +Once this include gets merged in, the resulting configuration will have a +`checks` value with a name of `repository` and no other values. That's because +the `!retain` tag says to retain the local version of `checks` and ignore any +values coming in from the include. But because the `repositories` list doesn't +have a `!retain` tag, it still gets merged together to contain both +`common.borg` and `repo.borg`. + +The `!retain` tag can only be placed on mappings (keys/values) and lists, and +it goes right after the name of the option (and its colon) on the same line. +The effects of `!retain` are recursive, meaning that if you place a `!retain` +tag on a top-level mapping, even deeply nested values within it will not be +merged. Additionally, the `!retain` tag only works in a configuration file that also performs a merge include with `<<: !include`. It doesn't make sense within, @@ -434,43 +444,50 @@ Whatever the reason, you can override borgmatic configuration options at the command-line via the `--override` flag. Here's an example: ```bash -borgmatic create --override location.remote_path=/usr/local/bin/borg1 +borgmatic create --override remote_path=/usr/local/bin/borg1 ``` What this does is load your configuration files, and for each one, disregard -the configured value for the `remote_path` option in the `location` section, -and use the value of `/usr/local/bin/borg1` instead. 
+the configured value for the `remote_path` option, and use the value of +`/usr/local/bin/borg1` instead. -You can even override multiple values at once. For instance: +Prior to version 1.8.0 Don't +forget to specify the section (like `location:`) that any option is in. + +You can even override nested values or multiple values at once. For instance: ```bash -borgmatic create --override section.option1=value1 section.option2=value2 +borgmatic create --override parent_option.option1=value1 parent_option.option2=value2 ``` This will accomplish the same thing: ```bash -borgmatic create --override section.option1=value1 --override section.option2=value2 +borgmatic create --override parent_option.option1=value1 --override parent_option.option2=value2 ``` +Prior to version 1.8.0 Don't +forget to specify the section that an option is in. That looks like a prefix +on the option name, e.g. `location.repositories`. + Note that each value is parsed as an actual YAML string, so you can even set list values by using brackets. For instance: ```bash -borgmatic create --override location.repositories=[test1.borg,test2.borg] +borgmatic create --override repositories=[test1.borg,test2.borg] ``` Or even a single list element: ```bash -borgmatic create --override location.repositories=[/root/test.borg] +borgmatic create --override repositories=[/root/test.borg] ``` If your override value contains special YAML characters like colons, then you'll need quotes for it to parse correctly: ```bash -borgmatic create --override location.repositories="['user@server:test.borg']" +borgmatic create --override repositories="['user@server:test.borg']" ``` There is not currently a way to override a single element of a list without @@ -486,7 +503,9 @@ indentation and a leading dash.) Be sure to quote your overrides if they contain spaces or other characters that your shell may interpret. 
+An alternative to command-line overrides is passing in your values via
Be sure to use +spaces rather than tabs for indentation; YAML does not allow tabs. -Note that the configuration file is organized into distinct sections, each -with a section name like `location:` or `storage:`. So take care that if you -uncomment a particular option, also uncomment its containing section name, or -else borgmatic won't recognize the option. Also be sure to use spaces rather -than tabs for indentation; YAML does not allow tabs. +Prior to version 1.8.0 The +configuration file was organized into distinct sections, each with a section +name like `location:` or `storage:`. So in older versions of borgmatic, take +care that if you uncomment a particular option, also uncomment its containing +section name—or else borgmatic won't recognize the option. You can get the same sample configuration file from the [configuration reference](https://torsion.org/borgmatic/docs/reference/configuration/), the diff --git a/docs/how-to/upgrade.md b/docs/how-to/upgrade.md index be85880f..43a28cc0 100644 --- a/docs/how-to/upgrade.md +++ b/docs/how-to/upgrade.md @@ -131,20 +131,21 @@ Let's say your original borgmatic repository configuration file looks something like this: ```yaml -location: - repositories: - - path: original.borg +repositories: + - path: original.borg ``` +Prior to version 1.8.0 This +option was found in the `location:` section of your configuration. + Prior to version 1.7.10 Omit the `path:` portion of the `repositories` list. Change it to a new (not yet created) repository path: ```yaml -location: - repositories: - - path: upgraded.borg +repositories: + - path: upgraded.borg ``` Then, run the `rcreate` action (formerly `init`) to create that new Borg 2 From 2453ecad424c713403dabf4c276333ff4a65d4cd Mon Sep 17 00:00:00 2001 From: Dan Helfman Date: Tue, 11 Jul 2023 22:10:57 -0700 Subject: [PATCH 09/12] More documentation updates based on section removal (#721). 
--- .../_includes/components/suggestion-link.html | 4 +- ...reparation-and-cleanup-steps-to-backups.md | 60 ++++---- ...movable-drive-or-an-intermittent-server.md | 20 +-- docs/how-to/backup-your-databases.md | 141 +++++++++--------- docs/how-to/deal-with-very-large-backups.md | 32 ++-- docs/how-to/develop-on-borgmatic.md | 6 +- docs/how-to/inspect-your-backups.md | 2 +- docs/how-to/make-per-application-backups.md | 80 +++++----- docs/how-to/monitor-your-backups.md | 94 +++++++----- docs/how-to/provide-your-passwords.md | 22 +-- docs/how-to/upgrade.md | 12 +- docs/reference/command-line.md | 2 +- 12 files changed, 243 insertions(+), 232 deletions(-) diff --git a/docs/_includes/components/suggestion-link.html b/docs/_includes/components/suggestion-link.html index 2c2d1424..073b8887 100644 --- a/docs/_includes/components/suggestion-link.html +++ b/docs/_includes/components/suggestion-link.html @@ -1,5 +1,5 @@

Improve this documentation

Have an idea on how to make this documentation even better? Use our issue tracker to send your -feedback!

+href="https://torsion.org/borgmatic/#support-and-contributing">issue +tracker to send your feedback!

diff --git a/docs/how-to/add-preparation-and-cleanup-steps-to-backups.md b/docs/how-to/add-preparation-and-cleanup-steps-to-backups.md index eeba2e04..37806f20 100644 --- a/docs/how-to/add-preparation-and-cleanup-steps-to-backups.md +++ b/docs/how-to/add-preparation-and-cleanup-steps-to-backups.md @@ -21,11 +21,10 @@ running backups, and specify `after_backup` hooks to perform cleanup steps afterwards. Here's an example: ```yaml -hooks: - before_backup: - - mount /some/filesystem - after_backup: - - umount /some/filesystem +before_backup: + - mount /some/filesystem +after_backup: + - umount /some/filesystem ``` If your command contains a special YAML character such as a colon, you may @@ -33,11 +32,23 @@ need to quote the entire string (or use a [multiline string](https://yaml-multiline.info/)) to avoid an error: ```yaml -hooks: - before_backup: - - "echo Backup: start" +before_backup: + - "echo Backup: start" ``` +There are additional hooks that run before/after other actions as well. For +instance, `before_prune` runs before a `prune` action for a repository, while +`after_prune` runs after it. + +Prior to version 1.8.0 Put +these options in the `hooks:` section of your configuration. + +New in version 1.7.0 The +`before_actions` and `after_actions` hooks run before/after all the actions +(like `create`, `prune`, etc.) for each repository. These hooks are a good +place to run per-repository steps like mounting/unmounting a remote +filesystem. + New in version 1.6.0 The `before_backup` and `after_backup` hooks each run once per repository in a configuration file. `before_backup` hooks runs right before the `create` @@ -46,16 +57,6 @@ but not if an error occurs in a previous hook or in the backups themselves. (Prior to borgmatic 1.6.0, these hooks instead ran once per configuration file rather than once per repository.) -There are additional hooks that run before/after other actions as well. 
For -instance, `before_prune` runs before a `prune` action for a repository, while -`after_prune` runs after it. - -New in version 1.7.0 The -`before_actions` and `after_actions` hooks run before/after all the actions -(like `create`, `prune`, etc.) for each repository. These hooks are a good -place to run per-repository steps like mounting/unmounting a remote -filesystem. - ## Variable interpolation @@ -64,11 +65,13 @@ variables into the hook command. Here's an example that assumes you provide a separate shell script: ```yaml -hooks: - after_prune: - - record-prune.sh "{configuration_filename}" "{repository}" +after_prune: + - record-prune.sh "{configuration_filename}" "{repository}" ``` +Prior to version 1.8.0 Put +this option in the `hooks:` section of your configuration. + In this example, when the hook is triggered, borgmatic interpolates runtime values into the hook command: the borgmatic configuration filename and the paths of the current Borg repository. Here's the full set of supported @@ -92,13 +95,15 @@ You can also use `before_everything` and `after_everything` hooks to perform global setup or cleanup: ```yaml -hooks: - before_everything: - - set-up-stuff-globally - after_everything: - - clean-up-stuff-globally +before_everything: + - set-up-stuff-globally +after_everything: + - clean-up-stuff-globally ``` +Prior to version 1.8.0 Put +these options in the `hooks:` section of your configuration. + `before_everything` hooks collected from all borgmatic configuration files run once before all configuration files (prior to all actions), but only if there is a `create` action. An error encountered during a `before_everything` hook @@ -109,6 +114,7 @@ but only if there is a `create` action. It runs even if an error occurs during a backup or a backup hook, but not if an error occurs during a `before_everything` hook. 
+ ## Error hooks borgmatic also runs `on_error` hooks if an error occurs, either when creating @@ -116,6 +122,7 @@ a backup or running a backup hook. See the [monitoring and alerting documentation](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/) for more information. + ## Hook output Any output produced by your hooks shows up both at the console and in syslog @@ -123,6 +130,7 @@ Any output produced by your hooks shows up both at the console and in syslog href="https://torsion.org/borgmatic/docs/how-to/inspect-your-backups/">inspecting your backups. + ## Security An important security note about hooks: borgmatic executes all hook commands diff --git a/docs/how-to/backup-to-a-removable-drive-or-an-intermittent-server.md b/docs/how-to/backup-to-a-removable-drive-or-an-intermittent-server.md index 6bcc8950..a7c9b20a 100644 --- a/docs/how-to/backup-to-a-removable-drive-or-an-intermittent-server.md +++ b/docs/how-to/backup-to-a-removable-drive-or-an-intermittent-server.md @@ -62,11 +62,13 @@ the external `findmnt` utility to see whether the drive is mounted before proceeding. ```yaml -hooks: - before_backup: - - findmnt /mnt/removable > /dev/null || exit 75 +before_backup: + - findmnt /mnt/removable > /dev/null || exit 75 ``` +Prior to version 1.8.0 Put this +option in the `hooks:` section of your configuration. + What this does is check if the `findmnt` command errors when probing for a particular mount point. If it does error, then it returns exit code 75 to borgmatic. borgmatic logs the soft failure, skips all further actions in that @@ -89,19 +91,11 @@ before_backup: - ping -q -c 1 buddys-server.org > /dev/null || exit 75 ``` -Prior to version 1.8.0 Put the -first two options in the `location:` section of your configuration and the -`before_backup` option within the `hooks:` section. - -Prior to version 1.7.10 Omit -the `path:` portion of the `repositories` list. 
- Or to only run backups if the battery level is high enough: ```yaml -hooks: - before_backup: - - is_battery_percent_at_least.sh 25 +before_backup: + - is_battery_percent_at_least.sh 25 ``` (Writing the battery script is left as an exercise to the reader.) diff --git a/docs/how-to/backup-your-databases.md b/docs/how-to/backup-your-databases.md index b8aeca98..4708e0ad 100644 --- a/docs/how-to/backup-your-databases.md +++ b/docs/how-to/backup-your-databases.md @@ -18,31 +18,32 @@ prior to running backups. For example, here is everything you need to dump and backup a couple of local PostgreSQL databases and a MySQL/MariaDB database. ```yaml -hooks: - postgresql_databases: - - name: users - - name: orders - mysql_databases: - - name: posts +postgresql_databases: + - name: users + - name: orders +mysql_databases: + - name: posts ``` +Prior to version 1.8.0 Put +these and other database options in the `hooks:` section of your +configuration. + New in version 1.5.22 You can also dump MongoDB databases. For example: ```yaml -hooks: - mongodb_databases: - - name: messages +mongodb_databases: + - name: messages ``` New in version 1.7.9 Additionally, you can dump SQLite databases. For example: ```yaml -hooks: - sqlite_databases: - - name: mydb - path: /var/lib/sqlite3/mydb.sqlite +sqlite_databases: + - name: mydb + path: /var/lib/sqlite3/mydb.sqlite ``` As part of each backup, borgmatic streams a database dump for each configured @@ -54,7 +55,7 @@ temporary disk space.) To support this, borgmatic creates temporary named pipes in `~/.borgmatic` by default. To customize this path, set the `borgmatic_source_directory` option -in the `location` section of borgmatic's configuration. +in borgmatic's configuration. Also note that using a database hook implicitly enables both the `read_special` and `one_file_system` configuration settings (even if they're @@ -64,35 +65,34 @@ See Limitations below for more on this. 
Here's a more involved example that connects to remote databases: ```yaml -hooks: - postgresql_databases: - - name: users - hostname: database1.example.org - - name: orders - hostname: database2.example.org - port: 5433 - username: postgres - password: trustsome1 - format: tar - options: "--role=someone" - mysql_databases: - - name: posts - hostname: database3.example.org - port: 3307 - username: root - password: trustsome1 - options: "--skip-comments" - mongodb_databases: - - name: messages - hostname: database4.example.org - port: 27018 - username: dbuser - password: trustsome1 - authentication_database: mongousers - options: "--ssl" - sqlite_databases: - - name: mydb - path: /var/lib/sqlite3/mydb.sqlite +postgresql_databases: + - name: users + hostname: database1.example.org + - name: orders + hostname: database2.example.org + port: 5433 + username: postgres + password: trustsome1 + format: tar + options: "--role=someone" +mysql_databases: + - name: posts + hostname: database3.example.org + port: 3307 + username: root + password: trustsome1 + options: "--skip-comments" +mongodb_databases: + - name: messages + hostname: database4.example.org + port: 27018 + username: dbuser + password: trustsome1 + authentication_database: mongousers + options: "--ssl" +sqlite_databases: + - name: mydb + path: /var/lib/sqlite3/mydb.sqlite ``` See your [borgmatic configuration @@ -106,13 +106,12 @@ listing databases, restoring databases, etc.). If you want to dump all databases on a host, use `all` for the database name: ```yaml -hooks: - postgresql_databases: - - name: all - mysql_databases: - - name: all - mongodb_databases: - - name: all +postgresql_databases: + - name: all +mysql_databases: + - name: all +mongodb_databases: + - name: all ``` Note that you may need to use a `username` of the `postgres` superuser for @@ -120,6 +119,9 @@ this to work with PostgreSQL. The SQLite hook in particular does not consider "all" a special database name. 
+Prior to version 1.8.0 Put +these options in the `hooks:` section of your configuration. + New in version 1.7.6 With PostgreSQL and MySQL, you can optionally dump "all" databases to separate files instead of one combined dump file, allowing more convenient restores of @@ -127,13 +129,12 @@ individual databases. Enable this by specifying your desired database dump `format`: ```yaml -hooks: - postgresql_databases: - - name: all - format: custom - mysql_databases: - - name: all - format: sql +postgresql_databases: + - name: all + format: custom +mysql_databases: + - name: all + format: sql ``` ### Containers @@ -143,15 +144,17 @@ problem—configure borgmatic to connect to the container's name on its exposed port. For instance: ```yaml -hooks: - postgresql_databases: - - name: users - hostname: your-database-container-name - port: 5433 - username: postgres - password: trustsome1 +postgresql_databases: + - name: users + hostname: your-database-container-name + port: 5433 + username: postgres + password: trustsome1 ``` +Prior to version 1.8.0 Put +these options in the `hooks:` section of your configuration. + But what if borgmatic is running on the host? You can still connect to a database container if its ports are properly exposed to the host. For instance, when running the database container, you can specify `--publish @@ -179,8 +182,7 @@ hooks: password: trustsome1 ``` -You can alter the ports in these examples to suit your particular database -system. +Alter the ports in these examples to suit your particular database system. ### No source directories @@ -428,10 +430,9 @@ You can add any additional flags to the `options:` in your database configuration. 
Here's an example: ```yaml -hooks: - mysql_databases: - - name: posts - options: "--single-transaction --quick" +mysql_databases: + - name: posts + options: "--single-transaction --quick" ``` ### borgmatic hangs during backup diff --git a/docs/how-to/deal-with-very-large-backups.md b/docs/how-to/deal-with-very-large-backups.md index 48199b97..96064c12 100644 --- a/docs/how-to/deal-with-very-large-backups.md +++ b/docs/how-to/deal-with-very-large-backups.md @@ -65,19 +65,20 @@ configure borgmatic to run repository checks only. Configure this in the `consistency` section of borgmatic configuration: ```yaml -consistency: - checks: - - name: repository +checks: + - name: repository ``` +Prior to version 1.8.0 Put +this option in the `consistency:` section of your configuration. + Prior to version 1.6.2 The `checks` option was a plain list of strings without the `name:` part, and borgmatic ran each configured check every time checks were run. For example: ```yaml -consistency: - checks: - - repository +checks: + - repository ``` @@ -103,14 +104,16 @@ optionally configure checks to run on a periodic basis rather than every time borgmatic runs checks. For instance: ```yaml -consistency: - checks: - - name: repository - frequency: 2 weeks - - name: archives - frequency: 1 month +checks: + - name: repository + frequency: 2 weeks + - name: archives + frequency: 1 month ``` +Prior to version 1.8.0 Put +this option in the `consistency:` section of your configuration. + This tells borgmatic to run the `repository` consistency check at most once every two weeks for a given repository and the `archives` check at most once a month. The `frequency` value is a number followed by a unit of time, e.g. "3 @@ -173,9 +176,8 @@ this option in the `consistency:` section of your configuration. was a plain list of strings without the `name:` part. 
For instance: ```yaml -consistency: - checks: - - disabled +checks: + - disabled ``` If you have multiple repositories in your borgmatic configuration file, diff --git a/docs/how-to/develop-on-borgmatic.md b/docs/how-to/develop-on-borgmatic.md index 391d7950..bd219bbc 100644 --- a/docs/how-to/develop-on-borgmatic.md +++ b/docs/how-to/develop-on-borgmatic.md @@ -7,7 +7,7 @@ eleventyNavigation: --- ## Source code -To get set up to hack on borgmatic, first clone it via HTTPS or SSH: +To get set up to develop on borgmatic, first clone it via HTTPS or SSH: ```bash git clone https://projects.torsion.org/borgmatic-collective/borgmatic.git @@ -21,8 +21,8 @@ git clone ssh://git@projects.torsion.org:3022/borgmatic-collective/borgmatic.git Then, install borgmatic "[editable](https://pip.pypa.io/en/stable/cli/pip_install/#editable-installs)" -so that you can run borgmatic commands while you're hacking on them to -make sure your changes work. +so that you can run borgmatic actions during development to make sure your +changes work. ```bash cd borgmatic diff --git a/docs/how-to/inspect-your-backups.md b/docs/how-to/inspect-your-backups.md index c1e509b8..314e0c74 100644 --- a/docs/how-to/inspect-your-backups.md +++ b/docs/how-to/inspect-your-backups.md @@ -60,7 +60,7 @@ with `--format`. Refer to the [borg list --format documentation](https://borgbackup.readthedocs.io/en/stable/usage/list.html#the-format-specifier-syntax) for available values. -*(No borgmatic `list` or `info` actions? Upgrade borgmatic!)* +(No borgmatic `list` or `info` actions? Upgrade borgmatic!) New in borgmatic version 1.7.0 There are also `rlist` and `rinfo` actions for displaying repository diff --git a/docs/how-to/make-per-application-backups.md b/docs/how-to/make-per-application-backups.md index cae2fa2e..62a1c3e5 100644 --- a/docs/how-to/make-per-application-backups.md +++ b/docs/how-to/make-per-application-backups.md @@ -104,11 +104,12 @@ to filter archives when running supported actions. 
For instance, let's say that you have this in your configuration: ```yaml -storage: - ... - archive_name_format: {hostname}-user-data-{now} +archive_name_format: {hostname}-user-data-{now} ``` +Prior to version 1.8.0 Put +this option in the `storage:` section of your configuration. + borgmatic considers `{now}` an emphemeral data placeholder that will probably change per archive, while `{hostname}` won't. So it turns the example value into `{hostname}-user-data-*` and applies it to filter down the set of @@ -124,10 +125,8 @@ If this behavior isn't quite smart enough for your needs, you can use the filtering archives. For example: ```yaml -storage: - ... - archive_name_format: {hostname}-user-data-{now} - match_archives: sh:myhost-user-data-* +archive_name_format: {hostname}-user-data-{now} +match_archives: sh:myhost-user-data-* ``` For Borg 1.x, use a shell pattern for the `match_archives` value and see the @@ -190,7 +189,7 @@ When a configuration include is a relative path, borgmatic loads it from either the current working directory or from the directory containing the file doing the including. -Note that this form of include must be a YAML value rather than a key. For +Note that this form of include must be a value rather than an option name. For example, this will not work: ```yaml @@ -201,7 +200,7 @@ repositories: !include /etc/borgmatic/common_checks.yaml ``` -But if you do want to merge in a YAML key *and* its values, keep reading! +But if you do want to merge in a option name *and* its values, keep reading! ## Include merging @@ -238,11 +237,6 @@ Once this include gets merged in, the resulting configuration would have all of the options from the original configuration file *and* the options from the include. -Prior to version 1.6.0 When the -same option appeared in both the local file and the merged include, the local -file's value took precedence—meaning the included value was ignored in favor -of the local one. 
But see below about deep merge in version 1.6.0+. - Note that this `<<` include merging syntax is only for merging in mappings (configuration options and their values). But if you'd like to include a single value directly, please see the above about standard includes. @@ -261,29 +255,30 @@ at all levels in the two configuration files. This allows you to include common configuration—up to full borgmatic configuration files—while overriding only the parts you want to customize. -For instance, here's an example of a main configuration file that pulls in two -retention options via an include and then overrides one of them locally: +For instance, here's an example of a main configuration file that pulls in +options via an include and then overrides one of them locally: ```yaml <<: !include /etc/borgmatic/common.yaml -location: - ... +constants: + hostname: myhostname -retention: - keep_daily: 5 +repositories: + - path: repo.borg ``` This is what `common.yaml` might look like: ```yaml -retention: - keep_hourly: 24 - keep_daily: 7 +constants: + prefix: myprefix + hostname: otherhost ``` Once this include gets merged in, the resulting configuration would have a -`keep_hourly` value of `24` and an overridden `keep_daily` value of `5`. +`prefix` value of `myprefix` and an overridden `hostname` value of +`myhostname`. When there's an option collision between the local file and the merged include, the local file's option takes precedence. @@ -301,21 +296,22 @@ configuration file, you can omit it with an `!omit` tag. For instance: ```yaml <<: !include /etc/borgmatic/common.yaml -location: - source_directories: - - !omit /home - - /var +source_directories: + - !omit /home + - /var ``` And `common.yaml` like this: ```yaml -location: - source_directories: - - /home - - /etc +source_directories: + - /home + - /etc ``` +Prior to version 1.8.0 Put +this option in the `location:` section of your configuration. 
+ Once this include gets merged in, the resulting configuration will have a `source_directories` value of `/etc` and `/var`—with `/home` omitted. @@ -328,16 +324,15 @@ an example of some things not to do: ```yaml <<: !include /etc/borgmatic/common.yaml -location: - source_directories: - # Do not do this! It will not work. "!omit" belongs before "/home". - - /home !omit +source_directories: + # Do not do this! It will not work. "!omit" belongs before "/home". + - /home !omit - # Do not do this either! "!omit" only works on scalar list items. - repositories: !omit - # Also do not do this for the same reason! This is a list item, but it's - # not a scalar. - - !omit path: repo.borg +# Do not do this either! "!omit" only works on scalar list items. +repositories: !omit + # Also do not do this for the same reason! This is a list item, but it's + # not a scalar. + - !omit path: repo.borg ``` Additionally, the `!omit` tag only works in a configuration file that also @@ -451,9 +446,6 @@ What this does is load your configuration files, and for each one, disregard the configured value for the `remote_path` option, and use the value of `/usr/local/bin/borg1` instead. -Prior to version 1.8.0 Don't -forget to specify the section (like `location:`) that any option is in. - You can even override nested values or multiple values at once. For instance: ```bash diff --git a/docs/how-to/monitor-your-backups.md b/docs/how-to/monitor-your-backups.md index 517f9c79..2be61c61 100644 --- a/docs/how-to/monitor-your-backups.md +++ b/docs/how-to/monitor-your-backups.md @@ -89,19 +89,20 @@ notifications or take other actions, so you can get alerted as soon as something goes wrong. Here's a not-so-useful example: ```yaml -hooks: - on_error: - - echo "Error while creating a backup or running a backup hook." +on_error: + - echo "Error while creating a backup or running a backup hook." ``` +Prior to version 1.8.0 Put +this option in the `hooks:` section of your configuration. 
+ The `on_error` hook supports interpolating particular runtime variables into the hook command. Here's an example that assumes you provide a separate shell script to handle the alerting: ```yaml -hooks: - on_error: - - send-text-message.sh "{configuration_filename}" "{repository}" +on_error: + - send-text-message.sh "{configuration_filename}" "{repository}" ``` In this example, when the error occurs, borgmatic interpolates runtime values @@ -135,11 +136,13 @@ URL" for your project. Here's an example: ```yaml -hooks: - healthchecks: - ping_url: https://hc-ping.com/addffa72-da17-40ae-be9c-ff591afb942a +healthchecks: + ping_url: https://hc-ping.com/addffa72-da17-40ae-be9c-ff591afb942a ``` +Prior to version 1.8.0 Put +this option in the `hooks:` section of your configuration. + With this hook in place, borgmatic pings your Healthchecks project when a backup begins, ends, or errors. Specifically, after the `before_backup` @@ -179,11 +182,13 @@ API URL" for your monitor. Here's an example: ```yaml -hooks: - cronitor: - ping_url: https://cronitor.link/d3x0c1 +cronitor: + ping_url: https://cronitor.link/d3x0c1 ``` +Prior to version 1.8.0 Put +this option in the `hooks:` section of your configuration. + With this hook in place, borgmatic pings your Cronitor monitor when a backup begins, ends, or errors. Specifically, after the `before_backup` @@ -208,11 +213,13 @@ URL" for your monitor. Here's an example: ```yaml -hooks: - cronhub: - ping_url: https://cronhub.io/start/1f5e3410-254c-11e8-b61d-55875966d031 +cronhub: + ping_url: https://cronhub.io/start/1f5e3410-254c-11e8-b61d-55875966d031 ``` +Prior to version 1.8.0 Put +this option in the `hooks:` section of your configuration. + With this hook in place, borgmatic pings your Cronhub monitor when a backup begins, ends, or errors. 
Specifically, after the `before_backup` @@ -251,11 +258,13 @@ Here's an example: ```yaml -hooks: - pagerduty: - integration_key: a177cad45bd374409f78906a810a3074 +pagerduty: + integration_key: a177cad45bd374409f78906a810a3074 ``` +Prior to version 1.8.0 Put +this option in the `hooks:` section of your configuration. + With this hook in place, borgmatic creates a PagerDuty event for your service whenever backups fail. Specifically, if an error occurs during a `create`, `prune`, `compact`, or `check` action, borgmatic sends an event to PagerDuty @@ -291,31 +300,34 @@ An example configuration is shown here, with all the available options, includin [tags](https://ntfy.sh/docs/publish/#tags-emojis): ```yaml -hooks: - ntfy: - topic: my-unique-topic - server: https://ntfy.my-domain.com - start: - title: A Borgmatic backup started - message: Watch this space... - tags: borgmatic - priority: min - finish: - title: A Borgmatic backup completed successfully - message: Nice! - tags: borgmatic,+1 - priority: min - fail: - title: A Borgmatic backup failed - message: You should probably fix it - tags: borgmatic,-1,skull - priority: max - states: - - start - - finish - - fail +ntfy: + topic: my-unique-topic + server: https://ntfy.my-domain.com + start: + title: A Borgmatic backup started + message: Watch this space... + tags: borgmatic + priority: min + finish: + title: A Borgmatic backup completed successfully + message: Nice! + tags: borgmatic,+1 + priority: min + fail: + title: A Borgmatic backup failed + message: You should probably fix it + tags: borgmatic,-1,skull + priority: max + states: + - start + - finish + - fail ``` +Prior to version 1.8.0 Put +the `ntfy:` option in the `hooks:` section of your configuration. 
+ + ## Scripting borgmatic To consume the output of borgmatic in other software, you can include an diff --git a/docs/how-to/provide-your-passwords.md b/docs/how-to/provide-your-passwords.md index 76f89d8d..99b56e49 100644 --- a/docs/how-to/provide-your-passwords.md +++ b/docs/how-to/provide-your-passwords.md @@ -20,10 +20,12 @@ pull your repository passphrase, your database passwords, or any other option values from environment variables. For instance: ```yaml -storage: - encryption_passphrase: ${MY_PASSPHRASE} +encryption_passphrase: ${MY_PASSPHRASE} ``` +Prior to version 1.8.0 Put +this option in the `storage:` section of your configuration. + This uses the `MY_PASSPHRASE` environment variable as your encryption passphrase. Note that the `{` `}` brackets are required. `$MY_PASSPHRASE` by itself will not work. @@ -38,12 +40,14 @@ configuration](https://torsion.org/borgmatic/docs/how-to/backup-your-databases/) the same approach applies. For example: ```yaml -hooks: - postgresql_databases: - - name: users - password: ${MY_DATABASE_PASSWORD} +postgresql_databases: + - name: users + password: ${MY_DATABASE_PASSWORD} ``` +Prior to version 1.8.0 Put +this option in the `hooks:` section of your configuration. + This uses the `MY_DATABASE_PASSWORD` environment variable as your database password. @@ -53,8 +57,7 @@ password. If you'd like to set a default for your environment variables, you can do so with the following syntax: ```yaml -storage: - encryption_passphrase: ${MY_PASSPHRASE:-defaultpass} +encryption_passphrase: ${MY_PASSPHRASE:-defaultpass} ``` Here, "`defaultpass`" is the default passphrase if the `MY_PASSPHRASE` @@ -72,8 +75,7 @@ can escape it with a backslash. For instance, if your password is literally `${A}@!`: ```yaml -storage: - encryption_passphrase: \${A}@! +encryption_passphrase: \${A}@! 
``` ### Related features diff --git a/docs/how-to/upgrade.md b/docs/how-to/upgrade.md index 43a28cc0..87e92ac3 100644 --- a/docs/how-to/upgrade.md +++ b/docs/how-to/upgrade.md @@ -135,12 +135,6 @@ repositories: - path: original.borg ``` -Prior to version 1.8.0 This -option was found in the `location:` section of your configuration. - -Prior to version 1.7.10 Omit -the `path:` portion of the `repositories` list. - Change it to a new (not yet created) repository path: ```yaml @@ -148,6 +142,12 @@ repositories: - path: upgraded.borg ``` +Prior to version 1.8.0 This +option was found in the `location:` section of your configuration. + +Prior to version 1.7.10 Omit +the `path:` portion of the `repositories` list. + Then, run the `rcreate` action (formerly `init`) to create that new Borg 2 repository: diff --git a/docs/reference/command-line.md b/docs/reference/command-line.md index 5ef3836c..78f8e7f3 100644 --- a/docs/reference/command-line.md +++ b/docs/reference/command-line.md @@ -15,7 +15,7 @@ listed here do not have equivalents in borgmatic's [configuration file](https://torsion.org/borgmatic/docs/reference/configuration/). If you're using an older version of borgmatic, some of these flags may not be -present in that version, and you should instead use `borgmatic --help` or +present in that version and you should instead use `borgmatic --help` or `borgmatic [action name] --help` (where `[action name]` is the name of an action like `list`, `create`, etc.). From 054bd524825f8434a45b7ce13cf690c2ba12ed13 Mon Sep 17 00:00:00 2001 From: Dan Helfman Date: Tue, 11 Jul 2023 22:16:42 -0700 Subject: [PATCH 10/12] Fix incorrect merge from main that broke tests (#721). 
--- borgmatic/borg/environment.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/borgmatic/borg/environment.py b/borgmatic/borg/environment.py index 136cf95e..fbe99b63 100644 --- a/borgmatic/borg/environment.py +++ b/borgmatic/borg/environment.py @@ -45,7 +45,7 @@ def make_environment(config): option_name, environment_variable_name, ) in DEFAULT_BOOL_OPTION_TO_UPPERCASE_ENVIRONMENT_VARIABLE.items(): - value = storage_config.get(option_name, False) + value = config.get(option_name, False) environment[environment_variable_name] = 'YES' if value else 'NO' return environment From 18b3b569d03dc6c50d848208be1bee71dbd786a9 Mon Sep 17 00:00:00 2001 From: Dan Helfman Date: Wed, 12 Jul 2023 09:39:45 -0700 Subject: [PATCH 11/12] Fix broken bootstrap action (#721). --- borgmatic/actions/config/bootstrap.py | 2 -- tests/end-to-end/test_borgmatic.py | 9 ++++++++- tests/end-to-end/test_override.py | 2 +- 3 files changed, 9 insertions(+), 4 deletions(-) diff --git a/borgmatic/actions/config/bootstrap.py b/borgmatic/actions/config/bootstrap.py index 2f5acbd4..ee167478 100644 --- a/borgmatic/actions/config/bootstrap.py +++ b/borgmatic/actions/config/bootstrap.py @@ -43,7 +43,6 @@ def get_config_paths(bootstrap_arguments, global_arguments, local_borg_version): ), [borgmatic_manifest_path], {}, - {}, local_borg_version, global_arguments, extract_to_stdout=True, @@ -95,7 +94,6 @@ def run_bootstrap(bootstrap_arguments, global_arguments, local_borg_version): ), [config_path.lstrip(os.path.sep) for config_path in manifest_config_paths], {}, - {}, local_borg_version, global_arguments, extract_to_stdout=False, diff --git a/tests/end-to-end/test_borgmatic.py b/tests/end-to-end/test_borgmatic.py index 93fd27a2..d4d8629b 100644 --- a/tests/end-to-end/test_borgmatic.py +++ b/tests/end-to-end/test_borgmatic.py @@ -22,7 +22,7 @@ def generate_configuration(config_path, repository_path): .replace('- /home', f'- {config_path}') .replace('- /etc', '') .replace('- /var/log/syslog*', 
'') - + 'storage:\n encryption_passphrase: "test"' + + 'encryption_passphrase: "test"' ) config_file = open(config_path, 'w') config_file.write(config) @@ -74,6 +74,13 @@ def test_borgmatic_command(): assert len(parsed_output) == 1 assert 'repository' in parsed_output[0] + + # Exercise the bootstrap action. + output = subprocess.check_output( + f'borgmatic --config {config_path} bootstrap --repository {repository_path}'.split(' '), + ).decode(sys.stdout.encoding) + + assert 'successful' in output finally: os.chdir(original_working_directory) shutil.rmtree(temporary_directory) diff --git a/tests/end-to-end/test_override.py b/tests/end-to-end/test_override.py index 7debc6e2..f0aa8a88 100644 --- a/tests/end-to-end/test_override.py +++ b/tests/end-to-end/test_override.py @@ -21,7 +21,7 @@ def generate_configuration(config_path, repository_path): .replace('- /home', f'- {config_path}') .replace('- /etc', '') .replace('- /var/log/syslog*', '') - + 'storage:\n encryption_passphrase: "test"' + + 'encryption_passphrase: "test"' ) config_file = open(config_path, 'w') config_file.write(config) From da789294157d77ca0e7f1961bc01e76dd9bf43a9 Mon Sep 17 00:00:00 2001 From: Dan Helfman Date: Thu, 13 Jul 2023 19:25:14 -0700 Subject: [PATCH 12/12] To prevent argument parsing errors on ambiguous commands, drop support for multiple consecutive flag values. --- NEWS | 3 + borgmatic/commands/arguments.py | 74 ++++++-------------- borgmatic/config/normalize.py | 2 +- docs/how-to/backup-your-databases.md | 2 +- docs/how-to/extract-a-backup.md | 2 +- docs/how-to/make-per-application-backups.md | 6 -- tests/end-to-end/test_borgmatic.py | 7 -- tests/integration/commands/test_arguments.py | 17 ++--- tests/unit/hooks/test_mysql.py | 23 ++++++ tests/unit/hooks/test_postgresql.py | 24 +++++++ 10 files changed, 79 insertions(+), 81 deletions(-) diff --git a/NEWS b/NEWS index 23f47371..dd882d97 100644 --- a/NEWS +++ b/NEWS @@ -17,6 +17,9 @@ values (unless one is not set). 
* #721: BREAKING: The storage umask and the hooks umask can no longer have different values (unless one is not set). + * BREAKING: Flags like "--config" that previously took multiple values now need to be given once + per value, e.g. "--config first.yaml --config second.yaml" instead of "--config first.yaml + second.yaml". This prevents argument parsing errors on ambiguous commands. * BREAKING: Remove the deprecated (and silently ignored) "--successful" flag on the "list" action, as newer versions of Borg list successful (non-checkpoint) archives by default. * All deprecated configuration option values now generate warning logs. diff --git a/borgmatic/commands/arguments.py b/borgmatic/commands/arguments.py index ce246e14..0b4b259c 100644 --- a/borgmatic/commands/arguments.py +++ b/borgmatic/commands/arguments.py @@ -1,7 +1,7 @@ import collections import itertools import sys -from argparse import Action, ArgumentParser +from argparse import ArgumentParser from borgmatic.config import collect @@ -216,42 +216,12 @@ def parse_arguments_for_actions(unparsed_arguments, action_parsers, global_parse arguments['global'], remaining = global_parser.parse_known_args(unparsed_arguments) remaining_action_arguments.append(remaining) - # Prevent action names and arguments that follow "--config" paths from being considered as - # additional paths. - for argument_name in arguments.keys(): - if argument_name == 'global': - continue - - for action_name in [argument_name] + ACTION_ALIASES.get(argument_name, []): - try: - action_name_index = arguments['global'].config_paths.index(action_name) - arguments['global'].config_paths = arguments['global'].config_paths[ - :action_name_index - ] - break - except ValueError: - pass - return ( arguments, tuple(remaining_action_arguments) if arguments else unparsed_arguments, ) -class Extend_action(Action): - ''' - An argparse action to support Python 3.8's "extend" action in older versions of Python. 
- ''' - - def __call__(self, parser, namespace, values, option_string=None): - items = getattr(namespace, self.dest, None) - - if items: - items.extend(values) # pragma: no cover - else: - setattr(namespace, self.dest, list(values)) - - def make_parsers(): ''' Build a global arguments parser, individual action parsers, and a combined parser containing @@ -263,16 +233,14 @@ def make_parsers(): unexpanded_config_paths = collect.get_default_config_paths(expand_home=False) global_parser = ArgumentParser(add_help=False) - global_parser.register('action', 'extend', Extend_action) global_group = global_parser.add_argument_group('global arguments') global_group.add_argument( '-c', '--config', - nargs='*', dest='config_paths', - default=config_paths, - help=f"Configuration filenames or directories, defaults to: {' '.join(unexpanded_config_paths)}", + action='append', + help=f"Configuration filename or directory, can specify flag multiple times, defaults to: {' '.join(unexpanded_config_paths)}", ) global_group.add_argument( '-n', @@ -331,10 +299,9 @@ def make_parsers(): global_group.add_argument( '--override', metavar='OPTION.SUBOPTION=VALUE', - nargs='+', dest='overrides', - action='extend', - help='One or more configuration file options to override with specified values', + action='append', + help='Configuration file option to override with specified value, can specify flag multiple times', ) global_group.add_argument( '--no-environment-interpolation', @@ -672,9 +639,9 @@ def make_parsers(): '--path', '--restore-path', metavar='PATH', - nargs='+', dest='paths', - help='Paths to extract from archive, defaults to the entire archive', + action='append', + help='Path to extract from archive, can specify flag multiple times, defaults to the entire archive', ) extract_group.add_argument( '--destination', @@ -826,9 +793,9 @@ def make_parsers(): export_tar_group.add_argument( '--path', metavar='PATH', - nargs='+', dest='paths', - help='Paths to export from archive, defaults to the 
entire archive', + action='append', + help='Path to export from archive, can specify flag multiple times, defaults to the entire archive', ) export_tar_group.add_argument( '--destination', @@ -877,9 +844,9 @@ def make_parsers(): mount_group.add_argument( '--path', metavar='PATH', - nargs='+', dest='paths', - help='Paths to mount from archive, defaults to the entire archive', + action='append', + help='Path to mount from archive, can specify multiple times, defaults to the entire archive', ) mount_group.add_argument( '--foreground', @@ -954,16 +921,16 @@ def make_parsers(): restore_group.add_argument( '--database', metavar='NAME', - nargs='+', dest='databases', - help="Names of databases to restore from archive, defaults to all databases. Note that any databases to restore must be defined in borgmatic's configuration", + action='append', + help="Name of database to restore from archive, must be defined in borgmatic's configuration, can specify flag multiple times, defaults to all databases", ) restore_group.add_argument( '--schema', metavar='NAME', - nargs='+', dest='schemas', - help='Names of schemas to restore from the database, defaults to all schemas. Schemas are only supported for PostgreSQL and MongoDB databases', + action='append', + help='Name of schema to restore from the database, can specify flag multiple times, defaults to all schemas. 
Schemas are only supported for PostgreSQL and MongoDB databases', ) restore_group.add_argument( '--hostname', @@ -1065,16 +1032,16 @@ def make_parsers(): list_group.add_argument( '--path', metavar='PATH', - nargs='+', dest='paths', - help='Paths or patterns to list from a single selected archive (via "--archive"), defaults to listing the entire archive', + action='append', + help='Path or pattern to list from a single selected archive (via "--archive"), can specify flag multiple times, defaults to listing the entire archive', ) list_group.add_argument( '--find', metavar='PATH', - nargs='+', dest='find_paths', - help='Partial paths or patterns to search for and list across multiple archives', + action='append', + help='Partial path or pattern to search for and list across multiple archives, can specify flag multiple times', ) list_group.add_argument( '--short', default=False, action='store_true', help='Output only path names' @@ -1248,6 +1215,9 @@ def parse_arguments(*unparsed_arguments): unparsed_arguments, action_parsers.choices, global_parser ) + if not arguments['global'].config_paths: + arguments['global'].config_paths = collect.get_default_config_paths(expand_home=True) + for action_name in ('bootstrap', 'generate', 'validate'): if ( action_name in arguments.keys() and len(arguments.keys()) > 2 diff --git a/borgmatic/config/normalize.py b/borgmatic/config/normalize.py index 83f2a20b..7ca79130 100644 --- a/borgmatic/config/normalize.py +++ b/borgmatic/config/normalize.py @@ -46,7 +46,7 @@ def normalize_sections(config_filename, config): dict( levelno=logging.WARNING, levelname='WARNING', - msg=f'{config_filename}: Configuration sections like location: and storage: are deprecated and support will be removed from a future release. Move all of your options out of sections to the global scope.', + msg=f'{config_filename}: Configuration sections like location: and storage: are deprecated and support will be removed from a future release. 
To prepare for this, move your options out of sections to the global scope.', ) ) ] diff --git a/docs/how-to/backup-your-databases.md b/docs/how-to/backup-your-databases.md index 4708e0ad..f1f00878 100644 --- a/docs/how-to/backup-your-databases.md +++ b/docs/how-to/backup-your-databases.md @@ -295,7 +295,7 @@ restore one of them, use the `--database` flag to select one or more databases. For instance: ```bash -borgmatic restore --archive host-2023-... --database users +borgmatic restore --archive host-2023-... --database users --database orders ``` New in version 1.7.6 You can diff --git a/docs/how-to/extract-a-backup.md b/docs/how-to/extract-a-backup.md index 164fc135..5a4aa6fa 100644 --- a/docs/how-to/extract-a-backup.md +++ b/docs/how-to/extract-a-backup.md @@ -65,7 +65,7 @@ everything from an archive. To do that, tack on one or more `--path` values. For instance: ```bash -borgmatic extract --archive latest --path path/1 path/2 +borgmatic extract --archive latest --path path/1 --path path/2 ``` Note that the specified restore paths should not have a leading slash. Like a diff --git a/docs/how-to/make-per-application-backups.md b/docs/how-to/make-per-application-backups.md index 62a1c3e5..60e0b221 100644 --- a/docs/how-to/make-per-application-backups.md +++ b/docs/how-to/make-per-application-backups.md @@ -448,12 +448,6 @@ the configured value for the `remote_path` option, and use the value of You can even override nested values or multiple values at once. 
For instance: -```bash -borgmatic create --override parent_option.option1=value1 parent_option.option2=value2 -``` - -This will accomplish the same thing: - ```bash borgmatic create --override parent_option.option1=value1 --override parent_option.option2=value2 ``` diff --git a/tests/end-to-end/test_borgmatic.py b/tests/end-to-end/test_borgmatic.py index d4d8629b..7eac73a6 100644 --- a/tests/end-to-end/test_borgmatic.py +++ b/tests/end-to-end/test_borgmatic.py @@ -74,13 +74,6 @@ def test_borgmatic_command(): assert len(parsed_output) == 1 assert 'repository' in parsed_output[0] - - # Exercise the bootstrap action. - output = subprocess.check_output( - f'borgmatic --config {config_path} bootstrap --repository {repository_path}'.split(' '), - ).decode(sys.stdout.encoding) - - assert 'successful' in output finally: os.chdir(original_working_directory) shutil.rmtree(temporary_directory) diff --git a/tests/integration/commands/test_arguments.py b/tests/integration/commands/test_arguments.py index db8db636..74d056aa 100644 --- a/tests/integration/commands/test_arguments.py +++ b/tests/integration/commands/test_arguments.py @@ -17,10 +17,10 @@ def test_parse_arguments_with_no_arguments_uses_defaults(): assert global_arguments.log_file_verbosity == 0 -def test_parse_arguments_with_multiple_config_paths_parses_as_list(): +def test_parse_arguments_with_multiple_config_flags_parses_as_list(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) - arguments = module.parse_arguments('--config', 'myconfig', 'otherconfig') + arguments = module.parse_arguments('--config', 'myconfig', '--config', 'otherconfig') global_arguments = arguments['global'] assert global_arguments.config_paths == ['myconfig', 'otherconfig'] @@ -109,20 +109,11 @@ def test_parse_arguments_with_single_override_parses(): assert global_arguments.overrides == ['foo.bar=baz'] -def test_parse_arguments_with_multiple_overrides_parses(): - 
flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) - - arguments = module.parse_arguments('--override', 'foo.bar=baz', 'foo.quux=7') - - global_arguments = arguments['global'] - assert global_arguments.overrides == ['foo.bar=baz', 'foo.quux=7'] - - -def test_parse_arguments_with_multiple_overrides_and_flags_parses(): +def test_parse_arguments_with_multiple_overrides_flags_parses(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) arguments = module.parse_arguments( - '--override', 'foo.bar=baz', '--override', 'foo.quux=7', 'this.that=8' + '--override', 'foo.bar=baz', '--override', 'foo.quux=7', '--override', 'this.that=8' ) global_arguments = arguments['global'] diff --git a/tests/unit/hooks/test_mysql.py b/tests/unit/hooks/test_mysql.py index 19ab3cc7..55abebb1 100644 --- a/tests/unit/hooks/test_mysql.py +++ b/tests/unit/hooks/test_mysql.py @@ -407,6 +407,29 @@ def test_restore_database_dump_runs_mysql_to_restore(): ) +def test_restore_database_dump_errors_when_database_missing_from_configuration(): + databases_config = [{'name': 'foo'}, {'name': 'bar'}] + extract_process = flexmock(stdout=flexmock()) + + flexmock(module).should_receive('execute_command_with_processes').never() + + with pytest.raises(ValueError): + module.restore_database_dump( + databases_config, + {}, + 'test.yaml', + database_name='other', + dry_run=False, + extract_process=extract_process, + connection_params={ + 'hostname': None, + 'port': None, + 'username': None, + 'password': None, + }, + ) + + def test_restore_database_dump_runs_mysql_with_options(): databases_config = [{'name': 'foo', 'restore_options': '--harder'}] extract_process = flexmock(stdout=flexmock()) diff --git a/tests/unit/hooks/test_postgresql.py b/tests/unit/hooks/test_postgresql.py index 51235ffc..73d67dad 100644 --- a/tests/unit/hooks/test_postgresql.py +++ b/tests/unit/hooks/test_postgresql.py @@ -515,6 +515,30 @@ def 
test_restore_database_dump_runs_pg_restore(): ) +def test_restore_database_dump_errors_when_database_missing_from_configuration(): + databases_config = [{'name': 'foo', 'schemas': None}, {'name': 'bar'}] + extract_process = flexmock(stdout=flexmock()) + + flexmock(module).should_receive('execute_command_with_processes').never() + flexmock(module).should_receive('execute_command').never() + + with pytest.raises(ValueError): + module.restore_database_dump( + databases_config, + {}, + 'test.yaml', + database_name='other', + dry_run=False, + extract_process=extract_process, + connection_params={ + 'hostname': None, + 'port': None, + 'username': None, + 'password': None, + }, + ) + + def test_restore_database_dump_runs_pg_restore_with_hostname_and_port(): databases_config = [ {'name': 'foo', 'hostname': 'database.example.org', 'port': 5433, 'schemas': None}