From f42890430c59a40a17d9a68a193d6a09674770cb Mon Sep 17 00:00:00 2001 From: Dan Helfman Date: Thu, 23 Mar 2023 23:11:14 -0700 Subject: [PATCH] Add code style plugins to enforce use of Python f-strings and prevent single-letter variables. --- NEWS | 2 ++ borgmatic/actions/borg.py | 2 +- borgmatic/actions/check.py | 2 +- borgmatic/actions/compact.py | 4 +-- borgmatic/actions/create.py | 2 +- borgmatic/actions/export_tar.py | 4 +-- borgmatic/actions/extract.py | 2 +- borgmatic/actions/mount.py | 4 +-- borgmatic/actions/prune.py | 2 +- borgmatic/actions/rcreate.py | 2 +- borgmatic/actions/restore.py | 5 ++-- borgmatic/actions/rinfo.py | 3 +- borgmatic/actions/rlist.py | 3 +- borgmatic/borg/check.py | 4 +-- borgmatic/borg/create.py | 2 +- borgmatic/borg/export_tar.py | 2 +- borgmatic/borg/flags.py | 2 +- borgmatic/borg/list.py | 2 +- borgmatic/borg/prune.py | 2 +- borgmatic/borg/rlist.py | 2 +- borgmatic/commands/arguments.py | 4 +-- borgmatic/commands/borgmatic.py | 38 ++++++++++-------------- borgmatic/commands/completion.py | 4 +-- borgmatic/commands/convert_config.py | 22 +++++--------- borgmatic/commands/generate_config.py | 14 +++------ borgmatic/commands/validate_config.py | 10 ++----- borgmatic/config/collect.py | 4 +-- borgmatic/config/environment.py | 5 +++- borgmatic/config/generate.py | 10 +++---- borgmatic/config/legacy.py | 16 ++++------ borgmatic/config/validate.py | 23 +++++++------- borgmatic/execute.py | 8 ++--- borgmatic/hooks/command.py | 18 ++++------- borgmatic/hooks/cronhub.py | 8 ++--- borgmatic/hooks/cronitor.py | 8 ++--- borgmatic/hooks/dispatch.py | 4 +-- borgmatic/hooks/dump.py | 8 ++--- borgmatic/hooks/healthchecks.py | 10 +++---- borgmatic/hooks/mongodb.py | 10 ++----- borgmatic/hooks/mysql.py | 6 ++-- borgmatic/hooks/pagerduty.py | 10 +++---- borgmatic/hooks/postgresql.py | 6 ++-- borgmatic/hooks/sqlite.py | 2 +- borgmatic/logger.py | 2 +- test_requirements.txt | 2 ++ tests/end-to-end/test_borgmatic.py | 20 +++++-------- tests/end-to-end/test_database.py | 2 +- tests/end-to-end/test_override.py | 8 ++--- tests/end-to-end/test_validate_config.py | 16 +++------- tests/integration/config/test_legacy.py | 2 +- tests/unit/borg/test_create.py | 20 ++++++------- tests/unit/borg/test_prune.py | 22 ++++++++++---- tests/unit/config/test_environment.py | 28 ++++++++--------- tests/unit/config/test_validate.py | 8 ++--- tests/unit/hooks/test_command.py | 17 ++++------- tests/unit/hooks/test_healthchecks.py | 4 +-- tests/unit/hooks/test_mongodb.py | 2 +- tests/unit/hooks/test_postgresql.py | 2 +- 58 files changed, 195 insertions(+), 261 deletions(-) diff --git a/NEWS b/NEWS index bd5687cd..bc5f431e 100644 --- a/NEWS +++ b/NEWS @@ -6,6 +6,8 @@ in borgmatic's storage configuration. * #623: Fix confusing message when an error occurs running actions for a configuration file. * #655: Fix error when databases are configured and a source directory doesn't exist. + * Add code style plugins to enforce use of Python f-strings and prevent single-letter variables. + To join in the pedantry, refresh your test environment with "tox --recreate". 1.7.9 * #295: Add a SQLite database dump/restore hook. 
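Note on the new checks (an illustrative sketch, not part of the patch itself): flake8-use-fstring reports old-style string formatting and also flags any literal string containing "{...}" placeholders that lacks an f prefix, which appears to be its FS003 code. Borg placeholder strings such as '{hostname}-' are intentional literals rather than Python interpolation, which is why the hunks below add "# noqa: FS003" to them instead of converting them to f-strings. The flake8-variables-names plugin, per the commit subject, similarly rejects single-letter variable names. The describe_repository helpers below are hypothetical examples for illustration, not borgmatic functions:

    # Reported by flake8-use-fstring: old-style .format() call.
    def describe_repository_old(repository):
        return '{}: Running arbitrary Borg command'.format(repository)

    # Preferred form the plugin enforces: interpolate with an f-string.
    def describe_repository(repository):
        return f'{repository}: Running arbitrary Borg command'

    # A literal Borg placeholder is not Python interpolation, so the warning
    # is suppressed here rather than adding an f prefix (assuming FS003 is the
    # plugin's "f-string missing prefix" code).
    DEFAULT_PREFIX = '{hostname}-'  # noqa: FS003
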
diff --git a/borgmatic/actions/borg.py b/borgmatic/actions/borg.py index a50dd286..ee94f1dc 100644 --- a/borgmatic/actions/borg.py +++ b/borgmatic/actions/borg.py @@ -16,7 +16,7 @@ def run_borg( if borg_arguments.repository is None or borgmatic.config.validate.repositories_match( repository, borg_arguments.repository ): - logger.info('{}: Running arbitrary Borg command'.format(repository)) + logger.info(f'{repository}: Running arbitrary Borg command') archive_name = borgmatic.borg.rlist.resolve_archive_name( repository, borg_arguments.archive, diff --git a/borgmatic/actions/check.py b/borgmatic/actions/check.py index f3572395..cdee9edb 100644 --- a/borgmatic/actions/check.py +++ b/borgmatic/actions/check.py @@ -37,7 +37,7 @@ def run_check( global_arguments.dry_run, **hook_context, ) - logger.info('{}: Running consistency checks'.format(repository)) + logger.info(f'{repository}: Running consistency checks') borgmatic.borg.check.check_archives( repository, location, diff --git a/borgmatic/actions/compact.py b/borgmatic/actions/compact.py index 7a25b829..a0efa3a2 100644 --- a/borgmatic/actions/compact.py +++ b/borgmatic/actions/compact.py @@ -39,7 +39,7 @@ def run_compact( **hook_context, ) if borgmatic.borg.feature.available(borgmatic.borg.feature.Feature.COMPACT, local_borg_version): - logger.info('{}: Compacting segments{}'.format(repository, dry_run_label)) + logger.info(f'{repository}: Compacting segments{dry_run_label}') borgmatic.borg.compact.compact_segments( global_arguments.dry_run, repository, @@ -52,7 +52,7 @@ def run_compact( threshold=compact_arguments.threshold, ) else: # pragma: nocover - logger.info('{}: Skipping compact (only available/needed in Borg 1.2+)'.format(repository)) + logger.info(f'{repository}: Skipping compact (only available/needed in Borg 1.2+)') borgmatic.hooks.command.execute_hook( hooks.get('after_compact'), hooks.get('umask'), diff --git a/borgmatic/actions/create.py b/borgmatic/actions/create.py index 96a48521..ac2617d2 100644 --- a/borgmatic/actions/create.py +++ b/borgmatic/actions/create.py @@ -42,7 +42,7 @@ def run_create( global_arguments.dry_run, **hook_context, ) - logger.info('{}: Creating archive{}'.format(repository, dry_run_label)) + logger.info(f'{repository}: Creating archive{dry_run_label}') borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured( 'remove_database_dumps', hooks, diff --git a/borgmatic/actions/export_tar.py b/borgmatic/actions/export_tar.py index ae349208..b5b6089d 100644 --- a/borgmatic/actions/export_tar.py +++ b/borgmatic/actions/export_tar.py @@ -22,9 +22,7 @@ def run_export_tar( if export_tar_arguments.repository is None or borgmatic.config.validate.repositories_match( repository, export_tar_arguments.repository ): - logger.info( - '{}: Exporting archive {} as tar file'.format(repository, export_tar_arguments.archive) - ) + logger.info(f'{repository}: Exporting archive {export_tar_arguments.archive} as tar file') borgmatic.borg.export_tar.export_tar_archive( global_arguments.dry_run, repository, diff --git a/borgmatic/actions/extract.py b/borgmatic/actions/extract.py index a3d89a55..6af9caa1 100644 --- a/borgmatic/actions/extract.py +++ b/borgmatic/actions/extract.py @@ -35,7 +35,7 @@ def run_extract( if extract_arguments.repository is None or borgmatic.config.validate.repositories_match( repository, extract_arguments.repository ): - logger.info('{}: Extracting archive {}'.format(repository, extract_arguments.archive)) + logger.info(f'{repository}: Extracting archive {extract_arguments.archive}') 
borgmatic.borg.extract.extract_archive( global_arguments.dry_run, repository, diff --git a/borgmatic/actions/mount.py b/borgmatic/actions/mount.py index e2703a57..262e7d9e 100644 --- a/borgmatic/actions/mount.py +++ b/borgmatic/actions/mount.py @@ -17,9 +17,9 @@ def run_mount( repository, mount_arguments.repository ): if mount_arguments.archive: - logger.info('{}: Mounting archive {}'.format(repository, mount_arguments.archive)) + logger.info(f'{repository}: Mounting archive {mount_arguments.archive}') else: # pragma: nocover - logger.info('{}: Mounting repository'.format(repository)) + logger.info(f'{repository}: Mounting repository') borgmatic.borg.mount.mount_archive( repository, diff --git a/borgmatic/actions/prune.py b/borgmatic/actions/prune.py index ca098ce4..76a42a9a 100644 --- a/borgmatic/actions/prune.py +++ b/borgmatic/actions/prune.py @@ -37,7 +37,7 @@ def run_prune( global_arguments.dry_run, **hook_context, ) - logger.info('{}: Pruning archives{}'.format(repository, dry_run_label)) + logger.info(f'{repository}: Pruning archives{dry_run_label}') borgmatic.borg.prune.prune_archives( global_arguments.dry_run, repository, diff --git a/borgmatic/actions/rcreate.py b/borgmatic/actions/rcreate.py index 0052b4b6..59b147d7 100644 --- a/borgmatic/actions/rcreate.py +++ b/borgmatic/actions/rcreate.py @@ -23,7 +23,7 @@ def run_rcreate( ): return - logger.info('{}: Creating repository'.format(repository)) + logger.info(f'{repository}: Creating repository') borgmatic.borg.rcreate.create_repository( global_arguments.dry_run, repository, diff --git a/borgmatic/actions/restore.py b/borgmatic/actions/restore.py index 7a058092..50c39737 100644 --- a/borgmatic/actions/restore.py +++ b/borgmatic/actions/restore.py @@ -255,9 +255,8 @@ def run_restore( ): return - logger.info( - '{}: Restoring databases from archive {}'.format(repository, restore_arguments.archive) - ) + logger.info(f'{repository}: Restoring databases from archive {restore_arguments.archive}') + borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured( 'remove_database_dumps', hooks, diff --git a/borgmatic/actions/rinfo.py b/borgmatic/actions/rinfo.py index 611d1bc2..e7132c04 100644 --- a/borgmatic/actions/rinfo.py +++ b/borgmatic/actions/rinfo.py @@ -19,7 +19,8 @@ def run_rinfo( repository, rinfo_arguments.repository ): if not rinfo_arguments.json: # pragma: nocover - logger.answer('{}: Displaying repository summary information'.format(repository)) + logger.answer(f'{repository}: Displaying repository summary information') + json_output = borgmatic.borg.rinfo.display_repository_info( repository, storage, diff --git a/borgmatic/actions/rlist.py b/borgmatic/actions/rlist.py index 72d52068..aa2032b1 100644 --- a/borgmatic/actions/rlist.py +++ b/borgmatic/actions/rlist.py @@ -19,7 +19,8 @@ def run_rlist( repository, rlist_arguments.repository ): if not rlist_arguments.json: # pragma: nocover - logger.answer('{}: Listing repository'.format(repository)) + logger.answer(f'{repository}: Listing repository') + json_output = borgmatic.borg.rlist.list_repository( repository, storage, diff --git a/borgmatic/borg/check.py b/borgmatic/borg/check.py index d9beaa60..2914d83b 100644 --- a/borgmatic/borg/check.py +++ b/borgmatic/borg/check.py @@ -12,7 +12,7 @@ DEFAULT_CHECKS = ( {'name': 'repository', 'frequency': '1 month'}, {'name': 'archives', 'frequency': '1 month'}, ) -DEFAULT_PREFIX = '{hostname}-' +DEFAULT_PREFIX = '{hostname}-' # noqa: FS003 logger = logging.getLogger(__name__) @@ -196,7 +196,7 @@ def make_check_flags(local_borg_version, 
checks, check_last=None, prefix=None): return common_flags return ( - tuple('--{}-only'.format(check) for check in checks if check in ('repository', 'archives')) + tuple(f'--{check}-only' for check in checks if check in ('repository', 'archives')) + common_flags ) diff --git a/borgmatic/borg/create.py b/borgmatic/borg/create.py index 87a0fdd7..d557a6ab 100644 --- a/borgmatic/borg/create.py +++ b/borgmatic/borg/create.py @@ -217,7 +217,7 @@ def make_list_filter_flags(local_borg_version, dry_run): return f'{base_flags}-' -DEFAULT_ARCHIVE_NAME_FORMAT = '{hostname}-{now:%Y-%m-%dT%H:%M:%S.%f}' +DEFAULT_ARCHIVE_NAME_FORMAT = '{hostname}-{now:%Y-%m-%dT%H:%M:%S.%f}' # noqa: FS003 def collect_borgmatic_source_directories(borgmatic_source_directory): diff --git a/borgmatic/borg/export_tar.py b/borgmatic/borg/export_tar.py index 43ea4ac0..01d1b7ed 100644 --- a/borgmatic/borg/export_tar.py +++ b/borgmatic/borg/export_tar.py @@ -56,7 +56,7 @@ def export_tar_archive( output_log_level = logging.INFO if dry_run: - logging.info('{}: Skipping export to tar file (dry run)'.format(repository)) + logging.info(f'{repository}: Skipping export to tar file (dry run)') return execute_command( diff --git a/borgmatic/borg/flags.py b/borgmatic/borg/flags.py index 81b6a6b1..845e0ff3 100644 --- a/borgmatic/borg/flags.py +++ b/borgmatic/borg/flags.py @@ -10,7 +10,7 @@ def make_flags(name, value): if not value: return () - flag = '--{}'.format(name.replace('_', '-')) + flag = f"--{name.replace('_', '-')}" if value is True: return (flag,) diff --git a/borgmatic/borg/list.py b/borgmatic/borg/list.py index fedd3650..916d17b0 100644 --- a/borgmatic/borg/list.py +++ b/borgmatic/borg/list.py @@ -113,7 +113,7 @@ def capture_archive_listing( paths=[f'sh:{list_path}'], find_paths=None, json=None, - format='{path}{NL}', + format='{path}{NL}', # noqa: FS003 ), local_path, remote_path, diff --git a/borgmatic/borg/prune.py b/borgmatic/borg/prune.py index e53197f1..5be85de2 100644 --- a/borgmatic/borg/prune.py +++ b/borgmatic/borg/prune.py @@ -24,7 +24,7 @@ def make_prune_flags(retention_config, local_borg_version): ) ''' config = retention_config.copy() - prefix = config.pop('prefix', '{hostname}-') + prefix = config.pop('prefix', '{hostname}-') # noqa: FS003 if prefix: if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version): diff --git a/borgmatic/borg/rlist.py b/borgmatic/borg/rlist.py index 33404f9e..43bc28d6 100644 --- a/borgmatic/borg/rlist.py +++ b/borgmatic/borg/rlist.py @@ -42,7 +42,7 @@ def resolve_archive_name( except IndexError: raise ValueError('No archives found in the repository') - logger.debug('{}: Latest archive is {}'.format(repository, latest_archive)) + logger.debug(f'{repository}: Latest archive is {latest_archive}') return latest_archive diff --git a/borgmatic/commands/arguments.py b/borgmatic/commands/arguments.py index d5dc6af4..773a0bae 100644 --- a/borgmatic/commands/arguments.py +++ b/borgmatic/commands/arguments.py @@ -131,9 +131,7 @@ def make_parsers(): nargs='*', dest='config_paths', default=config_paths, - help='Configuration filenames or directories, defaults to: {}'.format( - ' '.join(unexpanded_config_paths) - ), + help=f"Configuration filenames or directories, defaults to: {' '.join(unexpanded_config_paths)}", ) global_group.add_argument( '--excludes', diff --git a/borgmatic/commands/borgmatic.py b/borgmatic/commands/borgmatic.py index fbea260d..73bde94b 100644 --- a/borgmatic/commands/borgmatic.py +++ b/borgmatic/commands/borgmatic.py @@ -70,9 +70,7 @@ def 
run_configuration(config_filename, config, arguments): try: local_borg_version = borg_version.local_borg_version(storage, local_path) except (OSError, CalledProcessError, ValueError) as error: - yield from log_error_records( - '{}: Error getting local Borg version'.format(config_filename), error - ) + yield from log_error_records(f'{config_filename}: Error getting local Borg version', error) return try: @@ -100,7 +98,7 @@ def run_configuration(config_filename, config, arguments): return encountered_error = error - yield from log_error_records('{}: Error pinging monitor'.format(config_filename), error) + yield from log_error_records(f'{config_filename}: Error pinging monitor', error) if not encountered_error: repo_queue = Queue() @@ -132,7 +130,7 @@ def run_configuration(config_filename, config, arguments): repo_queue.put((repository_path, retry_num + 1),) tuple( # Consume the generator so as to trigger logging. log_error_records( - '{}: Error running actions for repository'.format(repository_path), + f'{repository_path}: Error running actions for repository', error, levelno=logging.WARNING, log_command_error_output=True, @@ -147,7 +145,7 @@ def run_configuration(config_filename, config, arguments): return yield from log_error_records( - '{}: Error running actions for repository'.format(repository_path), error + f'{repository_path}: Error running actions for repository', error ) encountered_error = error error_repository = repository_path @@ -169,7 +167,7 @@ def run_configuration(config_filename, config, arguments): return encountered_error = error - yield from log_error_records('{}: Error pinging monitor'.format(config_filename), error) + yield from log_error_records(f'{repository_path}: Error pinging monitor', error) if not encountered_error: try: @@ -196,7 +194,7 @@ def run_configuration(config_filename, config, arguments): return encountered_error = error - yield from log_error_records('{}: Error pinging monitor'.format(config_filename), error) + yield from log_error_records(f'{config_filename}: Error pinging monitor', error) if encountered_error and using_primary_action: try: @@ -231,9 +229,7 @@ def run_configuration(config_filename, config, arguments): if command.considered_soft_failure(config_filename, error): return - yield from log_error_records( - '{}: Error running on-error hook'.format(config_filename), error - ) + yield from log_error_records(f'{config_filename}: Error running on-error hook', error) def run_actions( @@ -472,9 +468,7 @@ def load_configurations(config_filenames, overrides=None, resolve_env=True): dict( levelno=logging.WARNING, levelname='WARNING', - msg='{}: Insufficient permissions to read configuration file'.format( - config_filename - ), + msg=f'{config_filename}: Insufficient permissions to read configuration file', ) ), ] @@ -486,7 +480,7 @@ def load_configurations(config_filenames, overrides=None, resolve_env=True): dict( levelno=logging.CRITICAL, levelname='CRITICAL', - msg='{}: Error parsing configuration file'.format(config_filename), + msg=f'{config_filename}: Error parsing configuration file', ) ), logging.makeLogRecord( @@ -587,9 +581,7 @@ def collect_configuration_run_summary_logs(configs, arguments): if not configs: yield from log_error_records( - '{}: No valid configuration files found'.format( - ' '.join(arguments['global'].config_paths) - ) + r"{' '.join(arguments['global'].config_paths)}: No valid configuration files found", ) return @@ -615,21 +607,21 @@ def collect_configuration_run_summary_logs(configs, arguments): error_logs = tuple(result 
for result in results if isinstance(result, logging.LogRecord)) if error_logs: - yield from log_error_records('{}: An error occurred'.format(config_filename)) + yield from log_error_records(f'{config_filename}: An error occurred') yield from error_logs else: yield logging.makeLogRecord( dict( levelno=logging.INFO, levelname='INFO', - msg='{}: Successfully ran configuration file'.format(config_filename), + msg=f'{config_filename}: Successfully ran configuration file', ) ) if results: json_results.extend(results) if 'umount' in arguments: - logger.info('Unmounting mount point {}'.format(arguments['umount'].mount_point)) + logger.info(f"Unmounting mount point {arguments['umount'].mount_point}") try: borg_umount.unmount_archive( mount_point=arguments['umount'].mount_point, local_path=get_local_path(configs), @@ -677,7 +669,7 @@ def main(): # pragma: no cover if error.code == 0: raise error configure_logging(logging.CRITICAL) - logger.critical('Error parsing arguments: {}'.format(' '.join(sys.argv))) + logger.critical(f"Error parsing arguments: {' '.join(sys.argv)}") exit_with_help_link() global_arguments = arguments['global'] @@ -710,7 +702,7 @@ def main(): # pragma: no cover ) except (FileNotFoundError, PermissionError) as error: configure_logging(logging.CRITICAL) - logger.critical('Error configuring logging: {}'.format(error)) + logger.critical(f'Error configuring logging: {error}') exit_with_help_link() logger.debug('Ensuring legacy configuration is upgraded') diff --git a/borgmatic/commands/completion.py b/borgmatic/commands/completion.py index 0ff1f3e5..1fc976bc 100644 --- a/borgmatic/commands/completion.py +++ b/borgmatic/commands/completion.py @@ -34,7 +34,7 @@ def bash_completion(): ' local this_script="$(cat "$BASH_SOURCE" 2> /dev/null)"', ' local installed_script="$(borgmatic --bash-completion 2> /dev/null)"', ' if [ "$this_script" != "$installed_script" ] && [ "$installed_script" != "" ];' - ' then cat << EOF\n%s\nEOF' % UPGRADE_MESSAGE, + f' then cat << EOF\n{UPGRADE_MESSAGE}\nEOF', ' fi', '}', 'complete_borgmatic() {', @@ -48,7 +48,7 @@ def bash_completion(): for action, subparser in subparsers.choices.items() ) + ( - ' COMPREPLY=($(compgen -W "%s %s" -- "${COMP_WORDS[COMP_CWORD]}"))' + ' COMPREPLY=($(compgen -W "%s %s" -- "${COMP_WORDS[COMP_CWORD]}"))' # noqa: FS003 % (actions, global_flags), ' (check_version &)', '}', diff --git a/borgmatic/commands/convert_config.py b/borgmatic/commands/convert_config.py index 093d4e37..64a89486 100644 --- a/borgmatic/commands/convert_config.py +++ b/borgmatic/commands/convert_config.py @@ -28,9 +28,7 @@ def parse_arguments(*arguments): '--source-config', dest='source_config_filename', default=DEFAULT_SOURCE_CONFIG_FILENAME, - help='Source INI-style configuration filename. Default: {}'.format( - DEFAULT_SOURCE_CONFIG_FILENAME - ), + help=f'Source INI-style configuration filename. Default: {DEFAULT_SOURCE_CONFIG_FILENAME}', ) parser.add_argument( '-e', @@ -46,9 +44,7 @@ def parse_arguments(*arguments): '--destination-config', dest='destination_config_filename', default=DEFAULT_DESTINATION_CONFIG_FILENAME, - help='Destination YAML configuration filename. Default: {}'.format( - DEFAULT_DESTINATION_CONFIG_FILENAME - ), + help=f'Destination YAML configuration filename. Default: {DEFAULT_DESTINATION_CONFIG_FILENAME}', ) return parser.parse_args(arguments) @@ -59,19 +55,15 @@ TEXT_WRAP_CHARACTERS = 80 def display_result(args): # pragma: no cover result_lines = textwrap.wrap( - 'Your borgmatic configuration has been upgraded. 
Please review the result in {}.'.format( - args.destination_config_filename - ), + f'Your borgmatic configuration has been upgraded. Please review the result in {args.destination_config_filename}.', TEXT_WRAP_CHARACTERS, ) + excludes_phrase = ( + f' and {args.source_excludes_filename}' if args.source_excludes_filename else '' + ) delete_lines = textwrap.wrap( - 'Once you are satisfied, you can safely delete {}{}.'.format( - args.source_config_filename, - ' and {}'.format(args.source_excludes_filename) - if args.source_excludes_filename - else '', - ), + f'Once you are satisfied, you can safely delete {args.source_config_filename}{excludes_phrase}.', TEXT_WRAP_CHARACTERS, ) diff --git a/borgmatic/commands/generate_config.py b/borgmatic/commands/generate_config.py index 13a5cbaa..78c32f04 100644 --- a/borgmatic/commands/generate_config.py +++ b/borgmatic/commands/generate_config.py @@ -23,9 +23,7 @@ def parse_arguments(*arguments): '--destination', dest='destination_filename', default=DEFAULT_DESTINATION_CONFIG_FILENAME, - help='Destination YAML configuration file, default: {}'.format( - DEFAULT_DESTINATION_CONFIG_FILENAME - ), + help=f'Destination YAML configuration file, default: {DEFAULT_DESTINATION_CONFIG_FILENAME}', ) parser.add_argument( '--overwrite', @@ -48,17 +46,13 @@ def main(): # pragma: no cover overwrite=args.overwrite, ) - print('Generated a sample configuration file at {}.'.format(args.destination_filename)) + print(f'Generated a sample configuration file at {args.destination_filename}.') print() if args.source_filename: - print( - 'Merged in the contents of configuration file at {}.'.format(args.source_filename) - ) + print(f'Merged in the contents of configuration file at {args.source_filename}.') print('To review the changes made, run:') print() - print( - ' diff --unified {} {}'.format(args.source_filename, args.destination_filename) - ) + print(f' diff --unified {args.source_filename} {args.destination_filename}') print() print('This includes all available configuration options with example values. The few') print('required options are indicated. 
Please edit the file to suit your needs.') diff --git a/borgmatic/commands/validate_config.py b/borgmatic/commands/validate_config.py index 00ea9f45..44c0082a 100644 --- a/borgmatic/commands/validate_config.py +++ b/borgmatic/commands/validate_config.py @@ -21,9 +21,7 @@ def parse_arguments(*arguments): nargs='+', dest='config_paths', default=config_paths, - help='Configuration filenames or directories, defaults to: {}'.format( - ' '.join(config_paths) - ), + help=f'Configuration filenames or directories, defaults to: {config_paths}', ) return parser.parse_args(arguments) @@ -44,13 +42,11 @@ def main(): # pragma: no cover try: validate.parse_configuration(config_filename, validate.schema_filename()) except (ValueError, OSError, validate.Validation_error) as error: - logging.critical('{}: Error parsing configuration file'.format(config_filename)) + logging.critical(f'{config_filename}: Error parsing configuration file') logging.critical(error) found_issues = True if found_issues: sys.exit(1) else: - logger.info( - 'All given configuration files are valid: {}'.format(', '.join(config_filenames)) - ) + logger.info(f"All given configuration files are valid: {', '.join(config_filenames)}") diff --git a/borgmatic/config/collect.py b/borgmatic/config/collect.py index a13472ee..bd38fee2 100644 --- a/borgmatic/config/collect.py +++ b/borgmatic/config/collect.py @@ -16,8 +16,8 @@ def get_default_config_paths(expand_home=True): return [ '/etc/borgmatic/config.yaml', '/etc/borgmatic.d', - '%s/borgmatic/config.yaml' % user_config_directory, - '%s/borgmatic.d' % user_config_directory, + os.path.join(user_config_directory, 'borgmatic/config.yaml'), + os.path.join(user_config_directory, 'borgmatic.d'), ] diff --git a/borgmatic/config/environment.py b/borgmatic/config/environment.py index 3a58566f..a2857bbf 100644 --- a/borgmatic/config/environment.py +++ b/borgmatic/config/environment.py @@ -14,11 +14,14 @@ def _resolve_string(matcher): if matcher.group('escape') is not None: # in case of escaped envvar, unescape it return matcher.group('variable') + # resolve the env var name, default = matcher.group('name'), matcher.group('default') out = os.getenv(name, default=default) + if out is None: - raise ValueError('Cannot find variable ${name} in environment'.format(name=name)) + raise ValueError(f'Cannot find variable {name} in environment') + return out diff --git a/borgmatic/config/generate.py b/borgmatic/config/generate.py index e864a3c0..d486f23c 100644 --- a/borgmatic/config/generate.py +++ b/borgmatic/config/generate.py @@ -48,7 +48,7 @@ def _schema_to_sample_configuration(schema, level=0, parent_is_sequence=False): config, schema, indent=indent, skip_first=parent_is_sequence ) else: - raise ValueError('Schema at level {} is unsupported: {}'.format(level, schema)) + raise ValueError(f'Schema at level {level} is unsupported: {schema}') return config @@ -84,7 +84,7 @@ def _comment_out_optional_configuration(rendered_config): for line in rendered_config.split('\n'): # Upon encountering an optional configuration option, comment out lines until the next blank # line. - if line.strip().startswith('# {}'.format(COMMENTED_OUT_SENTINEL)): + if line.strip().startswith(f'# {COMMENTED_OUT_SENTINEL}'): optional = True continue @@ -117,9 +117,7 @@ def write_configuration(config_filename, rendered_config, mode=0o600, overwrite= ''' if not overwrite and os.path.exists(config_filename): raise FileExistsError( - '{} already exists. Aborting. 
Use --overwrite to replace the file.'.format( - config_filename - ) + f'{config_filename} already exists. Aborting. Use --overwrite to replace the file.' ) try: @@ -218,7 +216,7 @@ def remove_commented_out_sentinel(config, field_name): except KeyError: return - if last_comment_value == '# {}\n'.format(COMMENTED_OUT_SENTINEL): + if last_comment_value == f'# {COMMENTED_OUT_SENTINEL}\n': config.ca.items[field_name][RUAMEL_YAML_COMMENTS_INDEX].pop() diff --git a/borgmatic/config/legacy.py b/borgmatic/config/legacy.py index 91352784..ec1e50a1 100644 --- a/borgmatic/config/legacy.py +++ b/borgmatic/config/legacy.py @@ -70,13 +70,11 @@ def validate_configuration_format(parser, config_format): section_format.name for section_format in config_format ) if unknown_section_names: - raise ValueError( - 'Unknown config sections found: {}'.format(', '.join(unknown_section_names)) - ) + raise ValueError(f"Unknown config sections found: {', '.join(unknown_section_names)}") missing_section_names = set(required_section_names) - section_names if missing_section_names: - raise ValueError('Missing config sections: {}'.format(', '.join(missing_section_names))) + raise ValueError(f"Missing config sections: {', '.join(missing_section_names)}") for section_format in config_format: if section_format.name not in section_names: @@ -91,9 +89,7 @@ def validate_configuration_format(parser, config_format): if unexpected_option_names: raise ValueError( - 'Unexpected options found in config section {}: {}'.format( - section_format.name, ', '.join(sorted(unexpected_option_names)) - ) + f"Unexpected options found in config section {section_format.name}: {', '.join(sorted(unexpected_option_names))}", ) missing_option_names = tuple( @@ -105,9 +101,7 @@ def validate_configuration_format(parser, config_format): if missing_option_names: raise ValueError( - 'Required options missing from config section {}: {}'.format( - section_format.name, ', '.join(missing_option_names) - ) + f"Required options missing from config section {section_format.name}: {', '.join(missing_option_names)}", ) @@ -137,7 +131,7 @@ def parse_configuration(config_filename, config_format): ''' parser = RawConfigParser() if not parser.read(config_filename): - raise ValueError('Configuration file cannot be opened: {}'.format(config_filename)) + raise ValueError(f'Configuration file cannot be opened: {config_filename}') validate_configuration_format(parser, config_format) diff --git a/borgmatic/config/validate.py b/borgmatic/config/validate.py index 5828380e..10da19ce 100644 --- a/borgmatic/config/validate.py +++ b/borgmatic/config/validate.py @@ -20,9 +20,9 @@ def format_json_error_path_element(path_element): Given a path element into a JSON data structure, format it for display as a string. ''' if isinstance(path_element, int): - return str('[{}]'.format(path_element)) + return str(f'[{path_element}]') - return str('.{}'.format(path_element)) + return str(f'.{path_element}') def format_json_error(error): @@ -30,10 +30,10 @@ def format_json_error(error): Given an instance of jsonschema.exceptions.ValidationError, format it for display as a string. 
''' if not error.path: - return 'At the top level: {}'.format(error.message) + return f'At the top level: {error.message}' formatted_path = ''.join(format_json_error_path_element(element) for element in error.path) - return "At '{}': {}".format(formatted_path.lstrip('.'), error.message) + return f"At '{formatted_path.lstrip('.')}': {error.message}" class Validation_error(ValueError): @@ -54,9 +54,10 @@ class Validation_error(ValueError): ''' Render a validation error as a user-facing string. ''' - return 'An error occurred while parsing a configuration file at {}:\n'.format( - self.config_filename - ) + '\n'.join(error for error in self.errors) + return ( + f'An error occurred while parsing a configuration file at {self.config_filename}:\n' + + '\n'.join(error for error in self.errors) + ) def apply_logical_validation(config_filename, parsed_configuration): @@ -72,9 +73,7 @@ def apply_logical_validation(config_filename, parsed_configuration): raise Validation_error( config_filename, ( - 'Unknown repository in the "consistency" section\'s "check_repositories": {}'.format( - repository - ), + f'Unknown repository in the "consistency" section\'s "check_repositories": {repository}', ), ) @@ -165,9 +164,9 @@ def guard_configuration_contains_repository(repository, configurations): ) if count == 0: - raise ValueError('Repository {} not found in configuration files'.format(repository)) + raise ValueError(f'Repository {repository} not found in configuration files') if count > 1: - raise ValueError('Repository {} found in multiple configuration files'.format(repository)) + raise ValueError(f'Repository {repository} found in multiple configuration files') def guard_single_repository_selected(repository, configurations): diff --git a/borgmatic/execute.py b/borgmatic/execute.py index a01e1a13..53d1a098 100644 --- a/borgmatic/execute.py +++ b/borgmatic/execute.py @@ -154,8 +154,8 @@ def log_command(full_command, input_file=None, output_file=None): ''' logger.debug( ' '.join(full_command) - + (' < {}'.format(getattr(input_file, 'name', '')) if input_file else '') - + (' > {}'.format(getattr(output_file, 'name', '')) if output_file else '') + + (f" < {getattr(input_file, 'name', '')}" if input_file else '') + + (f" > {getattr(output_file, 'name', '')}" if output_file else '') ) @@ -235,12 +235,12 @@ def execute_command_and_capture_output( env=environment, cwd=working_directory, ) - logger.warning('Command output: {}'.format(output)) + logger.warning(f'Command output: {output}') except subprocess.CalledProcessError as error: if exit_code_indicates_error(command, error.returncode): raise output = error.output - logger.warning('Command output: {}'.format(output)) + logger.warning(f'Command output: {output}') return output.decode() if output is not None else None diff --git a/borgmatic/hooks/command.py b/borgmatic/hooks/command.py index 756f8779..05f7d2f7 100644 --- a/borgmatic/hooks/command.py +++ b/borgmatic/hooks/command.py @@ -16,7 +16,7 @@ def interpolate_context(config_filename, hook_description, command, context): names/values, interpolate the values by "{name}" into the command and return the result. ''' for name, value in context.items(): - command = command.replace('{%s}' % name, str(value)) + command = command.replace(f'{{{name}}}', str(value)) for unsupported_variable in re.findall(r'{\w+}', command): logger.warning( @@ -38,7 +38,7 @@ def execute_hook(commands, umask, config_filename, description, dry_run, **conte Raise subprocesses.CalledProcessError if an error occurs in a hook. 
''' if not commands: - logger.debug('{}: No commands to run for {} hook'.format(config_filename, description)) + logger.debug(f'{config_filename}: No commands to run for {description} hook') return dry_run_label = ' (dry run; not actually running hooks)' if dry_run else '' @@ -49,19 +49,15 @@ def execute_hook(commands, umask, config_filename, description, dry_run, **conte ] if len(commands) == 1: - logger.info( - '{}: Running command for {} hook{}'.format(config_filename, description, dry_run_label) - ) + logger.info(f'{config_filename}: Running command for {description} hook{dry_run_label}') else: logger.info( - '{}: Running {} commands for {} hook{}'.format( - config_filename, len(commands), description, dry_run_label - ) + f'{config_filename}: Running {len(commands)} commands for {description} hook{dry_run_label}', ) if umask: parsed_umask = int(str(umask), 8) - logger.debug('{}: Set hook umask to {}'.format(config_filename, oct(parsed_umask))) + logger.debug(f'{config_filename}: Set hook umask to {oct(parsed_umask)}') original_umask = os.umask(parsed_umask) else: original_umask = None @@ -93,9 +89,7 @@ def considered_soft_failure(config_filename, error): if exit_code == SOFT_FAIL_EXIT_CODE: logger.info( - '{}: Command hook exited with soft failure exit code ({}); skipping remaining actions'.format( - config_filename, SOFT_FAIL_EXIT_CODE - ) + f'{config_filename}: Command hook exited with soft failure exit code ({SOFT_FAIL_EXIT_CODE}); skipping remaining actions', ) return True diff --git a/borgmatic/hooks/cronhub.py b/borgmatic/hooks/cronhub.py index cd0ffa5c..05ada575 100644 --- a/borgmatic/hooks/cronhub.py +++ b/borgmatic/hooks/cronhub.py @@ -34,17 +34,15 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_ return dry_run_label = ' (dry run; not actually pinging)' if dry_run else '' - formatted_state = '/{}/'.format(MONITOR_STATE_TO_CRONHUB[state]) + formatted_state = f'/{MONITOR_STATE_TO_CRONHUB[state]}/' ping_url = ( hook_config['ping_url'] .replace('/start/', formatted_state) .replace('/ping/', formatted_state) ) - logger.info( - '{}: Pinging Cronhub {}{}'.format(config_filename, state.name.lower(), dry_run_label) - ) - logger.debug('{}: Using Cronhub ping URL {}'.format(config_filename, ping_url)) + logger.info(f'{config_filename}: Pinging Cronhub {state.name.lower()}{dry_run_label}') + logger.debug(f'{config_filename}: Using Cronhub ping URL {ping_url}') if not dry_run: logging.getLogger('urllib3').setLevel(logging.ERROR) diff --git a/borgmatic/hooks/cronitor.py b/borgmatic/hooks/cronitor.py index 633b4c3c..d669c09d 100644 --- a/borgmatic/hooks/cronitor.py +++ b/borgmatic/hooks/cronitor.py @@ -34,12 +34,10 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_ return dry_run_label = ' (dry run; not actually pinging)' if dry_run else '' - ping_url = '{}/{}'.format(hook_config['ping_url'], MONITOR_STATE_TO_CRONITOR[state]) + ping_url = f"{hook_config['ping_url']}/{MONITOR_STATE_TO_CRONITOR[state]}" - logger.info( - '{}: Pinging Cronitor {}{}'.format(config_filename, state.name.lower(), dry_run_label) - ) - logger.debug('{}: Using Cronitor ping URL {}'.format(config_filename, ping_url)) + logger.info(f'{config_filename}: Pinging Cronitor {state.name.lower()}{dry_run_label}') + logger.debug(f'{config_filename}: Using Cronitor ping URL {ping_url}') if not dry_run: logging.getLogger('urllib3').setLevel(logging.ERROR) diff --git a/borgmatic/hooks/dispatch.py b/borgmatic/hooks/dispatch.py index 88a99eb1..fa7bd9b3 100644 --- 
a/borgmatic/hooks/dispatch.py +++ b/borgmatic/hooks/dispatch.py @@ -43,9 +43,9 @@ def call_hook(function_name, hooks, log_prefix, hook_name, *args, **kwargs): try: module = HOOK_NAME_TO_MODULE[hook_name] except KeyError: - raise ValueError('Unknown hook name: {}'.format(hook_name)) + raise ValueError(f'Unknown hook name: {hook_name}') - logger.debug('{}: Calling {} hook function {}'.format(log_prefix, hook_name, function_name)) + logger.debug(f'{log_prefix}: Calling {hook_name} hook function {function_name}') return getattr(module, function_name)(config, log_prefix, *args, **kwargs) diff --git a/borgmatic/hooks/dump.py b/borgmatic/hooks/dump.py index 43686d36..015ed696 100644 --- a/borgmatic/hooks/dump.py +++ b/borgmatic/hooks/dump.py @@ -33,7 +33,7 @@ def make_database_dump_filename(dump_path, name, hostname=None): Raise ValueError if the database name is invalid. ''' if os.path.sep in name: - raise ValueError('Invalid database name {}'.format(name)) + raise ValueError(f'Invalid database name {name}') return os.path.join(os.path.expanduser(dump_path), hostname or 'localhost', name) @@ -60,9 +60,7 @@ def remove_database_dumps(dump_path, database_type_name, log_prefix, dry_run): ''' dry_run_label = ' (dry run; not actually removing anything)' if dry_run else '' - logger.debug( - '{}: Removing {} database dumps{}'.format(log_prefix, database_type_name, dry_run_label) - ) + logger.debug(f'{log_prefix}: Removing {database_type_name} database dumps{dry_run_label}') expanded_path = os.path.expanduser(dump_path) @@ -78,4 +76,4 @@ def convert_glob_patterns_to_borg_patterns(patterns): Convert a sequence of shell glob patterns like "/etc/*" to the corresponding Borg archive patterns like "sh:etc/*". ''' - return ['sh:{}'.format(pattern.lstrip(os.path.sep)) for pattern in patterns] + return [f'sh:{pattern.lstrip(os.path.sep)}' for pattern in patterns] diff --git a/borgmatic/hooks/healthchecks.py b/borgmatic/hooks/healthchecks.py index 6ad8449f..4cafc49f 100644 --- a/borgmatic/hooks/healthchecks.py +++ b/borgmatic/hooks/healthchecks.py @@ -99,7 +99,7 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_ ping_url = ( hook_config['ping_url'] if hook_config['ping_url'].startswith('http') - else 'https://hc-ping.com/{}'.format(hook_config['ping_url']) + else f"https://hc-ping.com/{hook_config['ping_url']}" ) dry_run_label = ' (dry run; not actually pinging)' if dry_run else '' @@ -111,12 +111,10 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_ healthchecks_state = MONITOR_STATE_TO_HEALTHCHECKS.get(state) if healthchecks_state: - ping_url = '{}/{}'.format(ping_url, healthchecks_state) + ping_url = f'{ping_url}/{healthchecks_state}' - logger.info( - '{}: Pinging Healthchecks {}{}'.format(config_filename, state.name.lower(), dry_run_label) - ) - logger.debug('{}: Using Healthchecks ping URL {}'.format(config_filename, ping_url)) + logger.info(f'{config_filename}: Pinging Healthchecks {state.name.lower()}{dry_run_label}') + logger.debug(f'{config_filename}: Using Healthchecks ping URL {ping_url}') if state in (monitor.State.FINISH, monitor.State.FAIL, monitor.State.LOG): payload = format_buffered_logs_for_payload() diff --git a/borgmatic/hooks/mongodb.py b/borgmatic/hooks/mongodb.py index 8c3cab74..be5f656b 100644 --- a/borgmatic/hooks/mongodb.py +++ b/borgmatic/hooks/mongodb.py @@ -27,7 +27,7 @@ def dump_databases(databases, log_prefix, location_config, dry_run): ''' dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else '' 
- logger.info('{}: Dumping MongoDB databases{}'.format(log_prefix, dry_run_label)) + logger.info(f'{log_prefix}: Dumping MongoDB databases{dry_run_label}') processes = [] for database in databases: @@ -38,9 +38,7 @@ def dump_databases(databases, log_prefix, location_config, dry_run): dump_format = database.get('format', 'archive') logger.debug( - '{}: Dumping MongoDB database {} to {}{}'.format( - log_prefix, name, dump_filename, dry_run_label - ) + f'{log_prefix}: Dumping MongoDB database {name} to {dump_filename}{dry_run_label}', ) if dry_run: continue @@ -126,9 +124,7 @@ def restore_database_dump(database_config, log_prefix, location_config, dry_run, ) restore_command = build_restore_command(extract_process, database, dump_filename) - logger.debug( - '{}: Restoring MongoDB database {}{}'.format(log_prefix, database['name'], dry_run_label) - ) + logger.debug(f"{log_prefix}: Restoring MongoDB database {database['name']}{dry_run_label}") if dry_run: return diff --git a/borgmatic/hooks/mysql.py b/borgmatic/hooks/mysql.py index e53b8967..0bf97745 100644 --- a/borgmatic/hooks/mysql.py +++ b/borgmatic/hooks/mysql.py @@ -119,7 +119,7 @@ def dump_databases(databases, log_prefix, location_config, dry_run): dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else '' processes = [] - logger.info('{}: Dumping MySQL databases{}'.format(log_prefix, dry_run_label)) + logger.info(f'{log_prefix}: Dumping MySQL databases{dry_run_label}') for database in databases: dump_path = make_dump_path(location_config) @@ -209,9 +209,7 @@ def restore_database_dump(database_config, log_prefix, location_config, dry_run, ) extra_environment = {'MYSQL_PWD': database['password']} if 'password' in database else None - logger.debug( - '{}: Restoring MySQL database {}{}'.format(log_prefix, database['name'], dry_run_label) - ) + logger.debug(f"{log_prefix}: Restoring MySQL database {database['name']}{dry_run_label}") if dry_run: return diff --git a/borgmatic/hooks/pagerduty.py b/borgmatic/hooks/pagerduty.py index fbb67fbf..561b1e24 100644 --- a/borgmatic/hooks/pagerduty.py +++ b/borgmatic/hooks/pagerduty.py @@ -29,14 +29,12 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_ ''' if state != monitor.State.FAIL: logger.debug( - '{}: Ignoring unsupported monitoring {} in PagerDuty hook'.format( - config_filename, state.name.lower() - ) + f'{config_filename}: Ignoring unsupported monitoring {state.name.lower()} in PagerDuty hook', ) return dry_run_label = ' (dry run; not actually sending)' if dry_run else '' - logger.info('{}: Sending failure event to PagerDuty {}'.format(config_filename, dry_run_label)) + logger.info(f'{config_filename}: Sending failure event to PagerDuty {dry_run_label}') if dry_run: return @@ -50,7 +48,7 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_ 'routing_key': hook_config['integration_key'], 'event_action': 'trigger', 'payload': { - 'summary': 'backup failed on {}'.format(hostname), + 'summary': f'backup failed on {hostname}', 'severity': 'error', 'source': hostname, 'timestamp': local_timestamp, @@ -65,7 +63,7 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_ }, } ) - logger.debug('{}: Using PagerDuty payload: {}'.format(config_filename, payload)) + logger.debug(f'{config_filename}: Using PagerDuty payload: {payload}') logging.getLogger('urllib3').setLevel(logging.ERROR) try: diff --git a/borgmatic/hooks/postgresql.py b/borgmatic/hooks/postgresql.py index 3d3676fe..d4799f5f 100644 --- 
a/borgmatic/hooks/postgresql.py +++ b/borgmatic/hooks/postgresql.py @@ -93,7 +93,7 @@ def dump_databases(databases, log_prefix, location_config, dry_run): dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else '' processes = [] - logger.info('{}: Dumping PostgreSQL databases{}'.format(log_prefix, dry_run_label)) + logger.info(f'{log_prefix}: Dumping PostgreSQL databases{dry_run_label}') for database in databases: extra_environment = make_extra_environment(database) @@ -228,9 +228,7 @@ def restore_database_dump(database_config, log_prefix, location_config, dry_run, ) extra_environment = make_extra_environment(database) - logger.debug( - '{}: Restoring PostgreSQL database {}{}'.format(log_prefix, database['name'], dry_run_label) - ) + logger.debug(f"{log_prefix}: Restoring PostgreSQL database {database['name']}{dry_run_label}") if dry_run: return diff --git a/borgmatic/hooks/sqlite.py b/borgmatic/hooks/sqlite.py index 9e7ecf37..d9f105d8 100644 --- a/borgmatic/hooks/sqlite.py +++ b/borgmatic/hooks/sqlite.py @@ -26,7 +26,7 @@ def dump_databases(databases, log_prefix, location_config, dry_run): dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else '' processes = [] - logger.info('{}: Dumping SQLite databases{}'.format(log_prefix, dry_run_label)) + logger.info(f'{log_prefix}: Dumping SQLite databases{dry_run_label}') for database in databases: database_path = database['path'] diff --git a/borgmatic/logger.py b/borgmatic/logger.py index 0916bfa9..e098bf96 100644 --- a/borgmatic/logger.py +++ b/borgmatic/logger.py @@ -108,7 +108,7 @@ def color_text(color, message): if not color: return message - return '{}{}{}'.format(color, message, colorama.Style.RESET_ALL) + return f'{color}{message}{colorama.Style.RESET_ALL}' def add_logging_level(level_name, level_number): diff --git a/test_requirements.txt b/test_requirements.txt index 9cae8fb4..d34bc623 100644 --- a/test_requirements.txt +++ b/test_requirements.txt @@ -6,6 +6,8 @@ colorama==0.4.4 coverage==5.3 flake8==4.0.1 flake8-quotes==3.3.2 +flake8-use-fstring==1.4 +flake8-variables-names==0.0.5 flexmock==0.10.4 isort==5.9.1 mccabe==0.6.1 diff --git a/tests/end-to-end/test_borgmatic.py b/tests/end-to-end/test_borgmatic.py index c2d10291..de38bbd0 100644 --- a/tests/end-to-end/test_borgmatic.py +++ b/tests/end-to-end/test_borgmatic.py @@ -12,17 +12,15 @@ def generate_configuration(config_path, repository_path): to work for testing (including injecting the given repository path and tacking on an encryption passphrase). 
''' - subprocess.check_call( - 'generate-borgmatic-config --destination {}'.format(config_path).split(' ') - ) + subprocess.check_call(f'generate-borgmatic-config --destination {config_path}'.split(' ')) config = ( open(config_path) .read() .replace('ssh://user@backupserver/./sourcehostname.borg', repository_path) - .replace('- ssh://user@backupserver/./{fqdn}', '') + .replace('- ssh://user@backupserver/./{fqdn}', '') # noqa: FS003 .replace('- /var/local/backups/local.borg', '') .replace('- /home/user/path with spaces', '') - .replace('- /home', '- {}'.format(config_path)) + .replace('- /home', f'- {config_path}') .replace('- /etc', '') .replace('- /var/log/syslog*', '') + 'storage:\n encryption_passphrase: "test"' @@ -47,13 +45,13 @@ def test_borgmatic_command(): generate_configuration(config_path, repository_path) subprocess.check_call( - 'borgmatic -v 2 --config {} init --encryption repokey'.format(config_path).split(' ') + f'borgmatic -v 2 --config {config_path} init --encryption repokey'.split(' ') ) # Run borgmatic to generate a backup archive, and then list it to make sure it exists. - subprocess.check_call('borgmatic --config {}'.format(config_path).split(' ')) + subprocess.check_call(f'borgmatic --config {config_path}'.split(' ')) output = subprocess.check_output( - 'borgmatic --config {} list --json'.format(config_path).split(' ') + f'borgmatic --config {config_path} list --json'.split(' ') ).decode(sys.stdout.encoding) parsed_output = json.loads(output) @@ -64,16 +62,14 @@ def test_borgmatic_command(): # Extract the created archive into the current (temporary) directory, and confirm that the # extracted file looks right. output = subprocess.check_output( - 'borgmatic --config {} extract --archive {}'.format(config_path, archive_name).split( - ' ' - ) + f'borgmatic --config {config_path} extract --archive {archive_name}'.split(' '), ).decode(sys.stdout.encoding) extracted_config_path = os.path.join(extract_path, config_path) assert open(extracted_config_path).read() == open(config_path).read() # Exercise the info action. output = subprocess.check_output( - 'borgmatic --config {} info --json'.format(config_path).split(' ') + f'borgmatic --config {config_path} info --json'.split(' '), ).decode(sys.stdout.encoding) parsed_output = json.loads(output) diff --git a/tests/end-to-end/test_database.py b/tests/end-to-end/test_database.py index 8849b3c8..30aea4a8 100644 --- a/tests/end-to-end/test_database.py +++ b/tests/end-to-end/test_database.py @@ -189,7 +189,7 @@ def test_database_dump_with_error_causes_borgmatic_to_exit(): '-v', '2', '--override', - "hooks.postgresql_databases=[{'name': 'nope'}]", + "hooks.postgresql_databases=[{'name': 'nope'}]", # noqa: FS003 ] ) finally: diff --git a/tests/end-to-end/test_override.py b/tests/end-to-end/test_override.py index 0a42018d..e86186d9 100644 --- a/tests/end-to-end/test_override.py +++ b/tests/end-to-end/test_override.py @@ -10,17 +10,15 @@ def generate_configuration(config_path, repository_path): to work for testing (including injecting the given repository path and tacking on an encryption passphrase). 
''' - subprocess.check_call( - 'generate-borgmatic-config --destination {}'.format(config_path).split(' ') - ) + subprocess.check_call(f'generate-borgmatic-config --destination {config_path}'.split(' ')) config = ( open(config_path) .read() .replace('ssh://user@backupserver/./sourcehostname.borg', repository_path) - .replace('- ssh://user@backupserver/./{fqdn}', '') + .replace('- ssh://user@backupserver/./{fqdn}', '') # noqa: FS003 .replace('- /var/local/backups/local.borg', '') .replace('- /home/user/path with spaces', '') - .replace('- /home', '- {}'.format(config_path)) + .replace('- /home', f'- {config_path}') .replace('- /etc', '') .replace('- /var/log/syslog*', '') + 'storage:\n encryption_passphrase: "test"' diff --git a/tests/end-to-end/test_validate_config.py b/tests/end-to-end/test_validate_config.py index 5de83a39..d41464e6 100644 --- a/tests/end-to-end/test_validate_config.py +++ b/tests/end-to-end/test_validate_config.py @@ -7,12 +7,8 @@ def test_validate_config_command_with_valid_configuration_succeeds(): with tempfile.TemporaryDirectory() as temporary_directory: config_path = os.path.join(temporary_directory, 'test.yaml') - subprocess.check_call( - 'generate-borgmatic-config --destination {}'.format(config_path).split(' ') - ) - exit_code = subprocess.call( - 'validate-borgmatic-config --config {}'.format(config_path).split(' ') - ) + subprocess.check_call(f'generate-borgmatic-config --destination {config_path}'.split(' ')) + exit_code = subprocess.call(f'validate-borgmatic-config --config {config_path}'.split(' ')) assert exit_code == 0 @@ -21,16 +17,12 @@ def test_validate_config_command_with_invalid_configuration_fails(): with tempfile.TemporaryDirectory() as temporary_directory: config_path = os.path.join(temporary_directory, 'test.yaml') - subprocess.check_call( - 'generate-borgmatic-config --destination {}'.format(config_path).split(' ') - ) + subprocess.check_call(f'generate-borgmatic-config --destination {config_path}'.split(' ')) config = open(config_path).read().replace('keep_daily: 7', 'keep_daily: "7"') config_file = open(config_path, 'w') config_file.write(config) config_file.close() - exit_code = subprocess.call( - 'validate-borgmatic-config --config {}'.format(config_path).split(' ') - ) + exit_code = subprocess.call(f'validate-borgmatic-config --config {config_path}'.split(' ')) assert exit_code == 1 diff --git a/tests/integration/config/test_legacy.py b/tests/integration/config/test_legacy.py index 870da886..c73e7eec 100644 --- a/tests/integration/config/test_legacy.py +++ b/tests/integration/config/test_legacy.py @@ -7,7 +7,7 @@ from borgmatic.config import legacy as module def test_parse_section_options_with_punctuation_should_return_section_options(): parser = module.RawConfigParser() - parser.read_file(StringIO('[section]\nfoo: {}\n'.format(string.punctuation))) + parser.read_file(StringIO(f'[section]\nfoo: {string.punctuation}\n')) section_format = module.Section_format( 'section', (module.Config_option('foo', str, required=True),) diff --git a/tests/unit/borg/test_create.py b/tests/unit/borg/test_create.py index 69a3ede2..5fb51f3c 100644 --- a/tests/unit/borg/test_create.py +++ b/tests/unit/borg/test_create.py @@ -449,7 +449,7 @@ def test_collect_special_file_paths_excludes_non_special_files(): ) == ('/foo', '/baz') -DEFAULT_ARCHIVE_NAME = '{hostname}-{now:%Y-%m-%dT%H:%M:%S.%f}' +DEFAULT_ARCHIVE_NAME = '{hostname}-{now:%Y-%m-%dT%H:%M:%S.%f}' # noqa: FS003 REPO_ARCHIVE_WITH_PATHS = (f'repo::{DEFAULT_ARCHIVE_NAME}', 'foo', 'bar') @@ -2193,7 +2193,7 @@ 
def test_create_archive_with_source_directories_glob_expands(): ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( - ('borg', 'create', 'repo::{}'.format(DEFAULT_ARCHIVE_NAME), 'foo', 'food'), + ('borg', 'create', f'repo::{DEFAULT_ARCHIVE_NAME}', 'foo', 'food'), output_log_level=logging.INFO, output_file=None, borg_local_path='borg', @@ -2236,7 +2236,7 @@ def test_create_archive_with_non_matching_source_directories_glob_passes_through ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( - ('borg', 'create', 'repo::{}'.format(DEFAULT_ARCHIVE_NAME), 'foo*'), + ('borg', 'create', f'repo::{DEFAULT_ARCHIVE_NAME}', 'foo*'), output_log_level=logging.INFO, output_file=None, borg_local_path='borg', @@ -2279,7 +2279,7 @@ def test_create_archive_with_glob_calls_borg_with_expanded_directories(): ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( - ('borg', 'create', 'repo::{}'.format(DEFAULT_ARCHIVE_NAME), 'foo', 'food'), + ('borg', 'create', f'repo::{DEFAULT_ARCHIVE_NAME}', 'foo', 'food'), output_log_level=logging.INFO, output_file=None, borg_local_path='borg', @@ -2345,7 +2345,7 @@ def test_create_archive_with_archive_name_format_calls_borg_with_archive_name(): def test_create_archive_with_archive_name_format_accepts_borg_placeholders(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - repository_archive_pattern = 'repo::Documents_{hostname}-{now}' + repository_archive_pattern = 'repo::Documents_{hostname}-{now}' # noqa: FS003 flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) flexmock(module).should_receive('map_directories_to_devices').and_return({}) @@ -2380,7 +2380,7 @@ def test_create_archive_with_archive_name_format_accepts_borg_placeholders(): 'repositories': ['repo'], 'exclude_patterns': None, }, - storage_config={'archive_name_format': 'Documents_{hostname}-{now}'}, + storage_config={'archive_name_format': 'Documents_{hostname}-{now}'}, # noqa: FS003 local_borg_version='1.2.3', ) @@ -2388,7 +2388,7 @@ def test_create_archive_with_archive_name_format_accepts_borg_placeholders(): def test_create_archive_with_repository_accepts_borg_placeholders(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - repository_archive_pattern = '{fqdn}::Documents_{hostname}-{now}' + repository_archive_pattern = '{fqdn}::Documents_{hostname}-{now}' # noqa: FS003 flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) flexmock(module).should_receive('map_directories_to_devices').and_return({}) @@ -2417,13 +2417,13 @@ def test_create_archive_with_repository_accepts_borg_placeholders(): module.create_archive( dry_run=False, - repository='{fqdn}', + repository='{fqdn}', # noqa: FS003 location_config={ 'source_directories': ['foo', 'bar'], - 'repositories': ['{fqdn}'], + 'repositories': ['{fqdn}'], # noqa: FS003 'exclude_patterns': None, }, - storage_config={'archive_name_format': 'Documents_{hostname}-{now}'}, + storage_config={'archive_name_format': 
'Documents_{hostname}-{now}'},  # noqa: FS003
         local_borg_version='1.2.3',
     )
diff --git a/tests/unit/borg/test_prune.py b/tests/unit/borg/test_prune.py
index ed4101e2..dd240dcb 100644
--- a/tests/unit/borg/test_prune.py
+++ b/tests/unit/borg/test_prune.py
@@ -27,27 +27,39 @@ def test_make_prune_flags_returns_flags_from_config_plus_default_prefix_glob():
 
     result = module.make_prune_flags(retention_config, local_borg_version='1.2.3')
 
-    assert tuple(result) == BASE_PRUNE_FLAGS + (('--match-archives', 'sh:{hostname}-*'),)
+    assert tuple(result) == BASE_PRUNE_FLAGS + (
+        ('--match-archives', 'sh:{hostname}-*'),  # noqa: FS003
+    )
 
 
 def test_make_prune_flags_accepts_prefix_with_placeholders():
-    retention_config = OrderedDict((('keep_daily', 1), ('prefix', 'Documents_{hostname}-{now}')))
+    retention_config = OrderedDict(
+        (('keep_daily', 1), ('prefix', 'Documents_{hostname}-{now}'))  # noqa: FS003
+    )
     flexmock(module.feature).should_receive('available').and_return(True)
 
     result = module.make_prune_flags(retention_config, local_borg_version='1.2.3')
 
-    expected = (('--keep-daily', '1'), ('--match-archives', 'sh:Documents_{hostname}-{now}*'))
+    expected = (
+        ('--keep-daily', '1'),
+        ('--match-archives', 'sh:Documents_{hostname}-{now}*'),  # noqa: FS003
+    )
 
     assert tuple(result) == expected
 
 
 def test_make_prune_flags_with_prefix_without_borg_features_uses_glob_archives():
-    retention_config = OrderedDict((('keep_daily', 1), ('prefix', 'Documents_{hostname}-{now}')))
+    retention_config = OrderedDict(
+        (('keep_daily', 1), ('prefix', 'Documents_{hostname}-{now}'))  # noqa: FS003
+    )
     flexmock(module.feature).should_receive('available').and_return(False)
 
     result = module.make_prune_flags(retention_config, local_borg_version='1.2.3')
 
-    expected = (('--keep-daily', '1'), ('--glob-archives', 'Documents_{hostname}-{now}*'))
+    expected = (
+        ('--keep-daily', '1'),
+        ('--glob-archives', 'Documents_{hostname}-{now}*'),  # noqa: FS003
+    )
 
     assert tuple(result) == expected
 
diff --git a/tests/unit/config/test_environment.py b/tests/unit/config/test_environment.py
index b7b56dd8..3e342fa0 100644
--- a/tests/unit/config/test_environment.py
+++ b/tests/unit/config/test_environment.py
@@ -12,7 +12,7 @@ def test_env(monkeypatch):
 
 def test_env_braces(monkeypatch):
     monkeypatch.setenv('MY_CUSTOM_VALUE', 'foo')
-    config = {'key': 'Hello ${MY_CUSTOM_VALUE}'}
+    config = {'key': 'Hello ${MY_CUSTOM_VALUE}'}  # noqa: FS003
     module.resolve_env_variables(config)
     assert config == {'key': 'Hello foo'}
 
@@ -20,7 +20,7 @@ def test_env_braces(monkeypatch):
 def test_env_multi(monkeypatch):
     monkeypatch.setenv('MY_CUSTOM_VALUE', 'foo')
     monkeypatch.setenv('MY_CUSTOM_VALUE2', 'bar')
-    config = {'key': 'Hello ${MY_CUSTOM_VALUE}${MY_CUSTOM_VALUE2}'}
+    config = {'key': 'Hello ${MY_CUSTOM_VALUE}${MY_CUSTOM_VALUE2}'}  # noqa: FS003
     module.resolve_env_variables(config)
     assert config == {'key': 'Hello foobar'}
 
@@ -28,21 +28,21 @@ def test_env_multi(monkeypatch):
 def test_env_escape(monkeypatch):
     monkeypatch.setenv('MY_CUSTOM_VALUE', 'foo')
     monkeypatch.setenv('MY_CUSTOM_VALUE2', 'bar')
-    config = {'key': r'Hello ${MY_CUSTOM_VALUE} \${MY_CUSTOM_VALUE}'}
+    config = {'key': r'Hello ${MY_CUSTOM_VALUE} \${MY_CUSTOM_VALUE}'}  # noqa: FS003
     module.resolve_env_variables(config)
-    assert config == {'key': r'Hello foo ${MY_CUSTOM_VALUE}'}
+    assert config == {'key': r'Hello foo ${MY_CUSTOM_VALUE}'}  # noqa: FS003
 
 
 def test_env_default_value(monkeypatch):
     monkeypatch.delenv('MY_CUSTOM_VALUE', raising=False)
-    config = {'key': 'Hello ${MY_CUSTOM_VALUE:-bar}'}
+    config = {'key': 'Hello ${MY_CUSTOM_VALUE:-bar}'}  # noqa: FS003
     module.resolve_env_variables(config)
     assert config == {'key': 'Hello bar'}
 
 
 def test_env_unknown(monkeypatch):
     monkeypatch.delenv('MY_CUSTOM_VALUE', raising=False)
-    config = {'key': 'Hello ${MY_CUSTOM_VALUE}'}
+    config = {'key': 'Hello ${MY_CUSTOM_VALUE}'}  # noqa: FS003
     with pytest.raises(ValueError):
         module.resolve_env_variables(config)
 
@@ -55,20 +55,20 @@ def test_env_full(monkeypatch):
         'dict': {
             'key': 'value',
             'anotherdict': {
-                'key': 'My ${MY_CUSTOM_VALUE} here',
-                'other': '${MY_CUSTOM_VALUE}',
-                'escaped': r'\${MY_CUSTOM_VALUE}',
+                'key': 'My ${MY_CUSTOM_VALUE} here',  # noqa: FS003
+                'other': '${MY_CUSTOM_VALUE}',  # noqa: FS003
+                'escaped': r'\${MY_CUSTOM_VALUE}',  # noqa: FS003
                 'list': [
-                    '/home/${MY_CUSTOM_VALUE}/.local',
+                    '/home/${MY_CUSTOM_VALUE}/.local',  # noqa: FS003
                     '/var/log/',
-                    '/home/${MY_CUSTOM_VALUE2:-bar}/.config',
+                    '/home/${MY_CUSTOM_VALUE2:-bar}/.config',  # noqa: FS003
                 ],
             },
         },
         'list': [
-            '/home/${MY_CUSTOM_VALUE}/.local',
+            '/home/${MY_CUSTOM_VALUE}/.local',  # noqa: FS003
             '/var/log/',
-            '/home/${MY_CUSTOM_VALUE2-bar}/.config',
+            '/home/${MY_CUSTOM_VALUE2-bar}/.config',  # noqa: FS003
         ],
     }
     module.resolve_env_variables(config)
@@ -79,7 +79,7 @@ def test_env_full(monkeypatch):
             'anotherdict': {
                 'key': 'My foo here',
                 'other': 'foo',
-                'escaped': '${MY_CUSTOM_VALUE}',
+                'escaped': '${MY_CUSTOM_VALUE}',  # noqa: FS003
                 'list': ['/home/foo/.local', '/var/log/', '/home/bar/.config'],
             },
         },
diff --git a/tests/unit/config/test_validate.py b/tests/unit/config/test_validate.py
index 6a9f4a4e..28527226 100644
--- a/tests/unit/config/test_validate.py
+++ b/tests/unit/config/test_validate.py
@@ -13,7 +13,7 @@ def test_format_json_error_path_element_formats_property():
 
 
 def test_format_json_error_formats_error_including_path():
-    flexmock(module).format_json_error_path_element = lambda element: '.{}'.format(element)
+    flexmock(module).format_json_error_path_element = lambda element: f'.{element}'
     error = flexmock(message='oops', path=['foo', 'bar'])
 
     assert module.format_json_error(error) == "At 'foo.bar': oops"
@@ -66,9 +66,9 @@ def test_apply_logical_validation_does_not_raise_if_archive_name_format_and_pref
     module.apply_logical_validation(
         'config.yaml',
         {
-            'storage': {'archive_name_format': '{hostname}-{now}'},
-            'retention': {'prefix': '{hostname}-'},
-            'consistency': {'prefix': '{hostname}-'},
+            'storage': {'archive_name_format': '{hostname}-{now}'},  # noqa: FS003
+            'retention': {'prefix': '{hostname}-'},  # noqa: FS003
+            'consistency': {'prefix': '{hostname}-'},  # noqa: FS003
         },
     )
 
diff --git a/tests/unit/hooks/test_command.py b/tests/unit/hooks/test_command.py
index 3d1686d0..3a657eb8 100644
--- a/tests/unit/hooks/test_command.py
+++ b/tests/unit/hooks/test_command.py
@@ -11,27 +11,20 @@ def test_interpolate_context_passes_through_command_without_variable():
 
 
 def test_interpolate_context_passes_through_command_with_unknown_variable():
-    assert (
-        module.interpolate_context('test.yaml', 'pre-backup', 'ls {baz}', {'foo': 'bar'})
-        == 'ls {baz}'
-    )
+    command = 'ls {baz}'  # noqa: FS003
+
+    assert module.interpolate_context('test.yaml', 'pre-backup', command, {'foo': 'bar'}) == command
 
 
 def test_interpolate_context_interpolates_variables():
+    command = 'ls {foo}{baz} {baz}'  # noqa: FS003
     context = {'foo': 'bar', 'baz': 'quux'}
 
     assert (
-        module.interpolate_context('test.yaml', 'pre-backup', 'ls {foo}{baz} {baz}', context)
-        == 'ls barquux quux'
+        module.interpolate_context('test.yaml', 'pre-backup', command, context) == 'ls barquux quux'
     )
 
 
-def test_interpolate_context_does_not_touch_unknown_variables():
-    context = {'foo': 'bar', 'baz': 'quux'}
-
-    assert module.interpolate_context('test.yaml', 'pre-backup', 'ls {wtf}', context) == 'ls {wtf}'
-
-
 def test_execute_hook_invokes_each_command():
     flexmock(module).should_receive('interpolate_context').replace_with(
         lambda config_file, hook_description, command, context: command
diff --git a/tests/unit/hooks/test_healthchecks.py b/tests/unit/hooks/test_healthchecks.py
index d5779534..c975e4fd 100644
--- a/tests/unit/hooks/test_healthchecks.py
+++ b/tests/unit/hooks/test_healthchecks.py
@@ -206,9 +206,7 @@ def test_ping_monitor_with_ping_uuid_hits_corresponding_url():
     payload = 'data'
     flexmock(module).should_receive('format_buffered_logs_for_payload').and_return(payload)
     flexmock(module.requests).should_receive('post').with_args(
-        'https://hc-ping.com/{}'.format(hook_config['ping_url']),
-        data=payload.encode('utf-8'),
-        verify=True,
+        f"https://hc-ping.com/{hook_config['ping_url']}", data=payload.encode('utf-8'), verify=True,
     ).and_return(flexmock(ok=True))
 
     module.ping_monitor(
diff --git a/tests/unit/hooks/test_mongodb.py b/tests/unit/hooks/test_mongodb.py
index f61f3c70..44e427f1 100644
--- a/tests/unit/hooks/test_mongodb.py
+++ b/tests/unit/hooks/test_mongodb.py
@@ -17,7 +17,7 @@ def test_dump_databases_runs_mongodump_for_each_database():
 
     for name, process in zip(('foo', 'bar'), processes):
         flexmock(module).should_receive('execute_command').with_args(
-            ['mongodump', '--db', name, '--archive', '>', 'databases/localhost/{}'.format(name)],
+            ['mongodump', '--db', name, '--archive', '>', f'databases/localhost/{name}'],
            shell=True,
             run_to_completion=False,
         ).and_return(process).once()
diff --git a/tests/unit/hooks/test_postgresql.py b/tests/unit/hooks/test_postgresql.py
index 9cb4c0ff..349c04be 100644
--- a/tests/unit/hooks/test_postgresql.py
+++ b/tests/unit/hooks/test_postgresql.py
@@ -134,7 +134,7 @@ def test_dump_databases_runs_pg_dump_for_each_database():
                 'custom',
                 name,
                 '>',
-                'databases/localhost/{}'.format(name),
+                f'databases/localhost/{name}',
             ),
             shell=True,
             extra_environment={'PGSSLMODE': 'disable'},
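
The hunks above boil down to two cases: '.format()' calls are rewritten as f-strings, while literals whose braces are meant to be expanded later (Borg's '{hostname}'/'{now}' placeholders and shell-style '${VAR}' references) are annotated with '# noqa: FS003' so the new f-string check leaves them alone. A minimal sketch of that distinction, assuming FS003 is flake8-use-fstring's "f-string missing prefix" check; the variable names below are illustrative, not taken from the patch:

    name = 'foo'

    # Real formatting: the style plugins want the f-string spelling.
    old_path = 'databases/localhost/{}'.format(name)
    new_path = f'databases/localhost/{name}'
    assert old_path == new_path == 'databases/localhost/foo'

    # Intentional literal braces: Borg or the shell expands these placeholders
    # later, so the string must stay a plain literal rather than become an
    # f-string. The noqa comment suppresses the FS003 warning for that line.
    archive_name_format = 'Documents_{hostname}-{now}'  # noqa: FS003
    env_reference = 'Hello ${MY_CUSTOM_VALUE:-bar}'  # noqa: FS003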