diff --git a/.drone.yml b/.drone.yml index cf4c358a..e353b3c7 100644 --- a/.drone.yml +++ b/.drone.yml @@ -1,56 +1,86 @@ +--- kind: pipeline name: python-3-8-alpine-3-13 services: - name: postgresql - image: postgres:13.1-alpine + image: docker.io/postgres:13.1-alpine environment: POSTGRES_PASSWORD: test POSTGRES_DB: test + - name: postgresql2 + image: docker.io/postgres:13.1-alpine + environment: + POSTGRES_PASSWORD: test2 + POSTGRES_DB: test + POSTGRES_USER: postgres2 + commands: + - docker-entrypoint.sh -p 5433 - name: mysql - image: mariadb:10.5 + image: docker.io/mariadb:10.5 environment: MYSQL_ROOT_PASSWORD: test MYSQL_DATABASE: test + - name: mysql2 + image: docker.io/mariadb:10.5 + environment: + MYSQL_ROOT_PASSWORD: test2 + MYSQL_DATABASE: test + commands: + - docker-entrypoint.sh --port=3307 - name: mongodb - image: mongo:5.0.5 + image: docker.io/mongo:5.0.5 environment: MONGO_INITDB_ROOT_USERNAME: root MONGO_INITDB_ROOT_PASSWORD: test + - name: mongodb2 + image: docker.io/mongo:5.0.5 + environment: + MONGO_INITDB_ROOT_USERNAME: root2 + MONGO_INITDB_ROOT_PASSWORD: test2 + commands: + - docker-entrypoint.sh --port=27018 clone: skip_verify: true steps: - name: build - image: alpine:3.13 + image: docker.io/alpine:3.13 + environment: + TEST_CONTAINER: true pull: always commands: - scripts/run-full-tests --- kind: pipeline name: documentation +type: exec + +platform: + os: linux + arch: amd64 clone: skip_verify: true steps: - name: build - image: plugins/docker - settings: - username: + environment: + USERNAME: from_secret: docker_username - password: + PASSWORD: from_secret: docker_password - registry: projects.torsion.org - repo: projects.torsion.org/borgmatic-collective/borgmatic - tags: docs - dockerfile: docs/Dockerfile + IMAGE_NAME: projects.torsion.org/borgmatic-collective/borgmatic:docs + commands: + - podman login --username "$USERNAME" --password "$PASSWORD" projects.torsion.org + - podman build --tag "$IMAGE_NAME" --file docs/Dockerfile --storage-opt "overlay.mount_program=/usr/bin/fuse-overlayfs" . + - podman push "$IMAGE_NAME" trigger: repo: - borgmatic-collective/borgmatic branch: - - master + - main event: - push diff --git a/.gitea/issue_template.md b/.gitea/issue_template.md deleted file mode 100644 index 99b1c654..00000000 --- a/.gitea/issue_template.md +++ /dev/null @@ -1,35 +0,0 @@ -#### What I'm trying to do and why - -#### Steps to reproduce (if a bug) - -Include (sanitized) borgmatic configuration files if applicable. - -#### Actual behavior (if a bug) - -Include (sanitized) `--verbosity 2` output if applicable. - -#### Expected behavior (if a bug) - -#### Other notes / implementation ideas - -#### Environment - -**borgmatic version:** [version here] - -Use `sudo borgmatic --version` or `sudo pip show borgmatic | grep ^Version` - -**borgmatic installation method:** [e.g., Debian package, Docker container, etc.] - -**Borg version:** [version here] - -Use `sudo borg --version` - -**Python version:** [version here] - -Use `python3 --version` - -**Database version (if applicable):** [version here] - -Use `psql --version` or `mysql --version` on client and server. 
- -**operating system and version:** [OS here] diff --git a/.gitea/issue_template/bug_template.yaml b/.gitea/issue_template/bug_template.yaml new file mode 100644 index 00000000..8f4cad12 --- /dev/null +++ b/.gitea/issue_template/bug_template.yaml @@ -0,0 +1,77 @@ +name: "Bug or question/support" +about: "For filing a bug or getting support" +body: + - type: textarea + id: problem + attributes: + label: What I'm trying to do and why + validations: + required: true + - type: textarea + id: repro_steps + attributes: + label: Steps to reproduce + description: Include (sanitized) borgmatic configuration files if applicable. + validations: + required: false + - type: textarea + id: actual_behavior + attributes: + label: Actual behavior + description: Include (sanitized) `--verbosity 2` output if applicable. + validations: + required: false + - type: textarea + id: expected_behavior + attributes: + label: Expected behavior + validations: + required: false + - type: textarea + id: notes + attributes: + label: Other notes / implementation ideas + validations: + required: false + - type: input + id: borgmatic_version + attributes: + label: borgmatic version + description: Use `sudo borgmatic --version` or `sudo pip show borgmatic | grep ^Version` + validations: + required: false + - type: input + id: borgmatic_install_method + attributes: + label: borgmatic installation method + description: e.g., pip install, Debian package, container, etc. + validations: + required: false + - type: input + id: borg_version + attributes: + label: Borg version + description: Use `sudo borg --version` + validations: + required: false + - type: input + id: python_version + attributes: + label: Python version + description: Use `python3 --version` + validations: + required: false + - type: input + id: database_version + attributes: + label: Database version (if applicable) + description: Use `psql --version` / `mysql --version` / `mongodump --version` / `sqlite3 --version` + validations: + required: false + - type: input + id: operating_system_version + attributes: + label: Operating system and version + description: On Linux, use `cat /etc/os-release` + validations: + required: false diff --git a/.gitea/issue_template/config.yaml b/.gitea/issue_template/config.yaml new file mode 100644 index 00000000..3ba13e0c --- /dev/null +++ b/.gitea/issue_template/config.yaml @@ -0,0 +1 @@ +blank_issues_enabled: false diff --git a/.gitea/issue_template/feature_template.yaml b/.gitea/issue_template/feature_template.yaml new file mode 100644 index 00000000..ac4c38f5 --- /dev/null +++ b/.gitea/issue_template/feature_template.yaml @@ -0,0 +1,15 @@ +name: "Feature" +about: "For filing a feature request or idea" +body: + - type: textarea + id: request + attributes: + label: What I'd like to do and why + validations: + required: true + - type: textarea + id: notes + attributes: + label: Other notes / implementation ideas + validations: + required: false diff --git a/NEWS b/NEWS index 7d02c0e2..565d5c78 100644 --- a/NEWS +++ b/NEWS @@ -1,4 +1,129 @@ -1.7.10.dev0 +1.8.1.dev0 + * #728: Fix for "prune" action error when using the "keep_exclude_tags" option. + * #730: Fix for Borg's interactive prompt on the "check --repair" action automatically getting + answered "NO" even when the "check_i_know_what_i_am_doing" option isn't set. 
+ +1.8.0 + * #575: BREAKING: For the "borgmatic borg" action, instead of implicitly injecting + repository/archive into the resulting Borg command-line, pass repository to Borg via an + environment variable and make archive available for explicit use in your commands. See the + documentation for more information: + https://torsion.org/borgmatic/docs/how-to/run-arbitrary-borg-commands/ + * #719: Fix an error when running "borg key export" through borgmatic. + * #720: Fix an error when dumping a database and the "exclude_nodump" option is set. + * #724: Add "check_i_know_what_i_am_doing" option to bypass Borg confirmation prompt when running + "check --repair". + * When merging two configuration files, error gracefully if the two files do not adhere to the same + format. + * #721: Remove configuration sections ("location:", "storage:", "hooks:" etc.), while still keeping + deprecated support for them. Now, all options are at the same level, and you don't need to worry + about commenting/uncommenting section headers when you change an option (if you remove your + sections first). + * #721: BREAKING: The retention prefix and the consistency prefix can no longer have different + values (unless one is not set). + * #721: BREAKING: The storage umask and the hooks umask can no longer have different values (unless + one is not set). + * BREAKING: Flags like "--config" that previously took multiple values now need to be given once + per value, e.g. "--config first.yaml --config second.yaml" instead of "--config first.yaml + second.yaml". This prevents argument parsing errors on ambiguous commands. + * BREAKING: Remove the deprecated (and silently ignored) "--successful" flag on the "list" action, + as newer versions of Borg list successful (non-checkpoint) archives by default. + * All deprecated configuration option values now generate warning logs. + * Remove the deprecated (and non-functional) "--excludes" flag in favor of excludes within + configuration. + * Fix an error when logging too-long command output during error handling. Now, long command output + is truncated before logging. + +1.7.15 + * #326: Add configuration options and command-line flags for backing up a database from one + location while restoring it somewhere else. + * #399: Add a documentation troubleshooting note for MySQL/MariaDB authentication errors. + * #529: Remove upgrade-borgmatic-config command for upgrading borgmatic 1.1.0 INI-style + configuration. + * #529: Deprecate generate-borgmatic-config in favor of new "config generate" action. + * #529: Deprecate validate-borgmatic-config in favor of new "config validate" action. + * #697, #712, #716: Extract borgmatic configuration from backup via new "config bootstrap" + action—even when borgmatic has no configuration yet! + * #669: Add sample systemd user service for running borgmatic as a non-root user. + * #711, #713: Fix an error when "data" check time files are accessed without getting upgraded + first. + +1.7.14 + * #484: Add a new verbosity level (-2) to disable output entirely (for console, syslog, log file, + or monitoring), so not even errors are shown. + * #688: Tweak archive check probing logic to use the newest timestamp found when multiple exist. + * #659: Add Borg 2 date-based matching flags to various actions for archive selection. + * #703: Fix an error when loading the configuration schema on Fedora Linux. 
+ * #704: Fix "check" action error when repository and archive checks are configured but the archive + check gets skipped due to the configured frequency. + * #706: Fix "--archive latest" on "list" and "info" actions that only worked on the first of + multiple configured repositories. + +1.7.13 + * #375: Restore particular PostgreSQL schemas from a database dump via "borgmatic restore --schema" + flag. See the documentation for more information: + https://torsion.org/borgmatic/docs/how-to/backup-your-databases/#restore-particular-schemas + * #678: Fix error from PostgreSQL when dumping a database with a "format" of "plain". + * #678: Fix PostgreSQL hook to support "psql_command" and "pg_restore_command" options containing + commands with arguments. + * #678: Fix calls to psql in PostgreSQL hook to ignore "~/.psqlrc", whose settings can break + database dumping. + * #680: Add support for logging each log line as a JSON object via global "--log-json" flag. + * #682: Fix "source_directories_must_exist" option to expand globs and tildes in source directories. + * #684: Rename "master" development branch to "main" to use more inclusive language. You'll need to + update your development checkouts accordingly. + * #686: Add fish shell completion script so you can tab-complete on the borgmatic command-line. See + the documentation for more information: + https://torsion.org/borgmatic/docs/how-to/set-up-backups/#shell-completion + * #687: Fix borgmatic error when not finding the configuration schema for certain "pip install + --editable" development installs. + * #688: Fix archive checks being skipped even when particular archives haven't been checked + recently. This occurred when using multiple borgmatic configuration files with different + "archive_name_format"s, for instance. + * #691: Fix error in "borgmatic restore" action when the configured repository path is relative + instead of absolute. + * #694: Run "borgmatic borg" action without capturing output so interactive prompts and flags like + "--progress" still work. + +1.7.12 + * #413: Add "log_file" context to command hooks so your scripts can consume the borgmatic log file. + See the documentation for more information: + https://torsion.org/borgmatic/docs/how-to/add-preparation-and-cleanup-steps-to-backups/ + * #666, #670: Fix error when running the "info" action with the "--match-archives" or "--archive" + flags. Also fix the "--match-archives"/"--archive" flags to correctly override the + "match_archives" configuration option for the "transfer", "list", "rlist", and "info" actions. + * #668: Fix error when running the "prune" action with both "archive_name_format" and "prefix" + options set. + * #672: Selectively shallow merge certain mappings or sequences when including configuration files. + See the documentation for more information: + https://torsion.org/borgmatic/docs/how-to/make-per-application-backups/#shallow-merge + * #672: Selectively omit list values when including configuration files. See the documentation for + more information: + https://torsion.org/borgmatic/docs/how-to/make-per-application-backups/#list-merge + * #673: View the results of configuration file merging via "validate-borgmatic-config --show" flag. + See the documentation for more information: + https://torsion.org/borgmatic/docs/how-to/make-per-application-backups/#debugging-includes + * Add optional support for running end-to-end tests and building documentation with rootless Podman + instead of Docker. 
+ +1.7.11 + * #479, #588: BREAKING: Automatically use the "archive_name_format" option to filter which archives + get used for borgmatic actions that operate on multiple archives. Override this behavior with the + new "match_archives" option in the storage section. This change is "breaking" in that it silently + changes which archives get considered for "rlist", "prune", "check", etc. See the documentation + for more information: + https://torsion.org/borgmatic/docs/how-to/make-per-application-backups/#archive-naming + * #479, #588: The "prefix" options have been deprecated in favor of the new "archive_name_format" + auto-matching behavior and the "match_archives" option. + * #658: Add "--log-file-format" flag for customizing the log message format. See the documentation + for more information: + https://torsion.org/borgmatic/docs/how-to/inspect-your-backups/#logging-to-file + * #662: Fix regression in which the "check_repositories" option failed to match repositories. + * #663: Fix regression in which the "transfer" action produced a traceback. + * Add spellchecking of source code during test runs. + +1.7.10 + * #396: When a database command errors, display and log the error message instead of swallowing it. * #501: Optionally error if a source directory does not exist via "source_directories_must_exist" option in borgmatic's location configuration. * #576: Add support for "file://" paths within "repositories" option. @@ -8,6 +133,9 @@ * #618: Add support for BORG_FILES_CACHE_TTL environment variable via "borg_files_cache_ttl" option in borgmatic's storage configuration. * #623: Fix confusing message when an error occurs running actions for a configuration file. + * #635: Add optional repository labels so you can select a repository via "--repository yourlabel" + at the command-line. See the configuration reference for more information: + https://torsion.org/borgmatic/docs/reference/configuration/ * #649: Add documentation on backing up a database running in a container: https://torsion.org/borgmatic/docs/how-to/backup-your-databases/#containers * #655: Fix error when databases are configured and a source directory doesn't exist. @@ -321,7 +449,7 @@ * #398: Clarify canonical home of borgmatic in documentation. * #406: Clarify that spaces in path names should not be backslashed in path names. * #423: Fix error handling to error loudly when Borg gets killed due to running out of memory! - * Fix build so as not to attempt to build and push documentation for a non-master branch. + * Fix build so as not to attempt to build and push documentation for a non-main branch. * "Fix" build failure with Alpine Edge by switching from Edge to Alpine 3.13. * Move #borgmatic IRC channel from Freenode to Libera Chat due to Freenode takeover drama. IRC connection info: https://torsion.org/borgmatic/#issues @@ -384,7 +512,7 @@ configuration schema descriptions. 1.5.6 - * #292: Allow before_backup and similiar hooks to exit with a soft failure without altering the + * #292: Allow before_backup and similar hooks to exit with a soft failure without altering the monitoring status on Healthchecks or other providers. Support this by waiting to ping monitoring services with a "start" status until after before_* hooks finish. Failures in before_* hooks still trigger a monitoring "fail" status. @@ -453,7 +581,7 @@ * For "list" and "info" actions, show repository names even at verbosity 0. 1.4.22 - * #276, #285: Disable colored output when "--json" flag is used, so as to produce valid JSON ouput. 
+ * #276, #285: Disable colored output when "--json" flag is used, so as to produce valid JSON output. * After a backup of a database dump in directory format, properly remove the dump directory. * In "borgmatic --help", don't expand $HOME in listing of default "--config" paths. @@ -825,7 +953,7 @@ * #77: Skip non-"*.yaml" config filenames in /etc/borgmatic.d/ so as not to parse backup files, editor swap files, etc. * #81: Document user-defined hooks run before/after backup, or on error. - * Add code style guidelines to the documention. + * Add code style guidelines to the documentation. 1.2.0 * #61: Support for Borg --list option via borgmatic command-line to list all archives. diff --git a/README.md b/README.md index 27fc6cd2..9cd52108 100644 --- a/README.md +++ b/README.md @@ -11,54 +11,46 @@ borgmatic is simple, configuration-driven backup software for servers and workstations. Protect your files with client-side encryption. Backup your databases too. Monitor it all with integrated third-party services. -The canonical home of borgmatic is at https://torsion.org/borgmatic. +The canonical home of borgmatic is at https://torsion.org/borgmatic/ Here's an example configuration file: ```yaml -location: - # List of source directories to backup. - source_directories: - - /home - - /etc +# List of source directories to backup. +source_directories: + - /home + - /etc - # Paths of local or remote repositories to backup to. - repositories: - - ssh://1234@usw-s001.rsync.net/./backups.borg - - ssh://k8pDxu32@k8pDxu32.repo.borgbase.com/./repo - - /var/lib/backups/local.borg +# Paths of local or remote repositories to backup to. +repositories: + - path: ssh://k8pDxu32@k8pDxu32.repo.borgbase.com/./repo + label: borgbase + - path: /var/lib/backups/local.borg + label: local -retention: - # Retention policy for how many backups to keep. - keep_daily: 7 - keep_weekly: 4 - keep_monthly: 6 +# Retention policy for how many backups to keep. +keep_daily: 7 +keep_weekly: 4 +keep_monthly: 6 -consistency: - # List of checks to run to validate your backups. - checks: - - name: repository - - name: archives - frequency: 2 weeks +# List of checks to run to validate your backups. +checks: + - name: repository + - name: archives + frequency: 2 weeks -hooks: - # Custom preparation scripts to run. - before_backup: - - prepare-for-backup.sh +# Custom preparation scripts to run. +before_backup: + - prepare-for-backup.sh - # Databases to dump and include in backups. - postgresql_databases: - - name: users +# Databases to dump and include in backups. +postgresql_databases: + - name: users - # Third-party services to notify you if backups aren't happening. - healthchecks: https://hc-ping.com/be067061-cf96-4412-8eae-62b0c50d6a8c +# Third-party services to notify you if backups aren't happening. +healthchecks: https://hc-ping.com/be067061-cf96-4412-8eae-62b0c50d6a8c ``` -Want to see borgmatic in action? Check out the screencast. - - - borgmatic is powered by [Borg Backup](https://www.borgbackup.org/). ## Integrations @@ -90,16 +82,15 @@ reference guides. Need somewhere to store your encrypted off-site backups? The following hosting providers include specific support for Borg/borgmatic—and fund borgmatic -development and hosting when you use these links to sign up. (These are -referral links, but without any tracking scripts or cookies.) 
+development and hosting when you use these referral links to sign up: -Additionally, [rsync.net](https://www.rsync.net/products/borg.html) and -[Hetzner](https://www.hetzner.com/storage/storage-box) have compatible storage -offerings, but do not currently fund borgmatic development or hosting. +Additionally, rsync.net has a compatible storage offering, but does not fund +borgmatic development or hosting. ## Support and contributing @@ -120,10 +111,7 @@ issues. ### Social -Check out the [Borg subreddit](https://www.reddit.com/r/BorgBackup/) for -general Borg and borgmatic discussion and support. - -Also follow [borgmatic on Mastodon](https://fosstodon.org/@borgmatic). +Follow [borgmatic on Mastodon](https://fosstodon.org/@borgmatic). ### Chat @@ -164,5 +152,5 @@ Also, please check out the [borgmatic development how-to](https://torsion.org/borgmatic/docs/how-to/develop-on-borgmatic/) for info on cloning source code, running tests, etc. -![Build Status](https://build.torsion.org/api/badges/borgmatic-collective/borgmatic/status.svg?ref=refs/heads/master) +![Build Status](https://build.torsion.org/api/badges/borgmatic-collective/borgmatic/status.svg?ref=refs/heads/main) diff --git a/SECURITY.md b/SECURITY.md index d82b6f32..64c3d3b3 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -7,8 +7,8 @@ permalink: security-policy/index.html While we want to hear about security vulnerabilities in all versions of borgmatic, security fixes are only made to the most recently released version. -It's simply not practical for our small volunteer effort to maintain multiple -release branches and put out separate security patches for each. +It's not practical for our small volunteer effort to maintain multiple release +branches and put out separate security patches for each. ## Reporting a vulnerability diff --git a/borgmatic/actions/arguments.py b/borgmatic/actions/arguments.py new file mode 100644 index 00000000..7fd77fc8 --- /dev/null +++ b/borgmatic/actions/arguments.py @@ -0,0 +1,9 @@ +import argparse + + +def update_arguments(arguments, **updates): + ''' + Given an argparse.Namespace instance of command-line arguments and one or more keyword argument + updates to perform, return a copy of the arguments with those updates applied. + ''' + return argparse.Namespace(**dict(vars(arguments), **updates)) diff --git a/borgmatic/actions/borg.py b/borgmatic/actions/borg.py index ee94f1dc..6e46596f 100644 --- a/borgmatic/actions/borg.py +++ b/borgmatic/actions/borg.py @@ -8,7 +8,13 @@ logger = logging.getLogger(__name__) def run_borg( - repository, storage, local_borg_version, borg_arguments, local_path, remote_path, + repository, + config, + local_borg_version, + borg_arguments, + global_arguments, + local_path, + remote_path, ): ''' Run the "borg" action for the given repository. 
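> Editor's note on the new `borgmatic/actions/arguments.py` helper above: `update_arguments` returns a *copy* of the parsed arguments — as the later `info.py` and `list.py` hunks show, it replaces the old pattern of assigning `info_arguments.archive = ...` in place. A minimal usage sketch (the archive name is hypothetical):

```python
import argparse

from borgmatic.actions.arguments import update_arguments

arguments = argparse.Namespace(archive=None, json=False)
updated = update_arguments(arguments, archive='host-2023-06-01')  # Hypothetical archive name.

assert updated.archive == 'host-2023-06-01'
assert arguments.archive is None  # The original Namespace is left unmodified.
```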
@@ -16,18 +22,21 @@ def run_borg( if borg_arguments.repository is None or borgmatic.config.validate.repositories_match( repository, borg_arguments.repository ): - logger.info(f'{repository}: Running arbitrary Borg command') + logger.info( + f'{repository.get("label", repository["path"])}: Running arbitrary Borg command' + ) archive_name = borgmatic.borg.rlist.resolve_archive_name( - repository, + repository['path'], borg_arguments.archive, - storage, + config, local_borg_version, + global_arguments, local_path, remote_path, ) borgmatic.borg.borg.run_arbitrary_borg( - repository, - storage, + repository['path'], + config, local_borg_version, options=borg_arguments.options, archive=archive_name, diff --git a/borgmatic/actions/break_lock.py b/borgmatic/actions/break_lock.py index 65384d7a..e94ab466 100644 --- a/borgmatic/actions/break_lock.py +++ b/borgmatic/actions/break_lock.py @@ -7,7 +7,13 @@ logger = logging.getLogger(__name__) def run_break_lock( - repository, storage, local_borg_version, break_lock_arguments, local_path, remote_path, + repository, + config, + local_borg_version, + break_lock_arguments, + global_arguments, + local_path, + remote_path, ): ''' Run the "break-lock" action for the given repository. @@ -15,7 +21,14 @@ def run_break_lock( if break_lock_arguments.repository is None or borgmatic.config.validate.repositories_match( repository, break_lock_arguments.repository ): - logger.info(f'{repository}: Breaking repository and cache locks') - borgmatic.borg.break_lock.break_lock( - repository, storage, local_borg_version, local_path=local_path, remote_path=remote_path, + logger.info( + f'{repository.get("label", repository["path"])}: Breaking repository and cache locks' + ) + borgmatic.borg.break_lock.break_lock( + repository['path'], + config, + local_borg_version, + global_arguments, + local_path=local_path, + remote_path=remote_path, ) diff --git a/borgmatic/actions/check.py b/borgmatic/actions/check.py index cdee9edb..fe800156 100644 --- a/borgmatic/actions/check.py +++ b/borgmatic/actions/check.py @@ -10,10 +10,7 @@ logger = logging.getLogger(__name__) def run_check( config_filename, repository, - location, - storage, - consistency, - hooks, + config, hook_context, local_borg_version, check_arguments, @@ -30,20 +27,19 @@ def run_check( return borgmatic.hooks.command.execute_hook( - hooks.get('before_check'), - hooks.get('umask'), + config.get('before_check'), + config.get('umask'), config_filename, 'pre-check', global_arguments.dry_run, **hook_context, ) - logger.info(f'{repository}: Running consistency checks') + logger.info(f'{repository.get("label", repository["path"])}: Running consistency checks') borgmatic.borg.check.check_archives( - repository, - location, - storage, - consistency, + repository['path'], + config, local_borg_version, + global_arguments, local_path=local_path, remote_path=remote_path, progress=check_arguments.progress, @@ -52,8 +48,8 @@ def run_check( force=check_arguments.force, ) borgmatic.hooks.command.execute_hook( - hooks.get('after_check'), - hooks.get('umask'), + config.get('after_check'), + config.get('umask'), config_filename, 'post-check', global_arguments.dry_run, diff --git a/borgmatic/actions/compact.py b/borgmatic/actions/compact.py index a0efa3a2..29cf8943 100644 --- a/borgmatic/actions/compact.py +++ b/borgmatic/actions/compact.py @@ -11,9 +11,7 @@ logger = logging.getLogger(__name__) def run_compact( config_filename, repository, - storage, - retention, - hooks, + config, hook_context, local_borg_version, compact_arguments, @@ 
-31,20 +29,23 @@ def run_compact( return borgmatic.hooks.command.execute_hook( - hooks.get('before_compact'), - hooks.get('umask'), + config.get('before_compact'), + config.get('umask'), config_filename, 'pre-compact', global_arguments.dry_run, **hook_context, ) if borgmatic.borg.feature.available(borgmatic.borg.feature.Feature.COMPACT, local_borg_version): - logger.info(f'{repository}: Compacting segments{dry_run_label}') + logger.info( + f'{repository.get("label", repository["path"])}: Compacting segments{dry_run_label}' + ) borgmatic.borg.compact.compact_segments( global_arguments.dry_run, - repository, - storage, + repository['path'], + config, local_borg_version, + global_arguments, local_path=local_path, remote_path=remote_path, progress=compact_arguments.progress, @@ -52,10 +53,12 @@ def run_compact( threshold=compact_arguments.threshold, ) else: # pragma: nocover - logger.info(f'{repository}: Skipping compact (only available/needed in Borg 1.2+)') + logger.info( + f'{repository.get("label", repository["path"])}: Skipping compact (only available/needed in Borg 1.2+)' + ) borgmatic.hooks.command.execute_hook( - hooks.get('after_compact'), - hooks.get('umask'), + config.get('after_compact'), + config.get('umask'), config_filename, 'post-compact', global_arguments.dry_run, diff --git a/borgmatic/actions/config/__init__.py b/borgmatic/actions/config/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/borgmatic/actions/config/bootstrap.py b/borgmatic/actions/config/bootstrap.py new file mode 100644 index 00000000..ee167478 --- /dev/null +++ b/borgmatic/actions/config/bootstrap.py @@ -0,0 +1,103 @@ +import json +import logging +import os + +import borgmatic.borg.extract +import borgmatic.borg.rlist +import borgmatic.config.validate +import borgmatic.hooks.command +from borgmatic.borg.state import DEFAULT_BORGMATIC_SOURCE_DIRECTORY + +logger = logging.getLogger(__name__) + + +def get_config_paths(bootstrap_arguments, global_arguments, local_borg_version): + ''' + Given: + The bootstrap arguments, which include the repository and archive name, borgmatic source directory, + destination directory, and whether to strip components. + The global arguments, which include the dry run flag + and the local borg version, + Return: + The config paths from the manifest.json file in the borgmatic source directory after extracting it from the + repository. + + Raise ValueError if the manifest JSON is missing, can't be decoded, or doesn't contain the + expected configuration path data. 
+ ''' + borgmatic_source_directory = ( + bootstrap_arguments.borgmatic_source_directory or DEFAULT_BORGMATIC_SOURCE_DIRECTORY + ) + borgmatic_manifest_path = os.path.expanduser( + os.path.join(borgmatic_source_directory, 'bootstrap', 'manifest.json') + ) + extract_process = borgmatic.borg.extract.extract_archive( + global_arguments.dry_run, + bootstrap_arguments.repository, + borgmatic.borg.rlist.resolve_archive_name( + bootstrap_arguments.repository, + bootstrap_arguments.archive, + {}, + local_borg_version, + global_arguments, + ), + [borgmatic_manifest_path], + {}, + local_borg_version, + global_arguments, + extract_to_stdout=True, + ) + + manifest_json = extract_process.stdout.read() + if not manifest_json: + raise ValueError( + 'Cannot read configuration paths from archive due to missing bootstrap manifest' + ) + + try: + manifest_data = json.loads(manifest_json) + except json.JSONDecodeError as error: + raise ValueError( + f'Cannot read configuration paths from archive due to invalid bootstrap manifest JSON: {error}' + ) + + try: + return manifest_data['config_paths'] + except KeyError: + raise ValueError( + 'Cannot read configuration paths from archive due to invalid bootstrap manifest' + ) + + +def run_bootstrap(bootstrap_arguments, global_arguments, local_borg_version): + ''' + Run the "bootstrap" action for the given repository. + + Raise ValueError if the bootstrap configuration could not be loaded. + Raise CalledProcessError or OSError if Borg could not be run. + ''' + manifest_config_paths = get_config_paths( + bootstrap_arguments, global_arguments, local_borg_version + ) + + logger.info(f"Bootstrapping config paths: {', '.join(manifest_config_paths)}") + + borgmatic.borg.extract.extract_archive( + global_arguments.dry_run, + bootstrap_arguments.repository, + borgmatic.borg.rlist.resolve_archive_name( + bootstrap_arguments.repository, + bootstrap_arguments.archive, + {}, + local_borg_version, + global_arguments, + ), + [config_path.lstrip(os.path.sep) for config_path in manifest_config_paths], + {}, + local_borg_version, + global_arguments, + extract_to_stdout=False, + destination_path=bootstrap_arguments.destination, + strip_components=bootstrap_arguments.strip_components, + progress=bootstrap_arguments.progress, + ) diff --git a/borgmatic/actions/config/generate.py b/borgmatic/actions/config/generate.py new file mode 100644 index 00000000..48b9a7dd --- /dev/null +++ b/borgmatic/actions/config/generate.py @@ -0,0 +1,48 @@ +import logging + +import borgmatic.config.generate +import borgmatic.config.validate +import borgmatic.logger + +logger = logging.getLogger(__name__) + + +def run_generate(generate_arguments, global_arguments): + ''' + Given the generate arguments and the global arguments, each as an argparse.Namespace instance, + run the "generate" action. + + Raise FileExistsError if a file already exists at the destination path and the generate + arguments do not have overwrite set. 
+ ''' + borgmatic.logger.add_custom_log_levels() + dry_run_label = ' (dry run; not actually writing anything)' if global_arguments.dry_run else '' + + logger.answer( + f'Generating a configuration file at: {generate_arguments.destination_filename}{dry_run_label}' + ) + + borgmatic.config.generate.generate_sample_configuration( + global_arguments.dry_run, + generate_arguments.source_filename, + generate_arguments.destination_filename, + borgmatic.config.validate.schema_filename(), + overwrite=generate_arguments.overwrite, + ) + + if generate_arguments.source_filename: + logger.answer( + f''' +Merged in the contents of configuration file at: {generate_arguments.source_filename} +To review the changes made, run: + + diff --unified {generate_arguments.source_filename} {generate_arguments.destination_filename}''' + ) + + logger.answer( + ''' +This includes all available configuration options with example values, the few +required options as indicated. Please edit the file to suit your needs. + +If you ever need help: https://torsion.org/borgmatic/#issues''' + ) diff --git a/borgmatic/actions/config/validate.py b/borgmatic/actions/config/validate.py new file mode 100644 index 00000000..2929ccaa --- /dev/null +++ b/borgmatic/actions/config/validate.py @@ -0,0 +1,25 @@ +import logging + +import borgmatic.config.generate +import borgmatic.logger + +logger = logging.getLogger(__name__) + + +def run_validate(validate_arguments, configs): + ''' + Given the validate arguments as an argparse.Namespace instance and a dict of configuration + filename to corresponding parsed configuration, run the "validate" action. + + Most of the validation is actually performed implicitly by the standard borgmatic configuration + loading machinery prior to here, so this function mainly exists to support additional validate + flags like "--show". + ''' + borgmatic.logger.add_custom_log_levels() + + if validate_arguments.show: + for config_path, config in configs.items(): + if len(configs) > 1: + logger.answer('---') + + logger.answer(borgmatic.config.generate.render_configuration(config)) diff --git a/borgmatic/actions/create.py b/borgmatic/actions/create.py index ac2617d2..1d750f62 100644 --- a/borgmatic/actions/create.py +++ b/borgmatic/actions/create.py @@ -1,7 +1,14 @@ import json import logging +import os + +try: + import importlib_metadata +except ModuleNotFoundError: # pragma: nocover + import importlib.metadata as importlib_metadata import borgmatic.borg.create +import borgmatic.borg.state import borgmatic.config.validate import borgmatic.hooks.command import borgmatic.hooks.dispatch @@ -10,12 +17,39 @@ import borgmatic.hooks.dump logger = logging.getLogger(__name__) +def create_borgmatic_manifest(config, config_paths, dry_run): + ''' + Create a borgmatic manifest file to store the paths to the configuration files used to create + the archive. 
+ ''' + if dry_run: + return + + borgmatic_source_directory = config.get( + 'borgmatic_source_directory', borgmatic.borg.state.DEFAULT_BORGMATIC_SOURCE_DIRECTORY + ) + + borgmatic_manifest_path = os.path.expanduser( + os.path.join(borgmatic_source_directory, 'bootstrap', 'manifest.json') + ) + + if not os.path.exists(borgmatic_manifest_path): + os.makedirs(os.path.dirname(borgmatic_manifest_path), exist_ok=True) + + with open(borgmatic_manifest_path, 'w') as config_list_file: + json.dump( + { + 'borgmatic_version': importlib_metadata.version('borgmatic'), + 'config_paths': config_paths, + }, + config_list_file, + ) + + def run_create( config_filename, repository, - location, - storage, - hooks, + config, hook_context, local_borg_version, create_arguments, @@ -35,38 +69,37 @@ def run_create( return borgmatic.hooks.command.execute_hook( - hooks.get('before_backup'), - hooks.get('umask'), + config.get('before_backup'), + config.get('umask'), config_filename, 'pre-backup', global_arguments.dry_run, **hook_context, ) - logger.info(f'{repository}: Creating archive{dry_run_label}') + logger.info(f'{repository.get("label", repository["path"])}: Creating archive{dry_run_label}') borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured( 'remove_database_dumps', - hooks, - repository, + config, + repository['path'], borgmatic.hooks.dump.DATABASE_HOOK_NAMES, - location, global_arguments.dry_run, ) active_dumps = borgmatic.hooks.dispatch.call_hooks( 'dump_databases', - hooks, - repository, + config, + repository['path'], borgmatic.hooks.dump.DATABASE_HOOK_NAMES, - location, global_arguments.dry_run, ) + create_borgmatic_manifest(config, global_arguments.used_config_paths, global_arguments.dry_run) stream_processes = [process for processes in active_dumps.values() for process in processes] json_output = borgmatic.borg.create.create_archive( global_arguments.dry_run, - repository, - location, - storage, + repository['path'], + config, local_borg_version, + global_arguments, local_path=local_path, remote_path=remote_path, progress=create_arguments.progress, @@ -80,15 +113,14 @@ def run_create( borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured( 'remove_database_dumps', - hooks, + config, config_filename, borgmatic.hooks.dump.DATABASE_HOOK_NAMES, - location, global_arguments.dry_run, ) borgmatic.hooks.command.execute_hook( - hooks.get('after_backup'), - hooks.get('umask'), + config.get('after_backup'), + config.get('umask'), config_filename, 'post-backup', global_arguments.dry_run, diff --git a/borgmatic/actions/export_tar.py b/borgmatic/actions/export_tar.py index b5b6089d..f1937b54 100644 --- a/borgmatic/actions/export_tar.py +++ b/borgmatic/actions/export_tar.py @@ -9,7 +9,7 @@ logger = logging.getLogger(__name__) def run_export_tar( repository, - storage, + config, local_borg_version, export_tar_arguments, global_arguments, @@ -22,22 +22,26 @@ def run_export_tar( if export_tar_arguments.repository is None or borgmatic.config.validate.repositories_match( repository, export_tar_arguments.repository ): - logger.info(f'{repository}: Exporting archive {export_tar_arguments.archive} as tar file') + logger.info( + f'{repository["path"]}: Exporting archive {export_tar_arguments.archive} as tar file' + ) borgmatic.borg.export_tar.export_tar_archive( global_arguments.dry_run, - repository, + repository['path'], borgmatic.borg.rlist.resolve_archive_name( - repository, + repository['path'], export_tar_arguments.archive, - storage, + config, local_borg_version, + global_arguments, local_path, remote_path, ), 
export_tar_arguments.paths, export_tar_arguments.destination, - storage, + config, local_borg_version, + global_arguments, local_path=local_path, remote_path=remote_path, tar_filter=export_tar_arguments.tar_filter, diff --git a/borgmatic/actions/extract.py b/borgmatic/actions/extract.py index 6af9caa1..5d02d4c7 100644 --- a/borgmatic/actions/extract.py +++ b/borgmatic/actions/extract.py @@ -11,9 +11,7 @@ logger = logging.getLogger(__name__) def run_extract( config_filename, repository, - location, - storage, - hooks, + config, hook_context, local_borg_version, extract_arguments, @@ -25,8 +23,8 @@ def run_extract( Run the "extract" action for the given repository. ''' borgmatic.hooks.command.execute_hook( - hooks.get('before_extract'), - hooks.get('umask'), + config.get('before_extract'), + config.get('umask'), config_filename, 'pre-extract', global_arguments.dry_run, @@ -35,22 +33,25 @@ def run_extract( if extract_arguments.repository is None or borgmatic.config.validate.repositories_match( repository, extract_arguments.repository ): - logger.info(f'{repository}: Extracting archive {extract_arguments.archive}') + logger.info( + f'{repository.get("label", repository["path"])}: Extracting archive {extract_arguments.archive}' + ) borgmatic.borg.extract.extract_archive( global_arguments.dry_run, - repository, + repository['path'], borgmatic.borg.rlist.resolve_archive_name( - repository, + repository['path'], extract_arguments.archive, - storage, + config, local_borg_version, + global_arguments, local_path, remote_path, ), extract_arguments.paths, - location, - storage, + config, local_borg_version, + global_arguments, local_path=local_path, remote_path=remote_path, destination_path=extract_arguments.destination, @@ -58,8 +59,8 @@ def run_extract( progress=extract_arguments.progress, ) borgmatic.hooks.command.execute_hook( - hooks.get('after_extract'), - hooks.get('umask'), + config.get('after_extract'), + config.get('umask'), config_filename, 'post-extract', global_arguments.dry_run, diff --git a/borgmatic/actions/info.py b/borgmatic/actions/info.py index ab4fe426..b09f3ece 100644 --- a/borgmatic/actions/info.py +++ b/borgmatic/actions/info.py @@ -1,6 +1,7 @@ import json import logging +import borgmatic.actions.arguments import borgmatic.borg.info import borgmatic.borg.rlist import borgmatic.config.validate @@ -9,7 +10,13 @@ logger = logging.getLogger(__name__) def run_info( - repository, storage, local_borg_version, info_arguments, local_path, remote_path, + repository, + config, + local_borg_version, + info_arguments, + global_arguments, + local_path, + remote_path, ): ''' Run the "info" action for the given repository and archive. 
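> Editor's note on a pattern repeated throughout these action modules: each configured repository is now a dict with a required `path` and an optional `label`, and log prefixes prefer the label when present. An illustration using the repository values from the README example above:

```python
labeled = {'path': 'ssh://k8pDxu32@k8pDxu32.repo.borgbase.com/./repo', 'label': 'borgbase'}
unlabeled = {'path': '/var/lib/backups/local.borg'}

# dict.get() with a default makes the label-or-path fallback a one-liner:
assert f"{labeled.get('label', labeled['path'])}: Creating archive" == 'borgbase: Creating archive'
assert (
    f"{unlabeled.get('label', unlabeled['path'])}: Creating archive"
    == '/var/lib/backups/local.borg: Creating archive'
)
```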
@@ -20,22 +27,26 @@ def run_info( repository, info_arguments.repository ): if not info_arguments.json: # pragma: nocover - logger.answer(f'{repository}: Displaying archive summary information') - info_arguments.archive = borgmatic.borg.rlist.resolve_archive_name( - repository, + logger.answer( + f'{repository.get("label", repository["path"])}: Displaying archive summary information' + ) + archive_name = borgmatic.borg.rlist.resolve_archive_name( + repository['path'], info_arguments.archive, - storage, + config, local_borg_version, + global_arguments, local_path, remote_path, ) json_output = borgmatic.borg.info.display_archives_info( - repository, - storage, + repository['path'], + config, local_borg_version, - info_arguments=info_arguments, - local_path=local_path, - remote_path=remote_path, + borgmatic.actions.arguments.update_arguments(info_arguments, archive=archive_name), + global_arguments, + local_path, + remote_path, ) if json_output: # pragma: nocover yield json.loads(json_output) diff --git a/borgmatic/actions/list.py b/borgmatic/actions/list.py index 78efdf59..ae9da63c 100644 --- a/borgmatic/actions/list.py +++ b/borgmatic/actions/list.py @@ -1,6 +1,7 @@ import json import logging +import borgmatic.actions.arguments import borgmatic.borg.list import borgmatic.config.validate @@ -8,7 +9,13 @@ logger = logging.getLogger(__name__) def run_list( - repository, storage, local_borg_version, list_arguments, local_path, remote_path, + repository, + config, + local_borg_version, + list_arguments, + global_arguments, + local_path, + remote_path, ): ''' Run the "list" action for the given repository and archive. @@ -20,24 +27,27 @@ def run_list( ): if not list_arguments.json: # pragma: nocover if list_arguments.find_paths: - logger.answer(f'{repository}: Searching archives') + logger.answer(f'{repository.get("label", repository["path"])}: Searching archives') elif not list_arguments.archive: - logger.answer(f'{repository}: Listing archives') - list_arguments.archive = borgmatic.borg.rlist.resolve_archive_name( - repository, + logger.answer(f'{repository.get("label", repository["path"])}: Listing archives') + + archive_name = borgmatic.borg.rlist.resolve_archive_name( + repository['path'], list_arguments.archive, - storage, + config, local_borg_version, + global_arguments, local_path, remote_path, ) json_output = borgmatic.borg.list.list_archive( - repository, - storage, + repository['path'], + config, local_borg_version, - list_arguments=list_arguments, - local_path=local_path, - remote_path=remote_path, + borgmatic.actions.arguments.update_arguments(list_arguments, archive=archive_name), + global_arguments, + local_path, + remote_path, ) if json_output: # pragma: nocover yield json.loads(json_output) diff --git a/borgmatic/actions/mount.py b/borgmatic/actions/mount.py index 262e7d9e..86b05859 100644 --- a/borgmatic/actions/mount.py +++ b/borgmatic/actions/mount.py @@ -8,7 +8,13 @@ logger = logging.getLogger(__name__) def run_mount( - repository, storage, local_borg_version, mount_arguments, local_path, remote_path, + repository, + config, + local_borg_version, + mount_arguments, + global_arguments, + local_path, + remote_path, ): ''' Run the "mount" action for the given repository. 
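> Editor's note: `resolve_archive_name`, called in the surrounding hunks, is what makes `--archive latest` work (see the #706 entry in NEWS above). The real implementation queries the repository via Borg rather than taking a listing as a parameter; this is only a behavioral sketch under that assumption:

```python
# Behavioral sketch only -- the real resolve_archive_name asks Borg for the
# archive listing instead of receiving archive_names as an argument.
def resolve_archive_name_sketch(archive, archive_names):
    if archive != 'latest':
        return archive  # Literal archive names pass through unchanged.

    return archive_names[-1]  # Otherwise, resolve to the most recent archive.


assert resolve_archive_name_sketch('latest', ['app-2023-05', 'app-2023-06']) == 'app-2023-06'
assert resolve_archive_name_sketch('app-2023-05', ['app-2023-05', 'app-2023-06']) == 'app-2023-05'
```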
@@ -17,26 +23,27 @@ def run_mount( repository, mount_arguments.repository ): if mount_arguments.archive: - logger.info(f'{repository}: Mounting archive {mount_arguments.archive}') + logger.info( + f'{repository.get("label", repository["path"])}: Mounting archive {mount_arguments.archive}' + ) else: # pragma: nocover - logger.info(f'{repository}: Mounting repository') + logger.info(f'{repository.get("label", repository["path"])}: Mounting repository') borgmatic.borg.mount.mount_archive( - repository, + repository['path'], borgmatic.borg.rlist.resolve_archive_name( - repository, + repository['path'], mount_arguments.archive, - storage, + config, local_borg_version, + global_arguments, local_path, remote_path, ), - mount_arguments.mount_point, - mount_arguments.paths, - mount_arguments.foreground, - mount_arguments.options, - storage, + mount_arguments, + config, local_borg_version, + global_arguments, local_path=local_path, remote_path=remote_path, ) diff --git a/borgmatic/actions/prune.py b/borgmatic/actions/prune.py index 76a42a9a..0cb074b5 100644 --- a/borgmatic/actions/prune.py +++ b/borgmatic/actions/prune.py @@ -10,9 +10,7 @@ logger = logging.getLogger(__name__) def run_prune( config_filename, repository, - storage, - retention, - hooks, + config, hook_context, local_borg_version, prune_arguments, @@ -30,28 +28,27 @@ def run_prune( return borgmatic.hooks.command.execute_hook( - hooks.get('before_prune'), - hooks.get('umask'), + config.get('before_prune'), + config.get('umask'), config_filename, 'pre-prune', global_arguments.dry_run, **hook_context, ) - logger.info(f'{repository}: Pruning archives{dry_run_label}') + logger.info(f'{repository.get("label", repository["path"])}: Pruning archives{dry_run_label}') borgmatic.borg.prune.prune_archives( global_arguments.dry_run, - repository, - storage, - retention, + repository['path'], + config, local_borg_version, + prune_arguments, + global_arguments, local_path=local_path, remote_path=remote_path, - stats=prune_arguments.stats, - list_archives=prune_arguments.list_archives, ) borgmatic.hooks.command.execute_hook( - hooks.get('after_prune'), - hooks.get('umask'), + config.get('after_prune'), + config.get('umask'), config_filename, 'post-prune', global_arguments.dry_run, diff --git a/borgmatic/actions/rcreate.py b/borgmatic/actions/rcreate.py index 59b147d7..32cdef40 100644 --- a/borgmatic/actions/rcreate.py +++ b/borgmatic/actions/rcreate.py @@ -8,7 +8,7 @@ logger = logging.getLogger(__name__) def run_rcreate( repository, - storage, + config, local_borg_version, rcreate_arguments, global_arguments, @@ -23,12 +23,13 @@ def run_rcreate( ): return - logger.info(f'{repository}: Creating repository') + logger.info(f'{repository.get("label", repository["path"])}: Creating repository') borgmatic.borg.rcreate.create_repository( global_arguments.dry_run, - repository, - storage, + repository['path'], + config, local_borg_version, + global_arguments, rcreate_arguments.encryption_mode, rcreate_arguments.source_repository, rcreate_arguments.copy_crypt_key, diff --git a/borgmatic/actions/restore.py b/borgmatic/actions/restore.py index 50c39737..06fd1b87 100644 --- a/borgmatic/actions/restore.py +++ b/borgmatic/actions/restore.py @@ -18,12 +18,12 @@ UNSPECIFIED_HOOK = object() def get_configured_database( - hooks, archive_database_names, hook_name, database_name, configuration_database_name=None + config, archive_database_names, hook_name, database_name, configuration_database_name=None ): ''' - Find the first database with the given hook name and 
database name in the configured hooks - dict and the given archive database names dict (from hook name to database names contained in - a particular backup archive). If UNSPECIFIED_HOOK is given as the hook name, search all database + Find the first database with the given hook name and database name in the configuration dict and + the given archive database names dict (from hook name to database names contained in a + particular backup archive). If UNSPECIFIED_HOOK is given as the hook name, search all database hooks for the named database. If a configuration database name is given, use that instead of the database name to lookup the database in the given hooks configuration. @@ -33,9 +33,13 @@ def get_configured_database( configuration_database_name = database_name if hook_name == UNSPECIFIED_HOOK: - hooks_to_search = hooks + hooks_to_search = { + hook_name: value + for (hook_name, value) in config.items() + if hook_name in borgmatic.hooks.dump.DATABASE_HOOK_NAMES + } else: - hooks_to_search = {hook_name: hooks[hook_name]} + hooks_to_search = {hook_name: config[hook_name]} return next( ( @@ -58,9 +62,7 @@ def get_configured_hook_name_and_database(hooks, database_name): def restore_single_database( repository, - location, - storage, - hooks, + config, local_borg_version, global_arguments, local_path, @@ -68,31 +70,34 @@ def restore_single_database( archive_name, hook_name, database, + connection_params, ): # pragma: no cover ''' - Given (among other things) an archive name, a database hook name, and a configured database + Given (among other things) an archive name, a database hook name, the hostname, + port, username and password as connection params, and a configured database configuration dict, restore that database from the archive. ''' - logger.info(f'{repository}: Restoring database {database["name"]}') + logger.info( + f'{repository.get("label", repository["path"])}: Restoring database {database["name"]}' + ) dump_pattern = borgmatic.hooks.dispatch.call_hooks( 'make_database_dump_pattern', - hooks, - repository, + config, + repository['path'], borgmatic.hooks.dump.DATABASE_HOOK_NAMES, - location, database['name'], )[hook_name] # Kick off a single database extract to stdout. extract_process = borgmatic.borg.extract.extract_archive( dry_run=global_arguments.dry_run, - repository=repository, + repository=repository['path'], archive=archive_name, paths=borgmatic.hooks.dump.convert_glob_patterns_to_borg_patterns([dump_pattern]), - location_config=location, - storage_config=storage, + config=config, local_borg_version=local_borg_version, + global_arguments=global_arguments, local_path=local_path, remote_path=remote_path, destination_path='/', @@ -104,26 +109,33 @@ def restore_single_database( # Run a single database restore, consuming the extract stdout (if any). 
borgmatic.hooks.dispatch.call_hooks( 'restore_database_dump', - {hook_name: [database]}, - repository, + config, + repository['path'], + database['name'], borgmatic.hooks.dump.DATABASE_HOOK_NAMES, - location, global_arguments.dry_run, extract_process, + connection_params, ) def collect_archive_database_names( - repository, archive, location, storage, local_borg_version, local_path, remote_path, + repository, + archive, + config, + local_borg_version, + global_arguments, + local_path, + remote_path, ): ''' - Given a local or remote repository path, a resolved archive name, a location configuration dict, - a storage configuration dict, the local Borg version, and local and remote Borg paths, query the - archive for the names of databases it contains and return them as a dict from hook name to a - sequence of database names. + Given a local or remote repository path, a resolved archive name, a configuration dict, the + local Borg version, global_arguments an argparse.Namespace, and local and remote Borg paths, + query the archive for the names of databases it contains and return them as a dict from hook + name to a sequence of database names. ''' borgmatic_source_directory = os.path.expanduser( - location.get( + config.get( 'borgmatic_source_directory', borgmatic.borg.state.DEFAULT_BORGMATIC_SOURCE_DIRECTORY ) ).lstrip('/') @@ -133,8 +145,9 @@ def collect_archive_database_names( dump_paths = borgmatic.borg.list.capture_archive_listing( repository, archive, - storage, + config, local_borg_version, + global_arguments, list_path=parent_dump_path, local_path=local_path, remote_path=remote_path, @@ -180,7 +193,7 @@ def find_databases_to_restore(requested_database_names, archive_database_names): if 'all' in restore_names[UNSPECIFIED_HOOK]: restore_names[UNSPECIFIED_HOOK].remove('all') - for (hook_name, database_names) in archive_database_names.items(): + for hook_name, database_names in archive_database_names.items(): restore_names.setdefault(hook_name, []).extend(database_names) # If a database is to be restored as part of "all", then remove it from restore names so @@ -235,9 +248,7 @@ def ensure_databases_found(restore_names, remaining_restore_names, found_names): def run_restore( repository, - location, - storage, - hooks, + config, local_borg_version, restore_arguments, global_arguments, @@ -255,31 +266,51 @@ def run_restore( ): return - logger.info(f'{repository}: Restoring databases from archive {restore_arguments.archive}') + logger.info( + f'{repository.get("label", repository["path"])}: Restoring databases from archive {restore_arguments.archive}' + ) borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured( 'remove_database_dumps', - hooks, - repository, + config, + repository['path'], borgmatic.hooks.dump.DATABASE_HOOK_NAMES, - location, global_arguments.dry_run, ) archive_name = borgmatic.borg.rlist.resolve_archive_name( - repository, restore_arguments.archive, storage, local_borg_version, local_path, remote_path, + repository['path'], + restore_arguments.archive, + config, + local_borg_version, + global_arguments, + local_path, + remote_path, ) archive_database_names = collect_archive_database_names( - repository, archive_name, location, storage, local_borg_version, local_path, remote_path, + repository['path'], + archive_name, + config, + local_borg_version, + global_arguments, + local_path, + remote_path, ) restore_names = find_databases_to_restore(restore_arguments.databases, archive_database_names) found_names = set() remaining_restore_names = {} + connection_params = { + 
'hostname': restore_arguments.hostname, + 'port': restore_arguments.port, + 'username': restore_arguments.username, + 'password': restore_arguments.password, + 'restore_path': restore_arguments.restore_path, + } for hook_name, database_names in restore_names.items(): for database_name in database_names: found_hook_name, found_database = get_configured_database( - hooks, archive_database_names, hook_name, database_name + config, archive_database_names, hook_name, database_name ) if not found_database: @@ -291,24 +322,23 @@ def run_restore( found_names.add(database_name) restore_single_database( repository, - location, - storage, - hooks, + config, local_borg_version, global_arguments, local_path, remote_path, archive_name, found_hook_name or hook_name, - found_database, + dict(found_database, **{'schemas': restore_arguments.schemas}), + connection_params, ) - # For any database that weren't found via exact matches in the hooks configuration, try to - # fallback to "all" entries. + # For any database that weren't found via exact matches in the configuration, try to fallback + # to "all" entries. for hook_name, database_names in remaining_restore_names.items(): for database_name in database_names: found_hook_name, found_database = get_configured_database( - hooks, archive_database_names, hook_name, database_name, 'all' + config, archive_database_names, hook_name, database_name, 'all' ) if not found_database: @@ -320,24 +350,22 @@ def run_restore( restore_single_database( repository, - location, - storage, - hooks, + config, local_borg_version, global_arguments, local_path, remote_path, archive_name, found_hook_name or hook_name, - database, + dict(database, **{'schemas': restore_arguments.schemas}), + connection_params, ) borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured( 'remove_database_dumps', - hooks, - repository, + config, + repository['path'], borgmatic.hooks.dump.DATABASE_HOOK_NAMES, - location, global_arguments.dry_run, ) diff --git a/borgmatic/actions/rinfo.py b/borgmatic/actions/rinfo.py index e7132c04..00de8922 100644 --- a/borgmatic/actions/rinfo.py +++ b/borgmatic/actions/rinfo.py @@ -8,7 +8,13 @@ logger = logging.getLogger(__name__) def run_rinfo( - repository, storage, local_borg_version, rinfo_arguments, local_path, remote_path, + repository, + config, + local_borg_version, + rinfo_arguments, + global_arguments, + local_path, + remote_path, ): ''' Run the "rinfo" action for the given repository. @@ -19,13 +25,16 @@ def run_rinfo( repository, rinfo_arguments.repository ): if not rinfo_arguments.json: # pragma: nocover - logger.answer(f'{repository}: Displaying repository summary information') + logger.answer( + f'{repository.get("label", repository["path"])}: Displaying repository summary information' + ) json_output = borgmatic.borg.rinfo.display_repository_info( - repository, - storage, + repository['path'], + config, local_borg_version, rinfo_arguments=rinfo_arguments, + global_arguments=global_arguments, local_path=local_path, remote_path=remote_path, ) diff --git a/borgmatic/actions/rlist.py b/borgmatic/actions/rlist.py index aa2032b1..a79920b6 100644 --- a/borgmatic/actions/rlist.py +++ b/borgmatic/actions/rlist.py @@ -8,7 +8,13 @@ logger = logging.getLogger(__name__) def run_rlist( - repository, storage, local_borg_version, rlist_arguments, local_path, remote_path, + repository, + config, + local_borg_version, + rlist_arguments, + global_arguments, + local_path, + remote_path, ): ''' Run the "rlist" action for the given repository. 
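> Editor's note: the `connection_params` dict built in `run_restore` above carries the new restore-time command-line flags (see #326 in NEWS) down into the database hooks. The hooks themselves aren't part of this diff, so the exact precedence is an assumption here — a plausible sketch in which command-line values win, then any `restore_`-prefixed configuration options, then the regular configured values:

```python
# Assumed precedence -- the actual logic lives in the database hooks, which
# this diff doesn't touch.
def effective_hostname(connection_params, database):
    return (
        connection_params.get('hostname')
        or database.get('restore_hostname')
        or database.get('hostname')
    )


database = {'name': 'users', 'hostname': 'db.internal', 'restore_hostname': 'db.staging'}
assert effective_hostname({'hostname': None}, database) == 'db.staging'
assert effective_hostname({'hostname': 'localhost'}, database) == 'localhost'
```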
@@ -19,13 +25,14 @@ def run_rlist( repository, rlist_arguments.repository ): if not rlist_arguments.json: # pragma: nocover - logger.answer(f'{repository}: Listing repository') + logger.answer(f'{repository.get("label", repository["path"])}: Listing repository') json_output = borgmatic.borg.rlist.list_repository( - repository, - storage, + repository['path'], + config, local_borg_version, rlist_arguments=rlist_arguments, + global_arguments=global_arguments, local_path=local_path, remote_path=remote_path, ) diff --git a/borgmatic/actions/transfer.py b/borgmatic/actions/transfer.py index 628f2735..4051b14e 100644 --- a/borgmatic/actions/transfer.py +++ b/borgmatic/actions/transfer.py @@ -7,7 +7,7 @@ logger = logging.getLogger(__name__) def run_transfer( repository, - storage, + config, local_borg_version, transfer_arguments, global_arguments, @@ -17,13 +17,16 @@ ''' Run the "transfer" action for the given repository. ''' - logger.info(f'{repository}: Transferring archives to repository') + logger.info( + f'{repository.get("label", repository["path"])}: Transferring archives to repository' + ) borgmatic.borg.transfer.transfer_archives( global_arguments.dry_run, - repository, - storage, + repository['path'], + config, local_borg_version, transfer_arguments, + global_arguments, local_path=local_path, remote_path=remote_path, ) diff --git a/borgmatic/borg/borg.py b/borgmatic/borg/borg.py index 460d9d68..1c0d6d1c 100644 --- a/borgmatic/borg/borg.py +++ b/borgmatic/borg/borg.py @@ -1,20 +1,19 @@ import logging +import borgmatic.commands.arguments import borgmatic.logger from borgmatic.borg import environment, flags -from borgmatic.execute import execute_command +from borgmatic.execute import DO_NOT_CAPTURE, execute_command logger = logging.getLogger(__name__) -REPOSITORYLESS_BORG_COMMANDS = {'serve', None} BORG_SUBCOMMANDS_WITH_SUBCOMMANDS = {'key', 'debug'} -BORG_SUBCOMMANDS_WITHOUT_REPOSITORY = (('debug', 'info'), ('debug', 'convert-profile'), ()) def run_arbitrary_borg( - repository, - storage_config, + repository_path, + config, local_borg_version, options, archive=None, @@ -22,12 +21,13 @@ remote_path=None, ): ''' - Given a local or remote repository path, a storage config dict, the local Borg version, a + Given a local or remote repository path, a configuration dict, the local Borg version, a sequence of arbitrary command-line Borg options, and an optional archive name, run an arbitrary - Borg command on the given repository/archive. + Borg command, passing in BORG_REPO and ARCHIVE environment variables for optional use in the + command. ''' borgmatic.logger.add_custom_log_levels() - lock_wait = storage_config.get('lock_wait', None) + lock_wait = config.get('lock_wait', None) try: options = options[1:] if options[0] == '--' else options @@ -36,33 +36,35 @@ command_options_start_index = 2 if options[0] in BORG_SUBCOMMANDS_WITH_SUBCOMMANDS else 1 borg_command = tuple(options[:command_options_start_index]) command_options = tuple(options[command_options_start_index:]) + + if borg_command and borg_command[0] in borgmatic.commands.arguments.ACTION_ALIASES.keys(): + logger.warning( + f"Borg's {borg_command[0]} subcommand is supported natively by borgmatic. 
Try this instead: borgmatic {borg_command[0]}" + ) except IndexError: borg_command = () command_options = () - if borg_command in BORG_SUBCOMMANDS_WITHOUT_REPOSITORY: - repository_archive_flags = () - elif archive: - repository_archive_flags = flags.make_repository_archive_flags( - repository, archive, local_borg_version - ) - else: - repository_archive_flags = flags.make_repository_flags(repository, local_borg_version) - full_command = ( (local_path,) + borg_command - + repository_archive_flags - + command_options + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ()) + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ()) + flags.make_flags('remote-path', remote_path) + flags.make_flags('lock-wait', lock_wait) + + command_options ) return execute_command( full_command, - output_log_level=logging.ANSWER, + output_file=DO_NOT_CAPTURE, borg_local_path=local_path, - extra_environment=environment.make_environment(storage_config), + shell=True, + extra_environment=dict( + (environment.make_environment(config) or {}), + **{ + 'BORG_REPO': repository_path, + 'ARCHIVE': archive if archive else '', + }, + ), ) diff --git a/borgmatic/borg/break_lock.py b/borgmatic/borg/break_lock.py index 820b1c56..c0ee5dbc 100644 --- a/borgmatic/borg/break_lock.py +++ b/borgmatic/borg/break_lock.py @@ -7,25 +7,31 @@ logger = logging.getLogger(__name__) def break_lock( - repository, storage_config, local_borg_version, local_path='borg', remote_path=None, + repository_path, + config, + local_borg_version, + global_arguments, + local_path='borg', + remote_path=None, ): ''' - Given a local or remote repository path, a storage configuration dict, the local Borg version, - and optional local and remote Borg paths, break any repository and cache locks leftover from Borg - aborting. + Given a local or remote repository path, a configuration dict, the local Borg version, an + argparse.Namespace of global arguments, and optional local and remote Borg paths, break any + repository and cache locks leftover from Borg aborting. 
''' - umask = storage_config.get('umask', None) - lock_wait = storage_config.get('lock_wait', None) + umask = config.get('umask', None) + lock_wait = config.get('lock_wait', None) full_command = ( (local_path, 'break-lock') + (('--remote-path', remote_path) if remote_path else ()) + (('--umask', str(umask)) if umask else ()) + + (('--log-json',) if global_arguments.log_json else ()) + (('--lock-wait', str(lock_wait)) if lock_wait else ()) + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ()) + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ()) - + flags.make_repository_flags(repository, local_borg_version) + + flags.make_repository_flags(repository_path, local_borg_version) ) - borg_environment = environment.make_environment(storage_config) + borg_environment = environment.make_environment(config) execute_command(full_command, borg_local_path=local_path, extra_environment=borg_environment) diff --git a/borgmatic/borg/check.py b/borgmatic/borg/check.py index 2914d83b..eabbdd31 100644 --- a/borgmatic/borg/check.py +++ b/borgmatic/borg/check.py @@ -1,5 +1,7 @@ import argparse import datetime +import hashlib +import itertools import json import logging import os @@ -12,18 +14,17 @@ DEFAULT_CHECKS = ( {'name': 'repository', 'frequency': '1 month'}, {'name': 'archives', 'frequency': '1 month'}, ) -DEFAULT_PREFIX = '{hostname}-' # noqa: FS003 logger = logging.getLogger(__name__) -def parse_checks(consistency_config, only_checks=None): +def parse_checks(config, only_checks=None): ''' - Given a consistency config with a "checks" sequence of dicts and an optional list of override + Given a configuration dict with a "checks" sequence of dicts and an optional list of override checks, return a tuple of named checks to run. - For example, given a retention config of: + For example, given a config of: {'checks': ({'name': 'repository'}, {'name': 'archives'})} @@ -35,8 +36,7 @@ def parse_checks(consistency_config, only_checks=None): has a name of "disabled", return an empty tuple, meaning that no checks should be run. ''' checks = only_checks or tuple( - check_config['name'] - for check_config in (consistency_config.get('checks', None) or DEFAULT_CHECKS) + check_config['name'] for check_config in (config.get('checks', None) or DEFAULT_CHECKS) ) checks = tuple(check.lower() for check in checks) if 'disabled' in checks: @@ -89,17 +89,22 @@ def parse_frequency(frequency): def filter_checks_on_frequency( - location_config, consistency_config, borg_repository_id, checks, force + config, + borg_repository_id, + checks, + force, + archives_check_id=None, ): ''' - Given a location config, a consistency config with a "checks" sequence of dicts, a Borg - repository ID, a sequence of checks, and whether to force checks to run, filter down those - checks based on the configured "frequency" for each check as compared to its check time file. + Given a configuration dict with a "checks" sequence of dicts, a Borg repository ID, a sequence + of checks, whether to force checks to run, and an ID for the archives check potentially being + run (if any), filter down those checks based on the configured "frequency" for each check as + compared to its check time file. In other words, a check whose check time file's timestamp is too new (based on the configured frequency) will get cut from the returned sequence of checks. 
Example: - consistency_config = { + config = { 'checks': [ { 'name': 'archives', @@ -108,9 +113,9 @@ ] } - When this function is called with that consistency_config and "archives" in checks, "archives" - will get filtered out of the returned result if its check time file is newer than 2 weeks old, - indicating that it's not yet time to run that check again. + When this function is called with that config and "archives" in checks, "archives" will get + filtered out of the returned result if its check time file is newer than 2 weeks old, indicating + that it's not yet time to run that check again. Raise ValueError if a frequency cannot be parsed. ''' @@ -119,7 +124,7 @@ if force: return tuple(filtered_checks) - for check_config in consistency_config.get('checks', DEFAULT_CHECKS): + for check_config in config.get('checks', DEFAULT_CHECKS): check = check_config['name'] if checks and check not in checks: continue @@ -128,9 +133,7 @@ if not frequency_delta: continue - check_time = read_check_time( - make_check_time_path(location_config, borg_repository_id, check) - ) + check_time = probe_for_check_time(config, borg_repository_id, check, archives_check_id) if not check_time: continue @@ -139,17 +142,66 @@ if datetime.datetime.now() < check_time + frequency_delta: remaining = check_time + frequency_delta - datetime.datetime.now() logger.info( - f'Skipping {check} check due to configured frequency; {remaining} until next check' + f'Skipping {check} check due to configured frequency; {remaining} until next check (use --force to check anyway)' ) filtered_checks.remove(check) return tuple(filtered_checks) -def make_check_flags(local_borg_version, checks, check_last=None, prefix=None): +def make_archive_filter_flags(local_borg_version, config, checks, check_last=None, prefix=None): ''' - Given the local Borg version and a parsed sequence of checks, transform the checks into tuple of - command-line flags. + Given the local Borg version, a configuration dict, a parsed sequence of checks, the check last + value, and a consistency check prefix, transform the checks into a tuple of command-line flags for + filtering archives in a check command. + + If a check_last value is given and "archives" is in checks, then include a "--last" flag. And if + a prefix value is given and "archives" is in checks, then include a "--match-archives" flag. + ''' + if 'archives' in checks or 'data' in checks: + return (('--last', str(check_last)) if check_last else ()) + ( + ( + ('--match-archives', f'sh:{prefix}*') + if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version) + else ('--glob-archives', f'{prefix}*') + ) + if prefix + else ( + flags.make_match_archives_flags( + config.get('match_archives'), + config.get('archive_name_format'), + local_borg_version, + ) + ) + ) + + if check_last: + logger.warning( + 'Ignoring check_last option, as "archives" or "data" are not in consistency checks' + ) + if prefix: + logger.warning( + 'Ignoring consistency prefix option, as "archives" or "data" are not in consistency checks' + ) + + return () + + +def make_archives_check_id(archive_filter_flags): + ''' + Given a sequence of flags to filter archives, return a unique hash corresponding to those + particular flags. If there are no flags, return None.
+ ''' + if not archive_filter_flags: + return None + + return hashlib.sha256(' '.join(archive_filter_flags).encode()).hexdigest() + + +def make_check_flags(checks, archive_filter_flags): + ''' + Given a parsed sequence of checks and a sequence of flags to filter archives, transform the + checks into a tuple of command-line check flags. For example, given parsed checks of: @@ -161,10 +213,6 @@ However, if both "repository" and "archives" are in checks, then omit them from the returned flags because Borg does both checks by default. If "data" is in checks, that implies "archives". - - Additionally, if a check_last value is given and "archives" is in checks, then include a - "--last" flag. And if a prefix value is given and "archives" is in checks, then include a - "--match-archives" flag. ''' if 'data' in checks: data_flags = ('--verify-data',) @@ -172,25 +220,7 @@ else: data_flags = () - if 'archives' in checks: - last_flags = ('--last', str(check_last)) if check_last else () - if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version): - match_archives_flags = ('--match-archives', f'sh:{prefix}*') if prefix else () - else: - match_archives_flags = ('--glob-archives', f'{prefix}*') if prefix else () - else: - last_flags = () - match_archives_flags = () - if check_last: - logger.warning( - 'Ignoring check_last option, as "archives" or "data" are not in consistency checks' - ) - if prefix: - logger.warning( - 'Ignoring consistency prefix option, as "archives" or "data" are not in consistency checks' - ) - - common_flags = last_flags + match_archives_flags + data_flags + common_flags = (archive_filter_flags if 'archives' in checks else ()) + data_flags if {'repository', 'archives'}.issubset(set(checks)): return common_flags @@ -201,18 +231,27 @@ ) -def make_check_time_path(location_config, borg_repository_id, check_type): +def make_check_time_path(config, borg_repository_id, check_type, archives_check_id=None): ''' - Given a location configuration dict, a Borg repository ID, and the name of a check type - ("repository", "archives", etc.), return a path for recording that check's time (the time of - that check last occurring). + Given a configuration dict, a Borg repository ID, the name of a check type ("repository", + "archives", etc.), and a unique hash of the archives filter flags, return a path for recording + that check's time (the time of that check last occurring).
''' + borgmatic_source_directory = os.path.expanduser( + config.get('borgmatic_source_directory', state.DEFAULT_BORGMATIC_SOURCE_DIRECTORY) + ) + + if check_type in ('archives', 'data'): + return os.path.join( + borgmatic_source_directory, + 'checks', + borg_repository_id, + check_type, + archives_check_id if archives_check_id else 'all', + ) + return os.path.join( - os.path.expanduser( - location_config.get( - 'borgmatic_source_directory', state.DEFAULT_BORGMATIC_SOURCE_DIRECTORY - ) - ), + borgmatic_source_directory, 'checks', borg_repository_id, check_type, @@ -242,12 +281,79 @@ def read_check_time(path): return None +def probe_for_check_time(config, borg_repository_id, check, archives_check_id): + ''' + Given a configuration dict, a Borg repository ID, the name of a check type ("repository", + "archives", etc.), and a unique hash of the archives filter flags, return a the corresponding + check time or None if such a check time does not exist. + + When the check type is "archives" or "data", this function probes two different paths to find + the check time, e.g.: + + ~/.borgmatic/checks/1234567890/archives/9876543210 + ~/.borgmatic/checks/1234567890/archives/all + + ... and returns the maximum modification time of the files found (if any). The first path + represents a more specific archives check time (a check on a subset of archives), and the second + is a fallback to the last "all" archives check. + + For other check types, this function reads from a single check time path, e.g.: + + ~/.borgmatic/checks/1234567890/repository + ''' + check_times = ( + read_check_time(group[0]) + for group in itertools.groupby( + ( + make_check_time_path(config, borg_repository_id, check, archives_check_id), + make_check_time_path(config, borg_repository_id, check), + ) + ) + ) + + try: + return max(check_time for check_time in check_times if check_time) + except ValueError: + return None + + +def upgrade_check_times(config, borg_repository_id): + ''' + Given a configuration dict and a Borg repository ID, upgrade any corresponding check times on + disk from old-style paths to new-style paths. + + Currently, the only upgrade performed is renaming an archive or data check path that looks like: + + ~/.borgmatic/checks/1234567890/archives + + to: + + ~/.borgmatic/checks/1234567890/archives/all + ''' + for check_type in ('archives', 'data'): + new_path = make_check_time_path(config, borg_repository_id, check_type, 'all') + old_path = os.path.dirname(new_path) + temporary_path = f'{old_path}.temp' + + if not os.path.isfile(old_path) and not os.path.isfile(temporary_path): + continue + + logger.debug(f'Upgrading archives check time from {old_path} to {new_path}') + + try: + os.rename(old_path, temporary_path) + except FileNotFoundError: + pass + + os.mkdir(old_path) + os.rename(temporary_path, new_path) + + def check_archives( - repository, - location_config, - storage_config, - consistency_config, + repository_path, + config, local_borg_version, + global_arguments, local_path='borg', remote_path=None, progress=None, @@ -256,10 +362,9 @@ def check_archives( force=None, ): ''' - Given a local or remote repository path, a storage config dict, a consistency config dict, - local/remote commands to run, whether to include progress information, whether to attempt a - repair, and an optional list of checks to use instead of configured checks, check the contained - Borg archives for consistency. 
+ Given a local or remote repository path, a configuration dict, local/remote commands to run, + whether to include progress information, whether to attempt a repair, and an optional list of + checks to use instead of configured checks, check the contained Borg archives for consistency. If there are no consistency checks to run, skip running them. @@ -268,30 +373,40 @@ def check_archives( try: borg_repository_id = json.loads( rinfo.display_repository_info( - repository, - storage_config, + repository_path, + config, local_borg_version, argparse.Namespace(json=True), + global_arguments, local_path, remote_path, ) )['repository']['id'] except (json.JSONDecodeError, KeyError): - raise ValueError(f'Cannot determine Borg repository ID for {repository}') + raise ValueError(f'Cannot determine Borg repository ID for {repository_path}') + + upgrade_check_times(config, borg_repository_id) + + check_last = config.get('check_last', None) + prefix = config.get('prefix') + configured_checks = parse_checks(config, only_checks) + lock_wait = None + extra_borg_options = config.get('extra_borg_options', {}).get('check', '') + archive_filter_flags = make_archive_filter_flags( + local_borg_version, config, configured_checks, check_last, prefix + ) + archives_check_id = make_archives_check_id(archive_filter_flags) checks = filter_checks_on_frequency( - location_config, - consistency_config, + config, borg_repository_id, - parse_checks(consistency_config, only_checks), + configured_checks, force, + archives_check_id, ) - check_last = consistency_config.get('check_last', None) - lock_wait = None - extra_borg_options = storage_config.get('extra_borg_options', {}).get('check', '') if set(checks).intersection({'repository', 'archives', 'data'}): - lock_wait = storage_config.get('lock_wait', None) + lock_wait = config.get('lock_wait') verbosity_flags = () if logger.isEnabledFor(logging.INFO): @@ -299,21 +414,20 @@ def check_archives( if logger.isEnabledFor(logging.DEBUG): verbosity_flags = ('--debug', '--show-rc') - prefix = consistency_config.get('prefix', DEFAULT_PREFIX) - full_command = ( (local_path, 'check') + (('--repair',) if repair else ()) - + make_check_flags(local_borg_version, checks, check_last, prefix) + + make_check_flags(checks, archive_filter_flags) + (('--remote-path', remote_path) if remote_path else ()) + + (('--log-json',) if global_arguments.log_json else ()) + (('--lock-wait', str(lock_wait)) if lock_wait else ()) + verbosity_flags + (('--progress',) if progress else ()) + (tuple(extra_borg_options.split(' ')) if extra_borg_options else ()) - + flags.make_repository_flags(repository, local_borg_version) + + flags.make_repository_flags(repository_path, local_borg_version) ) - borg_environment = environment.make_environment(storage_config) + borg_environment = environment.make_environment(config) # The Borg repair option triggers an interactive prompt, which won't work when output is # captured. And progress messes with the terminal directly. 
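Taken together, make_archives_check_id() and make_check_time_path() above give each distinct set of archive filter flags its own check-time file, with probe_for_check_time() falling back to the "all" file that upgrade_check_times() migrates old paths to. A short sketch of the resulting on-disk layout, assuming a hypothetical repository ID and filter flags:

    import hashlib
    import os

    borg_repository_id = '1234567890'  # hypothetical
    archive_filter_flags = ('--match-archives', 'sh:app-*')  # hypothetical

    # Same hashing as make_archives_check_id(): distinct filter flags yield distinct IDs.
    archives_check_id = hashlib.sha256(' '.join(archive_filter_flags).encode()).hexdigest()

    checks_dir = os.path.join(os.path.expanduser('~/.borgmatic'), 'checks', borg_repository_id)

    specific_path = os.path.join(checks_dir, 'archives', archives_check_id)  # probed first
    fallback_path = os.path.join(checks_dir, 'archives', 'all')  # probed second
    repository_check_path = os.path.join(checks_dir, 'repository')  # non-archive checks: one flat file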
@@ -325,10 +439,18 @@ execute_command(full_command, extra_environment=borg_environment) for check in checks: - write_check_time(make_check_time_path(location_config, borg_repository_id, check)) + write_check_time( + make_check_time_path(config, borg_repository_id, check, archives_check_id) + ) if 'extract' in checks: extract.extract_last_archive_dry_run( - storage_config, local_borg_version, repository, lock_wait, local_path, remote_path + config, + local_borg_version, + global_arguments, + repository_path, + lock_wait, + local_path, + remote_path, ) - write_check_time(make_check_time_path(location_config, borg_repository_id, 'extract')) + write_check_time(make_check_time_path(config, borg_repository_id, 'extract')) diff --git a/borgmatic/borg/compact.py b/borgmatic/borg/compact.py index 847ed26b..20bbe129 100644 --- a/borgmatic/borg/compact.py +++ b/borgmatic/borg/compact.py @@ -8,9 +8,10 @@ logger = logging.getLogger(__name__) def compact_segments( dry_run, - repository, - storage_config, + repository_path, + config, local_borg_version, + global_arguments, local_path='borg', remote_path=None, progress=False, @@ -18,17 +19,18 @@ threshold=None, ): ''' - Given dry-run flag, a local or remote repository path, a storage config dict, and the local - Borg version, compact the segments in a repository. + Given a dry-run flag, a local or remote repository path, a configuration dict, and the local Borg + version, compact the segments in a repository. ''' - umask = storage_config.get('umask', None) - lock_wait = storage_config.get('lock_wait', None) - extra_borg_options = storage_config.get('extra_borg_options', {}).get('compact', '') + umask = config.get('umask', None) + lock_wait = config.get('lock_wait', None) + extra_borg_options = config.get('extra_borg_options', {}).get('compact', '') full_command = ( (local_path, 'compact') + (('--remote-path', remote_path) if remote_path else ()) + (('--umask', str(umask)) if umask else ()) + + (('--log-json',) if global_arguments.log_json else ()) + (('--lock-wait', str(lock_wait)) if lock_wait else ()) + (('--progress',) if progress else ()) + (('--cleanup-commits',) if cleanup_commits else ()) @@ -36,16 +38,16 @@ + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ()) + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ()) + (tuple(extra_borg_options.split(' ')) if extra_borg_options else ()) - + flags.make_repository_flags(repository, local_borg_version) + + flags.make_repository_flags(repository_path, local_borg_version) ) if dry_run: - logging.info(f'{repository}: Skipping compact (dry run)') + logging.info(f'{repository_path}: Skipping compact (dry run)') return execute_command( full_command, output_log_level=logging.INFO, borg_local_path=local_path, - extra_environment=environment.make_environment(storage_config), + extra_environment=environment.make_environment(config), ) diff --git a/borgmatic/borg/create.py b/borgmatic/borg/create.py index d557a6ab..d778e565 100644 --- a/borgmatic/borg/create.py +++ b/borgmatic/borg/create.py @@ -146,12 +146,12 @@ def ensure_files_readable(*filename_lists): open(file_object).close() -def make_pattern_flags(location_config, pattern_filename=None): +def make_pattern_flags(config, pattern_filename=None): ''' - Given a location config dict with a potential patterns_from option, and a filename containing - any additional patterns, return the corresponding Borg flags for those files as a tuple.
+ Given a configuration dict with a potential patterns_from option, and a filename containing any + additional patterns, return the corresponding Borg flags for those files as a tuple. ''' - pattern_filenames = tuple(location_config.get('patterns_from') or ()) + ( + pattern_filenames = tuple(config.get('patterns_from') or ()) + ( (pattern_filename,) if pattern_filename else () ) @@ -162,12 +162,12 @@ def make_pattern_flags(location_config, pattern_filename=None): ) -def make_exclude_flags(location_config, exclude_filename=None): +def make_exclude_flags(config, exclude_filename=None): ''' - Given a location config dict with various exclude options, and a filename containing any exclude + Given a configuration dict with various exclude options, and a filename containing any exclude patterns, return the corresponding Borg flags as a tuple. ''' - exclude_filenames = tuple(location_config.get('exclude_from') or ()) + ( + exclude_filenames = tuple(config.get('exclude_from') or ()) + ( (exclude_filename,) if exclude_filename else () ) exclude_from_flags = tuple( @@ -175,17 +175,15 @@ def make_exclude_flags(location_config, exclude_filename=None): ('--exclude-from', exclude_filename) for exclude_filename in exclude_filenames ) ) - caches_flag = ('--exclude-caches',) if location_config.get('exclude_caches') else () + caches_flag = ('--exclude-caches',) if config.get('exclude_caches') else () if_present_flags = tuple( itertools.chain.from_iterable( ('--exclude-if-present', if_present) - for if_present in location_config.get('exclude_if_present', ()) + for if_present in config.get('exclude_if_present', ()) ) ) - keep_exclude_tags_flags = ( - ('--keep-exclude-tags',) if location_config.get('keep_exclude_tags') else () - ) - exclude_nodump_flags = ('--exclude-nodump',) if location_config.get('exclude_nodump') else () + keep_exclude_tags_flags = ('--keep-exclude-tags',) if config.get('keep_exclude_tags') else () + exclude_nodump_flags = ('--exclude-nodump',) if config.get('exclude_nodump') else () return ( exclude_from_flags @@ -280,17 +278,21 @@ def collect_special_file_paths( create_command, local_path, working_directory, borg_environment, skip_directories ): ''' - Given a Borg create command as a tuple, a local Borg path, a working directory, and a dict of + Given a Borg create command as a tuple, a local Borg path, a working directory, a dict of environment variables to pass to Borg, and a sequence of parent directories to skip, collect the paths for any special files (character devices, block devices, and named pipes / FIFOs) that Borg would encounter during a create. These are all paths that could cause Borg to hang if its --read-special flag is used. ''' + # Omit "--exclude-nodump" from the Borg dry run command, because that flag causes Borg to open + # files including any named pipe we've created. 
paths_output = execute_command_and_capture_output( - create_command + ('--dry-run', '--list'), + tuple(argument for argument in create_command if argument != '--exclude-nodump') + + ('--dry-run', '--list'), capture_stderr=True, working_directory=working_directory, extra_environment=borg_environment, + borg_local_path=local_path, ) paths = tuple( @@ -314,7 +316,7 @@ def check_all_source_directories_exist(source_directories): missing_directories = [ source_directory for source_directory in source_directories - if not os.path.exists(source_directory) + if not all([os.path.exists(directory) for directory in expand_directory(source_directory)]) ] if missing_directories: raise ValueError(f"Source directories do not exist: {', '.join(missing_directories)}") @@ -322,10 +324,10 @@ def create_archive( dry_run, - repository, - location_config, - storage_config, + repository_path, + config, local_borg_version, + global_arguments, local_path='borg', remote_path=None, progress=False, @@ -335,70 +337,70 @@ stream_processes=None, ): ''' - Given vebosity/dry-run flags, a local or remote repository path, a location config dict, and a - storage config dict, create a Borg archive and return Borg's JSON output (if any). + Given verbosity/dry-run flags, a local or remote repository path, and a configuration dict, + create a Borg archive and return Borg's JSON output (if any). If a sequence of stream processes is given (instances of subprocess.Popen), then execute the create command while also triggering the given processes to produce output. ''' borgmatic.logger.add_custom_log_levels() borgmatic_source_directories = expand_directories( - collect_borgmatic_source_directories(location_config.get('borgmatic_source_directory')) + collect_borgmatic_source_directories(config.get('borgmatic_source_directory')) ) - if location_config.get('source_directories_must_exist', False): - check_all_source_directories_exist(location_config.get('source_directories')) + if config.get('source_directories_must_exist', False): + check_all_source_directories_exist(config.get('source_directories')) sources = deduplicate_directories( map_directories_to_devices( expand_directories( - tuple(location_config.get('source_directories', ())) + borgmatic_source_directories + tuple(config.get('source_directories', ())) + + borgmatic_source_directories + + tuple(global_arguments.used_config_paths) ) ), additional_directory_devices=map_directories_to_devices( - expand_directories(pattern_root_directories(location_config.get('patterns'))) + expand_directories(pattern_root_directories(config.get('patterns'))) ), ) - ensure_files_readable(location_config.get('patterns_from'), location_config.get('exclude_from')) + ensure_files_readable(config.get('patterns_from'), config.get('exclude_from')) try: - working_directory = os.path.expanduser(location_config.get('working_directory')) + working_directory = os.path.expanduser(config.get('working_directory')) except TypeError: working_directory = None pattern_file = ( - write_pattern_file(location_config.get('patterns'), sources) - if location_config.get('patterns') or location_config.get('patterns_from') + write_pattern_file(config.get('patterns'), sources) + if config.get('patterns') or config.get('patterns_from') else None ) - exclude_file = write_pattern_file( - expand_home_directories(location_config.get('exclude_patterns')) - ) - checkpoint_interval = storage_config.get('checkpoint_interval', None) - checkpoint_volume = 
storage_config.get('checkpoint_volume', None) - chunker_params = storage_config.get('chunker_params', None) - compression = storage_config.get('compression', None) - upload_rate_limit = storage_config.get('upload_rate_limit', None) - umask = storage_config.get('umask', None) - lock_wait = storage_config.get('lock_wait', None) + exclude_file = write_pattern_file(expand_home_directories(config.get('exclude_patterns'))) + checkpoint_interval = config.get('checkpoint_interval', None) + checkpoint_volume = config.get('checkpoint_volume', None) + chunker_params = config.get('chunker_params', None) + compression = config.get('compression', None) + upload_rate_limit = config.get('upload_rate_limit', None) + umask = config.get('umask', None) + lock_wait = config.get('lock_wait', None) list_filter_flags = make_list_filter_flags(local_borg_version, dry_run) - files_cache = location_config.get('files_cache') - archive_name_format = storage_config.get('archive_name_format', DEFAULT_ARCHIVE_NAME_FORMAT) - extra_borg_options = storage_config.get('extra_borg_options', {}).get('create', '') + files_cache = config.get('files_cache') + archive_name_format = config.get('archive_name_format', DEFAULT_ARCHIVE_NAME_FORMAT) + extra_borg_options = config.get('extra_borg_options', {}).get('create', '') if feature.available(feature.Feature.ATIME, local_borg_version): - atime_flags = ('--atime',) if location_config.get('atime') is True else () + atime_flags = ('--atime',) if config.get('atime') is True else () else: - atime_flags = ('--noatime',) if location_config.get('atime') is False else () + atime_flags = ('--noatime',) if config.get('atime') is False else () if feature.available(feature.Feature.NOFLAGS, local_borg_version): - noflags_flags = ('--noflags',) if location_config.get('flags') is False else () + noflags_flags = ('--noflags',) if config.get('flags') is False else () else: - noflags_flags = ('--nobsdflags',) if location_config.get('flags') is False else () + noflags_flags = ('--nobsdflags',) if config.get('flags') is False else () if feature.available(feature.Feature.NUMERIC_IDS, local_borg_version): - numeric_ids_flags = ('--numeric-ids',) if location_config.get('numeric_ids') else () + numeric_ids_flags = ('--numeric-ids',) if config.get('numeric_ids') else () else: - numeric_ids_flags = ('--numeric-owner',) if location_config.get('numeric_ids') else () + numeric_ids_flags = ('--numeric-owner',) if config.get('numeric_ids') else () if feature.available(feature.Feature.UPLOAD_RATELIMIT, local_borg_version): upload_ratelimit_flags = ( @@ -409,35 +411,32 @@ def create_archive( ('--remote-ratelimit', str(upload_rate_limit)) if upload_rate_limit else () ) - if stream_processes and location_config.get('read_special') is False: + if stream_processes and config.get('read_special') is False: logger.warning( - f'{repository}: Ignoring configured "read_special" value of false, as true is needed for database hooks.' + f'{repository_path}: Ignoring configured "read_special" value of false, as true is needed for database hooks.' 
) create_command = ( tuple(local_path.split(' ')) + ('create',) - + make_pattern_flags(location_config, pattern_file.name if pattern_file else None) - + make_exclude_flags(location_config, exclude_file.name if exclude_file else None) + + make_pattern_flags(config, pattern_file.name if pattern_file else None) + + make_exclude_flags(config, exclude_file.name if exclude_file else None) + (('--checkpoint-interval', str(checkpoint_interval)) if checkpoint_interval else ()) + (('--checkpoint-volume', str(checkpoint_volume)) if checkpoint_volume else ()) + (('--chunker-params', chunker_params) if chunker_params else ()) + (('--compression', compression) if compression else ()) + upload_ratelimit_flags - + ( - ('--one-file-system',) - if location_config.get('one_file_system') or stream_processes - else () - ) + + (('--one-file-system',) if config.get('one_file_system') or stream_processes else ()) + numeric_ids_flags + atime_flags - + (('--noctime',) if location_config.get('ctime') is False else ()) - + (('--nobirthtime',) if location_config.get('birthtime') is False else ()) - + (('--read-special',) if location_config.get('read_special') or stream_processes else ()) + + (('--noctime',) if config.get('ctime') is False else ()) + + (('--nobirthtime',) if config.get('birthtime') is False else ()) + + (('--read-special',) if config.get('read_special') or stream_processes else ()) + noflags_flags + (('--files-cache', files_cache) if files_cache else ()) + (('--remote-path', remote_path) if remote_path else ()) + (('--umask', str(umask)) if umask else ()) + + (('--log-json',) if global_arguments.log_json else ()) + (('--lock-wait', str(lock_wait)) if lock_wait else ()) + ( ('--list', '--filter', list_filter_flags) @@ -446,7 +445,9 @@ def create_archive( ) + (('--dry-run',) if dry_run else ()) + (tuple(extra_borg_options.split(' ')) if extra_borg_options else ()) - + flags.make_repository_archive_flags(repository, archive_name_format, local_borg_version) + + flags.make_repository_archive_flags( + repository_path, archive_name_format, local_borg_version + ) + (sources if not pattern_file else ()) ) @@ -461,12 +462,12 @@ def create_archive( # the terminal directly. output_file = DO_NOT_CAPTURE if progress else None - borg_environment = environment.make_environment(storage_config) + borg_environment = environment.make_environment(config) # If database hooks are enabled (as indicated by streaming processes), exclude files that might # cause Borg to hang. But skip this if the user has explicitly set the "read_special" to True. 
- if stream_processes and not location_config.get('read_special'): - logger.debug(f'{repository}: Collecting special file paths') + if stream_processes and not config.get('read_special'): + logger.debug(f'{repository_path}: Collecting special file paths') special_file_paths = collect_special_file_paths( create_command, local_path, @@ -477,15 +478,15 @@ def create_archive( if special_file_paths: logger.warning( - f'{repository}: Excluding special files to prevent Borg from hanging: {", ".join(special_file_paths)}' + f'{repository_path}: Excluding special files to prevent Borg from hanging: {", ".join(special_file_paths)}' ) exclude_file = write_pattern_file( expand_home_directories( - tuple(location_config.get('exclude_patterns') or ()) + special_file_paths + tuple(config.get('exclude_patterns') or ()) + special_file_paths ), pattern_file=exclude_file, ) - create_command += make_exclude_flags(location_config, exclude_file.name) + create_command += make_exclude_flags(config, exclude_file.name) create_command += ( (('--info',) if logger.getEffectiveLevel() == logging.INFO and not json else ()) @@ -507,7 +508,10 @@ def create_archive( ) elif output_log_level is None: return execute_command_and_capture_output( - create_command, working_directory=working_directory, extra_environment=borg_environment, + create_command, + working_directory=working_directory, + extra_environment=borg_environment, + borg_local_path=local_path, ) else: execute_command( diff --git a/borgmatic/borg/environment.py b/borgmatic/borg/environment.py index 1b14369a..6c7b6e7d 100644 --- a/borgmatic/borg/environment.py +++ b/borgmatic/borg/environment.py @@ -11,21 +11,25 @@ OPTION_TO_ENVIRONMENT_VARIABLE = { 'temporary_directory': 'TMPDIR', } -DEFAULT_BOOL_OPTION_TO_ENVIRONMENT_VARIABLE = { +DEFAULT_BOOL_OPTION_TO_DOWNCASE_ENVIRONMENT_VARIABLE = { 'relocated_repo_access_is_ok': 'BORG_RELOCATED_REPO_ACCESS_IS_OK', 'unknown_unencrypted_repo_access_is_ok': 'BORG_UNKNOWN_UNENCRYPTED_REPO_ACCESS_IS_OK', } +DEFAULT_BOOL_OPTION_TO_UPPERCASE_ENVIRONMENT_VARIABLE = { + 'check_i_know_what_i_am_doing': 'BORG_CHECK_I_KNOW_WHAT_I_AM_DOING', +} -def make_environment(storage_config): + +def make_environment(config): ''' - Given a borgmatic storage configuration dict, return its options converted to a Borg environment + Given a borgmatic configuration dict, return its options converted to a Borg environment variable dict. 
''' environment = {} for option_name, environment_variable_name in OPTION_TO_ENVIRONMENT_VARIABLE.items(): - value = storage_config.get(option_name) + value = config.get(option_name) if value: environment[environment_variable_name] = str(value) @@ -33,8 +37,17 @@ def make_environment(storage_config): for ( option_name, environment_variable_name, - ) in DEFAULT_BOOL_OPTION_TO_ENVIRONMENT_VARIABLE.items(): - value = storage_config.get(option_name, False) - environment[environment_variable_name] = 'yes' if value else 'no' + ) in DEFAULT_BOOL_OPTION_TO_DOWNCASE_ENVIRONMENT_VARIABLE.items(): + value = config.get(option_name) + if value is not None: + environment[environment_variable_name] = 'yes' if value else 'no' + + for ( + option_name, + environment_variable_name, + ) in DEFAULT_BOOL_OPTION_TO_UPPERCASE_ENVIRONMENT_VARIABLE.items(): + value = config.get(option_name) + if value is not None: + environment[environment_variable_name] = 'YES' if value else 'NO' return environment diff --git a/borgmatic/borg/export_tar.py b/borgmatic/borg/export_tar.py index 01d1b7ed..47e3c20d 100644 --- a/borgmatic/borg/export_tar.py +++ b/borgmatic/borg/export_tar.py @@ -9,12 +9,13 @@ logger = logging.getLogger(__name__) def export_tar_archive( dry_run, - repository, + repository_path, archive, paths, destination_path, - storage_config, + config, local_borg_version, + global_arguments, local_path='borg', remote_path=None, tar_filter=None, @@ -23,21 +24,22 @@ def export_tar_archive( ): ''' Given a dry-run flag, a local or remote repository path, an archive name, zero or more paths to - export from the archive, a destination path to export to, a storage configuration dict, the - local Borg version, optional local and remote Borg paths, an optional filter program, whether to - include per-file details, and an optional number of path components to strip, export the archive - into the given destination path as a tar-formatted file. + export from the archive, a destination path to export to, a configuration dict, the local Borg + version, optional local and remote Borg paths, an optional filter program, whether to include + per-file details, and an optional number of path components to strip, export the archive into + the given destination path as a tar-formatted file. If the destination path is "-", then stream the output to stdout instead of to a file. 
''' borgmatic.logger.add_custom_log_levels() - umask = storage_config.get('umask', None) - lock_wait = storage_config.get('lock_wait', None) + umask = config.get('umask', None) + lock_wait = config.get('lock_wait', None) full_command = ( (local_path, 'export-tar') + (('--remote-path', remote_path) if remote_path else ()) + (('--umask', str(umask)) if umask else ()) + + (('--log-json',) if global_arguments.log_json else ()) + (('--lock-wait', str(lock_wait)) if lock_wait else ()) + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ()) + (('--list',) if list_files else ()) @@ -45,7 +47,11 @@ def export_tar_archive( + (('--dry-run',) if dry_run else ()) + (('--tar-filter', tar_filter) if tar_filter else ()) + (('--strip-components', str(strip_components)) if strip_components else ()) - + flags.make_repository_archive_flags(repository, archive, local_borg_version,) + + flags.make_repository_archive_flags( + repository_path, + archive, + local_borg_version, + ) + (destination_path,) + (tuple(paths) if paths else ()) ) @@ -56,7 +62,7 @@ def export_tar_archive( output_log_level = logging.INFO if dry_run: - logging.info(f'{repository}: Skipping export to tar file (dry run)') + logging.info(f'{repository_path}: Skipping export to tar file (dry run)') return execute_command( @@ -64,5 +70,5 @@ def export_tar_archive( output_file=DO_NOT_CAPTURE if destination_path == '-' else None, output_log_level=output_log_level, borg_local_path=local_path, - extra_environment=environment.make_environment(storage_config), + extra_environment=environment.make_environment(config), ) diff --git a/borgmatic/borg/extract.py b/borgmatic/borg/extract.py index 6c32f7f0..dec203fc 100644 --- a/borgmatic/borg/extract.py +++ b/borgmatic/borg/extract.py @@ -2,6 +2,7 @@ import logging import os import subprocess +import borgmatic.config.validate from borgmatic.borg import environment, feature, flags, rlist from borgmatic.execute import DO_NOT_CAPTURE, execute_command @@ -9,9 +10,10 @@ logger = logging.getLogger(__name__) def extract_last_archive_dry_run( - storage_config, + config, local_borg_version, - repository, + global_arguments, + repository_path, lock_wait=None, local_path='borg', remote_path=None, @@ -20,8 +22,6 @@ def extract_last_archive_dry_run( Perform an extraction dry-run of the most recent archive. If there are no archives, skip the dry-run. ''' - remote_path_flags = ('--remote-path', remote_path) if remote_path else () - lock_wait_flags = ('--lock-wait', str(lock_wait)) if lock_wait else () verbosity_flags = () if logger.isEnabledFor(logging.DEBUG): verbosity_flags = ('--debug', '--show-rc') @@ -30,21 +30,30 @@ def extract_last_archive_dry_run( try: last_archive_name = rlist.resolve_archive_name( - repository, 'latest', storage_config, local_borg_version, local_path, remote_path + repository_path, + 'latest', + config, + local_borg_version, + global_arguments, + local_path, + remote_path, ) except ValueError: logger.warning('No archives found. 
Skipping extract consistency check.') return list_flag = ('--list',) if logger.isEnabledFor(logging.DEBUG) else () - borg_environment = environment.make_environment(storage_config) + borg_environment = environment.make_environment(config) full_extract_command = ( (local_path, 'extract', '--dry-run') - + remote_path_flags - + lock_wait_flags + + (('--remote-path', remote_path) if remote_path else ()) + + (('--log-json',) if global_arguments.log_json else ()) + + (('--lock-wait', str(lock_wait)) if lock_wait else ()) + verbosity_flags + list_flag - + flags.make_repository_archive_flags(repository, last_archive_name, local_borg_version) + + flags.make_repository_archive_flags( + repository_path, last_archive_name, local_borg_version + ) ) execute_command( @@ -57,9 +66,9 @@ def extract_archive( repository, archive, paths, - location_config, - storage_config, + config, local_borg_version, + global_arguments, local_path='borg', remote_path=None, destination_path=None, @@ -69,23 +78,23 @@ def extract_archive( ): ''' Given a dry-run flag, a local or remote repository path, an archive name, zero or more paths to - restore from the archive, the local Borg version string, location/storage configuration dicts, - optional local and remote Borg paths, and an optional destination path to extract to, extract - the archive into the current directory. + restore from the archive, the local Borg version string, an argparse.Namespace of global + arguments, a configuration dict, optional local and remote Borg paths, and an optional + destination path to extract to, extract the archive into the current directory. If extract to stdout is True, then start the extraction streaming to stdout, and return that extract process as an instance of subprocess.Popen. ''' - umask = storage_config.get('umask', None) - lock_wait = storage_config.get('lock_wait', None) + umask = config.get('umask', None) + lock_wait = config.get('lock_wait', None) if progress and extract_to_stdout: raise ValueError('progress and extract_to_stdout cannot both be set') if feature.available(feature.Feature.NUMERIC_IDS, local_borg_version): - numeric_ids_flags = ('--numeric-ids',) if location_config.get('numeric_ids') else () + numeric_ids_flags = ('--numeric-ids',) if config.get('numeric_ids') else () else: - numeric_ids_flags = ('--numeric-owner',) if location_config.get('numeric_ids') else () + numeric_ids_flags = ('--numeric-owner',) if config.get('numeric_ids') else () if strip_components == 'all': if not paths: @@ -99,6 +108,7 @@ def extract_archive( + (('--remote-path', remote_path) if remote_path else ()) + numeric_ids_flags + (('--umask', str(umask)) if umask else ()) + + (('--log-json',) if global_arguments.log_json else ()) + (('--lock-wait', str(lock_wait)) if lock_wait else ()) + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ()) + (('--debug', '--list', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ()) @@ -106,11 +116,17 @@ def extract_archive( + (('--strip-components', str(strip_components)) if strip_components else ()) + (('--progress',) if progress else ()) + (('--stdout',) if extract_to_stdout else ()) - + flags.make_repository_archive_flags(repository, archive, local_borg_version,) + + flags.make_repository_archive_flags( + # Make the repository path absolute so the working directory changes below don't + # prevent Borg from finding the repo. 
+ borgmatic.config.validate.normalize_repository_path(repository), + archive, + local_borg_version, + ) + (tuple(paths) if paths else ()) ) - borg_environment = environment.make_environment(storage_config) + borg_environment = environment.make_environment(config) # The progress output isn't compatible with captured and logged output, as progress messes with # the terminal directly. diff --git a/borgmatic/borg/feature.py b/borgmatic/borg/feature.py index 5294121d..b9311cd1 100644 --- a/borgmatic/borg/feature.py +++ b/borgmatic/borg/feature.py @@ -1,6 +1,6 @@ from enum import Enum -from pkg_resources import parse_version +from packaging.version import parse class Feature(Enum): @@ -18,17 +18,17 @@ class Feature(Enum): FEATURE_TO_MINIMUM_BORG_VERSION = { - Feature.COMPACT: parse_version('1.2.0a2'), # borg compact - Feature.ATIME: parse_version('1.2.0a7'), # borg create --atime - Feature.NOFLAGS: parse_version('1.2.0a8'), # borg create --noflags - Feature.NUMERIC_IDS: parse_version('1.2.0b3'), # borg create/extract/mount --numeric-ids - Feature.UPLOAD_RATELIMIT: parse_version('1.2.0b3'), # borg create --upload-ratelimit - Feature.SEPARATE_REPOSITORY_ARCHIVE: parse_version('2.0.0a2'), # --repo with separate archive - Feature.RCREATE: parse_version('2.0.0a2'), # borg rcreate - Feature.RLIST: parse_version('2.0.0a2'), # borg rlist - Feature.RINFO: parse_version('2.0.0a2'), # borg rinfo - Feature.MATCH_ARCHIVES: parse_version('2.0.0b3'), # borg --match-archives - Feature.EXCLUDED_FILES_MINUS: parse_version('2.0.0b5'), # --list --filter uses "-" for excludes + Feature.COMPACT: parse('1.2.0a2'), # borg compact + Feature.ATIME: parse('1.2.0a7'), # borg create --atime + Feature.NOFLAGS: parse('1.2.0a8'), # borg create --noflags + Feature.NUMERIC_IDS: parse('1.2.0b3'), # borg create/extract/mount --numeric-ids + Feature.UPLOAD_RATELIMIT: parse('1.2.0b3'), # borg create --upload-ratelimit + Feature.SEPARATE_REPOSITORY_ARCHIVE: parse('2.0.0a2'), # --repo with separate archive + Feature.RCREATE: parse('2.0.0a2'), # borg rcreate + Feature.RLIST: parse('2.0.0a2'), # borg rlist + Feature.RINFO: parse('2.0.0a2'), # borg rinfo + Feature.MATCH_ARCHIVES: parse('2.0.0b3'), # borg --match-archives + Feature.EXCLUDED_FILES_MINUS: parse('2.0.0b5'), # --list --filter uses "-" for excludes } @@ -37,4 +37,4 @@ def available(feature, borg_version): Given a Borg Feature constant and a Borg version string, return whether that feature is available in that version of Borg. ''' - return FEATURE_TO_MINIMUM_BORG_VERSION[feature] <= parse_version(borg_version) + return FEATURE_TO_MINIMUM_BORG_VERSION[feature] <= parse(borg_version) diff --git a/borgmatic/borg/flags.py b/borgmatic/borg/flags.py index 845e0ff3..986531bc 100644 --- a/borgmatic/borg/flags.py +++ b/borgmatic/borg/flags.py @@ -1,4 +1,5 @@ import itertools +import re from borgmatic.borg import feature @@ -33,7 +34,7 @@ def make_flags_from_arguments(arguments, excludes=()): ) -def make_repository_flags(repository, local_borg_version): +def make_repository_flags(repository_path, local_borg_version): ''' Given the path of a Borg repository and the local Borg version, return Borg-version-appropriate command-line flags (as a tuple) for selecting that repository. 
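The move from pkg_resources.parse_version to packaging.version.parse in feature.py above keeps the same pre-release-aware version ordering that the feature gating relies on. A quick, self-contained sanity check of that ordering:

    from packaging.version import parse

    # A final release sorts after its own pre-releases, so a Borg 1.2.0 release clears
    # the 1.2.0b3 threshold for features like --numeric-ids and --upload-ratelimit:
    assert parse('1.2.0b3') <= parse('1.2.0')

    # But no 1.2.x release clears the 2.0.0a2 threshold for the separate --repo flag:
    assert not parse('2.0.0a2') <= parse('1.2.7')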
@@ -42,17 +43,41 @@ def make_repository_flags(repository, local_borg_version): ('--repo',) if feature.available(feature.Feature.SEPARATE_REPOSITORY_ARCHIVE, local_borg_version) else () - ) + (repository,) + ) + (repository_path,) -def make_repository_archive_flags(repository, archive, local_borg_version): +def make_repository_archive_flags(repository_path, archive, local_borg_version): ''' Given the path of a Borg repository, an archive name or pattern, and the local Borg version, return Borg-version-appropriate command-line flags (as a tuple) for selecting that repository and archive. ''' return ( - ('--repo', repository, archive) + ('--repo', repository_path, archive) if feature.available(feature.Feature.SEPARATE_REPOSITORY_ARCHIVE, local_borg_version) - else (f'{repository}::{archive}',) + else (f'{repository_path}::{archive}',) ) + + +def make_match_archives_flags(match_archives, archive_name_format, local_borg_version): + ''' + Return match archives flags based on the given match archives value, if any. If it isn't set, + return match archives flags to match archives created with the given archive name format, if + any. This is done by replacing certain archive name format placeholders for ephemeral data (like + "{now}") with globs. + ''' + if match_archives: + if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version): + return ('--match-archives', match_archives) + else: + return ('--glob-archives', re.sub(r'^sh:', '', match_archives)) + + if not archive_name_format: + return () + + derived_match_archives = re.sub(r'\{(now|utcnow|pid)([:%\w\.-]*)\}', '*', archive_name_format) + + if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version): + return ('--match-archives', f'sh:{derived_match_archives}') + else: + return ('--glob-archives', f'{derived_match_archives}') diff --git a/borgmatic/borg/info.py b/borgmatic/borg/info.py index bcde24cd..3e596ca4 100644 --- a/borgmatic/borg/info.py +++ b/borgmatic/borg/info.py @@ -8,20 +8,21 @@ logger = logging.getLogger(__name__) def display_archives_info( - repository, - storage_config, + repository_path, + config, local_borg_version, info_arguments, + global_arguments, local_path='borg', remote_path=None, ): ''' - Given a local or remote repository path, a storage config dict, the local Borg version, and the - arguments to the info action, display summary information for Borg archives in the repository or - return JSON summary information. + Given a local or remote repository path, a configuration dict, the local Borg version, global + arguments as an argparse.Namespace, and the arguments to the info action, display summary + information for Borg archives in the repository or return JSON summary information. 
''' borgmatic.logger.add_custom_log_levels() - lock_wait = storage_config.get('lock_wait', None) + lock_wait = config.get('lock_wait', None) full_command = ( (local_path, 'info') @@ -36,6 +37,7 @@ def display_archives_info( else () ) + flags.make_flags('remote-path', remote_path) + + flags.make_flags('log-json', global_arguments.log_json) + flags.make_flags('lock-wait', lock_wait) + ( ( @@ -44,27 +46,32 @@ def display_archives_info( else flags.make_flags('glob-archives', f'{info_arguments.prefix}*') ) if info_arguments.prefix - else () + else ( + flags.make_match_archives_flags( + info_arguments.match_archives + or info_arguments.archive + or config.get('match_archives'), + config.get('archive_name_format'), + local_borg_version, + ) + ) ) + flags.make_flags_from_arguments( - info_arguments, excludes=('repository', 'archive', 'prefix') - ) - + flags.make_repository_flags(repository, local_borg_version) - + ( - flags.make_flags('match-archives', info_arguments.archive) - if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version) - else flags.make_flags('glob-archives', info_arguments.archive) + info_arguments, excludes=('repository', 'archive', 'prefix', 'match_archives') ) + + flags.make_repository_flags(repository_path, local_borg_version) ) if info_arguments.json: return execute_command_and_capture_output( - full_command, extra_environment=environment.make_environment(storage_config), + full_command, + extra_environment=environment.make_environment(config), + borg_local_path=local_path, ) else: execute_command( full_command, output_log_level=logging.ANSWER, borg_local_path=local_path, - extra_environment=environment.make_environment(storage_config), + extra_environment=environment.make_environment(config), ) diff --git a/borgmatic/borg/list.py b/borgmatic/borg/list.py index 916d17b0..5a245d97 100644 --- a/borgmatic/borg/list.py +++ b/borgmatic/borg/list.py @@ -14,26 +14,26 @@ ARCHIVE_FILTER_FLAGS_MOVED_TO_RLIST = ('prefix', 'match_archives', 'sort_by', 'f MAKE_FLAGS_EXCLUDES = ( 'repository', 'archive', - 'successful', 'paths', 'find_paths', ) + ARCHIVE_FILTER_FLAGS_MOVED_TO_RLIST def make_list_command( - repository, - storage_config, + repository_path, + config, local_borg_version, list_arguments, + global_arguments, local_path='borg', remote_path=None, ): ''' - Given a local or remote repository path, a storage config dict, the arguments to the list - action, and local and remote Borg paths, return a command as a tuple to list archives or paths - within an archive. + Given a local or remote repository path, a configuration dict, the arguments to the list action, + and local and remote Borg paths, return a command as a tuple to list archives or paths within an + archive. 
''' - lock_wait = storage_config.get('lock_wait', None) + lock_wait = config.get('lock_wait', None) return ( (local_path, 'list') @@ -48,14 +48,15 @@ def make_list_command( else () ) + flags.make_flags('remote-path', remote_path) + + flags.make_flags('log-json', global_arguments.log_json) + flags.make_flags('lock-wait', lock_wait) + flags.make_flags_from_arguments(list_arguments, excludes=MAKE_FLAGS_EXCLUDES) + ( flags.make_repository_archive_flags( - repository, list_arguments.archive, local_borg_version + repository_path, list_arguments.archive, local_borg_version ) if list_arguments.archive - else flags.make_repository_flags(repository, local_borg_version) + else flags.make_repository_flags(repository_path, local_borg_version) ) + (tuple(list_arguments.paths) if list_arguments.paths else ()) ) @@ -86,39 +87,43 @@ def make_find_paths(find_paths): def capture_archive_listing( - repository, + repository_path, archive, - storage_config, + config, local_borg_version, + global_arguments, list_path=None, local_path='borg', remote_path=None, ): ''' - Given a local or remote repository path, an archive name, a storage config dict, the local Borg - version, the archive path in which to list files, and local and remote Borg paths, capture the - output of listing that archive and return it as a list of file paths. + Given a local or remote repository path, an archive name, a configuration dict, the local Borg + version, global arguments as an argparse.Namespace, the archive path in which to list files, and + local and remote Borg paths, capture the output of listing that archive and return it as a list + of file paths. ''' - borg_environment = environment.make_environment(storage_config) + borg_environment = environment.make_environment(config) return tuple( execute_command_and_capture_output( make_list_command( - repository, - storage_config, + repository_path, + config, local_borg_version, argparse.Namespace( - repository=repository, + repository=repository_path, archive=archive, paths=[f'sh:{list_path}'], find_paths=None, json=None, format='{path}{NL}', # noqa: FS003 ), + global_arguments, local_path, remote_path, ), extra_environment=borg_environment, + borg_local_path=local_path, ) .strip('\n') .split('\n') @@ -126,19 +131,21 @@ def capture_archive_listing( def list_archive( - repository, - storage_config, + repository_path, + config, local_borg_version, list_arguments, + global_arguments, local_path='borg', remote_path=None, ): ''' - Given a local or remote repository path, a storage config dict, the local Borg version, the - arguments to the list action, and local and remote Borg paths, display the output of listing - the files of a Borg archive (or return JSON output). If list_arguments.find_paths are given, - list the files by searching across multiple archives. If neither find_paths nor archive name - are given, instead list the archives in the given repository. + Given a local or remote repository path, a configuration dict, the local Borg version, global + arguments as an argparse.Namespace, the arguments to the list action as an argparse.Namespace, + and local and remote Borg paths, display the output of listing the files of a Borg archive (or + return JSON output). If list_arguments.find_paths are given, list the files by searching across + multiple archives. If neither find_paths nor archive name are given, instead list the archives + in the given repository. 
''' borgmatic.logger.add_custom_log_levels() @@ -149,7 +156,7 @@ def list_archive( ) rlist_arguments = argparse.Namespace( - repository=repository, + repository=repository_path, short=list_arguments.short, format=list_arguments.format, json=list_arguments.json, @@ -160,7 +167,13 @@ def list_archive( last=list_arguments.last, ) return rlist.list_repository( - repository, storage_config, local_borg_version, rlist_arguments, local_path, remote_path + repository_path, + config, + local_borg_version, + rlist_arguments, + global_arguments, + local_path, + remote_path, ) if list_arguments.archive: @@ -175,13 +188,13 @@ def list_archive( 'The --json flag on the list action is not supported when using the --archive/--find flags.' ) - borg_environment = environment.make_environment(storage_config) + borg_environment = environment.make_environment(config) # If there are any paths to find (and there's not a single archive already selected), start by # getting a list of archives to search. if list_arguments.find_paths and not list_arguments.archive: rlist_arguments = argparse.Namespace( - repository=repository, + repository=repository_path, short=True, format=None, json=None, @@ -196,14 +209,16 @@ def list_archive( archive_lines = tuple( execute_command_and_capture_output( rlist.make_rlist_command( - repository, - storage_config, + repository_path, + config, local_borg_version, rlist_arguments, + global_arguments, local_path, remote_path, ), extra_environment=borg_environment, + borg_local_path=local_path, ) .strip('\n') .split('\n') @@ -213,7 +228,7 @@ def list_archive( # For each archive listed by Borg, run list on the contents of that archive. for archive in archive_lines: - logger.answer(f'{repository}: Listing archive {archive}') + logger.answer(f'{repository_path}: Listing archive {archive}') archive_arguments = copy.copy(list_arguments) archive_arguments.archive = archive @@ -224,10 +239,11 @@ def list_archive( setattr(archive_arguments, name, None) main_command = make_list_command( - repository, - storage_config, + repository_path, + config, local_borg_version, archive_arguments, + global_arguments, local_path, remote_path, ) + make_find_paths(list_arguments.find_paths) diff --git a/borgmatic/borg/mount.py b/borgmatic/borg/mount.py index 92d689b2..9d034688 100644 --- a/borgmatic/borg/mount.py +++ b/borgmatic/borg/mount.py @@ -7,38 +7,40 @@ logger = logging.getLogger(__name__) def mount_archive( - repository, + repository_path, archive, - mount_point, - paths, - foreground, - options, - storage_config, + mount_arguments, + config, local_borg_version, + global_arguments, local_path='borg', remote_path=None, ): ''' Given a local or remote repository path, an optional archive name, a filesystem mount point, zero or more paths to mount from the archive, extra Borg mount options, a storage configuration - dict, the local Borg version, and optional local and remote Borg paths, mount the archive onto - the mount point. + dict, the local Borg version, global arguments as an argparse.Namespace instance, and optional + local and remote Borg paths, mount the archive onto the mount point. 
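+
+    For example, mount_arguments might be an argparse.Namespace along these
+    (illustrative) lines, mirroring the flags of the mount action's command-line
+    parser:
+
+        argparse.Namespace(
+            mount_point='/mnt/backup', paths=None, foreground=False, options='ro',
+        )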
''' - umask = storage_config.get('umask', None) - lock_wait = storage_config.get('lock_wait', None) + umask = config.get('umask', None) + lock_wait = config.get('lock_wait', None) full_command = ( (local_path, 'mount') + (('--remote-path', remote_path) if remote_path else ()) + (('--umask', str(umask)) if umask else ()) + + (('--log-json',) if global_arguments.log_json else ()) + (('--lock-wait', str(lock_wait)) if lock_wait else ()) + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ()) + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ()) - + (('--foreground',) if foreground else ()) - + (('-o', options) if options else ()) + + flags.make_flags_from_arguments( + mount_arguments, + excludes=('repository', 'archive', 'mount_point', 'paths', 'options'), + ) + + (('-o', mount_arguments.options) if mount_arguments.options else ()) + ( ( - flags.make_repository_flags(repository, local_borg_version) + flags.make_repository_flags(repository_path, local_borg_version) + ( ('--match-archives', archive) if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version) @@ -47,19 +49,19 @@ def mount_archive( ) if feature.available(feature.Feature.SEPARATE_REPOSITORY_ARCHIVE, local_borg_version) else ( - flags.make_repository_archive_flags(repository, archive, local_borg_version) + flags.make_repository_archive_flags(repository_path, archive, local_borg_version) if archive - else flags.make_repository_flags(repository, local_borg_version) + else flags.make_repository_flags(repository_path, local_borg_version) ) ) - + (mount_point,) - + (tuple(paths) if paths else ()) + + (mount_arguments.mount_point,) + + (tuple(mount_arguments.paths) if mount_arguments.paths else ()) ) - borg_environment = environment.make_environment(storage_config) + borg_environment = environment.make_environment(config) # Don't capture the output when foreground mode is used so that ctrl-C can work properly. - if foreground: + if mount_arguments.foreground: execute_command( full_command, output_file=DO_NOT_CAPTURE, diff --git a/borgmatic/borg/prune.py b/borgmatic/borg/prune.py index 5be85de2..79a43da3 100644 --- a/borgmatic/borg/prune.py +++ b/borgmatic/borg/prune.py @@ -7,10 +7,10 @@ from borgmatic.execute import execute_command logger = logging.getLogger(__name__) -def make_prune_flags(retention_config, local_borg_version): +def make_prune_flags(config, local_borg_version): ''' - Given a retention config dict mapping from option name to value, tranform it into an iterable of - command-line name-value flag pairs. + Given a configuration dict mapping from option name to value, transform it into a sequence of + command-line flags.
For example, given a retention config of: @@ -23,61 +23,70 @@ def make_prune_flags(retention_config, local_borg_version): ('--keep-monthly', '6'), ) ''' - config = retention_config.copy() - prefix = config.pop('prefix', '{hostname}-') # noqa: FS003 + flag_pairs = ( + ('--' + option_name.replace('_', '-'), str(value)) + for option_name, value in config.items() + if option_name.startswith('keep_') and option_name != 'keep_exclude_tags' + ) + prefix = config.get('prefix') - if prefix: - if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version): - config['match_archives'] = f'sh:{prefix}*' - else: - config['glob_archives'] = f'{prefix}*' - - return ( - ('--' + option_name.replace('_', '-'), str(value)) for option_name, value in config.items() + return tuple(element for pair in flag_pairs for element in pair) + ( + ( + ('--match-archives', f'sh:{prefix}*') + if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version) + else ('--glob-archives', f'{prefix}*') + ) + if prefix + else ( + flags.make_match_archives_flags( + config.get('match_archives'), + config.get('archive_name_format'), + local_borg_version, + ) + ) ) def prune_archives( dry_run, - repository, - storage_config, - retention_config, + repository_path, + config, local_borg_version, + prune_arguments, + global_arguments, local_path='borg', remote_path=None, - stats=False, - list_archives=False, ): ''' - Given dry-run flag, a local or remote repository path, a storage config dict, and a - retention config dict, prune Borg archives according to the retention policy specified in that - configuration. + Given a dry-run flag, a local or remote repository path, a configuration dict, the local Borg + version, the arguments to the prune action, and global arguments as an argparse.Namespace, prune + Borg archives according to the retention policy specified in that configuration.
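+
+    For example, with a configuration containing keep_daily: 7 and with
+    prune_arguments.stats set, the executed command might resemble (a sketch,
+    assuming Borg 1.x and no other options):
+
+        ('borg', 'prune', '--keep-daily', '7', '--stats', 'repo.borg')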
''' borgmatic.logger.add_custom_log_levels() - umask = storage_config.get('umask', None) - lock_wait = storage_config.get('lock_wait', None) - extra_borg_options = storage_config.get('extra_borg_options', {}).get('prune', '') + umask = config.get('umask', None) + lock_wait = config.get('lock_wait', None) + extra_borg_options = config.get('extra_borg_options', {}).get('prune', '') full_command = ( (local_path, 'prune') - + tuple( - element - for pair in make_prune_flags(retention_config, local_borg_version) - for element in pair - ) + + make_prune_flags(config, local_borg_version) + (('--remote-path', remote_path) if remote_path else ()) + (('--umask', str(umask)) if umask else ()) + + (('--log-json',) if global_arguments.log_json else ()) + (('--lock-wait', str(lock_wait)) if lock_wait else ()) - + (('--stats',) if stats and not dry_run else ()) + + (('--stats',) if prune_arguments.stats and not dry_run else ()) + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ()) - + (('--list',) if list_archives else ()) + + flags.make_flags_from_arguments( + prune_arguments, + excludes=('repository', 'stats', 'list_archives'), + ) + + (('--list',) if prune_arguments.list_archives else ()) + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ()) + (('--dry-run',) if dry_run else ()) + (tuple(extra_borg_options.split(' ')) if extra_borg_options else ()) - + flags.make_repository_flags(repository, local_borg_version) + + flags.make_repository_flags(repository_path, local_borg_version) ) - if stats or list_archives: + if prune_arguments.stats or prune_arguments.list_archives: output_log_level = logging.ANSWER else: output_log_level = logging.INFO @@ -86,5 +95,5 @@ def prune_archives( full_command, output_log_level=output_log_level, borg_local_path=local_path, - extra_environment=environment.make_environment(storage_config), + extra_environment=environment.make_environment(config), ) diff --git a/borgmatic/borg/rcreate.py b/borgmatic/borg/rcreate.py index d3a8f7aa..8fc70d95 100644 --- a/borgmatic/borg/rcreate.py +++ b/borgmatic/borg/rcreate.py @@ -13,9 +13,10 @@ RINFO_REPOSITORY_NOT_FOUND_EXIT_CODE = 2 def create_repository( dry_run, - repository, - storage_config, + repository_path, + config, local_borg_version, + global_arguments, encryption_mode, source_repository=None, copy_crypt_key=False, @@ -26,27 +27,29 @@ def create_repository( remote_path=None, ): ''' - Given a dry-run flag, a local or remote repository path, a storage configuration dict, the local - Borg version, a Borg encryption mode, the path to another repo whose key material should be - reused, whether the repository should be append-only, and the storage quota to use, create the + Given a dry-run flag, a local or remote repository path, a configuration dict, the local Borg + version, a Borg encryption mode, the path to another repo whose key material should be reused, + whether the repository should be append-only, and the storage quota to use, create the repository. If the repository already exists, then log and skip creation. ''' try: rinfo.display_repository_info( - repository, - storage_config, + repository_path, + config, local_borg_version, argparse.Namespace(json=True), + global_arguments, local_path, remote_path, ) - logger.info(f'{repository}: Repository already exists. Skipping creation.') + logger.info(f'{repository_path}: Repository already exists. 
Skipping creation.') return except subprocess.CalledProcessError as error: if error.returncode != RINFO_REPOSITORY_NOT_FOUND_EXIT_CODE: raise - extra_borg_options = storage_config.get('extra_borg_options', {}).get('rcreate', '') + lock_wait = config.get('lock_wait') + extra_borg_options = config.get('extra_borg_options', {}).get('rcreate', '') rcreate_command = ( (local_path,) @@ -63,13 +66,15 @@ + (('--make-parent-dirs',) if make_parent_dirs else ()) + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ()) + (('--debug',) if logger.isEnabledFor(logging.DEBUG) else ()) + + (('--log-json',) if global_arguments.log_json else ()) + + (('--lock-wait', str(lock_wait)) if lock_wait else ()) + (('--remote-path', remote_path) if remote_path else ()) + (tuple(extra_borg_options.split(' ')) if extra_borg_options else ()) - + flags.make_repository_flags(repository, local_borg_version) + + flags.make_repository_flags(repository_path, local_borg_version) ) if dry_run: - logging.info(f'{repository}: Skipping repository creation (dry run)') + logger.info(f'{repository_path}: Skipping repository creation (dry run)') return # Do not capture output here, so as to support interactive prompts. @@ -77,5 +82,5 @@ rcreate_command, output_file=DO_NOT_CAPTURE, borg_local_path=local_path, - extra_environment=environment.make_environment(storage_config), + extra_environment=environment.make_environment(config), ) diff --git a/borgmatic/borg/rinfo.py b/borgmatic/borg/rinfo.py index 7bc9a5e9..ab4197e6 100644 --- a/borgmatic/borg/rinfo.py +++ b/borgmatic/borg/rinfo.py @@ -8,20 +8,21 @@ logger = logging.getLogger(__name__) def display_repository_info( - repository, - storage_config, + repository_path, + config, local_borg_version, rinfo_arguments, + global_arguments, local_path='borg', remote_path=None, ): ''' - Given a local or remote repository path, a storage config dict, the local Borg version, and the - arguments to the rinfo action, display summary information for the Borg repository or return - JSON summary information. + Given a local or remote repository path, a configuration dict, the local Borg version, the + arguments to the rinfo action, and global arguments as an argparse.Namespace, display summary + information for the Borg repository or return JSON summary information.
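+
+    For example, with rinfo_arguments.json set, this runs a command roughly like
+    ('borg', 'rinfo', '--json', '--repo', 'repo.borg') on Borg 2.x (or
+    ('borg', 'info', '--json', 'repo.borg') on Borg 1.x) and returns its output.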
''' borgmatic.logger.add_custom_log_levels() - lock_wait = storage_config.get('lock_wait', None) + lock_wait = config.get('lock_wait', None) full_command = ( (local_path,) @@ -41,16 +42,19 @@ def display_repository_info( else () ) + flags.make_flags('remote-path', remote_path) + + flags.make_flags('log-json', global_arguments.log_json) + flags.make_flags('lock-wait', lock_wait) + (('--json',) if rinfo_arguments.json else ()) - + flags.make_repository_flags(repository, local_borg_version) + + flags.make_repository_flags(repository_path, local_borg_version) ) - extra_environment = environment.make_environment(storage_config) + extra_environment = environment.make_environment(config) if rinfo_arguments.json: return execute_command_and_capture_output( - full_command, extra_environment=extra_environment, + full_command, + extra_environment=extra_environment, + borg_local_path=local_path, ) else: execute_command( diff --git a/borgmatic/borg/rlist.py b/borgmatic/borg/rlist.py index 43bc28d6..b6ceca31 100644 --- a/borgmatic/borg/rlist.py +++ b/borgmatic/borg/rlist.py @@ -8,63 +8,70 @@ logger = logging.getLogger(__name__) def resolve_archive_name( - repository, archive, storage_config, local_borg_version, local_path='borg', remote_path=None + repository_path, + archive, + config, + local_borg_version, + global_arguments, + local_path='borg', + remote_path=None, ): ''' - Given a local or remote repository path, an archive name, a storage config dict, a local Borg - path, and a remote Borg path, simply return the archive name. But if the archive name is - "latest", then instead introspect the repository for the latest archive and return its name. + Given a local or remote repository path, an archive name, a configuration dict, the local Borg + version, global arguments as an argparse.Namespace, a local Borg path, and a remote Borg path, + return the archive name. But if the archive name is "latest", then instead introspect the + repository for the latest archive and return its name. Raise ValueError if "latest" is given but there are no archives in the repository. 
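+
+    For example (illustrative values), resolving 'latest' might run a command
+    like 'borg list --last 1 --short repo.borg' under the hood and return an
+    archive name such as 'myhost-2023-06-01T02:00:00'.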
''' if archive != 'latest': return archive - lock_wait = storage_config.get('lock_wait', None) - full_command = ( ( local_path, 'rlist' if feature.available(feature.Feature.RLIST, local_borg_version) else 'list', ) + flags.make_flags('remote-path', remote_path) - + flags.make_flags('lock-wait', lock_wait) + + flags.make_flags('log-json', global_arguments.log_json) + + flags.make_flags('lock-wait', config.get('lock_wait')) + flags.make_flags('last', 1) + ('--short',) - + flags.make_repository_flags(repository, local_borg_version) + + flags.make_repository_flags(repository_path, local_borg_version) ) output = execute_command_and_capture_output( - full_command, extra_environment=environment.make_environment(storage_config), + full_command, + extra_environment=environment.make_environment(config), + borg_local_path=local_path, ) try: latest_archive = output.strip().splitlines()[-1] except IndexError: raise ValueError('No archives found in the repository') - logger.debug(f'{repository}: Latest archive is {latest_archive}') + logger.debug(f'{repository_path}: Latest archive is {latest_archive}') return latest_archive -MAKE_FLAGS_EXCLUDES = ('repository', 'prefix') +MAKE_FLAGS_EXCLUDES = ('repository', 'prefix', 'match_archives') def make_rlist_command( - repository, - storage_config, + repository_path, + config, local_borg_version, rlist_arguments, + global_arguments, local_path='borg', remote_path=None, ): ''' - Given a local or remote repository path, a storage config dict, the local Borg version, the - arguments to the rlist action, and local and remote Borg paths, return a command as a tuple to - list archives with a repository. + Given a local or remote repository path, a configuration dict, the local Borg version, the + arguments to the rlist action, global arguments as an argparse.Namespace instance, and local and + remote Borg paths, return a command as a tuple to list archives within a repository. ''' - lock_wait = storage_config.get('lock_wait', None) - return ( ( local_path, @@ -81,7 +88,8 @@ else () ) + flags.make_flags('remote-path', remote_path) - + flags.make_flags('lock-wait', lock_wait) + + flags.make_flags('log-json', global_arguments.log_json) + + flags.make_flags('lock-wait', config.get('lock_wait')) + ( ( flags.make_flags('match-archives', f'sh:{rlist_arguments.prefix}*') @@ -89,35 +97,51 @@ else flags.make_flags('glob-archives', f'{rlist_arguments.prefix}*') ) if rlist_arguments.prefix - else () + else ( + flags.make_match_archives_flags( + rlist_arguments.match_archives or config.get('match_archives'), + config.get('archive_name_format'), + local_borg_version, + ) + ) ) + flags.make_flags_from_arguments(rlist_arguments, excludes=MAKE_FLAGS_EXCLUDES) - + flags.make_repository_flags(repository, local_borg_version) + + flags.make_repository_flags(repository_path, local_borg_version) ) def list_repository( - repository, - storage_config, + repository_path, + config, local_borg_version, rlist_arguments, + global_arguments, local_path='borg', remote_path=None, ): ''' - Given a local or remote repository path, a storage config dict, the local Borg version, the - arguments to the list action, and local and remote Borg paths, display the output of listing - Borg archives in the given repository (or return JSON output).
+ Given a local or remote repository path, a configuration dict, the local Borg version, the + arguments to the list action, global arguments as an argparse.Namespace instance, and local and + remote Borg paths, display the output of listing Borg archives in the given repository (or + return JSON output). ''' borgmatic.logger.add_custom_log_levels() - borg_environment = environment.make_environment(storage_config) + borg_environment = environment.make_environment(config) main_command = make_rlist_command( - repository, storage_config, local_borg_version, rlist_arguments, local_path, remote_path + repository_path, + config, + local_borg_version, + rlist_arguments, + global_arguments, + local_path, + remote_path, ) if rlist_arguments.json: - return execute_command_and_capture_output(main_command, extra_environment=borg_environment) + return execute_command_and_capture_output( + main_command, extra_environment=borg_environment, borg_local_path=local_path + ) else: execute_command( main_command, diff --git a/borgmatic/borg/transfer.py b/borgmatic/borg/transfer.py index bad02d06..f91349fc 100644 --- a/borgmatic/borg/transfer.py +++ b/borgmatic/borg/transfer.py @@ -9,16 +9,18 @@ logger = logging.getLogger(__name__) def transfer_archives( dry_run, - repository, - storage_config, + repository_path, + config, local_borg_version, transfer_arguments, + global_arguments, local_path='borg', remote_path=None, ): ''' - Given a dry-run flag, a local or remote repository path, a storage config dict, the local Borg - version, and the arguments to the transfer action, transfer archives to the given repository. + Given a dry-run flag, a local or remote repository path, a configuration dict, the local Borg + version, the arguments to the transfer action, and global arguments as an argparse.Namespace + instance, transfer archives to the given repository. 
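+
+    For example, a dry-run transfer from another repository might execute a
+    command along these lines (a sketch; the transfer action requires Borg 2.x):
+
+        ('borg', 'transfer', '--repo', 'destination.borg', '--other-repo', 'source.borg', '--dry-run')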
''' borgmatic.logger.add_custom_log_levels() @@ -27,18 +29,24 @@ def transfer_archives( + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ()) + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ()) + flags.make_flags('remote-path', remote_path) - + flags.make_flags('lock-wait', storage_config.get('lock_wait', None)) - + (('--progress',) if transfer_arguments.progress else ()) + + flags.make_flags('log-json', global_arguments.log_json) + + flags.make_flags('lock-wait', config.get('lock_wait', None)) + ( - flags.make_flags( - 'match-archives', transfer_arguments.match_archives or transfer_arguments.archive + flags.make_flags_from_arguments( + transfer_arguments, + excludes=('repository', 'source_repository', 'archive', 'match_archives'), + ) + or ( + flags.make_match_archives_flags( + transfer_arguments.match_archives + or transfer_arguments.archive + or config.get('match_archives'), + config.get('archive_name_format'), + local_borg_version, + ) ) ) - + flags.make_flags_from_arguments( - transfer_arguments, - excludes=('repository', 'source_repository', 'archive', 'match_archives'), - ) - + flags.make_repository_flags(repository, local_borg_version) + + flags.make_repository_flags(repository_path, local_borg_version) + flags.make_flags('other-repo', transfer_arguments.source_repository) + flags.make_flags('dry-run', dry_run) ) @@ -48,5 +56,5 @@ def transfer_archives( output_log_level=logging.ANSWER, output_file=DO_NOT_CAPTURE if transfer_arguments.progress else None, borg_local_path=local_path, - extra_environment=environment.make_environment(storage_config), + extra_environment=environment.make_environment(config), ) diff --git a/borgmatic/borg/version.py b/borgmatic/borg/version.py index 6d6c302c..9ded62a7 100644 --- a/borgmatic/borg/version.py +++ b/borgmatic/borg/version.py @@ -6,9 +6,9 @@ from borgmatic.execute import execute_command_and_capture_output logger = logging.getLogger(__name__) -def local_borg_version(storage_config, local_path='borg'): +def local_borg_version(config, local_path='borg'): ''' - Given a storage configuration dict and a local Borg binary path, return a version string for it. + Given a configuration dict and a local Borg binary path, return a version string for it. Raise OSError or CalledProcessError if there is a problem running Borg. Raise ValueError if the version cannot be parsed. 
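+
+    For example (an illustrative run): if `borg --version` prints "borg 1.2.4",
+    this function returns '1.2.4'.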
@@ -19,7 +19,9 @@ def local_borg_version(storage_config, local_path='borg'): + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ()) ) output = execute_command_and_capture_output( - full_command, extra_environment=environment.make_environment(storage_config), + full_command, + extra_environment=environment.make_environment(config), + borg_local_path=local_path, ) try: diff --git a/borgmatic/commands/arguments.py b/borgmatic/commands/arguments.py index ddff26d8..eef24535 100644 --- a/borgmatic/commands/arguments.py +++ b/borgmatic/commands/arguments.py @@ -1,14 +1,17 @@ import collections -from argparse import Action, ArgumentParser +import itertools +import sys +from argparse import ArgumentParser from borgmatic.config import collect -SUBPARSER_ALIASES = { +ACTION_ALIASES = { 'rcreate': ['init', '-I'], 'prune': ['-p'], 'compact': [], 'create': ['-C'], 'check': ['-k'], + 'config': [], 'extract': ['-x'], 'export-tar': [], 'mount': ['-m'], @@ -24,119 +27,220 @@ } -def parse_subparser_arguments(unparsed_arguments, subparsers): +def get_subaction_parsers(action_parser): ''' - Given a sequence of arguments and a dict from subparser name to argparse.ArgumentParser - instance, give each requested action's subparser a shot at parsing all arguments. This allows - common arguments like "--repository" to be shared across multiple subparsers. + Given an argparse.ArgumentParser instance, look up the subactions in it and return a dict from + subaction name to subaction parser. + ''' + if not action_parser._subparsers: + return {} - Return the result as a tuple of (a dict mapping from subparser name to a parsed namespace of - arguments, a list of remaining arguments not claimed by any subparser). - ''' - arguments = collections.OrderedDict() - remaining_arguments = list(unparsed_arguments) - alias_to_subparser_name = { - alias: subparser_name - for subparser_name, aliases in SUBPARSER_ALIASES.items() - for alias in aliases + return { + subaction_name: subaction_parser + for group_action in action_parser._subparsers._group_actions + for subaction_name, subaction_parser in group_action.choices.items() } - # If the "borg" action is used, skip all other subparsers. This avoids confusion like - # "borg list" triggering borgmatic's own list action. - if 'borg' in unparsed_arguments: - subparsers = {'borg': subparsers['borg']} - for argument in remaining_arguments: - canonical_name = alias_to_subparser_name.get(argument, argument) - subparser = subparsers.get(canonical_name) +def get_subactions_for_actions(action_parsers): + ''' + Given a dict from action name to an argparse.ArgumentParser instance, make a map from action + name to the names of contained sub-actions. + ''' + return { + action: tuple( + subaction_name + for group_action in action_parser._subparsers._group_actions + for subaction_name in group_action.choices.keys() + ) + for action, action_parser in action_parsers.items() + if action_parser._subparsers + } - if not subparser: - continue - # If a parsed value happens to be the same as the name of a subparser, remove it from the - # remaining arguments. This prevents, for instance, "check --only extract" from triggering - # the "extract" subparser.
- parsed, unused_remaining = subparser.parse_known_args(unparsed_arguments) +def omit_values_colliding_with_action_names(unparsed_arguments, parsed_arguments): + ''' + Given a sequence of string arguments and a dict from action name to parsed argparse.Namespace + arguments, return the string arguments with any values omitted that happen to be the same as + the name of a borgmatic action. + + This prevents, for instance, "check --only extract" from triggering the "extract" action. + ''' + remaining_arguments = list(unparsed_arguments) + + for action_name, parsed in parsed_arguments.items(): for value in vars(parsed).values(): if isinstance(value, str): - if value in subparsers: + if value in ACTION_ALIASES.keys(): remaining_arguments.remove(value) elif isinstance(value, list): for item in value: - if item in subparsers: + if item in ACTION_ALIASES.keys(): remaining_arguments.remove(item) - arguments[canonical_name] = parsed + return tuple(remaining_arguments) - # If no actions are explicitly requested, assume defaults. - if not arguments and '--help' not in unparsed_arguments and '-h' not in unparsed_arguments: - for subparser_name in ('create', 'prune', 'compact', 'check'): - subparser = subparsers[subparser_name] - parsed, unused_remaining = subparser.parse_known_args(unparsed_arguments) - arguments[subparser_name] = parsed - remaining_arguments = list(unparsed_arguments) +def parse_and_record_action_arguments( + unparsed_arguments, parsed_arguments, action_parser, action_name, canonical_name=None +): + ''' + Given unparsed arguments as a sequence of strings, parsed arguments as a dict from action name + to parsed argparse.Namespace, a parser to parse with, an action name, and an optional canonical + action name (in case the action name is an alias), parse the arguments and return a tuple of + any remaining string arguments that were not parsed. Also record the parsed argparse.Namespace + by setting it into the given parsed arguments. If no parsing occurs because the given action + doesn't apply to the given unparsed arguments, return them unchanged. + ''' + filtered_arguments = omit_values_colliding_with_action_names( + unparsed_arguments, parsed_arguments ) - # Now ask each subparser, one by one, to greedily consume arguments. - for subparser_name, subparser in subparsers.items(): - if subparser_name not in arguments.keys(): + if action_name not in filtered_arguments: + return tuple(unparsed_arguments) + + parsed, remaining = action_parser.parse_known_args(filtered_arguments) + parsed_arguments[canonical_name or action_name] = parsed + + # Special case: If this is a "borg" action, greedily consume all arguments after (+1) the "borg" + # argument. + if action_name == 'borg': + borg_options_index = remaining.index('borg') + 1 + parsed_arguments['borg'].options = remaining[borg_options_index:] + remaining = remaining[:borg_options_index] + + return tuple(argument for argument in remaining if argument != action_name) + + +def get_unparsable_arguments(remaining_action_arguments): + ''' + Given a sequence of argument tuples (one per action parser that parsed arguments), determine the + remaining arguments that no action parsers have consumed.
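+
+    For example (a sketch):
+
+        get_unparsable_arguments((('--foo', 'bar'), ('--foo',)))
+
+    returns ('--foo',), since '--foo' is the only argument left over by every
+    parser; 'bar' was consumed by at least one of them.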
+ ''' + if not remaining_action_arguments: + return () + + return tuple( + argument + for argument in dict.fromkeys( + itertools.chain.from_iterable(remaining_action_arguments) + ).keys() + if all(argument in action_arguments for action_arguments in remaining_action_arguments) + ) + + +def parse_arguments_for_actions(unparsed_arguments, action_parsers, global_parser): + ''' + Given a sequence of arguments, a dict from action name to argparse.ArgumentParser instance, + and the global parser as an argparse.ArgumentParser instance, give each requested action's + parser a shot at parsing all arguments. This allows common arguments like "--repository" to be + shared across multiple action parsers. + + Return the result as a tuple of: (a dict mapping from action name to an argparse.Namespace of + parsed arguments, a tuple of argument tuples where each is the remaining arguments not claimed + by any action parser). + ''' + arguments = collections.OrderedDict() + help_requested = bool('--help' in unparsed_arguments or '-h' in unparsed_arguments) + remaining_action_arguments = [] + alias_to_action_name = { + alias: action_name for action_name, aliases in ACTION_ALIASES.items() for alias in aliases + } + + # If the "borg" action is used, skip all other action parsers. This avoids confusion like + # "borg list" triggering borgmatic's own list action. + if 'borg' in unparsed_arguments: + action_parsers = {'borg': action_parsers['borg']} + + # Ask each action parser, one by one, to parse arguments. + for argument in unparsed_arguments: + action_name = argument + canonical_name = alias_to_action_name.get(action_name, action_name) + action_parser = action_parsers.get(action_name) + + if not action_parser: continue - subparser = subparsers[subparser_name] - unused_parsed, remaining_arguments = subparser.parse_known_args(remaining_arguments) + subaction_parsers = get_subaction_parsers(action_parser) - # Special case: If "borg" is present in the arguments, consume all arguments after (+1) the - # "borg" action. - if 'borg' in arguments: - borg_options_index = remaining_arguments.index('borg') + 1 - arguments['borg'].options = remaining_arguments[borg_options_index:] - remaining_arguments = remaining_arguments[:borg_options_index] + # But first parse with subaction parsers, if any. + if subaction_parsers: + subactions_parsed = False - # Remove the subparser names themselves. - for subparser_name, subparser in subparsers.items(): - if subparser_name in remaining_arguments: - remaining_arguments.remove(subparser_name) + for subaction_name, subaction_parser in subaction_parsers.items(): + remaining_action_arguments.append( + tuple( + argument + for argument in parse_and_record_action_arguments( + unparsed_arguments, + arguments, + subaction_parser, + subaction_name, + ) + if argument != action_name + ) + ) - return (arguments, remaining_arguments) + if subaction_name in arguments: + subactions_parsed = True - -class Extend_action(Action): - ''' - An argparse action to support Python 3.8's "extend" action in older versions of Python. - ''' - - def __call__(self, parser, namespace, values, option_string=None): - items = getattr(namespace, self.dest, None) - - if items: - items.extend(values) + if not subactions_parsed: + if help_requested: + action_parser.print_help() + sys.exit(0) + else: + raise ValueError( + f"Missing sub-action after {action_name} action. Expected one of: {', '.join(get_subactions_for_actions(action_parsers)[action_name])}" + ) + # Otherwise, parse with the main action parser.
else: - setattr(namespace, self.dest, list(values)) + remaining_action_arguments.append( + parse_and_record_action_arguments( + unparsed_arguments, arguments, action_parser, action_name, canonical_name + ) + ) + + # If no actions were explicitly requested, assume defaults. + if not arguments and not help_requested: + for default_action_name in ('create', 'prune', 'compact', 'check'): + default_action_parser = action_parsers[default_action_name] + remaining_action_arguments.append( + parse_and_record_action_arguments( + tuple(unparsed_arguments) + (default_action_name,), + arguments, + default_action_parser, + default_action_name, + ) + ) + + arguments['global'], remaining = global_parser.parse_known_args(unparsed_arguments) + remaining_action_arguments.append(remaining) + + return ( + arguments, + tuple(remaining_action_arguments) if arguments else unparsed_arguments, + ) def make_parsers(): ''' - Build a top-level parser and its subparsers and return them as a tuple. + Build a global arguments parser, individual action parsers, and a combined parser containing + both. Return them as a tuple. The global parser is useful for parsing just global arguments + while ignoring actions, and the combined parser is handy for displaying help that includes + everything: global flags, a list of actions, etc. ''' config_paths = collect.get_default_config_paths(expand_home=True) unexpanded_config_paths = collect.get_default_config_paths(expand_home=False) global_parser = ArgumentParser(add_help=False) - global_parser.register('action', 'extend', Extend_action) global_group = global_parser.add_argument_group('global arguments') global_group.add_argument( '-c', '--config', - nargs='*', dest='config_paths', - default=config_paths, - help=f"Configuration filenames or directories, defaults to: {' '.join(unexpanded_config_paths)}", - ) - global_group.add_argument( - '--excludes', - dest='excludes_filename', - help='Deprecated in favor of exclude_patterns within configuration', + action='append', + help=f"Configuration filename or directory, can specify flag multiple times, defaults to: {' '.join(unexpanded_config_paths)}", ) global_group.add_argument( '-n', @@ -158,44 +262,52 @@ def make_parsers(): '-v', '--verbosity', type=int, - choices=range(-1, 3), + choices=range(-2, 3), default=0, - help='Display verbose progress to the console (from only errors to very verbose: -1, 0, 1, or 2)', + help='Display verbose progress to the console (disabled, errors only, default, some, or lots: -2, -1, 0, 1, or 2)', ) global_group.add_argument( '--syslog-verbosity', type=int, - choices=range(-1, 3), + choices=range(-2, 3), default=0, - help='Log verbose progress to syslog (from only errors to very verbose: -1, 0, 1, or 2). Ignored when console is interactive or --log-file is given', + help='Log verbose progress to syslog (disabled, errors only, default, some, or lots: -2, -1, 0, 1, or 2). Ignored when console is interactive or --log-file is given', ) global_group.add_argument( '--log-file-verbosity', type=int, - choices=range(-1, 3), + choices=range(-2, 3), default=0, - help='Log verbose progress to log file (from only errors to very verbose: -1, 0, 1, or 2). Only used when --log-file is given', + help='Log verbose progress to log file (disabled, errors only, default, some, or lots: -2, -1, 0, 1, or 2). 
Only used when --log-file is given', ) global_group.add_argument( '--monitoring-verbosity', type=int, - choices=range(-1, 3), + choices=range(-2, 3), default=0, - help='Log verbose progress to monitoring integrations that support logging (from only errors to very verbose: -1, 0, 1, or 2)', + help='Log verbose progress to monitoring integrations that support logging (disabled, errors only, default, some, or lots: -2, -1, 0, 1, or 2)', ) global_group.add_argument( '--log-file', type=str, - default=None, help='Write log messages to this file instead of syslog', ) + global_group.add_argument( + '--log-file-format', + type=str, + help='Log format string used for log messages written to the log file', + ) + global_group.add_argument( + '--log-json', + action='store_true', + help='Write log messages and console output as one JSON object per log line instead of formatted text', + ) global_group.add_argument( '--override', - metavar='SECTION.OPTION=VALUE', - nargs='+', + metavar='OPTION.SUBOPTION=VALUE', dest='overrides', - action='extend', - help='One or more configuration file options to override with specified values', + action='append', + help='Configuration file option to override with specified value, can specify flag multiple times', ) global_group.add_argument( '--no-environment-interpolation', @@ -209,6 +321,12 @@ action='store_true', help='Show bash completion script and exit', ) + global_group.add_argument( + '--fish-completion', + default=False, + action='store_true', + help='Show fish completion script and exit', + ) global_group.add_argument( '--version', dest='version', @@ -217,7 +335,7 @@ help='Display installed version number of borgmatic and exit', ) - top_level_parser = ArgumentParser( + global_plus_action_parser = ArgumentParser( description=''' Simple, configuration-driven backup software for servers and workstations. If none of the action options are given, then borgmatic defaults to: create, prune, compact, and @@ -226,14 +344,14 @@ parents=[global_parser], ) - subparsers = top_level_parser.add_subparsers( + action_parsers = global_plus_action_parser.add_subparsers( title='actions', metavar='', help='Specify zero or more actions. Defaults to create, prune, compact, and check.
Use --help with action for details:', ) - rcreate_parser = subparsers.add_parser( + rcreate_parser = action_parsers.add_parser( 'rcreate', - aliases=SUBPARSER_ALIASES['rcreate'], + aliases=ACTION_ALIASES['rcreate'], help='Create a new, empty Borg repository', description='Create a new, empty Borg repository', add_help=False, @@ -250,7 +368,7 @@ def make_parsers(): '--source-repository', '--other-repo', metavar='KEY_REPOSITORY', - help='Path to an existing Borg repository whose key material should be reused (Borg 2.x+ only)', + help='Path to an existing Borg repository whose key material should be reused [Borg 2.x+ only]', ) rcreate_group.add_argument( '--repository', @@ -259,13 +377,16 @@ def make_parsers(): rcreate_group.add_argument( '--copy-crypt-key', action='store_true', - help='Copy the crypt key used for authenticated encryption from the source repository, defaults to a new random key (Borg 2.x+ only)', + help='Copy the crypt key used for authenticated encryption from the source repository, defaults to a new random key [Borg 2.x+ only]', ) rcreate_group.add_argument( - '--append-only', action='store_true', help='Create an append-only repository', + '--append-only', + action='store_true', + help='Create an append-only repository', ) rcreate_group.add_argument( - '--storage-quota', help='Create a repository with a fixed storage quota', + '--storage-quota', + help='Create a repository with a fixed storage quota', ) rcreate_group.add_argument( '--make-parent-dirs', @@ -276,11 +397,11 @@ def make_parsers(): '-h', '--help', action='help', help='Show this help message and exit' ) - transfer_parser = subparsers.add_parser( + transfer_parser = action_parsers.add_parser( 'transfer', - aliases=SUBPARSER_ALIASES['transfer'], - help='Transfer archives from one repository to another, optionally upgrading the transferred data (Borg 2.0+ only)', - description='Transfer archives from one repository to another, optionally upgrading the transferred data (Borg 2.0+ only)', + aliases=ACTION_ALIASES['transfer'], + help='Transfer archives from one repository to another, optionally upgrading the transferred data [Borg 2.0+ only]', + description='Transfer archives from one repository to another, optionally upgrading the transferred data [Borg 2.0+ only]', add_help=False, ) transfer_group = transfer_parser.add_argument_group('transfer arguments') @@ -299,7 +420,7 @@ def make_parsers(): ) transfer_group.add_argument( '--upgrader', - help='Upgrader type used to convert the transfered data, e.g. "From12To20" to upgrade data from Borg 1.2 to 2.0 format, defaults to no conversion', + help='Upgrader type used to convert the transferred data, e.g. "From12To20" to upgrade data from Borg 1.2 to 2.0 format, defaults to no conversion', ) transfer_group.add_argument( '--progress', @@ -325,13 +446,33 @@ def make_parsers(): transfer_group.add_argument( '--last', metavar='N', help='Only transfer last N archives after other filters are applied' ) + transfer_group.add_argument( + '--oldest', + metavar='TIMESPAN', + help='Transfer archives within a specified time range starting from the timestamp of the oldest archive (e.g. 7d or 12m) [Borg 2.x+ only]', + ) + transfer_group.add_argument( + '--newest', + metavar='TIMESPAN', + help='Transfer archives within a time range that ends at timestamp of the newest archive and starts a specified time range ago (e.g. 
7d or 12m) [Borg 2.x+ only]', + ) + transfer_group.add_argument( + '--older', + metavar='TIMESPAN', + help='Transfer archives that are older than the specified time range (e.g. 7d or 12m) from the current time [Borg 2.x+ only]', + ) + transfer_group.add_argument( + '--newer', + metavar='TIMESPAN', + help='Transfer archives that are newer than the specified time range (e.g. 7d or 12m) from the current time [Borg 2.x+ only]', + ) transfer_group.add_argument( '-h', '--help', action='help', help='Show this help message and exit' ) - prune_parser = subparsers.add_parser( + prune_parser = action_parsers.add_parser( 'prune', - aliases=SUBPARSER_ALIASES['prune'], + aliases=ACTION_ALIASES['prune'], help='Prune archives according to the retention policy (with Borg 1.2+, run compact afterwards to actually free space)', description='Prune archives according to the retention policy (with Borg 1.2+, run compact afterwards to actually free space)', add_help=False, @@ -351,13 +492,33 @@ def make_parsers(): prune_group.add_argument( '--list', dest='list_archives', action='store_true', help='List archives kept/pruned' ) + prune_group.add_argument( + '--oldest', + metavar='TIMESPAN', + help='Prune archives within a specified time range starting from the timestamp of the oldest archive (e.g. 7d or 12m) [Borg 2.x+ only]', + ) + prune_group.add_argument( + '--newest', + metavar='TIMESPAN', + help='Prune archives within a time range that ends at timestamp of the newest archive and starts a specified time range ago (e.g. 7d or 12m) [Borg 2.x+ only]', + ) + prune_group.add_argument( + '--older', + metavar='TIMESPAN', + help='Prune archives that are older than the specified time range (e.g. 7d or 12m) from the current time [Borg 2.x+ only]', + ) + prune_group.add_argument( + '--newer', + metavar='TIMESPAN', + help='Prune archives that are newer than the specified time range (e.g. 
7d or 12m) from the current time [Borg 2.x+ only]', + ) prune_group.add_argument('-h', '--help', action='help', help='Show this help message and exit') - compact_parser = subparsers.add_parser( + compact_parser = action_parsers.add_parser( 'compact', - aliases=SUBPARSER_ALIASES['compact'], - help='Compact segments to free space (Borg 1.2+, borgmatic 1.5.23+ only)', - description='Compact segments to free space (Borg 1.2+, borgmatic 1.5.23+ only)', + aliases=ACTION_ALIASES['compact'], + help='Compact segments to free space [Borg 1.2+, borgmatic 1.5.23+ only]', + description='Compact segments to free space [Borg 1.2+, borgmatic 1.5.23+ only]', add_help=False, ) compact_group = compact_parser.add_argument_group('compact arguments') @@ -377,7 +538,7 @@ def make_parsers(): dest='cleanup_commits', default=False, action='store_true', - help='Cleanup commit-only 17-byte segment files left behind by Borg 1.1 (flag in Borg 1.2 only)', + help='Cleanup commit-only 17-byte segment files left behind by Borg 1.1 [flag in Borg 1.2 only]', ) compact_group.add_argument( '--threshold', @@ -389,9 +550,9 @@ def make_parsers(): '-h', '--help', action='help', help='Show this help message and exit' ) - create_parser = subparsers.add_parser( + create_parser = action_parsers.add_parser( 'create', - aliases=SUBPARSER_ALIASES['create'], + aliases=ACTION_ALIASES['create'], help='Create an archive (actually perform a backup)', description='Create an archive (actually perform a backup)', add_help=False, @@ -423,9 +584,9 @@ def make_parsers(): ) create_group.add_argument('-h', '--help', action='help', help='Show this help message and exit') - check_parser = subparsers.add_parser( + check_parser = action_parsers.add_parser( 'check', - aliases=SUBPARSER_ALIASES['check'], + aliases=ACTION_ALIASES['check'], help='Check archives for consistency', description='Check archives for consistency', add_help=False, @@ -465,9 +626,9 @@ def make_parsers(): ) check_group.add_argument('-h', '--help', action='help', help='Show this help message and exit') - extract_parser = subparsers.add_parser( + extract_parser = action_parsers.add_parser( 'extract', - aliases=SUBPARSER_ALIASES['extract'], + aliases=ACTION_ALIASES['extract'], help='Extract files from a named archive to the current directory', description='Extract a named archive to the current directory', add_help=False, @@ -484,9 +645,9 @@ def make_parsers(): '--path', '--restore-path', metavar='PATH', - nargs='+', dest='paths', - help='Paths to extract from archive, defaults to the entire archive', + action='append', + help='Path to extract from archive, can specify flag multiple times, defaults to the entire archive', ) extract_group.add_argument( '--destination', @@ -511,9 +672,118 @@ def make_parsers(): '-h', '--help', action='help', help='Show this help message and exit' ) - export_tar_parser = subparsers.add_parser( + config_parser = action_parsers.add_parser( + 'config', + aliases=ACTION_ALIASES['config'], + help='Perform configuration file related operations', + description='Perform configuration file related operations', + add_help=False, + ) + + config_group = config_parser.add_argument_group('config arguments') + config_group.add_argument('-h', '--help', action='help', help='Show this help message and exit') + + config_parsers = config_parser.add_subparsers( + title='config sub-actions', + ) + + config_bootstrap_parser = config_parsers.add_parser( + 'bootstrap', + help='Extract the borgmatic configuration files from a named archive', + description='Extract the borgmatic 
configuration files from a named archive', + add_help=False, + ) + config_bootstrap_group = config_bootstrap_parser.add_argument_group( + 'config bootstrap arguments' + ) + config_bootstrap_group.add_argument( + '--repository', + help='Path of repository to extract config files from', + required=True, + ) + config_bootstrap_group.add_argument( + '--borgmatic-source-directory', + help='Path that stores the config files used to create an archive and additional source files used for temporary internal state like borgmatic database dumps. Defaults to ~/.borgmatic', + ) + config_bootstrap_group.add_argument( + '--archive', + help='Name of archive to extract config files from, defaults to "latest"', + default='latest', + ) + config_bootstrap_group.add_argument( + '--destination', + metavar='PATH', + dest='destination', + help='Directory to extract config files into, defaults to /', + default='/', + ) + config_bootstrap_group.add_argument( + '--strip-components', + type=lambda number: number if number == 'all' else int(number), + metavar='NUMBER', + help='Number of leading path components to remove from each extracted path or "all" to strip all leading path components. Skip paths with fewer elements', + ) + config_bootstrap_group.add_argument( + '--progress', + dest='progress', + default=False, + action='store_true', + help='Display progress for each file as it is extracted', + ) + config_bootstrap_group.add_argument( + '-h', '--help', action='help', help='Show this help message and exit' + ) + + config_generate_parser = config_parsers.add_parser( + 'generate', + help='Generate a sample borgmatic configuration file', + description='Generate a sample borgmatic configuration file', + add_help=False, + ) + config_generate_group = config_generate_parser.add_argument_group('config generate arguments') + config_generate_group.add_argument( + '-s', + '--source', + dest='source_filename', + help='Optional configuration file to merge into the generated configuration, useful for upgrading your configuration', + ) + config_generate_group.add_argument( + '-d', + '--destination', + dest='destination_filename', + default=config_paths[0], + help=f'Destination configuration file, default: {unexpanded_config_paths[0]}', + ) + config_generate_group.add_argument( + '--overwrite', + default=False, + action='store_true', + help='Whether to overwrite any existing destination file, defaults to false', + ) + config_generate_group.add_argument( + '-h', '--help', action='help', help='Show this help message and exit' + ) + + config_validate_parser = config_parsers.add_parser( + 'validate', + help='Validate borgmatic configuration files specified with --config (see borgmatic --help)', + description='Validate borgmatic configuration files specified with --config (see borgmatic --help)', + add_help=False, + ) + config_validate_group = config_validate_parser.add_argument_group('config validate arguments') + config_validate_group.add_argument( + '-s', + '--show', + action='store_true', + help='Show the validated configuration after all include merging has occurred', + ) + config_validate_group.add_argument( + '-h', '--help', action='help', help='Show this help message and exit' + ) + + export_tar_parser = action_parsers.add_parser( 'export-tar', - aliases=SUBPARSER_ALIASES['export-tar'], + aliases=ACTION_ALIASES['export-tar'], help='Export an archive to a tar-formatted file or stream', description='Export an archive to a tar-formatted file or stream', add_help=False, @@ -529,9 +799,9 @@ def make_parsers(): 
export_tar_group.add_argument( '--path', metavar='PATH', - nargs='+', dest='paths', - help='Paths to export from archive, defaults to the entire archive', + action='append', + help='Path to export from archive, can specify flag multiple times, defaults to the entire archive', ) export_tar_group.add_argument( '--destination', @@ -557,9 +827,9 @@ def make_parsers(): '-h', '--help', action='help', help='Show this help message and exit' ) - mount_parser = subparsers.add_parser( + mount_parser = action_parsers.add_parser( 'mount', - aliases=SUBPARSER_ALIASES['mount'], + aliases=ACTION_ALIASES['mount'], help='Mount files from a named archive as a FUSE filesystem', description='Mount a named archive as a FUSE filesystem', add_help=False, @@ -580,9 +850,9 @@ def make_parsers(): mount_group.add_argument( '--path', metavar='PATH', - nargs='+', dest='paths', - help='Paths to mount from archive, defaults to the entire archive', + action='append', + help='Path to mount from archive, can specify multiple times, defaults to the entire archive', ) mount_group.add_argument( '--foreground', @@ -591,12 +861,40 @@ def make_parsers(): action='store_true', help='Stay in foreground until ctrl-C is pressed', ) + mount_group.add_argument( + '--first', + metavar='N', + help='Mount first N archives after other filters are applied', + ) + mount_group.add_argument( + '--last', metavar='N', help='Mount last N archives after other filters are applied' + ) + mount_group.add_argument( + '--oldest', + metavar='TIMESPAN', + help='Mount archives within a specified time range starting from the timestamp of the oldest archive (e.g. 7d or 12m) [Borg 2.x+ only]', + ) + mount_group.add_argument( + '--newest', + metavar='TIMESPAN', + help='Mount archives within a time range that ends at timestamp of the newest archive and starts a specified time range ago (e.g. 7d or 12m) [Borg 2.x+ only]', + ) + mount_group.add_argument( + '--older', + metavar='TIMESPAN', + help='Mount archives that are older than the specified time range (e.g. 7d or 12m) from the current time [Borg 2.x+ only]', + ) + mount_group.add_argument( + '--newer', + metavar='TIMESPAN', + help='Mount archives that are newer than the specified time range (e.g. 7d or 12m) from the current time [Borg 2.x+ only]', + ) mount_group.add_argument('--options', dest='options', help='Extra Borg mount options') mount_group.add_argument('-h', '--help', action='help', help='Show this help message and exit') - umount_parser = subparsers.add_parser( + umount_parser = action_parsers.add_parser( 'umount', - aliases=SUBPARSER_ALIASES['umount'], + aliases=ACTION_ALIASES['umount'], help='Unmount a FUSE filesystem that was mounted with "borgmatic mount"', description='Unmount a mounted FUSE filesystem', add_help=False, @@ -611,9 +909,9 @@ def make_parsers(): ) umount_group.add_argument('-h', '--help', action='help', help='Show this help message and exit') - restore_parser = subparsers.add_parser( + restore_parser = action_parsers.add_parser( 'restore', - aliases=SUBPARSER_ALIASES['restore'], + aliases=ACTION_ALIASES['restore'], help='Restore database dumps from a named archive', description='Restore database dumps from a named archive. (To extract files instead, use "borgmatic extract".)', add_help=False, @@ -629,24 +927,52 @@ def make_parsers(): restore_group.add_argument( '--database', metavar='NAME', - nargs='+', dest='databases', - help="Names of databases to restore from archive, defaults to all databases. 
Note that any databases to restore must be defined in borgmatic's configuration", + action='append', + help="Name of database to restore from archive, must be defined in borgmatic's configuration, can specify flag multiple times, defaults to all databases", + ) + restore_group.add_argument( + '--schema', + metavar='NAME', + dest='schemas', + action='append', + help='Name of schema to restore from the database, can specify flag multiple times, defaults to all schemas. Schemas are only supported for PostgreSQL and MongoDB databases', + ) + restore_group.add_argument( + '--hostname', + help='Database hostname to restore to. Defaults to the "restore_hostname" option in borgmatic\'s configuration', + ) + restore_group.add_argument( + '--port', + help='Port to restore to. Defaults to the "restore_port" option in borgmatic\'s configuration', + ) + restore_group.add_argument( + '--username', + help='Username with which to connect to the database. Defaults to the "restore_username" option in borgmatic\'s configuration', + ) + restore_group.add_argument( + '--password', + help='Password with which to connect to the restore database. Defaults to the "restore_password" option in borgmatic\'s configuration', + ) + restore_group.add_argument( + '--restore-path', + help='Path to restore SQLite database dumps to. Defaults to the "restore_path" option in borgmatic\'s configuration', ) restore_group.add_argument( '-h', '--help', action='help', help='Show this help message and exit' ) - rlist_parser = subparsers.add_parser( + rlist_parser = action_parsers.add_parser( 'rlist', - aliases=SUBPARSER_ALIASES['rlist'], + aliases=ACTION_ALIASES['rlist'], help='List repository', description='List the archives in a repository', add_help=False, ) rlist_group = rlist_parser.add_argument_group('rlist arguments') rlist_group.add_argument( - '--repository', help='Path of repository to list, defaults to the configured repositories', + '--repository', + help='Path of repository to list, defaults to the configured repositories', ) rlist_group.add_argument( '--short', default=False, action='store_true', help='Output only archive names' @@ -656,7 +982,7 @@ def make_parsers(): '--json', default=False, action='store_true', help='Output results as JSON' ) rlist_group.add_argument( - '-P', '--prefix', help='Only list archive names starting with this prefix' + '-P', '--prefix', help='Deprecated. Only list archive names starting with this prefix' ) rlist_group.add_argument( '-a', @@ -674,11 +1000,31 @@ def make_parsers(): rlist_group.add_argument( '--last', metavar='N', help='List last N archives after other filters are applied' ) + rlist_group.add_argument( + '--oldest', + metavar='TIMESPAN', + help='List archives within a specified time range starting from the timestamp of the oldest archive (e.g. 7d or 12m) [Borg 2.x+ only]', + ) + rlist_group.add_argument( + '--newest', + metavar='TIMESPAN', + help='List archives within a time range that ends at timestamp of the newest archive and starts a specified time range ago (e.g. 7d or 12m) [Borg 2.x+ only]', + ) + rlist_group.add_argument( + '--older', + metavar='TIMESPAN', + help='List archives that are older than the specified time range (e.g. 7d or 12m) from the current time [Borg 2.x+ only]', + ) + rlist_group.add_argument( + '--newer', + metavar='TIMESPAN', + help='List archives that are newer than the specified time range (e.g. 
+    )
    rlist_group.add_argument('-h', '--help', action='help', help='Show this help message and exit')

-    list_parser = subparsers.add_parser(
+    list_parser = action_parsers.add_parser(
        'list',
-        aliases=SUBPARSER_ALIASES['list'],
+        aliases=ACTION_ALIASES['list'],
        help='List archive',
        description='List the files in an archive or search for a file across archives',
        add_help=False,
@@ -692,16 +1038,16 @@ def make_parsers():
    list_group.add_argument(
        '--path',
        metavar='PATH',
-        nargs='+',
        dest='paths',
-        help='Paths or patterns to list from a single selected archive (via "--archive"), defaults to listing the entire archive',
+        action='append',
+        help='Path or pattern to list from a single selected archive (via "--archive"), can specify flag multiple times, defaults to listing the entire archive',
    )
    list_group.add_argument(
        '--find',
        metavar='PATH',
-        nargs='+',
        dest='find_paths',
-        help='Partial paths or patterns to search for and list across multiple archives',
+        action='append',
+        help='Partial path or pattern to search for and list across multiple archives, can specify flag multiple times',
    )
    list_group.add_argument(
        '--short', default=False, action='store_true', help='Output only path names'
@@ -711,7 +1057,7 @@ def make_parsers():
        '--json', default=False, action='store_true', help='Output results as JSON'
    )
    list_group.add_argument(
-        '-P', '--prefix', help='Only list archive names starting with this prefix'
+        '-P', '--prefix', help='Deprecated. Only list archive names starting with this prefix'
    )
    list_group.add_argument(
        '-a',
@@ -720,12 +1066,6 @@ def make_parsers():
        metavar='PATTERN',
        help='Only list archive names matching this pattern',
    )
-    list_group.add_argument(
-        '--successful',
-        default=True,
-        action='store_true',
-        help='Deprecated; no effect. Newer versions of Borg shows successful (non-checkpoint) archives by default.',
-    )
    list_group.add_argument(
        '--sort-by', metavar='KEYS', help='Comma-separated list of sorting keys'
    )
@@ -749,9 +1089,9 @@ def make_parsers():
    )
    list_group.add_argument('-h', '--help', action='help', help='Show this help message and exit')

-    rinfo_parser = subparsers.add_parser(
+    rinfo_parser = action_parsers.add_parser(
        'rinfo',
-        aliases=SUBPARSER_ALIASES['rinfo'],
+        aliases=ACTION_ALIASES['rinfo'],
        help='Show repository summary information such as disk space used',
        description='Show repository summary information such as disk space used',
        add_help=False,
@@ -766,9 +1106,9 @@ def make_parsers():
    )
    rinfo_group.add_argument('-h', '--help', action='help', help='Show this help message and exit')

-    info_parser = subparsers.add_parser(
+    info_parser = action_parsers.add_parser(
        'info',
-        aliases=SUBPARSER_ALIASES['info'],
+        aliases=ACTION_ALIASES['info'],
        help='Show archive summary information such as disk space used',
        description='Show archive summary information such as disk space used',
        add_help=False,
@@ -783,7 +1123,9 @@ def make_parsers():
        '--json', dest='json', default=False, action='store_true', help='Output results as JSON'
    )
    info_group.add_argument(
-        '-P', '--prefix', help='Only show info for archive names starting with this prefix'
+        '-P',
+        '--prefix',
+        help='Deprecated. Only show info for archive names starting with this prefix',
    )
    info_group.add_argument(
        '-a',
@@ -803,11 +1145,31 @@ def make_parsers():
    info_group.add_argument(
        '--last', metavar='N', help='Show info for last N archives after other filters are applied'
    )
+    info_group.add_argument(
+        '--oldest',
+        metavar='TIMESPAN',
+        help='Show info for archives within a specified time range starting from the timestamp of the oldest archive (e.g. 7d or 12m) [Borg 2.x+ only]',
+    )
+    info_group.add_argument(
+        '--newest',
+        metavar='TIMESPAN',
+        help='Show info for archives within a time range that ends at the timestamp of the newest archive and starts a specified time range ago (e.g. 7d or 12m) [Borg 2.x+ only]',
+    )
+    info_group.add_argument(
+        '--older',
+        metavar='TIMESPAN',
+        help='Show info for archives that are older than the specified time range (e.g. 7d or 12m) from the current time [Borg 2.x+ only]',
+    )
+    info_group.add_argument(
+        '--newer',
+        metavar='TIMESPAN',
+        help='Show info for archives that are newer than the specified time range (e.g. 7d or 12m) from the current time [Borg 2.x+ only]',
+    )
    info_group.add_argument('-h', '--help', action='help', help='Show this help message and exit')

-    break_lock_parser = subparsers.add_parser(
+    break_lock_parser = action_parsers.add_parser(
        'break-lock',
-        aliases=SUBPARSER_ALIASES['break-lock'],
+        aliases=ACTION_ALIASES['break-lock'],
        help='Break the repository and cache locks left behind by Borg aborting',
        description='Break Borg repository and cache locks left behind by Borg aborting',
        add_help=False,
@@ -821,9 +1183,9 @@ def make_parsers():
        '-h', '--help', action='help', help='Show this help message and exit'
    )

-    borg_parser = subparsers.add_parser(
+    borg_parser = action_parsers.add_parser(
        'borg',
-        aliases=SUBPARSER_ALIASES['borg'],
+        aliases=ACTION_ALIASES['borg'],
        help='Run an arbitrary Borg command',
        description="Run an arbitrary Borg command based on borgmatic's configuration",
        add_help=False,
@@ -843,30 +1205,53 @@ def make_parsers():
    )
    borg_group.add_argument('-h', '--help', action='help', help='Show this help message and exit')

-    return top_level_parser, subparsers
+    return global_parser, action_parsers, global_plus_action_parser


def parse_arguments(*unparsed_arguments):
    '''
    Given command-line arguments with which this script was invoked, parse the arguments and return
-    them as a dict mapping from subparser name (or "global") to an argparse.Namespace instance.
+    them as a dict mapping from action name (or "global") to an argparse.Namespace instance.
+
+    Raise ValueError if the arguments cannot be parsed.
+    Raise SystemExit with an error code of 0 if "--help" was requested.
    '''
-    top_level_parser, subparsers = make_parsers()
-
-    arguments, remaining_arguments = parse_subparser_arguments(
-        unparsed_arguments, subparsers.choices
+    global_parser, action_parsers, global_plus_action_parser = make_parsers()
+    arguments, remaining_action_arguments = parse_arguments_for_actions(
+        unparsed_arguments, action_parsers.choices, global_parser
    )
-    arguments['global'] = top_level_parser.parse_args(remaining_arguments)

-    if arguments['global'].excludes_filename:
+    if not arguments['global'].config_paths:
+        arguments['global'].config_paths = collect.get_default_config_paths(expand_home=True)
+
+    for action_name in ('bootstrap', 'generate', 'validate'):
+        if (
+            action_name in arguments.keys() and len(arguments.keys()) > 2
+        ):  # 2 = 1 for 'global' + 1 for the action
+            raise ValueError(
+                f'The {action_name} action cannot be combined with other actions. Please run it separately.'
+            )
+
+    unknown_arguments = get_unparsable_arguments(remaining_action_arguments)
+
+    if unknown_arguments:
+        if '--help' in unknown_arguments or '-h' in unknown_arguments:
+            global_plus_action_parser.print_help()
+            sys.exit(0)
+
+        global_plus_action_parser.print_usage()
        raise ValueError(
-            'The --excludes flag has been replaced with exclude_patterns in configuration.'
+            f"Unrecognized argument{'s' if len(unknown_arguments) > 1 else ''}: {' '.join(unknown_arguments)}"
        )

    if 'create' in arguments and arguments['create'].list_files and arguments['create'].progress:
        raise ValueError(
            'With the create action, only one of --list (--files) and --progress flags can be used.'
        )
+    if 'create' in arguments and arguments['create'].list_files and arguments['create'].json:
+        raise ValueError(
+            'With the create action, only one of --list (--files) and --json flags can be used.'
+        )

    if (
        ('list' in arguments and 'rinfo' in arguments and arguments['list'].json)
@@ -881,7 +1266,17 @@ def parse_arguments(*unparsed_arguments):
        and arguments['transfer'].match_archives
    ):
        raise ValueError(
-            'With the transfer action, only one of --archive and --glob-archives flags can be used.'
+            'With the transfer action, only one of --archive and --match-archives flags can be used.'
+        )
+
+    if 'list' in arguments and (arguments['list'].prefix and arguments['list'].match_archives):
+        raise ValueError(
+            'With the list action, only one of --prefix or --match-archives flags can be used.'
+        )
+
+    if 'rlist' in arguments and (arguments['rlist'].prefix and arguments['rlist'].match_archives):
+        raise ValueError(
+            'With the rlist action, only one of --prefix or --match-archives flags can be used.'
        )

    if 'info' in arguments and (
diff --git a/borgmatic/commands/borgmatic.py b/borgmatic/commands/borgmatic.py
index 3c2dcc0a..d3eead5b 100644
--- a/borgmatic/commands/borgmatic.py
+++ b/borgmatic/commands/borgmatic.py
@@ -8,12 +8,19 @@
 from queue import Queue
 from subprocess import CalledProcessError

 import colorama
-import pkg_resources
+
+try:
+    import importlib_metadata
+except ModuleNotFoundError:  # pragma: nocover
+    import importlib.metadata as importlib_metadata

 import borgmatic.actions.borg
 import borgmatic.actions.break_lock
 import borgmatic.actions.check
 import borgmatic.actions.compact
+import borgmatic.actions.config.bootstrap
+import borgmatic.actions.config.generate
+import borgmatic.actions.config.validate
 import borgmatic.actions.create
 import borgmatic.actions.export_tar
 import borgmatic.actions.extract
@@ -26,20 +33,19 @@
 import borgmatic.actions.restore
 import borgmatic.actions.rinfo
 import borgmatic.actions.rlist
 import borgmatic.actions.transfer
-import borgmatic.commands.completion
+import borgmatic.commands.completion.bash
+import borgmatic.commands.completion.fish
 from borgmatic.borg import umount as borg_umount
 from borgmatic.borg import version as borg_version
 from borgmatic.commands.arguments import parse_arguments
-from borgmatic.config import checks, collect, convert, validate
+from borgmatic.config import checks, collect, validate
 from borgmatic.hooks import command, dispatch, monitor
-from borgmatic.logger import add_custom_log_levels, configure_logging, should_do_markup
+from borgmatic.logger import DISABLED, add_custom_log_levels, configure_logging, should_do_markup
 from borgmatic.signals import configure_signals
 from borgmatic.verbosity import verbosity_to_log_level

 logger = logging.getLogger(__name__)

-LEGACY_CONFIG_PATH = '/etc/borgmatic/config'
-

 def run_configuration(config_filename, config, arguments):
     '''
@@ -52,16 +58,12 @@ def run_configuration(config_filename, config, arguments):
     * JSON output strings from successfully executing any actions that produce JSON
     * logging.LogRecord instances containing errors from any actions or backup hooks that fail
     '''
-    (location, storage, retention, consistency, hooks) = (
-        config.get(section_name, {})
-        for section_name in ('location', 'storage', 'retention', 'consistency', 'hooks')
-    )
     global_arguments = arguments['global']

-    local_path = location.get('local_path', 'borg')
-    remote_path = location.get('remote_path')
-    retries = storage.get('retries', 0)
-    retry_wait = storage.get('retry_wait', 0)
+    local_path = config.get('local_path', 'borg')
+    remote_path = config.get('remote_path')
+    retries = config.get('retries', 0)
+    retry_wait = config.get('retry_wait', 0)
     encountered_error = None
     error_repository = ''
     using_primary_action = {'create', 'prune', 'compact', 'check'}.intersection(arguments)
@@ -70,18 +72,19 @@ def run_configuration(config_filename, config, arguments):
     action_names = [
         action for action in arguments.keys() if action != 'global'
     ]
+    monitoring_hooks_are_activated = using_primary_action and monitoring_log_level != DISABLED

     try:
-        local_borg_version = borg_version.local_borg_version(storage, local_path)
+        local_borg_version = borg_version.local_borg_version(config, local_path)
     except (OSError, CalledProcessError, ValueError) as error:
         yield from log_error_records(f'{config_filename}: Error getting local Borg version', error)
         return

     try:
-        if using_primary_action and not skip_monitoring:
+        if using_primary_action and not skip_monitoring and monitoring_hooks_are_activated:
             dispatch.call_hooks(
                 'initialize_monitor',
-                hooks,
+                config,
                 config_filename,
                 monitor.MONITOR_HOOK_NAMES,
                 monitoring_log_level,
@@ -91,7 +94,7 @@ def run_configuration(config_filename, config, arguments):
             for action_name in action_names:
                 dispatch.call_hooks(
                     'ping_monitor',
-                    hooks,
+                    config,
                     config_filename,
                     monitor.MONITOR_HOOK_NAMES,
                     monitor.State.START,
@@ -108,46 +111,47 @@ def run_configuration(config_filename, config, arguments):

     if not encountered_error:
         repo_queue = Queue()
-        for repo in location['repositories']:
+        for repo in config['repositories']:
             repo_queue.put(
                 (repo, 0),
             )

         while not repo_queue.empty():
-            repository_path, retry_num = repo_queue.get()
+            repository, retry_num = repo_queue.get()
+            logger.debug(
+                f'{repository.get("label", repository["path"])}: Running actions for repository'
+            )
             timeout = retry_num * retry_wait
             if timeout:
-                logger.warning(f'{config_filename}: Sleeping {timeout}s before next retry')
+                logger.warning(
+                    f'{repository.get("label", repository["path"])}: Sleeping {timeout}s before next retry'
+                )
                 time.sleep(timeout)
             try:
                 yield from run_actions(
                     arguments=arguments,
                     config_filename=config_filename,
-                    location=location,
-                    storage=storage,
-                    retention=retention,
-                    consistency=consistency,
-                    hooks=hooks,
+                    config=config,
                     local_path=local_path,
                     remote_path=remote_path,
                     local_borg_version=local_borg_version,
-                    repository_path=repository_path,
+                    repository=repository,
                 )
             except (OSError, CalledProcessError, ValueError) as error:
                 if retry_num < retries:
                     repo_queue.put(
-                        (repository_path, retry_num + 1),
+                        (repository, retry_num + 1),
                     )
                     tuple(  # Consume the generator so as to trigger logging.
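                         # (log_error_records yields its records lazily, so nothing is
                         # logged until the generator is actually consumed.)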
                         log_error_records(
-                            f'{repository_path}: Error running actions for repository',
+                            f'{repository.get("label", repository["path"])}: Error running actions for repository',
                             error,
                             levelno=logging.WARNING,
                             log_command_error_output=True,
                         )
                     )
                     logger.warning(
-                        f'{config_filename}: Retrying... attempt {retry_num + 1}/{retries}'
+                        f'{repository.get("label", repository["path"])}: Retrying... attempt {retry_num + 1}/{retries}'
                     )
                     continue
@@ -155,18 +159,19 @@ def run_configuration(config_filename, config, arguments):
                     return

                 yield from log_error_records(
-                    f'{repository_path}: Error running actions for repository', error
+                    f'{repository.get("label", repository["path"])}: Error running actions for repository',
+                    error,
                 )
                 encountered_error = error
-                error_repository = repository_path
+                error_repository = repository['path']

     try:
-        if using_primary_action and not skip_monitoring:
+        if using_primary_action and not skip_monitoring and monitoring_hooks_are_activated:
             # send logs irrespective of error
             for action_name in action_names:
                 dispatch.call_hooks(
                     'ping_monitor',
-                    hooks,
+                    config,
                     config_filename,
                     monitor.MONITOR_HOOK_NAMES,
                     monitor.State.LOG,
@@ -179,15 +184,15 @@ def run_configuration(config_filename, config, arguments):
             return

         encountered_error = error
-        yield from log_error_records(f'{repository_path}: Error pinging monitor', error)
+        yield from log_error_records(f'{repository["path"]}: Error pinging monitor', error)

     if not encountered_error:
         try:
-            if using_primary_action and not skip_monitoring:
+            if using_primary_action and not skip_monitoring and monitoring_hooks_are_activated:
                 for action_name in action_names:
                     dispatch.call_hooks(
                         'ping_monitor',
-                        hooks,
+                        config,
                         config_filename,
                         monitor.MONITOR_HOOK_NAMES,
                         monitor.State.FINISH,
@@ -197,7 +202,7 @@ def run_configuration(config_filename, config, arguments):
                 )
                 dispatch.call_hooks(
                     'destroy_monitor',
-                    hooks,
+                    config,
                     config_filename,
                     monitor.MONITOR_HOOK_NAMES,
                     monitoring_log_level,
@@ -213,8 +218,8 @@ def run_configuration(config_filename, config, arguments):
     if encountered_error and using_primary_action:
         try:
             command.execute_hook(
-                hooks.get('on_error'),
-                hooks.get('umask'),
+                config.get('on_error'),
+                config.get('umask'),
                 config_filename,
                 'on-error',
                 global_arguments.dry_run,
@@ -225,7 +230,7 @@ def run_configuration(config_filename, config, arguments):
             for action_name in action_names:
                 dispatch.call_hooks(
                     'ping_monitor',
-                    hooks,
+                    config,
                     config_filename,
                     monitor.MONITOR_HOOK_NAMES,
                     monitor.State.FAIL,
@@ -235,7 +240,7 @@ def run_configuration(config_filename, config, arguments):
                 )
                 dispatch.call_hooks(
                     'destroy_monitor',
-                    hooks,
+                    config,
                     config_filename,
                     monitor.MONITOR_HOOK_NAMES,
                     monitoring_log_level,
@@ -252,15 +257,11 @@
 def run_actions(
     *,
     arguments,
     config_filename,
-    location,
-    storage,
-    retention,
-    consistency,
-    hooks,
+    config,
     local_path,
     remote_path,
     local_borg_version,
-    repository_path,
+    repository,
 ):
     '''
     Given parsed command-line arguments as an argparse.ArgumentParser instance, the configuration
@@ -275,29 +276,30 @@ def run_actions(
     invalid.
     '''
     add_custom_log_levels()
-    repository = os.path.expanduser(repository_path)
+    repository_path = os.path.expanduser(repository['path'])
     global_arguments = arguments['global']
     dry_run_label = ' (dry run; not making any changes)' if global_arguments.dry_run else ''
     hook_context = {
         'repository': repository_path,
         # Deprecated: For backwards compatibility with borgmatic < 1.6.0.
-        'repositories': ','.join(location['repositories']),
+        'repositories': ','.join([repo['path'] for repo in config['repositories']]),
+        'log_file': global_arguments.log_file if global_arguments.log_file else '',
     }

     command.execute_hook(
-        hooks.get('before_actions'),
-        hooks.get('umask'),
+        config.get('before_actions'),
+        config.get('umask'),
         config_filename,
         'pre-actions',
         global_arguments.dry_run,
         **hook_context,
     )

-    for (action_name, action_arguments) in arguments.items():
+    for action_name, action_arguments in arguments.items():
         if action_name == 'rcreate':
             borgmatic.actions.rcreate.run_rcreate(
                 repository,
-                storage,
+                config,
                 local_borg_version,
                 action_arguments,
                 global_arguments,
@@ -307,7 +309,7 @@ def run_actions(
         elif action_name == 'transfer':
             borgmatic.actions.transfer.run_transfer(
                 repository,
-                storage,
+                config,
                 local_borg_version,
                 action_arguments,
                 global_arguments,
@@ -318,9 +320,7 @@ def run_actions(
             yield from borgmatic.actions.create.run_create(
                 config_filename,
                 repository,
-                location,
-                storage,
-                hooks,
+                config,
                 hook_context,
                 local_borg_version,
                 action_arguments,
@@ -333,9 +333,7 @@ def run_actions(
             borgmatic.actions.prune.run_prune(
                 config_filename,
                 repository,
-                storage,
-                retention,
-                hooks,
+                config,
                 hook_context,
                 local_borg_version,
                 action_arguments,
@@ -348,9 +346,7 @@ def run_actions(
             borgmatic.actions.compact.run_compact(
                 config_filename,
                 repository,
-                storage,
-                retention,
-                hooks,
+                config,
                 hook_context,
                 local_borg_version,
                 action_arguments,
@@ -360,14 +356,11 @@ def run_actions(
                 remote_path,
             )
         elif action_name == 'check':
-            if checks.repository_enabled_for_checks(repository, consistency):
+            if checks.repository_enabled_for_checks(repository, config):
                 borgmatic.actions.check.run_check(
                     config_filename,
                     repository,
-                    location,
-                    storage,
-                    consistency,
-                    hooks,
+                    config,
                     hook_context,
                     local_borg_version,
                     action_arguments,
@@ -379,9 +372,7 @@ def run_actions(
             borgmatic.actions.extract.run_extract(
                 config_filename,
                 repository,
-                location,
-                storage,
-                hooks,
+                config,
                 hook_context,
                 local_borg_version,
                 action_arguments,
@@ -392,7 +383,7 @@ def run_actions(
         elif action_name == 'export-tar':
             borgmatic.actions.export_tar.run_export_tar(
                 repository,
-                storage,
+                config,
                 local_borg_version,
                 action_arguments,
                 global_arguments,
@@ -402,18 +393,17 @@ def run_actions(
         elif action_name == 'mount':
             borgmatic.actions.mount.run_mount(
                 repository,
-                storage,
+                config,
                 local_borg_version,
-                arguments['mount'],
+                action_arguments,
+                global_arguments,
                 local_path,
                 remote_path,
             )
         elif action_name == 'restore':
             borgmatic.actions.restore.run_restore(
                 repository,
-                location,
-                storage,
-                hooks,
+                config,
                 local_borg_version,
                 action_arguments,
                 global_arguments,
@@ -423,61 +413,67 @@ def run_actions(
         elif action_name == 'rlist':
             yield from borgmatic.actions.rlist.run_rlist(
                 repository,
-                storage,
+                config,
                 local_borg_version,
                 action_arguments,
+                global_arguments,
                 local_path,
                 remote_path,
             )
         elif action_name == 'list':
             yield from borgmatic.actions.list.run_list(
                 repository,
-                storage,
+                config,
                 local_borg_version,
                 action_arguments,
+                global_arguments,
                 local_path,
                 remote_path,
             )
         elif action_name == 'rinfo':
             yield from borgmatic.actions.rinfo.run_rinfo(
                 repository,
-                storage,
+                config,
                 local_borg_version,
                 action_arguments,
+                global_arguments,
                 local_path,
                 remote_path,
             )
         elif action_name == 'info':
             yield from borgmatic.actions.info.run_info(
                 repository,
-                storage,
+                config,
                 local_borg_version,
                 action_arguments,
+                global_arguments,
                 local_path,
                 remote_path,
             )
         elif action_name == 'break-lock':
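             # Break any stale repository and cache locks left behind by an aborted Borg run.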
             borgmatic.actions.break_lock.run_break_lock(
                 repository,
-                storage,
+                config,
                 local_borg_version,
-                arguments['break-lock'],
+                action_arguments,
+                global_arguments,
                 local_path,
                 remote_path,
             )
         elif action_name == 'borg':
             borgmatic.actions.borg.run_borg(
                 repository,
-                storage,
+                config,
                 local_borg_version,
                 action_arguments,
+                global_arguments,
                 local_path,
                 remote_path,
             )

     command.execute_hook(
-        hooks.get('after_actions'),
-        hooks.get('umask'),
+        config.get('after_actions'),
+        config.get('umask'),
         config_filename,
         'post-actions',
         global_arguments.dry_run,
@@ -490,6 +486,9 @@ def load_configurations(config_filenames, overrides=None, resolve_env=True):
     Given a sequence of configuration filenames, load and validate each configuration file. Return
     the results as a tuple of: dict of configuration filename to corresponding parsed configuration,
     and sequence of logging.LogRecord instances containing any parse errors.
+
+    Log records are returned here instead of being logged directly because logging isn't yet
+    initialized at this point!
     '''
     # Dict mapping from config filename to corresponding parsed config dict.
     configs = collections.OrderedDict()
@@ -497,6 +496,17 @@ def load_configurations(config_filenames, overrides=None, resolve_env=True):

     # Parse and load each configuration file.
     for config_filename in config_filenames:
+        logs.extend(
+            [
+                logging.makeLogRecord(
+                    dict(
+                        levelno=logging.DEBUG,
+                        levelname='DEBUG',
+                        msg=f'{config_filename}: Loading configuration file',
+                    )
+                ),
+            ]
+        )
         try:
             configs[config_filename], parse_logs = validate.parse_configuration(
                 config_filename, validate.schema_filename(), overrides, resolve_env
@@ -546,6 +556,9 @@ def log_record(suppress_log=False, **kwargs):
     return record


+MAX_CAPTURED_OUTPUT_LENGTH = 1000
+
+
 def log_error_records(
     message, error=None, levelno=logging.CRITICAL, log_command_error_output=False
 ):
@@ -568,12 +581,18 @@ def log_error_records(
     except CalledProcessError as error:
         yield log_record(levelno=levelno, levelname=level_name, msg=message)
         if error.output:
+            try:
+                output = error.output.decode('utf-8')
+            except (UnicodeDecodeError, AttributeError):
+                output = error.output
+
             # Suppress these logs for now and save full error output for the log summary at the end.
             yield log_record(
                 levelno=levelno,
                 levelname=level_name,
-                msg=error.output,
-                suppress_log=not log_command_error_output,
+                msg=output[:MAX_CAPTURED_OUTPUT_LENGTH]
+                + ' ...' * (len(output) > MAX_CAPTURED_OUTPUT_LENGTH),
+                suppress_log=True,
             )
         yield log_record(levelno=levelno, levelname=level_name, msg=error)
     except (ValueError, OSError) as error:
@@ -590,12 +609,106 @@ def get_local_path(configs):
     Arbitrarily return the local path from the first configuration dict. Default to "borg" if not
     set.
     '''
-    return next(iter(configs.values())).get('location', {}).get('local_path', 'borg')
+    return next(iter(configs.values())).get('local_path', 'borg')
+
+
+def collect_highlander_action_summary_logs(configs, arguments, configuration_parse_errors):
+    '''
+    Given a dict of configuration filename to corresponding parsed configuration, parsed
+    command-line arguments as a dict from subparser name to a parsed namespace of arguments, and
+    whether any configuration files encountered errors during parsing, run a highlander action
+    specified in the arguments, if any, and yield a series of logging.LogRecord instances containing
+    summary information.
+
+    A highlander action is an action that cannot coexist with other actions on the borgmatic
+    command-line, and borgmatic exits after processing such an action.
+    '''
+    add_custom_log_levels()
+
+    if 'bootstrap' in arguments:
+        try:
+            # No configuration file is needed for bootstrap.
+            local_borg_version = borg_version.local_borg_version({}, 'borg')
+        except (OSError, CalledProcessError, ValueError) as error:
+            yield from log_error_records('Error getting local Borg version', error)
+            return
+
+        try:
+            borgmatic.actions.config.bootstrap.run_bootstrap(
+                arguments['bootstrap'], arguments['global'], local_borg_version
+            )
+            yield logging.makeLogRecord(
+                dict(
+                    levelno=logging.ANSWER,
+                    levelname='ANSWER',
+                    msg='Bootstrap successful',
+                )
+            )
+        except (
+            CalledProcessError,
+            ValueError,
+            OSError,
+        ) as error:
+            yield from log_error_records(error)
+
+        return
+
+    if 'generate' in arguments:
+        try:
+            borgmatic.actions.config.generate.run_generate(
+                arguments['generate'], arguments['global']
+            )
+            yield logging.makeLogRecord(
+                dict(
+                    levelno=logging.ANSWER,
+                    levelname='ANSWER',
+                    msg='Generate successful',
+                )
+            )
+        except (
+            CalledProcessError,
+            ValueError,
+            OSError,
+        ) as error:
+            yield from log_error_records(error)
+
+        return
+
+    if 'validate' in arguments:
+        if configuration_parse_errors:
+            yield logging.makeLogRecord(
+                dict(
+                    levelno=logging.CRITICAL,
+                    levelname='CRITICAL',
+                    msg='Configuration validation failed',
+                )
+            )
+
+            return
+
+        try:
+            borgmatic.actions.config.validate.run_validate(arguments['validate'], configs)
+
+            yield logging.makeLogRecord(
+                dict(
+                    levelno=logging.ANSWER,
+                    levelname='ANSWER',
+                    msg='All configuration files are valid',
+                )
+            )
+        except (
+            CalledProcessError,
+            ValueError,
+            OSError,
+        ) as error:
+            yield from log_error_records(error)
+
+        return


 def collect_configuration_run_summary_logs(configs, arguments):
     '''
-    Given a dict of configuration filename to corresponding parsed configuration, and parsed
+    Given a dict of configuration filename to corresponding parsed configuration and parsed
     command-line arguments as a dict from subparser name to a parsed namespace of arguments, run
     each configuration file and yield a series of logging.LogRecord instances containing summary
     information about each run.
@@ -629,10 +742,9 @@ def collect_configuration_run_summary_logs(configs, arguments):
     if 'create' in arguments:
         try:
             for config_filename, config in configs.items():
-                hooks = config.get('hooks', {})
                 command.execute_hook(
-                    hooks.get('before_everything'),
-                    hooks.get('umask'),
+                    config.get('before_everything'),
+                    config.get('umask'),
                     config_filename,
                     'pre-everything',
                     arguments['global'].dry_run,
@@ -677,10 +789,9 @@ def collect_configuration_run_summary_logs(configs, arguments):
     if 'create' in arguments:
         try:
             for config_filename, config in configs.items():
-                hooks = config.get('hooks', {})
                 command.execute_hook(
-                    hooks.get('after_everything'),
-                    hooks.get('umask'),
+                    config.get('after_everything'),
+                    config.get('umask'),
                     config_filename,
                     'post-everything',
                     arguments['global'].dry_run,
@@ -698,7 +809,7 @@ def exit_with_help_link():  # pragma: no cover
     sys.exit(1)


-def main():  # pragma: no cover
+def main(extra_summary_logs=[]):  # pragma: no cover
     configure_signals()

     try:
@@ -716,16 +827,23 @@ def main():  # pragma: no cover
     global_arguments = arguments['global']
     if global_arguments.version:
-        print(pkg_resources.require('borgmatic')[0].version)
+        print(importlib_metadata.version('borgmatic'))
         sys.exit(0)
     if global_arguments.bash_completion:
-        print(borgmatic.commands.completion.bash_completion())
+        print(borgmatic.commands.completion.bash.bash_completion())
+        sys.exit(0)
+    if global_arguments.fish_completion:
+        print(borgmatic.commands.completion.fish.fish_completion())
         sys.exit(0)

     config_filenames = tuple(collect.collect_config_filenames(global_arguments.config_paths))
+    global_arguments.used_config_paths = list(config_filenames)
     configs, parse_logs = load_configurations(
         config_filenames, global_arguments.overrides, global_arguments.resolve_env
     )
+    configuration_parse_errors = (
+        (max(log.levelno for log in parse_logs) >= logging.CRITICAL) if parse_logs else False
+    )

     any_json_flags = any(
         getattr(sub_arguments, 'json', False)
         for sub_arguments in arguments.values()
@@ -741,16 +859,25 @@ def main():  # pragma: no cover
             verbosity_to_log_level(global_arguments.log_file_verbosity),
             verbosity_to_log_level(global_arguments.monitoring_verbosity),
             global_arguments.log_file,
+            global_arguments.log_file_format,
         )
     except (FileNotFoundError, PermissionError) as error:
         configure_logging(logging.CRITICAL)
         logger.critical(f'Error configuring logging: {error}')
         exit_with_help_link()

-    logger.debug('Ensuring legacy configuration is upgraded')
-    convert.guard_configuration_upgraded(LEGACY_CONFIG_PATH, config_filenames)
-
-    summary_logs = parse_logs + list(collect_configuration_run_summary_logs(configs, arguments))
+    summary_logs = (
+        extra_summary_logs
+        + parse_logs
+        + (
+            list(
+                collect_highlander_action_summary_logs(
+                    configs, arguments, configuration_parse_errors
+                )
+            )
+            or list(collect_configuration_run_summary_logs(configs, arguments))
+        )
+    )
     summary_logs_max_level = max(log.levelno for log in summary_logs)

     for message in ('', 'summary:'):
diff --git a/borgmatic/commands/completion/__init__.py b/borgmatic/commands/completion/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/borgmatic/commands/completion/actions.py b/borgmatic/commands/completion/actions.py
new file mode 100644
index 00000000..1f0e2f36
--- /dev/null
+++ b/borgmatic/commands/completion/actions.py
@@ -0,0 +1,36 @@
+import borgmatic.commands.arguments
+
+
+def upgrade_message(language: str, upgrade_command: str, completion_file: str):
+    return f'''
+Your {language} completions script is from a different version of borgmatic than is
+currently installed. Please upgrade your script so your completions match the
+command-line flags in your installed borgmatic! Try this to upgrade:
+
+    {upgrade_command}
+    source {completion_file}
+'''
+
+
+def available_actions(subparsers, current_action=None):
+    '''
+    Given subparsers as an argparse._SubParsersAction instance and a current action name (if
+    any), return the action names that can follow the current action on a command-line.
+
+    This takes into account which sub-actions the current action supports. For instance, if
+    "bootstrap" is a sub-action for "config", then "bootstrap" should be able to follow a current
+    action of "config" but not "list".
+    '''
+    action_to_subactions = borgmatic.commands.arguments.get_subactions_for_actions(
+        subparsers.choices
+    )
+    current_subactions = action_to_subactions.get(current_action)
+
+    if current_subactions:
+        return current_subactions
+
+    all_subactions = set(
+        subaction for subactions in action_to_subactions.values() for subaction in subactions
+    )
+
+    return tuple(action for action in subparsers.choices.keys() if action not in all_subactions)
diff --git a/borgmatic/commands/completion.py b/borgmatic/commands/completion/bash.py
similarity index 55%
rename from borgmatic/commands/completion.py
rename to borgmatic/commands/completion/bash.py
index 1fc976bc..7bf28a42 100644
--- a/borgmatic/commands/completion.py
+++ b/borgmatic/commands/completion/bash.py
@@ -1,13 +1,5 @@
-from borgmatic.commands import arguments
-
-UPGRADE_MESSAGE = '''
-Your bash completions script is from a different version of borgmatic than is
-currently installed. Please upgrade your script so your completions match the
-command-line flags in your installed borgmatic! Try this to upgrade:
-
-    sudo sh -c "borgmatic --bash-completion > $BASH_SOURCE"
-    source $BASH_SOURCE
-'''
+import borgmatic.commands.arguments
+import borgmatic.commands.completion.actions


 def parser_flags(parser):
@@ -23,9 +15,12 @@ def bash_completion():
     Return a bash completion script for the borgmatic command. Produce this by introspecting
     borgmatic's command-line argument parsers.
     '''
-    top_level_parser, subparsers = arguments.make_parsers()
-    global_flags = parser_flags(top_level_parser)
-    actions = ' '.join(subparsers.choices.keys())
+    (
+        unused_global_parser,
+        action_parsers,
+        global_plus_action_parser,
+    ) = borgmatic.commands.arguments.make_parsers()
+    global_flags = parser_flags(global_plus_action_parser)

     # Avert your eyes.
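     # The generated script defines two shell functions: check_version, which warns when
     # this completion script no longer matches the installed borgmatic's flags, and
     # complete_borgmatic, which supplies the actual action and flag completions.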
     return '\n'.join(
         (
             'check_version() {',
             '    local this_script="$(cat "$BASH_SOURCE" 2> /dev/null)"',
             '    local installed_script="$(borgmatic --bash-completion 2> /dev/null)"',
             '    if [ "$this_script" != "$installed_script" ] && [ "$installed_script" != "" ];',
-            f'    then cat << EOF\n{UPGRADE_MESSAGE}\nEOF',
+            f'''    then cat << EOF\n{borgmatic.commands.completion.actions.upgrade_message(
+                'bash',
+                'sudo sh -c "borgmatic --bash-completion > $BASH_SOURCE"',
+                '$BASH_SOURCE',
+            )}\nEOF''',
             '    fi',
             '}',
             'complete_borgmatic() {',
@@ -44,12 +43,22 @@ def bash_completion():
         COMPREPLY=($(compgen -W "%s %s %s" -- "${COMP_WORDS[COMP_CWORD]}"))
         return 0
     fi'''
-            % (action, parser_flags(subparser), actions, global_flags)
-            for action, subparser in subparsers.choices.items()
+            % (
+                action,
+                parser_flags(action_parser),
+                ' '.join(
+                    borgmatic.commands.completion.actions.available_actions(action_parsers, action)
+                ),
+                global_flags,
+            )
+            for action, action_parser in reversed(action_parsers.choices.items())
         )
         + (
             '    COMPREPLY=($(compgen -W "%s %s" -- "${COMP_WORDS[COMP_CWORD]}"))'  # noqa: FS003
-            % (actions, global_flags),
+            % (
+                ' '.join(borgmatic.commands.completion.actions.available_actions(action_parsers)),
+                global_flags,
+            ),
             '    (check_version &)',
             '}',
             '\ncomplete -o bashdefault -o default -F complete_borgmatic borgmatic',
diff --git a/borgmatic/commands/completion/fish.py b/borgmatic/commands/completion/fish.py
new file mode 100644
index 00000000..edca0226
--- /dev/null
+++ b/borgmatic/commands/completion/fish.py
@@ -0,0 +1,176 @@
+import shlex
+from argparse import Action
+from textwrap import dedent
+
+import borgmatic.commands.arguments
+import borgmatic.commands.completion.actions
+
+
+def has_file_options(action: Action):
+    '''
+    Given an argparse.Action instance, return True if it takes a file argument.
+    '''
+    return action.metavar in (
+        'FILENAME',
+        'PATH',
+    ) or action.dest in ('config_paths',)
+
+
+def has_choice_options(action: Action):
+    '''
+    Given an argparse.Action instance, return True if it takes one of a predefined set of arguments.
+    '''
+    return action.choices is not None
+
+
+def has_unknown_required_param_options(action: Action):
+    '''
+    A catch-all for options that take a required parameter, but we don't know what the parameter is.
+    This should be used last. These are actions that take something like a glob, a list of numbers, or a string.
+
+    Actions that match this pattern should not show the normal arguments, because those are unlikely to be valid.
+    '''
+    return (
+        action.required is True
+        or action.nargs
+        in (
+            '+',
+            '*',
+        )
+        or action.metavar in ('PATTERN', 'KEYS', 'N')
+        or (action.type is not None and action.default is None)
+    )
+
+
+def has_exact_options(action: Action):
+    return (
+        has_file_options(action)
+        or has_choice_options(action)
+        or has_unknown_required_param_options(action)
+    )
+
+
+def exact_options_completion(action: Action):
+    '''
+    Given an argparse.Action instance, return a completion invocation that forces file completions, options completion,
+    or just that some value follow the action, if the action takes such an argument and was the last action on the
+    command line prior to the cursor.
+
+    Otherwise, return an empty string.
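+
+    For instance, a "--path" flag with a metavar of "PATH" takes file arguments, so this
+    function would produce (illustratively): complete -c borgmatic -Fr -n "__borgmatic_current_arg --path"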
+    '''
+
+    if not has_exact_options(action):
+        return ''
+
+    args = ' '.join(action.option_strings)
+
+    if has_file_options(action):
+        return f'''\ncomplete -c borgmatic -Fr -n "__borgmatic_current_arg {args}"'''
+
+    if has_choice_options(action):
+        return f'''\ncomplete -c borgmatic -f -a '{' '.join(map(str, action.choices))}' -n "__borgmatic_current_arg {args}"'''
+
+    if has_unknown_required_param_options(action):
+        return f'''\ncomplete -c borgmatic -x -n "__borgmatic_current_arg {args}"'''
+
+    raise ValueError(
+        f'Unexpected action: {action} passes has_exact_options but has no choices produced'
+    )
+
+
+def dedent_strip_as_tuple(string: str):
+    '''
+    Dedent a string, then strip it to avoid requiring your first line to have content, then return a tuple of the string.
+    Makes it easier to write multiline strings for completions when you join them with a tuple.
+    '''
+    return (dedent(string).strip('\n'),)
+
+
+def fish_completion():
+    '''
+    Return a fish completion script for the borgmatic command. Produce this by introspecting
+    borgmatic's command-line argument parsers.
+    '''
+    (
+        unused_global_parser,
+        action_parsers,
+        global_plus_action_parser,
+    ) = borgmatic.commands.arguments.make_parsers()
+
+    all_action_parsers = ' '.join(action for action in action_parsers.choices.keys())
+
+    exact_option_args = tuple(
+        ' '.join(action.option_strings)
+        for action_parser in action_parsers.choices.values()
+        for action in action_parser._actions
+        if has_exact_options(action)
+    ) + tuple(
+        ' '.join(action.option_strings)
+        for action in global_plus_action_parser._actions
+        if len(action.option_strings) > 0
+        if has_exact_options(action)
+    )
+
+    # Avert your eyes.
+    return '\n'.join(
+        dedent_strip_as_tuple(
+            f'''
+            function __borgmatic_check_version
+                set -fx this_filename (status current-filename)
+                fish -c '
+                    if test -f "$this_filename"
+                        set this_script (cat $this_filename 2> /dev/null)
+                        set installed_script (borgmatic --fish-completion 2> /dev/null)
+                        if [ "$this_script" != "$installed_script" ] && [ "$installed_script" != "" ]
+                            echo "{borgmatic.commands.completion.actions.upgrade_message(
+                                'fish',
+                                'borgmatic --fish-completion | sudo tee $this_filename',
+                                '$this_filename',
+                            )}"
+                        end
+                    end
+                ' &
+            end
+            __borgmatic_check_version
+
+            function __borgmatic_current_arg --description 'Check if any of the given arguments are the last on the command line before the cursor'
+                set -l all_args (commandline -poc)
+                # premature optimization to avoid iterating all args if there aren't enough
+                # to have a last arg beyond borgmatic
+                if [ (count $all_args) -lt 2 ]
+                    return 1
+                end
+                for arg in $argv
+                    if [ "$arg" = "$all_args[-1]" ]
+                        return 0
+                    end
+                end
+                return 1
+            end
+
+            set --local action_parser_condition "not __fish_seen_subcommand_from {all_action_parsers}"
+            set --local exact_option_condition "not __borgmatic_current_arg {' '.join(exact_option_args)}"
+            '''
+        )
+        + ('\n# action_parser completions',)
+        + tuple(
+            f'''complete -c borgmatic -f -n "$action_parser_condition" -n "$exact_option_condition" -a '{action_name}' -d {shlex.quote(action_parser.description)}'''
+            for action_name, action_parser in action_parsers.choices.items()
+        )
+        + ('\n# global flags',)
+        + tuple(
+            # -n is checked in order, so put faster / more likely to be true checks first
+            f'''complete -c borgmatic -f -n "$exact_option_condition" -a '{' '.join(action.option_strings)}' -d {shlex.quote(action.help)}{exact_options_completion(action)}'''
+            for action in global_plus_action_parser._actions
+            # ignore the noargs action, as this is an impossible completion for fish
+            if len(action.option_strings) > 0
+            if 'Deprecated' not in action.help
+        )
+        + ('\n# action_parser flags',)
+        + tuple(
+            f'''complete -c borgmatic -f -n "$exact_option_condition" -a '{' '.join(action.option_strings)}' -d {shlex.quote(action.help)} -n "__fish_seen_subcommand_from {action_name}"{exact_options_completion(action)}'''
+            for action_name, action_parser in action_parsers.choices.items()
+            for action in action_parser._actions
+            if 'Deprecated' not in (action.help or ())
+        )
+    )
diff --git a/borgmatic/commands/convert_config.py b/borgmatic/commands/convert_config.py
deleted file mode 100644
index 64a89486..00000000
--- a/borgmatic/commands/convert_config.py
+++ /dev/null
@@ -1,102 +0,0 @@
-import os
-import sys
-import textwrap
-from argparse import ArgumentParser
-
-from ruamel import yaml
-
-from borgmatic.config import convert, generate, legacy, validate
-
-DEFAULT_SOURCE_CONFIG_FILENAME = '/etc/borgmatic/config'
-DEFAULT_SOURCE_EXCLUDES_FILENAME = '/etc/borgmatic/excludes'
-DEFAULT_DESTINATION_CONFIG_FILENAME = '/etc/borgmatic/config.yaml'
-
-
-def parse_arguments(*arguments):
-    '''
-    Given command-line arguments with which this script was invoked, parse the arguments and return
-    them as an ArgumentParser instance.
-    '''
-    parser = ArgumentParser(
-        description='''
-            Convert legacy INI-style borgmatic configuration and excludes files to a single YAML
-            configuration file. Note that this replaces any comments from the source files.
-        '''
-    )
-    parser.add_argument(
-        '-s',
-        '--source-config',
-        dest='source_config_filename',
-        default=DEFAULT_SOURCE_CONFIG_FILENAME,
-        help=f'Source INI-style configuration filename. Default: {DEFAULT_SOURCE_CONFIG_FILENAME}',
-    )
-    parser.add_argument(
-        '-e',
-        '--source-excludes',
-        dest='source_excludes_filename',
-        default=DEFAULT_SOURCE_EXCLUDES_FILENAME
-        if os.path.exists(DEFAULT_SOURCE_EXCLUDES_FILENAME)
-        else None,
-        help='Excludes filename',
-    )
-    parser.add_argument(
-        '-d',
-        '--destination-config',
-        dest='destination_config_filename',
-        default=DEFAULT_DESTINATION_CONFIG_FILENAME,
-        help=f'Destination YAML configuration filename. Default: {DEFAULT_DESTINATION_CONFIG_FILENAME}',
-    )
-
-    return parser.parse_args(arguments)
-
-
-TEXT_WRAP_CHARACTERS = 80
-
-
-def display_result(args):  # pragma: no cover
-    result_lines = textwrap.wrap(
-        f'Your borgmatic configuration has been upgraded. Please review the result in {args.destination_config_filename}.',
-        TEXT_WRAP_CHARACTERS,
-    )
-
-    excludes_phrase = (
-        f' and {args.source_excludes_filename}' if args.source_excludes_filename else ''
-    )
-    delete_lines = textwrap.wrap(
-        f'Once you are satisfied, you can safely delete {args.source_config_filename}{excludes_phrase}.',
-        TEXT_WRAP_CHARACTERS,
-    )
-
-    print('\n'.join(result_lines))
-    print()
-    print('\n'.join(delete_lines))
-
-
-def main():  # pragma: no cover
-    try:
-        args = parse_arguments(*sys.argv[1:])
-        schema = yaml.round_trip_load(open(validate.schema_filename()).read())
-        source_config = legacy.parse_configuration(
-            args.source_config_filename, legacy.CONFIG_FORMAT
-        )
-        source_config_file_mode = os.stat(args.source_config_filename).st_mode
-        source_excludes = (
-            open(args.source_excludes_filename).read().splitlines()
-            if args.source_excludes_filename
-            else []
-        )
-
-        destination_config = convert.convert_legacy_parsed_config(
-            source_config, source_excludes, schema
-        )
-
-        generate.write_configuration(
-            args.destination_config_filename,
-            generate.render_configuration(destination_config),
-            mode=source_config_file_mode,
-        )
-
-        display_result(args)
-    except (ValueError, OSError) as error:
-        print(error, file=sys.stderr)
-        sys.exit(1)
diff --git a/borgmatic/commands/generate_config.py b/borgmatic/commands/generate_config.py
index 78c32f04..f95b3094 100644
--- a/borgmatic/commands/generate_config.py
+++ b/borgmatic/commands/generate_config.py
@@ -1,63 +1,17 @@
+import logging
 import sys
-from argparse import ArgumentParser

-from borgmatic.config import generate, validate
-
-DEFAULT_DESTINATION_CONFIG_FILENAME = '/etc/borgmatic/config.yaml'
+import borgmatic.commands.borgmatic


-def parse_arguments(*arguments):
-    '''
-    Given command-line arguments with which this script was invoked, parse the arguments and return
-    them as an ArgumentParser instance.
-    '''
-    parser = ArgumentParser(description='Generate a sample borgmatic YAML configuration file.')
-    parser.add_argument(
-        '-s',
-        '--source',
-        dest='source_filename',
-        help='Optional YAML configuration file to merge into the generated configuration, useful for upgrading your configuration',
-    )
-    parser.add_argument(
-        '-d',
-        '--destination',
-        dest='destination_filename',
-        default=DEFAULT_DESTINATION_CONFIG_FILENAME,
-        help=f'Destination YAML configuration file, default: {DEFAULT_DESTINATION_CONFIG_FILENAME}',
-    )
-    parser.add_argument(
-        '--overwrite',
-        default=False,
-        action='store_true',
-        help='Whether to overwrite any existing destination file, defaults to false',
-    )
-
-    return parser.parse_args(arguments)
-
-
-def main():  # pragma: no cover
-    try:
-        args = parse_arguments(*sys.argv[1:])
-
-        generate.generate_sample_configuration(
-            args.source_filename,
-            args.destination_filename,
-            validate.schema_filename(),
-            overwrite=args.overwrite,
+def main():
+    warning_log = logging.makeLogRecord(
+        dict(
+            levelno=logging.WARNING,
+            levelname='WARNING',
+            msg='generate-borgmatic-config is deprecated and will be removed in a future release. Please use "borgmatic config generate" instead.',
Please use "borgmatic config generate" instead.', ) + ) - print(f'Generated a sample configuration file at {args.destination_filename}.') - print() - if args.source_filename: - print(f'Merged in the contents of configuration file at {args.source_filename}.') - print('To review the changes made, run:') - print() - print(f' diff --unified {args.source_filename} {args.destination_filename}') - print() - print('This includes all available configuration options with example values. The few') - print('required options are indicated. Please edit the file to suit your needs.') - print() - print('If you ever need help: https://torsion.org/borgmatic/#issues') - except (ValueError, OSError) as error: - print(error, file=sys.stderr) - sys.exit(1) + sys.argv = ['borgmatic', 'config', 'generate'] + sys.argv[1:] + borgmatic.commands.borgmatic.main([warning_log]) diff --git a/borgmatic/commands/validate_config.py b/borgmatic/commands/validate_config.py index 44c0082a..0b3dd1ff 100644 --- a/borgmatic/commands/validate_config.py +++ b/borgmatic/commands/validate_config.py @@ -1,52 +1,17 @@ import logging import sys -from argparse import ArgumentParser -from borgmatic.config import collect, validate - -logger = logging.getLogger(__name__) +import borgmatic.commands.borgmatic -def parse_arguments(*arguments): - ''' - Given command-line arguments with which this script was invoked, parse the arguments and return - them as an ArgumentParser instance. - ''' - config_paths = collect.get_default_config_paths() - - parser = ArgumentParser(description='Validate borgmatic configuration file(s).') - parser.add_argument( - '-c', - '--config', - nargs='+', - dest='config_paths', - default=config_paths, - help=f'Configuration filenames or directories, defaults to: {config_paths}', +def main(): + warning_log = logging.makeLogRecord( + dict( + levelno=logging.WARNING, + levelname='WARNING', + msg='validate-borgmatic-config is deprecated and will be removed from a future release. Please use "borgmatic config validate" instead.', + ) ) - return parser.parse_args(arguments) - - -def main(): # pragma: no cover - args = parse_arguments(*sys.argv[1:]) - - logging.basicConfig(level=logging.INFO, format='%(message)s') - - config_filenames = tuple(collect.collect_config_filenames(args.config_paths)) - if len(config_filenames) == 0: - logger.critical('No files to validate found') - sys.exit(1) - - found_issues = False - for config_filename in config_filenames: - try: - validate.parse_configuration(config_filename, validate.schema_filename()) - except (ValueError, OSError, validate.Validation_error) as error: - logging.critical(f'{config_filename}: Error parsing configuration file') - logging.critical(error) - found_issues = True - - if found_issues: - sys.exit(1) - else: - logger.info(f"All given configuration files are valid: {', '.join(config_filenames)}") + sys.argv = ['borgmatic', 'config', 'validate'] + sys.argv[1:] + borgmatic.commands.borgmatic.main([warning_log]) diff --git a/borgmatic/config/collect.py b/borgmatic/config/collect.py index bd38fee2..80c6c5c0 100644 --- a/borgmatic/config/collect.py +++ b/borgmatic/config/collect.py @@ -24,9 +24,9 @@ def get_default_config_paths(expand_home=True): def collect_config_filenames(config_paths): ''' Given a sequence of config paths, both filenames and directories, resolve that to an iterable - of files. Accomplish this by listing any given directories looking for contained config files - (ending with the ".yaml" or ".yml" extension). 
-    directories are ignored.
+    of absolute files. Accomplish this by listing any given directories looking for contained config
+    files (ending with the ".yaml" or ".yml" extension). This is non-recursive, so any directories
+    within the given directories are ignored.

     Return paths even if they don't exist on disk, so the user can find out about missing
     configuration paths. However, skip a default config path if it's missing, so the user doesn't
@@ -41,7 +41,7 @@ def collect_config_filenames(config_paths):
             continue

         if not os.path.isdir(path) or not exists:
-            yield path
+            yield os.path.abspath(path)
             continue

         if not os.access(path, os.R_OK):
@@ -51,4 +51,4 @@ def collect_config_filenames(config_paths):
             full_filename = os.path.join(path, filename)
             matching_filetype = full_filename.endswith('.yaml') or full_filename.endswith('.yml')
             if matching_filetype and not os.path.isdir(full_filename):
-                yield full_filename
+                yield os.path.abspath(full_filename)
diff --git a/borgmatic/config/convert.py b/borgmatic/config/convert.py
deleted file mode 100644
index e4e55e43..00000000
--- a/borgmatic/config/convert.py
+++ /dev/null
@@ -1,95 +0,0 @@
-import os
-
-from ruamel import yaml
-
-from borgmatic.config import generate
-
-
-def _convert_section(source_section_config, section_schema):
-    '''
-    Given a legacy Parsed_config instance for a single section, convert it to its corresponding
-    yaml.comments.CommentedMap representation in preparation for actual serialization to YAML.
-
-    Where integer types exist in the given section schema, convert their values to integers.
-    '''
-    destination_section_config = yaml.comments.CommentedMap(
-        [
-            (
-                option_name,
-                int(option_value)
-                if section_schema['properties'].get(option_name, {}).get('type') == 'integer'
-                else option_value,
-            )
-            for option_name, option_value in source_section_config.items()
-        ]
-    )
-
-    return destination_section_config
-
-
-def convert_legacy_parsed_config(source_config, source_excludes, schema):
-    '''
-    Given a legacy Parsed_config instance loaded from an INI-style config file and a list of exclude
-    patterns, convert them to a corresponding yaml.comments.CommentedMap representation in
-    preparation for serialization to a single YAML config file.
-
-    Additionally, use the given schema as a source of helpful comments to include within the
-    returned CommentedMap.
-    '''
-    destination_config = yaml.comments.CommentedMap(
-        [
-            (section_name, _convert_section(section_config, schema['properties'][section_name]))
-            for section_name, section_config in source_config._asdict().items()
-        ]
-    )
-
-    # Split space-seperated values into actual lists, make "repository" into a list, and merge in
-    # excludes.
-    location = destination_config['location']
-    location['source_directories'] = source_config.location['source_directories'].split(' ')
-    location['repositories'] = [location.pop('repository')]
-    location['exclude_patterns'] = source_excludes
-
-    if source_config.consistency.get('checks'):
-        destination_config['consistency']['checks'] = source_config.consistency['checks'].split(' ')
-
-    # Add comments to each section, and then add comments to the fields in each section.
-    generate.add_comments_to_configuration_object(destination_config, schema)
-
-    for section_name, section_config in destination_config.items():
-        generate.add_comments_to_configuration_object(
-            section_config, schema['properties'][section_name], indent=generate.INDENT
-        )
-
-    return destination_config
-
-
-class Legacy_configuration_not_upgraded(FileNotFoundError):
-    def __init__(self):
-        super(Legacy_configuration_not_upgraded, self).__init__(
-            '''borgmatic changed its configuration file format in version 1.1.0 from INI-style
-to YAML. This better supports validation, and has a more natural way to express
-lists of values. To upgrade your existing configuration, run:
-
-    sudo upgrade-borgmatic-config
-
-That will generate a new YAML configuration file at /etc/borgmatic/config.yaml
-(by default) using the values from both your existing configuration and excludes
-files. The new version of borgmatic will consume the YAML configuration file
-instead of the old one.'''
-        )
-
-
-def guard_configuration_upgraded(source_config_filename, destination_config_filenames):
-    '''
-    If legacy source configuration exists but no destination upgraded configs do, raise
-    Legacy_configuration_not_upgraded.
-
-    The idea is that we want to alert the user about upgrading their config if they haven't already.
-    '''
-    destination_config_exists = any(
-        os.path.exists(filename) for filename in destination_config_filenames
-    )
-
-    if os.path.exists(source_config_filename) and not destination_config_exists:
-        raise Legacy_configuration_not_upgraded()
diff --git a/borgmatic/config/generate.py b/borgmatic/config/generate.py
index d486f23c..01096547 100644
--- a/borgmatic/config/generate.py
+++ b/borgmatic/config/generate.py
@@ -11,7 +11,7 @@ INDENT = 4
 SEQUENCE_INDENT = 2


-def _insert_newline_before_comment(config, field_name):
+def insert_newline_before_comment(config, field_name):
     '''
     Using some ruamel.yaml black magic, insert a blank line in the config right before the given
     field and its comments.
@@ -21,10 +21,10 @@
     )


-def _schema_to_sample_configuration(schema, level=0, parent_is_sequence=False):
+def schema_to_sample_configuration(schema, level=0, parent_is_sequence=False):
     '''
     Given a loaded configuration schema, generate and return sample config for it. Include comments
-    for each section based on the schema "description".
+    for each option based on the schema "description".
     '''
     schema_type = schema.get('type')
     example = schema.get('example')
@@ -33,13 +33,13 @@
     if schema_type == 'array':
         config = yaml.comments.CommentedSeq(
-            [_schema_to_sample_configuration(schema['items'], level, parent_is_sequence=True)]
+            [schema_to_sample_configuration(schema['items'], level, parent_is_sequence=True)]
         )
         add_comments_to_configuration_sequence(config, schema, indent=(level * INDENT))
     elif schema_type == 'object':
         config = yaml.comments.CommentedMap(
             [
-                (field_name, _schema_to_sample_configuration(sub_schema, level + 1))
+                (field_name, schema_to_sample_configuration(sub_schema, level + 1))
                 for field_name, sub_schema in schema['properties'].items()
             ]
         )
@@ -53,13 +53,13 @@
     return config


-def _comment_out_line(line):
+def comment_out_line(line):
     # If it's already commented out (or empty), there's nothing further to do!
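     # For example, '    # foo: bar' and '' come back unchanged, while '    foo: bar'
     # comes back as '    # foo: bar'.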
     stripped_line = line.lstrip()

     if not stripped_line or stripped_line.startswith('#'):
         return line

-    # Comment out the names of optional sections, inserting the '#' after any indent for aesthetics.
+    # Comment out the names of optional options, inserting the '#' after any indent for aesthetics.
     matches = re.match(r'(\s*)', line)
     indent_spaces = matches.group(0) if matches else ''
     count_indent_spaces = len(indent_spaces)
@@ -67,7 +67,7 @@ def comment_out_line(line):
     return '# '.join((indent_spaces, line[count_indent_spaces:]))


-def _comment_out_optional_configuration(rendered_config):
+def comment_out_optional_configuration(rendered_config):
     '''
     Post-process a rendered configuration string to comment out optional key/values, as determined
     by a sentinel in the comment before each key.
@@ -92,7 +92,7 @@ def comment_out_optional_configuration(rendered_config):
         if not line.strip():
             optional = False

-        lines.append(_comment_out_line(line) if optional else line)
+        lines.append(comment_out_line(line) if optional else line)

     return '\n'.join(lines)

@@ -165,7 +165,6 @@ def add_comments_to_configuration_sequence(config, schema, indent=0):
     return


-REQUIRED_SECTION_NAMES = {'location', 'retention'}
 REQUIRED_KEYS = {'source_directories', 'repositories', 'keep_daily'}
 COMMENTED_OUT_SENTINEL = 'COMMENT_OUT'

@@ -185,7 +184,7 @@ def add_comments_to_configuration_object(config, schema, indent=0, skip_first=Fa
     # If this is an optional key, add an indicator to the comment flagging it to be commented
     # out from the sample configuration. This sentinel is consumed by downstream processing that
     # does the actual commenting out.
-    if field_name not in REQUIRED_SECTION_NAMES and field_name not in REQUIRED_KEYS:
+    if field_name not in REQUIRED_KEYS:
         description = (
             '\n'.join((description, COMMENTED_OUT_SENTINEL))
             if description
@@ -199,7 +198,7 @@ def add_comments_to_configuration_object(config, schema, indent=0, skip_first=Fa
         config.yaml_set_comment_before_after_key(key=field_name, before=description, indent=indent)

         if index > 0:
-            _insert_newline_before_comment(config, field_name)
+            insert_newline_before_comment(config, field_name)


 RUAMEL_YAML_COMMENTS_INDEX = 1

@@ -260,14 +259,14 @@ def merge_source_configuration_into_destination(destination_config, source_confi
             )
             continue

-        # This is some sort of scalar. Simply set it into the destination.
+        # This is some sort of scalar. Set it into the destination.
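+        # (e.g. a string, integer, or boolean option value)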
         destination_config[field_name] = source_config[field_name]
 
     return destination_config
 
 
 def generate_sample_configuration(
-    source_filename, destination_filename, schema_filename, overwrite=False
+    dry_run, source_filename, destination_filename, schema_filename, overwrite=False
 ):
     '''
     Given an optional source configuration filename, and a required destination configuration
@@ -284,11 +283,14 @@ def generate_sample_configuration(
         normalize.normalize(source_filename, source_config)
 
     destination_config = merge_source_configuration_into_destination(
-        _schema_to_sample_configuration(schema), source_config
+        schema_to_sample_configuration(schema), source_config
     )
 
+    if dry_run:
+        return
+
     write_configuration(
         destination_filename,
-        _comment_out_optional_configuration(render_configuration(destination_config)),
+        comment_out_optional_configuration(render_configuration(destination_config)),
         overwrite=overwrite,
     )
diff --git a/borgmatic/config/legacy.py b/borgmatic/config/legacy.py
deleted file mode 100644
index ec1e50a1..00000000
--- a/borgmatic/config/legacy.py
+++ /dev/null
@@ -1,146 +0,0 @@
-from collections import OrderedDict, namedtuple
-from configparser import RawConfigParser
-
-Section_format = namedtuple('Section_format', ('name', 'options'))
-Config_option = namedtuple('Config_option', ('name', 'value_type', 'required'))
-
-
-def option(name, value_type=str, required=True):
-    '''
-    Given a config file option name, an expected type for its value, and whether it's required,
-    return a Config_option capturing that information.
-    '''
-    return Config_option(name, value_type, required)
-
-
-CONFIG_FORMAT = (
-    Section_format(
-        'location',
-        (
-            option('source_directories'),
-            option('one_file_system', value_type=bool, required=False),
-            option('remote_path', required=False),
-            option('repository'),
-        ),
-    ),
-    Section_format(
-        'storage',
-        (
-            option('encryption_passphrase', required=False),
-            option('compression', required=False),
-            option('umask', required=False),
-        ),
-    ),
-    Section_format(
-        'retention',
-        (
-            option('keep_within', required=False),
-            option('keep_hourly', int, required=False),
-            option('keep_daily', int, required=False),
-            option('keep_weekly', int, required=False),
-            option('keep_monthly', int, required=False),
-            option('keep_yearly', int, required=False),
-            option('prefix', required=False),
-        ),
-    ),
-    Section_format(
-        'consistency', (option('checks', required=False), option('check_last', required=False))
-    ),
-)
-
-
-def validate_configuration_format(parser, config_format):
-    '''
-    Given an open RawConfigParser and an expected config file format, validate that the parsed
-    configuration file has the expected sections, that any required options are present in those
-    sections, and that there aren't any unexpected options.
-
-    A section is required if any of its contained options are required.
-
-    Raise ValueError if anything is awry.
-    '''
-    section_names = set(parser.sections())
-    required_section_names = tuple(
-        section.name
-        for section in config_format
-        if any(option.required for option in section.options)
-    )
-
-    unknown_section_names = section_names - set(
-        section_format.name for section_format in config_format
-    )
-    if unknown_section_names:
-        raise ValueError(f"Unknown config sections found: {', '.join(unknown_section_names)}")
-
-    missing_section_names = set(required_section_names) - section_names
-    if missing_section_names:
-        raise ValueError(f"Missing config sections: {', '.join(missing_section_names)}")
-
-    for section_format in config_format:
-        if section_format.name not in section_names:
-            continue
-
-        option_names = parser.options(section_format.name)
-        expected_options = section_format.options
-
-        unexpected_option_names = set(option_names) - set(
-            option.name for option in expected_options
-        )
-
-        if unexpected_option_names:
-            raise ValueError(
-                f"Unexpected options found in config section {section_format.name}: {', '.join(sorted(unexpected_option_names))}",
-            )
-
-        missing_option_names = tuple(
-            option.name
-            for option in expected_options
-            if option.required
-            if option.name not in option_names
-        )
-
-        if missing_option_names:
-            raise ValueError(
-                f"Required options missing from config section {section_format.name}: {', '.join(missing_option_names)}",
-            )
-
-
-def parse_section_options(parser, section_format):
-    '''
-    Given an open RawConfigParser and an expected section format, return the option values from that
-    section as a dict mapping from option name to value. Omit those options that are not present in
-    the parsed options.
-
-    Raise ValueError if any option values cannot be coerced to the expected Python data type.
-    '''
-    type_getter = {str: parser.get, int: parser.getint, bool: parser.getboolean}
-
-    return OrderedDict(
-        (option.name, type_getter[option.value_type](section_format.name, option.name))
-        for option in section_format.options
-        if parser.has_option(section_format.name, option.name)
-    )
-
-
-def parse_configuration(config_filename, config_format):
-    '''
-    Given a config filename and an expected config file format, return the parsed configuration
-    as a namedtuple with one attribute for each parsed section.
-
-    Raise IOError if the file cannot be read, or ValueError if the format is not as expected.
-    '''
-    parser = RawConfigParser()
-    if not parser.read(config_filename):
-        raise ValueError(f'Configuration file cannot be opened: {config_filename}')
-
-    validate_configuration_format(parser, config_format)
-
-    # Describes a parsed configuration, where each attribute is the name of a configuration file
-    # section and each value is a dict of that section's parsed options.
-    Parsed_config = namedtuple(
-        'Parsed_config', (section_format.name for section_format in config_format)
-    )
-
-    return Parsed_config(
-        *(parse_section_options(parser, section_format) for section_format in config_format)
-    )
diff --git a/borgmatic/config/load.py b/borgmatic/config/load.py
index f3c45d52..e0fabfa6 100644
--- a/borgmatic/config/load.py
+++ b/borgmatic/config/load.py
@@ -38,6 +38,37 @@ def include_configuration(loader, filename_node, include_directory):
     return load_configuration(include_filename)
 
 
+def raise_retain_node_error(loader, node):
+    '''
+    Given a ruamel.yaml.loader.Loader and a YAML node, raise an error about "!retain" usage.
+
+    Raise ValueError if a mapping or sequence node is given, as that indicates that "!retain" was
+    used in a configuration file without a merge.
+    In configuration files with a merge, mapping and sequence nodes with "!retain" tags are
+    handled by deep_merge_nodes() below.
+
+    Also raise ValueError if a scalar node is given, as "!retain" is not supported on scalar nodes.
+    '''
+    if isinstance(node, (ruamel.yaml.nodes.MappingNode, ruamel.yaml.nodes.SequenceNode)):
+        raise ValueError(
+            'The !retain tag may only be used within a configuration file containing a merged !include tag.'
+        )
+
+    raise ValueError('The !retain tag may only be used on a YAML mapping or sequence.')
+
+
+def raise_omit_node_error(loader, node):
+    '''
+    Given a ruamel.yaml.loader.Loader and a YAML node, raise an error about "!omit" usage.
+
+    Raise ValueError unconditionally, as an "!omit" node here indicates it was used in a
+    configuration file without a merge. In configuration files with a merge, nodes with "!omit"
+    tags are handled by deep_merge_nodes() below.
+    '''
+    raise ValueError(
+        'The !omit tag may only be used on a scalar (e.g., string) list element within a configuration file containing a merged !include tag.'
+    )
+
+
 class Include_constructor(ruamel.yaml.SafeConstructor):
     '''
     A YAML "constructor" (a ruamel.yaml concept) that supports a custom "!include" tag for including
@@ -50,6 +81,8 @@ class Include_constructor(ruamel.yaml.SafeConstructor):
             '!include',
             functools.partial(include_configuration, include_directory=include_directory),
         )
+        self.add_constructor('!retain', raise_retain_node_error)
+        self.add_constructor('!omit', raise_omit_node_error)
 
     def flatten_mapping(self, node):
         '''
@@ -64,8 +97,8 @@ class Include_constructor(ruamel.yaml.SafeConstructor):
         ```
 
         These includes are deep merged into the current configuration file. For instance, in this
-        example, any "retention" options in common.yaml will get merged into the "retention" section
-        in the example configuration file.
+        example, any "option" with sub-options in common.yaml will get merged into the corresponding
+        "option" with sub-options in the example configuration file.
         '''
         representer = ruamel.yaml.representer.SafeRepresenter()
 
@@ -83,11 +116,12 @@ def load_configuration(filename):
     '''
     Load the given configuration file and return its contents as a data structure of nested dicts
     and lists. Also, replace any "{constant}" strings with the value of the "constant" key in the
-    "constants" section of the configuration file.
+    "constants" option of the configuration file.
 
     Raise ruamel.yaml.error.YAMLError if something goes wrong parsing the YAML, or RecursionError
     if there are too many recursive includes.
     '''
+
     # Use an embedded derived class for the include constructor so as to capture the filename
     # value. (functools.partial doesn't work for this use case because yaml.Constructor has to be
     # an actual class.)
@@ -115,6 +149,16 @@ def load_configuration(filename):
     return config
 
 
+def filter_omitted_nodes(nodes):
+    '''
+    Given a list of nodes, return a filtered list omitting any nodes with an "!omit" tag or with a
+    value matching such nodes.
+    '''
+    omitted_values = tuple(node.value for node in nodes if node.tag == '!omit')
+
+    return [node for node in nodes if node.value not in omitted_values]
+
+
 DELETED_NODE = object()
 
 
@@ -176,9 +220,13 @@ def deep_merge_nodes(nodes):
             ),
         ]
 
+    If a mapping or sequence node has a YAML "!retain" tag, then that node is not merged.
+
     The purpose of deep merging like this is to support, for instance, merging one borgmatic
-    configuration file into another for reuse, such that a configuration section ("retention",
-    etc.) does not completely replace the corresponding section in a merged file.
+    configuration file into another for reuse, such that a configuration option with sub-options
+    does not completely replace the corresponding option in a merged file.
+
+    Raise ValueError if a merge is implied using two incompatible types.
     '''
     # Map from original node key/value to the replacement merged node. DELETED_NODE as a replacement
     # node indicates deletion.
@@ -193,37 +241,52 @@ def deep_merge_nodes(nodes):
             # If the keys match and the values are different, we need to merge these two A and B nodes.
             if a_key.tag == b_key.tag and a_key.value == b_key.value and a_value != b_value:
+                if type(a_value) is not type(b_value):
+                    raise ValueError(
+                        f'Incompatible types found when trying to merge "{a_key.value}:" values across configuration files: {type(a_value).id} and {type(b_value).id}'
+                    )
+
                 # Since we're merging into the B node, consider the A node a duplicate and remove it.
                 replaced_nodes[(a_key, a_value)] = DELETED_NODE
 
                 # If we're dealing with MappingNodes, recurse and merge its values as well.
                 if isinstance(b_value, ruamel.yaml.nodes.MappingNode):
-                    replaced_nodes[(b_key, b_value)] = (
-                        b_key,
-                        ruamel.yaml.nodes.MappingNode(
-                            tag=b_value.tag,
-                            value=deep_merge_nodes(a_value.value + b_value.value),
-                            start_mark=b_value.start_mark,
-                            end_mark=b_value.end_mark,
-                            flow_style=b_value.flow_style,
-                            comment=b_value.comment,
-                            anchor=b_value.anchor,
-                        ),
-                    )
+                    # A "!retain" tag says to skip deep merging for this node. Replace the tag so
+                    # downstream schema validation doesn't break on our application-specific tag.
+                    if b_value.tag == '!retain':
+                        b_value.tag = 'tag:yaml.org,2002:map'
+                    else:
+                        replaced_nodes[(b_key, b_value)] = (
+                            b_key,
+                            ruamel.yaml.nodes.MappingNode(
+                                tag=b_value.tag,
+                                value=deep_merge_nodes(a_value.value + b_value.value),
+                                start_mark=b_value.start_mark,
+                                end_mark=b_value.end_mark,
+                                flow_style=b_value.flow_style,
+                                comment=b_value.comment,
+                                anchor=b_value.anchor,
+                            ),
+                        )
                 # If we're dealing with SequenceNodes, merge by appending one sequence to the other.
                 elif isinstance(b_value, ruamel.yaml.nodes.SequenceNode):
-                    replaced_nodes[(b_key, b_value)] = (
-                        b_key,
-                        ruamel.yaml.nodes.SequenceNode(
-                            tag=b_value.tag,
-                            value=a_value.value + b_value.value,
-                            start_mark=b_value.start_mark,
-                            end_mark=b_value.end_mark,
-                            flow_style=b_value.flow_style,
-                            comment=b_value.comment,
-                            anchor=b_value.anchor,
-                        ),
-                    )
+                    # A "!retain" tag says to skip deep merging for this node. Replace the tag so
+                    # downstream schema validation doesn't break on our application-specific tag.
+                    if b_value.tag == '!retain':
+                        b_value.tag = 'tag:yaml.org,2002:seq'
+                    else:
+                        replaced_nodes[(b_key, b_value)] = (
+                            b_key,
+                            ruamel.yaml.nodes.SequenceNode(
+                                tag=b_value.tag,
+                                value=filter_omitted_nodes(a_value.value + b_value.value),
+                                start_mark=b_value.start_mark,
+                                end_mark=b_value.end_mark,
+                                flow_style=b_value.flow_style,
+                                comment=b_value.comment,
+                                anchor=b_value.anchor,
+                            ),
+                        )
 
     return [
         replaced_nodes.get(node, node) for node in nodes if replaced_nodes.get(node) != DELETED_NODE
diff --git a/borgmatic/config/normalize.py b/borgmatic/config/normalize.py
index 9079500e..af8e6b15 100644
--- a/borgmatic/config/normalize.py
+++ b/borgmatic/config/normalize.py
@@ -2,31 +2,108 @@ import logging
 import os
 
 
+def normalize_sections(config_filename, config):
+    '''
+    Given a configuration filename and a configuration dict of its loaded contents, airlift any
+    options out of sections ("location:", etc.) to the global scope and delete those sections.
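+
+    As an illustrative sketch (not from the patch itself): a config loaded as
+    {'location': {'source_directories': ['/home']}, 'retention': {'keep_daily': 7}}
+    comes out of this function as {'source_directories': ['/home'], 'keep_daily': 7}.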
+    Return any log message warnings produced based on the normalization performed.
+
+    Raise ValueError if the "prefix" option is set to different values in the "location" and
+    "consistency" sections, or if the "umask" option is set to different values in the "storage"
+    and "hooks" sections.
+    '''
+    location = config.get('location') or {}
+    storage = config.get('storage') or {}
+    consistency = config.get('consistency') or {}
+    hooks = config.get('hooks') or {}
+
+    if (
+        location.get('prefix')
+        and consistency.get('prefix')
+        and location.get('prefix') != consistency.get('prefix')
+    ):
+        raise ValueError(
+            'The retention prefix and the consistency prefix cannot have different values (unless one is not set).'
+        )
+
+    if storage.get('umask') and hooks.get('umask') and storage.get('umask') != hooks.get('umask'):
+        raise ValueError(
+            'The storage umask and the hooks umask cannot have different values (unless one is not set).'
+        )
+
+    any_section_upgraded = False
+
+    # Move any options from deprecated sections into the global scope.
+    for section_name in ('location', 'storage', 'retention', 'consistency', 'output', 'hooks'):
+        section_config = config.get(section_name)
+
+        if section_config:
+            any_section_upgraded = True
+            del config[section_name]
+            config.update(section_config)
+
+    if any_section_upgraded:
+        return [
+            logging.makeLogRecord(
+                dict(
+                    levelno=logging.WARNING,
+                    levelname='WARNING',
+                    msg=f'{config_filename}: Configuration sections (like location: and storage:) are deprecated and support will be removed from a future release. To prepare for this, move your options out of sections to the global scope.',
+                )
+            )
+        ]
+
+    return []
+
+
 def normalize(config_filename, config):
     '''
     Given a configuration filename and a configuration dict of its loaded contents, apply particular
     hard-coded rules to normalize the configuration to adhere to the current schema. Return any log
     message warnings produced based on the normalization performed.
+
+    Raise ValueError if the configuration cannot be normalized.
     '''
-    logs = []
-    location = config.get('location') or {}
-    storage = config.get('storage') or {}
-    consistency = config.get('consistency') or {}
-    hooks = config.get('hooks') or {}
+    logs = normalize_sections(config_filename, config)
 
     # Upgrade exclude_if_present from a string to a list.
-    exclude_if_present = location.get('exclude_if_present')
+    exclude_if_present = config.get('exclude_if_present')
     if isinstance(exclude_if_present, str):
-        config['location']['exclude_if_present'] = [exclude_if_present]
+        logs.append(
+            logging.makeLogRecord(
+                dict(
+                    levelno=logging.WARNING,
+                    levelname='WARNING',
+                    msg=f'{config_filename}: The exclude_if_present option now expects a list value. String values for this option are deprecated and support will be removed from a future release.',
+                )
+            )
+        )
+        config['exclude_if_present'] = [exclude_if_present]
 
     # Upgrade various monitoring hooks from a string to a dict.
-    healthchecks = hooks.get('healthchecks')
+    healthchecks = config.get('healthchecks')
     if isinstance(healthchecks, str):
-        config['hooks']['healthchecks'] = {'ping_url': healthchecks}
+        logs.append(
+            logging.makeLogRecord(
+                dict(
+                    levelno=logging.WARNING,
+                    levelname='WARNING',
+                    msg=f'{config_filename}: The healthchecks hook now expects a mapping value. String values for this option are deprecated and support will be removed from a future release.',
+                )
+            )
+        )
+        config['healthchecks'] = {'ping_url': healthchecks}
 
-    cronitor = hooks.get('cronitor')
+    cronitor = config.get('cronitor')
     if isinstance(cronitor, str):
-        config['hooks']['cronitor'] = {'ping_url': cronitor}
+        logs.append(
+            logging.makeLogRecord(
+                dict(
+                    levelno=logging.WARNING,
+                    levelname='WARNING',
+                    msg=f'{config_filename}: The cronitor hook now expects key/value pairs. String values for this option are deprecated and support will be removed from a future release.',
+                )
+            )
+        )
+        config['cronitor'] = {'ping_url': cronitor}
 
     if isinstance(cronitor, dict) and 'ping_url' in cronitor:
         config['hooks']['cronitor'] = {
             'create': cronitor['ping_url'],
@@ -35,67 +112,158 @@ def normalize(config_filename, config):
             'check': cronitor['ping_url'],
         }
 
-    pagerduty = hooks.get('pagerduty')
+    pagerduty = config.get('pagerduty')
    if isinstance(pagerduty, str):
-        config['hooks']['pagerduty'] = {'integration_key': pagerduty}
+        logs.append(
+            logging.makeLogRecord(
+                dict(
+                    levelno=logging.WARNING,
+                    levelname='WARNING',
+                    msg=f'{config_filename}: The pagerduty hook now expects key/value pairs. String values for this option are deprecated and support will be removed from a future release.',
+                )
+            )
+        )
+        config['pagerduty'] = {'integration_key': pagerduty}
 
-    cronhub = hooks.get('cronhub')
+    cronhub = config.get('cronhub')
     if isinstance(cronhub, str):
-        config['hooks']['cronhub'] = {'ping_url': cronhub}
+        logs.append(
+            logging.makeLogRecord(
+                dict(
+                    levelno=logging.WARNING,
+                    levelname='WARNING',
+                    msg=f'{config_filename}: The cronhub hook now expects key/value pairs. String values for this option are deprecated and support will be removed from a future release.',
+                )
+            )
+        )
+        config['cronhub'] = {'ping_url': cronhub}
 
     # Upgrade consistency checks from a list of strings to a list of dicts.
-    checks = consistency.get('checks')
+    checks = config.get('checks')
     if isinstance(checks, list) and len(checks) and isinstance(checks[0], str):
-        config['consistency']['checks'] = [{'name': check_type} for check_type in checks]
+        logs.append(
+            logging.makeLogRecord(
+                dict(
+                    levelno=logging.WARNING,
+                    levelname='WARNING',
+                    msg=f'{config_filename}: The checks option now expects a list of key/value pairs. Lists of strings for this option are deprecated and support will be removed from a future release.',
+                )
+            )
+        )
+        config['checks'] = [{'name': check_type} for check_type in checks]
 
     # Rename various configuration options.
-    numeric_owner = location.pop('numeric_owner', None)
+    numeric_owner = config.pop('numeric_owner', None)
     if numeric_owner is not None:
-        config['location']['numeric_ids'] = numeric_owner
+        logs.append(
+            logging.makeLogRecord(
+                dict(
+                    levelno=logging.WARNING,
+                    levelname='WARNING',
+                    msg=f'{config_filename}: The numeric_owner option has been renamed to numeric_ids. numeric_owner is deprecated and support will be removed from a future release.',
+                )
+            )
+        )
+        config['numeric_ids'] = numeric_owner
 
-    bsd_flags = location.pop('bsd_flags', None)
+    bsd_flags = config.pop('bsd_flags', None)
     if bsd_flags is not None:
-        config['location']['flags'] = bsd_flags
+        logs.append(
+            logging.makeLogRecord(
+                dict(
+                    levelno=logging.WARNING,
+                    levelname='WARNING',
+                    msg=f'{config_filename}: The bsd_flags option has been renamed to flags. bsd_flags is deprecated and support will be removed from a future release.',
+                )
+            )
+        )
+        config['flags'] = bsd_flags
 
-    remote_rate_limit = storage.pop('remote_rate_limit', None)
+    remote_rate_limit = config.pop('remote_rate_limit', None)
     if remote_rate_limit is not None:
-        config['storage']['upload_rate_limit'] = remote_rate_limit
+        logs.append(
+            logging.makeLogRecord(
+                dict(
+                    levelno=logging.WARNING,
+                    levelname='WARNING',
+                    msg=f'{config_filename}: The remote_rate_limit option has been renamed to upload_rate_limit. remote_rate_limit is deprecated and support will be removed from a future release.',
+                )
+            )
+        )
+        config['upload_rate_limit'] = remote_rate_limit
 
     # Upgrade remote repositories to ssh:// syntax, required in Borg 2.
-    repositories = location.get('repositories')
+    repositories = config.get('repositories')
     if repositories:
-        config['location']['repositories'] = []
-        for repository in repositories:
-            if '~' in repository:
+        if isinstance(repositories[0], str):
+            logs.append(
+                logging.makeLogRecord(
+                    dict(
+                        levelno=logging.WARNING,
+                        levelname='WARNING',
+                        msg=f'{config_filename}: The repositories option now expects a list of key/value pairs. Lists of strings for this option are deprecated and support will be removed from a future release.',
+                    )
+                )
+            )
+            config['repositories'] = [{'path': repository} for repository in repositories]
+            repositories = config['repositories']
+
+        config['repositories'] = []
+
+        for repository_dict in repositories:
+            repository_path = repository_dict['path']
+            if '~' in repository_path:
                 logs.append(
                     logging.makeLogRecord(
                         dict(
                             levelno=logging.WARNING,
                             levelname='WARNING',
-                            msg=f'{config_filename}: Repository paths containing "~" are deprecated in borgmatic and no longer work in Borg 2.x+.',
+                            msg=f'{config_filename}: Repository paths containing "~" are deprecated in borgmatic and support will be removed from a future release.',
                         )
                     )
                 )
-            if ':' in repository:
-                if repository.startswith('file://'):
-                    config['location']['repositories'].append(
-                        os.path.abspath(repository.partition('file://')[-1])
+            if ':' in repository_path:
+                if repository_path.startswith('file://'):
+                    updated_repository_path = os.path.abspath(
+                        repository_path.partition('file://')[-1]
                     )
-                elif repository.startswith('ssh://'):
-                    config['location']['repositories'].append(repository)
+                    config['repositories'].append(
+                        dict(
+                            repository_dict,
+                            path=updated_repository_path,
+                        )
+                    )
+                elif repository_path.startswith('ssh://'):
+                    config['repositories'].append(repository_dict)
                 else:
-                    rewritten_repository = f"ssh://{repository.replace(':~', '/~').replace(':/', '/').replace(':', '/./')}"
+                    rewritten_repository_path = f"ssh://{repository_path.replace(':~', '/~').replace(':/', '/').replace(':', '/./')}"
                     logs.append(
                         logging.makeLogRecord(
                             dict(
                                 levelno=logging.WARNING,
                                 levelname='WARNING',
-                                msg=f'{config_filename}: Remote repository paths without ssh:// syntax are deprecated. Interpreting "{repository}" as "{rewritten_repository}"',
+                                msg=f'{config_filename}: Remote repository paths without ssh:// syntax are deprecated and support will be removed from a future release. Interpreting "{repository_path}" as "{rewritten_repository_path}"',
Interpreting "{repository_path}" as "{rewritten_repository_path}"', ) ) ) - config['location']['repositories'].append(rewritten_repository) + config['repositories'].append( + dict( + repository_dict, + path=rewritten_repository_path, + ) + ) else: - config['location']['repositories'].append(repository) + config['repositories'].append(repository_dict) + + if config.get('prefix'): + logs.append( + logging.makeLogRecord( + dict( + levelno=logging.WARNING, + levelname='WARNING', + msg=f'{config_filename}: The prefix option is deprecated and support will be removed from a future release. Use archive_name_format or match_archives instead.', + ) + ) + ) return logs diff --git a/borgmatic/config/override.py b/borgmatic/config/override.py index 8b2a1ab8..05173d2c 100644 --- a/borgmatic/config/override.py +++ b/borgmatic/config/override.py @@ -32,19 +32,33 @@ def convert_value_type(value): return ruamel.yaml.YAML(typ='safe').load(io.StringIO(value)) +LEGACY_SECTION_NAMES = {'location', 'storage', 'retention', 'consistency', 'output', 'hooks'} + + +def strip_section_names(parsed_override_key): + ''' + Given a parsed override key as a tuple of option and suboption names, strip out any initial + legacy section names, since configuration file normalization also strips them out. + ''' + if parsed_override_key[0] in LEGACY_SECTION_NAMES: + return parsed_override_key[1:] + + return parsed_override_key + + def parse_overrides(raw_overrides): ''' - Given a sequence of configuration file override strings in the form of "section.option=value", + Given a sequence of configuration file override strings in the form of "option.suboption=value", parse and return a sequence of tuples (keys, values), where keys is a sequence of strings. For instance, given the following raw overrides: - ['section.my_option=value1', 'section.other_option=value2'] + ['my_option.suboption=value1', 'other_option=value2'] ... return this: ( - (('section', 'my_option'), 'value1'), - (('section', 'other_option'), 'value2'), + (('my_option', 'suboption'), 'value1'), + (('other_option'), 'value2'), ) Raise ValueError if an override can't be parsed. @@ -57,10 +71,15 @@ def parse_overrides(raw_overrides): for raw_override in raw_overrides: try: raw_keys, value = raw_override.split('=', 1) - parsed_overrides.append((tuple(raw_keys.split('.')), convert_value_type(value),)) + parsed_overrides.append( + ( + strip_section_names(tuple(raw_keys.split('.'))), + convert_value_type(value), + ) + ) except ValueError: raise ValueError( - f"Invalid override '{raw_override}'. Make sure you use the form: SECTION.OPTION=VALUE" + f"Invalid override '{raw_override}'. Make sure you use the form: OPTION=VALUE or OPTION.SUBOPTION=VALUE" ) except ruamel.yaml.error.YAMLError as error: raise ValueError(f"Invalid override '{raw_override}': {error.problem}") @@ -71,9 +90,9 @@ def parse_overrides(raw_overrides): def apply_overrides(config, raw_overrides): ''' Given a configuration dict and a sequence of configuration file override strings in the form of - "section.option=value", parse each override and set it the configuration dict. + "option.suboption=value", parse each override and set it the configuration dict. 
     '''
     overrides = parse_overrides(raw_overrides)
 
-    for (keys, value) in overrides:
+    for keys, value in overrides:
         set_values(config, keys, value)
diff --git a/borgmatic/config/schema.yaml b/borgmatic/config/schema.yaml
index c29aadea..bb20d61d 100644
--- a/borgmatic/config/schema.yaml
+++ b/borgmatic/config/schema.yaml
@@ -1,12 +1,12 @@
 type: object
 required:
-    - location
+    - repositories
 additionalProperties: false
 properties:
     constants:
         type: object
         description: |
-            Constants to use in the configuration file. All occurences of the
+            Constants to use in the configuration file. All occurrences of the
             constant name within curly braces will be replaced with the value.
             For example, if you have a constant named "hostname" with the value
             "myhostname", then the string "{hostname}" will be replaced with
@@ -14,1229 +14,1142 @@ properties:
         example:
             hostname: myhostname
             prefix: myprefix
-    location:
-        type: object
+    source_directories:
+        type: array
+        items:
+            type: string
+        description: |
+            List of source directories and files to backup. Globs and tildes are
+            expanded. Do not backslash spaces in path names.
+        example:
+            - /home
+            - /etc
+            - /var/log/syslog*
+            - /home/user/path with spaces
-        description: |
-            Where to look for files to backup, and where to store those backups.
-            See https://borgbackup.readthedocs.io/en/stable/quickstart.html and
-            https://borgbackup.readthedocs.io/en/stable/usage/create.html
-            for details.
-        required:
-            - repositories
-        additionalProperties: false
-        properties:
-            source_directories:
-                type: array
-                items:
-                    type: string
-                description: |
-                    List of source directories to backup. Globs and tildes are
-                    expanded. Do not backslash spaces in path names.
-                example:
-                    - /home
-                    - /etc
-                    - /var/log/syslog*
-                    - /home/user/path with spaces
+    repositories:
+        type: array
+        items:
+            type: object
+            required:
+                - path
+            properties:
+                path:
+                    type: string
+                    example: ssh://user@backupserver/./{fqdn}
+                label:
+                    type: string
-            repositories:
-                type: array
-                items:
-                    type: string
-                description: |
-                    Paths to local or remote repositories (required). Tildes are
-                    expanded. Multiple repositories are backed up to in
-                    sequence. Borg placeholders can be used. See the output of
-                    "borg help placeholders" for details. See ssh_command for
-                    SSH options like identity file or port. If systemd service
-                    is used, then add local repository paths in the systemd
-                    service file to the ReadWritePaths list.
-                example:
-                    - ssh://user@backupserver/./sourcehostname.borg
-                    - ssh://user@backupserver/./{fqdn}
-                    - /var/local/backups/local.borg
-            working_directory:
-                type: string
-                description: |
-                    Working directory for the "borg create" command. Tildes are
-                    expanded. Useful for backing up using relative paths. See
-                    http://borgbackup.readthedocs.io/en/stable/usage/create.html
-                    for details. Defaults to not set.
-                example: /path/to/working/directory
-            one_file_system:
-                type: boolean
-                description: |
-                    Stay in same file system: do not cross mount points beyond
-                    the given source directories. Defaults to false. But when a
-                    database hook is used, the setting here is ignored and
-                    one_file_system is considered true.
-                example: true
-            numeric_ids:
-                type: boolean
-                description: |
-                    Only store/extract numeric user and group identifiers.
-                    Defaults to false.
-                example: true
-            atime:
-                type: boolean
-                description: |
-                    Store atime into archive. Defaults to true in Borg < 1.2,
-                    false in Borg 1.2+.
-                example: false
-            ctime:
-                type: boolean
-                description: Store ctime into archive. Defaults to true.
-                example: false
-            birthtime:
-                type: boolean
-                description: |
-                    Store birthtime (creation date) into archive. Defaults to
-                    true.
-                example: false
-            read_special:
-                type: boolean
-                description: |
-                    Use Borg's --read-special flag to allow backup of block and
-                    other special devices. Use with caution, as it will lead to
-                    problems if used when backing up special devices such as
-                    /dev/zero. Defaults to false. But when a database hook is
-                    used, the setting here is ignored and read_special is
-                    considered true.
-                example: false
-            flags:
-                type: boolean
-                description: |
-                    Record filesystem flags (e.g. NODUMP, IMMUTABLE) in archive.
-                    Defaults to true.
-                example: true
-            files_cache:
-                type: string
-                description: |
-                    Mode in which to operate the files cache. See
-                    http://borgbackup.readthedocs.io/en/stable/usage/create.html
-                    for details. Defaults to "ctime,size,inode".
-                example: ctime,size,inode
-            local_path:
-                type: string
-                description: |
-                    Alternate Borg local executable. Defaults to "borg".
-                example: borg1
-            remote_path:
-                type: string
-                description: |
-                    Alternate Borg remote executable. Defaults to "borg".
-                example: borg1
-            patterns:
-                type: array
-                items:
-                    type: string
-                description: |
-                    Any paths matching these patterns are included/excluded from
-                    backups. Globs are expanded. (Tildes are not.) See the
-                    output of "borg help patterns" for more details. Quote any
-                    value if it contains leading punctuation, so it parses
-                    correctly. Note that only one of "patterns" and
-                    "source_directories" may be used.
-                example:
-                    - 'R /'
-                    - '- /home/*/.cache'
-                    - '+ /home/susan'
-                    - '- /home/*'
-            patterns_from:
-                type: array
-                items:
-                    type: string
-                description: |
-                    Read include/exclude patterns from one or more separate
-                    named files, one pattern per line. Note that Borg considers
-                    this option experimental. See the output of "borg help
-                    patterns" for more details.
-                example:
-                    - /etc/borgmatic/patterns
-            exclude_patterns:
-                type: array
-                items:
-                    type: string
-                description: |
-                    Any paths matching these patterns are excluded from backups.
-                    Globs and tildes are expanded. Note that a glob pattern must
-                    either start with a glob or be an absolute path. Do not
-                    backslash spaces in path names. See the output of "borg help
-                    patterns" for more details.
-                example:
-                    - '*.pyc'
-                    - /home/*/.cache
-                    - '*/.vim*.tmp'
-                    - /etc/ssl
-                    - /home/user/path with spaces
-            exclude_from:
-                type: array
-                items:
-                    type: string
-                description: |
-                    Read exclude patterns from one or more separate named files,
-                    one pattern per line. See the output of "borg help patterns"
-                    for more details.
-                example:
-                    - /etc/borgmatic/excludes
-            exclude_caches:
-                type: boolean
-                description: |
-                    Exclude directories that contain a CACHEDIR.TAG file. See
-                    http://www.brynosaurus.com/cachedir/spec.html for details.
-                    Defaults to false.
-                example: true
-            exclude_if_present:
-                type: array
-                items:
-                    type: string
-                description: |
-                    Exclude directories that contain a file with the given
-                    filenames. Defaults to not set.
-                example:
-                    - .nobackup
-            keep_exclude_tags:
-                type: boolean
-                description: |
-                    If true, the exclude_if_present filename is included in
-                    backups. Defaults to false, meaning that the
-                    exclude_if_present filename is omitted from backups.
-                example: true
-            exclude_nodump:
-                type: boolean
-                description: |
-                    Exclude files with the NODUMP flag. Defaults to false.
-                example: true
-            borgmatic_source_directory:
-                type: string
-                description: |
-                    Path for additional source files used for temporary internal
-                    state like borgmatic database dumps. Note that changing this
-                    path prevents "borgmatic restore" from finding any database
-                    dumps created before the change. Defaults to ~/.borgmatic
-                example: /tmp/borgmatic
-            source_directories_must_exist:
-                type: boolean
-                description: |
-                    If true, then source directories must exist, otherwise an
-                    error is raised. Defaults to false.
-                example: true
-    storage:
-        type: object
-        description: |
-            Repository storage options. See
-            https://borgbackup.readthedocs.io/en/stable/usage/create.html and
-            https://borgbackup.readthedocs.io/en/stable/usage/general.html for
-            details.
+                    example: backupserver
+        description: |
+            A required list of local or remote repositories with paths and
+            optional labels (which can be used with the --repository flag to
+            select a repository). Tildes are expanded. Multiple repositories are
+            backed up to in sequence. Borg placeholders can be used. See the
+            output of "borg help placeholders" for details. See ssh_command for
+            SSH options like identity file or port. If systemd service is used,
+            then add local repository paths in the systemd service file to the
+            ReadWritePaths list. Prior to borgmatic 1.7.10, repositories was a
+            list of plain path strings.
+        example:
+            - path: ssh://user@backupserver/./sourcehostname.borg
+              label: backupserver
+            - path: /mnt/backup
+              label: local
+    working_directory:
+        type: string
+        description: |
+            Working directory for the "borg create" command. Tildes are
+            expanded. Useful for backing up using relative paths. See
+            http://borgbackup.readthedocs.io/en/stable/usage/create.html for
+            details. Defaults to not set.
+        example: /path/to/working/directory
+    one_file_system:
+        type: boolean
+        description: |
+            Stay in same file system; do not cross mount points beyond the given
+            source directories. Defaults to false. But when a database hook is
+            used, the setting here is ignored and one_file_system is considered
+            true.
+        example: true
+    numeric_ids:
+        type: boolean
+        description: |
+            Only store/extract numeric user and group identifiers. Defaults to
+            false.
+        example: true
+    atime:
+        type: boolean
+        description: |
+            Store atime into archive. Defaults to true in Borg < 1.2, false in
+            Borg 1.2+.
+        example: false
+    ctime:
+        type: boolean
+        description: Store ctime into archive. Defaults to true.
+        example: false
+    birthtime:
+        type: boolean
+        description: |
+            Store birthtime (creation date) into archive. Defaults to true.
+        example: false
+    read_special:
+        type: boolean
+        description: |
+            Use Borg's --read-special flag to allow backup of block and other
+            special devices. Use with caution, as it will lead to problems if
+            used when backing up special devices such as /dev/zero. Defaults to
+            false. But when a database hook is used, the setting here is ignored
+            and read_special is considered true.
+        example: false
+    flags:
+        type: boolean
+        description: |
+            Record filesystem flags (e.g. NODUMP, IMMUTABLE) in archive.
+            Defaults to true.
+        example: true
+    files_cache:
+        type: string
+        description: |
+            Mode in which to operate the files cache. See
+            http://borgbackup.readthedocs.io/en/stable/usage/create.html for
+            details. Defaults to "ctime,size,inode".
+        example: ctime,size,inode
+    local_path:
+        type: string
+        description: |
+            Alternate Borg local executable. Defaults to "borg".
+        example: borg1
+    remote_path:
+        type: string
+        description: |
+            Alternate Borg remote executable. Defaults to "borg".
Defaults to "borg". + example: borg1 + patterns: + type: array + items: + type: string + description: | + Any paths matching these patterns are included/excluded from + backups. Globs are expanded. (Tildes are not.) See the output of + "borg help patterns" for more details. Quote any value if it + contains leading punctuation, so it parses correctly. Note that only + one of "patterns" and "source_directories" may be used. + example: + - 'R /' + - '- /home/*/.cache' + - '+ /home/susan' + - '- /home/*' + patterns_from: + type: array + items: + type: string + description: | + Read include/exclude patterns from one or more separate named files, + one pattern per line. Note that Borg considers this option + experimental. See the output of "borg help patterns" for more details. - additionalProperties: false - properties: - encryption_passcommand: - type: string - description: | - The standard output of this command is used to unlock the - encryption key. Only use on repositories that were - initialized with passcommand/repokey/keyfile encryption. - Note that if both encryption_passcommand and - encryption_passphrase are set, then encryption_passphrase - takes precedence. Defaults to not set. - example: "secret-tool lookup borg-repository repo-name" - encryption_passphrase: - type: string - description: | - Passphrase to unlock the encryption key with. Only use on - repositories that were initialized with - passphrase/repokey/keyfile encryption. Quote the value if it - contains punctuation, so it parses correctly. And backslash - any quote or backslash literals as well. Defaults to not - set. - example: "!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~" - checkpoint_interval: - type: integer - description: | - Number of seconds between each checkpoint during a - long-running backup. See - https://borgbackup.readthedocs.io/en/stable/faq.html - for details. Defaults to checkpoints every 1800 seconds (30 - minutes). - example: 1800 - checkpoint_volume: - type: integer - description: | - Number of backed up bytes between each checkpoint during a - long-running backup. Only supported with Borg 2+. See - https://borgbackup.readthedocs.io/en/stable/faq.html - for details. Defaults to only time-based checkpointing (see - "checkpoint_interval") instead of volume-based - checkpointing. - example: 1048576 - chunker_params: - type: string - description: | - Specify the parameters passed to then chunker - (CHUNK_MIN_EXP, CHUNK_MAX_EXP, HASH_MASK_BITS, - HASH_WINDOW_SIZE). See - https://borgbackup.readthedocs.io/en/stable/internals.html - for details. Defaults to "19,23,21,4095". - example: 19,23,21,4095 - compression: - type: string - description: | - Type of compression to use when creating archives. See - http://borgbackup.readthedocs.io/en/stable/usage/create.html - for details. Defaults to "lz4". - example: lz4 - upload_rate_limit: - type: integer - description: | - Remote network upload rate limit in kiBytes/second. Defaults - to unlimited. - example: 100 - retries: - type: integer - description: | - Number of times to retry a failing backup before giving up. - Defaults to 0 (i.e., does not attempt retry). - example: 3 - retry_wait: - type: integer - description: | - Wait time between retries (in seconds) to allow transient - issues to pass. Increases after each retry as a form of - backoff. Defaults to 0 (no wait). - example: 10 - temporary_directory: - type: string - description: | - Directory where temporary files are stored. 
-                    Defaults to $TMPDIR
-                example: /path/to/tmpdir
-            ssh_command:
-                type: string
-                description: |
-                    Command to use instead of "ssh". This can be used to specify
-                    ssh options. Defaults to not set.
-                example: ssh -i /path/to/private/key
-            borg_base_directory:
-                type: string
-                description: |
-                    Base path used for various Borg directories. Defaults to
-                    $HOME, ~$USER, or ~.
-                example: /path/to/base
-            borg_config_directory:
-                type: string
-                description: |
-                    Path for Borg configuration files. Defaults to
-                    $borg_base_directory/.config/borg
-                example: /path/to/base/config
-            borg_cache_directory:
-                type: string
-                description: |
-                    Path for Borg cache files. Defaults to
-                    $borg_base_directory/.cache/borg
-                example: /path/to/base/cache
-            borg_files_cache_ttl:
-                type: integer
-                description: |
-                    Maximum time to live (ttl) for entries in the Borg files
-                    cache.
-                example: 20
-            borg_security_directory:
-                type: string
-                description: |
-                    Path for Borg security and encryption nonce files. Defaults
-                    to $borg_base_directory/.config/borg/security
-                example: /path/to/base/config/security
-            borg_keys_directory:
-                type: string
-                description: |
-                    Path for Borg encryption key files. Defaults to
-                    $borg_base_directory/.config/borg/keys
-                example: /path/to/base/config/keys
-            umask:
-                type: integer
-                description: Umask to be used for borg create. Defaults to 0077.
-                example: 0077
-            lock_wait:
-                type: integer
-                description: |
-                    Maximum seconds to wait for acquiring a repository/cache
-                    lock. Defaults to 1.
-                example: 5
-            archive_name_format:
-                type: string
-                description: |
-                    Name of the archive. Borg placeholders can be used. See the
-                    output of "borg help placeholders" for details. Defaults to
-                    "{hostname}-{now:%Y-%m-%dT%H:%M:%S.%f}". If you specify this
-                    option, consider also specifying a prefix in the retention
-                    and consistency sections to avoid accidental
-                    pruning/checking of archives with different archive name
-                    formats.
-                example: "{hostname}-documents-{now}"
-            relocated_repo_access_is_ok:
-                type: boolean
-                description: |
-                    Bypass Borg error about a repository that has been moved.
-                    Defaults to false.
-                example: true
-            unknown_unencrypted_repo_access_is_ok:
-                type: boolean
-                description: |
-                    Bypass Borg error about a previously unknown unencrypted
-                    repository. Defaults to false.
-                example: true
-            extra_borg_options:
-                type: object
-                additionalProperties: false
-                properties:
-                    init:
-                        type: string
-                        description: |
-                            Extra command-line options to pass to "borg init".
-                        example: "--extra-option"
-                    create:
-                        type: string
-                        description: |
-                            Extra command-line options to pass to "borg create".
-                        example: "--extra-option"
-                    prune:
-                        type: string
-                        description: |
-                            Extra command-line options to pass to "borg prune".
-                        example: "--extra-option"
-                    compact:
-                        type: string
-                        description: |
-                            Extra command-line options to pass to "borg compact".
-                        example: "--extra-option"
-                    check:
-                        type: string
-                        description: |
-                            Extra command-line options to pass to "borg check".
-                        example: "--extra-option"
-                description: |
-                    Additional options to pass directly to particular Borg
-                    commands, handy for Borg options that borgmatic does not yet
-                    support natively. Note that borgmatic does not perform any
-                    validation on these options. Running borgmatic with
-                    "--verbosity 2" shows the exact Borg command-line
-                    invocation.
-    retention:
-        type: object
-        description: |
-            Retention policy for how many backups to keep in each category.
-            See https://borgbackup.readthedocs.io/en/stable/usage/prune.html for
-            details. At least one of the "keep" options is required for pruning
-            to work. To skip pruning entirely, run "borgmatic create" or "check"
-            without the "prune" action. See borgmatic documentation for details.
-        additionalProperties: false
-        properties:
-            keep_within:
-                type: string
-                description: Keep all archives within this time interval.
-                example: 3H
-            keep_secondly:
-                type: integer
-                description: Number of secondly archives to keep.
-                example: 60
-            keep_minutely:
-                type: integer
-                description: Number of minutely archives to keep.
-                example: 60
-            keep_hourly:
-                type: integer
-                description: Number of hourly archives to keep.
-                example: 24
-            keep_daily:
-                type: integer
-                description: Number of daily archives to keep.
-                example: 7
-            keep_weekly:
-                type: integer
-                description: Number of weekly archives to keep.
-                example: 4
-            keep_monthly:
-                type: integer
-                description: Number of monthly archives to keep.
-                example: 6
-            keep_yearly:
-                type: integer
-                description: Number of yearly archives to keep.
-                example: 1
-            prefix:
-                type: string
-                description: |
-                    When pruning, only consider archive names starting with this
-                    prefix. Borg placeholders can be used. See the output of
-                    "borg help placeholders" for details. Defaults to
-                    "{hostname}-". Use an empty value to disable the default.
-                example: sourcehostname
-    consistency:
-        type: object
-        description: |
-            Consistency checks to run after backups. See
-            https://borgbackup.readthedocs.io/en/stable/usage/check.html and
-            https://borgbackup.readthedocs.io/en/stable/usage/extract.html for
-            details.
+    exclude_patterns:
+        type: array
+        items:
+            type: string
+        description: |
+            Any paths matching these patterns are excluded from backups. Globs
+            and tildes are expanded. Note that a glob pattern must either start
+            with a glob or be an absolute path. Do not backslash spaces in path
+            names. See the output of "borg help patterns" for more details.
+        example:
+            - '*.pyc'
+            - /home/*/.cache
+            - '*/.vim*.tmp'
+            - /etc/ssl
+            - /home/user/path with spaces
+    exclude_from:
+        type: array
+        items:
+            type: string
+        description: |
+            Read exclude patterns from one or more separate named files, one
+            pattern per line. See the output of "borg help patterns" for more
+            details.
+        example:
+            - /etc/borgmatic/excludes
+    exclude_caches:
+        type: boolean
+        description: |
+            Exclude directories that contain a CACHEDIR.TAG file. See
+            http://www.brynosaurus.com/cachedir/spec.html for details. Defaults
+            to false.
+        example: true
+    exclude_if_present:
+        type: array
+        items:
+            type: string
+        description: |
+            Exclude directories that contain a file with the given filenames.
+            Defaults to not set.
+        example:
+            - .nobackup
+    keep_exclude_tags:
+        type: boolean
+        description: |
+            If true, the exclude_if_present filename is included in backups.
+            Defaults to false, meaning that the exclude_if_present filename is
+            omitted from backups.
+        example: true
+    exclude_nodump:
+        type: boolean
+        description: |
+            Exclude files with the NODUMP flag. Defaults to false.
+        example: true
+    borgmatic_source_directory:
+        type: string
+        description: |
+            Path for additional source files used for temporary internal state
+            like borgmatic database dumps. Note that changing this path prevents
+            "borgmatic restore" from finding any database dumps created before
+            the change. Defaults to ~/.borgmatic
+        example: /tmp/borgmatic
+    source_directories_must_exist:
+        type: boolean
+        description: |
+            If true, then source directories must exist, otherwise an error is
+            raised. Defaults to false.
+        example: true
+    encryption_passcommand:
+        type: string
+        description: |
+            The standard output of this command is used to unlock the encryption
+            key. Only use on repositories that were initialized with
+            passcommand/repokey/keyfile encryption. Note that if both
+            encryption_passcommand and encryption_passphrase are set, then
+            encryption_passphrase takes precedence. Defaults to not set.
+        example: "secret-tool lookup borg-repository repo-name"
+    encryption_passphrase:
+        type: string
+        description: |
+            Passphrase to unlock the encryption key with. Only use on
+            repositories that were initialized with passphrase/repokey/keyfile
+            encryption. Quote the value if it contains punctuation, so it parses
+            correctly. And backslash any quote or backslash literals as well.
+            Defaults to not set.
+        example: "!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~"
+    checkpoint_interval:
+        type: integer
+        description: |
+            Number of seconds between each checkpoint during a long-running
+            backup. See https://borgbackup.readthedocs.io/en/stable/faq.html for
+            details. Defaults to checkpoints every 1800 seconds (30 minutes).
+        example: 1800
+    checkpoint_volume:
+        type: integer
+        description: |
+            Number of backed up bytes between each checkpoint during a
+            long-running backup. Only supported with Borg 2+. See
+            https://borgbackup.readthedocs.io/en/stable/faq.html for details.
+            Defaults to only time-based checkpointing (see
+            "checkpoint_interval") instead of volume-based checkpointing.
+        example: 1048576
+    chunker_params:
+        type: string
+        description: |
+            Specify the parameters passed to the chunker (CHUNK_MIN_EXP,
+            CHUNK_MAX_EXP, HASH_MASK_BITS, HASH_WINDOW_SIZE). See
+            https://borgbackup.readthedocs.io/en/stable/internals.html for
+            details. Defaults to "19,23,21,4095".
+        example: 19,23,21,4095
+    compression:
+        type: string
+        description: |
+            Type of compression to use when creating archives. See
+            http://borgbackup.readthedocs.io/en/stable/usage/create.html for
+            details. Defaults to "lz4".
+        example: lz4
+    upload_rate_limit:
+        type: integer
+        description: |
+            Remote network upload rate limit in kiBytes/second. Defaults to
+            unlimited.
+        example: 100
+    retries:
+        type: integer
+        description: |
+            Number of times to retry a failing backup before giving up. Defaults
+            to 0 (i.e., does not attempt retry).
+        example: 3
+    retry_wait:
+        type: integer
+        description: |
+            Wait time between retries (in seconds) to allow transient issues to
+            pass. Increases after each retry as a form of backoff. Defaults to 0
+            (no wait).
+        example: 10
+    temporary_directory:
+        type: string
+        description: |
+            Directory where temporary files are stored. Defaults to $TMPDIR.
+        example: /path/to/tmpdir
+    ssh_command:
+        type: string
+        description: |
+            Command to use instead of "ssh". This can be used to specify ssh
+            options. Defaults to not set.
+        example: ssh -i /path/to/private/key
+    borg_base_directory:
+        type: string
+        description: |
+            Base path used for various Borg directories. Defaults to $HOME,
+            ~$USER, or ~.
+        example: /path/to/base
+    borg_config_directory:
+        type: string
+        description: |
+            Path for Borg configuration files. Defaults to
+            $borg_base_directory/.config/borg
+        example: /path/to/base/config
+    borg_cache_directory:
+        type: string
+        description: |
+            Path for Borg cache files. Defaults to
+            $borg_base_directory/.cache/borg
+        example: /path/to/base/cache
+    borg_files_cache_ttl:
+        type: integer
+        description: |
+            Maximum time to live (ttl) for entries in the Borg files cache.
+        example: 20
+    borg_security_directory:
+        type: string
+        description: |
+            Path for Borg security and encryption nonce files. Defaults to
+            $borg_base_directory/.config/borg/security
+        example: /path/to/base/config/security
+    borg_keys_directory:
+        type: string
+        description: |
+            Path for Borg encryption key files. Defaults to
+            $borg_base_directory/.config/borg/keys
+        example: /path/to/base/config/keys
+    umask:
+        type: integer
+        description: |
+            Umask used when executing Borg or calling hooks. Defaults to 0077
+            for Borg, or to the umask that borgmatic is run with for hooks.
+        example: 0077
+    lock_wait:
+        type: integer
+        description: |
+            Maximum seconds to wait for acquiring a repository/cache lock.
+            Defaults to 1.
+        example: 5
+    archive_name_format:
+        type: string
+        description: |
+            Name of the archive. Borg placeholders can be used. See the output
+            of "borg help placeholders" for details. Defaults to
+            "{hostname}-{now:%Y-%m-%dT%H:%M:%S.%f}". When running actions like
+            rlist, info, or check, borgmatic automatically tries to match only
+            archives created with this name format.
+        example: "{hostname}-documents-{now}"
+    match_archives:
+        type: string
+        description: |
+            A Borg pattern for filtering down the archives used by borgmatic
+            actions that operate on multiple archives. For Borg 1.x, use a shell
+            pattern here and see the output of "borg help placeholders" for
+            details. For Borg 2.x, see the output of "borg help match-archives".
+            If match_archives is not specified, borgmatic defaults to deriving
+            the match_archives value from archive_name_format.
+        example: "sh:{hostname}-*"
+    relocated_repo_access_is_ok:
+        type: boolean
+        description: |
+            Bypass Borg error about a repository that has been moved. Defaults
+            to not bypassing.
+        example: true
+    unknown_unencrypted_repo_access_is_ok:
+        type: boolean
+        description: |
+            Bypass Borg error about a previously unknown unencrypted repository.
+            Defaults to not bypassing.
+        example: true
+    check_i_know_what_i_am_doing:
+        type: boolean
+        description: |
+            Bypass Borg confirmation about check with repair option. Defaults to
+            an interactive prompt from Borg.
+        example: true
+    extra_borg_options:
+        type: object
+        additionalProperties: false
+        properties:
-            checks:
-                type: array
-                items:
-                    type: object
-                    required: ['name']
-                    additionalProperties: false
-                    properties:
-                        name:
-                            type: string
-                            enum:
-                                - repository
-                                - archives
-                                - data
-                                - extract
-                                - disabled
-                            description: |
-                                Name of consistency check to run: "repository",
-                                "archives", "data", and/or "extract". Set to
-                                "disabled" to disable all consistency checks.
-            check_repositories:
-                type: array
-                items:
-                    type: string
-                description: |
-                    Paths to a subset of the repositories in the location
-                    section on which to run consistency checks. Handy in case
-                    some of your repositories are very large, and so running
-                    consistency checks on them would take too long. Defaults to
-                    running consistency checks on all repositories configured in
-                    the location section.
-                example:
-                    - user@backupserver:sourcehostname.borg
-            check_last:
-                type: integer
-                description: |
-                    Restrict the number of checked archives to the last n.
-                    Applies only to the "archives" check. Defaults to checking
-                    all archives.
-                example: 3
-            prefix:
-                type: string
-                description: |
-                    When performing the "archives" check, only consider archive
-                    names starting with this prefix. Borg placeholders can be
-                    used. See the output of "borg help placeholders" for
-                    details. Defaults to "{hostname}-". Use an empty value to
-                    disable the default.
-                example: sourcehostname
-    output:
-        type: object
-        description: |
-            Options for customizing borgmatic's own output and logging.
+            init:
+                type: string
+                description: |
+                    Extra command-line options to pass to "borg init".
+                example: "--extra-option"
+            create:
+                type: string
+                description: |
+                    Extra command-line options to pass to "borg create".
+                example: "--extra-option"
+            prune:
+                type: string
+                description: |
+                    Extra command-line options to pass to "borg prune".
+                example: "--extra-option"
+            compact:
+                type: string
+                description: |
+                    Extra command-line options to pass to "borg compact".
+                example: "--extra-option"
+            check:
+                type: string
+                description: |
+                    Extra command-line options to pass to "borg check".
+                example: "--extra-option"
+        description: |
+            Additional options to pass directly to particular Borg commands,
+            handy for Borg options that borgmatic does not yet support natively.
+            Note that borgmatic does not perform any validation on these
+            options. Running borgmatic with "--verbosity 2" shows the exact Borg
+            command-line invocation.
+    keep_within:
+        type: string
+        description: Keep all archives within this time interval.
+        example: 3H
+    keep_secondly:
+        type: integer
+        description: Number of secondly archives to keep.
+        example: 60
+    keep_minutely:
+        type: integer
+        description: Number of minutely archives to keep.
+        example: 60
+    keep_hourly:
+        type: integer
+        description: Number of hourly archives to keep.
+        example: 24
+    keep_daily:
+        type: integer
+        description: Number of daily archives to keep.
+        example: 7
+    keep_weekly:
+        type: integer
+        description: Number of weekly archives to keep.
+        example: 4
+    keep_monthly:
+        type: integer
+        description: Number of monthly archives to keep.
+        example: 6
+    keep_yearly:
+        type: integer
+        description: Number of yearly archives to keep.
+        example: 1
+    prefix:
+        type: string
+        description: |
+            Deprecated. When pruning or checking archives, only consider archive
+            names starting with this prefix. Borg placeholders can be used. See
+            the output of "borg help placeholders" for details. If a prefix is
+            not specified, borgmatic defaults to matching archives based on the
+            archive_name_format (see above).
+        example: sourcehostname
+    checks:
+        type: array
+        items:
+            type: object
+            required: ['name']
+            additionalProperties: false
+            properties:
+                name:
+                    type: string
+                    enum:
+                        - repository
+                        - archives
+                        - data
+                        - extract
+                        - disabled
+                    description: |
+                        Name of consistency check to run: "repository",
+                        "archives", "data", and/or "extract". Set to "disabled"
+                        to disable all consistency checks.
"repository" checks + the consistency of the repository, "archives" checks all + of the archives, "data" verifies the integrity of the + data within the archives, and "extract" does an + extraction dry-run of the most recent archive. Note that + "data" implies "archives". + example: repository + frequency: + type: string + description: | + How frequently to run this type of consistency check (as + a best effort). The value is a number followed by a unit + of time. E.g., "2 weeks" to run this consistency check + no more than every two weeks for a given repository or + "1 month" to run it no more than monthly. Defaults to + "always": running this check every time checks are run. + example: 2 weeks + description: | + List of one or more consistency checks to run on a periodic basis + (if "frequency" is set) or every time borgmatic runs checks (if + "frequency" is omitted). + check_repositories: + type: array + items: + type: string + description: | + Paths or labels for a subset of the configured "repositories" (see + above) on which to run consistency checks. Handy in case some of + your repositories are very large, and so running consistency checks + on them would take too long. Defaults to running consistency checks + on all configured repositories. + example: + - user@backupserver:sourcehostname.borg + check_last: + type: integer + description: | + Restrict the number of checked archives to the last n. Applies only + to the "archives" check. Defaults to checking all archives. + example: 3 + color: + type: boolean + description: | + Apply color to console output. Can be overridden with --no-color + command-line flag. Defaults to true. + example: false + before_actions: + type: array + items: + type: string + description: | + List of one or more shell commands or scripts to execute before all + the actions for each repository. + example: + - "echo Starting actions." + before_backup: + type: array + items: + type: string + description: | + List of one or more shell commands or scripts to execute before + creating a backup, run once per repository. + example: + - "echo Starting a backup." + before_prune: + type: array + items: + type: string + description: | + List of one or more shell commands or scripts to execute before + pruning, run once per repository. + example: + - "echo Starting pruning." + before_compact: + type: array + items: + type: string + description: | + List of one or more shell commands or scripts to execute before + compaction, run once per repository. + example: + - "echo Starting compaction." + before_check: + type: array + items: + type: string + description: | + List of one or more shell commands or scripts to execute before + consistency checks, run once per repository. + example: + - "echo Starting checks." + before_extract: + type: array + items: + type: string + description: | + List of one or more shell commands or scripts to execute before + extracting a backup, run once per repository. + example: + - "echo Starting extracting." + after_backup: + type: array + items: + type: string + description: | + List of one or more shell commands or scripts to execute after + creating a backup, run once per repository. + example: + - "echo Finished a backup." + after_compact: + type: array + items: + type: string + description: | + List of one or more shell commands or scripts to execute after + compaction, run once per repository. + example: + - "echo Finished compaction." 
+ after_prune: + type: array + items: + type: string + description: | + List of one or more shell commands or scripts to execute after + pruning, run once per repository. + example: + - "echo Finished pruning." + after_check: + type: array + items: + type: string + description: | + List of one or more shell commands or scripts to execute after + consistency checks, run once per repository. + example: + - "echo Finished checks." + after_extract: + type: array + items: + type: string + description: | + List of one or more shell commands or scripts to execute after + extracting a backup, run once per repository. + example: + - "echo Finished extracting." + after_actions: + type: array + items: + type: string + description: | + List of one or more shell commands or scripts to execute after all + actions for each repository. + example: + - "echo Finished actions." + on_error: + type: array + items: + type: string + description: | + List of one or more shell commands or scripts to execute when an + exception occurs during a "create", "prune", "compact", or "check" + action or an associated before/after hook. + example: + - "echo Error during create/prune/compact/check." + before_everything: + type: array + items: + type: string + description: | + List of one or more shell commands or scripts to execute before + running all actions (if one of them is "create"). These are + collected from all configuration files and then run once before all + of them (prior to all actions). + example: + - "echo Starting actions." + after_everything: + type: array + items: + type: string + description: | + List of one or more shell commands or scripts to execute after + running all actions (if one of them is "create"). These are + collected from all configuration files and then run once after all + of them (after any action). + example: + - "echo Completed actions." + postgresql_databases: + type: array + items: + type: object + required: ['name'] + additionalProperties: false + properties: + name: + type: string + description: | + Database name (required if using this hook). Or "all" to + dump all databases on the host. (Also set the "format" + to dump each database to a separate file instead of one + combined file.) Note that using this database hook + implicitly enables both read_special and one_file_system + (see above) to support dump and restore streaming. + example: users + hostname: + type: string + description: | + Database hostname to connect to. Defaults to connecting + via local Unix socket. + example: database.example.org + restore_hostname: + type: string + description: | + Database hostname to restore to. Defaults to the + "hostname" option. + example: database.example.org + port: + type: integer + description: Port to connect to. Defaults to 5432. + example: 5433 + restore_port: + type: integer + description: | + Port to restore to. Defaults to the "port" option. + example: 5433 + username: + type: string + description: | + Username with which to connect to the database. Defaults + to the username of the current user. You probably want + to specify the "postgres" superuser here when the + database name is "all". + example: dbuser + restore_username: + type: string + description: | + Username with which to restore the database. Defaults to + the "username" option. + example: dbuser + password: + type: string + description: | + Password with which to connect to the database. 
Omitting + a password will only work if PostgreSQL is configured to + trust the configured username without a password or you + create a ~/.pgpass file. + example: trustsome1 + restore_password: + type: string + description: | + Password with which to connect to the restore database. + Defaults to the "password" option. + example: trustsome1 + no_owner: + type: boolean + description: | + Do not output commands to set ownership of objects to + match the original database. By default, pg_dump and + pg_restore issue ALTER OWNER or SET SESSION + AUTHORIZATION statements to set ownership of created + schema elements. These statements will fail unless the + initial connection to the database is made by a + superuser. + example: true + format: + type: string + enum: ['plain', 'custom', 'directory', 'tar'] + description: | + Database dump output format. One of "plain", "custom", + "directory", or "tar". Defaults to "custom" (unlike raw + pg_dump) for a single database. Or, when database name + is "all" and format is blank, dumps all databases to a + single file. But if a format is specified with an "all" + database name, dumps each database to a separate file of + that format, allowing more convenient restores of + individual databases. See the pg_dump documentation for + more about formats. + example: directory + ssl_mode: + type: string + enum: ['disable', 'allow', 'prefer', + 'require', 'verify-ca', 'verify-full'] + description: | + SSL mode to use to connect to the database server. One + of "disable", "allow", "prefer", "require", "verify-ca" + or "verify-full". Defaults to "disable". + example: require + ssl_cert: + type: string + description: | + Path to a client certificate. + example: "/root/.postgresql/postgresql.crt" + ssl_key: + type: string + description: | + Path to a private client key. + example: "/root/.postgresql/postgresql.key" + ssl_root_cert: + type: string + description: | + Path to a root certificate containing a list of trusted + certificate authorities. + example: "/root/.postgresql/root.crt" + ssl_crl: + type: string + description: | + Path to a certificate revocation list. + example: "/root/.postgresql/root.crl" + pg_dump_command: + type: string + description: | + Command to use instead of "pg_dump" or "pg_dumpall". + This can be used to run a specific pg_dump version + (e.g., one inside a running container). Defaults to + "pg_dump" for single database dump or "pg_dumpall" to + dump all databases. + example: docker exec my_pg_container pg_dump + pg_restore_command: + type: string + description: | + Command to use instead of "pg_restore". This can be used + to run a specific pg_restore version (e.g., one inside a + running container). Defaults to "pg_restore". + example: docker exec my_pg_container pg_restore + psql_command: + type: string + description: | + Command to use instead of "psql". This can be used to + run a specific psql version (e.g., one inside a running + container). Defaults to "psql". + example: docker exec my_pg_container psql + options: + type: string + description: | + Additional pg_dump/pg_dumpall options to pass directly + to the dump command, without performing any validation + on them. See pg_dump documentation for details. + example: --role=someone + list_options: + type: string + description: | + Additional psql options to pass directly to the psql + command that lists available databases, without + performing any validation on them. See psql + documentation for details. 
+ example: --role=someone
+ restore_options:
+ type: string
+ description: |
+ Additional pg_restore/psql options to pass directly to
+ the restore command, without performing any validation
+ on them. See pg_restore/psql documentation for details.
+ example: --role=someone
+ analyze_options:
+ type: string
+ description: |
+ Additional psql options to pass directly to the analyze
+ command run after a restore, without performing any
+ validation on them. See psql documentation for details.
+ example: --role=someone
+ description: |
+ List of one or more PostgreSQL databases to dump before creating a
+ backup, run once per configuration file. The database dumps are
+ added to your source directories at runtime, backed up, and removed
+ afterwards. Requires pg_dump/pg_dumpall/pg_restore commands. See
+ https://www.postgresql.org/docs/current/app-pgdump.html and
+ https://www.postgresql.org/docs/current/libpq-ssl.html for details.
+ mysql_databases:
+ type: array
+ items:
+ type: object
+ required: ['name']
+ additionalProperties: false
+ properties:
+ name:
+ type: string
+ description: |
+ Database name (required if using this hook). Or "all" to
+ dump all databases on the host. Note that using this
+ database hook implicitly enables both read_special and
+ one_file_system (see above) to support dump and restore
+ streaming.
+ example: users
+ hostname:
+ type: string
+ description: |
+ Database hostname to connect to. Defaults to connecting
+ via local Unix socket.
+ example: database.example.org
+ restore_hostname:
+ type: string
+ description: |
+ Database hostname to restore to. Defaults to the
+ "hostname" option.
+ example: database.example.org
+ port:
+ type: integer
+ description: Port to connect to. Defaults to 3306.
+ example: 3307
+ restore_port:
+ type: integer
+ description: |
+ Port to restore to. Defaults to the "port" option.
+ example: 3307
+ username:
+ type: string
+ description: |
+ Username with which to connect to the database. Defaults
+ to the username of the current user.
+ example: dbuser
+ restore_username:
+ type: string
+ description: |
+ Username with which to restore the database. Defaults to
+ the "username" option.
+ example: dbuser
+ password:
+ type: string
+ description: |
+ Password with which to connect to the database. Omitting
+ a password will only work if MySQL is configured to
+ trust the configured username without a password.
+ example: trustsome1
+ restore_password:
+ type: string
+ description: |
+ Password with which to connect to the restore database.
+ Defaults to the "password" option.
+ example: trustsome1
+ format:
+ type: string
+ enum: ['sql']
+ description: |
+ Database dump output format. Currently only "sql" is
+ supported. Defaults to "sql" for a single database. Or,
+ when database name is "all" and format is blank, dumps
+ all databases to a single file. But if a format is
+ specified with an "all" database name, dumps each
+ database to a separate file of that format, allowing
+ more convenient restores of individual databases.
+ example: sql
+ add_drop_database:
+ type: boolean
+ description: |
+ Use the "--add-drop-database" flag with mysqldump,
+ causing the database to be dropped right before restore.
+ Defaults to true.
+ example: false
+ options:
+ type: string
+ description: |
+ Additional mysqldump options to pass directly to the
+ dump command, without performing any validation on them.
+ See mysqldump documentation for details.
+ example: --skip-comments
+ list_options:
+ type: string
+ description: |
+ Additional mysql options to pass directly to the mysql
+ command that lists available databases, without
+ performing any validation on them. See mysql
+ documentation for details.
+ example: --defaults-extra-file=my.cnf
+ restore_options:
+ type: string
+ description: |
+ Additional mysql options to pass directly to the mysql
+ command that restores database dumps, without performing
+ any validation on them. See mysql documentation for
+ details.
+ example: --defaults-extra-file=my.cnf
+ description: |
+ List of one or more MySQL/MariaDB databases to dump before creating
+ a backup, run once per configuration file. The database dumps are
+ added to your source directories at runtime, backed up, and removed
+ afterwards. Requires mysqldump/mysql commands (from either MySQL or
+ MariaDB). See https://dev.mysql.com/doc/refman/8.0/en/mysqldump.html
+ or https://mariadb.com/kb/en/library/mysqldump/ for details.
+ sqlite_databases:
+ type: array
+ items:
+ type: object
+ required: ['path','name']
+ additionalProperties: false
+ properties:
+ name:
+ type: string
+ description: |
+ This is used to tag the database dump file with a name.
+ It is not the path to the database file itself. The name
+ "all" has no special meaning for SQLite databases.
+ example: users
+ path:
+ type: string
+ description: |
+ Path to the SQLite database file to dump. If relative,
+ it is relative to the current working directory. Note
+ that using this database hook implicitly enables both
+ read_special and one_file_system (see above) to support
+ dump and restore streaming.
+ example: /var/lib/sqlite/users.db
+ restore_path:
+ type: string
+ description: |
+ Path to the SQLite database file to restore to. Defaults
+ to the "path" option.
+ example: /var/lib/sqlite/users.db
+ mongodb_databases:
+ type: array
+ items:
+ type: object
+ required: ['name']
+ additionalProperties: false
+ properties:
+ name:
+ type: string
+ description: |
+ Database name (required if using this hook). Or "all" to
+ dump all databases on the host. Note that using this
+ database hook implicitly enables both read_special and
+ one_file_system (see above) to support dump and restore
+ streaming.
+ example: users
+ hostname:
+ type: string
+ description: |
+ Database hostname to connect to. Defaults to connecting
+ to localhost.
+ example: database.example.org
+ restore_hostname:
+ type: string
+ description: |
+ Database hostname to restore to. Defaults to the
+ "hostname" option.
+ example: database.example.org
+ port:
+ type: integer
+ description: Port to connect to. Defaults to 27017.
+ example: 27018
+ restore_port:
+ type: integer
+ description: |
+ Port to restore to. Defaults to the "port" option.
+ example: 27018
+ username:
+ type: string
+ description: |
+ Username with which to connect to the database. Skip it
+ if no authentication is needed.
+ example: dbuser
+ restore_username:
+ type: string
+ description: |
+ Username with which to restore the database. Defaults to
+ the "username" option.
+ example: dbuser
+ password:
+ type: string
+ description: |
+ Password with which to connect to the database. Skip it
+ if no authentication is needed.
+ example: trustsome1
+ restore_password:
+ type: string
+ description: |
+ Password with which to connect to the restore database.
+ Defaults to the "password" option.
+ example: trustsome1
+ authentication_database:
+ type: string
+ description: |
+ Authentication database where the specified username
+ exists. If no authentication database is specified, the
+ database provided in "name" is used. If "name" is "all",
+ the "admin" database is used.
+ example: admin
+ format:
+ type: string
+ enum: ['archive', 'directory']
+ description: |
+ Database dump output format. One of "archive", or
+ "directory". Defaults to "archive". See mongodump
+ documentation for details. Note that format is ignored
+ when the database name is "all".
+ example: directory
+ options:
+ type: string
+ description: |
+ Additional mongodump options to pass directly to the
+ dump command, without performing any validation on them.
+ See mongodump documentation for details.
+ example: --dumpDbUsersAndRoles
+ restore_options:
+ type: string
+ description: |
+ Additional mongorestore options to pass directly to the
+ restore command, without performing any validation on them.
+ See mongorestore documentation for details.
+ example: --restoreDbUsersAndRoles
+ description: |
+ List of one or more MongoDB databases to dump before creating a
+ backup, run once per configuration file. The database dumps are
+ added to your source directories at runtime, backed up, and removed
+ afterwards. Requires mongodump/mongorestore commands. See
+ https://docs.mongodb.com/database-tools/mongodump/ and
+ https://docs.mongodb.com/database-tools/mongorestore/ for details.
+ ntfy:
+ type: object
+ required: ['topic']
additionalProperties: false
properties:
- color:
- type: boolean
+ topic:
+ type: string
description: |
- Apply color to console output. Can be overridden with
- --no-color command-line flag. Defaults to true.
- example: false
- hooks:
- type: object
- description: |
- Shell commands, scripts, or integrations to execute at various
- points during a borgmatic run. IMPORTANT: All provided commands and
- scripts are executed with user permissions of borgmatic. Do not
- forget to set secure permissions on this configuration file (chmod
- 0600) as well as on any script called from a hook (chmod 0700) to
- prevent potential shell injection or privilege escalation.
- additionalProperties: false
- properties:
- before_actions:
- type: array
- items:
- type: string
+ The topic to publish to. See https://ntfy.sh/docs/publish/
+ for details.
+ example: topic
+ server:
+ type: string
description: |
- List of one or more shell commands or scripts to execute
- before all the actions for each repository.
- example:
- - echo "Starting actions."
- before_backup:
- type: array
- items:
- type: string
+ The address of your self-hosted ntfy.sh instance.
+ example: https://ntfy.your-domain.com
+ username:
+ type: string
description: |
- List of one or more shell commands or scripts to execute
- before creating a backup, run once per repository.
- example:
- - echo "Starting a backup."
- before_prune:
- type: array
- items:
- type: string
+ The username used for authentication.
+ example: testuser
+ password:
+ type: string
description: |
- List of one or more shell commands or scripts to execute
- before pruning, run once per repository.
- example:
- - echo "Starting pruning."
- before_compact:
- type: array
- items:
- type: string
- description: |
- List of one or more shell commands or scripts to execute
- before compaction, run once per repository.
- example:
- - echo "Starting compaction."
- before_check: - type: array - items: - type: string - description: | - List of one or more shell commands or scripts to execute - before consistency checks, run once per repository. - example: - - echo "Starting checks." - before_extract: - type: array - items: - type: string - description: | - List of one or more shell commands or scripts to execute - before extracting a backup, run once per repository. - example: - - echo "Starting extracting." - after_backup: - type: array - items: - type: string - description: | - List of one or more shell commands or scripts to execute - after creating a backup, run once per repository. - example: - - echo "Finished a backup." - after_compact: - type: array - items: - type: string - description: | - List of one or more shell commands or scripts to execute - after compaction, run once per repository. - example: - - echo "Finished compaction." - after_prune: - type: array - items: - type: string - description: | - List of one or more shell commands or scripts to execute - after pruning, run once per repository. - example: - - echo "Finished pruning." - after_check: - type: array - items: - type: string - description: | - List of one or more shell commands or scripts to execute - after consistency checks, run once per repository. - example: - - echo "Finished checks." - after_extract: - type: array - items: - type: string - description: | - List of one or more shell commands or scripts to execute - after extracting a backup, run once per repository. - example: - - echo "Finished extracting." - after_actions: - type: array - items: - type: string - description: | - List of one or more shell commands or scripts to execute - after all actions for each repository. - example: - - echo "Finished actions." - on_error: - type: array - items: - type: string - description: | - List of one or more shell commands or scripts to execute - when an exception occurs during a "create", "prune", - "compact", or "check" action or an associated before/after - hook. - example: - - echo "Error during create/prune/compact/check." - before_everything: - type: array - items: - type: string - description: | - List of one or more shell commands or scripts to execute - before running all actions (if one of them is "create"). - These are collected from all configuration files and then - run once before all of them (prior to all actions). - example: - - echo "Starting actions." - after_everything: - type: array - items: - type: string - description: | - List of one or more shell commands or scripts to execute - after running all actions (if one of them is "create"). - These are collected from all configuration files and then - run once after all of them (after any action). - example: - - echo "Completed actions." - postgresql_databases: - type: array - items: - type: object - required: ['name'] - additionalProperties: false - properties: - name: - type: string - description: | - Database name (required if using this hook). Or - "all" to dump all databases on the host. (Also - set the "format" to dump each database to a - separate file instead of one combined file.) - Note that using this database hook implicitly - enables both read_special and one_file_system - (see above) to support dump and restore - streaming. - example: users - hostname: - type: string - description: | - Database hostname to connect to. Defaults to - connecting via local Unix socket. - example: database.example.org - port: - type: integer - description: Port to connect to. Defaults to 5432. 
- example: 5433 - username: - type: string - description: | - Username with which to connect to the database. - Defaults to the username of the current user. - You probably want to specify the "postgres" - superuser here when the database name is "all". - example: dbuser - password: - type: string - description: | - Password with which to connect to the database. - Omitting a password will only work if PostgreSQL - is configured to trust the configured username - without a password or you create a ~/.pgpass - file. - example: trustsome1 - format: - type: string - enum: ['plain', 'custom', 'directory', 'tar'] - description: | - Database dump output format. One of "plain", - "custom", "directory", or "tar". Defaults to - "custom" (unlike raw pg_dump) for a single - database. Or, when database name is "all" and - format is blank, dumps all databases to a single - file. But if a format is specified with an "all" - database name, dumps each database to a separate - file of that format, allowing more convenient - restores of individual databases. See the - pg_dump documentation for more about formats. - example: directory - ssl_mode: - type: string - enum: ['disable', 'allow', 'prefer', - 'require', 'verify-ca', 'verify-full'] - description: | - SSL mode to use to connect to the database - server. One of "disable", "allow", "prefer", - "require", "verify-ca" or "verify-full". - Defaults to "disable". - example: require - ssl_cert: - type: string - description: | - Path to a client certificate. - example: "/root/.postgresql/postgresql.crt" - ssl_key: - type: string - description: | - Path to a private client key. - example: "/root/.postgresql/postgresql.key" - ssl_root_cert: - type: string - description: | - Path to a root certificate containing a list of - trusted certificate authorities. - example: "/root/.postgresql/root.crt" - ssl_crl: - type: string - description: | - Path to a certificate revocation list. - example: "/root/.postgresql/root.crl" - pg_dump_command: - type: string - description: | - Command to use instead of "pg_dump" or - "pg_dumpall". This can be used to run a specific - pg_dump version (e.g., one inside a running - docker container). Defaults to "pg_dump" for - single database dump or "pg_dumpall" to dump - all databases. - example: docker exec my_pg_container pg_dump - pg_restore_command: - type: string - description: | - Command to use instead of "pg_restore". This - can be used to run a specific pg_restore - version (e.g., one inside a running docker - container). Defaults to "pg_restore". - example: docker exec my_pg_container pg_restore - psql_command: - type: string - description: | - Command to use instead of "psql". This can be - used to run a specific psql version (e.g., - one inside a running docker container). - Defaults to "psql". - example: docker exec my_pg_container psql - options: - type: string - description: | - Additional pg_dump/pg_dumpall options to pass - directly to the dump command, without performing - any validation on them. See pg_dump - documentation for details. - example: --role=someone - list_options: - type: string - description: | - Additional psql options to pass directly to the - psql command that lists available databases, - without performing any validation on them. See - psql documentation for details. - example: --role=someone - restore_options: - type: string - description: | - Additional pg_restore/psql options to pass - directly to the restore command, without - performing any validation on them. 
See - pg_restore/psql documentation for details. - example: --role=someone - analyze_options: - type: string - description: | - Additional psql options to pass directly to the - analyze command run after a restore, without - performing any validation on them. See psql - documentation for details. - example: --role=someone - description: | - List of one or more PostgreSQL databases to dump before - creating a backup, run once per configuration file. The - database dumps are added to your source directories at - runtime, backed up, and removed afterwards. Requires - pg_dump/pg_dumpall/pg_restore commands. See - https://www.postgresql.org/docs/current/app-pgdump.html and - https://www.postgresql.org/docs/current/libpq-ssl.html for - details. - mysql_databases: - type: array - items: - type: object - required: ['name'] - additionalProperties: false - properties: - name: - type: string - description: | - Database name (required if using this hook). Or - "all" to dump all databases on the host. Note - that using this database hook implicitly enables - both read_special and one_file_system (see - above) to support dump and restore streaming. - example: users - hostname: - type: string - description: | - Database hostname to connect to. Defaults to - connecting via local Unix socket. - example: database.example.org - port: - type: integer - description: Port to connect to. Defaults to 3306. - example: 3307 - username: - type: string - description: | - Username with which to connect to the database. - Defaults to the username of the current user. - example: dbuser - password: - type: string - description: | - Password with which to connect to the database. - Omitting a password will only work if MySQL is - configured to trust the configured username - without a password. - example: trustsome1 - format: - type: string - enum: ['sql'] - description: | - Database dump output format. Currenly only "sql" - is supported. Defaults to "sql" for a single - database. Or, when database name is "all" and - format is blank, dumps all databases to a single - file. But if a format is specified with an "all" - database name, dumps each database to a separate - file of that format, allowing more convenient - restores of individual databases. - example: directory - add_drop_database: - type: boolean - description: | - Use the "--add-drop-database" flag with - mysqldump, causing the database to be dropped - right before restore. Defaults to true. - example: false - options: - type: string - description: | - Additional mysqldump options to pass directly to - the dump command, without performing any - validation on them. See mysqldump documentation - for details. - example: --skip-comments - list_options: - type: string - description: | - Additional mysql options to pass directly to - the mysql command that lists available - databases, without performing any validation on - them. See mysql documentation for details. - example: --defaults-extra-file=my.cnf - restore_options: - type: string - description: | - Additional mysql options to pass directly to - the mysql command that restores database dumps, - without performing any validation on them. See - mysql documentation for details. - example: --defaults-extra-file=my.cnf - description: | - List of one or more MySQL/MariaDB databases to dump before - creating a backup, run once per configuration file. The - database dumps are added to your source directories at - runtime, backed up, and removed afterwards. 
Requires - mysqldump/mysql commands (from either MySQL or MariaDB). See - https://dev.mysql.com/doc/refman/8.0/en/mysqldump.html or - https://mariadb.com/kb/en/library/mysqldump/ for details. - sqlite_databases: - type: array - items: - type: object - required: ['path','name'] - additionalProperties: false - properties: - name: - type: string - description: | - This is used to tag the database dump file - with a name. It is not the path to the database - file itself. The name "all" has no special - meaning for SQLite databases. - example: users - path: - type: string - description: | - Path to the SQLite database file to dump. If - relative, it is relative to the current working - directory. Note that using this - database hook implicitly enables both - read_special and one_file_system (see above) to - support dump and restore streaming. - example: /var/lib/sqlite/users.db - mongodb_databases: - type: array - items: - type: object - required: ['name'] - additionalProperties: false - properties: - name: - type: string - description: | - Database name (required if using this hook). Or - "all" to dump all databases on the host. Note - that using this database hook implicitly enables - both read_special and one_file_system (see - above) to support dump and restore streaming. - example: users - hostname: - type: string - description: | - Database hostname to connect to. Defaults to - connecting to localhost. - example: database.example.org - port: - type: integer - description: Port to connect to. Defaults to 27017. - example: 27018 - username: - type: string - description: | - Username with which to connect to the database. - Skip it if no authentication is needed. - example: dbuser - password: - type: string - description: | - Password with which to connect to the database. - Skip it if no authentication is needed. - example: trustsome1 - authentication_database: - type: string - description: | - Authentication database where the specified - username exists. If no authentication database - is specified, the database provided in "name" - is used. If "name" is "all", the "admin" - database is used. - example: admin - format: - type: string - enum: ['archive', 'directory'] - description: | - Database dump output format. One of "archive", - or "directory". Defaults to "archive". See - mongodump documentation for details. Note that - format is ignored when the database name is - "all". - example: directory - options: - type: string - description: | - Additional mongodump options to pass - directly to the dump command, without performing - any validation on them. See mongodump - documentation for details. - example: --dumpDbUsersAndRoles - restore_options: - type: string - description: | - Additional mongorestore options to pass - directly to the dump command, without performing - any validation on them. See mongorestore - documentation for details. - example: --restoreDbUsersAndRoles - description: | - List of one or more MongoDB databases to dump before - creating a backup, run once per configuration file. The - database dumps are added to your source directories at - runtime, backed up, and removed afterwards. Requires - mongodump/mongorestore commands. See - https://docs.mongodb.com/database-tools/mongodump/ and - https://docs.mongodb.com/database-tools/mongorestore/ for - details. - ntfy: + The password used for authentication. 
+ example: fakepassword + start: type: object - required: ['topic'] - additionalProperties: false properties: - topic: + title: type: string description: | - The topic to publish to. - (https://ntfy.sh/docs/publish/) - example: topic - server: + The title of the message. + example: Ping! + message: type: string description: | - The address of your self-hosted ntfy.sh instance. - example: https://ntfy.your-domain.com - username: + The message body to publish. + example: Your backups have failed. + priority: type: string description: | - The username used for authentication. - example: testuser - password: + The priority to set. + example: urgent + tags: type: string description: | - The password used for authentication. - example: fakepassword - start: - type: object - properties: - title: - type: string - description: | - The title of the message - example: Ping! - message: - type: string - description: | - The message body to publish. - example: Your backups have failed. - priority: - type: string - description: | - The priority to set. - example: urgent - tags: - type: string - description: | - Tags to attach to the message. - example: incoming_envelope - finish: - type: object - properties: - title: - type: string - description: | - The title of the message. - example: Ping! - message: - type: string - description: | - The message body to publish. - example: Your backups have failed. - priority: - type: string - description: | - The priority to set. - example: urgent - tags: - type: string - description: | - Tags to attach to the message. - example: incoming_envelope - fail: - type: object - properties: - title: - type: string - description: | - The title of the message. - example: Ping! - message: - type: string - description: | - The message body to publish. - example: Your backups have failed. - priority: - type: string - description: | - The priority to set. - example: urgent - tags: - type: string - description: | - Tags to attach to the message. - example: incoming_envelope - states: - type: array - items: - type: string - enum: - - start - - finish - - fail - uniqueItems: true - description: | - List of one or more monitoring states to ping for: - "start", "finish", and/or "fail". Defaults to - pinging for failure only. - example: - - start - - finish - healthchecks: + Tags to attach to the message. + example: incoming_envelope + finish: type: object - required: ['ping_url'] - additionalProperties: false properties: - ping_url: + title: type: string description: | - Healthchecks ping URL or UUID to notify when a - backup begins, ends, errors or just to send logs. - example: https://hc-ping.com/your-uuid-here - verify_tls: - type: boolean + The title of the message. + example: Ping! + message: + type: string description: | - Verify the TLS certificate of the ping URL host. - Defaults to true. - example: false - send_logs: - type: boolean + The message body to publish. + example: Your backups have failed. + priority: + type: string description: | - Send borgmatic logs to Healthchecks as part the - "finish", "fail", and "log" states. Defaults to - true. - example: false - ping_body_limit: - type: integer + The priority to set. + example: urgent + tags: + type: string description: | - Number of bytes of borgmatic logs to send to - Healthchecks, ideally the same as PING_BODY_LIMIT - configured on the Healthchecks server. Set to 0 to - send all logs and disable this truncation. Defaults - to 100000. 
- example: 200000
- states:
- type: array
- items:
- type: string
- enum:
- - start
- - finish
- - fail
- - log
- uniqueItems: true
- description: |
- List of one or more monitoring states to ping for:
- "start", "finish", "fail", and/or "log". Defaults to
- pinging for all states.
- example:
- - finish
- description: |
- Configuration for a monitoring integration with
- Healthchecks. Create an account at https://healthchecks.io
- (or self-host Healthchecks) if you'd like to use this
- service. See borgmatic monitoring documentation for details.
- cronitor:
+ Tags to attach to the message.
+ example: incoming_envelope
+ fail:
type: object
additionalProperties: false
properties:
@@ -1283,33 +1196,152 @@ properties:
integration_key:
type: string
description: |
- PagerDuty integration key used to notify PagerDuty
- when a backup errors.
- example: a177cad45bd374409f78906a810a3074
- description: |
- Configuration for a monitoring integration with PagerDuty.
- Create an account at https://www.pagerduty.com/ if you'd
- like to use this service. See borgmatic monitoring
- documentation for details.
- cronhub:
- type: object
- required: ['ping_url']
- additionalProperties: false
- properties:
- ping_url:
+ The message body to publish.
+ example: Your backups have failed.
+ priority:
type: string
description: |
- Cronhub ping URL to notify when a backup begins,
- ends, or errors.
- example: https://cronhub.io/ping/1f5e3410-254c-5587
+ The priority to set.
+ example: urgent
+ tags:
+ type: string
+ description: |
+ Tags to attach to the message.
+ example: incoming_envelope
+ states:
+ type: array
+ items:
+ type: string
+ enum:
+ - start
+ - finish
+ - fail
+ uniqueItems: true
description: |
- Configuration for a monitoring integration with Crunhub.
- Create an account at https://cronhub.io if you'd like to
- use this service. See borgmatic monitoring documentation
- for details.
- umask:
+ List of one or more monitoring states to ping for: "start",
+ "finish", and/or "fail". Defaults to pinging for failure
+ only.
+ example:
+ - start
+ - finish
+ healthchecks:
+ type: object
+ required: ['ping_url']
+ additionalProperties: false
+ properties:
+ ping_url:
+ type: string
+ description: |
+ Healthchecks ping URL or UUID to notify when a backup
+ begins, ends, errors, or to send only logs.
+ example: https://hc-ping.com/your-uuid-here
+ verify_tls:
+ type: boolean
+ description: |
+ Verify the TLS certificate of the ping URL host. Defaults to
+ true.
+ example: false
+ send_logs:
+ type: boolean
+ description: |
+ Send borgmatic logs to Healthchecks as part of the "finish",
+ "fail", and "log" states. Defaults to true.
+ example: false
+ ping_body_limit:
type: integer
description: |
- Umask used when executing hooks. Defaults to the umask that
- borgmatic is run with.
- example: 0077
+ Number of bytes of borgmatic logs to send to Healthchecks,
+ ideally the same as PING_BODY_LIMIT configured on the
+ Healthchecks server. Set to 0 to send all logs and disable
+ this truncation. Defaults to 100000.
+ example: 200000
+ states:
+ type: array
+ items:
+ type: string
+ enum:
+ - start
+ - finish
+ - fail
+ - log
+ uniqueItems: true
+ description: |
+ List of one or more monitoring states to ping for: "start",
+ "finish", "fail", and/or "log". Defaults to pinging for all
+ states.
+ example:
+ - finish
+ description: |
+ Configuration for a monitoring integration with Healthchecks. Create
+ an account at https://healthchecks.io (or self-host Healthchecks) if
+ you'd like to use this service.
See borgmatic monitoring
+ documentation for details.
+ cronitor:
+ type: object
+ additionalProperties: false
+ properties:
+ create:
+ type: string
+ description: |
+ Cronitor ping URL to notify when a backup
+ begins, ends, or errors.
+ example: https://cronitor.link/d3x0c1
+ prune:
+ type: string
+ description: |
+ Cronitor ping URL to notify when a prune action
+ begins, ends, or errors.
+ example: https://cronitor.link/d3x0c1
+ compact:
+ type: string
+ description: |
+ Cronitor ping URL to notify when a compact action
+ begins, ends, or errors.
+ example: https://cronitor.link/d3x0c1
+ check:
+ type: string
+ description: |
+ Cronitor ping URL to notify when a check action
+ begins, ends, or errors.
+ example: https://cronitor.link/d3x0c1
+ ping_url:
+ type: string
+ description: |
+ If this is set, other properties will be ignored
+ and replaced by this value.
+ example: https://cronitor.link/d3x0c1
+ description: |
+ Configuration for a monitoring integration with Cronitor.
+ Create an account at https://cronitor.io if you'd
+ like to use this service. See borgmatic monitoring
+ documentation for details.
+ pagerduty:
+ type: object
+ required: ['integration_key']
+ additionalProperties: false
+ properties:
+ integration_key:
+ type: string
+ description: |
+ PagerDuty integration key used to notify PagerDuty
+ when a backup errors.
+ example: a177cad45bd374409f78906a810a3074
+ description: |
+ Configuration for a monitoring integration with PagerDuty. Create an
+ account at https://www.pagerduty.com/ if you'd like to use this
+ service. See borgmatic monitoring documentation for details.
+ cronhub:
+ type: object
+ required: ['ping_url']
+ additionalProperties: false
+ properties:
+ ping_url:
+ type: string
+ description: |
+ Cronhub ping URL to notify when a backup begins,
+ ends, or errors.
+ example: https://cronhub.io/ping/1f5e3410-254c-5587
+ description: |
+ Configuration for a monitoring integration with Cronhub. Create an
+ account at https://cronhub.io if you'd like to use this service. See
+ borgmatic monitoring documentation for details.
diff --git a/borgmatic/config/validate.py b/borgmatic/config/validate.py
index 10da19ce..c13329f3 100644
--- a/borgmatic/config/validate.py
+++ b/borgmatic/config/validate.py
@@ -1,9 +1,9 @@
import os
import jsonschema
-import pkg_resources
import ruamel.yaml
+import borgmatic.config
from borgmatic.config import environment, load, normalize, override
@@ -11,8 +11,13 @@ def schema_filename():
'''
Path to the installed YAML configuration schema file, used to validate and parse the
configuration.
+
+ Raise FileNotFoundError when the schema path does not exist.
'''
- return pkg_resources.resource_filename('borgmatic', 'config/schema.yaml')
+ schema_path = os.path.join(os.path.dirname(borgmatic.config.__file__), 'schema.yaml')
+
+ with open(schema_path):
+ return schema_path
def format_json_error_path_element(path_element):
@@ -66,15 +71,15 @@ def apply_logical_validation(config_filename, parsed_configuration):
below), run through any additional logical validation checks. If there are any such validation
problems, raise a Validation_error.
''' - location_repositories = parsed_configuration.get('location', {}).get('repositories') - check_repositories = parsed_configuration.get('consistency', {}).get('check_repositories', []) + repositories = parsed_configuration.get('repositories') + check_repositories = parsed_configuration.get('check_repositories', []) for repository in check_repositories: - if repository not in location_repositories: + if not any( + repositories_match(repository, config_repository) for config_repository in repositories + ): raise Validation_error( config_filename, - ( - f'Unknown repository in the "consistency" section\'s "check_repositories": {repository}', - ), + (f'Unknown repository in "check_repositories": {repository}',), ) @@ -82,11 +87,15 @@ def parse_configuration(config_filename, schema_filename, overrides=None, resolv ''' Given the path to a config filename in YAML format, the path to a schema filename in a YAML rendition of JSON Schema format, a sequence of configuration file override strings in the form - of "section.option=value", return the parsed configuration as a data structure of nested dicts + of "option.suboption=value", return the parsed configuration as a data structure of nested dicts and lists corresponding to the schema. Example return value: - {'location': {'source_directories': ['/home', '/etc'], 'repository': 'hostname.borg'}, - 'retention': {'keep_daily': 7}, 'consistency': {'checks': ['repository', 'archives']}} + { + 'source_directories': ['/home', '/etc'], + 'repository': 'hostname.borg', + 'keep_daily': 7, + 'checks': ['repository', 'archives'], + } Also return a sequence of logging.LogRecord instances containing any warnings about the configuration. @@ -137,9 +146,17 @@ def normalize_repository_path(repository): def repositories_match(first, second): ''' - Given two repository paths (relative and/or absolute), return whether they match. + Given two repository dicts with keys 'path' (relative and/or absolute), + and 'label', or two repository paths, return whether they match. 
''' - return normalize_repository_path(first) == normalize_repository_path(second) + if isinstance(first, str): + first = {'path': first, 'label': first} + if isinstance(second, str): + second = {'path': second, 'label': second} + return (first.get('label') == second.get('label')) or ( + normalize_repository_path(first.get('path')) + == normalize_repository_path(second.get('path')) + ) def guard_configuration_contains_repository(repository, configurations): @@ -158,8 +175,8 @@ def guard_configuration_contains_repository(repository, configurations): tuple( config_repository for config in configurations.values() - for config_repository in config['location']['repositories'] - if repositories_match(repository, config_repository) + for config_repository in config['repositories'] + if repositories_match(config_repository, repository) ) ) @@ -182,7 +199,7 @@ def guard_single_repository_selected(repository, configurations): tuple( config_repository for config in configurations.values() - for config_repository in config['location']['repositories'] + for config_repository in config['repositories'] ) ) diff --git a/borgmatic/execute.py b/borgmatic/execute.py index 53d1a098..9238a0c3 100644 --- a/borgmatic/execute.py +++ b/borgmatic/execute.py @@ -43,6 +43,23 @@ def output_buffer_for_process(process, exclude_stdouts): return process.stderr if process.stdout in exclude_stdouts else process.stdout +def append_last_lines(last_lines, captured_output, line, output_log_level): + ''' + Given a rolling list of last lines, a list of captured output, a line to append, and an output + log level, append the line to the last lines and (if necessary) the captured output. Then log + the line at the requested output log level. + ''' + last_lines.append(line) + + if len(last_lines) > ERROR_OUTPUT_MAX_LINE_COUNT: + last_lines.pop(0) + + if output_log_level is None: + captured_output.append(line) + else: + logger.log(output_log_level, line) + + def log_outputs(processes, exclude_stdouts, output_log_level, borg_local_path): ''' Given a sequence of subprocess.Popen() instances for multiple processes, log the output for each @@ -98,15 +115,12 @@ def log_outputs(processes, exclude_stdouts, output_log_level, borg_local_path): # Keep the last few lines of output in case the process errors, and we need the output for # the exception below. - last_lines = buffer_last_lines[ready_buffer] - last_lines.append(line) - if len(last_lines) > ERROR_OUTPUT_MAX_LINE_COUNT: - last_lines.pop(0) - - if output_log_level is None: - captured_outputs[ready_process].append(line) - else: - logger.log(output_log_level, line) + append_last_lines( + buffer_last_lines[ready_buffer], + captured_outputs[ready_process], + line, + output_log_level, + ) if not still_running: break @@ -125,8 +139,18 @@ def log_outputs(processes, exclude_stdouts, output_log_level, borg_local_path): # If an error occurs, include its output in the raised exception so that we don't # inadvertently hide error output. output_buffer = output_buffer_for_process(process, exclude_stdouts) - last_lines = buffer_last_lines[output_buffer] if output_buffer else [] + + # Collect any straggling output lines that came in since we last gathered output. 
+ while output_buffer: # pragma: no cover + line = output_buffer.readline().rstrip().decode() + if not line: + break + + append_last_lines( + last_lines, captured_outputs[process], line, output_log_level=logging.ERROR + ) + if len(last_lines) == ERROR_OUTPUT_MAX_LINE_COUNT: last_lines.insert(0, '...') @@ -212,14 +236,21 @@ def execute_command( def execute_command_and_capture_output( - full_command, capture_stderr=False, shell=False, extra_environment=None, working_directory=None, + full_command, + capture_stderr=False, + shell=False, + extra_environment=None, + working_directory=None, + borg_local_path=None, ): ''' Execute the given command (a sequence of command/argument strings), capturing and returning its output (stdout). If capture stderr is True, then capture and return stderr in addition to stdout. If shell is True, execute the command within a shell. If an extra environment dict is given, then use it to augment the current environment, and pass the result into the command. If - a working directory is given, use that as the present working directory when running the command. + a working directory is given, use that as the present working directory when running the + command. If a Borg local path is given, and the command matches it (regardless of arguments), + treat exit code 1 as a warning instead of an error. Raise subprocesses.CalledProcessError if an error occurs while running the command. ''' @@ -235,12 +266,10 @@ def execute_command_and_capture_output( env=environment, cwd=working_directory, ) - logger.warning(f'Command output: {output}') except subprocess.CalledProcessError as error: - if exit_code_indicates_error(command, error.returncode): + if exit_code_indicates_error(command, error.returncode, borg_local_path): raise output = error.output - logger.warning(f'Command output: {output}') return output.decode() if output is not None else None diff --git a/borgmatic/hooks/cronhub.py b/borgmatic/hooks/cronhub.py index 816cb68f..d7bf2abe 100644 --- a/borgmatic/hooks/cronhub.py +++ b/borgmatic/hooks/cronhub.py @@ -14,7 +14,7 @@ MONITOR_STATE_TO_CRONHUB = { def initialize_monitor( - ping_url, config_filename, monitoring_log_level, dry_run + ping_url, config, config_filename, monitoring_log_level, dry_run ): # pragma: no cover ''' No initialization is necessary for this monitor. @@ -22,7 +22,7 @@ def initialize_monitor( pass -def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_run, action_name): +def ping_monitor(hook_config, config, config_filename, state, monitoring_log_level, dry_run, action_name): ''' Ping the configured Cronhub URL, modified with the monitor.State. Use the given configuration filename in any log entries. If this is a dry run, then don't actually ping anything. @@ -55,7 +55,7 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_ def destroy_monitor( - ping_url_or_uuid, config_filename, monitoring_log_level, dry_run + ping_url_or_uuid, config, config_filename, monitoring_log_level, dry_run ): # pragma: no cover ''' No destruction is necessary for this monitor. diff --git a/borgmatic/hooks/cronitor.py b/borgmatic/hooks/cronitor.py index f514103d..f17b3a70 100644 --- a/borgmatic/hooks/cronitor.py +++ b/borgmatic/hooks/cronitor.py @@ -14,7 +14,7 @@ MONITOR_STATE_TO_CRONITOR = { def initialize_monitor( - ping_url, config_filename, monitoring_log_level, dry_run + ping_url, config, config_filename, monitoring_log_level, dry_run ): # pragma: no cover ''' No initialization is necessary for this monitor. 
@@ -22,7 +22,7 @@ def initialize_monitor( pass -def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_run, action_name): +def ping_monitor(hook_config, config, config_filename, state, monitoring_log_level, dry_run, action_name): ''' Ping the configured Cronitor URL, modified with the monitor.State. Use the given configuration filename in any log entries. If this is a dry run, then don't actually ping anything. @@ -56,7 +56,7 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_ def destroy_monitor( - ping_url_or_uuid, config_filename, monitoring_log_level, dry_run + ping_url_or_uuid, config, config_filename, monitoring_log_level, dry_run ): # pragma: no cover ''' No destruction is necessary for this monitor. diff --git a/borgmatic/hooks/dispatch.py b/borgmatic/hooks/dispatch.py index fa7bd9b3..d98473ab 100644 --- a/borgmatic/hooks/dispatch.py +++ b/borgmatic/hooks/dispatch.py @@ -27,18 +27,17 @@ HOOK_NAME_TO_MODULE = { } -def call_hook(function_name, hooks, log_prefix, hook_name, *args, **kwargs): +def call_hook(function_name, config, log_prefix, hook_name, *args, **kwargs): ''' - Given the hooks configuration dict and a prefix to use in log entries, call the requested - function of the Python module corresponding to the given hook name. Supply that call with the - configuration for this hook (if any), the log prefix, and any given args and kwargs. Return any - return value. + Given a configuration dict and a prefix to use in log entries, call the requested function of + the Python module corresponding to the given hook name. Supply that call with the configuration + for this hook (if any), the log prefix, and any given args and kwargs. Return any return value. Raise ValueError if the hook name is unknown. Raise AttributeError if the function name is not found in the module. Raise anything else that the called function raises. ''' - config = hooks.get(hook_name, {}) + hook_config = config.get(hook_name, {}) try: module = HOOK_NAME_TO_MODULE[hook_name] @@ -46,15 +45,15 @@ def call_hook(function_name, hooks, log_prefix, hook_name, *args, **kwargs): raise ValueError(f'Unknown hook name: {hook_name}') logger.debug(f'{log_prefix}: Calling {hook_name} hook function {function_name}') - return getattr(module, function_name)(config, log_prefix, *args, **kwargs) + return getattr(module, function_name)(hook_config, config, log_prefix, *args, **kwargs) -def call_hooks(function_name, hooks, log_prefix, hook_names, *args, **kwargs): +def call_hooks(function_name, config, log_prefix, hook_names, *args, **kwargs): ''' - Given the hooks configuration dict and a prefix to use in log entries, call the requested - function of the Python module corresponding to each given hook name. Supply each call with the - configuration for that hook, the log prefix, and any given args and kwargs. Collect any return - values into a dict from hook name to return value. + Given a configuration dict and a prefix to use in log entries, call the requested function of + the Python module corresponding to each given hook name. Supply each call with the configuration + for that hook, the log prefix, and any given args and kwargs. Collect any return values into a + dict from hook name to return value. If the hook name is not present in the hooks configuration, then don't call the function for it and omit it from the return values. 
@@ -64,23 +63,23 @@ def call_hooks(function_name, hooks, log_prefix, hook_names, *args, **kwargs): Raise anything else that a called function raises. An error stops calls to subsequent functions. ''' return { - hook_name: call_hook(function_name, hooks, log_prefix, hook_name, *args, **kwargs) + hook_name: call_hook(function_name, config, log_prefix, hook_name, *args, **kwargs) for hook_name in hook_names - if hooks.get(hook_name) + if config.get(hook_name) } -def call_hooks_even_if_unconfigured(function_name, hooks, log_prefix, hook_names, *args, **kwargs): +def call_hooks_even_if_unconfigured(function_name, config, log_prefix, hook_names, *args, **kwargs): ''' - Given the hooks configuration dict and a prefix to use in log entries, call the requested - function of the Python module corresponding to each given hook name. Supply each call with the - configuration for that hook, the log prefix, and any given args and kwargs. Collect any return - values into a dict from hook name to return value. + Given a configuration dict and a prefix to use in log entries, call the requested function of + the Python module corresponding to each given hook name. Supply each call with the configuration + for that hook, the log prefix, and any given args and kwargs. Collect any return values into a + dict from hook name to return value. Raise AttributeError if the function name is not found in the module. Raise anything else that a called function raises. An error stops calls to subsequent functions. ''' return { - hook_name: call_hook(function_name, hooks, log_prefix, hook_name, *args, **kwargs) + hook_name: call_hook(function_name, config, log_prefix, hook_name, *args, **kwargs) for hook_name in hook_names } diff --git a/borgmatic/hooks/healthchecks.py b/borgmatic/hooks/healthchecks.py index b725e059..aa75778a 100644 --- a/borgmatic/hooks/healthchecks.py +++ b/borgmatic/hooks/healthchecks.py @@ -70,7 +70,7 @@ def format_buffered_logs_for_payload(): return payload -def initialize_monitor(hook_config, config_filename, monitoring_log_level, dry_run): +def initialize_monitor(hook_config, config, config_filename, monitoring_log_level, dry_run): ''' Add a handler to the root logger that stores in memory the most recent logs emitted. That way, we can send them all to Healthchecks upon a finish or failure state. But skip this if the @@ -90,7 +90,7 @@ def initialize_monitor(hook_config, config_filename, monitoring_log_level, dry_r ) -def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_run, action_name): +def ping_monitor(hook_config, config, config_filename, state, monitoring_log_level, dry_run, action_name): ''' Ping the configured Healthchecks URL or UUID, modified with the monitor.State. Use the given configuration filename in any log entries, and log to Healthchecks with the giving log level. @@ -133,7 +133,7 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_ logger.warning(f'{config_filename}: Healthchecks error: {error}') -def destroy_monitor(hook_config, config_filename, monitoring_log_level, dry_run): +def destroy_monitor(hook_config, config, config_filename, monitoring_log_level, dry_run): ''' Remove the monitor handler that was added to the root logger. This prevents the handler from getting reused by other instances of this monitor. 
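The cronhub, cronitor, and healthchecks hunks above all make the same interface change: every monitor hook function now takes the full configuration dict as its second argument, even when it goes unused. A hypothetical minimal monitor hook module, sketched against that revised interface:

    # Hypothetical module, shown only to illustrate the revised hook interface.
    def initialize_monitor(hook_config, config, config_filename, monitoring_log_level, dry_run):
        '''No initialization is necessary for this example monitor.'''
        pass


    def ping_monitor(hook_config, config, config_filename, state, monitoring_log_level, dry_run, action_name):
        '''Log the monitoring state instead of pinging a real service.'''
        if dry_run:
            return

        print(f'{config_filename}: {state.name} ping for action {action_name or "all"}')


    def destroy_monitor(hook_config, config, config_filename, monitoring_log_level, dry_run):
        '''No destruction is necessary for this example monitor.'''
        pass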
diff --git a/borgmatic/hooks/mongodb.py b/borgmatic/hooks/mongodb.py index be5f656b..c94a084d 100644 --- a/borgmatic/hooks/mongodb.py +++ b/borgmatic/hooks/mongodb.py @@ -6,21 +6,20 @@ from borgmatic.hooks import dump logger = logging.getLogger(__name__) -def make_dump_path(location_config): # pragma: no cover +def make_dump_path(config): # pragma: no cover ''' - Make the dump path from the given location configuration and the name of this hook. + Make the dump path from the given configuration dict and the name of this hook. ''' return dump.make_database_dump_path( - location_config.get('borgmatic_source_directory'), 'mongodb_databases' + config.get('borgmatic_source_directory'), 'mongodb_databases' ) -def dump_databases(databases, log_prefix, location_config, dry_run): +def dump_databases(databases, config, log_prefix, dry_run): ''' Dump the given MongoDB databases to a named pipe. The databases are supplied as a sequence of - dicts, one dict describing each database as per the configuration schema. Use the given log - prefix in any log entries. Use the given location configuration dict to construct the - destination path. + dicts, one dict describing each database as per the configuration schema. Use the configuration + dict to construct the destination path and the given log prefix in any log entries. Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence. @@ -33,7 +32,7 @@ def dump_databases(databases, log_prefix, location_config, dry_run): for database in databases: name = database['name'] dump_filename = dump.make_database_dump_filename( - make_dump_path(location_config), name, database.get('hostname') + make_dump_path(config), name, database.get('hostname') ) dump_format = database.get('format', 'archive') @@ -82,47 +81,57 @@ def build_dump_command(database, dump_filename, dump_format): return command -def remove_database_dumps(databases, log_prefix, location_config, dry_run): # pragma: no cover +def remove_database_dumps(databases, config, log_prefix, dry_run): # pragma: no cover ''' Remove all database dump files for this hook regardless of the given databases. Use the log - prefix in any log entries. Use the given location configuration dict to construct the - destination path. If this is a dry run, then don't actually remove anything. + prefix in any log entries. Use the given configuration dict to construct the destination path. + If this is a dry run, then don't actually remove anything. ''' - dump.remove_database_dumps(make_dump_path(location_config), 'MongoDB', log_prefix, dry_run) + dump.remove_database_dumps(make_dump_path(config), 'MongoDB', log_prefix, dry_run) -def make_database_dump_pattern( - databases, log_prefix, location_config, name=None -): # pragma: no cover +def make_database_dump_pattern(databases, config, log_prefix, name=None): # pragma: no cover ''' - Given a sequence of configurations dicts, a prefix to log with, a location configuration dict, + Given a sequence of database configuration dicts, a configuration dict, a prefix to log with, and a database name to match, return the corresponding glob patterns to match the database dump in an archive.
''' - return dump.make_database_dump_filename(make_dump_path(location_config), name, hostname='*') + return dump.make_database_dump_filename(make_dump_path(config), name, hostname='*') -def restore_database_dump(database_config, log_prefix, location_config, dry_run, extract_process): +def restore_database_dump( + databases_config, config, log_prefix, database_name, dry_run, extract_process, connection_params +): ''' - Restore the given MongoDB database from an extract stream. The database is supplied as a - one-element sequence containing a dict describing the database, as per the configuration schema. - Use the given log prefix in any log entries. If this is a dry run, then don't actually restore - anything. Trigger the given active extract process (an instance of subprocess.Popen) to produce - output to consume. + Restore the given MongoDB database from an extract stream. The databases are supplied as a + sequence containing one dict describing each database (as per the configuration schema), but + only the database corresponding to the given database name is restored. Use the configuration + dict to construct the destination path and the given log prefix in any log entries. If this is a + dry run, then don't actually restore anything. Trigger the given active extract process (an + instance of subprocess.Popen) to produce output to consume. If the extract process is None, then restore the dump from the filesystem rather than from an extract stream. ''' dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else '' - if len(database_config) != 1: - raise ValueError('The database configuration value is invalid') + try: + database = next( + database_config + for database_config in databases_config + if database_config.get('name') == database_name + ) + except StopIteration: + raise ValueError( + f'A database named "{database_name}" could not be found in the configuration' + ) - database = database_config[0] dump_filename = dump.make_database_dump_filename( - make_dump_path(location_config), database['name'], database.get('hostname') + make_dump_path(config), database['name'], database.get('hostname') + ) + restore_command = build_restore_command( + extract_process, database, dump_filename, connection_params ) - restore_command = build_restore_command(extract_process, database, dump_filename) logger.debug(f"{log_prefix}: Restoring MongoDB database {database['name']}{dry_run_label}") if dry_run: @@ -138,10 +147,21 @@ def restore_database_dump(database_config, log_prefix, location_config, dry_run, ) -def build_restore_command(extract_process, database, dump_filename): +def build_restore_command(extract_process, database, dump_filename, connection_params): ''' Return the mongorestore command from a single database configuration. 
''' + hostname = connection_params['hostname'] or database.get( + 'restore_hostname', database.get('hostname') + ) + port = str(connection_params['port'] or database.get('restore_port', database.get('port', ''))) + username = connection_params['username'] or database.get( + 'restore_username', database.get('username') + ) + password = connection_params['password'] or database.get( + 'restore_password', database.get('password') + ) + command = ['mongorestore'] if extract_process: command.append('--archive') @@ -149,16 +169,19 @@ def build_restore_command(extract_process, database, dump_filename): command.extend(('--dir', dump_filename)) if database['name'] != 'all': command.extend(('--drop', '--db', database['name'])) - if 'hostname' in database: - command.extend(('--host', database['hostname'])) - if 'port' in database: - command.extend(('--port', str(database['port']))) - if 'username' in database: - command.extend(('--username', database['username'])) - if 'password' in database: - command.extend(('--password', database['password'])) + if hostname: + command.extend(('--host', hostname)) + if port: + command.extend(('--port', str(port))) + if username: + command.extend(('--username', username)) + if password: + command.extend(('--password', password)) if 'authentication_database' in database: command.extend(('--authenticationDatabase', database['authentication_database'])) if 'restore_options' in database: command.extend(database['restore_options'].split(' ')) + if database['schemas']: + for schema in database['schemas']: + command.extend(('--nsInclude', schema)) return command diff --git a/borgmatic/hooks/mysql.py b/borgmatic/hooks/mysql.py index 0bf97745..a3b34f15 100644 --- a/borgmatic/hooks/mysql.py +++ b/borgmatic/hooks/mysql.py @@ -12,13 +12,11 @@ from borgmatic.hooks import dump logger = logging.getLogger(__name__) -def make_dump_path(location_config): # pragma: no cover +def make_dump_path(config): # pragma: no cover ''' - Make the dump path from the given location configuration and the name of this hook. + Make the dump path from the given configuration dict and the name of this hook. ''' - return dump.make_database_dump_path( - location_config.get('borgmatic_source_directory'), 'mysql_databases' - ) + return dump.make_database_dump_path(config.get('borgmatic_source_directory'), 'mysql_databases') SYSTEM_DATABASE_NAMES = ('information_schema', 'mysql', 'performance_schema', 'sys') @@ -88,9 +86,7 @@ def execute_dump_command( + (('--user', database['username']) if 'username' in database else ()) + ('--databases',) + database_names - # Use shell redirection rather than execute_command(output_file=open(...)) to prevent - # the open() call on a named pipe from hanging the main borgmatic process. - + ('>', dump_filename) + + ('--result-file', dump_filename) ) logger.debug( @@ -102,16 +98,17 @@ def execute_dump_command( dump.create_named_pipe_for_dump(dump_filename) return execute_command( - dump_command, shell=True, extra_environment=extra_environment, run_to_completion=False, + dump_command, + extra_environment=extra_environment, + run_to_completion=False, ) -def dump_databases(databases, log_prefix, location_config, dry_run): +def dump_databases(databases, config, log_prefix, dry_run): ''' Dump the given MySQL/MariaDB databases to a named pipe. The databases are supplied as a sequence - of dicts, one dict describing each database as per the configuration schema. Use the given log - prefix in any log entries. 
Use the given location configuration dict to construct the - destination path. + of dicts, one dict describing each database as per the configuration schema. Use the given + configuration dict to construct the destination path and the given log prefix in any log entries. Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence. @@ -122,7 +119,7 @@ def dump_databases(databases, log_prefix, location_config, dry_run): logger.info(f'{log_prefix}: Dumping MySQL databases{dry_run_label}') for database in databases: - dump_path = make_dump_path(location_config) + dump_path = make_dump_path(config) extra_environment = {'MYSQL_PWD': database['password']} if 'password' in database else None dump_database_names = database_names_to_dump( database, extra_environment, log_prefix, dry_run @@ -165,49 +162,67 @@ def dump_databases(databases, log_prefix, location_config, dry_run): return [process for process in processes if process] -def remove_database_dumps(databases, log_prefix, location_config, dry_run): # pragma: no cover +def remove_database_dumps(databases, config, log_prefix, dry_run): # pragma: no cover ''' - Remove all database dump files for this hook regardless of the given databases. Use the log - prefix in any log entries. Use the given location configuration dict to construct the - destination path. If this is a dry run, then don't actually remove anything. + Remove all database dump files for this hook regardless of the given databases. Use the given + configuration dict to construct the destination path and the log prefix in any log entries. If + this is a dry run, then don't actually remove anything. ''' - dump.remove_database_dumps(make_dump_path(location_config), 'MySQL', log_prefix, dry_run) + dump.remove_database_dumps(make_dump_path(config), 'MySQL', log_prefix, dry_run) -def make_database_dump_pattern( - databases, log_prefix, location_config, name=None -): # pragma: no cover +def make_database_dump_pattern(databases, config, log_prefix, name=None): # pragma: no cover ''' - Given a sequence of configurations dicts, a prefix to log with, a location configuration dict, - and a database name to match, return the corresponding glob patterns to match the database dump - in an archive. + Given a sequence of configuration dicts, a configuration dict, a prefix to log with, and a + database name to match, return the corresponding glob patterns to match the database dump in an + archive. ''' - return dump.make_database_dump_filename(make_dump_path(location_config), name, hostname='*') + return dump.make_database_dump_filename(make_dump_path(config), name, hostname='*') -def restore_database_dump(database_config, log_prefix, location_config, dry_run, extract_process): +def restore_database_dump( + databases_config, config, log_prefix, database_name, dry_run, extract_process, connection_params +): ''' - Restore the given MySQL/MariaDB database from an extract stream.
The databases are supplied as a + sequence containing one dict describing each database (as per the configuration schema), but + only the database corresponding to the given database name is restored. Use the given log + prefix in any log entries. If this is a dry run, then don't actually restore anything. Trigger + the given active extract process (an instance of subprocess.Popen) to produce output to consume. ''' dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else '' - if len(database_config) != 1: - raise ValueError('The database configuration value is invalid') + try: + database = next( + database_config + for database_config in databases_config + if database_config.get('name') == database_name + ) + except StopIteration: + raise ValueError( + f'A database named "{database_name}" could not be found in the configuration' + ) + + hostname = connection_params['hostname'] or database.get( + 'restore_hostname', database.get('hostname') + ) + port = str(connection_params['port'] or database.get('restore_port', database.get('port', ''))) + username = connection_params['username'] or database.get( + 'restore_username', database.get('username') + ) + password = connection_params['password'] or database.get( + 'restore_password', database.get('password') + ) - database = database_config[0] restore_command = ( ('mysql', '--batch') + (tuple(database['restore_options'].split(' ')) if 'restore_options' in database else ()) - + (('--host', database['hostname']) if 'hostname' in database else ()) - + (('--port', str(database['port'])) if 'port' in database else ()) - + (('--protocol', 'tcp') if 'hostname' in database or 'port' in database else ()) - + (('--user', database['username']) if 'username' in database else ()) + + (('--host', hostname) if hostname else ()) + + (('--port', str(port)) if port else ()) + + (('--protocol', 'tcp') if hostname or port else ()) + + (('--user', username) if username else ()) ) - extra_environment = {'MYSQL_PWD': database['password']} if 'password' in database else None + extra_environment = {'MYSQL_PWD': password} if password else None logger.debug(f"{log_prefix}: Restoring MySQL database {database['name']}{dry_run_label}") if dry_run: diff --git a/borgmatic/hooks/ntfy.py b/borgmatic/hooks/ntfy.py index b460bfd4..5a46047e 100644 --- a/borgmatic/hooks/ntfy.py +++ b/borgmatic/hooks/ntfy.py @@ -6,7 +6,7 @@ logger = logging.getLogger(__name__) def initialize_monitor( - ping_url, config_filename, monitoring_log_level, dry_run + ping_url, config, config_filename, monitoring_log_level, dry_run ): # pragma: no cover ''' No initialization is necessary for this monitor. @@ -14,7 +14,7 @@ def initialize_monitor( pass -def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_run, action_name): +def ping_monitor(hook_config, config, config_filename, state, monitoring_log_level, dry_run, action_name): ''' Ping the configured Ntfy topic. Use the given configuration filename in any log entries. If this is a dry run, then don't actually ping anything. 
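The MongoDB and MySQL restore hunks above (and the PostgreSQL ones further down) all resolve connection settings with the same precedence, which can be summarized with a hypothetical helper: a value supplied at restore time wins, then a "restore_"-prefixed option, then the plain option.

    # Hypothetical helper mirroring the inline precedence logic; not part of this patch.
    def resolve_connection_value(connection_params, database, key):
        return connection_params.get(key) or database.get(f'restore_{key}', database.get(key))

    database = {
        'name': 'users',
        'hostname': 'database.example.org',
        'restore_hostname': 'restored.example.org',
    }

    # No restore-time override: the restore_hostname option wins over hostname.
    assert resolve_connection_value({'hostname': None}, database, 'hostname') == 'restored.example.org'

    # A value passed in at restore time wins over both configured options.
    assert resolve_connection_value({'hostname': 'cli.example.org'}, database, 'hostname') == 'cli.example.org'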
@@ -28,8 +28,8 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_ state_config = hook_config.get( state.name.lower(), { - 'title': f'A Borgmatic {state.name} event happened', - 'message': f'A Borgmatic {state.name} event happened', + 'title': f'A borgmatic {state.name} event happened', + 'message': f'A borgmatic {state.name} event happened', 'priority': 'default', 'tags': 'borgmatic', }, @@ -75,7 +75,7 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_ def destroy_monitor( - ping_url_or_uuid, config_filename, monitoring_log_level, dry_run + ping_url_or_uuid, config, config_filename, monitoring_log_level, dry_run ): # pragma: no cover ''' No destruction is necessary for this monitor. diff --git a/borgmatic/hooks/pagerduty.py b/borgmatic/hooks/pagerduty.py index b9ff706a..87cf71a7 100644 --- a/borgmatic/hooks/pagerduty.py +++ b/borgmatic/hooks/pagerduty.py @@ -13,7 +13,7 @@ EVENTS_API_URL = 'https://events.pagerduty.com/v2/enqueue' def initialize_monitor( - integration_key, config_filename, monitoring_log_level, dry_run + integration_key, config, config_filename, monitoring_log_level, dry_run ): # pragma: no cover ''' No initialization is necessary for this monitor. @@ -21,7 +21,7 @@ def initialize_monitor( pass -def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_run, action_name): +def ping_monitor(hook_config, config, config_filename, state, monitoring_log_level, dry_run, action_name): ''' If this is an error state, create a PagerDuty event with the configured integration key. Use the given configuration filename in any log entries. If this is a dry run, then don't actually @@ -75,7 +75,7 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_ def destroy_monitor( - ping_url_or_uuid, config_filename, monitoring_log_level, dry_run + ping_url_or_uuid, config, config_filename, monitoring_log_level, dry_run ): # pragma: no cover ''' No destruction is necessary for this monitor. diff --git a/borgmatic/hooks/postgresql.py b/borgmatic/hooks/postgresql.py index d4799f5f..598b878c 100644 --- a/borgmatic/hooks/postgresql.py +++ b/borgmatic/hooks/postgresql.py @@ -1,6 +1,8 @@ import csv +import itertools import logging import os +import shlex from borgmatic.execute import ( execute_command, @@ -12,22 +14,32 @@ from borgmatic.hooks import dump logger = logging.getLogger(__name__) -def make_dump_path(location_config): # pragma: no cover +def make_dump_path(config): # pragma: no cover ''' - Make the dump path from the given location configuration and the name of this hook. + Make the dump path from the given configuration dict and the name of this hook. ''' return dump.make_database_dump_path( - location_config.get('borgmatic_source_directory'), 'postgresql_databases' + config.get('borgmatic_source_directory'), 'postgresql_databases' ) -def make_extra_environment(database): +def make_extra_environment(database, restore_connection_params=None): ''' Make the extra_environment dict from the given database configuration. + If restore connection params are given, this is for a restore operation. 
''' extra = dict() - if 'password' in database: - extra['PGPASSWORD'] = database['password'] + + try: + if restore_connection_params: + extra['PGPASSWORD'] = restore_connection_params.get('password') or database.get( + 'restore_password', database['password'] + ) + else: + extra['PGPASSWORD'] = database['password'] + except (AttributeError, KeyError): + pass + extra['PGSSLMODE'] = database.get('ssl_mode', 'disable') if 'ssl_cert' in database: extra['PGSSLCERT'] = database['ssl_cert'] @@ -59,8 +71,10 @@ def database_names_to_dump(database, extra_environment, log_prefix, dry_run): if dry_run: return () + psql_command = shlex.split(database.get('psql_command') or 'psql') list_command = ( - ('psql', '--list', '--no-password', '--csv', '--tuples-only') + tuple(psql_command) + + ('--list', '--no-password', '--no-psqlrc', '--csv', '--tuples-only') + (('--host', database['hostname']) if 'hostname' in database else ()) + (('--port', str(database['port'])) if 'port' in database else ()) + (('--username', database['username']) if 'username' in database else ()) @@ -78,12 +92,12 @@ def database_names_to_dump(database, extra_environment, log_prefix, dry_run): ) -def dump_databases(databases, log_prefix, location_config, dry_run): +def dump_databases(databases, config, log_prefix, dry_run): ''' Dump the given PostgreSQL databases to a named pipe. The databases are supplied as a sequence of - dicts, one dict describing each database as per the configuration schema. Use the given log - prefix in any log entries. Use the given location configuration dict to construct the - destination path. + dicts, one dict describing each database as per the configuration schema. Use the given + configuration dict to construct the destination path and the given log prefix in any log + entries. Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence. 
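Two of the PostgreSQL changes above are easy to see in isolation: psql_command is now run through shlex.split(), so it can carry arguments, and make_extra_environment() sets PGPASSWORD with restore-aware fallbacks while always setting PGSSLMODE. A sketch, assuming borgmatic is importable and using illustrative values:

    import shlex

    from borgmatic.hooks.postgresql import make_extra_environment

    # psql_command may now include arguments, e.g. for running psql inside a container.
    assert shlex.split('docker exec --user postgres my_pg_container psql') == [
        'docker', 'exec', '--user', 'postgres', 'my_pg_container', 'psql'
    ]

    database = {'name': 'users', 'password': 'trustsome1', 'restore_password': 'restored1'}

    # Dumping: the configured password, plus an explicit PGSSLMODE default of "disable".
    environment = make_extra_environment(database)
    assert environment['PGPASSWORD'] == 'trustsome1'
    assert environment['PGSSLMODE'] == 'disable'

    # Restoring: restore_password takes precedence over password, unless the restore
    # connection parameters supply a password of their own.
    environment = make_extra_environment(database, restore_connection_params={'password': None})
    assert environment['PGPASSWORD'] == 'restored1'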
@@ -97,7 +111,7 @@ def dump_databases(databases, log_prefix, location_config, dry_run): for database in databases: extra_environment = make_extra_environment(database) - dump_path = make_dump_path(location_config) + dump_path = make_dump_path(config) dump_database_names = database_names_to_dump( database, extra_environment, log_prefix, dry_run ) @@ -122,10 +136,16 @@ def dump_databases(databases, log_prefix, location_config, dry_run): continue command = ( - (dump_command, '--no-password', '--clean', '--if-exists',) + ( + dump_command, + '--no-password', + '--clean', + '--if-exists', + ) + (('--host', database['hostname']) if 'hostname' in database else ()) + (('--port', str(database['port'])) if 'port' in database else ()) + (('--username', database['username']) if 'username' in database else ()) + + (('--no-owner',) if database.get('no_owner', False) else ()) + (('--format', dump_format) if dump_format else ()) + (('--file', dump_filename) if dump_format == 'directory' else ()) + (tuple(database['options'].split(' ')) if 'options' in database else ()) @@ -145,7 +165,9 @@ def dump_databases(databases, log_prefix, location_config, dry_run): if dump_format == 'directory': dump.create_parent_directory_for_dump(dump_filename) execute_command( - command, shell=True, extra_environment=extra_environment, + command, + shell=True, + extra_environment=extra_environment, ) else: dump.create_named_pipe_for_dump(dump_filename) @@ -161,72 +183,100 @@ def dump_databases(databases, log_prefix, location_config, dry_run): return processes -def remove_database_dumps(databases, log_prefix, location_config, dry_run): # pragma: no cover +def remove_database_dumps(databases, config, log_prefix, dry_run): # pragma: no cover ''' - Remove all database dump files for this hook regardless of the given databases. Use the log - prefix in any log entries. Use the given location configuration dict to construct the - destination path. If this is a dry run, then don't actually remove anything. + Remove all database dump files for this hook regardless of the given databases. Use the given + configuration dict to construct the destination path and the log prefix in any log entries. If + this is a dry run, then don't actually remove anything. ''' - dump.remove_database_dumps(make_dump_path(location_config), 'PostgreSQL', log_prefix, dry_run) + dump.remove_database_dumps(make_dump_path(config), 'PostgreSQL', log_prefix, dry_run) -def make_database_dump_pattern( - databases, log_prefix, location_config, name=None -): # pragma: no cover +def make_database_dump_pattern(databases, config, log_prefix, name=None): # pragma: no cover ''' - Given a sequence of configurations dicts, a prefix to log with, a location configuration dict, - and a database name to match, return the corresponding glob patterns to match the database dump - in an archive. + Given a sequence of configuration dicts, a configuration dict, a prefix to log with, and a + database name to match, return the corresponding glob patterns to match the database dump in an + archive. ''' - return dump.make_database_dump_filename(make_dump_path(location_config), name, hostname='*') + return dump.make_database_dump_filename(make_dump_path(config), name, hostname='*') -def restore_database_dump(database_config, log_prefix, location_config, dry_run, extract_process): +def restore_database_dump( + databases_config, config, log_prefix, database_name, dry_run, extract_process, connection_params +): ''' - Restore the given PostgreSQL database from an extract stream.
The database is supplied as a - one-element sequence containing a dict describing the database, as per the configuration schema. - Use the given log prefix in any log entries. If this is a dry run, then don't actually restore - anything. Trigger the given active extract process (an instance of subprocess.Popen) to produce - output to consume. + Restore the given PostgreSQL database from an extract stream. The databases are supplied as a + sequence containing one dict describing each database (as per the configuration schema), but + only the database corresponding to the given database name is restored. Use the given + configuration dict to construct the destination path and the given log prefix in any log + entries. If this is a dry run, then don't actually restore anything. Trigger the given active + extract process (an instance of subprocess.Popen) to produce output to consume. If the extract process is None, then restore the dump from the filesystem rather than from an extract stream. + + Use the given connection parameters to connect to the database. The connection parameters are + hostname, port, username, and password. ''' dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else '' - if len(database_config) != 1: - raise ValueError('The database configuration value is invalid') + try: + database = next( + database_config + for database_config in databases_config + if database_config.get('name') == database_name + ) + except StopIteration: + raise ValueError( + f'A database named "{database_name}" could not be found in the configuration' + ) + + hostname = connection_params['hostname'] or database.get( + 'restore_hostname', database.get('hostname') + ) + port = str(connection_params['port'] or database.get('restore_port', database.get('port', ''))) + username = connection_params['username'] or database.get( + 'restore_username', database.get('username') + ) - database = database_config[0] all_databases = bool(database['name'] == 'all') dump_filename = dump.make_database_dump_filename( - make_dump_path(location_config), database['name'], database.get('hostname') + make_dump_path(config), database['name'], database.get('hostname') ) - psql_command = database.get('psql_command') or 'psql' + psql_command = shlex.split(database.get('psql_command') or 'psql') analyze_command = ( - (psql_command, '--no-password', '--quiet') - + (('--host', database['hostname']) if 'hostname' in database else ()) - + (('--port', str(database['port'])) if 'port' in database else ()) - + (('--username', database['username']) if 'username' in database else ()) + tuple(psql_command) + + ('--no-password', '--no-psqlrc', '--quiet') + + (('--host', hostname) if hostname else ()) + + (('--port', port) if port else ()) + + (('--username', username) if username else ()) + (('--dbname', database['name']) if not all_databases else ()) + (tuple(database['analyze_options'].split(' ')) if 'analyze_options' in database else ()) + ('--command', 'ANALYZE') ) - pg_restore_command = database.get('pg_restore_command') or 'pg_restore' + use_psql_command = all_databases or database.get('format') == 'plain' + pg_restore_command = shlex.split(database.get('pg_restore_command') or 'pg_restore') restore_command = ( - (psql_command if all_databases else pg_restore_command, '--no-password') - + ( - ('--if-exists', '--exit-on-error', '--clean', '--dbname', database['name']) - if not all_databases - else () - ) - + (('--host', database['hostname']) if 'hostname' in database else ()) - + (('--port', 
str(database['port'])) if 'port' in database else ()) - + (('--username', database['username']) if 'username' in database else ()) + tuple(psql_command if use_psql_command else pg_restore_command) + + ('--no-password',) + + (('--no-psqlrc',) if use_psql_command else ('--if-exists', '--exit-on-error', '--clean')) + + (('--dbname', database['name']) if not all_databases else ()) + + (('--host', hostname) if hostname else ()) + + (('--port', port) if port else ()) + + (('--username', username) if username else ()) + + (('--no-owner',) if database.get('no_owner', False) else ()) + (tuple(database['restore_options'].split(' ')) if 'restore_options' in database else ()) + (() if extract_process else (dump_filename,)) + + tuple( + itertools.chain.from_iterable(('--schema', schema) for schema in database['schemas']) + if database.get('schemas') + else () + ) + ) + + extra_environment = make_extra_environment( + database, restore_connection_params=connection_params ) - extra_environment = make_extra_environment(database) logger.debug(f"{log_prefix}: Restoring PostgreSQL database {database['name']}{dry_run_label}") if dry_run: diff --git a/borgmatic/hooks/sqlite.py b/borgmatic/hooks/sqlite.py index d9f105d8..524318bc 100644 --- a/borgmatic/hooks/sqlite.py +++ b/borgmatic/hooks/sqlite.py @@ -7,21 +7,21 @@ from borgmatic.hooks import dump logger = logging.getLogger(__name__) -def make_dump_path(location_config): # pragma: no cover +def make_dump_path(config): # pragma: no cover ''' - Make the dump path from the given location configuration and the name of this hook. + Make the dump path from the given configuration dict and the name of this hook. ''' return dump.make_database_dump_path( - location_config.get('borgmatic_source_directory'), 'sqlite_databases' + config.get('borgmatic_source_directory'), 'sqlite_databases' ) -def dump_databases(databases, log_prefix, location_config, dry_run): +def dump_databases(databases, config, log_prefix, dry_run): ''' Dump the given SQLite3 databases to a file. The databases are supplied as a sequence of - configuration dicts, as per the configuration schema. Use the given log prefix in any log - entries. Use the given location configuration dict to construct the destination path. If this - is a dry run, then don't actually dump anything. + configuration dicts, as per the configuration schema. Use the given configuration dict to + construct the destination path and the given log prefix in any log entries. If this is a dry + run, then don't actually dump anything. ''' dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else '' processes = [] @@ -38,7 +38,7 @@ def dump_databases(databases, log_prefix, location_config, dry_run): f'{log_prefix}: No SQLite database at {database_path}; An empty database will be created and dumped' ) - dump_path = make_dump_path(location_config) + dump_path = make_dump_path(config) dump_filename = dump.make_database_dump_filename(dump_path, database['name']) if os.path.exists(dump_filename): logger.warning( @@ -65,40 +65,50 @@ def dump_databases(databases, log_prefix, location_config, dry_run): return processes -def remove_database_dumps(databases, log_prefix, location_config, dry_run): # pragma: no cover +def remove_database_dumps(databases, config, log_prefix, dry_run): # pragma: no cover ''' Remove the given SQLite3 database dumps from the filesystem. The databases are supplied as a - sequence of configuration dicts, as per the configuration schema. Use the given log prefix in - any log entries. 
Use the given location configuration dict to construct the destination path. - If this is a dry run, then don't actually remove anything. + sequence of configuration dicts, as per the configuration schema. Use the given configuration + dict to construct the destination path and the given log prefix in any log entries. If this is a + dry run, then don't actually remove anything. ''' - dump.remove_database_dumps(make_dump_path(location_config), 'SQLite', log_prefix, dry_run) + dump.remove_database_dumps(make_dump_path(config), 'SQLite', log_prefix, dry_run) -def make_database_dump_pattern( - databases, log_prefix, location_config, name=None -): # pragma: no cover +def make_database_dump_pattern(databases, config, log_prefix, name=None): # pragma: no cover ''' Make a pattern that matches the given SQLite3 databases. The databases are supplied as a sequence of configuration dicts, as per the configuration schema. ''' - return dump.make_database_dump_filename(make_dump_path(location_config), name) + return dump.make_database_dump_filename(make_dump_path(config), name) -def restore_database_dump(database_config, log_prefix, location_config, dry_run, extract_process): +def restore_database_dump( + databases_config, config, log_prefix, database_name, dry_run, extract_process, connection_params +): ''' - Restore the given SQLite3 database from an extract stream. The database is supplied as a - one-element sequence containing a dict describing the database, as per the configuration schema. - Use the given log prefix in any log entries. If this is a dry run, then don't actually restore - anything. Trigger the given active extract process (an instance of subprocess.Popen) to produce - output to consume. + Restore the given SQLite3 database from an extract stream. The databases are supplied as a + sequence containing one dict describing each database (as per the configuration schema), but + only the database corresponding to the given database name is restored. Use the given log prefix + in any log entries. If this is a dry run, then don't actually restore anything. Trigger the + given active extract process (an instance of subprocess.Popen) to produce output to consume. ''' dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else '' - if len(database_config) != 1: - raise ValueError('The database configuration value is invalid') + try: + database = next( + database_config + for database_config in databases_config + if database_config.get('name') == database_name + ) + except StopIteration: + raise ValueError( + f'A database named "{database_name}" could not be found in the configuration' + ) - database_path = database_config[0]['path'] + database_path = connection_params['restore_path'] or database.get( + 'restore_path', database.get('path') + ) logger.debug(f'{log_prefix}: Restoring SQLite database at {database_path}{dry_run_label}') if dry_run: diff --git a/borgmatic/logger.py b/borgmatic/logger.py index e098bf96..cda5151d 100644 --- a/borgmatic/logger.py +++ b/borgmatic/logger.py @@ -68,7 +68,7 @@ class Multi_stream_handler(logging.Handler): def emit(self, record): ''' - Dispatch the log record to the approriate stream handler for the record's log level. + Dispatch the log record to the appropriate stream handler for the record's log level. 
''' self.log_level_to_handler[record.levelno].emit(record) @@ -141,6 +141,7 @@ def add_logging_level(level_name, level_number): ANSWER = logging.WARN - 5 +DISABLED = logging.CRITICAL + 10 def add_custom_log_levels(): # pragma: no cover @@ -148,6 +149,7 @@ def add_custom_log_levels(): # pragma: no cover Add a custom log level between WARN and INFO for user-requested answers. ''' add_logging_level('ANSWER', ANSWER) + add_logging_level('DISABLED', DISABLED) def configure_logging( @@ -156,6 +158,7 @@ def configure_logging( log_file_log_level=None, monitoring_log_level=None, log_file=None, + log_file_format=None, ): ''' Configure logging to go to both the console and (syslog or log file). Use the given log levels, @@ -174,10 +177,12 @@ def configure_logging( # Log certain log levels to console stderr and others to stdout. This supports use cases like # grepping (non-error) output. + console_disabled = logging.NullHandler() console_error_handler = logging.StreamHandler(sys.stderr) console_standard_handler = logging.StreamHandler(sys.stdout) console_handler = Multi_stream_handler( { + logging.DISABLED: console_disabled, logging.CRITICAL: console_error_handler, logging.ERROR: console_error_handler, logging.WARN: console_error_handler, @@ -190,7 +195,7 @@ def configure_logging( console_handler.setLevel(console_log_level) syslog_path = None - if log_file is None: + if log_file is None and syslog_log_level != logging.DISABLED: if os.path.exists('/dev/log'): syslog_path = '/dev/log' elif os.path.exists('/var/run/syslog'): @@ -200,12 +205,18 @@ def configure_logging( if syslog_path and not interactive_console(): syslog_handler = logging.handlers.SysLogHandler(address=syslog_path) - syslog_handler.setFormatter(logging.Formatter('borgmatic: %(levelname)s %(message)s')) + syslog_handler.setFormatter( + logging.Formatter('borgmatic: {levelname} {message}', style='{') # noqa: FS003 + ) syslog_handler.setLevel(syslog_log_level) handlers = (console_handler, syslog_handler) - elif log_file: + elif log_file and log_file_log_level != logging.DISABLED: file_handler = logging.handlers.WatchedFileHandler(log_file) - file_handler.setFormatter(logging.Formatter('[%(asctime)s] %(levelname)s: %(message)s')) + file_handler.setFormatter( + logging.Formatter( + log_file_format or '[{asctime}] {levelname}: {message}', style='{' # noqa: FS003 + ) + ) file_handler.setLevel(log_file_log_level) handlers = (console_handler, file_handler) else: diff --git a/borgmatic/verbosity.py b/borgmatic/verbosity.py index cdadd61f..8cba009d 100644 --- a/borgmatic/verbosity.py +++ b/borgmatic/verbosity.py @@ -2,6 +2,7 @@ import logging import borgmatic.logger +VERBOSITY_DISABLED = -2 VERBOSITY_ERROR = -1 VERBOSITY_ANSWER = 0 VERBOSITY_SOME = 1 @@ -15,6 +16,7 @@ def verbosity_to_log_level(verbosity): borgmatic.logger.add_custom_log_levels() return { + VERBOSITY_DISABLED: logging.DISABLED, VERBOSITY_ERROR: logging.ERROR, VERBOSITY_ANSWER: logging.ANSWER, VERBOSITY_SOME: logging.INFO, diff --git a/docs/Dockerfile b/docs/Dockerfile index dcda44f3..118768ce 100644 --- a/docs/Dockerfile +++ b/docs/Dockerfile @@ -1,14 +1,14 @@ -FROM alpine:3.17.1 as borgmatic +FROM docker.io/alpine:3.17.1 as borgmatic COPY . 
/app RUN apk add --no-cache py3-pip py3-ruamel.yaml py3-ruamel.yaml.clib RUN pip install --no-cache /app && generate-borgmatic-config && chmod +r /etc/borgmatic/config.yaml RUN borgmatic --help > /command-line.txt \ - && for action in rcreate transfer create prune compact check extract export-tar mount umount restore rlist list rinfo info break-lock borg; do \ + && for action in rcreate transfer create prune compact check extract config "config bootstrap" "config generate" "config validate" export-tar mount umount restore rlist list rinfo info break-lock borg; do \ echo -e "\n--------------------------------------------------------------------------------\n" >> /command-line.txt \ - && borgmatic "$action" --help >> /command-line.txt; done + && borgmatic $action --help >> /command-line.txt; done -FROM node:19.5.0-alpine as html +FROM docker.io/node:19.5.0-alpine as html ARG ENVIRONMENT=production @@ -28,7 +28,7 @@ COPY . /source RUN NODE_ENV=${ENVIRONMENT} npx eleventy --input=/source/docs --output=/output/docs \ && mv /output/docs/index.html /output/index.html -FROM nginx:1.22.1-alpine +FROM docker.io/nginx:1.22.1-alpine COPY --from=html /output /usr/share/nginx/html COPY --from=borgmatic /etc/borgmatic/config.yaml /usr/share/nginx/html/docs/reference/config.yaml diff --git a/docs/SECURITY.md b/docs/SECURITY.md index 81a633e7..b5f28b50 100644 --- a/docs/SECURITY.md +++ b/docs/SECURITY.md @@ -16,4 +16,4 @@ each. If you find a security vulnerability, please [file a ticket](https://torsion.org/borgmatic/#issues) or [send email directly](mailto:witten@torsion.org) as appropriate. You should expect to hear -back within a few days at most, and generally sooner. +back within a few days at most and generally sooner. diff --git a/docs/_data/borgmatic.js b/docs/_data/borgmatic.js new file mode 100644 index 00000000..5d76ca01 --- /dev/null +++ b/docs/_data/borgmatic.js @@ -0,0 +1,5 @@ +module.exports = function() { + return { + environment: process.env.NODE_ENV || "development" + }; +}; diff --git a/docs/_includes/components/suggestion-link.html b/docs/_includes/components/suggestion-link.html index 2c2d1424..073b8887 100644 --- a/docs/_includes/components/suggestion-link.html +++ b/docs/_includes/components/suggestion-link.html @@ -1,5 +1,5 @@

Improve this documentation

Have an idea on how to make this documentation even better? Use our issue tracker to send your -feedback!

+href="https://torsion.org/borgmatic/#support-and-contributing">issue +tracker to send your feedback!

diff --git a/docs/_includes/components/toc.css b/docs/_includes/components/toc.css index 039673f6..82cf15e5 100644 --- a/docs/_includes/components/toc.css +++ b/docs/_includes/components/toc.css @@ -94,7 +94,7 @@ display: block; } -/* Footer catgory navigation */ +/* Footer category navigation */ .elv-cat-list-active { font-weight: 600; } diff --git a/docs/_includes/layouts/base.njk b/docs/_includes/layouts/base.njk index be588e9b..dbb19d99 100644 --- a/docs/_includes/layouts/base.njk +++ b/docs/_includes/layouts/base.njk @@ -3,6 +3,7 @@ + {{ subtitle + ' - ' if subtitle}}{{ title }} {%- set css %} {% include 'index.css' %} diff --git a/docs/_includes/layouts/main.njk b/docs/_includes/layouts/main.njk index a7e5b665..81300bae 100644 --- a/docs/_includes/layouts/main.njk +++ b/docs/_includes/layouts/main.njk @@ -11,7 +11,7 @@ headerClass: elv-header-default {% set navPages = collections.all | eleventyNavigation %} {% macro renderNavListItem(entry) -%} - {{ entry.title }} + {{ entry.title }} {%- if entry.children.length -%}